xref: /openbmc/qemu/tcg/tcg-op.c (revision a10b9d93)
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "exec/exec-all.h"
#include "tcg/tcg.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-mo.h"
#include "trace-tcg.h"
#include "trace/mem.h"
#include "exec/plugin-gen.h"

/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif
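
/*
 * On a 64-bit backend there is no separate high half, so any use of
 * TCGV_LOW or TCGV_HIGH that is not guarded by a TCG_TARGET_REG_BITS
 * check the compiler can fold away resolves to the undefined
 * *_link_error functions above and fails at link time instead of
 * miscompiling silently.
 */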

void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
}

void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
}

void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}

void tcg_gen_mb(TCGBar mb_type)
{
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}
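
/*
 * A barrier only matters when another vCPU can observe this one's
 * memory accesses, i.e. when the TB is compiled for parallel execution
 * (CF_PARALLEL); otherwise it would be a no-op, so none is emitted.
 */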

/* 32-bit ops */

void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
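
/*
 * The div2 form takes the dividend as a 32/32 pair: the first two
 * operands are the quotient and remainder outputs, followed by the low
 * half, the high half (the sign extension built above) and the
 * divisor.  The pairing of outputs here and in tcg_gen_rem_i32 below
 * fixes which output is which.
 */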

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
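
/*
 * The div-based fallback uses the truncating-division identity
 * rem(a, b) == a - (a / b) * b, which holds equally for the unsigned
 * variants below.
 */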

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}
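
/*
 * Zero-extending to 64 bits prepends 32 leading zeros, so the 64-bit
 * count is 32 too high; the final subi removes the bias, and adding 32
 * to arg2 (the value returned for a zero input) keeps that case
 * correct as well.
 */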

void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
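
/*
 * Both expansions rest on bit identities: (x - 1) & ~x is a mask of
 * exactly ctz(x) low one bits, so its population count is the answer,
 * while x & -x isolates the lowest set bit, whose position follows
 * from 31 - clz == 31 ^ clz for clz in [0, 31].  The x == 0 case is
 * patched up by the final movcond, which substitutes arg2.
 */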

void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
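
/*
 * clrsb counts the redundant copies of the sign bit.  XORing with the
 * sign mask (arg >> 31) turns those copies into leading zeros, so the
 * count is clz(arg ^ (arg >> 31)) - 1; passing 32 as the clz value for
 * a zero input makes arg == 0 and arg == -1 both yield 31, as
 * required.
 */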

void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}
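
/*
 * Without a native rotate, rotl(x, n) is composed from the two shifts
 * (x << n) | (x >> (32 - n)), with subfi computing 32 - n in the
 * variable case; a right rotate by a constant simply becomes a left
 * rotate by 32 - n.
 */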

void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
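
/*
 * The generic path computes ret = (arg1 & ~(mask << ofs)) |
 * ((arg2 & mask) << ofs) with mask = (1u << len) - 1; the pre-shift
 * AND is skipped when ofs + len == 32, since the shift itself then
 * discards the unwanted high bits.
 */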

void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}

void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
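
/*
 * The default case extracts by aligning the field at the top and
 * shifting it back down: (arg << (32 - len - ofs)) >> (32 - len),
 * using a logical right shift here and an arithmetic one in the
 * sextract variant below.
 */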

void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
/*
 * Extract 32 bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
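
/*
 * Viewed as a funnel shift, extract2 returns bits [ofs, ofs + 31] of
 * the 64-bit value ah:al; when both halves are the same register this
 * degenerates to a rotate, and the fallback rebuilds the result from a
 * shift of the low half plus a deposit of the high half.
 */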

void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
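
/*
 * The fallback is branchless: setcond produces 0 or 1, negation widens
 * that to an all-zeros or all-ones mask, and
 * (v1 & mask) | (v2 & ~mask) then selects between the two values.
 */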

void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
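
/*
 * Both 32-bit expansions above correct an unsigned double-word
 * product: reading an operand x as signed subtracts 2^32 * x[31] from
 * its unsigned value, so only the high result word needs adjustment,
 * by (arg1 < 0 ? arg2 : 0) and, in the fully signed case, also
 * (arg2 < 0 ? arg1 : 0); the sari/and/sub sequences compute exactly
 * those terms.
 */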

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}

void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
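
/*
 * Branchless absolute value: with t = a >> 31 (0 or -1), (a ^ t) - t
 * conditionally complements and increments, i.e. negates, exactly when
 * a is negative.
 */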

/* 64-bit ops */

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64.  */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGTemp *ts = tcgv_i64_temp(arg);

    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
    if (ts->kind == TEMP_CONST) {
        tcg_gen_movi_i64(ret, ts->val);
    } else {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}

void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
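
/*
 * Schoolbook multiplication mod 2^64: with each operand split as h:l,
 * (h1*2^32 + l1) * (h2*2^32 + l2) == l1*l2 + 2^32*(l1*h2 + h1*l2),
 * the h1*h2 term vanishing mod 2^64.  mulu2 supplies the full 64-bit
 * l1*l2; the two cross products only touch the high word.
 */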

#else

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
}

#endif /* TCG_TARGET_REG_BITS == 32 */

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

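/*
 * Constant double-word shift for 32-bit hosts.  A count of 32 or more
 * moves one half into the other (with sign or zero fill); a smaller
 * count funnels bits between the halves, via extract2 when available
 * or a shift plus deposit otherwise.
 */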
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}

void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

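/*
 * On a 32-bit host a 64-bit comparison cannot be a single op, so the
 * brcond2/setcond2 opcodes above take both halves of each operand;
 * setcond2 only produces the low word of the result, so the high half
 * is cleared explicitly.
 */
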
1502 void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1503 {
1504     if (arg2 == 0) {
1505         tcg_gen_movi_i64(ret, 0);
1506     } else if (is_power_of_2(arg2)) {
1507         tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
1508     } else {
1509         TCGv_i64 t0 = tcg_const_i64(arg2);
1510         tcg_gen_mul_i64(ret, arg1, t0);
1511         tcg_temp_free_i64(t0);
1512     }
1513 }
1514 
1515 void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1516 {
1517     if (TCG_TARGET_HAS_div_i64) {
1518         tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
1519     } else if (TCG_TARGET_HAS_div2_i64) {
1520         TCGv_i64 t0 = tcg_temp_new_i64();
1521         tcg_gen_sari_i64(t0, arg1, 63);
1522         tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
1523         tcg_temp_free_i64(t0);
1524     } else {
1525         gen_helper_div_i64(ret, arg1, arg2);
1526     }
1527 }
1528 
1529 void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1530 {
1531     if (TCG_TARGET_HAS_rem_i64) {
1532         tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
1533     } else if (TCG_TARGET_HAS_div_i64) {
1534         TCGv_i64 t0 = tcg_temp_new_i64();
1535         tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
1536         tcg_gen_mul_i64(t0, t0, arg2);
1537         tcg_gen_sub_i64(ret, arg1, t0);
1538         tcg_temp_free_i64(t0);
1539     } else if (TCG_TARGET_HAS_div2_i64) {
1540         TCGv_i64 t0 = tcg_temp_new_i64();
1541         tcg_gen_sari_i64(t0, arg1, 63);
1542         tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
1543         tcg_temp_free_i64(t0);
1544     } else {
1545         gen_helper_rem_i64(ret, arg1, arg2);
1546     }
1547 }
1548 
1549 void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1550 {
1551     if (TCG_TARGET_HAS_div_i64) {
1552         tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
1553     } else if (TCG_TARGET_HAS_div2_i64) {
1554         TCGv_i64 t0 = tcg_temp_new_i64();
1555         tcg_gen_movi_i64(t0, 0);
1556         tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
1557         tcg_temp_free_i64(t0);
1558     } else {
1559         gen_helper_divu_i64(ret, arg1, arg2);
1560     }
1561 }
1562 
1563 void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1564 {
1565     if (TCG_TARGET_HAS_rem_i64) {
1566         tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
1567     } else if (TCG_TARGET_HAS_div_i64) {
1568         TCGv_i64 t0 = tcg_temp_new_i64();
1569         tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
1570         tcg_gen_mul_i64(t0, t0, arg2);
1571         tcg_gen_sub_i64(ret, arg1, t0);
1572         tcg_temp_free_i64(t0);
1573     } else if (TCG_TARGET_HAS_div2_i64) {
1574         TCGv_i64 t0 = tcg_temp_new_i64();
1575         tcg_gen_movi_i64(t0, 0);
1576         tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
1577         tcg_temp_free_i64(t0);
1578     } else {
1579         gen_helper_remu_i64(ret, arg1, arg2);
1580     }
1581 }
1582 
1583 void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
1584 {
1585     if (TCG_TARGET_REG_BITS == 32) {
1586         tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1587         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1588     } else if (TCG_TARGET_HAS_ext8s_i64) {
1589         tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
1590     } else {
1591         tcg_gen_shli_i64(ret, arg, 56);
1592         tcg_gen_sari_i64(ret, ret, 56);
1593     }
1594 }
1595 
1596 void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
1597 {
1598     if (TCG_TARGET_REG_BITS == 32) {
1599         tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1600         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1601     } else if (TCG_TARGET_HAS_ext16s_i64) {
1602         tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
1603     } else {
1604         tcg_gen_shli_i64(ret, arg, 48);
1605         tcg_gen_sari_i64(ret, ret, 48);
1606     }
1607 }
1608 
1609 void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
1610 {
1611     if (TCG_TARGET_REG_BITS == 32) {
1612         tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1613         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1614     } else if (TCG_TARGET_HAS_ext32s_i64) {
1615         tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
1616     } else {
1617         tcg_gen_shli_i64(ret, arg, 32);
1618         tcg_gen_sari_i64(ret, ret, 32);
1619     }
1620 }
1621 
1622 void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
1623 {
1624     if (TCG_TARGET_REG_BITS == 32) {
1625         tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1626         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1627     } else if (TCG_TARGET_HAS_ext8u_i64) {
1628         tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
1629     } else {
1630         tcg_gen_andi_i64(ret, arg, 0xffu);
1631     }
1632 }
1633 
1634 void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
1635 {
1636     if (TCG_TARGET_REG_BITS == 32) {
1637         tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1638         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1639     } else if (TCG_TARGET_HAS_ext16u_i64) {
1640         tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
1641     } else {
1642         tcg_gen_andi_i64(ret, arg, 0xffffu);
1643     }
1644 }
1645 
1646 void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
1647 {
1648     if (TCG_TARGET_REG_BITS == 32) {
1649         tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1650         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1651     } else if (TCG_TARGET_HAS_ext32u_i64) {
1652         tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
1653     } else {
1654         tcg_gen_andi_i64(ret, arg, 0xffffffffu);
1655     }
1656 }
1657 
1658 /* Note: we assume the six high bytes are set to zero */
1659 void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
1660 {
1661     if (TCG_TARGET_REG_BITS == 32) {
1662         tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1663         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1664     } else if (TCG_TARGET_HAS_bswap16_i64) {
1665         tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
1666     } else {
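        /* ret = (arg >> 8) | ((arg & 0xff) << 8); per the note above,
           arg >> 8 leaves only the high byte of the 16-bit value.  */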
1667         TCGv_i64 t0 = tcg_temp_new_i64();
1668 
1669         tcg_gen_ext8u_i64(t0, arg);
1670         tcg_gen_shli_i64(t0, t0, 8);
1671         tcg_gen_shri_i64(ret, arg, 8);
1672         tcg_gen_or_i64(ret, ret, t0);
1673         tcg_temp_free_i64(t0);
1674     }
1675 }
1676 
1677 /* Note: we assume the four high bytes are set to zero */
1678 void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
1679 {
1680     if (TCG_TARGET_REG_BITS == 32) {
1681         tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1682         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1683     } else if (TCG_TARGET_HAS_bswap32_i64) {
1684         tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
1685     } else {
1686         TCGv_i64 t0 = tcg_temp_new_i64();
1687         TCGv_i64 t1 = tcg_temp_new_i64();
1688         TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);
1689 
1690                                         /* arg = ....abcd */
1691         tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .....abc */
1692         tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .....b.d */
1693         tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .....a.c */
1694         tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = ....b.d. */
1695         tcg_gen_or_i64(ret, t0, t1);    /* ret = ....badc */
1696 
1697         tcg_gen_shli_i64(t1, ret, 48);  /*  t1 = dc...... */
1698         tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ......ba */
1699         tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
1700         tcg_gen_or_i64(ret, t0, t1);    /* ret = ....dcba */
1701 
1702         tcg_temp_free_i64(t0);
1703         tcg_temp_free_i64(t1);
1704     }
1705 }
1706 
1707 void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
1708 {
1709     if (TCG_TARGET_REG_BITS == 32) {
1710         TCGv_i32 t0, t1;
1711         t0 = tcg_temp_new_i32();
1712         t1 = tcg_temp_new_i32();
1713 
1714         tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
1715         tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
1716         tcg_gen_mov_i32(TCGV_LOW(ret), t1);
1717         tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
1718         tcg_temp_free_i32(t0);
1719         tcg_temp_free_i32(t1);
1720     } else if (TCG_TARGET_HAS_bswap64_i64) {
1721         tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
1722     } else {
1723         TCGv_i64 t0 = tcg_temp_new_i64();
1724         TCGv_i64 t1 = tcg_temp_new_i64();
1725         TCGv_i64 t2 = tcg_temp_new_i64();
1726 
1727                                         /* arg = abcdefgh */
1728         tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
1729         tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
1730         tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
1731         tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
1732         tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
1733         tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */
1734 
1735         tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
1736         tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
1737         tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
1738         tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
1739         tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
1740         tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */
1741 
1742         tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
1743         tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
1744         tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */
1745 
1746         tcg_temp_free_i64(t0);
1747         tcg_temp_free_i64(t1);
1748         tcg_temp_free_i64(t2);
1749     }
1750 }
1751 
1752 void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1753 {
1754     if (TCG_TARGET_REG_BITS == 32) {
1755         tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1756         tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1757     } else if (TCG_TARGET_HAS_not_i64) {
1758         tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1759     } else {
1760         tcg_gen_xori_i64(ret, arg, -1);
1761     }
1762 }
1763 
1764 void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1765 {
1766     if (TCG_TARGET_REG_BITS == 32) {
1767         tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1768         tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1769     } else if (TCG_TARGET_HAS_andc_i64) {
1770         tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1771     } else {
1772         TCGv_i64 t0 = tcg_temp_new_i64();
1773         tcg_gen_not_i64(t0, arg2);
1774         tcg_gen_and_i64(ret, arg1, t0);
1775         tcg_temp_free_i64(t0);
1776     }
1777 }
1778 
1779 void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1780 {
1781     if (TCG_TARGET_REG_BITS == 32) {
1782         tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1783         tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1784     } else if (TCG_TARGET_HAS_eqv_i64) {
1785         tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1786     } else {
1787         tcg_gen_xor_i64(ret, arg1, arg2);
1788         tcg_gen_not_i64(ret, ret);
1789     }
1790 }
1791 
1792 void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1793 {
1794     if (TCG_TARGET_REG_BITS == 32) {
1795         tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1796         tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1797     } else if (TCG_TARGET_HAS_nand_i64) {
1798         tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
1799     } else {
1800         tcg_gen_and_i64(ret, arg1, arg2);
1801         tcg_gen_not_i64(ret, ret);
1802     }
1803 }
1804 
1805 void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1806 {
1807     if (TCG_TARGET_REG_BITS == 32) {
1808         tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1809         tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1810     } else if (TCG_TARGET_HAS_nor_i64) {
1811         tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
1812     } else {
1813         tcg_gen_or_i64(ret, arg1, arg2);
1814         tcg_gen_not_i64(ret, ret);
1815     }
1816 }
1817 
1818 void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1819 {
1820     if (TCG_TARGET_REG_BITS == 32) {
1821         tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1822         tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1823     } else if (TCG_TARGET_HAS_orc_i64) {
1824         tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
1825     } else {
1826         TCGv_i64 t0 = tcg_temp_new_i64();
1827         tcg_gen_not_i64(t0, arg2);
1828         tcg_gen_or_i64(ret, arg1, t0);
1829         tcg_temp_free_i64(t0);
1830     }
1831 }
1832 
1833 void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1834 {
1835     if (TCG_TARGET_HAS_clz_i64) {
1836         tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
1837     } else {
1838         gen_helper_clz_i64(ret, arg1, arg2);
1839     }
1840 }
1841 
1842 void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1843 {
1844     if (TCG_TARGET_REG_BITS == 32
1845         && TCG_TARGET_HAS_clz_i32
1846         && arg2 <= 0xffffffffu) {
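        /* If the high half is nonzero, the result is its 32-bit clz;
           otherwise it is 32 plus the clz of the low half.  Computing
           the low-half count first (with default arg2 - 32) and using
           it as the default of the high-half op merges the cases.  */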
1847         TCGv_i32 t = tcg_temp_new_i32();
1848         tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
1849         tcg_gen_addi_i32(t, t, 32);
1850         tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
1851         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1852         tcg_temp_free_i32(t);
1853     } else {
1854         TCGv_i64 t0 = tcg_const_i64(arg2);
1855         tcg_gen_clz_i64(ret, arg1, t0);
1856         tcg_temp_free_i64(t0);
1857     }
1858 }
1859 
1860 void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1861 {
1862     if (TCG_TARGET_HAS_ctz_i64) {
1863         tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
1864     } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
1865         TCGv_i64 z, t = tcg_temp_new_i64();
1866 
1867         if (TCG_TARGET_HAS_ctpop_i64) {
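            /* (x - 1) & ~x sets exactly the bits below the lowest set
               bit of x, so its population count equals ctz(x),
               e.g. x = 0b1000 -> 0b0111 -> ctpop = 3.  */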
1868             tcg_gen_subi_i64(t, arg1, 1);
1869             tcg_gen_andc_i64(t, t, arg1);
1870             tcg_gen_ctpop_i64(t, t);
1871         } else {
1872             /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
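            /* x & -x isolates the lowest set bit; for a bit at
               position k, clz returns 63 - k and the xor with 63
               recovers k.  x == 0 is fixed up by the movcond below.  */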
1873             tcg_gen_neg_i64(t, arg1);
1874             tcg_gen_and_i64(t, t, arg1);
1875             tcg_gen_clzi_i64(t, t, 64);
1876             tcg_gen_xori_i64(t, t, 63);
1877         }
1878         z = tcg_constant_i64(0);
1879         tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
1880         tcg_temp_free_i64(t);
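        /* tcg_temp_free on a constant temporary is silently ignored,
           so freeing z is harmless.  */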
1881         tcg_temp_free_i64(z);
1882     } else {
1883         gen_helper_ctz_i64(ret, arg1, arg2);
1884     }
1885 }
1886 
1887 void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1888 {
1889     if (TCG_TARGET_REG_BITS == 32
1890         && TCG_TARGET_HAS_ctz_i32
1891         && arg2 <= 0xffffffffu) {
1892         TCGv_i32 t32 = tcg_temp_new_i32();
1893         tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
1894         tcg_gen_addi_i32(t32, t32, 32);
1895         tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
1896         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1897         tcg_temp_free_i32(t32);
1898     } else if (!TCG_TARGET_HAS_ctz_i64
1899                && TCG_TARGET_HAS_ctpop_i64
1900                && arg2 == 64) {
1901         /* This equivalence has the advantage of not requiring a fixup.  */
1902         TCGv_i64 t = tcg_temp_new_i64();
1903         tcg_gen_subi_i64(t, arg1, 1);
1904         tcg_gen_andc_i64(t, t, arg1);
1905         tcg_gen_ctpop_i64(ret, t);
1906         tcg_temp_free_i64(t);
1907     } else {
1908         TCGv_i64 t0 = tcg_const_i64(arg2);
1909         tcg_gen_ctz_i64(ret, arg1, t0);
1910         tcg_temp_free_i64(t0);
1911     }
1912 }
1913 
1914 void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
1915 {
1916     if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
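        /* XOR with the broadcast sign bit turns redundant sign bits
           into leading zeros; clz then counts the sign bit plus its
           redundant copies, so subtract one.  */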
1917         TCGv_i64 t = tcg_temp_new_i64();
1918         tcg_gen_sari_i64(t, arg, 63);
1919         tcg_gen_xor_i64(t, t, arg);
1920         tcg_gen_clzi_i64(t, t, 64);
1921         tcg_gen_subi_i64(ret, t, 1);
1922         tcg_temp_free_i64(t);
1923     } else {
1924         gen_helper_clrsb_i64(ret, arg);
1925     }
1926 }
1927 
1928 void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
1929 {
1930     if (TCG_TARGET_HAS_ctpop_i64) {
1931         tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
1932     } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
1933         tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
1934         tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
1935         tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
1936         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1937     } else {
1938         gen_helper_ctpop_i64(ret, arg1);
1939     }
1940 }
1941 
1942 void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1943 {
1944     if (TCG_TARGET_HAS_rot_i64) {
1945         tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
1946     } else {
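        /* Expand as ret = (arg1 << arg2) | (arg1 >> (64 - arg2)).  */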
1947         TCGv_i64 t0, t1;
1948         t0 = tcg_temp_new_i64();
1949         t1 = tcg_temp_new_i64();
1950         tcg_gen_shl_i64(t0, arg1, arg2);
1951         tcg_gen_subfi_i64(t1, 64, arg2);
1952         tcg_gen_shr_i64(t1, arg1, t1);
1953         tcg_gen_or_i64(ret, t0, t1);
1954         tcg_temp_free_i64(t0);
1955         tcg_temp_free_i64(t1);
1956     }
1957 }
1958 
1959 void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1960 {
1961     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
1962     /* Rotation by zero is a plain move; it also avoids an undefined shift by 64 below.  */
1963     if (arg2 == 0) {
1964         tcg_gen_mov_i64(ret, arg1);
1965     } else if (TCG_TARGET_HAS_rot_i64) {
1966         tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
1967     } else {
1968         TCGv_i64 t0, t1;
1969         t0 = tcg_temp_new_i64();
1970         t1 = tcg_temp_new_i64();
1971         tcg_gen_shli_i64(t0, arg1, arg2);
1972         tcg_gen_shri_i64(t1, arg1, 64 - arg2);
1973         tcg_gen_or_i64(ret, t0, t1);
1974         tcg_temp_free_i64(t0);
1975         tcg_temp_free_i64(t1);
1976     }
1977 }
1978 
1979 void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1980 {
1981     if (TCG_TARGET_HAS_rot_i64) {
1982         tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
1983     } else {
1984         TCGv_i64 t0, t1;
1985         t0 = tcg_temp_new_i64();
1986         t1 = tcg_temp_new_i64();
1987         tcg_gen_shr_i64(t0, arg1, arg2);
1988         tcg_gen_subfi_i64(t1, 64, arg2);
1989         tcg_gen_shl_i64(t1, arg1, t1);
1990         tcg_gen_or_i64(ret, t0, t1);
1991         tcg_temp_free_i64(t0);
1992         tcg_temp_free_i64(t1);
1993     }
1994 }
1995 
1996 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1997 {
1998     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
1999     /* Rotation by zero is a plain move; it also avoids passing 64 to rotli below.  */
2000     if (arg2 == 0) {
2001         tcg_gen_mov_i64(ret, arg1);
2002     } else {
2003         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2004     }
2005 }
2006 
2007 void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
2008                          unsigned int ofs, unsigned int len)
2009 {
2010     uint64_t mask;
2011     TCGv_i64 t1;
2012 
2013     tcg_debug_assert(ofs < 64);
2014     tcg_debug_assert(len > 0);
2015     tcg_debug_assert(len <= 64);
2016     tcg_debug_assert(ofs + len <= 64);
2017 
2018     if (len == 64) {
2019         tcg_gen_mov_i64(ret, arg2);
2020         return;
2021     }
2022     if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2023         tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
2024         return;
2025     }
2026 
2027     if (TCG_TARGET_REG_BITS == 32) {
2028         if (ofs >= 32) {
2029             tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
2030                                 TCGV_LOW(arg2), ofs - 32, len);
2031             tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2032             return;
2033         }
2034         if (ofs + len <= 32) {
2035             tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
2036                                 TCGV_LOW(arg2), ofs, len);
2037             tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2038             return;
2039         }
2040     }
2041 
2042     t1 = tcg_temp_new_i64();
2043 
2044     if (TCG_TARGET_HAS_extract2_i64) {
2045         if (ofs + len == 64) {
2046             tcg_gen_shli_i64(t1, arg1, len);
2047             tcg_gen_extract2_i64(ret, t1, arg2, len);
2048             goto done;
2049         }
2050         if (ofs == 0) {
2051             tcg_gen_extract2_i64(ret, arg1, arg2, len);
2052             tcg_gen_rotli_i64(ret, ret, len);
2053             goto done;
2054         }
2055     }
2056 
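    /* Generic expansion: clear the field in arg1, then OR in the
       masked and shifted arg2:
           ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs)
       When the field extends to bit 63, the shift itself discards
       arg2's excess high bits, so the pre-masking is skipped.  */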
2057     mask = (1ull << len) - 1;
2058     if (ofs + len < 64) {
2059         tcg_gen_andi_i64(t1, arg2, mask);
2060         tcg_gen_shli_i64(t1, t1, ofs);
2061     } else {
2062         tcg_gen_shli_i64(t1, arg2, ofs);
2063     }
2064     tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
2065     tcg_gen_or_i64(ret, ret, t1);
2066  done:
2067     tcg_temp_free_i64(t1);
2068 }
2069 
2070 void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
2071                            unsigned int ofs, unsigned int len)
2072 {
2073     tcg_debug_assert(ofs < 64);
2074     tcg_debug_assert(len > 0);
2075     tcg_debug_assert(len <= 64);
2076     tcg_debug_assert(ofs + len <= 64);
2077 
2078     if (ofs + len == 64) {
2079         tcg_gen_shli_i64(ret, arg, ofs);
2080     } else if (ofs == 0) {
2081         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2082     } else if (TCG_TARGET_HAS_deposit_i64
2083                && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2084         TCGv_i64 zero = tcg_constant_i64(0);
2085         tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
2086     } else {
2087         if (TCG_TARGET_REG_BITS == 32) {
2088             if (ofs >= 32) {
2089                 tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
2090                                       ofs - 32, len);
2091                 tcg_gen_movi_i32(TCGV_LOW(ret), 0);
2092                 return;
2093             }
2094             if (ofs + len <= 32) {
2095                 tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2096                 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2097                 return;
2098             }
2099         }
2100         /* To help two-operand hosts we prefer to zero-extend first,
2101            which allows ARG to stay live.  */
2102         switch (len) {
2103         case 32:
2104             if (TCG_TARGET_HAS_ext32u_i64) {
2105                 tcg_gen_ext32u_i64(ret, arg);
2106                 tcg_gen_shli_i64(ret, ret, ofs);
2107                 return;
2108             }
2109             break;
2110         case 16:
2111             if (TCG_TARGET_HAS_ext16u_i64) {
2112                 tcg_gen_ext16u_i64(ret, arg);
2113                 tcg_gen_shli_i64(ret, ret, ofs);
2114                 return;
2115             }
2116             break;
2117         case 8:
2118             if (TCG_TARGET_HAS_ext8u_i64) {
2119                 tcg_gen_ext8u_i64(ret, arg);
2120                 tcg_gen_shli_i64(ret, ret, ofs);
2121                 return;
2122             }
2123             break;
2124         }
2125         /* Otherwise prefer zero-extension over AND for code size.  */
2126         switch (ofs + len) {
2127         case 32:
2128             if (TCG_TARGET_HAS_ext32u_i64) {
2129                 tcg_gen_shli_i64(ret, arg, ofs);
2130                 tcg_gen_ext32u_i64(ret, ret);
2131                 return;
2132             }
2133             break;
2134         case 16:
2135             if (TCG_TARGET_HAS_ext16u_i64) {
2136                 tcg_gen_shli_i64(ret, arg, ofs);
2137                 tcg_gen_ext16u_i64(ret, ret);
2138                 return;
2139             }
2140             break;
2141         case 8:
2142             if (TCG_TARGET_HAS_ext8u_i64) {
2143                 tcg_gen_shli_i64(ret, arg, ofs);
2144                 tcg_gen_ext8u_i64(ret, ret);
2145                 return;
2146             }
2147             break;
2148         }
2149         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2150         tcg_gen_shli_i64(ret, ret, ofs);
2151     }
2152 }
2153 
2154 void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
2155                          unsigned int ofs, unsigned int len)
2156 {
2157     tcg_debug_assert(ofs < 64);
2158     tcg_debug_assert(len > 0);
2159     tcg_debug_assert(len <= 64);
2160     tcg_debug_assert(ofs + len <= 64);
2161 
2162     /* Canonicalize certain special cases, even if extract is supported.  */
2163     if (ofs + len == 64) {
2164         tcg_gen_shri_i64(ret, arg, 64 - len);
2165         return;
2166     }
2167     if (ofs == 0) {
2168         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2169         return;
2170     }
2171 
2172     if (TCG_TARGET_REG_BITS == 32) {
2173         /* Look for a 32-bit extract within one of the two words.  */
2174         if (ofs >= 32) {
2175             tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2176             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2177             return;
2178         }
2179         if (ofs + len <= 32) {
2180             tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2181             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2182             return;
2183         }
2184         /* The field is split across two words.  One double-word
2185            shift is better than two double-word shifts.  */
2186         goto do_shift_and;
2187     }
2188 
2189     if (TCG_TARGET_HAS_extract_i64
2190         && TCG_TARGET_extract_i64_valid(ofs, len)) {
2191         tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
2192         return;
2193     }
2194 
2195     /* Assume that zero-extension, if available, is cheaper than a shift.  */
2196     switch (ofs + len) {
2197     case 32:
2198         if (TCG_TARGET_HAS_ext32u_i64) {
2199             tcg_gen_ext32u_i64(ret, arg);
2200             tcg_gen_shri_i64(ret, ret, ofs);
2201             return;
2202         }
2203         break;
2204     case 16:
2205         if (TCG_TARGET_HAS_ext16u_i64) {
2206             tcg_gen_ext16u_i64(ret, arg);
2207             tcg_gen_shri_i64(ret, ret, ofs);
2208             return;
2209         }
2210         break;
2211     case 8:
2212         if (TCG_TARGET_HAS_ext8u_i64) {
2213             tcg_gen_ext8u_i64(ret, arg);
2214             tcg_gen_shri_i64(ret, ret, ofs);
2215             return;
2216         }
2217         break;
2218     }
2219 
2220     /* ??? Ideally we'd know what values are available for immediate AND.
2221        Assume that 8 bits are available, plus the special cases of 16 and 32,
2222        so that we get ext8u, ext16u, and ext32u.  */
2223     switch (len) {
2224     case 1 ... 8: case 16: case 32:
2225     do_shift_and:
2226         tcg_gen_shri_i64(ret, arg, ofs);
2227         tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
2228         break;
2229     default:
2230         tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2231         tcg_gen_shri_i64(ret, ret, 64 - len);
2232         break;
2233     }
2234 }
2235 
2236 void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
2237                           unsigned int ofs, unsigned int len)
2238 {
2239     tcg_debug_assert(ofs < 64);
2240     tcg_debug_assert(len > 0);
2241     tcg_debug_assert(len <= 64);
2242     tcg_debug_assert(ofs + len <= 64);
2243 
2244     /* Canonicalize certain special cases, even if sextract is supported.  */
2245     if (ofs + len == 64) {
2246         tcg_gen_sari_i64(ret, arg, 64 - len);
2247         return;
2248     }
2249     if (ofs == 0) {
2250         switch (len) {
2251         case 32:
2252             tcg_gen_ext32s_i64(ret, arg);
2253             return;
2254         case 16:
2255             tcg_gen_ext16s_i64(ret, arg);
2256             return;
2257         case 8:
2258             tcg_gen_ext8s_i64(ret, arg);
2259             return;
2260         }
2261     }
2262 
2263     if (TCG_TARGET_REG_BITS == 32) {
2264         /* Look for a 32-bit extract within one of the two words.  */
2265         if (ofs >= 32) {
2266             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2267         } else if (ofs + len <= 32) {
2268             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2269         } else if (ofs == 0) {
2270             tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2271             tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
2272             return;
2273         } else if (len > 32) {
2274             TCGv_i32 t = tcg_temp_new_i32();
2275             /* Extract the bits for the high word normally.  */
2276             tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
2277             /* Shift the field down for the low part.  */
2278             tcg_gen_shri_i64(ret, arg, ofs);
2279             /* Overwrite the shift into the high part.  */
2280             tcg_gen_mov_i32(TCGV_HIGH(ret), t);
2281             tcg_temp_free_i32(t);
2282             return;
2283         } else {
2284             /* Shift the field down for the low part, such that the
2285                field sits at the MSB.  */
2286             tcg_gen_shri_i64(ret, arg, ofs + len - 32);
2287             /* Shift the field down from the MSB, sign extending.  */
2288             tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
2289         }
2290         /* Sign-extend the field from 32 bits.  */
2291         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2292         return;
2293     }
2294 
2295     if (TCG_TARGET_HAS_sextract_i64
2296         && TCG_TARGET_extract_i64_valid(ofs, len)) {
2297         tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
2298         return;
2299     }
2300 
2301     /* Assume that sign-extension, if available, is cheaper than a shift.  */
2302     switch (ofs + len) {
2303     case 32:
2304         if (TCG_TARGET_HAS_ext32s_i64) {
2305             tcg_gen_ext32s_i64(ret, arg);
2306             tcg_gen_sari_i64(ret, ret, ofs);
2307             return;
2308         }
2309         break;
2310     case 16:
2311         if (TCG_TARGET_HAS_ext16s_i64) {
2312             tcg_gen_ext16s_i64(ret, arg);
2313             tcg_gen_sari_i64(ret, ret, ofs);
2314             return;
2315         }
2316         break;
2317     case 8:
2318         if (TCG_TARGET_HAS_ext8s_i64) {
2319             tcg_gen_ext8s_i64(ret, arg);
2320             tcg_gen_sari_i64(ret, ret, ofs);
2321             return;
2322         }
2323         break;
2324     }
2325     switch (len) {
2326     case 32:
2327         if (TCG_TARGET_HAS_ext32s_i64) {
2328             tcg_gen_shri_i64(ret, arg, ofs);
2329             tcg_gen_ext32s_i64(ret, ret);
2330             return;
2331         }
2332         break;
2333     case 16:
2334         if (TCG_TARGET_HAS_ext16s_i64) {
2335             tcg_gen_shri_i64(ret, arg, ofs);
2336             tcg_gen_ext16s_i64(ret, ret);
2337             return;
2338         }
2339         break;
2340     case 8:
2341         if (TCG_TARGET_HAS_ext8s_i64) {
2342             tcg_gen_shri_i64(ret, arg, ofs);
2343             tcg_gen_ext8s_i64(ret, ret);
2344             return;
2345         }
2346         break;
2347     }
2348     tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2349     tcg_gen_sari_i64(ret, ret, 64 - len);
2350 }
2351 
2352 /*
2353  * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2354  * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2355  */
2356 void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
2357                           unsigned int ofs)
2358 {
2359     tcg_debug_assert(ofs <= 64);
2360     if (ofs == 0) {
2361         tcg_gen_mov_i64(ret, al);
2362     } else if (ofs == 64) {
2363         tcg_gen_mov_i64(ret, ah);
2364     } else if (al == ah) {
2365         tcg_gen_rotri_i64(ret, al, ofs);
2366     } else if (TCG_TARGET_HAS_extract2_i64) {
2367         tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
2368     } else {
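        /* ret = (al >> ofs) | (ah << (64 - ofs)): shift the low part
           down, then deposit ah's low ofs bits into the top.  */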
2369         TCGv_i64 t0 = tcg_temp_new_i64();
2370         tcg_gen_shri_i64(t0, al, ofs);
2371         tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
2372         tcg_temp_free_i64(t0);
2373     }
2374 }
2375 
2376 void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
2377                          TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
2378 {
2379     if (cond == TCG_COND_ALWAYS) {
2380         tcg_gen_mov_i64(ret, v1);
2381     } else if (cond == TCG_COND_NEVER) {
2382         tcg_gen_mov_i64(ret, v2);
2383     } else if (TCG_TARGET_REG_BITS == 32) {
2384         TCGv_i32 t0 = tcg_temp_new_i32();
2385         TCGv_i32 t1 = tcg_temp_new_i32();
2386         tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
2387                          TCGV_LOW(c1), TCGV_HIGH(c1),
2388                          TCGV_LOW(c2), TCGV_HIGH(c2), cond);
2389 
2390         if (TCG_TARGET_HAS_movcond_i32) {
2391             tcg_gen_movi_i32(t1, 0);
2392             tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
2393                                 TCGV_LOW(v1), TCGV_LOW(v2));
2394             tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
2395                                 TCGV_HIGH(v1), TCGV_HIGH(v2));
2396         } else {
2397             tcg_gen_neg_i32(t0, t0);
2398 
2399             tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
2400             tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
2401             tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);
2402 
2403             tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
2404             tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
2405             tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
2406         }
2407         tcg_temp_free_i32(t0);
2408         tcg_temp_free_i32(t1);
2409     } else if (TCG_TARGET_HAS_movcond_i64) {
2410         tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
2411     } else {
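        /* No movcond: build a mask from the comparison result and
           select with it: ret = (v1 & -cond) | (v2 & ~(-cond)).  */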
2412         TCGv_i64 t0 = tcg_temp_new_i64();
2413         TCGv_i64 t1 = tcg_temp_new_i64();
2414         tcg_gen_setcond_i64(cond, t0, c1, c2);
2415         tcg_gen_neg_i64(t0, t0);
2416         tcg_gen_and_i64(t1, v1, t0);
2417         tcg_gen_andc_i64(ret, v2, t0);
2418         tcg_gen_or_i64(ret, ret, t1);
2419         tcg_temp_free_i64(t0);
2420         tcg_temp_free_i64(t1);
2421     }
2422 }
2423 
2424 void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2425                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2426 {
2427     if (TCG_TARGET_HAS_add2_i64) {
2428         tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
2429     } else {
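        /* The carry out of the low half is (al + bl) < al as an
           unsigned comparison; add it into the high half.  */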
2430         TCGv_i64 t0 = tcg_temp_new_i64();
2431         TCGv_i64 t1 = tcg_temp_new_i64();
2432         tcg_gen_add_i64(t0, al, bl);
2433         tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
2434         tcg_gen_add_i64(rh, ah, bh);
2435         tcg_gen_add_i64(rh, rh, t1);
2436         tcg_gen_mov_i64(rl, t0);
2437         tcg_temp_free_i64(t0);
2438         tcg_temp_free_i64(t1);
2439     }
2440 }
2441 
2442 void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2443                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2444 {
2445     if (TCG_TARGET_HAS_sub2_i64) {
2446         tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
2447     } else {
2448         TCGv_i64 t0 = tcg_temp_new_i64();
2449         TCGv_i64 t1 = tcg_temp_new_i64();
2450         tcg_gen_sub_i64(t0, al, bl);
2451         tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
2452         tcg_gen_sub_i64(rh, ah, bh);
2453         tcg_gen_sub_i64(rh, rh, t1);
2454         tcg_gen_mov_i64(rl, t0);
2455         tcg_temp_free_i64(t0);
2456         tcg_temp_free_i64(t1);
2457     }
2458 }
2459 
2460 void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2461 {
2462     if (TCG_TARGET_HAS_mulu2_i64) {
2463         tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
2464     } else if (TCG_TARGET_HAS_muluh_i64) {
2465         TCGv_i64 t = tcg_temp_new_i64();
2466         tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2467         tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
2468         tcg_gen_mov_i64(rl, t);
2469         tcg_temp_free_i64(t);
2470     } else {
2471         TCGv_i64 t0 = tcg_temp_new_i64();
2472         tcg_gen_mul_i64(t0, arg1, arg2);
2473         gen_helper_muluh_i64(rh, arg1, arg2);
2474         tcg_gen_mov_i64(rl, t0);
2475         tcg_temp_free_i64(t0);
2476     }
2477 }
2478 
2479 void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2480 {
2481     if (TCG_TARGET_HAS_muls2_i64) {
2482         tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
2483     } else if (TCG_TARGET_HAS_mulsh_i64) {
2484         TCGv_i64 t = tcg_temp_new_i64();
2485         tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2486         tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
2487         tcg_gen_mov_i64(rl, t);
2488         tcg_temp_free_i64(t);
2489     } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
2490         TCGv_i64 t0 = tcg_temp_new_i64();
2491         TCGv_i64 t1 = tcg_temp_new_i64();
2492         TCGv_i64 t2 = tcg_temp_new_i64();
2493         TCGv_i64 t3 = tcg_temp_new_i64();
2494         tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2495         /* Adjust for negative inputs.  */
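        /* A negative a represents a_u - 2^64, so the signed high half
           is hi(a_u * b_u) - sign(a) * b - sign(b) * a; the 2^128
           cross term vanishes modulo 2^128.  */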
2496         tcg_gen_sari_i64(t2, arg1, 63);
2497         tcg_gen_sari_i64(t3, arg2, 63);
2498         tcg_gen_and_i64(t2, t2, arg2);
2499         tcg_gen_and_i64(t3, t3, arg1);
2500         tcg_gen_sub_i64(rh, t1, t2);
2501         tcg_gen_sub_i64(rh, rh, t3);
2502         tcg_gen_mov_i64(rl, t0);
2503         tcg_temp_free_i64(t0);
2504         tcg_temp_free_i64(t1);
2505         tcg_temp_free_i64(t2);
2506         tcg_temp_free_i64(t3);
2507     } else {
2508         TCGv_i64 t0 = tcg_temp_new_i64();
2509         tcg_gen_mul_i64(t0, arg1, arg2);
2510         gen_helper_mulsh_i64(rh, arg1, arg2);
2511         tcg_gen_mov_i64(rl, t0);
2512         tcg_temp_free_i64(t0);
2513     }
2514 }
2515 
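/*
 * Widening multiply of signed arg1 by unsigned arg2, derived from the
 * unsigned product: a negative arg1 represents arg1_u - 2^64, so only
 * arg2 needs to be subtracted from the high half of the result.
 */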
2516 void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2517 {
2518     TCGv_i64 t0 = tcg_temp_new_i64();
2519     TCGv_i64 t1 = tcg_temp_new_i64();
2520     TCGv_i64 t2 = tcg_temp_new_i64();
2521     tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2522     /* Adjust for negative input for the signed arg1.  */
2523     tcg_gen_sari_i64(t2, arg1, 63);
2524     tcg_gen_and_i64(t2, t2, arg2);
2525     tcg_gen_sub_i64(rh, t1, t2);
2526     tcg_gen_mov_i64(rl, t0);
2527     tcg_temp_free_i64(t0);
2528     tcg_temp_free_i64(t1);
2529     tcg_temp_free_i64(t2);
2530 }
2531 
2532 void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
2533 {
2534     tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
2535 }
2536 
2537 void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
2538 {
2539     tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
2540 }
2541 
2542 void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
2543 {
2544     tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
2545 }
2546 
2547 void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
2548 {
2549     tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
2550 }
2551 
2552 void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
2553 {
2554     TCGv_i64 t = tcg_temp_new_i64();
2555 
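    /* Branchless absolute value: t is 0 or -1 from the sign bit, and
       (a ^ t) - t is a when t == 0 and ~a + 1 == -a when t == -1.  */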
2556     tcg_gen_sari_i64(t, a, 63);
2557     tcg_gen_xor_i64(ret, a, t);
2558     tcg_gen_sub_i64(ret, ret, t);
2559     tcg_temp_free_i64(t);
2560 }
2561 
2562 /* Size changing operations.  */
2563 
2564 void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
2565 {
2566     if (TCG_TARGET_REG_BITS == 32) {
2567         tcg_gen_mov_i32(ret, TCGV_LOW(arg));
2568     } else if (TCG_TARGET_HAS_extrl_i64_i32) {
2569         tcg_gen_op2(INDEX_op_extrl_i64_i32,
2570                     tcgv_i32_arg(ret), tcgv_i64_arg(arg));
2571     } else {
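        /* On a 64-bit host without extrl, an i32 temp aliases the low
           half of the i64 register, so a type-punned 32-bit move is
           sufficient.  */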
2572         tcg_gen_mov_i32(ret, (TCGv_i32)arg);
2573     }
2574 }
2575 
2576 void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
2577 {
2578     if (TCG_TARGET_REG_BITS == 32) {
2579         tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
2580     } else if (TCG_TARGET_HAS_extrh_i64_i32) {
2581         tcg_gen_op2(INDEX_op_extrh_i64_i32,
2582                     tcgv_i32_arg(ret), tcgv_i64_arg(arg));
2583     } else {
2584         TCGv_i64 t = tcg_temp_new_i64();
2585         tcg_gen_shri_i64(t, arg, 32);
2586         tcg_gen_mov_i32(ret, (TCGv_i32)t);
2587         tcg_temp_free_i64(t);
2588     }
2589 }
2590 
2591 void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
2592 {
2593     if (TCG_TARGET_REG_BITS == 32) {
2594         tcg_gen_mov_i32(TCGV_LOW(ret), arg);
2595         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2596     } else {
2597         tcg_gen_op2(INDEX_op_extu_i32_i64,
2598                     tcgv_i64_arg(ret), tcgv_i32_arg(arg));
2599     }
2600 }
2601 
2602 void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
2603 {
2604     if (TCG_TARGET_REG_BITS == 32) {
2605         tcg_gen_mov_i32(TCGV_LOW(ret), arg);
2606         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2607     } else {
2608         tcg_gen_op2(INDEX_op_ext_i32_i64,
2609                     tcgv_i64_arg(ret), tcgv_i32_arg(arg));
2610     }
2611 }
2612 
2613 void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
2614 {
2615     TCGv_i64 tmp;
2616 
2617     if (TCG_TARGET_REG_BITS == 32) {
2618         tcg_gen_mov_i32(TCGV_LOW(dest), low);
2619         tcg_gen_mov_i32(TCGV_HIGH(dest), high);
2620         return;
2621     }
2622 
2623     tmp = tcg_temp_new_i64();
2624     /* These extensions are only needed for type correctness.
2625        We may be able to do better given target specific information.  */
2626     tcg_gen_extu_i32_i64(tmp, high);
2627     tcg_gen_extu_i32_i64(dest, low);
2628     /* If deposit is available, use it.  Otherwise use the extra
2629        knowledge that we have of the zero-extensions above.  */
2630     if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
2631         tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
2632     } else {
2633         tcg_gen_shli_i64(tmp, tmp, 32);
2634         tcg_gen_or_i64(dest, dest, tmp);
2635     }
2636     tcg_temp_free_i64(tmp);
2637 }
2638 
2639 void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
2640 {
2641     if (TCG_TARGET_REG_BITS == 32) {
2642         tcg_gen_mov_i32(lo, TCGV_LOW(arg));
2643         tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
2644     } else {
2645         tcg_gen_extrl_i64_i32(lo, arg);
2646         tcg_gen_extrh_i64_i32(hi, arg);
2647     }
2648 }
2649 
2650 void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
2651 {
2652     tcg_gen_ext32u_i64(lo, arg);
2653     tcg_gen_shri_i64(hi, arg, 32);
2654 }
2655 
2656 /* QEMU specific operations.  */
2657 
2658 void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
2659 {
2660     /*
2661      * Let the jit code return the read-only version of the
2662      * TranslationBlock, so that the pc-relative distance from the
2663      * exit_tb code to the TB is minimized.  This improves the
2664      * utilization of pc-relative address loads.
2665      *
2666      * TODO: Move this to translator_loop, so that all const
2667      * TranslationBlock pointers refer to read-only memory.
2668      * This requires coordination with targets that do not use
2669      * the translator_loop.
2670      */
2671     uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;
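    /* TranslationBlock pointers are sufficiently aligned that the low
       bits of val can carry the exit index; the dispatch loop masks
       them back out via TB_EXIT_MASK.  */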
2672 
2673     if (tb == NULL) {
2674         tcg_debug_assert(idx == 0);
2675     } else if (idx <= TB_EXIT_IDXMAX) {
2676 #ifdef CONFIG_DEBUG_TCG
2677         /* This is an exit following a goto_tb.  Verify that we have
2678            seen this numbered exit before, via tcg_gen_goto_tb.  */
2679         tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
2680 #endif
2681         /* When not chaining, exit without indicating a link.  */
2682         if (qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
2683             val = 0;
2684         }
2685     } else {
2686         /* This is an exit via the exitreq label.  */
2687         tcg_debug_assert(idx == TB_EXIT_REQUESTED);
2688     }
2689 
2690     plugin_gen_disable_mem_helpers();
2691     tcg_gen_op1i(INDEX_op_exit_tb, val);
2692 }
2693 
2694 void tcg_gen_goto_tb(unsigned idx)
2695 {
2696     /* We only support two chained exits.  */
2697     tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
2698 #ifdef CONFIG_DEBUG_TCG
2699     /* Verify that we haven't seen this numbered exit before.  */
2700     tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
2701     tcg_ctx->goto_tb_issue_mask |= 1 << idx;
2702 #endif
2703     plugin_gen_disable_mem_helpers();
2704     /* When not chaining, we simply fall through to the "fallback" exit.  */
2705     if (!qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
2706         tcg_gen_op1i(INDEX_op_goto_tb, idx);
2707     }
2708 }
2709 
2710 void tcg_gen_lookup_and_goto_ptr(void)
2711 {
2712     if (TCG_TARGET_HAS_goto_ptr && !qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
2713         TCGv_ptr ptr;
2714 
2715         plugin_gen_disable_mem_helpers();
2716         ptr = tcg_temp_new_ptr();
2717         gen_helper_lookup_tb_ptr(ptr, cpu_env);
2718         tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
2719         tcg_temp_free_ptr(ptr);
2720     } else {
2721         tcg_gen_exit_tb(NULL, 0);
2722     }
2723 }
2724 
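/*
 * Canonicalize a MemOp for the backends: byte accesses need no byte
 * swap, a 32-bit access in a 32-bit context carries no sign, stores
 * never carry a sign, and a 64-bit access in a 32-bit context is a
 * programming error.
 */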
2725 static inline MemOp tcg_canonicalize_memop(MemOp op, bool is64, bool st)
2726 {
2727     /* Trigger the asserts within as early as possible.  */
2728     (void)get_alignment_bits(op);
2729 
2730     switch (op & MO_SIZE) {
2731     case MO_8:
2732         op &= ~MO_BSWAP;
2733         break;
2734     case MO_16:
2735         break;
2736     case MO_32:
2737         if (!is64) {
2738             op &= ~MO_SIGN;
2739         }
2740         break;
2741     case MO_64:
2742         if (!is64) {
2743             tcg_abort();
2744         }
2745         break;
2746     }
2747     if (st) {
2748         op &= ~MO_SIGN;
2749     }
2750     return op;
2751 }
2752 
2753 static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
2754                          MemOp memop, TCGArg idx)
2755 {
2756     TCGMemOpIdx oi = make_memop_idx(memop, idx);
2757 #if TARGET_LONG_BITS == 32
2758     tcg_gen_op3i_i32(opc, val, addr, oi);
2759 #else
2760     if (TCG_TARGET_REG_BITS == 32) {
2761         tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
2762     } else {
2763         tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_i64_arg(addr), oi);
2764     }
2765 #endif
2766 }
2767 
2768 static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
2769                          MemOp memop, TCGArg idx)
2770 {
2771     TCGMemOpIdx oi = make_memop_idx(memop, idx);
2772 #if TARGET_LONG_BITS == 32
2773     if (TCG_TARGET_REG_BITS == 32) {
2774         tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
2775     } else {
2776         tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_i32_arg(addr), oi);
2777     }
2778 #else
2779     if (TCG_TARGET_REG_BITS == 32) {
2780         tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
2781                          TCGV_LOW(addr), TCGV_HIGH(addr), oi);
2782     } else {
2783         tcg_gen_op3i_i64(opc, val, addr, oi);
2784     }
2785 #endif
2786 }
2787 
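/*
 * Emit a memory barrier only when it is actually needed: keep the
 * orderings that the guest memory model requires, then drop those
 * that the host provides by default; if anything remains, emit mb.
 */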
2788 static void tcg_gen_req_mo(TCGBar type)
2789 {
2790 #ifdef TCG_GUEST_DEFAULT_MO
2791     type &= TCG_GUEST_DEFAULT_MO;
2792 #endif
2793     type &= ~TCG_TARGET_DEFAULT_MO;
2794     if (type) {
2795         tcg_gen_mb(type | TCG_BAR_SC);
2796     }
2797 }
2798 
2799 static inline TCGv plugin_prep_mem_callbacks(TCGv vaddr)
2800 {
2801 #ifdef CONFIG_PLUGIN
2802     if (tcg_ctx->plugin_insn != NULL) {
2803         /* Save a copy of the vaddr for use after a load.  */
2804         TCGv temp = tcg_temp_new();
2805         tcg_gen_mov_tl(temp, vaddr);
2806         return temp;
2807     }
2808 #endif
2809     return vaddr;
2810 }
2811 
2812 static inline void plugin_gen_mem_callbacks(TCGv vaddr, uint16_t info)
2813 {
2814 #ifdef CONFIG_PLUGIN
2815     if (tcg_ctx->plugin_insn != NULL) {
2816         plugin_gen_empty_mem_callback(vaddr, info);
2817         tcg_temp_free(vaddr);
2818     }
2819 #endif
2820 }
2821 
2822 void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, MemOp memop)
2823 {
2824     MemOp orig_memop;
2825     uint16_t info = trace_mem_get_info(memop, idx, 0);
2826 
2827     tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
2828     memop = tcg_canonicalize_memop(memop, 0, 0);
2829     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);
2830 
2831     orig_memop = memop;
2832     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
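        /* The host cannot byte-swap as part of the access: perform
           the access in host order and swap the result afterwards,
           redoing any sign extension after the swap.  */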
2833         memop &= ~MO_BSWAP;
2834         /* The bswap primitive requires zero-extended input.  */
2835         if ((memop & MO_SSIZE) == MO_SW) {
2836             memop &= ~MO_SIGN;
2837         }
2838     }
2839 
2840     addr = plugin_prep_mem_callbacks(addr);
2841     gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
2842     plugin_gen_mem_callbacks(addr, info);
2843 
2844     if ((orig_memop ^ memop) & MO_BSWAP) {
2845         switch (orig_memop & MO_SIZE) {
2846         case MO_16:
2847             tcg_gen_bswap16_i32(val, val);
2848             if (orig_memop & MO_SIGN) {
2849                 tcg_gen_ext16s_i32(val, val);
2850             }
2851             break;
2852         case MO_32:
2853             tcg_gen_bswap32_i32(val, val);
2854             break;
2855         default:
2856             g_assert_not_reached();
2857         }
2858     }
2859 }
2860 
2861 void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, MemOp memop)
2862 {
2863     TCGv_i32 swap = NULL;
2864     uint16_t info = trace_mem_get_info(memop, idx, 1);
2865 
2866     tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
2867     memop = tcg_canonicalize_memop(memop, 0, 1);
2868     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);
2869 
2870     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
2871         swap = tcg_temp_new_i32();
2872         switch (memop & MO_SIZE) {
2873         case MO_16:
2874             tcg_gen_ext16u_i32(swap, val);
2875             tcg_gen_bswap16_i32(swap, swap);
2876             break;
2877         case MO_32:
2878             tcg_gen_bswap32_i32(swap, val);
2879             break;
2880         default:
2881             g_assert_not_reached();
2882         }
2883         val = swap;
2884         memop &= ~MO_BSWAP;
2885     }
2886 
2887     addr = plugin_prep_mem_callbacks(addr);
2888     if (TCG_TARGET_HAS_qemu_st8_i32 && (memop & MO_SIZE) == MO_8) {
2889         gen_ldst_i32(INDEX_op_qemu_st8_i32, val, addr, memop, idx);
2890     } else {
2891         gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
2892     }
2893     plugin_gen_mem_callbacks(addr, info);
2894 
2895     if (swap) {
2896         tcg_temp_free_i32(swap);
2897     }
2898 }
2899 
2900 void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, MemOp memop)
2901 {
2902     MemOp orig_memop;
2903     uint16_t info;
2904 
2905     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
2906         tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
2907         if (memop & MO_SIGN) {
2908             tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
2909         } else {
2910             tcg_gen_movi_i32(TCGV_HIGH(val), 0);
2911         }
2912         return;
2913     }
2914 
2915     tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
2916     memop = tcg_canonicalize_memop(memop, 1, 0);
2917     info = trace_mem_get_info(memop, idx, 0);
2918     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);
2919 
2920     orig_memop = memop;
2921     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
2922         memop &= ~MO_BSWAP;
2923         /* The bswap primitive requires zero-extended input.  */
2924         if ((memop & MO_SIGN) && (memop & MO_SIZE) < MO_64) {
2925             memop &= ~MO_SIGN;
2926         }
2927     }
2928 
2929     addr = plugin_prep_mem_callbacks(addr);
2930     gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
2931     plugin_gen_mem_callbacks(addr, info);
2932 
2933     if ((orig_memop ^ memop) & MO_BSWAP) {
2934         switch (orig_memop & MO_SIZE) {
2935         case MO_16:
2936             tcg_gen_bswap16_i64(val, val);
2937             if (orig_memop & MO_SIGN) {
2938                 tcg_gen_ext16s_i64(val, val);
2939             }
2940             break;
2941         case MO_32:
2942             tcg_gen_bswap32_i64(val, val);
2943             if (orig_memop & MO_SIGN) {
2944                 tcg_gen_ext32s_i64(val, val);
2945             }
2946             break;
2947         case MO_64:
2948             tcg_gen_bswap64_i64(val, val);
2949             break;
2950         default:
2951             g_assert_not_reached();
2952         }
2953     }
2954 }
2955 
2956 void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, MemOp memop)
2957 {
2958     TCGv_i64 swap = NULL;
2959     uint16_t info;
2960 
2961     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
2962         tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
2963         return;
2964     }
2965 
2966     tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
2967     memop = tcg_canonicalize_memop(memop, 1, 1);
2968     info = trace_mem_get_info(memop, idx, 1);
2969     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);
2970 
2971     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
2972         swap = tcg_temp_new_i64();
2973         switch (memop & MO_SIZE) {
2974         case MO_16:
2975             tcg_gen_ext16u_i64(swap, val);
2976             tcg_gen_bswap16_i64(swap, swap);
2977             break;
2978         case MO_32:
2979             tcg_gen_ext32u_i64(swap, val);
2980             tcg_gen_bswap32_i64(swap, swap);
2981             break;
2982         case MO_64:
2983             tcg_gen_bswap64_i64(swap, val);
2984             break;
2985         default:
2986             g_assert_not_reached();
2987         }
2988         val = swap;
2989         memop &= ~MO_BSWAP;
2990     }
2991 
2992     addr = plugin_prep_mem_callbacks(addr);
2993     gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
2994     plugin_gen_mem_callbacks(addr, info);
2995 
2996     if (swap) {
2997         tcg_temp_free_i64(swap);
2998     }
2999 }
3000 
3001 static void tcg_gen_ext_i32(TCGv_i32 ret, TCGv_i32 val, MemOp opc)
3002 {
3003     switch (opc & MO_SSIZE) {
3004     case MO_SB:
3005         tcg_gen_ext8s_i32(ret, val);
3006         break;
3007     case MO_UB:
3008         tcg_gen_ext8u_i32(ret, val);
3009         break;
3010     case MO_SW:
3011         tcg_gen_ext16s_i32(ret, val);
3012         break;
3013     case MO_UW:
3014         tcg_gen_ext16u_i32(ret, val);
3015         break;
3016     default:
3017         tcg_gen_mov_i32(ret, val);
3018         break;
3019     }
3020 }
3021 
3022 static void tcg_gen_ext_i64(TCGv_i64 ret, TCGv_i64 val, MemOp opc)
3023 {
3024     switch (opc & MO_SSIZE) {
3025     case MO_SB:
3026         tcg_gen_ext8s_i64(ret, val);
3027         break;
3028     case MO_UB:
3029         tcg_gen_ext8u_i64(ret, val);
3030         break;
3031     case MO_SW:
3032         tcg_gen_ext16s_i64(ret, val);
3033         break;
3034     case MO_UW:
3035         tcg_gen_ext16u_i64(ret, val);
3036         break;
3037     case MO_SL:
3038         tcg_gen_ext32s_i64(ret, val);
3039         break;
3040     case MO_UL:
3041         tcg_gen_ext32u_i64(ret, val);
3042         break;
3043     default:
3044         tcg_gen_mov_i64(ret, val);
3045         break;
3046     }
3047 }
3048 
3049 #ifdef CONFIG_SOFTMMU
3050 typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv,
3051                                   TCGv_i32, TCGv_i32, TCGv_i32);
3052 typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv,
3053                                   TCGv_i64, TCGv_i64, TCGv_i32);
3054 typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv,
3055                                   TCGv_i32, TCGv_i32);
3056 typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv,
3057                                   TCGv_i64, TCGv_i32);
3058 #else
3059 typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32, TCGv_i32);
3060 typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64, TCGv_i64);
3061 typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32);
3062 typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64);
3063 #endif
3064 
3065 #ifdef CONFIG_ATOMIC64
3066 # define WITH_ATOMIC64(X) X,
3067 #else
3068 # define WITH_ATOMIC64(X)
3069 #endif
3070 
3071 static void * const table_cmpxchg[16] = {
3072     [MO_8] = gen_helper_atomic_cmpxchgb,
3073     [MO_16 | MO_LE] = gen_helper_atomic_cmpxchgw_le,
3074     [MO_16 | MO_BE] = gen_helper_atomic_cmpxchgw_be,
3075     [MO_32 | MO_LE] = gen_helper_atomic_cmpxchgl_le,
3076     [MO_32 | MO_BE] = gen_helper_atomic_cmpxchgl_be,
3077     WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_cmpxchgq_le)
3078     WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_cmpxchgq_be)
3079 };
3080 
3081 void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv, TCGv addr, TCGv_i32 cmpv,
3082                                 TCGv_i32 newv, TCGArg idx, MemOp memop)
3083 {
3084     memop = tcg_canonicalize_memop(memop, 0, 0);
3085 
3086     if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
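        /* With CF_PARALLEL clear, no other vCPU can race with this
           access: expand as a plain load, compare-select and store.  */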
3087         TCGv_i32 t1 = tcg_temp_new_i32();
3088         TCGv_i32 t2 = tcg_temp_new_i32();
3089 
3090         tcg_gen_ext_i32(t2, cmpv, memop & MO_SIZE);
3091 
3092         tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
3093         tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, t2, newv, t1);
3094         tcg_gen_qemu_st_i32(t2, addr, idx, memop);
3095         tcg_temp_free_i32(t2);
3096 
3097         if (memop & MO_SIGN) {
3098             tcg_gen_ext_i32(retv, t1, memop);
3099         } else {
3100             tcg_gen_mov_i32(retv, t1);
3101         }
3102         tcg_temp_free_i32(t1);
3103     } else {
3104         gen_atomic_cx_i32 gen;
3105 
3106         gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
3107         tcg_debug_assert(gen != NULL);
3108 
3109 #ifdef CONFIG_SOFTMMU
3110         {
3111             TCGMemOpIdx oi = make_memop_idx(memop & ~MO_SIGN, idx);
3112             gen(retv, cpu_env, addr, cmpv, newv, tcg_constant_i32(oi));
3113         }
3114 #else
3115         gen(retv, cpu_env, addr, cmpv, newv);
3116 #endif
3117 
3118         if (memop & MO_SIGN) {
3119             tcg_gen_ext_i32(retv, retv, memop);
3120         }
3121     }
3122 }
3123 
3124 void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
3125                                 TCGv_i64 newv, TCGArg idx, MemOp memop)
3126 {
3127     memop = tcg_canonicalize_memop(memop, 1, 0);
3128 
3129     if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
3130         TCGv_i64 t1 = tcg_temp_new_i64();
3131         TCGv_i64 t2 = tcg_temp_new_i64();
3132 
3133         tcg_gen_ext_i64(t2, cmpv, memop & MO_SIZE);
3134 
3135         tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
3136         tcg_gen_movcond_i64(TCG_COND_EQ, t2, t1, t2, newv, t1);
3137         tcg_gen_qemu_st_i64(t2, addr, idx, memop);
3138         tcg_temp_free_i64(t2);
3139 
3140         if (memop & MO_SIGN) {
3141             tcg_gen_ext_i64(retv, t1, memop);
3142         } else {
3143             tcg_gen_mov_i64(retv, t1);
3144         }
3145         tcg_temp_free_i64(t1);
3146     } else if ((memop & MO_SIZE) == MO_64) {
3147 #ifdef CONFIG_ATOMIC64
3148         gen_atomic_cx_i64 gen;
3149 
3150         gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
3151         tcg_debug_assert(gen != NULL);
3152 
3153 #ifdef CONFIG_SOFTMMU
3154         {
3155             TCGMemOpIdx oi = make_memop_idx(memop, idx);
3156             gen(retv, cpu_env, addr, cmpv, newv, tcg_constant_i32(oi));
3157         }
3158 #else
3159         gen(retv, cpu_env, addr, cmpv, newv);
3160 #endif
3161 #else
3162         gen_helper_exit_atomic(cpu_env);
3163         /* Produce a result, so that we have a well-formed opcode stream
3164            with respect to uses of the result in the (dead) code following.  */
3165         tcg_gen_movi_i64(retv, 0);
3166 #endif /* CONFIG_ATOMIC64 */
3167     } else {
3168         TCGv_i32 c32 = tcg_temp_new_i32();
3169         TCGv_i32 n32 = tcg_temp_new_i32();
3170         TCGv_i32 r32 = tcg_temp_new_i32();
3171 
3172         tcg_gen_extrl_i64_i32(c32, cmpv);
3173         tcg_gen_extrl_i64_i32(n32, newv);
3174         tcg_gen_atomic_cmpxchg_i32(r32, addr, c32, n32, idx, memop & ~MO_SIGN);
3175         tcg_temp_free_i32(c32);
3176         tcg_temp_free_i32(n32);
3177 
3178         tcg_gen_extu_i32_i64(retv, r32);
3179         tcg_temp_free_i32(r32);
3180 
3181         if (memop & MO_SIGN) {
3182             tcg_gen_ext_i64(retv, retv, memop);
3183         }
3184     }
3185 }
3186 
static void do_nonatomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                                TCGArg idx, MemOp memop, bool new_val,
                                void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();

    memop = tcg_canonicalize_memop(memop, 0, 0);

    tcg_gen_qemu_ld_i32(t1, addr, idx, memop);
    tcg_gen_ext_i32(t2, val, memop);
    gen(t2, t1, t2);
    tcg_gen_qemu_st_i32(t2, addr, idx, memop);

    tcg_gen_ext_i32(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

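/*
 * Emit a call to an out-of-line atomic helper, for use in a parallel
 * context (CF_PARALLEL).  The helper is selected from 'table' by the
 * operation size and byte order.  Under CONFIG_SOFTMMU the memop and
 * mmu index are packed into a TCGMemOpIdx constant for the helper;
 * user-only helpers take no such argument.  If the memop requests sign
 * extension, it is applied to the helper's result afterwards.
 */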
static void do_atomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                             TCGArg idx, MemOp memop, void * const table[])
{
    gen_atomic_op_i32 gen;

    memop = tcg_canonicalize_memop(memop, 0, 0);

    gen = table[memop & (MO_SIZE | MO_BSWAP)];
    tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
    {
        TCGMemOpIdx oi = make_memop_idx(memop & ~MO_SIGN, idx);
        gen(ret, cpu_env, addr, val, tcg_constant_i32(oi));
    }
#else
    gen(ret, cpu_env, addr, val);
#endif

    if (memop & MO_SIGN) {
        tcg_gen_ext_i32(ret, ret, memop);
    }
}

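/* 64-bit counterpart of do_nonatomic_op_i32; see the comment there. */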
static void do_nonatomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                                TCGArg idx, MemOp memop, bool new_val,
                                void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();

    memop = tcg_canonicalize_memop(memop, 1, 0);

    tcg_gen_qemu_ld_i64(t1, addr, idx, memop);
    tcg_gen_ext_i64(t2, val, memop);
    gen(t2, t1, t2);
    tcg_gen_qemu_st_i64(t2, addr, idx, memop);

    tcg_gen_ext_i64(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

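/*
 * 64-bit counterpart of do_atomic_op_i32.  A true 64-bit helper can
 * only be used when the host provides 64-bit atomics (CONFIG_ATOMIC64);
 * without them we exit to a serial context via gen_helper_exit_atomic.
 * Operations narrower than 64 bits are routed through the 32-bit path
 * and the result widened afterwards.
 */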
static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                             TCGArg idx, MemOp memop, void * const table[])
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_op_i64 gen;

        gen = table[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGMemOpIdx oi = make_memop_idx(memop & ~MO_SIGN, idx);
            gen(ret, cpu_env, addr, val, tcg_constant_i32(oi));
        }
#else
        gen(ret, cpu_env, addr, val);
#endif
#else
        gen_helper_exit_atomic(cpu_env);
        /*
         * Produce a result, so that we have a well-formed opcode stream
         * with respect to uses of the result in the (dead) code following.
         */
        tcg_gen_movi_i64(ret, 0);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 v32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(v32, val);
        do_atomic_op_i32(r32, addr, v32, idx, memop & ~MO_SIGN, table);
        tcg_temp_free_i32(v32);

        tcg_gen_extu_i32_i64(ret, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(ret, ret, memop);
        }
    }
}

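/*
 * Instantiate a helper table plus the i32 and i64 front-end emitters
 * for one atomic read-modify-write operation.  The table is indexed by
 * operation size and byte order, with the 64-bit entries present only
 * when the host provides 64-bit atomics (WITH_ATOMIC64 expands to
 * nothing otherwise).  Each emitter selects the parallel (out-of-line
 * atomic helper) or serial (open-coded) expansion based on the
 * CF_PARALLEL bit in the current translation block's compile flags.
 */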
#define GEN_ATOMIC_HELPER(NAME, OP, NEW)                                \
static void * const table_##NAME[16] = {                                \
    [MO_8] = gen_helper_atomic_##NAME##b,                               \
    [MO_16 | MO_LE] = gen_helper_atomic_##NAME##w_le,                   \
    [MO_16 | MO_BE] = gen_helper_atomic_##NAME##w_be,                   \
    [MO_32 | MO_LE] = gen_helper_atomic_##NAME##l_le,                   \
    [MO_32 | MO_BE] = gen_helper_atomic_##NAME##l_be,                   \
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_##NAME##q_le)     \
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_##NAME##q_be)     \
};                                                                      \
void tcg_gen_atomic_##NAME##_i32                                        \
    (TCGv_i32 ret, TCGv addr, TCGv_i32 val, TCGArg idx, MemOp memop)    \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i32(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i32(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i32);                        \
    }                                                                   \
}                                                                       \
void tcg_gen_atomic_##NAME##_i64                                        \
    (TCGv_i64 ret, TCGv addr, TCGv_i64 val, TCGArg idx, MemOp memop)    \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i64(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i64(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i64);                        \
    }                                                                   \
}

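/*
 * The fetch_<op> variants return the memory value as it was before the
 * operation (NEW == 0); the <op>_fetch variants return the value after
 * the operation (NEW == 1), mirroring the __atomic_fetch_OP and
 * __atomic_OP_fetch builtin pairs.
 */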
GEN_ATOMIC_HELPER(fetch_add, add, 0)
GEN_ATOMIC_HELPER(fetch_and, and, 0)
GEN_ATOMIC_HELPER(fetch_or, or, 0)
GEN_ATOMIC_HELPER(fetch_xor, xor, 0)
GEN_ATOMIC_HELPER(fetch_smin, smin, 0)
GEN_ATOMIC_HELPER(fetch_umin, umin, 0)
GEN_ATOMIC_HELPER(fetch_smax, smax, 0)
GEN_ATOMIC_HELPER(fetch_umax, umax, 0)

GEN_ATOMIC_HELPER(add_fetch, add, 1)
GEN_ATOMIC_HELPER(and_fetch, and, 1)
GEN_ATOMIC_HELPER(or_fetch, or, 1)
GEN_ATOMIC_HELPER(xor_fetch, xor, 1)
GEN_ATOMIC_HELPER(smin_fetch, smin, 1)
GEN_ATOMIC_HELPER(umin_fetch, umin, 1)
GEN_ATOMIC_HELPER(smax_fetch, smax, 1)
GEN_ATOMIC_HELPER(umax_fetch, umax, 1)

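/*
 * Exchange is expressed as a binary operation that ignores the value
 * loaded from memory and simply stores the operand; with NEW == 0 the
 * emitters then return the original memory value.
 */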
static void tcg_gen_mov2_i32(TCGv_i32 r, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_mov_i32(r, b);
}

static void tcg_gen_mov2_i64(TCGv_i64 r, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_mov_i64(r, b);
}

GEN_ATOMIC_HELPER(xchg, mov2, 0)

#undef GEN_ATOMIC_HELPER
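/*
 * Illustrative use only (not part of this file): a target front end
 * implementing a guest fetch-and-add instruction might emit
 *
 *     TCGv_i64 oldv = tcg_temp_new_i64();
 *     tcg_gen_atomic_fetch_add_i64(oldv, addr, incr, mmu_idx,
 *                                  MO_TEQ | MO_ALIGN);
 *     ... use 'oldv' as the pre-operation memory value ...
 *     tcg_temp_free_i64(oldv);
 *
 * where 'addr', 'incr', and 'mmu_idx' are hypothetical values supplied
 * by the translator.  The same call works in both execution modes; the
 * CF_PARALLEL dispatch above is internal to these emitters.
 */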
3354