/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "cpu.h"
#include "exec/exec-all.h"
#include "tcg.h"
#include "tcg-op.h"
#include "trace-tcg.h"
#include "trace/mem.h"

/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif

/* Note that this is optimized for sequential allocation during translate.
   Up to and including filling in the forward link immediately.  We'll do
   proper termination of the end of the list after we finish translation.  */

static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;
    int ni = oi + 1;
    int pi = oi - 1;

    tcg_debug_assert(oi < OPC_BUF_SIZE);
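    /* gen_op_buf[0] is the list sentinel: its prev link always points
       at the most recently emitted op, so the tail can be found
       without walking the list.  */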
    ctx->gen_op_buf[0].prev = oi;
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,
        .prev = pi,
        .next = ni
    };
}

void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 1;
    ctx->gen_opparam_buf[pi] = a1;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 2;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 3;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 4;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 5;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
                 TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 6;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;
    ctx->gen_opparam_buf[pi + 5] = a6;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_mb(TCGBar mb_type)
{
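    /* With a single TCG thread there is no cross-vCPU reordering to
       guard against, so the barrier op is only emitted when guest
       CPUs may run in parallel (MTTCG).  */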
    if (parallel_cpus) {
        tcg_gen_op1(&tcg_ctx, INDEX_op_mb, mb_type);
    }
}

/* 32 bit ops */

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg1);
        tcg_gen_sub_i32(ret, t0, arg2);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, l);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_setcond_i32(cond, ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_mul_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
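        /* div2 divides the double-word value t0:arg1 (high:low) by
           arg2; sign-extending arg1 into t0 forms the 64-bit signed
           dividend.  */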
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
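        /* Synthesize the remainder as arg1 - (arg1 / arg2) * arg2.  */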
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
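        /* The zero-extended input has 32 extra leading zeros, so bias
           the "input is zero" result by 32 and subtract 32 at the
           end.  */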
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t = tcg_const_i32(arg2);
    tcg_gen_clz_i32(ret, arg1, t);
    tcg_temp_free_i32(t);
}

void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
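            /* (arg1 - 1) & ~arg1 has ones in exactly the trailing
               zero positions, so its population count equals
               ctz(arg1).  */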
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
        tcg_temp_free_i32(z);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i32 t = tcg_const_i32(arg2);
        tcg_gen_ctz_i32(ret, arg1, t);
        tcg_temp_free_i32(t);
    }
}

void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
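        /* XOR with the replicated sign bit turns redundant sign bits
           into leading zeros; clz of that, minus one for the sign bit
           itself, counts the redundant copies.  */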
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}

void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
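        /* Decompose as (arg1 << arg2) | (arg1 >> (32 - arg2)).  */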
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

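    /* No usable deposit op: clear the field in arg1 and OR in the
       masked, shifted arg2 by hand.  */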
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}

void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_const_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
        tcg_temp_free_i32(zero);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}

void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}

void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}

void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
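        /* Expand the condition into an all-ones/all-zeros mask and
           blend: ret = (v1 & mask) | (v2 & ~mask).  */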
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
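        /* No add2 op: widen to a single 64-bit add and split back.  */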
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
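        /* Compute the low half into a temp first so that rl may
           alias arg1 or arg2.  */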
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
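        /* signed_hi = unsigned_hi - (arg1 < 0 ? arg2 : 0)
                                   - (arg2 < 0 ? arg1 : 0);
           the sari/and pairs compute those conditional terms.  */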
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

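        /* Assemble bytes abcd -> dcba one lane at a time.  */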
        tcg_gen_shli_i32(t0, arg, 24);

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

/* 64-bit ops */

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64.  */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}

void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

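    /* Schoolbook decomposition: the low 64 bits of the product are
       (al * bl) + ((al * bh + ah * bl) << 32); the ah * bh term only
       affects bits above 64 and is dropped.  */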
    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
#endif /* TCG_TARGET_REG_BITS == 32 */

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg1);
        tcg_gen_sub_i64(ret, t0, arg2);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
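        /* 0 < c < 32: bits cross the word boundary, so capture the c
           bits that move between halves in t0 before shifting.  */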
        if (right) {
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, l);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

/* Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

1703         tcg_gen_shli_i64(t0, arg, 56);
1704 
1705         tcg_gen_andi_i64(t1, arg, 0x0000ff00);
1706         tcg_gen_shli_i64(t1, t1, 40);
1707         tcg_gen_or_i64(t0, t0, t1);
1708 
1709         tcg_gen_andi_i64(t1, arg, 0x00ff0000);
1710         tcg_gen_shli_i64(t1, t1, 24);
1711         tcg_gen_or_i64(t0, t0, t1);
1712 
1713         tcg_gen_andi_i64(t1, arg, 0xff000000);
1714         tcg_gen_shli_i64(t1, t1, 8);
1715         tcg_gen_or_i64(t0, t0, t1);
1716 
1717         tcg_gen_shri_i64(t1, arg, 8);
1718         tcg_gen_andi_i64(t1, t1, 0xff000000);
1719         tcg_gen_or_i64(t0, t0, t1);
1720 
1721         tcg_gen_shri_i64(t1, arg, 24);
1722         tcg_gen_andi_i64(t1, t1, 0x00ff0000);
1723         tcg_gen_or_i64(t0, t0, t1);
1724 
1725         tcg_gen_shri_i64(t1, arg, 40);
1726         tcg_gen_andi_i64(t1, t1, 0x0000ff00);
1727         tcg_gen_or_i64(t0, t0, t1);
1728 
1729         tcg_gen_shri_i64(t1, arg, 56);
1730         tcg_gen_or_i64(ret, t0, t1);
1731         tcg_temp_free_i64(t0);
1732         tcg_temp_free_i64(t1);
1733     }
1734 }
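
/* Illustrative sketch (hypothetical helper, for exposition only): the
   generic fallback above assembles the byte-reversed word from eight
   masked shifts, exactly as below on host integers.  */
static inline uint64_t example_bswap64(uint64_t x)
{
    uint64_t r;

    r  =  x << 56;                              /* byte 0 -> byte 7 */
    r |= (x & 0x000000000000ff00ull) << 40;     /* byte 1 -> byte 6 */
    r |= (x & 0x0000000000ff0000ull) << 24;     /* byte 2 -> byte 5 */
    r |= (x & 0x00000000ff000000ull) << 8;      /* byte 3 -> byte 4 */
    r |= (x >> 8)  & 0x00000000ff000000ull;     /* byte 4 -> byte 3 */
    r |= (x >> 24) & 0x0000000000ff0000ull;     /* byte 5 -> byte 2 */
    r |= (x >> 40) & 0x000000000000ff00ull;     /* byte 6 -> byte 1 */
    r |=  x >> 56;                              /* byte 7 -> byte 0 */
    return r;
}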
1735 
1736 void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1737 {
1738     if (TCG_TARGET_REG_BITS == 32) {
1739         tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1740         tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1741     } else if (TCG_TARGET_HAS_not_i64) {
1742         tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1743     } else {
1744         tcg_gen_xori_i64(ret, arg, -1);
1745     }
1746 }
1747 
1748 void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1749 {
1750     if (TCG_TARGET_REG_BITS == 32) {
1751         tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1752         tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1753     } else if (TCG_TARGET_HAS_andc_i64) {
1754         tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1755     } else {
1756         TCGv_i64 t0 = tcg_temp_new_i64();
1757         tcg_gen_not_i64(t0, arg2);
1758         tcg_gen_and_i64(ret, arg1, t0);
1759         tcg_temp_free_i64(t0);
1760     }
1761 }
1762 
1763 void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1764 {
1765     if (TCG_TARGET_REG_BITS == 32) {
1766         tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1767         tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1768     } else if (TCG_TARGET_HAS_eqv_i64) {
1769         tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1770     } else {
1771         tcg_gen_xor_i64(ret, arg1, arg2);
1772         tcg_gen_not_i64(ret, ret);
1773     }
1774 }
1775 
1776 void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1777 {
1778     if (TCG_TARGET_REG_BITS == 32) {
1779         tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1780         tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1781     } else if (TCG_TARGET_HAS_nand_i64) {
1782         tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
1783     } else {
1784         tcg_gen_and_i64(ret, arg1, arg2);
1785         tcg_gen_not_i64(ret, ret);
1786     }
1787 }
1788 
1789 void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1790 {
1791     if (TCG_TARGET_REG_BITS == 32) {
1792         tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1793         tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1794     } else if (TCG_TARGET_HAS_nor_i64) {
1795         tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
1796     } else {
1797         tcg_gen_or_i64(ret, arg1, arg2);
1798         tcg_gen_not_i64(ret, ret);
1799     }
1800 }
1801 
1802 void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1803 {
1804     if (TCG_TARGET_REG_BITS == 32) {
1805         tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1806         tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1807     } else if (TCG_TARGET_HAS_orc_i64) {
1808         tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
1809     } else {
1810         TCGv_i64 t0 = tcg_temp_new_i64();
1811         tcg_gen_not_i64(t0, arg2);
1812         tcg_gen_or_i64(ret, arg1, t0);
1813         tcg_temp_free_i64(t0);
1814     }
1815 }
1816 
1817 void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1818 {
1819     if (TCG_TARGET_HAS_clz_i64) {
1820         tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
1821     } else {
1822         gen_helper_clz_i64(ret, arg1, arg2);
1823     }
1824 }
1825 
1826 void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1827 {
1828     if (TCG_TARGET_REG_BITS == 32
1829         && TCG_TARGET_HAS_clz_i32
1830         && arg2 <= 0xffffffffu) {
1831         TCGv_i32 t = tcg_const_i32((uint32_t)arg2 - 32);
1832         tcg_gen_clz_i32(t, TCGV_LOW(arg1), t);
1833         tcg_gen_addi_i32(t, t, 32);
1834         tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
1835         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1836         tcg_temp_free_i32(t);
1837     } else {
1838         TCGv_i64 t = tcg_const_i64(arg2);
1839         tcg_gen_clz_i64(ret, arg1, t);
1840         tcg_temp_free_i64(t);
1841     }
1842 }
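
/* Illustrative sketch (hypothetical helper, for exposition only): the
   32-bit path above chains two 32-bit clz ops.  TCG's clz returns its
   second operand when the input is zero, which is what makes the
   chaining work.  */
static inline uint32_t example_clz64_split(uint32_t hi, uint32_t lo,
                                           uint32_t def)
{
    /* The low word's default is biased by -32 (unsigned wraparound is
       fine) so that the unconditional +32 restores DEF when the whole
       64-bit input is zero.  __builtin_clz is undefined for 0, hence
       the explicit tests.  */
    uint32_t t = (lo ? (uint32_t)__builtin_clz(lo) : def - 32) + 32;
    return hi ? (uint32_t)__builtin_clz(hi) : t;
}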
1843 
1844 void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1845 {
1846     if (TCG_TARGET_HAS_ctz_i64) {
1847         tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
1848     } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
1849         TCGv_i64 z, t = tcg_temp_new_i64();
1850 
1851         if (TCG_TARGET_HAS_ctpop_i64) {
1852             tcg_gen_subi_i64(t, arg1, 1);
1853             tcg_gen_andc_i64(t, t, arg1);
1854             tcg_gen_ctpop_i64(t, t);
1855         } else {
1856             /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
1857             tcg_gen_neg_i64(t, arg1);
1858             tcg_gen_and_i64(t, t, arg1);
1859             tcg_gen_clzi_i64(t, t, 64);
1860             tcg_gen_xori_i64(t, t, 63);
1861         }
1862         z = tcg_const_i64(0);
1863         tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
1864         tcg_temp_free_i64(t);
1865         tcg_temp_free_i64(z);
1866     } else {
1867         gen_helper_ctz_i64(ret, arg1, arg2);
1868     }
1869 }
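
/* Illustrative sketch (hypothetical helper, for exposition only): the
   ctpop path above relies on (x - 1) & ~x setting exactly the bits
   below the lowest set bit of x.  The clz path instead isolates the
   lowest set bit with x & -x and flips the 0..63 result with XOR 63.  */
static inline uint64_t example_ctz64_via_ctpop(uint64_t x)
{
    /* For x == 0 the mask is all-ones and the count is 64, which is why
       tcg_gen_ctzi_i64 below can skip the fixup when arg2 == 64.  */
    return (uint64_t)__builtin_popcountll((x - 1) & ~x);
}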
1870 
1871 void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1872 {
1873     if (TCG_TARGET_REG_BITS == 32
1874         && TCG_TARGET_HAS_ctz_i32
1875         && arg2 <= 0xffffffffu) {
1876         TCGv_i32 t32 = tcg_const_i32((uint32_t)arg2 - 32);
1877         tcg_gen_ctz_i32(t32, TCGV_HIGH(arg1), t32);
1878         tcg_gen_addi_i32(t32, t32, 32);
1879         tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
1880         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1881         tcg_temp_free_i32(t32);
1882     } else if (!TCG_TARGET_HAS_ctz_i64
1883                && TCG_TARGET_HAS_ctpop_i64
1884                && arg2 == 64) {
1885         /* This equivalence has the advantage of not requiring a fixup.  */
1886         TCGv_i64 t = tcg_temp_new_i64();
1887         tcg_gen_subi_i64(t, arg1, 1);
1888         tcg_gen_andc_i64(t, t, arg1);
1889         tcg_gen_ctpop_i64(ret, t);
1890         tcg_temp_free_i64(t);
1891     } else {
1892         TCGv_i64 t64 = tcg_const_i64(arg2);
1893         tcg_gen_ctz_i64(ret, arg1, t64);
1894         tcg_temp_free_i64(t64);
1895     }
1896 }
1897 
1898 void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
1899 {
1900     if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
1901         TCGv_i64 t = tcg_temp_new_i64();
1902         tcg_gen_sari_i64(t, arg, 63);
1903         tcg_gen_xor_i64(t, t, arg);
1904         tcg_gen_clzi_i64(t, t, 64);
1905         tcg_gen_subi_i64(ret, t, 1);
1906         tcg_temp_free_i64(t);
1907     } else {
1908         gen_helper_clrsb_i64(ret, arg);
1909     }
1910 }
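
/* Illustrative sketch (hypothetical helper, for exposition only):
   XORing with a full-width copy of the sign bit clears the redundant
   sign bits, so a leading-zero count of the result (64 for zero, as
   tcg_gen_clzi_i64 guarantees) minus the sign bit itself is clrsb.  */
static inline int64_t example_clrsb64(int64_t x)
{
    /* Assumes arithmetic right shift of signed types, as TCG's sari.  */
    uint64_t t = (uint64_t)x ^ (uint64_t)(x >> 63);
    return (t ? __builtin_clzll(t) : 64) - 1;
}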
1911 
1912 void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
1913 {
1914     if (TCG_TARGET_HAS_ctpop_i64) {
1915         tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
1916     } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
1917         tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
1918         tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
1919         tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
1920         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1921     } else {
1922         gen_helper_ctpop_i64(ret, arg1);
1923     }
1924 }
1925 
1926 void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1927 {
1928     if (TCG_TARGET_HAS_rot_i64) {
1929         tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
1930     } else {
1931         TCGv_i64 t0, t1;
1932         t0 = tcg_temp_new_i64();
1933         t1 = tcg_temp_new_i64();
1934         tcg_gen_shl_i64(t0, arg1, arg2);
1935         tcg_gen_subfi_i64(t1, 64, arg2);
1936         tcg_gen_shr_i64(t1, arg1, t1);
1937         tcg_gen_or_i64(ret, t0, t1);
1938         tcg_temp_free_i64(t0);
1939         tcg_temp_free_i64(t1);
1940     }
1941 }
1942 
1943 void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1944 {
1945     tcg_debug_assert(arg2 < 64);
1946     /* Some cases can be optimized here.  */
1947     if (arg2 == 0) {
1948         tcg_gen_mov_i64(ret, arg1);
1949     } else if (TCG_TARGET_HAS_rot_i64) {
1950         TCGv_i64 t0 = tcg_const_i64(arg2);
1951         tcg_gen_rotl_i64(ret, arg1, t0);
1952         tcg_temp_free_i64(t0);
1953     } else {
1954         TCGv_i64 t0, t1;
1955         t0 = tcg_temp_new_i64();
1956         t1 = tcg_temp_new_i64();
1957         tcg_gen_shli_i64(t0, arg1, arg2);
1958         tcg_gen_shri_i64(t1, arg1, 64 - arg2);
1959         tcg_gen_or_i64(ret, t0, t1);
1960         tcg_temp_free_i64(t0);
1961         tcg_temp_free_i64(t1);
1962     }
1963 }
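
/* Illustrative sketch (hypothetical helper, for exposition only): the
   final fallback above is the classic two-shift rotate.  */
static inline uint64_t example_rotl64(uint64_t x, unsigned n)
{
    /* Valid for 1 <= n <= 63; n == 0 is special-cased above precisely
       because x >> (64 - n) would then be an undefined 64-bit shift.  */
    return (x << n) | (x >> (64 - n));
}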
1964 
1965 void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1966 {
1967     if (TCG_TARGET_HAS_rot_i64) {
1968         tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
1969     } else {
1970         TCGv_i64 t0, t1;
1971         t0 = tcg_temp_new_i64();
1972         t1 = tcg_temp_new_i64();
1973         tcg_gen_shr_i64(t0, arg1, arg2);
1974         tcg_gen_subfi_i64(t1, 64, arg2);
1975         tcg_gen_shl_i64(t1, arg1, t1);
1976         tcg_gen_or_i64(ret, t0, t1);
1977         tcg_temp_free_i64(t0);
1978         tcg_temp_free_i64(t1);
1979     }
1980 }
1981 
1982 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1983 {
1984     tcg_debug_assert(arg2 < 64);
1985     /* Some cases can be optimized here.  */
1986     if (arg2 == 0) {
1987         tcg_gen_mov_i64(ret, arg1);
1988     } else {
1989         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
1990     }
1991 }
1992 
1993 void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
1994                          unsigned int ofs, unsigned int len)
1995 {
1996     uint64_t mask;
1997     TCGv_i64 t1;
1998 
1999     tcg_debug_assert(ofs < 64);
2000     tcg_debug_assert(len > 0);
2001     tcg_debug_assert(len <= 64);
2002     tcg_debug_assert(ofs + len <= 64);
2003 
2004     if (len == 64) {
2005         tcg_gen_mov_i64(ret, arg2);
2006         return;
2007     }
2008     if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2009         tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
2010         return;
2011     }
2012 
2013     if (TCG_TARGET_REG_BITS == 32) {
2014         if (ofs >= 32) {
2015             tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
2016                                 TCGV_LOW(arg2), ofs - 32, len);
2017             tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2018             return;
2019         }
2020         if (ofs + len <= 32) {
2021             tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
2022                                 TCGV_LOW(arg2), ofs, len);
2023             tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2024             return;
2025         }
2026     }
2027 
2028     mask = (1ull << len) - 1;
2029     t1 = tcg_temp_new_i64();
2030 
2031     if (ofs + len < 64) {
2032         tcg_gen_andi_i64(t1, arg2, mask);
2033         tcg_gen_shli_i64(t1, t1, ofs);
2034     } else {
2035         tcg_gen_shli_i64(t1, arg2, ofs);
2036     }
2037     tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
2038     tcg_gen_or_i64(ret, ret, t1);
2039 
2040     tcg_temp_free_i64(t1);
2041 }
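
/* Illustrative sketch (hypothetical helper, for exposition only): the
   generic deposit fallback computes, on host integers:  */
static inline uint64_t example_deposit64(uint64_t dst, uint64_t src,
                                         unsigned ofs, unsigned len)
{
    /* Requires len < 64; len == 64 is handled by the plain move above,
       which also keeps the mask computation free of undefined shifts.  */
    uint64_t mask = (1ull << len) - 1;

    return (dst & ~(mask << ofs)) | ((src & mask) << ofs);
}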
2042 
2043 void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
2044                            unsigned int ofs, unsigned int len)
2045 {
2046     tcg_debug_assert(ofs < 64);
2047     tcg_debug_assert(len > 0);
2048     tcg_debug_assert(len <= 64);
2049     tcg_debug_assert(ofs + len <= 64);
2050 
2051     if (ofs + len == 64) {
2052         tcg_gen_shli_i64(ret, arg, ofs);
2053     } else if (ofs == 0) {
2054         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2055     } else if (TCG_TARGET_HAS_deposit_i64
2056                && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2057         TCGv_i64 zero = tcg_const_i64(0);
2058         tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
2059         tcg_temp_free_i64(zero);
2060     } else {
2061         if (TCG_TARGET_REG_BITS == 32) {
2062             if (ofs >= 32) {
2063                 tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
2064                                       ofs - 32, len);
2065                 tcg_gen_movi_i32(TCGV_LOW(ret), 0);
2066                 return;
2067             }
2068             if (ofs + len <= 32) {
2069                 tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2070                 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2071                 return;
2072             }
2073         }
2074         /* To help two-operand hosts we prefer to zero-extend first,
2075            which allows ARG to stay live.  */
2076         switch (len) {
2077         case 32:
2078             if (TCG_TARGET_HAS_ext32u_i64) {
2079                 tcg_gen_ext32u_i64(ret, arg);
2080                 tcg_gen_shli_i64(ret, ret, ofs);
2081                 return;
2082             }
2083             break;
2084         case 16:
2085             if (TCG_TARGET_HAS_ext16u_i64) {
2086                 tcg_gen_ext16u_i64(ret, arg);
2087                 tcg_gen_shli_i64(ret, ret, ofs);
2088                 return;
2089             }
2090             break;
2091         case 8:
2092             if (TCG_TARGET_HAS_ext8u_i64) {
2093                 tcg_gen_ext8u_i64(ret, arg);
2094                 tcg_gen_shli_i64(ret, ret, ofs);
2095                 return;
2096             }
2097             break;
2098         }
2099         /* Otherwise prefer zero-extension over AND for code size.  */
2100         switch (ofs + len) {
2101         case 32:
2102             if (TCG_TARGET_HAS_ext32u_i64) {
2103                 tcg_gen_shli_i64(ret, arg, ofs);
2104                 tcg_gen_ext32u_i64(ret, ret);
2105                 return;
2106             }
2107             break;
2108         case 16:
2109             if (TCG_TARGET_HAS_ext16u_i64) {
2110                 tcg_gen_shli_i64(ret, arg, ofs);
2111                 tcg_gen_ext16u_i64(ret, ret);
2112                 return;
2113             }
2114             break;
2115         case 8:
2116             if (TCG_TARGET_HAS_ext8u_i64) {
2117                 tcg_gen_shli_i64(ret, arg, ofs);
2118                 tcg_gen_ext8u_i64(ret, ret);
2119                 return;
2120             }
2121             break;
2122         }
2123         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2124         tcg_gen_shli_i64(ret, ret, ofs);
2125     }
2126 }
2127 
2128 void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
2129                          unsigned int ofs, unsigned int len)
2130 {
2131     tcg_debug_assert(ofs < 64);
2132     tcg_debug_assert(len > 0);
2133     tcg_debug_assert(len <= 64);
2134     tcg_debug_assert(ofs + len <= 64);
2135 
2136     /* Canonicalize certain special cases, even if extract is supported.  */
2137     if (ofs + len == 64) {
2138         tcg_gen_shri_i64(ret, arg, 64 - len);
2139         return;
2140     }
2141     if (ofs == 0) {
2142         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2143         return;
2144     }
2145 
2146     if (TCG_TARGET_REG_BITS == 32) {
2147         /* Look for a 32-bit extract within one of the two words.  */
2148         if (ofs >= 32) {
2149             tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2150             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2151             return;
2152         }
2153         if (ofs + len <= 32) {
2154             tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2155             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2156             return;
2157         }
2158         /* The field is split across two words.  One double-word
2159            shift is better than two double-word shifts.  */
2160         goto do_shift_and;
2161     }
2162 
2163     if (TCG_TARGET_HAS_extract_i64
2164         && TCG_TARGET_extract_i64_valid(ofs, len)) {
2165         tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
2166         return;
2167     }
2168 
2169     /* Assume that zero-extension, if available, is cheaper than a shift.  */
2170     switch (ofs + len) {
2171     case 32:
2172         if (TCG_TARGET_HAS_ext32u_i64) {
2173             tcg_gen_ext32u_i64(ret, arg);
2174             tcg_gen_shri_i64(ret, ret, ofs);
2175             return;
2176         }
2177         break;
2178     case 16:
2179         if (TCG_TARGET_HAS_ext16u_i64) {
2180             tcg_gen_ext16u_i64(ret, arg);
2181             tcg_gen_shri_i64(ret, ret, ofs);
2182             return;
2183         }
2184         break;
2185     case 8:
2186         if (TCG_TARGET_HAS_ext8u_i64) {
2187             tcg_gen_ext8u_i64(ret, arg);
2188             tcg_gen_shri_i64(ret, ret, ofs);
2189             return;
2190         }
2191         break;
2192     }
2193 
2194     /* ??? Ideally we'd know what values are available for immediate AND.
2195        Assume that 8 bits are available, plus the special cases of 16 and 32,
2196        so that we get ext8u, ext16u, and ext32u.  */
2197     switch (len) {
2198     case 1 ... 8: case 16: case 32:
2199     do_shift_and:
2200         tcg_gen_shri_i64(ret, arg, ofs);
2201         tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
2202         break;
2203     default:
2204         tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2205         tcg_gen_shri_i64(ret, ret, 64 - len);
2206         break;
2207     }
2208 }
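
/* Illustrative sketch (hypothetical helpers, for exposition only): the
   two unsigned-field fallbacks above, on host integers.  */
static inline uint64_t example_extract64_mask(uint64_t x, unsigned ofs,
                                              unsigned len)
{
    /* Shift-and-mask form, for len in 1..63 where the immediate AND is
       assumed cheap (8 bits, or the 16/32-bit zero-extensions).  */
    return (x >> ofs) & ((1ull << len) - 1);
}

static inline uint64_t example_extract64_shifts(uint64_t x, unsigned ofs,
                                                unsigned len)
{
    /* Two-shift form for awkward widths: move the field to the top of
       the word, then shift back down with zero fill.  */
    return (x << (64 - len - ofs)) >> (64 - len);
}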
2209 
2210 void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
2211                           unsigned int ofs, unsigned int len)
2212 {
2213     tcg_debug_assert(ofs < 64);
2214     tcg_debug_assert(len > 0);
2215     tcg_debug_assert(len <= 64);
2216     tcg_debug_assert(ofs + len <= 64);
2217 
2218     /* Canonicalize certain special cases, even if sextract is supported.  */
2219     if (ofs + len == 64) {
2220         tcg_gen_sari_i64(ret, arg, 64 - len);
2221         return;
2222     }
2223     if (ofs == 0) {
2224         switch (len) {
2225         case 32:
2226             tcg_gen_ext32s_i64(ret, arg);
2227             return;
2228         case 16:
2229             tcg_gen_ext16s_i64(ret, arg);
2230             return;
2231         case 8:
2232             tcg_gen_ext8s_i64(ret, arg);
2233             return;
2234         }
2235     }
2236 
2237     if (TCG_TARGET_REG_BITS == 32) {
2238         /* Look for a 32-bit extract within one of the two words.  */
2239         if (ofs >= 32) {
2240             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2241         } else if (ofs + len <= 32) {
2242             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2243         } else if (ofs == 0) {
2244             tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2245             tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
2246             return;
2247         } else if (len > 32) {
2248             TCGv_i32 t = tcg_temp_new_i32();
2249             /* Extract the bits for the high word normally.  */
2250             tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
2251             /* Shift the field down for the low part.  */
2252             tcg_gen_shri_i64(ret, arg, ofs);
2253             /* Overwrite the shift into the high part.  */
2254             tcg_gen_mov_i32(TCGV_HIGH(ret), t);
2255             tcg_temp_free_i32(t);
2256             return;
2257         } else {
2258             /* Shift the field down so that its top bit becomes the
2259                MSB of the low part.  */
2260             tcg_gen_shri_i64(ret, arg, ofs + len - 32);
2261             /* Shift the field down from the MSB, sign extending.  */
2262             tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
2263         }
2264         /* Sign-extend the field from 32 bits.  */
2265         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2266         return;
2267     }
2268 
2269     if (TCG_TARGET_HAS_sextract_i64
2270         && TCG_TARGET_extract_i64_valid(ofs, len)) {
2271         tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
2272         return;
2273     }
2274 
2275     /* Assume that sign-extension, if available, is cheaper than a shift.  */
2276     switch (ofs + len) {
2277     case 32:
2278         if (TCG_TARGET_HAS_ext32s_i64) {
2279             tcg_gen_ext32s_i64(ret, arg);
2280             tcg_gen_sari_i64(ret, ret, ofs);
2281             return;
2282         }
2283         break;
2284     case 16:
2285         if (TCG_TARGET_HAS_ext16s_i64) {
2286             tcg_gen_ext16s_i64(ret, arg);
2287             tcg_gen_sari_i64(ret, ret, ofs);
2288             return;
2289         }
2290         break;
2291     case 8:
2292         if (TCG_TARGET_HAS_ext8s_i64) {
2293             tcg_gen_ext8s_i64(ret, arg);
2294             tcg_gen_sari_i64(ret, ret, ofs);
2295             return;
2296         }
2297         break;
2298     }
2299     switch (len) {
2300     case 32:
2301         if (TCG_TARGET_HAS_ext32s_i64) {
2302             tcg_gen_shri_i64(ret, arg, ofs);
2303             tcg_gen_ext32s_i64(ret, ret);
2304             return;
2305         }
2306         break;
2307     case 16:
2308         if (TCG_TARGET_HAS_ext16s_i64) {
2309             tcg_gen_shri_i64(ret, arg, ofs);
2310             tcg_gen_ext16s_i64(ret, ret);
2311             return;
2312         }
2313         break;
2314     case 8:
2315         if (TCG_TARGET_HAS_ext8s_i64) {
2316             tcg_gen_shri_i64(ret, arg, ofs);
2317             tcg_gen_ext8s_i64(ret, ret);
2318             return;
2319         }
2320         break;
2321     }
2322     tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2323     tcg_gen_sari_i64(ret, ret, 64 - len);
2324 }
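
/* Illustrative sketch (hypothetical helper, for exposition only): the
   final sextract fallback, on host integers.  */
static inline int64_t example_sextract64(int64_t x, unsigned ofs,
                                         unsigned len)
{
    /* Position the field at the top of the word, then an arithmetic
       right shift drags its MSB down as the sign.  Assumes the host
       shifts signed types arithmetically, as TCG's sari does.  */
    return (int64_t)((uint64_t)x << (64 - len - ofs)) >> (64 - len);
}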
2325 
2326 void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
2327                          TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
2328 {
2329     if (cond == TCG_COND_ALWAYS) {
2330         tcg_gen_mov_i64(ret, v1);
2331     } else if (cond == TCG_COND_NEVER) {
2332         tcg_gen_mov_i64(ret, v2);
2333     } else if (TCG_TARGET_REG_BITS == 32) {
2334         TCGv_i32 t0 = tcg_temp_new_i32();
2335         TCGv_i32 t1 = tcg_temp_new_i32();
2336         tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
2337                          TCGV_LOW(c1), TCGV_HIGH(c1),
2338                          TCGV_LOW(c2), TCGV_HIGH(c2), cond);
2339 
2340         if (TCG_TARGET_HAS_movcond_i32) {
2341             tcg_gen_movi_i32(t1, 0);
2342             tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
2343                                 TCGV_LOW(v1), TCGV_LOW(v2));
2344             tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
2345                                 TCGV_HIGH(v1), TCGV_HIGH(v2));
2346         } else {
2347             tcg_gen_neg_i32(t0, t0);
2348 
2349             tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
2350             tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
2351             tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);
2352 
2353             tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
2354             tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
2355             tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
2356         }
2357         tcg_temp_free_i32(t0);
2358         tcg_temp_free_i32(t1);
2359     } else if (TCG_TARGET_HAS_movcond_i64) {
2360         tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
2361     } else {
2362         TCGv_i64 t0 = tcg_temp_new_i64();
2363         TCGv_i64 t1 = tcg_temp_new_i64();
2364         tcg_gen_setcond_i64(cond, t0, c1, c2);
2365         tcg_gen_neg_i64(t0, t0);
2366         tcg_gen_and_i64(t1, v1, t0);
2367         tcg_gen_andc_i64(ret, v2, t0);
2368         tcg_gen_or_i64(ret, ret, t1);
2369         tcg_temp_free_i64(t0);
2370         tcg_temp_free_i64(t1);
2371     }
2372 }
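
/* Illustrative sketch (hypothetical helper, for exposition only): both
   fallback paths above turn the 0/1 setcond result into an all-ones or
   all-zeroes mask by negation, then select without a branch.  */
static inline uint64_t example_movcond64(uint64_t cond, uint64_t v1,
                                         uint64_t v2)
{
    uint64_t m = -(uint64_t)(cond != 0);    /* 0 -> 0, 1 -> ~0 */

    return (v1 & m) | (v2 & ~m);
}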
2373 
2374 void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2375                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2376 {
2377     if (TCG_TARGET_HAS_add2_i64) {
2378         tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
2379     } else {
2380         TCGv_i64 t0 = tcg_temp_new_i64();
2381         TCGv_i64 t1 = tcg_temp_new_i64();
2382         tcg_gen_add_i64(t0, al, bl);
2383         tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
2384         tcg_gen_add_i64(rh, ah, bh);
2385         tcg_gen_add_i64(rh, rh, t1);
2386         tcg_gen_mov_i64(rl, t0);
2387         tcg_temp_free_i64(t0);
2388         tcg_temp_free_i64(t1);
2389     }
2390 }
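
/* Illustrative sketch (hypothetical helper, for exposition only): the
   fallback above detects carry out of the low half with an unsigned
   compare, since wraparound makes the sum smaller than either addend.  */
static inline void example_add128(uint64_t *rl, uint64_t *rh,
                                  uint64_t al, uint64_t ah,
                                  uint64_t bl, uint64_t bh)
{
    uint64_t lo = al + bl;

    *rh = ah + bh + (lo < al);      /* the LTU setcond above */
    *rl = lo;
}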
2391 
2392 void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2393                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2394 {
2395     if (TCG_TARGET_HAS_sub2_i64) {
2396         tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
2397     } else {
2398         TCGv_i64 t0 = tcg_temp_new_i64();
2399         TCGv_i64 t1 = tcg_temp_new_i64();
2400         tcg_gen_sub_i64(t0, al, bl);
2401         tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
2402         tcg_gen_sub_i64(rh, ah, bh);
2403         tcg_gen_sub_i64(rh, rh, t1);
2404         tcg_gen_mov_i64(rl, t0);
2405         tcg_temp_free_i64(t0);
2406         tcg_temp_free_i64(t1);
2407     }
2408 }
2409 
2410 void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2411 {
2412     if (TCG_TARGET_HAS_mulu2_i64) {
2413         tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
2414     } else if (TCG_TARGET_HAS_muluh_i64) {
2415         TCGv_i64 t = tcg_temp_new_i64();
2416         tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2417         tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
2418         tcg_gen_mov_i64(rl, t);
2419         tcg_temp_free_i64(t);
2420     } else {
2421         TCGv_i64 t0 = tcg_temp_new_i64();
2422         tcg_gen_mul_i64(t0, arg1, arg2);
2423         gen_helper_muluh_i64(rh, arg1, arg2);
2424         tcg_gen_mov_i64(rl, t0);
2425         tcg_temp_free_i64(t0);
2426     }
2427 }
2428 
2429 void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2430 {
2431     if (TCG_TARGET_HAS_muls2_i64) {
2432         tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
2433     } else if (TCG_TARGET_HAS_mulsh_i64) {
2434         TCGv_i64 t = tcg_temp_new_i64();
2435         tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2436         tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
2437         tcg_gen_mov_i64(rl, t);
2438         tcg_temp_free_i64(t);
2439     } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
2440         TCGv_i64 t0 = tcg_temp_new_i64();
2441         TCGv_i64 t1 = tcg_temp_new_i64();
2442         TCGv_i64 t2 = tcg_temp_new_i64();
2443         TCGv_i64 t3 = tcg_temp_new_i64();
2444         tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2445         /* Adjust for negative inputs.  */
2446         tcg_gen_sari_i64(t2, arg1, 63);
2447         tcg_gen_sari_i64(t3, arg2, 63);
2448         tcg_gen_and_i64(t2, t2, arg2);
2449         tcg_gen_and_i64(t3, t3, arg1);
2450         tcg_gen_sub_i64(rh, t1, t2);
2451         tcg_gen_sub_i64(rh, rh, t3);
2452         tcg_gen_mov_i64(rl, t0);
2453         tcg_temp_free_i64(t0);
2454         tcg_temp_free_i64(t1);
2455         tcg_temp_free_i64(t2);
2456         tcg_temp_free_i64(t3);
2457     } else {
2458         TCGv_i64 t0 = tcg_temp_new_i64();
2459         tcg_gen_mul_i64(t0, arg1, arg2);
2460         gen_helper_mulsh_i64(rh, arg1, arg2);
2461         tcg_gen_mov_i64(rl, t0);
2462         tcg_temp_free_i64(t0);
2463     }
2464 }
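
/* Illustrative sketch (hypothetical helper, for exposition only): the
   mulu2-based path above corrects the unsigned high word into the
   signed one.  Shown at 32x32->64 so plain 64-bit host arithmetic can
   check it: reading a negative n-bit operand as unsigned adds 2^n, so
   the unsigned high word overcounts by b when a < 0 and by a when
   b < 0.  Assumes arithmetic right shifts, as TCG's sari provides.  */
static inline int32_t example_mulsh32(int32_t a, int32_t b)
{
    uint64_t u = (uint64_t)(uint32_t)a * (uint32_t)b;
    uint32_t uh = u >> 32;
    uint32_t t2 = (uint32_t)(a >> 31) & (uint32_t)b;    /* b if a < 0 */
    uint32_t t3 = (uint32_t)(b >> 31) & (uint32_t)a;    /* a if b < 0 */

    return (int32_t)(uh - t2 - t3);
}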
2465 
2466 void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2467 {
2468     TCGv_i64 t0 = tcg_temp_new_i64();
2469     TCGv_i64 t1 = tcg_temp_new_i64();
2470     TCGv_i64 t2 = tcg_temp_new_i64();
2471     tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2472     /* Adjust for negative input for the signed arg1.  */
2473     tcg_gen_sari_i64(t2, arg1, 63);
2474     tcg_gen_and_i64(t2, t2, arg2);
2475     tcg_gen_sub_i64(rh, t1, t2);
2476     tcg_gen_mov_i64(rl, t0);
2477     tcg_temp_free_i64(t0);
2478     tcg_temp_free_i64(t1);
2479     tcg_temp_free_i64(t2);
2480 }
2481 
2482 /* Size changing operations.  */
2483 
2484 void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
2485 {
2486     if (TCG_TARGET_REG_BITS == 32) {
2487         tcg_gen_mov_i32(ret, TCGV_LOW(arg));
2488     } else if (TCG_TARGET_HAS_extrl_i64_i32) {
2489         tcg_gen_op2(&tcg_ctx, INDEX_op_extrl_i64_i32,
2490                     GET_TCGV_I32(ret), GET_TCGV_I64(arg));
2491     } else {
2492         tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
2493     }
2494 }
2495 
2496 void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
2497 {
2498     if (TCG_TARGET_REG_BITS == 32) {
2499         tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
2500     } else if (TCG_TARGET_HAS_extrh_i64_i32) {
2501         tcg_gen_op2(&tcg_ctx, INDEX_op_extrh_i64_i32,
2502                     GET_TCGV_I32(ret), GET_TCGV_I64(arg));
2503     } else {
2504         TCGv_i64 t = tcg_temp_new_i64();
2505         tcg_gen_shri_i64(t, arg, 32);
2506         tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
2507         tcg_temp_free_i64(t);
2508     }
2509 }
2510 
2511 void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
2512 {
2513     if (TCG_TARGET_REG_BITS == 32) {
2514         tcg_gen_mov_i32(TCGV_LOW(ret), arg);
2515         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2516     } else {
2517         tcg_gen_op2(&tcg_ctx, INDEX_op_extu_i32_i64,
2518                     GET_TCGV_I64(ret), GET_TCGV_I32(arg));
2519     }
2520 }
2521 
2522 void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
2523 {
2524     if (TCG_TARGET_REG_BITS == 32) {
2525         tcg_gen_mov_i32(TCGV_LOW(ret), arg);
2526         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2527     } else {
2528         tcg_gen_op2(&tcg_ctx, INDEX_op_ext_i32_i64,
2529                     GET_TCGV_I64(ret), GET_TCGV_I32(arg));
2530     }
2531 }
2532 
2533 void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
2534 {
2535     TCGv_i64 tmp;
2536 
2537     if (TCG_TARGET_REG_BITS == 32) {
2538         tcg_gen_mov_i32(TCGV_LOW(dest), low);
2539         tcg_gen_mov_i32(TCGV_HIGH(dest), high);
2540         return;
2541     }
2542 
2543     tmp = tcg_temp_new_i64();
2544     /* These extensions are only needed for type correctness.
2545        We may be able to do better given target-specific information.  */
2546     tcg_gen_extu_i32_i64(tmp, high);
2547     tcg_gen_extu_i32_i64(dest, low);
2548     /* If deposit is available, use it.  Otherwise exploit the fact
2549        that both inputs were zero-extended above.  */
2550     if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
2551         tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
2552     } else {
2553         tcg_gen_shli_i64(tmp, tmp, 32);
2554         tcg_gen_or_i64(dest, dest, tmp);
2555     }
2556     tcg_temp_free_i64(tmp);
2557 }
2558 
2559 void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
2560 {
2561     if (TCG_TARGET_REG_BITS == 32) {
2562         tcg_gen_mov_i32(lo, TCGV_LOW(arg));
2563         tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
2564     } else {
2565         tcg_gen_extrl_i64_i32(lo, arg);
2566         tcg_gen_extrh_i64_i32(hi, arg);
2567     }
2568 }
2569 
2570 void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
2571 {
2572     tcg_gen_ext32u_i64(lo, arg);
2573     tcg_gen_shri_i64(hi, arg, 32);
2574 }
2575 
2576 /* QEMU specific operations.  */
2577 
2578 void tcg_gen_goto_tb(unsigned idx)
2579 {
2580     /* We only support two chained exits.  */
2581     tcg_debug_assert(idx <= 1);
2582 #ifdef CONFIG_DEBUG_TCG
2583     /* Verify that we haven't seen this numbered exit before.  */
2584     tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
2585     tcg_ctx.goto_tb_issue_mask |= 1 << idx;
2586 #endif
2587     tcg_gen_op1i(INDEX_op_goto_tb, idx);
2588 }
2589 
2590 void tcg_gen_lookup_and_goto_ptr(TCGv addr)
2591 {
2592     if (TCG_TARGET_HAS_goto_ptr && !qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
2593         TCGv_ptr ptr = tcg_temp_new_ptr();
2594         gen_helper_lookup_tb_ptr(ptr, tcg_ctx.tcg_env, addr);
2595         tcg_gen_op1i(INDEX_op_goto_ptr, GET_TCGV_PTR(ptr));
2596         tcg_temp_free_ptr(ptr);
2597     } else {
2598         tcg_gen_exit_tb(0);
2599     }
2600 }
2601 
2602 static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
2603 {
2604     /* Trigger the asserts within get_alignment_bits as early as possible.  */
2605     (void)get_alignment_bits(op);
2606 
2607     switch (op & MO_SIZE) {
2608     case MO_8:
2609         op &= ~MO_BSWAP;
2610         break;
2611     case MO_16:
2612         break;
2613     case MO_32:
2614         if (!is64) {
2615             op &= ~MO_SIGN;
2616         }
2617         break;
2618     case MO_64:
2619         if (!is64) {
2620             tcg_abort();
2621         }
2622         break;
2623     }
2624     if (st) {
2625         op &= ~MO_SIGN;
2626     }
2627     return op;
2628 }
2629 
2630 static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
2631                          TCGMemOp memop, TCGArg idx)
2632 {
2633     TCGMemOpIdx oi = make_memop_idx(memop, idx);
2634 #if TARGET_LONG_BITS == 32
2635     tcg_gen_op3i_i32(opc, val, addr, oi);
2636 #else
2637     if (TCG_TARGET_REG_BITS == 32) {
2638         tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
2639     } else {
2640         tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
2641     }
2642 #endif
2643 }
2644 
2645 static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
2646                          TCGMemOp memop, TCGArg idx)
2647 {
2648     TCGMemOpIdx oi = make_memop_idx(memop, idx);
2649 #if TARGET_LONG_BITS == 32
2650     if (TCG_TARGET_REG_BITS == 32) {
2651         tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
2652     } else {
2653         tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
2654     }
2655 #else
2656     if (TCG_TARGET_REG_BITS == 32) {
2657         tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
2658                          TCGV_LOW(addr), TCGV_HIGH(addr), oi);
2659     } else {
2660         tcg_gen_op3i_i64(opc, val, addr, oi);
2661     }
2662 #endif
2663 }
2664 
2665 void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
2666 {
2667     memop = tcg_canonicalize_memop(memop, 0, 0);
2668     trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
2669                                addr, trace_mem_get_info(memop, 0));
2670     gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
2671 }
2672 
2673 void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
2674 {
2675     memop = tcg_canonicalize_memop(memop, 0, 1);
2676     trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
2677                                addr, trace_mem_get_info(memop, 1));
2678     gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
2679 }
2680 
2681 void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
2682 {
2683     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
2684         tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
2685         if (memop & MO_SIGN) {
2686             tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
2687         } else {
2688             tcg_gen_movi_i32(TCGV_HIGH(val), 0);
2689         }
2690         return;
2691     }
2692 
2693     memop = tcg_canonicalize_memop(memop, 1, 0);
2694     trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
2695                                addr, trace_mem_get_info(memop, 0));
2696     gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
2697 }
2698 
2699 void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
2700 {
2701     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
2702         tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
2703         return;
2704     }
2705 
2706     memop = tcg_canonicalize_memop(memop, 1, 1);
2707     trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
2708                                addr, trace_mem_get_info(memop, 1));
2709     gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
2710 }
2711 
2712 static void tcg_gen_ext_i32(TCGv_i32 ret, TCGv_i32 val, TCGMemOp opc)
2713 {
2714     switch (opc & MO_SSIZE) {
2715     case MO_SB:
2716         tcg_gen_ext8s_i32(ret, val);
2717         break;
2718     case MO_UB:
2719         tcg_gen_ext8u_i32(ret, val);
2720         break;
2721     case MO_SW:
2722         tcg_gen_ext16s_i32(ret, val);
2723         break;
2724     case MO_UW:
2725         tcg_gen_ext16u_i32(ret, val);
2726         break;
2727     default:
2728         tcg_gen_mov_i32(ret, val);
2729         break;
2730     }
2731 }
2732 
2733 static void tcg_gen_ext_i64(TCGv_i64 ret, TCGv_i64 val, TCGMemOp opc)
2734 {
2735     switch (opc & MO_SSIZE) {
2736     case MO_SB:
2737         tcg_gen_ext8s_i64(ret, val);
2738         break;
2739     case MO_UB:
2740         tcg_gen_ext8u_i64(ret, val);
2741         break;
2742     case MO_SW:
2743         tcg_gen_ext16s_i64(ret, val);
2744         break;
2745     case MO_UW:
2746         tcg_gen_ext16u_i64(ret, val);
2747         break;
2748     case MO_SL:
2749         tcg_gen_ext32s_i64(ret, val);
2750         break;
2751     case MO_UL:
2752         tcg_gen_ext32u_i64(ret, val);
2753         break;
2754     default:
2755         tcg_gen_mov_i64(ret, val);
2756         break;
2757     }
2758 }
2759 
2760 #ifdef CONFIG_SOFTMMU
2761 typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv,
2762                                   TCGv_i32, TCGv_i32, TCGv_i32);
2763 typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv,
2764                                   TCGv_i64, TCGv_i64, TCGv_i32);
2765 typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv,
2766                                   TCGv_i32, TCGv_i32);
2767 typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv,
2768                                   TCGv_i64, TCGv_i32);
2769 #else
2770 typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32, TCGv_i32);
2771 typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64, TCGv_i64);
2772 typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32);
2773 typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64);
2774 #endif
2775 
2776 #ifdef CONFIG_ATOMIC64
2777 # define WITH_ATOMIC64(X) X,
2778 #else
2779 # define WITH_ATOMIC64(X)
2780 #endif
2781 
2782 static void * const table_cmpxchg[16] = {
2783     [MO_8] = gen_helper_atomic_cmpxchgb,
2784     [MO_16 | MO_LE] = gen_helper_atomic_cmpxchgw_le,
2785     [MO_16 | MO_BE] = gen_helper_atomic_cmpxchgw_be,
2786     [MO_32 | MO_LE] = gen_helper_atomic_cmpxchgl_le,
2787     [MO_32 | MO_BE] = gen_helper_atomic_cmpxchgl_be,
2788     WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_cmpxchgq_le)
2789     WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_cmpxchgq_be)
2790 };
2791 
2792 void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv, TCGv addr, TCGv_i32 cmpv,
2793                                 TCGv_i32 newv, TCGArg idx, TCGMemOp memop)
2794 {
2795     memop = tcg_canonicalize_memop(memop, 0, 0);
2796 
2797     if (!parallel_cpus) {
2798         TCGv_i32 t1 = tcg_temp_new_i32();
2799         TCGv_i32 t2 = tcg_temp_new_i32();
2800 
2801         tcg_gen_ext_i32(t2, cmpv, memop & MO_SIZE);
2802 
2803         tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
2804         tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, t2, newv, t1);
2805         tcg_gen_qemu_st_i32(t2, addr, idx, memop);
2806         tcg_temp_free_i32(t2);
2807 
2808         if (memop & MO_SIGN) {
2809             tcg_gen_ext_i32(retv, t1, memop);
2810         } else {
2811             tcg_gen_mov_i32(retv, t1);
2812         }
2813         tcg_temp_free_i32(t1);
2814     } else {
2815         gen_atomic_cx_i32 gen;
2816 
2817         gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
2818         tcg_debug_assert(gen != NULL);
2819 
2820 #ifdef CONFIG_SOFTMMU
2821         {
2822             TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
2823             gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv, oi);
2824             tcg_temp_free_i32(oi);
2825         }
2826 #else
2827         gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv);
2828 #endif
2829 
2830         if (memop & MO_SIGN) {
2831             tcg_gen_ext_i32(retv, retv, memop);
2832         }
2833     }
2834 }
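
/* Illustrative sketch (hypothetical helper, for exposition only): in
   host terms, the serial (!parallel_cpus) path above performs, with
   plain loads and stores plus a movcond:  */
static inline uint32_t example_cmpxchg32(uint32_t *p, uint32_t cmpv,
                                         uint32_t newv)
{
    uint32_t old = *p;

    /* Write back either the new value (on match) or the old one, and
       always return the old value.  */
    *p = (old == cmpv ? newv : old);
    return old;
}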
2835 
2836 void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
2837                                 TCGv_i64 newv, TCGArg idx, TCGMemOp memop)
2838 {
2839     memop = tcg_canonicalize_memop(memop, 1, 0);
2840 
2841     if (!parallel_cpus) {
2842         TCGv_i64 t1 = tcg_temp_new_i64();
2843         TCGv_i64 t2 = tcg_temp_new_i64();
2844 
2845         tcg_gen_ext_i64(t2, cmpv, memop & MO_SIZE);
2846 
2847         tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
2848         tcg_gen_movcond_i64(TCG_COND_EQ, t2, t1, t2, newv, t1);
2849         tcg_gen_qemu_st_i64(t2, addr, idx, memop);
2850         tcg_temp_free_i64(t2);
2851 
2852         if (memop & MO_SIGN) {
2853             tcg_gen_ext_i64(retv, t1, memop);
2854         } else {
2855             tcg_gen_mov_i64(retv, t1);
2856         }
2857         tcg_temp_free_i64(t1);
2858     } else if ((memop & MO_SIZE) == MO_64) {
2859 #ifdef CONFIG_ATOMIC64
2860         gen_atomic_cx_i64 gen;
2861 
2862         gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
2863         tcg_debug_assert(gen != NULL);
2864 
2865 #ifdef CONFIG_SOFTMMU
2866         {
2867             TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop, idx));
2868             gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv, oi);
2869             tcg_temp_free_i32(oi);
2870         }
2871 #else
2872         gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv);
2873 #endif
2874 #else
2875         gen_helper_exit_atomic(tcg_ctx.tcg_env);
2876         /* Produce a result, so that we have a well-formed opcode stream
2877            with respect to uses of the result in the (dead) code following.  */
2878         tcg_gen_movi_i64(retv, 0);
2879 #endif /* CONFIG_ATOMIC64 */
2880     } else {
2881         TCGv_i32 c32 = tcg_temp_new_i32();
2882         TCGv_i32 n32 = tcg_temp_new_i32();
2883         TCGv_i32 r32 = tcg_temp_new_i32();
2884 
2885         tcg_gen_extrl_i64_i32(c32, cmpv);
2886         tcg_gen_extrl_i64_i32(n32, newv);
2887         tcg_gen_atomic_cmpxchg_i32(r32, addr, c32, n32, idx, memop & ~MO_SIGN);
2888         tcg_temp_free_i32(c32);
2889         tcg_temp_free_i32(n32);
2890 
2891         tcg_gen_extu_i32_i64(retv, r32);
2892         tcg_temp_free_i32(r32);
2893 
2894         if (memop & MO_SIGN) {
2895             tcg_gen_ext_i64(retv, retv, memop);
2896         }
2897     }
2898 }
2899 
2900 static void do_nonatomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
2901                                 TCGArg idx, TCGMemOp memop, bool new_val,
2902                                 void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
2903 {
2904     TCGv_i32 t1 = tcg_temp_new_i32();
2905     TCGv_i32 t2 = tcg_temp_new_i32();
2906 
2907     memop = tcg_canonicalize_memop(memop, 0, 0);
2908 
2909     tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
2910     gen(t2, t1, val);
2911     tcg_gen_qemu_st_i32(t2, addr, idx, memop);
2912 
2913     tcg_gen_ext_i32(ret, (new_val ? t2 : t1), memop);
2914     tcg_temp_free_i32(t1);
2915     tcg_temp_free_i32(t2);
2916 }
2917 
2918 static void do_atomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
2919                              TCGArg idx, TCGMemOp memop, void * const table[])
2920 {
2921     gen_atomic_op_i32 gen;
2922 
2923     memop = tcg_canonicalize_memop(memop, 0, 0);
2924 
2925     gen = table[memop & (MO_SIZE | MO_BSWAP)];
2926     tcg_debug_assert(gen != NULL);
2927 
2928 #ifdef CONFIG_SOFTMMU
2929     {
2930         TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
2931         gen(ret, tcg_ctx.tcg_env, addr, val, oi);
2932         tcg_temp_free_i32(oi);
2933     }
2934 #else
2935     gen(ret, tcg_ctx.tcg_env, addr, val);
2936 #endif
2937 
2938     if (memop & MO_SIGN) {
2939         tcg_gen_ext_i32(ret, ret, memop);
2940     }
2941 }
2942 
2943 static void do_nonatomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
2944                                 TCGArg idx, TCGMemOp memop, bool new_val,
2945                                 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
2946 {
2947     TCGv_i64 t1 = tcg_temp_new_i64();
2948     TCGv_i64 t2 = tcg_temp_new_i64();
2949 
2950     memop = tcg_canonicalize_memop(memop, 1, 0);
2951 
2952     tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
2953     gen(t2, t1, val);
2954     tcg_gen_qemu_st_i64(t2, addr, idx, memop);
2955 
2956     tcg_gen_ext_i64(ret, (new_val ? t2 : t1), memop);
2957     tcg_temp_free_i64(t1);
2958     tcg_temp_free_i64(t2);
2959 }
2960 
2961 static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
2962                              TCGArg idx, TCGMemOp memop, void * const table[])
2963 {
2964     memop = tcg_canonicalize_memop(memop, 1, 0);
2965 
2966     if ((memop & MO_SIZE) == MO_64) {
2967 #ifdef CONFIG_ATOMIC64
2968         gen_atomic_op_i64 gen;
2969 
2970         gen = table[memop & (MO_SIZE | MO_BSWAP)];
2971         tcg_debug_assert(gen != NULL);
2972 
2973 #ifdef CONFIG_SOFTMMU
2974         {
2975             TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
2976             gen(ret, tcg_ctx.tcg_env, addr, val, oi);
2977             tcg_temp_free_i32(oi);
2978         }
2979 #else
2980         gen(ret, tcg_ctx.tcg_env, addr, val);
2981 #endif
2982 #else
2983         gen_helper_exit_atomic(tcg_ctx.tcg_env);
2984         /* Produce a result, so that we have a well-formed opcode stream
2985            with respect to uses of the result in the (dead) code following.  */
2986         tcg_gen_movi_i64(ret, 0);
2987 #endif /* CONFIG_ATOMIC64 */
2988     } else {
2989         TCGv_i32 v32 = tcg_temp_new_i32();
2990         TCGv_i32 r32 = tcg_temp_new_i32();
2991 
2992         tcg_gen_extrl_i64_i32(v32, val);
2993         do_atomic_op_i32(r32, addr, v32, idx, memop & ~MO_SIGN, table);
2994         tcg_temp_free_i32(v32);
2995 
2996         tcg_gen_extu_i32_i64(ret, r32);
2997         tcg_temp_free_i32(r32);
2998 
2999         if (memop & MO_SIGN) {
3000             tcg_gen_ext_i64(ret, ret, memop);
3001         }
3002     }
3003 }
3004 
3005 #define GEN_ATOMIC_HELPER(NAME, OP, NEW)                                \
3006 static void * const table_##NAME[16] = {                                \
3007     [MO_8] = gen_helper_atomic_##NAME##b,                               \
3008     [MO_16 | MO_LE] = gen_helper_atomic_##NAME##w_le,                   \
3009     [MO_16 | MO_BE] = gen_helper_atomic_##NAME##w_be,                   \
3010     [MO_32 | MO_LE] = gen_helper_atomic_##NAME##l_le,                   \
3011     [MO_32 | MO_BE] = gen_helper_atomic_##NAME##l_be,                   \
3012     WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_##NAME##q_le)     \
3013     WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_##NAME##q_be)     \
3014 };                                                                      \
3015 void tcg_gen_atomic_##NAME##_i32                                        \
3016     (TCGv_i32 ret, TCGv addr, TCGv_i32 val, TCGArg idx, TCGMemOp memop) \
3017 {                                                                       \
3018     if (parallel_cpus) {                                                \
3019         do_atomic_op_i32(ret, addr, val, idx, memop, table_##NAME);     \
3020     } else {                                                            \
3021         do_nonatomic_op_i32(ret, addr, val, idx, memop, NEW,            \
3022                             tcg_gen_##OP##_i32);                        \
3023     }                                                                   \
3024 }                                                                       \
3025 void tcg_gen_atomic_##NAME##_i64                                        \
3026     (TCGv_i64 ret, TCGv addr, TCGv_i64 val, TCGArg idx, TCGMemOp memop) \
3027 {                                                                       \
3028     if (parallel_cpus) {                                                \
3029         do_atomic_op_i64(ret, addr, val, idx, memop, table_##NAME);     \
3030     } else {                                                            \
3031         do_nonatomic_op_i64(ret, addr, val, idx, memop, NEW,            \
3032                             tcg_gen_##OP##_i64);                        \
3033     }                                                                   \
3034 }
3035 
3036 GEN_ATOMIC_HELPER(fetch_add, add, 0)
3037 GEN_ATOMIC_HELPER(fetch_and, and, 0)
3038 GEN_ATOMIC_HELPER(fetch_or, or, 0)
3039 GEN_ATOMIC_HELPER(fetch_xor, xor, 0)
3040 
3041 GEN_ATOMIC_HELPER(add_fetch, add, 1)
3042 GEN_ATOMIC_HELPER(and_fetch, and, 1)
3043 GEN_ATOMIC_HELPER(or_fetch, or, 1)
3044 GEN_ATOMIC_HELPER(xor_fetch, xor, 1)
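
/* Illustrative sketch (hypothetical helper, for exposition only): the
   NEW argument to GEN_ATOMIC_HELPER selects which value the nonatomic
   path returns: fetch_add yields the old memory contents, add_fetch
   the updated ones.  */
static inline uint32_t example_fetch_add32(uint32_t *p, uint32_t v,
                                           bool new_val)
{
    uint32_t t1 = *p;       /* old value */
    uint32_t t2 = t1 + v;   /* new value */

    *p = t2;
    return new_val ? t2 : t1;
}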
3045 
3046 static void tcg_gen_mov2_i32(TCGv_i32 r, TCGv_i32 a, TCGv_i32 b)
3047 {
3048     tcg_gen_mov_i32(r, b);
3049 }
3050 
3051 static void tcg_gen_mov2_i64(TCGv_i64 r, TCGv_i64 a, TCGv_i64 b)
3052 {
3053     tcg_gen_mov_i64(r, b);
3054 }
3055 
3056 GEN_ATOMIC_HELPER(xchg, mov2, 0)
3057 
3058 #undef GEN_ATOMIC_HELPER
3059