xref: /openbmc/qemu/tcg/tcg-op.c (revision 33c11879)
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24 
25 #include "qemu/osdep.h"
26 #include "qemu-common.h"
27 #include "cpu.h"
28 #include "tcg.h"
29 #include "tcg-op.h"
30 
31 /* Reduce the number of ifdefs below.  This assumes that all uses of
32    TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
33    the compiler can eliminate.  */
34 #if TCG_TARGET_REG_BITS == 64
35 extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
36 extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
37 #define TCGV_LOW  TCGV_LOW_link_error
38 #define TCGV_HIGH TCGV_HIGH_link_error
39 #endif
40 
41 /* Note that this is optimized for sequential allocation during translate.
42    Up to and including filling in the forward link immediately.  We'll do
43    proper termination of the end of the list after we finish translation.  */
44 
/* Append one op to ctx->gen_op_buf and link it into the doubly linked
   op list.  'args' is the index of the op's first argument within
   gen_opparam_buf.  Optimized for sequential allocation: the forward
   link is filled in immediately; the end of the list is properly
   terminated only after translation finishes (see file comment above). */
static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;   /* slot for this op */
    int ni = oi + 1;                 /* provisional forward link */
    int pi = oi - 1;                 /* previous op; -1 for the first op */

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_last_op_idx = oi;
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,
        .prev = pi,
        .next = ni
    };
}
62 
63 void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
64 {
65     int pi = ctx->gen_next_parm_idx;
66 
67     tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
68     ctx->gen_next_parm_idx = pi + 1;
69     ctx->gen_opparam_buf[pi] = a1;
70 
71     tcg_emit_op(ctx, opc, pi);
72 }
73 
74 void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
75 {
76     int pi = ctx->gen_next_parm_idx;
77 
78     tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
79     ctx->gen_next_parm_idx = pi + 2;
80     ctx->gen_opparam_buf[pi + 0] = a1;
81     ctx->gen_opparam_buf[pi + 1] = a2;
82 
83     tcg_emit_op(ctx, opc, pi);
84 }
85 
86 void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
87                  TCGArg a2, TCGArg a3)
88 {
89     int pi = ctx->gen_next_parm_idx;
90 
91     tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
92     ctx->gen_next_parm_idx = pi + 3;
93     ctx->gen_opparam_buf[pi + 0] = a1;
94     ctx->gen_opparam_buf[pi + 1] = a2;
95     ctx->gen_opparam_buf[pi + 2] = a3;
96 
97     tcg_emit_op(ctx, opc, pi);
98 }
99 
100 void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
101                  TCGArg a2, TCGArg a3, TCGArg a4)
102 {
103     int pi = ctx->gen_next_parm_idx;
104 
105     tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
106     ctx->gen_next_parm_idx = pi + 4;
107     ctx->gen_opparam_buf[pi + 0] = a1;
108     ctx->gen_opparam_buf[pi + 1] = a2;
109     ctx->gen_opparam_buf[pi + 2] = a3;
110     ctx->gen_opparam_buf[pi + 3] = a4;
111 
112     tcg_emit_op(ctx, opc, pi);
113 }
114 
115 void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
116                  TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
117 {
118     int pi = ctx->gen_next_parm_idx;
119 
120     tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
121     ctx->gen_next_parm_idx = pi + 5;
122     ctx->gen_opparam_buf[pi + 0] = a1;
123     ctx->gen_opparam_buf[pi + 1] = a2;
124     ctx->gen_opparam_buf[pi + 2] = a3;
125     ctx->gen_opparam_buf[pi + 3] = a4;
126     ctx->gen_opparam_buf[pi + 4] = a5;
127 
128     tcg_emit_op(ctx, opc, pi);
129 }
130 
131 void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
132                  TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
133 {
134     int pi = ctx->gen_next_parm_idx;
135 
136     tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
137     ctx->gen_next_parm_idx = pi + 6;
138     ctx->gen_opparam_buf[pi + 0] = a1;
139     ctx->gen_opparam_buf[pi + 1] = a2;
140     ctx->gen_opparam_buf[pi + 2] = a3;
141     ctx->gen_opparam_buf[pi + 3] = a4;
142     ctx->gen_opparam_buf[pi + 4] = a5;
143     ctx->gen_opparam_buf[pi + 5] = a6;
144 
145     tcg_emit_op(ctx, opc, pi);
146 }
147 
148 /* 32 bit ops */
149 
150 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
151 {
152     /* some cases can be optimized here */
153     if (arg2 == 0) {
154         tcg_gen_mov_i32(ret, arg1);
155     } else {
156         TCGv_i32 t0 = tcg_const_i32(arg2);
157         tcg_gen_add_i32(ret, arg1, t0);
158         tcg_temp_free_i32(t0);
159     }
160 }
161 
162 void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
163 {
164     if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
165         /* Don't recurse with tcg_gen_neg_i32.  */
166         tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
167     } else {
168         TCGv_i32 t0 = tcg_const_i32(arg1);
169         tcg_gen_sub_i32(ret, t0, arg2);
170         tcg_temp_free_i32(t0);
171     }
172 }
173 
174 void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
175 {
176     /* some cases can be optimized here */
177     if (arg2 == 0) {
178         tcg_gen_mov_i32(ret, arg1);
179     } else {
180         TCGv_i32 t0 = tcg_const_i32(arg2);
181         tcg_gen_sub_i32(ret, arg1, t0);
182         tcg_temp_free_i32(t0);
183     }
184 }
185 
186 void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
187 {
188     TCGv_i32 t0;
189     /* Some cases can be optimized here.  */
190     switch (arg2) {
191     case 0:
192         tcg_gen_movi_i32(ret, 0);
193         return;
194     case 0xffffffffu:
195         tcg_gen_mov_i32(ret, arg1);
196         return;
197     case 0xffu:
198         /* Don't recurse with tcg_gen_ext8u_i32.  */
199         if (TCG_TARGET_HAS_ext8u_i32) {
200             tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
201             return;
202         }
203         break;
204     case 0xffffu:
205         if (TCG_TARGET_HAS_ext16u_i32) {
206             tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
207             return;
208         }
209         break;
210     }
211     t0 = tcg_const_i32(arg2);
212     tcg_gen_and_i32(ret, arg1, t0);
213     tcg_temp_free_i32(t0);
214 }
215 
216 void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
217 {
218     /* Some cases can be optimized here.  */
219     if (arg2 == -1) {
220         tcg_gen_movi_i32(ret, -1);
221     } else if (arg2 == 0) {
222         tcg_gen_mov_i32(ret, arg1);
223     } else {
224         TCGv_i32 t0 = tcg_const_i32(arg2);
225         tcg_gen_or_i32(ret, arg1, t0);
226         tcg_temp_free_i32(t0);
227     }
228 }
229 
230 void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
231 {
232     /* Some cases can be optimized here.  */
233     if (arg2 == 0) {
234         tcg_gen_mov_i32(ret, arg1);
235     } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
236         /* Don't recurse with tcg_gen_not_i32.  */
237         tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
238     } else {
239         TCGv_i32 t0 = tcg_const_i32(arg2);
240         tcg_gen_xor_i32(ret, arg1, t0);
241         tcg_temp_free_i32(t0);
242     }
243 }
244 
245 void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
246 {
247     tcg_debug_assert(arg2 < 32);
248     if (arg2 == 0) {
249         tcg_gen_mov_i32(ret, arg1);
250     } else {
251         TCGv_i32 t0 = tcg_const_i32(arg2);
252         tcg_gen_shl_i32(ret, arg1, t0);
253         tcg_temp_free_i32(t0);
254     }
255 }
256 
257 void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
258 {
259     tcg_debug_assert(arg2 < 32);
260     if (arg2 == 0) {
261         tcg_gen_mov_i32(ret, arg1);
262     } else {
263         TCGv_i32 t0 = tcg_const_i32(arg2);
264         tcg_gen_shr_i32(ret, arg1, t0);
265         tcg_temp_free_i32(t0);
266     }
267 }
268 
269 void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
270 {
271     tcg_debug_assert(arg2 < 32);
272     if (arg2 == 0) {
273         tcg_gen_mov_i32(ret, arg1);
274     } else {
275         TCGv_i32 t0 = tcg_const_i32(arg2);
276         tcg_gen_sar_i32(ret, arg1, t0);
277         tcg_temp_free_i32(t0);
278     }
279 }
280 
281 void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
282 {
283     if (cond == TCG_COND_ALWAYS) {
284         tcg_gen_br(l);
285     } else if (cond != TCG_COND_NEVER) {
286         tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
287     }
288 }
289 
290 void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
291 {
292     if (cond == TCG_COND_ALWAYS) {
293         tcg_gen_br(l);
294     } else if (cond != TCG_COND_NEVER) {
295         TCGv_i32 t0 = tcg_const_i32(arg2);
296         tcg_gen_brcond_i32(cond, arg1, t0, l);
297         tcg_temp_free_i32(t0);
298     }
299 }
300 
301 void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
302                          TCGv_i32 arg1, TCGv_i32 arg2)
303 {
304     if (cond == TCG_COND_ALWAYS) {
305         tcg_gen_movi_i32(ret, 1);
306     } else if (cond == TCG_COND_NEVER) {
307         tcg_gen_movi_i32(ret, 0);
308     } else {
309         tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
310     }
311 }
312 
313 void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
314                           TCGv_i32 arg1, int32_t arg2)
315 {
316     TCGv_i32 t0 = tcg_const_i32(arg2);
317     tcg_gen_setcond_i32(cond, ret, arg1, t0);
318     tcg_temp_free_i32(t0);
319 }
320 
321 void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
322 {
323     TCGv_i32 t0 = tcg_const_i32(arg2);
324     tcg_gen_mul_i32(ret, arg1, t0);
325     tcg_temp_free_i32(t0);
326 }
327 
/* ret = arg1 / arg2 (signed), using the best support the target offers.  */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* div2 takes the dividend as a (low, high) pair; t0 supplies
           the high half as the sign-extension of arg1.  Outputs are
           quotient then remainder (compare tcg_gen_rem_i32, which
           swaps the two output slots).  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        /* No target support at all: out-of-line helper.  */
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
341 
/* ret = arg1 % arg2 (signed), synthesized from available division ops.  */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* r = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* As in tcg_gen_div_i32, but with the output slots swapped so
           the remainder lands in ret and the quotient in t0.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
361 
/* ret = arg1 / arg2 (unsigned).  */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* Unsigned variant of the div2 expansion: the high half of
           the dividend pair is zero rather than a sign extension.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
375 
/* ret = arg1 % arg2 (unsigned).  */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* r = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* As tcg_gen_divu_i32, with output slots swapped so the
           remainder lands in ret.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
395 
396 void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
397 {
398     if (TCG_TARGET_HAS_andc_i32) {
399         tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
400     } else {
401         TCGv_i32 t0 = tcg_temp_new_i32();
402         tcg_gen_not_i32(t0, arg2);
403         tcg_gen_and_i32(ret, arg1, t0);
404         tcg_temp_free_i32(t0);
405     }
406 }
407 
408 void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
409 {
410     if (TCG_TARGET_HAS_eqv_i32) {
411         tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
412     } else {
413         tcg_gen_xor_i32(ret, arg1, arg2);
414         tcg_gen_not_i32(ret, ret);
415     }
416 }
417 
418 void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
419 {
420     if (TCG_TARGET_HAS_nand_i32) {
421         tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
422     } else {
423         tcg_gen_and_i32(ret, arg1, arg2);
424         tcg_gen_not_i32(ret, ret);
425     }
426 }
427 
428 void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
429 {
430     if (TCG_TARGET_HAS_nor_i32) {
431         tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
432     } else {
433         tcg_gen_or_i32(ret, arg1, arg2);
434         tcg_gen_not_i32(ret, ret);
435     }
436 }
437 
438 void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
439 {
440     if (TCG_TARGET_HAS_orc_i32) {
441         tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
442     } else {
443         TCGv_i32 t0 = tcg_temp_new_i32();
444         tcg_gen_not_i32(t0, arg2);
445         tcg_gen_or_i32(ret, arg1, t0);
446         tcg_temp_free_i32(t0);
447     }
448 }
449 
/* ret = arg1 rotated left by a variable count arg2.  */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        /* Compose from shifts: (arg1 << arg2) | (arg1 >> (32 - arg2)).  */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
467 
/* ret = arg1 rotated left by an immediate count (0 <= arg2 < 32).  */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        /* Compose from immediate shifts; arg2 != 0 here, so 32 - arg2
           is a valid shift count.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
489 
/* ret = arg1 rotated right by a variable count arg2.  */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        /* Compose from shifts: (arg1 >> arg2) | (arg1 << (32 - arg2)).  */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
507 
508 void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
509 {
510     tcg_debug_assert(arg2 < 32);
511     /* some cases can be optimized here */
512     if (arg2 == 0) {
513         tcg_gen_mov_i32(ret, arg1);
514     } else {
515         tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
516     }
517 }
518 
/* Deposit the low 'len' bits of arg2 into arg1 at bit offset 'ofs';
   result in ret.  */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* A full-width deposit is a plain move of arg2.  */
    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    /* Fallback: mask-and-merge.  len < 32 is guaranteed here (len == 32
       forces ofs == 0, handled above), so this shift is well-defined.  */
    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches bit 31: the left shift already discards the
           excess high bits of arg2, no pre-mask needed.  */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}
552 
/* ret = (c1 cond c2) ? v1 : v2.  */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        /* Branchless fallback: turn the 0/1 setcond result into an
           all-zeros/all-ones mask via negation, then select with
           (v1 & mask) | (v2 & ~mask).  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
574 
/* 64-bit add from 32-bit halves: (rh:rl) = (ah:al) + (bh:bl).  */
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        /* Widen both operands to i64, add, split the result back.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
591 
/* 64-bit subtract from 32-bit halves: (rh:rl) = (ah:al) - (bh:bl).  */
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        /* Widen both operands to i64, subtract, split back.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
608 
/* Unsigned 32x32 -> 64 multiply: low half in rl, high half in rh.  */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        /* Compute the low product into a temp first so rl may alias
           the inputs of the muluh.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        /* Zero-extend to i64, multiply there, split the result.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
630 
/* Signed 32x32 -> 64 multiply: low half in rl, high half in rh.  */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        /* Low product into a temp so rl may alias the mulsh inputs.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Derive the signed result from the unsigned product: for each
           negative input, the unsigned high half is too large by the
           other operand, so subtract it out.  The low half is shared.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host: sign-extend, multiply in i64, split back.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
670 
671 void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
672 {
673     if (TCG_TARGET_HAS_ext8s_i32) {
674         tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
675     } else {
676         tcg_gen_shli_i32(ret, arg, 24);
677         tcg_gen_sari_i32(ret, ret, 24);
678     }
679 }
680 
681 void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
682 {
683     if (TCG_TARGET_HAS_ext16s_i32) {
684         tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
685     } else {
686         tcg_gen_shli_i32(ret, arg, 16);
687         tcg_gen_sari_i32(ret, ret, 16);
688     }
689 }
690 
691 void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
692 {
693     if (TCG_TARGET_HAS_ext8u_i32) {
694         tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
695     } else {
696         tcg_gen_andi_i32(ret, arg, 0xffu);
697     }
698 }
699 
700 void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
701 {
702     if (TCG_TARGET_HAS_ext16u_i32) {
703         tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
704     } else {
705         tcg_gen_andi_i32(ret, arg, 0xffffu);
706     }
707 }
708 
/* Byte-swap the low 16 bits of arg into ret.
   Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        /* t0 = low byte moved up; ret = high byte moved down.  */
        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}
724 
/* Byte-swap all 32 bits of arg into ret.  */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        /* Accumulate the four bytes into t0, each moved to its
           mirrored position.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        /* byte 0 -> byte 3 */
        tcg_gen_shli_i32(t0, arg, 24);

        /* byte 1 -> byte 2 */
        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        /* byte 2 -> byte 1 */
        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        /* byte 3 -> byte 0 */
        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
750 
751 /* 64-bit ops */
752 
753 #if TCG_TARGET_REG_BITS == 32
754 /* These are all inline for TCG_TARGET_REG_BITS == 64.  */
755 
/* Mark both 32-bit halves of a 64-bit temp as dead.  */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}
761 
/* 64-bit move via the two 32-bit halves.  */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}
767 
/* Load a 64-bit constant: low 32 bits into LOW, top 32 into HIGH.  */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
773 
/* Load a zero-extended byte: high half is cleared.  */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
779 
780 void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
781 {
782     tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
783     tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), 31);
784 }
785 
/* Load a zero-extended halfword: high half is cleared.  */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
791 
/* Load a sign-extended halfword: high half is the sign of the low.  */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
797 
/* Load a zero-extended 32-bit word: high half is cleared.  */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
803 
/* Load a sign-extended 32-bit word: high half is the sign of the low.  */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
809 
/* Load a full 64-bit value as two 32-bit loads, honoring host
   endianness for which half sits at the lower address.  */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
822 
/* Store a full 64-bit value as two 32-bit stores, honoring host
   endianness for which half goes to the lower address.  */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
833 
/* Bitwise AND, performed independently on each 32-bit half.  */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
839 
/* Bitwise OR, performed independently on each 32-bit half.  */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
845 
/* Bitwise XOR, performed independently on each 32-bit half.  */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
851 
/* Variable 64-bit shift left: always via helper on a 32-bit host.  */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}
856 
/* Variable 64-bit logical shift right: always via helper here.  */
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}
861 
/* Variable 64-bit arithmetic shift right: always via helper here.  */
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
866 
/* 64-bit multiply on a 32-bit host: unsigned 32x32->64 product of the
   low halves, plus both cross products folded into the high half.
   (high1 * high2 only affects bits >= 64 and is dropped.)  */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    /* Accumulate into a temp so ret may alias arg1/arg2.  */
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    /* Cross products: low1 * high2 and high1 * low2.  */
    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
887 #endif /* TCG_TARGET_REG_SIZE == 32 */
888 
889 void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
890 {
891     /* some cases can be optimized here */
892     if (arg2 == 0) {
893         tcg_gen_mov_i64(ret, arg1);
894     } else {
895         TCGv_i64 t0 = tcg_const_i64(arg2);
896         tcg_gen_add_i64(ret, arg1, t0);
897         tcg_temp_free_i64(t0);
898     }
899 }
900 
901 void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
902 {
903     if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
904         /* Don't recurse with tcg_gen_neg_i64.  */
905         tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
906     } else {
907         TCGv_i64 t0 = tcg_const_i64(arg1);
908         tcg_gen_sub_i64(ret, t0, arg2);
909         tcg_temp_free_i64(t0);
910     }
911 }
912 
913 void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
914 {
915     /* some cases can be optimized here */
916     if (arg2 == 0) {
917         tcg_gen_mov_i64(ret, arg1);
918     } else {
919         TCGv_i64 t0 = tcg_const_i64(arg2);
920         tcg_gen_sub_i64(ret, arg1, t0);
921         tcg_temp_free_i64(t0);
922     }
923 }
924 
/* ret = arg1 & arg2 (immediate), with strength reduction for masks
   that are really zero-extensions.  */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    /* On 32-bit hosts, split into two independent 32-bit ANDs.  */
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}
967 
/* Generate ret = arg1 | arg2 with a constant operand.  */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* On 32-bit hosts, handle each 32-bit half independently.  */
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        /* OR with all-ones saturates to all-ones.  */
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        /* OR with zero is a plain move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
986 
/* Generate ret = arg1 ^ arg2 with a constant operand, strength-reducing
   XOR with all-ones to a not op when the backend supports it.  */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* On 32-bit hosts, handle each 32-bit half independently.  */
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        /* XOR with zero is a plain move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1006 
/* Emit a 64-bit shift by constant C on a 32-bit host, decomposed into
   32-bit operations on the low/high halves.  RIGHT selects the shift
   direction; ARITH selects arithmetic (sign-propagating) right shift.  */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        /* Shift by zero is a plain move of both halves.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* Shifts of 32..63 move data entirely from one half to the other;
           the vacated half becomes zero or sign bits.  */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        /* Shifts of 1..31: bits cross the half boundary, so combine the
           shifted half with the bits arriving from the other half.  */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            /* t0 holds the high-half bits entering the low half.  */
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            /* t0 holds the low-half bits entering the high half.  */
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1055 
/* Generate ret = arg1 << arg2 with a constant shift count in [0, 63].  */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: decompose into 32-bit half operations.  */
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        /* Shift by zero is a plain move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1069 
/* Generate ret = arg1 >> arg2 (logical) with a constant count in [0, 63].  */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: decompose into 32-bit half operations.  */
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        /* Shift by zero is a plain move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1083 
/* Generate ret = arg1 >> arg2 (arithmetic) with a constant count in
   [0, 63].  */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: decompose into 32-bit half operations.  */
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        /* Shift by zero is a plain move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1097 
/* Emit a conditional branch to label L if COND holds for (arg1, arg2).
   TCG_COND_ALWAYS becomes an unconditional branch; TCG_COND_NEVER emits
   nothing.  */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: compare as two 32-bit pairs via brcond2.  */
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}
1113 
1114 void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
1115 {
1116     if (cond == TCG_COND_ALWAYS) {
1117         tcg_gen_br(l);
1118     } else if (cond != TCG_COND_NEVER) {
1119         TCGv_i64 t0 = tcg_const_i64(arg2);
1120         tcg_gen_brcond_i64(cond, arg1, t0, l);
1121         tcg_temp_free_i64(t0);
1122     }
1123 }
1124 
/* Set ret to 1 if COND holds for (arg1, arg2), else 0.  */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* setcond2 writes a 32-bit result; zero the high half.  */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1143 
1144 void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
1145                           TCGv_i64 arg1, int64_t arg2)
1146 {
1147     TCGv_i64 t0 = tcg_const_i64(arg2);
1148     tcg_gen_setcond_i64(cond, ret, arg1, t0);
1149     tcg_temp_free_i64(t0);
1150 }
1151 
1152 void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1153 {
1154     TCGv_i64 t0 = tcg_const_i64(arg2);
1155     tcg_gen_mul_i64(ret, arg1, t0);
1156     tcg_temp_free_i64(t0);
1157 }
1158 
/* Generate signed 64-bit division ret = arg1 / arg2.  Prefer a native
   div op, then the two-output div2 op, then a helper call.  */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 takes a sign-extended high word; produce it from arg1.
           First output is the quotient, second the remainder.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1172 
/* Generate signed 64-bit remainder ret = arg1 % arg2.  Prefer a native
   rem op, then synthesize from div, then div2, then a helper call.  */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* rem = arg1 - (arg1 / arg2) * arg2.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2's second output is the remainder, so direct it to ret
           (contrast tcg_gen_div_i64, which keeps the first output).  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1192 
/* Generate unsigned 64-bit division ret = arg1 / arg2.  */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* Unsigned divu2 takes a zero high word.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1206 
/* Generate unsigned 64-bit remainder ret = arg1 % arg2.  */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* rem = arg1 - (arg1 / arg2) * arg2.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 takes a zero high word; its second output is the
           remainder, so direct it to ret.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1226 
/* Sign-extend the low 8 bits of arg into ret.  */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half, then copy its sign to the high.  */
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        /* Fallback: shift up, then arithmetic shift back down.  */
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}
1239 
/* Sign-extend the low 16 bits of arg into ret.  */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half, then copy its sign to the high.  */
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        /* Fallback: shift up, then arithmetic shift back down.  */
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}
1252 
/* Sign-extend the low 32 bits of arg into ret.  */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* The low half is already the value; derive the high half from
           its sign bit.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        /* Fallback: shift up, then arithmetic shift back down.  */
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
1265 
/* Zero-extend the low 8 bits of arg into ret.  */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        /* Fallback: mask (tcg_gen_andi_i64 won't recurse back here,
           since it only uses ext8u when the backend has it).  */
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}
1277 
/* Zero-extend the low 16 bits of arg into ret.  */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        /* Fallback: mask the low halfword.  */
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}
1289 
/* Zero-extend the low 32 bits of arg into ret.  */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        /* Fallback: mask the low word.  */
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1301 
/* Byte-swap the low 16 bits of arg into ret.
   Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        /* Fallback: swap the two low bytes by shifting and OR-ing.  */
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);        /* t0 = byte 0, moved up */
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);     /* ret = byte 1, moved down */
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}
1320 
/* Byte-swap the low 32 bits of arg into ret.
   Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        /* Fallback: move each of the four bytes to its mirrored lane
           and accumulate them with OR.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 3 (ext32u discards bits shifted past bit 31).  */
        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        /* Byte 1 -> byte 2.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 0.  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1351 
/* Byte-swap all 64 bits of arg into ret.  */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap bytes within each half, then swap the halves.  Temps are
           needed because ret may alias arg.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        /* Fallback: move each of the eight bytes to its mirrored lane
           and accumulate them with OR.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 7.  */
        tcg_gen_shli_i64(t0, arg, 56);

        /* Byte 1 -> byte 6.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 5.  */
        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 4.  */
        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 4 -> byte 3.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 5 -> byte 2.  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 6 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 7 -> byte 0.  */
        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1403 
/* Generate ret = ~arg.  */
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        /* Fallback: NOT is XOR with all-ones (xori won't recurse back,
           since it only uses the not op when the backend has it).  */
        tcg_gen_xori_i64(ret, arg, -1);
    }
}
1415 
/* Generate ret = arg1 & ~arg2.  */
void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        /* Fallback: invert arg2 into a temp, then AND.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1430 
/* Generate ret = ~(arg1 ^ arg2) (bitwise equivalence).  */
void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        /* Fallback: XOR then NOT.  */
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1443 
/* Generate ret = ~(arg1 & arg2).  */
void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        /* Fallback: AND then NOT.  */
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1456 
/* Generate ret = ~(arg1 | arg2).  */
void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        /* Fallback: OR then NOT.  */
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1469 
/* Generate ret = arg1 | ~arg2.  */
void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        /* Fallback: invert arg2 into a temp, then OR.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1484 
/* Rotate arg1 left by the variable count arg2.  */
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        /* Fallback: (arg1 << c) | (arg1 >> (64 - c)).  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1501 
/* Rotate arg1 left by a constant count in [0, 63].  */
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        /* Fallback: (arg1 << c) | (arg1 >> (64 - c)), with both shift
           counts now known to be in 1..63.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1523 
/* Rotate arg1 right by the variable count arg2.  */
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        /* Fallback: (arg1 >> c) | (arg1 << (64 - c)).  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1540 
1541 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1542 {
1543     tcg_debug_assert(arg2 < 64);
1544     /* some cases can be optimized here */
1545     if (arg2 == 0) {
1546         tcg_gen_mov_i64(ret, arg1);
1547     } else {
1548         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
1549     }
1550 }
1551 
/* Deposit the low LEN bits of arg2 into arg1 at bit offset OFS,
   leaving the remaining bits of arg1 unchanged in ret.  */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        /* Full-width deposit replaces arg1 entirely.  */
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* When the field lies entirely within one 32-bit half, use a
           32-bit deposit on that half and copy the other.  */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    /* Generic fallback: mask-and-merge.  len < 64 here (the len == 64
       case returned above), so the shift below is well-defined.  */
    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* The field reaches bit 63; the shift itself discards the
           unused high bits of arg2, so no pre-masking is needed.  */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}
1600 
/* Generate ret = (c1 COND c2) ? v1 : v2.  */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Evaluate the 64-bit condition once into t0 (0 or 1), then
           select each half.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Turn 0/1 into a 0/-1 mask and blend bitwise:
               ret = (v1 & mask) | (v2 & ~mask).  */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Fallback: setcond to 0/1, negate to a 0/-1 mask, and blend:
           ret = (v1 & mask) | (v2 & ~mask).  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1648 
/* 128-bit add: (rh:rl) = (ah:al) + (bh:bl).  */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        /* Carry out of the low add: the unsigned sum wrapped iff it is
           less than either addend.  */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        /* Write rl last: it may alias ah/bh, which were read above.  */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1666 
/* 128-bit subtract: (rh:rl) = (ah:al) - (bh:bl).  */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        /* Borrow out of the low subtract: al < bl (unsigned).  */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        /* Write rl last: it may alias ah/bh, which were read above.  */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1684 
/* Unsigned 64x64 -> 128 multiply: (rh:rl) = arg1 * arg2.  */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Compute low and high parts separately; stage the low part in
           a temp since rl may alias arg1/arg2.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* Fallback: helper for the high part, mul op for the low.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1703 
/* Signed 64x64 -> 128 multiply: (rh:rl) = arg1 * arg2.  */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Stage the low part in a temp since rl may alias arg1/arg2.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        /* Derive the signed product from the unsigned one: subtract
           from the high word each operand when the other is negative.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* Fallback: helper for the high part, mul op for the low.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1740 
1741 /* Size changing operations.  */
1742 
/* Extract the low 32 bits of a 64-bit value into an i32.  */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrl_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* Reinterpret the i64 temp index as an i32 and move.  */
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
    }
}
1754 
/* Extract the high 32 bits of a 64-bit value into an i32.  */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrh_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* Fallback: shift the high half down, then reinterpret the
           i64 temp index as an i32 and move.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}
1769 
/* Zero-extend an i32 into an i64.  */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extu_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1780 
/* Sign-extend an i32 into an i64.  */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the value, then fill the high half with its sign.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_ext_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1791 
/* Build a 64-bit value from two 32-bit halves: dest = high:low.  */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* On a 32-bit host the halves are stored separately anyway.  */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
1817 
/* Split a 64-bit value into its low and high 32-bit halves.  */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
1828 
/* Split the 64-bit value ARG into two 64-bit outputs: LO gets the
   zero-extended low 32 bits, HI gets the high 32 bits (shifted down).
   NOTE(review): if LO aliased ARG, the first op would clobber ARG's high
   bits before the shift reads them — presumably callers never pass
   lo == arg; verify at call sites.  */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
1834 
1835 /* QEMU specific operations.  */
1836 
/* Emit a goto_tb opcode: exit the current TB through chain slot IDX,
   which may later be patched to jump directly to the next TB.  */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
1848 
1849 static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
1850 {
1851     switch (op & MO_SIZE) {
1852     case MO_8:
1853         op &= ~MO_BSWAP;
1854         break;
1855     case MO_16:
1856         break;
1857     case MO_32:
1858         if (!is64) {
1859             op &= ~MO_SIGN;
1860         }
1861         break;
1862     case MO_64:
1863         if (!is64) {
1864             tcg_abort();
1865         }
1866         break;
1867     }
1868     if (st) {
1869         op &= ~MO_SIGN;
1870     }
1871     return op;
1872 }
1873 
/* Emit one qemu_ld/st opcode with a 32-bit value operand, flattening the
   guest address into the right number of host-register arguments.  */
static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    /* Pack the memory op and mmu index into one immediate operand.  */
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    /* 32-bit guest address fits in a single operand.  */
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit guest address on a 32-bit host: pass it as two halves.  */
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        /* Mixed operand widths (i32 value, i64 address): use the raw
           op3 emitter with explicit argument indexes.  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
    }
#endif
}
1888 
/* Emit one qemu_ld/st opcode with a 64-bit value operand, flattening both
   value and guest address into the right number of host-register args.  */
static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    /* Pack the memory op and mmu index into one immediate operand.  */
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: the 64-bit value is a register pair.  */
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        /* Mixed widths (i64 value, i32 address): raw op3 emitter.  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host, 64-bit guest: both value and address are pairs.  */
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}
1908 
1909 void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1910 {
1911     memop = tcg_canonicalize_memop(memop, 0, 0);
1912     gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
1913 }
1914 
1915 void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1916 {
1917     memop = tcg_canonicalize_memop(memop, 0, 1);
1918     gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
1919 }
1920 
/* Emit a guest-memory load into a 64-bit value, extending per MEMOP.  */
void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        /* 32-bit host, sub-64-bit access: load into the low half and
           synthesize the high half by extension.  */
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            /* Replicate bit 31 of the loaded low half.  */
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 0);
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
}
1936 
/* Emit a guest-memory store of a 64-bit value (or its low part).  */
void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        /* 32-bit host, sub-64-bit access: only the low half is stored,
           so delegate to the 32-bit store.  */
        tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 1);
    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
}
1947