xref: /openbmc/qemu/tcg/tcg-op.c (revision 9884abee)
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24 
25 #include "qemu/osdep.h"
26 #include "tcg.h"
27 #include "tcg-op.h"
28 
29 /* Reduce the number of ifdefs below.  This assumes that all uses of
30    TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
31    the compiler can eliminate.  */
32 #if TCG_TARGET_REG_BITS == 64
33 extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
34 extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
35 #define TCGV_LOW  TCGV_LOW_link_error
36 #define TCGV_HIGH TCGV_HIGH_link_error
37 #endif
38 
39 /* Note that this is optimized for sequential allocation during translate.
40    Up to and including filling in the forward link immediately.  We'll do
41    proper termination of the end of the list after we finish translation.  */
42 
/* Append one op to ctx->gen_op_buf and link it into the doubly-linked
   op list.  'args' is the index of the op's first argument in
   gen_opparam_buf (already filled in by the tcg_gen_opN caller).  */
static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;  /* slot for this op */
    int ni = oi + 1;                /* forward link, filled in eagerly */
    int pi = oi - 1;                /* backward link; -1 for the first op */

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_last_op_idx = oi;
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,
        .prev = pi,
        .next = ni
    };
}
60 
/* tcg_gen_opN: store the N argument words into gen_opparam_buf, then
   emit a single op that references them by starting index.  The assert
   guards against overflowing the fixed-size parameter buffer.  */

void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 1;
    ctx->gen_opparam_buf[pi] = a1;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 2;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 3;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 4;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 5;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
                 TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 6;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;
    ctx->gen_opparam_buf[pi + 5] = a6;

    tcg_emit_op(ctx, opc, pi);
}
145 
146 /* 32 bit ops */
147 
/* ret = arg1 + arg2 (immediate).  Adding zero folds to a move.  */
void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

/* ret = arg1 (immediate) - arg2.  0 - x becomes a negate when the
   backend has one.  */
void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg1);
        tcg_gen_sub_i32(ret, t0, arg2);
        tcg_temp_free_i32(t0);
    }
}

/* ret = arg1 - arg2 (immediate).  Subtracting zero folds to a move.  */
void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
183 
/* ret = arg1 & arg2 (immediate).  Special masks fold to mov/movi or a
   zero-extension op when the backend provides one.  */
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* x & 0 == 0 */
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        /* x & ~0 == x */
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
213 
/* ret = arg1 | arg2 (immediate).  x|~0 == ~0, x|0 == x.  */
void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

/* ret = arg1 ^ arg2 (immediate).  x^0 == x; x^~0 is a NOT when the
   backend has one.  */
void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
242 
/* ret = arg1 << arg2 (immediate).  Shift counts must be 0..31; a count
   of 32 or more would be undefined at the TCG level.  */
void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

/* ret = arg1 >> arg2 (immediate, logical).  */
void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

/* ret = arg1 >> arg2 (immediate, arithmetic).  */
void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
278 
279 void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
280 {
281     if (cond == TCG_COND_ALWAYS) {
282         tcg_gen_br(l);
283     } else if (cond != TCG_COND_NEVER) {
284         tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
285     }
286 }
287 
/* Conditional branch against an immediate; materializes the constant
   and defers to tcg_gen_brcond_i32.  */
void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, l);
        tcg_temp_free_i32(t0);
    }
}

/* ret = (arg1 cond arg2) ? 1 : 0.  Degenerate conditions fold to a
   constant.  */
void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

/* setcond against an immediate second operand.  */
void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_setcond_i32(cond, ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
318 
/* ret = arg1 * arg2 (immediate).  */
void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_mul_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
325 
/* Signed division: native div op, else div2 (64/32 divide taking a
   sign-extended high word), else a helper call.
   div2 operand order: quotient-out, remainder-out, dividend-low,
   dividend-high, divisor.  */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* t0 = sign bits of arg1: the high word of the 64-bit dividend.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

/* Signed remainder: native rem op, else rem = arg1 - (arg1/arg2)*arg2,
   else div2 (remainder is its second output), else helper.  */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        /* Quotient to the scratch, remainder to ret.  */
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

/* Unsigned division; same strategy as the signed case but with a zero
   high word for div2.  */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

/* Unsigned remainder; mirrors tcg_gen_rem_i32.  */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
393 
/* ret = arg1 & ~arg2; expands via NOT+AND when there is no andc op.  */
void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

/* ret = ~(arg1 ^ arg2).  */
void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

/* ret = ~(arg1 & arg2).  */
void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

/* ret = ~(arg1 | arg2).  */
void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

/* ret = arg1 | ~arg2.  */
void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
447 
/* Rotate left by a variable count; fallback combines shl and the
   complementary shr: (x << n) | (x >> (32 - n)).
   NOTE(review): the fallback relies on the backend's shift semantics for
   a count of 32 (when arg2 == 0) producing a harmless value to OR in.  */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

/* Rotate left by an immediate count (0..31).  */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

/* Rotate right by a variable count; mirror image of tcg_gen_rotl_i32.  */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
505 
506 void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
507 {
508     tcg_debug_assert(arg2 < 32);
509     /* some cases can be optimized here */
510     if (arg2 == 0) {
511         tcg_gen_mov_i32(ret, arg1);
512     } else {
513         tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
514     }
515 }
516 
/* Insert the low 'len' bits of arg2 into arg1 at bit offset 'ofs',
   leaving the other bits of arg1 intact.  Uses the backend deposit op
   when available, otherwise mask-shift-merge.  */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs == 0 && len == 32) {
        /* Full-width deposit replaces arg1 entirely.  */
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    /* len < 32 here (the len == 32 case returned above), so the shift
       below is well defined.  */
    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        /* Field doesn't reach the top bit: mask arg2 before shifting.  */
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field is top-aligned: the shift itself discards high bits.  */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    /* Clear the destination field in arg1, then merge.  */
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}
550 
/* ret = (c1 cond c2) ? v1 : v2.  Without a native movcond the fallback
   builds an all-ones/all-zeros mask from setcond and blends:
   t0 = -(c1 cond c2);  ret = (v1 & t0) | (v2 & ~t0).  */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);        /* 0/1 -> 0/-1 mask */
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);  /* safe even if ret aliases c1/c2 now */
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
572 
/* 64-bit add of (ah:al) + (bh:bl) -> (rh:rl) expressed on 32-bit
   halves; fallback widens to i64, adds, and splits the result.  */
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

/* 64-bit subtract on 32-bit halves; mirrors tcg_gen_add2_i32.  */
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
606 
/* Unsigned 32x32->64 multiply, low half to rl and high half to rh.
   Strategies: native mulu2, mul+muluh pair (low product staged in a
   temp so rl may alias an input), or widening i64 multiply.  */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

/* Signed 32x32->64 multiply.  The 32-bit-host fallback performs the
   unsigned multiply and then corrects the high half: for each negative
   input, subtract the other operand (two's-complement adjustment).  */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);   /* t2 = arg1 < 0 ? -1 : 0 */
        tcg_gen_sari_i32(t3, arg2, 31);   /* t3 = arg2 < 0 ? -1 : 0 */
        tcg_gen_and_i32(t2, t2, arg2);    /* t2 = arg1 < 0 ? arg2 : 0 */
        tcg_gen_and_i32(t3, t3, arg1);    /* t3 = arg2 < 0 ? arg1 : 0 */
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
668 
/* Sign-extend the low 8 bits; fallback is shift-up/shift-down.  */
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

/* Sign-extend the low 16 bits.  */
void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

/* Zero-extend the low 8 bits; fallback is a masking AND.  */
void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

/* Zero-extend the low 16 bits.  */
void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}
706 
/* Byte-swap the low 16 bits.
   Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        /* low byte into bits 15:8, byte 1 down into bits 7:0, merge.  */
        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

/* Full 32-bit byte swap; fallback moves each byte individually.  */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        /* byte 0 -> byte 3 */
        tcg_gen_shli_i32(t0, arg, 24);

        /* byte 1 -> byte 2 */
        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        /* byte 2 -> byte 1 */
        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        /* byte 3 -> byte 0 */
        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
748 
749 /* 64-bit ops */
750 
751 #if TCG_TARGET_REG_BITS == 32
752 /* These are all inline for TCG_TARGET_REG_BITS == 64.  */
753 
/* On 32-bit hosts an i64 is a pair of i32 halves (TCGV_LOW/TCGV_HIGH);
   discard/mov/movi simply operate on both halves.  */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

/* Zero-extending 8-bit load: low half loads, high half is zero.  */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
777 
778 void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
779 {
780     tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
781     tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), 31);
782 }
783 
/* Zero-extending 16-bit load.  */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

/* Sign-extending 16-bit load: sign bit of the loaded low half fills
   the high half.  */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

/* Zero-extending 32-bit load.  */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

/* Sign-extending 32-bit load.  */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

/* Full 64-bit load as two 32-bit loads; half order follows host
   endianness.  */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

/* Full 64-bit store as two 32-bit stores, matching the load above.  */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
831 
/* Bitwise ops act independently on each 32-bit half.  */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

/* 64-bit shifts by a variable count cross the half boundary, so defer
   to runtime helpers on 32-bit hosts.  */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
864 
/* 64-bit multiply on a 32-bit host: 32x32->64 unsigned product of the
   low halves, plus the two cross products added into the high half
   (their upper parts fall outside 64 bits).  Result is built in a
   temp so ret may alias either input.  */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
#endif /* TCG_TARGET_REG_BITS == 32 */
886 
/* ret = arg1 + arg2 (immediate).  Adding zero folds to a move.  */
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

/* ret = arg1 (immediate) - arg2; 0 - x uses neg when available.  */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg1);
        tcg_gen_sub_i64(ret, t0, arg2);
        tcg_temp_free_i64(t0);
    }
}

/* ret = arg1 - arg2 (immediate).  */
void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
922 
/* ret = arg1 & arg2 (immediate).  On 32-bit hosts this splits into two
   32-bit ANDs; on 64-bit hosts special masks fold to movi/mov or a
   zero-extension op.  */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}
965 
966 void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
967 {
968     if (TCG_TARGET_REG_BITS == 32) {
969         tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
970         tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
971         return;
972     }
973     /* Some cases can be optimized here.  */
974     if (arg2 == -1) {
975         tcg_gen_movi_i64(ret, -1);
976     } else if (arg2 == 0) {
977         tcg_gen_mov_i64(ret, arg1);
978     } else {
979         TCGv_i64 t0 = tcg_const_i64(arg2);
980         tcg_gen_or_i64(ret, arg1, t0);
981         tcg_temp_free_i64(t0);
982     }
983 }
984 
/* XOR a 64-bit value with a constant: ret = arg1 ^ arg2.  */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* On 32-bit hosts, XOR each 32-bit half with the matching
           half of the constant.  */
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        /* XOR with zero is the identity.  */
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* XOR with all-ones is a bitwise NOT.
           Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1004 
/* Expand a 64-bit shift by constant 'c' using 32-bit operations on the
   two halves of the operand (32-bit host path: note the TCGV_LOW/HIGH
   accesses).  'right' selects right vs left shift; 'arith' selects
   arithmetic vs logical for right shifts.  */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        /* Shift by zero: plain move of both halves.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* Shift by a full word or more: data moves from one half to
           the other, and the vacated half is filled with zeroes or
           copies of the sign bit.  */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        /* Shift by 1..31: bits cross between halves, so OR the bits
           shifted out of one half into the other.  */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            /* t0 = low bits of the high half, moved into position for
               the low half of the result.  */
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            /* t0 = high bits of the low half that carry into the
               high half of the result.  */
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1053 
1054 void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1055 {
1056     tcg_debug_assert(arg2 < 64);
1057     if (TCG_TARGET_REG_BITS == 32) {
1058         tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
1059     } else if (arg2 == 0) {
1060         tcg_gen_mov_i64(ret, arg1);
1061     } else {
1062         TCGv_i64 t0 = tcg_const_i64(arg2);
1063         tcg_gen_shl_i64(ret, arg1, t0);
1064         tcg_temp_free_i64(t0);
1065     }
1066 }
1067 
1068 void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1069 {
1070     tcg_debug_assert(arg2 < 64);
1071     if (TCG_TARGET_REG_BITS == 32) {
1072         tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
1073     } else if (arg2 == 0) {
1074         tcg_gen_mov_i64(ret, arg1);
1075     } else {
1076         TCGv_i64 t0 = tcg_const_i64(arg2);
1077         tcg_gen_shr_i64(ret, arg1, t0);
1078         tcg_temp_free_i64(t0);
1079     }
1080 }
1081 
1082 void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1083 {
1084     tcg_debug_assert(arg2 < 64);
1085     if (TCG_TARGET_REG_BITS == 32) {
1086         tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
1087     } else if (arg2 == 0) {
1088         tcg_gen_mov_i64(ret, arg1);
1089     } else {
1090         TCGv_i64 t0 = tcg_const_i64(arg2);
1091         tcg_gen_sar_i64(ret, arg1, t0);
1092         tcg_temp_free_i64(t0);
1093     }
1094 }
1095 
/* Branch to label 'l' if (arg1 cond arg2) holds.  */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        /* Degenerate condition: unconditional branch.  */
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* TCG_COND_NEVER emits nothing at all.  */
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: compare both halves with brcond2.  */
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}
1111 
1112 void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
1113 {
1114     if (cond == TCG_COND_ALWAYS) {
1115         tcg_gen_br(l);
1116     } else if (cond != TCG_COND_NEVER) {
1117         TCGv_i64 t0 = tcg_const_i64(arg2);
1118         tcg_gen_brcond_i64(cond, arg1, t0, l);
1119         tcg_temp_free_i64(t0);
1120     }
1121 }
1122 
/* Set ret to 1 if (arg1 cond arg2) holds, else 0.  */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        /* Constant-fold the degenerate conditions.  */
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: setcond2 compares both halves and produces
               a 32-bit 0/1; the high half of ret is then zeroed.  */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1141 
1142 void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
1143                           TCGv_i64 arg1, int64_t arg2)
1144 {
1145     TCGv_i64 t0 = tcg_const_i64(arg2);
1146     tcg_gen_setcond_i64(cond, ret, arg1, t0);
1147     tcg_temp_free_i64(t0);
1148 }
1149 
1150 void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1151 {
1152     TCGv_i64 t0 = tcg_const_i64(arg2);
1153     tcg_gen_mul_i64(ret, arg1, t0);
1154     tcg_temp_free_i64(t0);
1155 }
1156 
/* Signed 64-bit division: ret = arg1 / arg2.  */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* The div2 opcode takes the dividend's high word explicitly;
           fill it with copies of the sign bit of arg1.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        /* No host support at all: call the generic helper.  */
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1170 
/* Signed 64-bit remainder: ret = arg1 % arg2.  */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* Compute via the identity rem = arg1 - (arg1 / arg2) * arg2.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* As in tcg_gen_div_i64, but with the two div2 outputs swapped
           so that ret receives the remainder.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1190 
/* Unsigned 64-bit division: ret = arg1 / arg2.  */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* The divu2 opcode takes an explicit high word for the
           dividend; zero it for an unsigned 64-bit value.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        /* No host support at all: call the generic helper.  */
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1204 
/* Unsigned 64-bit remainder: ret = arg1 % arg2.  */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* Compute via the identity rem = arg1 - (arg1 / arg2) * arg2.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* As in tcg_gen_divu_i64, but with the divu2 outputs swapped
           so that ret receives the remainder.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1224 
/* Sign-extend the low 8 bits of arg to 64 bits.  */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half; the high half becomes copies of
           the sign bit.  */
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        /* Fallback: shift left to the top, then arithmetic shift back.  */
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}
1237 
/* Sign-extend the low 16 bits of arg to 64 bits.  */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half; the high half becomes copies of
           the sign bit.  */
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        /* Fallback: shift left to the top, then arithmetic shift back.  */
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}
1250 
/* Sign-extend the low 32 bits of arg to 64 bits.  */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the low half; the high half becomes copies of its
           sign bit.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        /* Fallback: shift left to the top, then arithmetic shift back.  */
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
1263 
/* Zero-extend the low 8 bits of arg to 64 bits.  */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half and clear the high half.  */
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        /* Fallback: mask with the low-byte mask.  */
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}
1275 
/* Zero-extend the low 16 bits of arg to 64 bits.  */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half and clear the high half.  */
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        /* Fallback: mask with the low-16-bit mask.  */
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}
1287 
/* Zero-extend the low 32 bits of arg to 64 bits.  */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the low half and clear the high half.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        /* Fallback: mask with the low-32-bit mask.  */
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1299 
/* Byte-swap the low 16 bits of arg.
   Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        /* Swap the two low bytes by shifting them past each other;
           the plain "arg >> 8" is correct only because the upper
           bytes of arg are assumed to be zero (see above).  */
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);         /* t0 = byte 0 */
        tcg_gen_shli_i64(t0, t0, 8);        /* ... moved to byte 1 */
        tcg_gen_shri_i64(ret, arg, 8);      /* ret = byte 1 in byte 0 */
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}
1318 
/* Byte-swap the low 32 bits of arg.
   Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        /* Fallback: assemble the result in t0 by moving each byte of
           the low word to its mirrored position.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 3; the 32-bit zero-extend discards anything
           shifted above bit 31.  */
        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        /* Byte 1 -> byte 2.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 0 (relies on the zero high bytes above).  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1349 
/* Byte-swap all 64 bits of arg.  */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap each 32-bit half, then exchange the halves.  Temps are
           used so that ret may alias arg.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        /* Fallback: accumulate the result in t0, OR-ing in each byte
           shifted to its mirrored position.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 7.  */
        tcg_gen_shli_i64(t0, arg, 56);

        /* Byte 1 -> byte 6.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 5.  */
        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 4.  */
        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 4 -> byte 3.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 5 -> byte 2.  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 6 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 7 -> byte 0.  */
        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1401 
/* Bitwise NOT: ret = ~arg.  */
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Invert each 32-bit half independently.  */
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        /* Fallback: XOR with all-ones.  */
        tcg_gen_xori_i64(ret, arg, -1);
    }
}
1413 
1414 void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1415 {
1416     if (TCG_TARGET_REG_BITS == 32) {
1417         tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1418         tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1419     } else if (TCG_TARGET_HAS_andc_i64) {
1420         tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1421     } else {
1422         TCGv_i64 t0 = tcg_temp_new_i64();
1423         tcg_gen_not_i64(t0, arg2);
1424         tcg_gen_and_i64(ret, arg1, t0);
1425         tcg_temp_free_i64(t0);
1426     }
1427 }
1428 
/* Equivalence (XNOR): ret = ~(arg1 ^ arg2).  */
void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Operate on the two 32-bit halves independently.  */
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        /* Fallback: XOR then NOT.  */
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1441 
/* NAND: ret = ~(arg1 & arg2).  */
void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Operate on the two 32-bit halves independently.  */
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        /* Fallback: AND then NOT.  */
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1454 
/* NOR: ret = ~(arg1 | arg2).  */
void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Operate on the two 32-bit halves independently.  */
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        /* Fallback: OR then NOT.  */
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1467 
1468 void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1469 {
1470     if (TCG_TARGET_REG_BITS == 32) {
1471         tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1472         tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1473     } else if (TCG_TARGET_HAS_orc_i64) {
1474         tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
1475     } else {
1476         TCGv_i64 t0 = tcg_temp_new_i64();
1477         tcg_gen_not_i64(t0, arg2);
1478         tcg_gen_or_i64(ret, arg1, t0);
1479         tcg_temp_free_i64(t0);
1480     }
1481 }
1482 
/* Rotate left by a variable amount: ret = rotl(arg1, arg2).  */
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        /* Fallback: (arg1 << arg2) | (arg1 >> (64 - arg2)).  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1499 
/* Rotate left by a constant: ret = rotl(arg1, arg2).  */
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        /* Rotating by zero is a plain move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        /* Host has a rotate op: load the constant and use it.  */
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        /* Fallback: (arg1 << arg2) | (arg1 >> (64 - arg2)).  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1521 
/* Rotate right by a variable amount: ret = rotr(arg1, arg2).  */
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        /* Fallback: (arg1 >> arg2) | (arg1 << (64 - arg2)).  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1538 
1539 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1540 {
1541     tcg_debug_assert(arg2 < 64);
1542     /* some cases can be optimized here */
1543     if (arg2 == 0) {
1544         tcg_gen_mov_i64(ret, arg1);
1545     } else {
1546         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
1547     }
1548 }
1549 
/* Deposit the low 'len' bits of arg2 into arg1 at bit offset 'ofs',
   leaving the other bits of arg1 unchanged in ret.  */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        /* Full-width deposit is just a move of arg2.  */
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* If the field lies entirely within one 32-bit half, deposit
           into that half and copy the other half unchanged.  */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
        /* Otherwise fall through to the generic mask-and-merge.  */
    }

    /* len < 64 here (len == 64 forces ofs == 0, handled above), so
       this shift cannot overflow.  */
    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        /* Mask the field before shifting it into position.  */
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* Field reaches the top bit: the shift discards the excess.  */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    /* Clear the destination field in arg1, then merge.  */
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}
1598 
/* Conditional move: ret = (c1 cond c2) ? v1 : v2.  */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        /* Constant-fold the degenerate conditions.  */
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: compute the condition into t0 (0 or 1) with
           setcond2, then select each 32-bit half.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half with a movcond against zero.  */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Turn the 0/1 into a 0/-1 mask and blend:
               ret = (v1 & mask) | (v2 & ~mask), per half.  */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Fallback: setcond to 0/1, negate into a 0/-1 mask, and
           blend: ret = (v1 & mask) | (v2 & ~mask).  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1646 
/* 128-bit add: (rh:rl) = (ah:al) + (bh:bl).  */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback: add the low words, detect the carry with an
           unsigned compare (sum < addend), and propagate it into the
           high-word sum.  rl is written last so it may alias inputs.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1664 
/* 128-bit subtract: (rh:rl) = (ah:al) - (bh:bl).  */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback: subtract the low words, detect the borrow with an
           unsigned compare (al < bl), and subtract it from the
           high-word difference.  rl is written last so it may alias
           inputs.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1682 
/* Unsigned widening multiply: (rh:rl) = arg1 * arg2 (128-bit result).  */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Compose from separate low/high multiplies.  The low result
           goes through a temp so rl may alias the inputs.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* No host support for the high part: use the helper.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1701 
/* Signed widening multiply: (rh:rl) = arg1 * arg2 (128-bit result).  */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Compose from separate low/high multiplies.  The low result
           goes through a temp so rl may alias the inputs.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        /* Derive the signed product from the unsigned one: for each
           negative input, the unsigned high word is too large by the
           other operand, so subtract it out.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);     /* t2 = arg1 < 0 ? -1 : 0 */
        tcg_gen_sari_i64(t3, arg2, 63);     /* t3 = arg2 < 0 ? -1 : 0 */
        tcg_gen_and_i64(t2, t2, arg2);      /* t2 = arg1 < 0 ? arg2 : 0 */
        tcg_gen_and_i64(t3, t3, arg1);      /* t3 = arg2 < 0 ? arg1 : 0 */
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* No host support at all: use the helper for the high part.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1738 
1739 /* Size changing operations.  */
1740 
/* Extract the low 32 bits of a 64-bit value into an i32.  */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* The low half already lives in its own 32-bit temp.  */
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrl_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* Reinterpret the i64 temp as an i32 via its TCGV index.  */
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
    }
}
1752 
/* Extract the high 32 bits of a 64-bit value into an i32.  */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* The high half already lives in its own 32-bit temp.  */
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrh_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* Shift the high half down in a temp, then reinterpret that
           i64 temp as an i32 via its TCGV index.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}
1767 
/* Zero-extend a 32-bit value into a 64-bit destination.  */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy into the low half and clear the high half.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extu_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1778 
/* Sign-extend a 32-bit value into a 64-bit destination.  */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy into the low half; the high half becomes copies of
           the sign bit.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_ext_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1789 
/* Build a 64-bit value from two 32-bit halves: dest = high:low.  */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* The halves map directly onto the two 32-bit temps.  */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        /* Shift the high part into position and OR the halves.  */
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
1815 
/* Split a 64-bit value into two 32-bit halves: lo = arg[31:0],
   hi = arg[63:32].  */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* The halves already live in separate 32-bit temps.  */
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
1826 
/* Split a 64-bit value into two i64 halves: lo = arg[31:0]
   zero-extended, hi = arg[63:32].  */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
1832 
1833 /* QEMU specific operations.  */
1834 
/* Emit a goto_tb op for chained-exit slot IDX of the current TB.  */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
1846 
1847 static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
1848 {
1849     switch (op & MO_SIZE) {
1850     case MO_8:
1851         op &= ~MO_BSWAP;
1852         break;
1853     case MO_16:
1854         break;
1855     case MO_32:
1856         if (!is64) {
1857             op &= ~MO_SIGN;
1858         }
1859         break;
1860     case MO_64:
1861         if (!is64) {
1862             tcg_abort();
1863         }
1864         break;
1865     }
1866     if (st) {
1867         op &= ~MO_SIGN;
1868     }
1869     return op;
1870 }
1871 
/* Emit a qemu load/store op with 32-bit data VAL at guest address ADDR.
   The memop and mmu index IDX are packed into one TCGMemOpIdx operand;
   the op arity depends on how many host registers the address needs.  */
static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    /* 32-bit guest address fits in a single i32 operand.  */
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit guest address on a 32-bit host: pass both halves.  */
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        /* Mixed operand widths, so emit the raw op directly.  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
    }
#endif
}
1886 
/* Emit a qemu load/store op with 64-bit data VAL at guest address ADDR.
   As for gen_ldst_i32, memop and mmu index IDX are packed into a single
   TCGMemOpIdx operand; on a 32-bit host the value (and, for a 64-bit
   guest, the address) is passed as a pair of i32 halves.  */
static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: split the 64-bit value into two i32 operands.  */
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        /* Mixed operand widths, so emit the raw op directly.  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host, 64-bit guest: both value and address split.  */
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}
1906 
1907 void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1908 {
1909     memop = tcg_canonicalize_memop(memop, 0, 0);
1910     gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
1911 }
1912 
1913 void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1914 {
1915     memop = tcg_canonicalize_memop(memop, 0, 1);
1916     gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
1917 }
1918 
1919 void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1920 {
1921     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
1922         tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
1923         if (memop & MO_SIGN) {
1924             tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
1925         } else {
1926             tcg_gen_movi_i32(TCGV_HIGH(val), 0);
1927         }
1928         return;
1929     }
1930 
1931     memop = tcg_canonicalize_memop(memop, 1, 0);
1932     gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
1933 }
1934 
1935 void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1936 {
1937     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
1938         tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
1939         return;
1940     }
1941 
1942     memop = tcg_canonicalize_memop(memop, 1, 1);
1943     gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
1944 }
1945