xref: /openbmc/qemu/tcg/tcg-op.c (revision 728cc990)
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24 
25 #include "qemu/osdep.h"
26 #include "qemu-common.h"
27 #include "cpu.h"
28 #include "exec/exec-all.h"
29 #include "tcg.h"
30 #include "tcg-op.h"
31 #include "trace-tcg.h"
32 #include "trace/mem.h"
33 
34 /* Reduce the number of ifdefs below.  This assumes that all uses of
35    TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
36    the compiler can eliminate.  */
37 #if TCG_TARGET_REG_BITS == 64
38 extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
39 extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
40 #define TCGV_LOW  TCGV_LOW_link_error
41 #define TCGV_HIGH TCGV_HIGH_link_error
42 #endif
43 
44 /* Note that this is optimized for sequential allocation during translate.
45    Up to and including filling in the forward link immediately.  We'll do
46    proper termination of the end of the list after we finish translation.  */
47 
/* Append one op to the context's op list.
   Note that this is optimized for sequential allocation during translate:
   the forward link is filled in immediately, assuming the next op follows;
   proper termination of the end of the list is done after translation.  */
static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;  /* slot for this op */
    int ni = oi + 1;                /* presumed index of the next op */
    int pi = oi - 1;                /* index of the previous op */

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_last_op_idx = oi;
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,   /* index of first argument in gen_opparam_buf */
        .prev = pi,
        .next = ni
    };
}
65 
66 void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
67 {
68     int pi = ctx->gen_next_parm_idx;
69 
70     tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
71     ctx->gen_next_parm_idx = pi + 1;
72     ctx->gen_opparam_buf[pi] = a1;
73 
74     tcg_emit_op(ctx, opc, pi);
75 }
76 
77 void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
78 {
79     int pi = ctx->gen_next_parm_idx;
80 
81     tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
82     ctx->gen_next_parm_idx = pi + 2;
83     ctx->gen_opparam_buf[pi + 0] = a1;
84     ctx->gen_opparam_buf[pi + 1] = a2;
85 
86     tcg_emit_op(ctx, opc, pi);
87 }
88 
89 void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
90                  TCGArg a2, TCGArg a3)
91 {
92     int pi = ctx->gen_next_parm_idx;
93 
94     tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
95     ctx->gen_next_parm_idx = pi + 3;
96     ctx->gen_opparam_buf[pi + 0] = a1;
97     ctx->gen_opparam_buf[pi + 1] = a2;
98     ctx->gen_opparam_buf[pi + 2] = a3;
99 
100     tcg_emit_op(ctx, opc, pi);
101 }
102 
103 void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
104                  TCGArg a2, TCGArg a3, TCGArg a4)
105 {
106     int pi = ctx->gen_next_parm_idx;
107 
108     tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
109     ctx->gen_next_parm_idx = pi + 4;
110     ctx->gen_opparam_buf[pi + 0] = a1;
111     ctx->gen_opparam_buf[pi + 1] = a2;
112     ctx->gen_opparam_buf[pi + 2] = a3;
113     ctx->gen_opparam_buf[pi + 3] = a4;
114 
115     tcg_emit_op(ctx, opc, pi);
116 }
117 
118 void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
119                  TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
120 {
121     int pi = ctx->gen_next_parm_idx;
122 
123     tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
124     ctx->gen_next_parm_idx = pi + 5;
125     ctx->gen_opparam_buf[pi + 0] = a1;
126     ctx->gen_opparam_buf[pi + 1] = a2;
127     ctx->gen_opparam_buf[pi + 2] = a3;
128     ctx->gen_opparam_buf[pi + 3] = a4;
129     ctx->gen_opparam_buf[pi + 4] = a5;
130 
131     tcg_emit_op(ctx, opc, pi);
132 }
133 
134 void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
135                  TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
136 {
137     int pi = ctx->gen_next_parm_idx;
138 
139     tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
140     ctx->gen_next_parm_idx = pi + 6;
141     ctx->gen_opparam_buf[pi + 0] = a1;
142     ctx->gen_opparam_buf[pi + 1] = a2;
143     ctx->gen_opparam_buf[pi + 2] = a3;
144     ctx->gen_opparam_buf[pi + 3] = a4;
145     ctx->gen_opparam_buf[pi + 4] = a5;
146     ctx->gen_opparam_buf[pi + 5] = a6;
147 
148     tcg_emit_op(ctx, opc, pi);
149 }
150 
151 /* 32 bit ops */
152 
153 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
154 {
155     /* some cases can be optimized here */
156     if (arg2 == 0) {
157         tcg_gen_mov_i32(ret, arg1);
158     } else {
159         TCGv_i32 t0 = tcg_const_i32(arg2);
160         tcg_gen_add_i32(ret, arg1, t0);
161         tcg_temp_free_i32(t0);
162     }
163 }
164 
165 void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
166 {
167     if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
168         /* Don't recurse with tcg_gen_neg_i32.  */
169         tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
170     } else {
171         TCGv_i32 t0 = tcg_const_i32(arg1);
172         tcg_gen_sub_i32(ret, t0, arg2);
173         tcg_temp_free_i32(t0);
174     }
175 }
176 
177 void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
178 {
179     /* some cases can be optimized here */
180     if (arg2 == 0) {
181         tcg_gen_mov_i32(ret, arg1);
182     } else {
183         TCGv_i32 t0 = tcg_const_i32(arg2);
184         tcg_gen_sub_i32(ret, arg1, t0);
185         tcg_temp_free_i32(t0);
186     }
187 }
188 
/* ret = arg1 & arg2 (immediate), folding masks that map to cheaper ops.  */
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* x & 0 == 0.  */
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        /* x & ~0 == x.  */
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    /* Generic case: materialize the constant and AND.  */
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
218 
219 void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
220 {
221     /* Some cases can be optimized here.  */
222     if (arg2 == -1) {
223         tcg_gen_movi_i32(ret, -1);
224     } else if (arg2 == 0) {
225         tcg_gen_mov_i32(ret, arg1);
226     } else {
227         TCGv_i32 t0 = tcg_const_i32(arg2);
228         tcg_gen_or_i32(ret, arg1, t0);
229         tcg_temp_free_i32(t0);
230     }
231 }
232 
233 void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
234 {
235     /* Some cases can be optimized here.  */
236     if (arg2 == 0) {
237         tcg_gen_mov_i32(ret, arg1);
238     } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
239         /* Don't recurse with tcg_gen_not_i32.  */
240         tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
241     } else {
242         TCGv_i32 t0 = tcg_const_i32(arg2);
243         tcg_gen_xor_i32(ret, arg1, t0);
244         tcg_temp_free_i32(t0);
245     }
246 }
247 
248 void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
249 {
250     tcg_debug_assert(arg2 < 32);
251     if (arg2 == 0) {
252         tcg_gen_mov_i32(ret, arg1);
253     } else {
254         TCGv_i32 t0 = tcg_const_i32(arg2);
255         tcg_gen_shl_i32(ret, arg1, t0);
256         tcg_temp_free_i32(t0);
257     }
258 }
259 
260 void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
261 {
262     tcg_debug_assert(arg2 < 32);
263     if (arg2 == 0) {
264         tcg_gen_mov_i32(ret, arg1);
265     } else {
266         TCGv_i32 t0 = tcg_const_i32(arg2);
267         tcg_gen_shr_i32(ret, arg1, t0);
268         tcg_temp_free_i32(t0);
269     }
270 }
271 
272 void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
273 {
274     tcg_debug_assert(arg2 < 32);
275     if (arg2 == 0) {
276         tcg_gen_mov_i32(ret, arg1);
277     } else {
278         TCGv_i32 t0 = tcg_const_i32(arg2);
279         tcg_gen_sar_i32(ret, arg1, t0);
280         tcg_temp_free_i32(t0);
281     }
282 }
283 
284 void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
285 {
286     if (cond == TCG_COND_ALWAYS) {
287         tcg_gen_br(l);
288     } else if (cond != TCG_COND_NEVER) {
289         tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
290     }
291 }
292 
293 void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
294 {
295     if (cond == TCG_COND_ALWAYS) {
296         tcg_gen_br(l);
297     } else if (cond != TCG_COND_NEVER) {
298         TCGv_i32 t0 = tcg_const_i32(arg2);
299         tcg_gen_brcond_i32(cond, arg1, t0, l);
300         tcg_temp_free_i32(t0);
301     }
302 }
303 
304 void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
305                          TCGv_i32 arg1, TCGv_i32 arg2)
306 {
307     if (cond == TCG_COND_ALWAYS) {
308         tcg_gen_movi_i32(ret, 1);
309     } else if (cond == TCG_COND_NEVER) {
310         tcg_gen_movi_i32(ret, 0);
311     } else {
312         tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
313     }
314 }
315 
316 void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
317                           TCGv_i32 arg1, int32_t arg2)
318 {
319     TCGv_i32 t0 = tcg_const_i32(arg2);
320     tcg_gen_setcond_i32(cond, ret, arg1, t0);
321     tcg_temp_free_i32(t0);
322 }
323 
324 void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
325 {
326     TCGv_i32 t0 = tcg_const_i32(arg2);
327     tcg_gen_mul_i32(ret, arg1, t0);
328     tcg_temp_free_i32(t0);
329 }
330 
/* Signed 32-bit division: native div op, the target's double-word div2
   with the dividend sign-extended into the high word, or a helper call.  */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Sign-extend the dividend into the high word.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
344 
/* Signed 32-bit remainder: native rem op, a % b == a - (a / b) * b via
   the div op, the target's div2 (which yields the remainder in the
   second output), or a helper call.  */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Sign-extend the dividend into the high word.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
364 
/* Unsigned 32-bit division: native divu op, divu2 with a zero high
   word, or a helper call.  */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Zero-extend the dividend into the high word.  */
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
378 
/* Unsigned 32-bit remainder: native remu op, a % b == a - (a / b) * b
   via divu, divu2 (remainder in the second output), or a helper.  */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Zero-extend the dividend into the high word.  */
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
398 
399 void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
400 {
401     if (TCG_TARGET_HAS_andc_i32) {
402         tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
403     } else {
404         TCGv_i32 t0 = tcg_temp_new_i32();
405         tcg_gen_not_i32(t0, arg2);
406         tcg_gen_and_i32(ret, arg1, t0);
407         tcg_temp_free_i32(t0);
408     }
409 }
410 
411 void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
412 {
413     if (TCG_TARGET_HAS_eqv_i32) {
414         tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
415     } else {
416         tcg_gen_xor_i32(ret, arg1, arg2);
417         tcg_gen_not_i32(ret, ret);
418     }
419 }
420 
421 void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
422 {
423     if (TCG_TARGET_HAS_nand_i32) {
424         tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
425     } else {
426         tcg_gen_and_i32(ret, arg1, arg2);
427         tcg_gen_not_i32(ret, ret);
428     }
429 }
430 
431 void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
432 {
433     if (TCG_TARGET_HAS_nor_i32) {
434         tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
435     } else {
436         tcg_gen_or_i32(ret, arg1, arg2);
437         tcg_gen_not_i32(ret, ret);
438     }
439 }
440 
441 void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
442 {
443     if (TCG_TARGET_HAS_orc_i32) {
444         tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
445     } else {
446         TCGv_i32 t0 = tcg_temp_new_i32();
447         tcg_gen_not_i32(t0, arg2);
448         tcg_gen_or_i32(ret, arg1, t0);
449         tcg_temp_free_i32(t0);
450     }
451 }
452 
/* ret = arg1 rotated left by arg2; without a native rotate this is
   (x << c) | (x >> (32 - c)).  */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        /* NOTE(review): relies on the shift op masking 32 - 0 == 32
           down to 0 for a zero rotate count -- confirm target shr
           semantics for out-of-range counts.  */
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
470 
/* ret = arg1 rotated left by the immediate arg2 in [0, 31].  */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        /* (x << c) | (x >> (32 - c)); both shift counts are in range
           because arg2 != 0 here.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
492 
/* ret = arg1 rotated right by arg2; without a native rotate this is
   (x >> c) | (x << (32 - c)).  */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        /* NOTE(review): as in rotl, assumes the shl op tolerates a
           count of 32 when arg2 == 0 -- confirm target semantics.  */
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
510 
511 void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
512 {
513     tcg_debug_assert(arg2 < 32);
514     /* some cases can be optimized here */
515     if (arg2 == 0) {
516         tcg_gen_mov_i32(ret, arg1);
517     } else {
518         tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
519     }
520 }
521 
/* Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS:
   ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs).  */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Full-width deposit is a plain move of arg2.  */
    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    /* len < 32 here (len == 32 forces ofs == 0, handled above), so
       this shift cannot overflow.  */
    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        /* Mask arg2 before shifting so stray high bits don't leak.  */
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches bit 31: the shift itself discards high bits.  */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    /* Clear the destination field and merge.  */
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}
555 
/* ret = (c1 cond c2) ? v1 : v2.  */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        /* Branch-free fallback: build an all-ones/all-zeroes mask from
           the setcond result, then ret = (v1 & mask) | (v2 & ~mask).  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
577 
/* 64-bit add of the pairs (ah:al) + (bh:bl) into (rh:rl), falling back
   to a real 64-bit add via concat/extract when the target lacks add2.  */
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
594 
/* 64-bit subtract of the pairs (ah:al) - (bh:bl) into (rh:rl), falling
   back to a real 64-bit subtract when the target lacks sub2.  */
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
611 
/* Unsigned 32x32->64 multiply, low half in rl and high half in rh.  */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        /* Compute the low half into a temp so rl may alias an input.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        /* Fall back to a full 64-bit multiply of zero-extended inputs.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
633 
/* Signed 32x32->64 multiply, low half in rl and high half in rh.  */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        /* Compute the low half into a temp so rl may alias an input.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Unsigned product, then correct the high half: for each
           negative input, subtract the other operand from the high
           half (two's-complement adjustment).  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host: full multiply of sign-extended inputs.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
673 
674 void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
675 {
676     if (TCG_TARGET_HAS_ext8s_i32) {
677         tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
678     } else {
679         tcg_gen_shli_i32(ret, arg, 24);
680         tcg_gen_sari_i32(ret, ret, 24);
681     }
682 }
683 
684 void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
685 {
686     if (TCG_TARGET_HAS_ext16s_i32) {
687         tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
688     } else {
689         tcg_gen_shli_i32(ret, arg, 16);
690         tcg_gen_sari_i32(ret, ret, 16);
691     }
692 }
693 
694 void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
695 {
696     if (TCG_TARGET_HAS_ext8u_i32) {
697         tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
698     } else {
699         tcg_gen_andi_i32(ret, arg, 0xffu);
700     }
701 }
702 
703 void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
704 {
705     if (TCG_TARGET_HAS_ext16u_i32) {
706         tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
707     } else {
708         tcg_gen_andi_i32(ret, arg, 0xffffu);
709     }
710 }
711 
/* Byte-swap the low 16 bits of arg into ret.
   Note: we assume the two high bytes are set to zero.  */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        /* t0 = low byte moved up; ret = high byte moved down.  */
        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}
727 
/* Byte-swap all 32 bits of arg into ret.  */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        /* Assemble the result byte by byte into t0.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        /* Byte 0 -> byte 3.  */
        tcg_gen_shli_i32(t0, arg, 24);

        /* Byte 1 -> byte 2.  */
        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        /* Byte 2 -> byte 1.  */
        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        /* Byte 3 -> byte 0.  */
        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
753 
754 /* 64-bit ops */
755 
756 #if TCG_TARGET_REG_BITS == 32
757 /* These are all inline for TCG_TARGET_REG_BITS == 64.  */
758 
/* Discard both 32-bit halves of a 64-bit temp.  */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}
764 
/* Copy a 64-bit value as two 32-bit half moves.  */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}
770 
/* Load a 64-bit immediate as two 32-bit half moves; the low move
   truncates arg implicitly.  */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
776 
/* Load a zero-extended byte: low half from memory, high half zero.  */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
782 
783 void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
784 {
785     tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
786     tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), 31);
787 }
788 
/* Load a zero-extended halfword: low half from memory, high half zero.  */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
794 
/* Load a sign-extended halfword: low half from memory, high half
   replicated from the loaded value's sign bit.  */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
800 
/* Load a zero-extended word: low half from memory, high half zero.  */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
806 
/* Load a sign-extended word: low half from memory, high half
   replicated from the loaded value's sign bit.  */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
812 
/* Load a full 64-bit value as two 32-bit loads in host byte order.  */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
825 
/* Store a full 64-bit value as two 32-bit stores in host byte order.  */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
836 
/* 64-bit AND as two independent 32-bit half ops.  */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
842 
/* 64-bit OR as two independent 32-bit half ops.  */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
848 
/* 64-bit XOR as two independent 32-bit half ops.  */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
854 
/* 64-bit shift left on a 32-bit host: delegate to a runtime helper.  */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}
859 
/* 64-bit logical shift right on a 32-bit host: runtime helper.  */
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}
864 
/* 64-bit arithmetic shift right on a 32-bit host: runtime helper.  */
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
869 
/* 64x64->64 multiply on a 32-bit host: mulu2 of the low halves gives
   the 64-bit partial product; the two cross products only affect the
   high half (their upper bits fall outside 64 bits) and are added in.  */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    /* Accumulate into a temp so ret may alias either input.  */
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    /* Cross products: low(arg1) * high(arg2) and high(arg1) * low(arg2).  */
    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
#endif /* TCG_TARGET_REG_BITS == 32 */
891 
892 void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
893 {
894     /* some cases can be optimized here */
895     if (arg2 == 0) {
896         tcg_gen_mov_i64(ret, arg1);
897     } else {
898         TCGv_i64 t0 = tcg_const_i64(arg2);
899         tcg_gen_add_i64(ret, arg1, t0);
900         tcg_temp_free_i64(t0);
901     }
902 }
903 
904 void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
905 {
906     if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
907         /* Don't recurse with tcg_gen_neg_i64.  */
908         tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
909     } else {
910         TCGv_i64 t0 = tcg_const_i64(arg1);
911         tcg_gen_sub_i64(ret, t0, arg2);
912         tcg_temp_free_i64(t0);
913     }
914 }
915 
916 void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
917 {
918     /* some cases can be optimized here */
919     if (arg2 == 0) {
920         tcg_gen_mov_i64(ret, arg1);
921     } else {
922         TCGv_i64 t0 = tcg_const_i64(arg2);
923         tcg_gen_sub_i64(ret, arg1, t0);
924         tcg_temp_free_i64(t0);
925     }
926 }
927 
/* ret = arg1 & arg2 (immediate), folding masks that map to cheaper ops.  */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    /* 32-bit host: operate on the two halves independently.  */
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* x & 0 == 0.  */
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        /* x & ~0 == x.  */
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    /* Generic case: materialize the constant and AND.  */
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}
970 
/* Generate ret = arg1 | arg2, where arg2 is a constant.  */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* On a 32-bit host, OR the two 32-bit halves independently.  */
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        /* x | -1 == -1.  */
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        /* x | 0 == x.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
989 
/* Generate ret = arg1 ^ arg2, where arg2 is a constant.  */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* On a 32-bit host, XOR the two 32-bit halves independently.  */
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        /* x ^ 0 == x.  */
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* x ^ -1 == ~x.  Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1009 
/* Emit a 64-bit shift by constant C on a 32-bit host, operating on the
   two 32-bit halves of the operands.  RIGHT selects right vs left shift;
   ARITH selects arithmetic vs logical for a right shift.  Only called
   when TCG_TARGET_REG_BITS == 32 (TCGV_LOW/HIGH are invalid otherwise).  */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        /* Shift by zero: plain move of both halves.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* Whole-word shift: one half comes entirely from the other.  */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                /* High half is the replicated sign bit.  */
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        /* 0 < c < 32: bits cross between the halves; combine a shift of
           each half with the 32-c bits carried over from the other.  */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1058 
/* Generate ret = arg1 << arg2, where arg2 is a constant (0 <= arg2 < 64).  */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* right=0, arith=0: logical left shift via the two halves.  */
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        /* Shift by zero is a move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1072 
/* Generate ret = arg1 >> arg2 (logical), where arg2 is a constant
   (0 <= arg2 < 64).  */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* right=1, arith=0: logical right shift via the two halves.  */
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        /* Shift by zero is a move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1086 
/* Generate ret = arg1 >> arg2 (arithmetic), where arg2 is a constant
   (0 <= arg2 < 64).  */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* right=1, arith=1: arithmetic right shift via the two halves.  */
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        /* Shift by zero is a move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1100 
/* Generate a conditional branch to label L if (arg1 COND arg2).
   Degenerate conditions fold to an unconditional branch or nothing.  */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: use the double-word compare-and-branch op.  */
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}
1116 
1117 void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
1118 {
1119     if (cond == TCG_COND_ALWAYS) {
1120         tcg_gen_br(l);
1121     } else if (cond != TCG_COND_NEVER) {
1122         TCGv_i64 t0 = tcg_const_i64(arg2);
1123         tcg_gen_brcond_i64(cond, arg1, t0, l);
1124         tcg_temp_free_i64(t0);
1125     }
1126 }
1127 
/* Generate ret = (arg1 COND arg2) ? 1 : 0.  */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: double-word compare writes the low half;
               the high half of the 0/1 result is always zero.  */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1146 
1147 void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
1148                           TCGv_i64 arg1, int64_t arg2)
1149 {
1150     TCGv_i64 t0 = tcg_const_i64(arg2);
1151     tcg_gen_setcond_i64(cond, ret, arg1, t0);
1152     tcg_temp_free_i64(t0);
1153 }
1154 
1155 void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1156 {
1157     TCGv_i64 t0 = tcg_const_i64(arg2);
1158     tcg_gen_mul_i64(ret, arg1, t0);
1159     tcg_temp_free_i64(t0);
1160 }
1161 
/* Generate ret = arg1 / arg2 (signed).  Prefer a native div op, then
   the two-operand div2 op, then fall back to a helper call.  */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 takes a double-width dividend: sign-extend arg1 into t0
           for the high part.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1175 
/* Generate ret = arg1 % arg2 (signed).  Prefer a native rem op, then
   synthesize from div (r = a - (a/b)*b), then div2, then a helper.  */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 produces quotient and remainder; remainder goes to the
           second output, so ret is in that slot here.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1195 
/* Generate ret = arg1 / arg2 (unsigned).  Prefer a native divu op, then
   the two-operand divu2 op, then fall back to a helper call.  */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 takes a double-width dividend: zero for the high part.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1209 
/* Generate ret = arg1 % arg2 (unsigned).  Prefer a native remu op, then
   synthesize from divu (r = a - (a/b)*b), then divu2, then a helper.  */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2's second output is the remainder, so ret is there.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1229 
/* Generate ret = sign-extension of the low 8 bits of arg.  */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend in the low half, replicate its sign into the high half.  */
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        /* Shift the byte to the top, then arithmetic-shift it back.  */
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}
1242 
/* Generate ret = sign-extension of the low 16 bits of arg.  */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend in the low half, replicate its sign into the high half.  */
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        /* Shift the halfword to the top, then arithmetic-shift back.  */
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}
1255 
/* Generate ret = sign-extension of the low 32 bits of arg.  */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the low half, replicate its sign bit into the high half.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        /* Shift the word to the top, then arithmetic-shift it back.  */
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
1268 
/* Generate ret = zero-extension of the low 8 bits of arg.  */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend the low half, zero the high half.  */
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        /* Fall back to masking; andi does not recurse back here.  */
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}
1280 
/* Generate ret = zero-extension of the low 16 bits of arg.  */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend the low half, zero the high half.  */
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        /* Fall back to masking; andi does not recurse back here.  */
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}
1292 
/* Generate ret = zero-extension of the low 32 bits of arg.  */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the low half, zero the high half.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        /* Fall back to masking; andi does not recurse back here.  */
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1304 
/* Generate ret = byte-swap of the low 16 bits of arg.
   Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap within the low half; high half stays zero.  */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        /* Manual swap: (arg & 0xff) << 8 | arg >> 8.  */
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}
1323 
/* Generate ret = byte-swap of the low 32 bits of arg.
   Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap within the low half; high half stays zero.  */
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        /* Manual swap: move each of the four bytes into place and OR
           the pieces together in t0.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 3 (ext32u discards bits shifted above bit 31).  */
        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        /* Byte 1 -> byte 2.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 0.  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1354 
/* Generate ret = full 64-bit byte-swap of arg.  */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap each 32-bit half, then exchange the halves.  Temporaries
           allow ret to alias arg.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        /* Manual swap: move each of the eight bytes into its mirrored
           position and accumulate the pieces in t0.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 7.  */
        tcg_gen_shli_i64(t0, arg, 56);

        /* Byte 1 -> byte 6.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 5.  */
        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 4.  */
        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 4 -> byte 3.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 5 -> byte 2.  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 6 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 7 -> byte 0.  */
        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1406 
/* Generate ret = ~arg (bitwise NOT).  */
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Invert the two halves independently.  */
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        /* ~x == x ^ -1; xori does not recurse back here since we just
           checked TCG_TARGET_HAS_not_i64.  */
        tcg_gen_xori_i64(ret, arg, -1);
    }
}
1418 
/* Generate ret = arg1 & ~arg2 (AND with complement).  */
void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        /* Expand as NOT + AND.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1433 
/* Generate ret = ~(arg1 ^ arg2) (equivalence / XNOR).  */
void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        /* Expand as XOR + NOT.  */
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1446 
/* Generate ret = ~(arg1 & arg2) (NAND).  */
void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        /* Expand as AND + NOT.  */
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1459 
/* Generate ret = ~(arg1 | arg2) (NOR).  */
void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        /* Expand as OR + NOT.  */
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1472 
/* Generate ret = arg1 | ~arg2 (OR with complement).  */
void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        /* Expand as NOT + OR.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1487 
/* Generate ret = arg1 rotated left by arg2 bits.  */
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        /* Expand as (arg1 << arg2) | (arg1 >> (64 - arg2)).  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1504 
/* Generate ret = arg1 rotated left by constant arg2 (0 <= arg2 < 64).  */
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        /* Rotate by zero is a move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        /* Expand as (arg1 << arg2) | (arg1 >> (64 - arg2)); arg2 != 0
           so 64 - arg2 is a valid shift count.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1526 
/* Generate ret = arg1 rotated right by arg2 bits.  */
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        /* Expand as (arg1 >> arg2) | (arg1 << (64 - arg2)).  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1543 
1544 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1545 {
1546     tcg_debug_assert(arg2 < 64);
1547     /* some cases can be optimized here */
1548     if (arg2 == 0) {
1549         tcg_gen_mov_i64(ret, arg1);
1550     } else {
1551         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
1552     }
1553 }
1554 
/* Deposit the low LEN bits of arg2 into arg1 at bit offset OFS,
   storing the result in ret; all other bits of arg1 are preserved.  */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        /* Full-width deposit replaces arg1 entirely.  */
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* If the field lies entirely within one 32-bit half, deposit
           into that half and copy the other unchanged.  */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    /* Generic expansion: mask the field out of arg1 and OR in the
       shifted field from arg2.  len < 64 here (the len == 64 case
       returned above), so the shift below is well-defined.  */
    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* Field reaches bit 63: the shift alone discards the high bits.  */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}
1603 
/* Generate ret = (c1 COND c2) ? v1 : v2.  */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: compute the 0/1 condition into t0 first.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half on t0 != 0.  */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Branch-free select: turn t0 into an all-ones/all-zeros
               mask and blend: ret = (v1 & mask) | (v2 & ~mask).  */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Branch-free select via a setcond-derived mask, as above.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1651 
/* Generate a 128-bit add: {rh,rl} = {ah,al} + {bh,bl}.  */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        /* Expand manually: low sum in t0, carry detected by unsigned
           compare (t0 < al iff the low add wrapped).  t0 holds the low
           result until the end so rl may alias the inputs.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1669 
/* Generate a 128-bit subtract: {rh,rl} = {ah,al} - {bh,bl}.  */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        /* Expand manually: borrow occurs iff al < bl (unsigned).  t0
           holds the low result until the end so rl may alias inputs.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1687 
/* Generate an unsigned 64x64->128 multiply: {rh,rl} = arg1 * arg2.  */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Separate low/high ops; buffer the low part in t so that rl
           may alias arg1/arg2.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* No host support for the high part: use the helper.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1706 
/* Generate a signed 64x64->128 multiply: {rh,rl} = arg1 * arg2.  */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Separate low/high ops; buffer the low part in t so that rl
           may alias arg1/arg2.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        /* Derive the signed result from the unsigned product:
           subtract arg2 from the high part when arg1 < 0, and arg1
           when arg2 < 0 (standard signed/unsigned correction).  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* No host support at all: helper for the high part.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1743 
1744 /* Size changing operations.  */
1745 
/* Extract the low 32 bits of a 64-bit value into a 32-bit temp.  */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrl_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* 64-bit host without the op: i32 and i64 temps share the same
           register file, so reinterpret the i64 index as an i32.  */
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
    }
}
1757 
/* Extract the high 32 bits of a 64-bit value into a 32-bit temp.  */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrh_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* Shift the high half down, then reinterpret the i64 temp's
           index as an i32 (same register file on a 64-bit host).  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}
1772 
/* Zero-extend a 32-bit value into a 64-bit temp.  */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy into the low half, zero the high half.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extu_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1783 
/* Sign-extend a 32-bit value into a 64-bit temp.  */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy into the low half, replicate the sign bit into the high.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_ext_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1794 
/* Build a 64-bit value from two 32-bit halves: dest = high:low.  */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* The halves map directly onto the double-word temp.  */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
1820 
/* Split a 64-bit value into two 32-bit temps: lo = low half, hi = high.  */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
1831 
/* Split a 64-bit value into two 64-bit temps holding the zero-extended
   low and high 32-bit halves.  Note that lo is written before arg is
   read again for hi, so lo must not alias arg (hi may).  */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
1837 
1838 /* QEMU specific operations.  */
1839 
/* Emit a goto_tb opcode for chained-exit slot IDX of the current
   translation block.  */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
1851 
/* Canonicalize a memory operation flag set: strip flags that cannot
   affect the result of the given access (is64 = 64-bit value operand,
   st = store), so that equivalent operations compare equal.  */
static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
{
    switch (op & MO_SIZE) {
    case MO_8:
        /* A single byte has no endianness; drop any byte-swap flag.  */
        op &= ~MO_BSWAP;
        break;
    case MO_16:
        break;
    case MO_32:
        if (!is64) {
            /* A 32-bit load into a 32-bit value: no extension occurs,
               so the sign flag is meaningless.  */
            op &= ~MO_SIGN;
        }
        break;
    case MO_64:
        if (!is64) {
            /* A 64-bit access cannot target a 32-bit value operand.  */
            tcg_abort();
        }
        break;
    }
    if (st) {
        /* Stores never extend; the sign flag is meaningless.  */
        op &= ~MO_SIGN;
    }
    return op;
}
1876 
/* Emit a qemu_ld/st opcode OPC for a 32-bit value, folding the memop
   and mmu index into a single TCGMemOpIdx operand.  The operand count
   varies with how many host registers the guest address occupies.  */
static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    /* 32-bit guest address fits in a single operand.  */
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit guest address on a 32-bit host: pass both halves.  */
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        /* Mixed operand types; use the raw op3 emitter.  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
    }
#endif
}
1891 
/* Emit a qemu_ld/st opcode OPC for a 64-bit value, folding the memop
   and mmu index into a single TCGMemOpIdx operand.  The operand count
   varies with how many host registers the value and the guest address
   each occupy.  */
static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit value split across two host registers.  */
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        /* Mixed operand types; use the raw op3 emitter.  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* Both the value and the address are split into halves.  */
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}
1911 
/* Load a 32-bit value from guest memory at ADDR, mmu index IDX,
   with access characteristics MEMOP.  Emits a tracing event before
   the load opcode.  */
void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
}
1919 
/* Store a 32-bit value to guest memory at ADDR, mmu index IDX,
   with access characteristics MEMOP.  Emits a tracing event before
   the store opcode.  */
void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 1);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 1));
    gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
}
1927 
/* Load a 64-bit value from guest memory at ADDR, mmu index IDX,
   with access characteristics MEMOP.  Emits a tracing event before
   the load opcode.  */
void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        /* Sub-64-bit access on a 32-bit host: perform a 32-bit load
           into the low half and extend into the high half here,
           rather than emitting a 64-bit load opcode.  */
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 0);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
}
1945 
/* Store a 64-bit value to guest memory at ADDR, mmu index IDX,
   with access characteristics MEMOP.  Emits a tracing event before
   the store opcode.  */
void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        /* Sub-64-bit access on a 32-bit host: only the low half is
           written, so a 32-bit store suffices.  */
        tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 1);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 1));
    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
}
1958