xref: /openbmc/qemu/tcg/tcg-op.c (revision 259ebed4)
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24 
25 #include "qemu/osdep.h"
26 #include "tcg/tcg.h"
27 #include "tcg/tcg-temp-internal.h"
28 #include "tcg/tcg-op-common.h"
29 #include "exec/translation-block.h"
30 #include "exec/plugin-gen.h"
31 #include "tcg-internal.h"
32 
33 
34 void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
35 {
36     TCGOp *op = tcg_emit_op(opc, 1);
37     op->args[0] = a1;
38 }
39 
40 void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
41 {
42     TCGOp *op = tcg_emit_op(opc, 2);
43     op->args[0] = a1;
44     op->args[1] = a2;
45 }
46 
47 void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
48 {
49     TCGOp *op = tcg_emit_op(opc, 3);
50     op->args[0] = a1;
51     op->args[1] = a2;
52     op->args[2] = a3;
53 }
54 
55 void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
56 {
57     TCGOp *op = tcg_emit_op(opc, 4);
58     op->args[0] = a1;
59     op->args[1] = a2;
60     op->args[2] = a3;
61     op->args[3] = a4;
62 }
63 
64 void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
65                  TCGArg a4, TCGArg a5)
66 {
67     TCGOp *op = tcg_emit_op(opc, 5);
68     op->args[0] = a1;
69     op->args[1] = a2;
70     op->args[2] = a3;
71     op->args[3] = a4;
72     op->args[4] = a5;
73 }
74 
75 void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
76                  TCGArg a4, TCGArg a5, TCGArg a6)
77 {
78     TCGOp *op = tcg_emit_op(opc, 6);
79     op->args[0] = a1;
80     op->args[1] = a2;
81     op->args[2] = a3;
82     op->args[3] = a4;
83     op->args[4] = a5;
84     op->args[5] = a6;
85 }
86 
87 /* Generic ops.  */
88 
89 static void add_last_as_label_use(TCGLabel *l)
90 {
91     TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));
92 
93     u->op = tcg_last_op();
94     QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
95 }
96 
/* Emit an unconditional branch to label L, recording the branch as a use. */
void tcg_gen_br(TCGLabel *l)
{
    tcg_gen_op1(INDEX_op_br, label_arg(l));
    add_last_as_label_use(l);
}
102 
/*
 * Emit a memory barrier of the given type, unless we can prove the
 * translation runs without any concurrent memory access.
 */
void tcg_gen_mb(TCGBar mb_type)
{
#ifdef CONFIG_USER_ONLY
    /* User-mode: barriers only matter when the TB runs in parallel mode. */
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single cpu we have i/o threads running in
     * parallel, and lack of memory order can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (parallel) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}
121 
122 /* 32 bit ops */
123 
/* Load the 32-bit constant ARG into RET. */
void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}
128 
129 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
130 {
131     /* some cases can be optimized here */
132     if (arg2 == 0) {
133         tcg_gen_mov_i32(ret, arg1);
134     } else {
135         tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
136     }
137 }
138 
139 void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
140 {
141     if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
142         /* Don't recurse with tcg_gen_neg_i32.  */
143         tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
144     } else {
145         tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
146     }
147 }
148 
149 void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
150 {
151     /* some cases can be optimized here */
152     if (arg2 == 0) {
153         tcg_gen_mov_i32(ret, arg1);
154     } else {
155         tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
156     }
157 }
158 
159 void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
160 {
161     /* Some cases can be optimized here.  */
162     switch (arg2) {
163     case 0:
164         tcg_gen_movi_i32(ret, 0);
165         return;
166     case -1:
167         tcg_gen_mov_i32(ret, arg1);
168         return;
169     case 0xff:
170         /* Don't recurse with tcg_gen_ext8u_i32.  */
171         if (TCG_TARGET_HAS_ext8u_i32) {
172             tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
173             return;
174         }
175         break;
176     case 0xffff:
177         if (TCG_TARGET_HAS_ext16u_i32) {
178             tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
179             return;
180         }
181         break;
182     }
183 
184     tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
185 }
186 
187 void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
188 {
189     /* Some cases can be optimized here.  */
190     if (arg2 == -1) {
191         tcg_gen_movi_i32(ret, -1);
192     } else if (arg2 == 0) {
193         tcg_gen_mov_i32(ret, arg1);
194     } else {
195         tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
196     }
197 }
198 
199 void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
200 {
201     /* Some cases can be optimized here.  */
202     if (arg2 == 0) {
203         tcg_gen_mov_i32(ret, arg1);
204     } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
205         /* Don't recurse with tcg_gen_not_i32.  */
206         tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
207     } else {
208         tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
209     }
210 }
211 
212 void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
213 {
214     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
215     if (arg2 == 0) {
216         tcg_gen_mov_i32(ret, arg1);
217     } else {
218         tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
219     }
220 }
221 
222 void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
223 {
224     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
225     if (arg2 == 0) {
226         tcg_gen_mov_i32(ret, arg1);
227     } else {
228         tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
229     }
230 }
231 
232 void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
233 {
234     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
235     if (arg2 == 0) {
236         tcg_gen_mov_i32(ret, arg1);
237     } else {
238         tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
239     }
240 }
241 
/*
 * Branch to L if (ARG1 cond ARG2).  A condition known ALWAYS becomes an
 * unconditional branch; a condition known NEVER emits nothing at all.
 */
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
        /* Track the just-emitted brcond as a use of the label. */
        add_last_as_label_use(l);
    }
}
251 
/* Branch to L if (ARG1 cond ARG2), with ARG2 an immediate. */
void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* Avoid materializing the constant for a branch never taken. */
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}
260 
261 void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
262                          TCGv_i32 arg1, TCGv_i32 arg2)
263 {
264     if (cond == TCG_COND_ALWAYS) {
265         tcg_gen_movi_i32(ret, 1);
266     } else if (cond == TCG_COND_NEVER) {
267         tcg_gen_movi_i32(ret, 0);
268     } else {
269         tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
270     }
271 }
272 
/* RET = (ARG1 cond ARG2) ? 1 : 0, with ARG2 an immediate. */
void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
278 
/* RET = (ARG1 cond ARG2) ? -1 : 0 (all-ones mask on true). */
void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
                            TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i32) {
        tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
    } else {
        /* Fallback: compute the 0/1 result, then negate it to 0/-1. */
        tcg_gen_setcond_i32(cond, ret, arg1, arg2);
        tcg_gen_neg_i32(ret, ret);
    }
}
293 
/* RET = (ARG1 cond ARG2) ? -1 : 0, with ARG2 an immediate. */
void tcg_gen_negsetcondi_i32(TCGCond cond, TCGv_i32 ret,
                             TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_negsetcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
299 
/* RET = ARG1 * ARG2, with ARG2 an immediate. */
void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        /* Multiply by 2^n is a left shift by n. */
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
310 
/* RET = ARG1 / ARG2, signed 32-bit division. */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        /* Sign-extend arg1 into t0 to form the 64-bit dividend high word. */
        tcg_gen_sari_i32(t0, arg1, 31);
        /* div2 outputs appear to be (quotient, remainder); the remainder
           lands in t0 and is discarded here.  */
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        /* No suitable host instruction: call the out-of-line helper. */
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
324 
/* RET = ARG1 % ARG2, signed 32-bit remainder. */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        /* Sign-extend arg1 into t0 as the high word of the dividend. */
        tcg_gen_sari_i32(t0, arg1, 31);
        /* Note the swapped outputs vs tcg_gen_div_i32: the remainder
           is directed into ret, the quotient into the dead t0.  */
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
344 
/* RET = ARG1 / ARG2, unsigned 32-bit division. */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        /* Unsigned dividend: the high word is simply zero. */
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
358 
/* RET = ARG1 % ARG2, unsigned 32-bit remainder. */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        /* Outputs swapped vs tcg_gen_divu_i32: remainder goes to ret. */
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
378 
379 void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
380 {
381     if (TCG_TARGET_HAS_andc_i32) {
382         tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
383     } else {
384         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
385         tcg_gen_not_i32(t0, arg2);
386         tcg_gen_and_i32(ret, arg1, t0);
387         tcg_temp_free_i32(t0);
388     }
389 }
390 
/* RET = ~(ARG1 ^ ARG2), i.e. bitwise equivalence. */
void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}
400 
/* RET = ~(ARG1 & ARG2). */
void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}
410 
/* RET = ~(ARG1 | ARG2). */
void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}
420 
421 void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
422 {
423     if (TCG_TARGET_HAS_orc_i32) {
424         tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
425     } else {
426         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
427         tcg_gen_not_i32(t0, arg2);
428         tcg_gen_or_i32(ret, arg1, t0);
429         tcg_temp_free_i32(t0);
430     }
431 }
432 
/*
 * Count leading zeros of ARG1 into RET; ARG2 supplies the result to use
 * when ARG1 is zero.
 */
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        /* Widen to 64 bits; a zero-extended 32-bit value has 32 extra
           leading zeros, so bias the zero-input fallback by +32 and
           subtract 32 from the final count.  */
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}
452 
/* Count leading zeros with an immediate zero-input fallback value. */
void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}
457 
/*
 * Count trailing zeros of ARG1 into RET; ARG2 supplies the result to use
 * when ARG1 is zero.
 */
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        /* Widen to 64 bits; low 32 bits are preserved, so the trailing
           zero count of a nonzero value is unchanged.  */
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            /* ctz(x) == popcount((x - 1) & ~x) for nonzero x:
               the mask selects exactly the trailing zero bits.  */
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            /* Isolate the lowest set bit (x & -x), then 31 - clz gives
               its index; xor with 31 performs the 31 - n.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        /* Select ARG2 when the input was zero, else the computed count. */
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
495 
/* Count trailing zeros with an immediate zero-input fallback value. */
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        /* popcount((x - 1) & ~x) == 32 exactly when x == 0, which is the
           requested fallback, so no movcond is needed.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
509 
/*
 * Count leading redundant sign bits of ARG into RET (the number of bits
 * following the sign bit that are equal to it).
 */
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        /* XOR against a sign-mask makes redundant sign bits into leading
           zeros; clz then counts them, and -1 excludes the sign bit.  */
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
523 
/* RET = population count (number of set bits) of ARG1. */
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        /* Zero-extend and use the 64-bit popcount; high bits are zero. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}
538 
/* RET = ARG1 rotated left by ARG2 bits. */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        /* Fallback: (x << n) | (x >> (32 - n)). */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
556 
/* RET = ARG1 rotated left by the immediate ARG2; ARG2 in [0, 31]. */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        /* Fallback: (x << n) | (x >> (32 - n)) with constant shifts. */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
576 
/* RET = ARG1 rotated right by ARG2 bits. */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        /* Fallback: (x >> n) | (x << (32 - n)). */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
594 
595 void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
596 {
597     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
598     /* some cases can be optimized here */
599     if (arg2 == 0) {
600         tcg_gen_mov_i32(ret, arg1);
601     } else {
602         tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
603     }
604 }
605 
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS,
 * leaving the other bits of ARG1 unchanged, into RET.
 */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* A full-width deposit replaces arg1 entirely. */
    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        /* Deposits touching bit 0 or bit 31 can be phrased as extract2. */
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    /* Generic fallback: mask arg2 into position, clear the field in
       arg1, then OR the two together.  */
    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches bit 31: the shift itself discards high bits. */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
653 
/*
 * Deposit the low LEN bits of ARG at bit offset OFS into an otherwise
 * zero RET (i.e. deposit into a zero background).
 */
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        /* Field ends at bit 31: a plain shift clears the low bits. */
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field starts at bit 0: a plain mask suffices. */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        /* Deposit into a zero constant. */
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: mask then shift. */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
710 
/*
 * Extract LEN bits of ARG starting at bit OFS into RET, zero-extended.
 */
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Field reaches bit 31: a single logical shift suffices. */
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        /* Field starts at bit 0: a single mask suffices. */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        /* Shift down, then mask off the field. */
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        /* Shift the field to the top, then logical-shift back down,
           letting the shifts do the zero extension.  */
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
767 
/*
 * Extract LEN bits of ARG starting at bit OFS into RET, sign-extended.
 */
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Field reaches bit 31: one arithmetic shift suffices. */
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        /* Byte/halfword at bit 0: use the dedicated sign extensions. */
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    /* Last resort: shift the field to the top, then arithmetic-shift
       back down to sign-extend.  */
    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
835 
/*
 * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        /* Both halves identical: the extract is a rotate of one half. */
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        /* Fallback: high bits of al, then deposit the low bits of ah
           into the top of the result.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
859 
/* RET = (C1 cond C2) ? V1 : V2. */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        /* Fallback: build an all-ones/all-zeros mask from the condition
           and blend: ret = (v1 & mask) | (v2 & ~mask).  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_negsetcond_i32(cond, t0, c1, c2);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
880 
/* 64-bit add on 32-bit halves: RH:RL = AH:AL + BH:BL. */
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback: widen both operands to i64, add, split the result. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
897 
/* 64-bit subtract on 32-bit halves: RH:RL = AH:AL - BH:BL. */
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback: widen both operands to i64, subtract, split the result. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
914 
/* Unsigned 32x32->64 multiply: RH:RL = ARG1 * ARG2. */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        /* Compute low into a temp first so rl may alias an input. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        /* 64-bit host: widen, multiply, split. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        /* 32-bit hosts are required to implement one of the above. */
        qemu_build_not_reached();
    }
}
938 
/* Signed 32x32->64 multiply: RH:RL = ARG1 * ARG2. */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        /* Compute low into a temp first so rl may alias an input. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Start from the unsigned product and correct the high half:
           for each negative input, subtract the other operand.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host: sign-extend, multiply, split. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
978 
/*
 * Mixed-sign widening multiply: rl:rh = (signed)arg1 * (unsigned)arg2.
 */
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);     /* t2 = -1 if arg1 < 0 */
        tcg_gen_and_i32(t2, t2, arg2);      /* t2 = arg1 < 0 ? arg2 : 0 */
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        /* 64-bit host: sign-extend arg1, zero-extend arg2, multiply, split. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1005 
1006 void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
1007 {
1008     if (TCG_TARGET_HAS_ext8s_i32) {
1009         tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
1010     } else {
1011         tcg_gen_shli_i32(ret, arg, 24);
1012         tcg_gen_sari_i32(ret, ret, 24);
1013     }
1014 }
1015 
1016 void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
1017 {
1018     if (TCG_TARGET_HAS_ext16s_i32) {
1019         tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
1020     } else {
1021         tcg_gen_shli_i32(ret, arg, 16);
1022         tcg_gen_sari_i32(ret, ret, 16);
1023     }
1024 }
1025 
1026 void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
1027 {
1028     if (TCG_TARGET_HAS_ext8u_i32) {
1029         tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
1030     } else {
1031         tcg_gen_andi_i32(ret, arg, 0xffu);
1032     }
1033 }
1034 
1035 void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
1036 {
1037     if (TCG_TARGET_HAS_ext16u_i32) {
1038         tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
1039     } else {
1040         tcg_gen_andi_i32(ret, arg, 0xffffu);
1041     }
1042 }
1043 
1044 /*
1045  * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
1046  *
1047  * Byte pattern: xxab -> yyba
1048  *
1049  * With TCG_BSWAP_IZ, x == zero, else undefined.
1050  * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1051  */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        /* Fallback: build the result from the two bytes separately. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

                                            /* arg = ..ab (IZ) xxab (!IZ) */
        tcg_gen_shri_i32(t0, arg, 8);       /*  t0 = ...a (IZ) .xxa (!IZ) */
        if (!(flags & TCG_BSWAP_IZ)) {
            /* Input high bits unknown: explicitly isolate byte 'a'. */
            tcg_gen_ext8u_i32(t0, t0);      /*  t0 = ...a */
        }

        if (flags & TCG_BSWAP_OS) {
            /* Sign-extend from the new bit 15 (i.e. from byte 'b'). */
            tcg_gen_shli_i32(t1, arg, 24);  /*  t1 = b... */
            tcg_gen_sari_i32(t1, t1, 16);   /*  t1 = ssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);     /*  t1 = ...b */
            tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = ..b. */
        } else {
            /* Output high bits undefined: cheapest placement of 'b'. */
            tcg_gen_shli_i32(t1, arg, 8);   /*  t1 = xab. */
        }

        tcg_gen_or_i32(ret, t0, t1);        /* ret = ..ba (OZ) */
                                            /*     = ssba (OS) */
                                            /*     = xaba (no flag) */
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1086 
1087 /*
1088  * bswap32_i32: 32-bit byte swap on a 32-bit value.
1089  *
1090  * Byte pattern: abcd -> dcba
1091  */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        /* Fallback: swap bytes within each halfword using the
           0x00ff00ff mask, then swap the two halfwords.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1116 
1117 /*
1118  * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
1119  *
1120  * Byte pattern: abcd -> cdab
1121  */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}
1127 
/* ret = signed minimum of a and b. */
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}
1132 
/* ret = unsigned minimum of a and b. */
void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}
1137 
/* ret = signed maximum of a and b. */
void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}
1142 
/* ret = unsigned maximum of a and b. */
void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
1147 
/* ret = |a|, via the branchless sign-mask trick. */
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    tcg_gen_sari_i32(t, a, 31);      /* t = a < 0 ? -1 : 0 */
    tcg_gen_xor_i32(ret, a, t);      /* conditional bitwise complement */
    tcg_gen_sub_i32(ret, ret, t);    /* ... plus 1 when negative = negate */
    tcg_temp_free_i32(t);
}
1157 
1158 /* 64-bit ops */
1159 
1160 #if TCG_TARGET_REG_BITS == 32
1161 /* These are all inline for TCG_TARGET_REG_BITS == 64.  */
1162 
/* Discard both 32-bit halves backing the 64-bit temporary. */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}
1168 
/* Copy a 64-bit value on a 32-bit host, one half at a time. */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGTemp *ts = tcgv_i64_temp(arg);

    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
    if (ts->kind == TEMP_CONST) {
        tcg_gen_movi_i64(ret, ts->val);
    } else {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}
1181 
/* Load a 64-bit immediate as two 32-bit halves. */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
1187 
/* 8-bit zero-extending load into a 64-bit destination: high word is 0. */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
1193 
/* 8-bit sign-extending load: high word replicates the sign of the low. */
void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
1199 
/* 16-bit zero-extending load into a 64-bit destination: high word is 0. */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
1205 
/* 16-bit sign-extending load: high word replicates the sign of the low. */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
1211 
/* 32-bit zero-extending load into a 64-bit destination: high word is 0. */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
1217 
/* 32-bit sign-extending load: high word replicates the sign of the low. */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
1223 
/* Full 64-bit load as two 32-bit loads; which half sits at the lower
   address depends on the host's byte order. */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#if HOST_BIG_ENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
1236 
/* Store the low 8 bits; only the low half is needed. */
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}
1241 
/* Store the low 16 bits; only the low half is needed. */
void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}
1246 
/* Store the low 32 bits; only the low half is needed. */
void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}
1251 
/* Full 64-bit store as two 32-bit stores; half placement follows the
   host's byte order, matching tcg_gen_ld_i64 above. */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#if HOST_BIG_ENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
1262 
/* 64-bit add on a 32-bit host: double-word add with carry propagation. */
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}
1268 
/* 64-bit subtract on a 32-bit host: double-word subtract with borrow. */
void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}
1274 
/* Bitwise AND is independent per half. */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
1280 
/* Bitwise OR is independent per half. */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
1286 
/* Bitwise XOR is independent per half. */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
1292 
/* Variable 64-bit shift-left on a 32-bit host: punt to a helper call. */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}
1297 
/* Variable 64-bit logical shift-right on a 32-bit host: helper call. */
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}
1302 
/* Variable 64-bit arithmetic shift-right on a 32-bit host: helper call. */
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
1307 
/*
 * 64-bit multiply on a 32-bit host: low x low gives the 64-bit base
 * product; the two cross products only affect the high word.  The
 * result is built in a temporary so ret may alias either input.
 */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    /* Cross products; bits above 2^64 are discarded. */
    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
1328 
1329 #else
1330 
/* 64-bit host: a 64-bit immediate is just a move of a constant temp. */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
}
1335 
#endif /* TCG_TARGET_REG_BITS == 32 */
1337 
/* ret = arg1 + constant arg2. */
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* 32-bit host: split the constant into halves for add2. */
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}
1351 
/* ret = constant arg1 - arg2 (reverse subtract from immediate). */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        /* 32-bit host: split the constant into halves for sub2. */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}
1365 
/* ret = arg1 - constant arg2. */
void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* 32-bit host: split the constant into halves for sub2. */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}
1379 
/* ret = arg1 & constant arg2, with strength reduction for common masks. */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: mask each half independently. */
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}
1419 
/* ret = arg1 | constant arg2. */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: OR each half independently. */
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1436 
/* ret = arg1 ^ constant arg2. */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: XOR each half independently. */
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1454 
/*
 * Double-word shift by a constant on a 32-bit host.
 * right/arith select among shl (0/x), shr (1/0) and sar (1/1).
 * Cases: c == 0 is a move; c >= 32 moves one half into the other and
 * fills the vacated half; 0 < c < 32 funnels bits across the halves
 * with extract2 (or shri+deposit when unavailable).
 */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                /* Fill the high half with the sign of the input. */
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        if (TCG_TARGET_HAS_extract2_i32) {
            /* Low result = low bits of high word funneled into low word. */
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        if (TCG_TARGET_HAS_extract2_i32) {
            /* High result = top bits of low word funneled into high word. */
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        /* Low word last, so ret may alias arg1. */
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
1504 
/* ret = arg1 << constant arg2 (0 <= arg2 < 64). */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1516 
/* ret = arg1 >> constant arg2, logical (0 <= arg2 < 64). */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1528 
/* ret = arg1 >> constant arg2, arithmetic (0 <= arg2 < 64). */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1540 
/* Conditional branch to label l if (arg1 cond arg2); ALWAYS degenerates
   to an unconditional branch and NEVER emits nothing. */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: compare the two halves with brcond2. */
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
        /* Register the just-emitted op as a use of the label. */
        add_last_as_label_use(l);
    }
}
1557 
/* Conditional branch against a 64-bit immediate. */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* 32-bit host: immediate split into halves for brcond2. */
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
        /* Register the just-emitted op as a use of the label. */
        add_last_as_label_use(l);
    }
}
1573 
/* ret = (arg1 cond arg2) ? 1 : 0. */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* setcond2 writes only the low word; clear the high word. */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1592 
/* ret = (arg1 cond constant arg2) ? 1 : 0. */
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        /* 32-bit host: setcond2 with the immediate split into halves;
           it writes only the low word, so clear the high word.  */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1610 
/* ret = (arg1 cond constant arg2) ? -1 : 0. */
void tcg_gen_negsetcondi_i64(TCGCond cond, TCGv_i64 ret,
                             TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_negsetcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
}
1616 
/* ret = (arg1 cond arg2) ? -1 : 0 (all-ones mask on true). */
void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
                            TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i64) {
        tcg_gen_op4i_i64(INDEX_op_negsetcond_i64, ret, arg1, arg2, cond);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* setcond2 produces 0/1 in the low word; negate it to 0/-1 and
           replicate into the high word.  */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
        tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
    } else {
        tcg_gen_setcond_i64(cond, ret, arg1, arg2);
        tcg_gen_neg_i64(ret, ret);
    }
}
1637 
/* ret = arg1 * constant arg2; powers of two become shifts. */
void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1648 
/* Signed 64-bit division: native div op, div2 (double-word dividend)
   with a sign-extended high word, or a helper call. */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        /* Sign-extend arg1 into the high word of the dividend. */
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1662 
/* Signed 64-bit remainder: native rem op, rem = arg1 - (arg1/arg2)*arg2
   via div, div2 (remainder in second output), or a helper call. */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        /* Sign-extend arg1 into the high word of the dividend. */
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1682 
/* Unsigned 64-bit division: native divu op, divu2 with a zero high
   word, or a helper call. */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, zero, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1696 
/* Unsigned 64-bit remainder: native remu op, remu = arg1 - (arg1/arg2)*arg2
   via divu, divu2 (remainder in second output), or a helper call. */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1716 
/* Sign-extend the low 8 bits of arg into a 64-bit ret. */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low word, then replicate its sign on top. */
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        /* Shift-pair fallback: move bit 7 to bit 63 and back. */
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}
1729 
/* Sign-extend the low 16 bits of arg into a 64-bit ret. */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low word, then replicate its sign on top. */
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        /* Shift-pair fallback: move bit 15 to bit 63 and back. */
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}
1742 
/* Sign-extend the low 32 bits of arg into a 64-bit ret. */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the low word and replicate its sign on top. */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        /* Shift-pair fallback: move bit 31 to bit 63 and back. */
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
1755 
/* Zero-extend the low 8 bits of arg into a 64-bit ret. */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}
1767 
/* Zero-extend the low 16 bits of arg into a 64-bit ret. */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}
1779 
/* Zero-extend the low 32 bits of arg into a 64-bit ret. */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1791 
1792 /*
1793  * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
1794  *
1795  * Byte pattern: xxxxxxxxab -> yyyyyyyyba
1796  *
1797  * With TCG_BSWAP_IZ, x == zero, else undefined.
1798  * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1799  */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap in the low word, then extend into the high word. */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        /* Fallback: build the result from the two bytes separately. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

                                            /* arg = ......ab or xxxxxxab */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .......a or .xxxxxxa */
        if (!(flags & TCG_BSWAP_IZ)) {
            /* Input high bits unknown: explicitly isolate byte 'a'. */
            tcg_gen_ext8u_i64(t0, t0);      /*  t0 = .......a */
        }

        if (flags & TCG_BSWAP_OS) {
            /* Sign-extend from the new bit 15 (i.e. from byte 'b'). */
            tcg_gen_shli_i64(t1, arg, 56);  /*  t1 = b....... */
            tcg_gen_sari_i64(t1, t1, 48);   /*  t1 = ssssssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i64(t1, arg);     /*  t1 = .......b */
            tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = ......b. */
        } else {
            /* Output high bits undefined: cheapest placement of 'b'. */
            tcg_gen_shli_i64(t1, arg, 8);   /*  t1 = xxxxxab. */
        }

        tcg_gen_or_i64(ret, t0, t1);        /* ret = ......ba (OZ) */
                                            /*       ssssssba (OS) */
                                            /*       xxxxxaba (no flag) */
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1841 
1842 /*
1843  * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
1844  *
1845  * Byte pattern: xxxxabcd -> yyyydcba
1846  *
1847  * With TCG_BSWAP_IZ, x == zero, else undefined.
1848  * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1849  */
1850 void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
1851 {
1852     /* Only one extension flag may be present. */
1853     tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));
1854 
1855     if (TCG_TARGET_REG_BITS == 32) {
1856         tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1857         if (flags & TCG_BSWAP_OS) {
1858             tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1859         } else {
1860             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1861         }
1862     } else if (TCG_TARGET_HAS_bswap32_i64) {
1863         tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
1864     } else {
1865         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1866         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
1867         TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);
1868 
1869                                             /* arg = xxxxabcd */
1870         tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
1871         tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
1872         tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
1873         tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
1874         tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */
1875 
1876         tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
1877         tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
1878         if (flags & TCG_BSWAP_OS) {
1879             tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
1880         } else {
1881             tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
1882         }
1883         tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba (OS) */
1884                                             /*       ....dcba (else) */
1885 
1886         tcg_temp_free_i64(t0);
1887         tcg_temp_free_i64(t1);
1888     }
1889 }
1890 
1891 /*
1892  * bswap64_i64: 64-bit byte swap on a 64-bit value.
1893  *
1894  * Byte pattern: abcdefgh -> hgfedcba
1895  */
1896 void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
1897 {
1898     if (TCG_TARGET_REG_BITS == 32) {
1899         TCGv_i32 t0, t1;
1900         t0 = tcg_temp_ebb_new_i32();
1901         t1 = tcg_temp_ebb_new_i32();
1902 
1903         tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
1904         tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
1905         tcg_gen_mov_i32(TCGV_LOW(ret), t1);
1906         tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
1907         tcg_temp_free_i32(t0);
1908         tcg_temp_free_i32(t1);
1909     } else if (TCG_TARGET_HAS_bswap64_i64) {
1910         tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
1911     } else {
1912         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1913         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
1914         TCGv_i64 t2 = tcg_temp_ebb_new_i64();
1915 
1916                                         /* arg = abcdefgh */
1917         tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
1918         tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
1919         tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
1920         tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
1921         tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
1922         tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */
1923 
1924         tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
1925         tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
1926         tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
1927         tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
1928         tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
1929         tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */
1930 
1931         tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
1932         tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
1933         tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */
1934 
1935         tcg_temp_free_i64(t0);
1936         tcg_temp_free_i64(t1);
1937         tcg_temp_free_i64(t2);
1938     }
1939 }
1940 
1941 /*
1942  * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
1943  * See also include/qemu/bitops.h, hswap64.
1944  *
1945  * Byte pattern: abcdefgh -> ghefcdab
1946  */
1947 void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
1948 {
1949     uint64_t m = 0x0000ffff0000ffffull;
1950     TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1951     TCGv_i64 t1 = tcg_temp_ebb_new_i64();
1952 
1953                                         /* arg = abcdefgh */
1954     tcg_gen_rotli_i64(t1, arg, 32);     /*  t1 = efghabcd */
1955     tcg_gen_andi_i64(t0, t1, m);        /*  t0 = ..gh..cd */
1956     tcg_gen_shli_i64(t0, t0, 16);       /*  t0 = gh..cd.. */
1957     tcg_gen_shri_i64(t1, t1, 16);       /*  t1 = ..efghab */
1958     tcg_gen_andi_i64(t1, t1, m);        /*  t1 = ..ef..ab */
1959     tcg_gen_or_i64(ret, t0, t1);        /* ret = ghefcdab */
1960 
1961     tcg_temp_free_i64(t0);
1962     tcg_temp_free_i64(t1);
1963 }
1964 
1965 /*
1966  * wswap_i64: Swap 32-bit words within a 64-bit value.
1967  *
1968  * Byte pattern: abcdefgh -> efghabcd
1969  */
1970 void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
1971 {
1972     /* Swapping 2 32-bit elements is a rotate. */
1973     tcg_gen_rotli_i64(ret, arg, 32);
1974 }
1975 
1976 void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1977 {
1978     if (TCG_TARGET_REG_BITS == 32) {
1979         tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1980         tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1981     } else if (TCG_TARGET_HAS_not_i64) {
1982         tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1983     } else {
1984         tcg_gen_xori_i64(ret, arg, -1);
1985     }
1986 }
1987 
1988 void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1989 {
1990     if (TCG_TARGET_REG_BITS == 32) {
1991         tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1992         tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1993     } else if (TCG_TARGET_HAS_andc_i64) {
1994         tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1995     } else {
1996         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1997         tcg_gen_not_i64(t0, arg2);
1998         tcg_gen_and_i64(ret, arg1, t0);
1999         tcg_temp_free_i64(t0);
2000     }
2001 }
2002 
2003 void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2004 {
2005     if (TCG_TARGET_REG_BITS == 32) {
2006         tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2007         tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2008     } else if (TCG_TARGET_HAS_eqv_i64) {
2009         tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
2010     } else {
2011         tcg_gen_xor_i64(ret, arg1, arg2);
2012         tcg_gen_not_i64(ret, ret);
2013     }
2014 }
2015 
2016 void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2017 {
2018     if (TCG_TARGET_REG_BITS == 32) {
2019         tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2020         tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2021     } else if (TCG_TARGET_HAS_nand_i64) {
2022         tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
2023     } else {
2024         tcg_gen_and_i64(ret, arg1, arg2);
2025         tcg_gen_not_i64(ret, ret);
2026     }
2027 }
2028 
2029 void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2030 {
2031     if (TCG_TARGET_REG_BITS == 32) {
2032         tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2033         tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2034     } else if (TCG_TARGET_HAS_nor_i64) {
2035         tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
2036     } else {
2037         tcg_gen_or_i64(ret, arg1, arg2);
2038         tcg_gen_not_i64(ret, ret);
2039     }
2040 }
2041 
2042 void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2043 {
2044     if (TCG_TARGET_REG_BITS == 32) {
2045         tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2046         tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2047     } else if (TCG_TARGET_HAS_orc_i64) {
2048         tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
2049     } else {
2050         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2051         tcg_gen_not_i64(t0, arg2);
2052         tcg_gen_or_i64(ret, arg1, t0);
2053         tcg_temp_free_i64(t0);
2054     }
2055 }
2056 
2057 void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2058 {
2059     if (TCG_TARGET_HAS_clz_i64) {
2060         tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
2061     } else {
2062         gen_helper_clz_i64(ret, arg1, arg2);
2063     }
2064 }
2065 
2066 void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
2067 {
2068     if (TCG_TARGET_REG_BITS == 32
2069         && TCG_TARGET_HAS_clz_i32
2070         && arg2 <= 0xffffffffu) {
2071         TCGv_i32 t = tcg_temp_ebb_new_i32();
2072         tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
2073         tcg_gen_addi_i32(t, t, 32);
2074         tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
2075         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2076         tcg_temp_free_i32(t);
2077     } else {
2078         tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
2079     }
2080 }
2081 
2082 void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2083 {
2084     if (TCG_TARGET_HAS_ctz_i64) {
2085         tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
2086     } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
2087         TCGv_i64 z, t = tcg_temp_ebb_new_i64();
2088 
2089         if (TCG_TARGET_HAS_ctpop_i64) {
2090             tcg_gen_subi_i64(t, arg1, 1);
2091             tcg_gen_andc_i64(t, t, arg1);
2092             tcg_gen_ctpop_i64(t, t);
2093         } else {
2094             /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
2095             tcg_gen_neg_i64(t, arg1);
2096             tcg_gen_and_i64(t, t, arg1);
2097             tcg_gen_clzi_i64(t, t, 64);
2098             tcg_gen_xori_i64(t, t, 63);
2099         }
2100         z = tcg_constant_i64(0);
2101         tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
2102         tcg_temp_free_i64(t);
2103         tcg_temp_free_i64(z);
2104     } else {
2105         gen_helper_ctz_i64(ret, arg1, arg2);
2106     }
2107 }
2108 
2109 void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
2110 {
2111     if (TCG_TARGET_REG_BITS == 32
2112         && TCG_TARGET_HAS_ctz_i32
2113         && arg2 <= 0xffffffffu) {
2114         TCGv_i32 t32 = tcg_temp_ebb_new_i32();
2115         tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
2116         tcg_gen_addi_i32(t32, t32, 32);
2117         tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
2118         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2119         tcg_temp_free_i32(t32);
2120     } else if (!TCG_TARGET_HAS_ctz_i64
2121                && TCG_TARGET_HAS_ctpop_i64
2122                && arg2 == 64) {
2123         /* This equivalence has the advantage of not requiring a fixup.  */
2124         TCGv_i64 t = tcg_temp_ebb_new_i64();
2125         tcg_gen_subi_i64(t, arg1, 1);
2126         tcg_gen_andc_i64(t, t, arg1);
2127         tcg_gen_ctpop_i64(ret, t);
2128         tcg_temp_free_i64(t);
2129     } else {
2130         tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
2131     }
2132 }
2133 
2134 void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
2135 {
2136     if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
2137         TCGv_i64 t = tcg_temp_ebb_new_i64();
2138         tcg_gen_sari_i64(t, arg, 63);
2139         tcg_gen_xor_i64(t, t, arg);
2140         tcg_gen_clzi_i64(t, t, 64);
2141         tcg_gen_subi_i64(ret, t, 1);
2142         tcg_temp_free_i64(t);
2143     } else {
2144         gen_helper_clrsb_i64(ret, arg);
2145     }
2146 }
2147 
2148 void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
2149 {
2150     if (TCG_TARGET_HAS_ctpop_i64) {
2151         tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
2152     } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
2153         tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2154         tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2155         tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
2156         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2157     } else {
2158         gen_helper_ctpop_i64(ret, arg1);
2159     }
2160 }
2161 
2162 void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2163 {
2164     if (TCG_TARGET_HAS_rot_i64) {
2165         tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
2166     } else {
2167         TCGv_i64 t0, t1;
2168         t0 = tcg_temp_ebb_new_i64();
2169         t1 = tcg_temp_ebb_new_i64();
2170         tcg_gen_shl_i64(t0, arg1, arg2);
2171         tcg_gen_subfi_i64(t1, 64, arg2);
2172         tcg_gen_shr_i64(t1, arg1, t1);
2173         tcg_gen_or_i64(ret, t0, t1);
2174         tcg_temp_free_i64(t0);
2175         tcg_temp_free_i64(t1);
2176     }
2177 }
2178 
2179 void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2180 {
2181     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2182     /* some cases can be optimized here */
2183     if (arg2 == 0) {
2184         tcg_gen_mov_i64(ret, arg1);
2185     } else if (TCG_TARGET_HAS_rot_i64) {
2186         tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
2187     } else {
2188         TCGv_i64 t0, t1;
2189         t0 = tcg_temp_ebb_new_i64();
2190         t1 = tcg_temp_ebb_new_i64();
2191         tcg_gen_shli_i64(t0, arg1, arg2);
2192         tcg_gen_shri_i64(t1, arg1, 64 - arg2);
2193         tcg_gen_or_i64(ret, t0, t1);
2194         tcg_temp_free_i64(t0);
2195         tcg_temp_free_i64(t1);
2196     }
2197 }
2198 
2199 void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2200 {
2201     if (TCG_TARGET_HAS_rot_i64) {
2202         tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
2203     } else {
2204         TCGv_i64 t0, t1;
2205         t0 = tcg_temp_ebb_new_i64();
2206         t1 = tcg_temp_ebb_new_i64();
2207         tcg_gen_shr_i64(t0, arg1, arg2);
2208         tcg_gen_subfi_i64(t1, 64, arg2);
2209         tcg_gen_shl_i64(t1, arg1, t1);
2210         tcg_gen_or_i64(ret, t0, t1);
2211         tcg_temp_free_i64(t0);
2212         tcg_temp_free_i64(t1);
2213     }
2214 }
2215 
2216 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2217 {
2218     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2219     /* some cases can be optimized here */
2220     if (arg2 == 0) {
2221         tcg_gen_mov_i64(ret, arg1);
2222     } else {
2223         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2224     }
2225 }
2226 
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at offset OFS,
 * leaving the remaining bits of ARG1 intact.
 */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* A full-width deposit is simply a move of ARG2. */
    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* When the field is confined to one 32-bit half, deposit into
           that half and copy the other half through unchanged. */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
        /* Field spans both halves: fall through to the generic path. */
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        /* A field abutting either end of the word can be formed with
           a single funnel-shift style extract2. */
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    /* Generic expansion: shift/mask ARG2 into position, clear the
       field in ARG1, then OR the pieces together. */
    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* Field reaches the top bit: the shift alone discards the
           excess high bits of ARG2. */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
2289 
/*
 * Deposit the low LEN bits of ARG at offset OFS into an otherwise
 * zero result: RET = (ARG & ((1 << LEN) - 1)) << OFS.
 */
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        /* Field reaches the top bit: the shift discards the rest. */
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field starts at bit 0: a mask suffices. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        /* Deposit into a zero constant. */
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* A field confined to one 32-bit half: deposit there and
               zero the other half. */
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: explicit mask, then shift into position. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}
2373 
/*
 * Extract LEN bits of ARG starting at bit OFS, zero-extending
 * the result into RET.
 */
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        /* Field abuts the top bit: a plain right shift. */
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field starts at bit 0: a plain mask. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        /* Shift the field to the top, then back down with zero fill. */
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
2455 
/*
 * Extract LEN bits of ARG starting at bit OFS, sign-extending
 * the result into RET.
 */
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        /* Field abuts the top bit: a plain arithmetic right shift. */
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Fields at bit 0 of common widths map to sign extensions. */
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally.  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    /* Otherwise shift the field to bit 0, then sign-extend it. */
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    /* Last resort: shift the field to the top and sign-shift it down. */
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}
2571 
2572 /*
2573  * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2574  * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2575  */
2576 void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
2577                           unsigned int ofs)
2578 {
2579     tcg_debug_assert(ofs <= 64);
2580     if (ofs == 0) {
2581         tcg_gen_mov_i64(ret, al);
2582     } else if (ofs == 64) {
2583         tcg_gen_mov_i64(ret, ah);
2584     } else if (al == ah) {
2585         tcg_gen_rotri_i64(ret, al, ofs);
2586     } else if (TCG_TARGET_HAS_extract2_i64) {
2587         tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
2588     } else {
2589         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2590         tcg_gen_shri_i64(t0, al, ofs);
2591         tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
2592         tcg_temp_free_i64(t0);
2593     }
2594 }
2595 
/*
 * Conditional move without branching: RET = (C1 COND C2) ? V1 : V2.
 */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Evaluate the 64-bit comparison into the 0/1 flag t0. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half on t0 != 0. */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Bitwise select: negate the 0/1 flag into an all-zeros
               or all-ones mask and merge the two values. */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Bitwise select via a negated setcond mask. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_negsetcond_i64(cond, t0, c1, c2);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2642 
2643 void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2644                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2645 {
2646     if (TCG_TARGET_HAS_add2_i64) {
2647         tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
2648     } else {
2649         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2650         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2651         tcg_gen_add_i64(t0, al, bl);
2652         tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
2653         tcg_gen_add_i64(rh, ah, bh);
2654         tcg_gen_add_i64(rh, rh, t1);
2655         tcg_gen_mov_i64(rl, t0);
2656         tcg_temp_free_i64(t0);
2657         tcg_temp_free_i64(t1);
2658     }
2659 }
2660 
2661 void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2662                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2663 {
2664     if (TCG_TARGET_HAS_sub2_i64) {
2665         tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
2666     } else {
2667         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2668         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2669         tcg_gen_sub_i64(t0, al, bl);
2670         tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
2671         tcg_gen_sub_i64(rh, ah, bh);
2672         tcg_gen_sub_i64(rh, rh, t1);
2673         tcg_gen_mov_i64(rl, t0);
2674         tcg_temp_free_i64(t0);
2675         tcg_temp_free_i64(t1);
2676     }
2677 }
2678 
2679 void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2680 {
2681     if (TCG_TARGET_HAS_mulu2_i64) {
2682         tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
2683     } else if (TCG_TARGET_HAS_muluh_i64) {
2684         TCGv_i64 t = tcg_temp_ebb_new_i64();
2685         tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2686         tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
2687         tcg_gen_mov_i64(rl, t);
2688         tcg_temp_free_i64(t);
2689     } else {
2690         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2691         tcg_gen_mul_i64(t0, arg1, arg2);
2692         gen_helper_muluh_i64(rh, arg1, arg2);
2693         tcg_gen_mov_i64(rl, t0);
2694         tcg_temp_free_i64(t0);
2695     }
2696 }
2697 
2698 void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2699 {
2700     if (TCG_TARGET_HAS_muls2_i64) {
2701         tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
2702     } else if (TCG_TARGET_HAS_mulsh_i64) {
2703         TCGv_i64 t = tcg_temp_ebb_new_i64();
2704         tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2705         tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
2706         tcg_gen_mov_i64(rl, t);
2707         tcg_temp_free_i64(t);
2708     } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
2709         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2710         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2711         TCGv_i64 t2 = tcg_temp_ebb_new_i64();
2712         TCGv_i64 t3 = tcg_temp_ebb_new_i64();
2713         tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2714         /* Adjust for negative inputs.  */
2715         tcg_gen_sari_i64(t2, arg1, 63);
2716         tcg_gen_sari_i64(t3, arg2, 63);
2717         tcg_gen_and_i64(t2, t2, arg2);
2718         tcg_gen_and_i64(t3, t3, arg1);
2719         tcg_gen_sub_i64(rh, t1, t2);
2720         tcg_gen_sub_i64(rh, rh, t3);
2721         tcg_gen_mov_i64(rl, t0);
2722         tcg_temp_free_i64(t0);
2723         tcg_temp_free_i64(t1);
2724         tcg_temp_free_i64(t2);
2725         tcg_temp_free_i64(t3);
2726     } else {
2727         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2728         tcg_gen_mul_i64(t0, arg1, arg2);
2729         gen_helper_mulsh_i64(rh, arg1, arg2);
2730         tcg_gen_mov_i64(rl, t0);
2731         tcg_temp_free_i64(t0);
2732     }
2733 }
2734 
2735 void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2736 {
2737     TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2738     TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2739     TCGv_i64 t2 = tcg_temp_ebb_new_i64();
2740     tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2741     /* Adjust for negative input for the signed arg1.  */
2742     tcg_gen_sari_i64(t2, arg1, 63);
2743     tcg_gen_and_i64(t2, t2, arg2);
2744     tcg_gen_sub_i64(rh, t1, t2);
2745     tcg_gen_mov_i64(rl, t0);
2746     tcg_temp_free_i64(t0);
2747     tcg_temp_free_i64(t1);
2748     tcg_temp_free_i64(t2);
2749 }
2750 
/* Signed minimum: ret = (a < b) ? a : b. */
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}
2755 
/* Unsigned minimum: ret = (a <u b) ? a : b. */
void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}
2760 
/* Signed maximum: ret = (a < b) ? b : a. */
void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}
2765 
/* Unsigned maximum: ret = (a <u b) ? b : a. */
void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
2770 
2771 void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
2772 {
2773     TCGv_i64 t = tcg_temp_ebb_new_i64();
2774 
2775     tcg_gen_sari_i64(t, a, 63);
2776     tcg_gen_xor_i64(ret, a, t);
2777     tcg_gen_sub_i64(ret, ret, t);
2778     tcg_temp_free_i64(t);
2779 }
2780 
2781 /* Size changing operations.  */
2782 
/* Extract the low 32 bits of a 64-bit value into an i32. */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit temps are register pairs; the low half is already i32. */
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /*
         * Without the explicit op, reinterpret the i64 temp as an i32
         * via a cast -- presumably safe on 64-bit hosts where the low
         * half occupies the same register.
         */
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}
2794 
2795 void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
2796 {
2797     if (TCG_TARGET_REG_BITS == 32) {
2798         tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
2799     } else if (TCG_TARGET_HAS_extr_i64_i32) {
2800         tcg_gen_op2(INDEX_op_extrh_i64_i32,
2801                     tcgv_i32_arg(ret), tcgv_i64_arg(arg));
2802     } else {
2803         TCGv_i64 t = tcg_temp_ebb_new_i64();
2804         tcg_gen_shri_i64(t, arg, 32);
2805         tcg_gen_mov_i32(ret, (TCGv_i32)t);
2806         tcg_temp_free_i64(t);
2807     }
2808 }
2809 
2810 void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
2811 {
2812     if (TCG_TARGET_REG_BITS == 32) {
2813         tcg_gen_mov_i32(TCGV_LOW(ret), arg);
2814         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2815     } else {
2816         tcg_gen_op2(INDEX_op_extu_i32_i64,
2817                     tcgv_i64_arg(ret), tcgv_i32_arg(arg));
2818     }
2819 }
2820 
2821 void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
2822 {
2823     if (TCG_TARGET_REG_BITS == 32) {
2824         tcg_gen_mov_i32(TCGV_LOW(ret), arg);
2825         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2826     } else {
2827         tcg_gen_op2(INDEX_op_ext_i32_i64,
2828                     tcgv_i64_arg(ret), tcgv_i32_arg(arg));
2829     }
2830 }
2831 
2832 void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
2833 {
2834     TCGv_i64 tmp;
2835 
2836     if (TCG_TARGET_REG_BITS == 32) {
2837         tcg_gen_mov_i32(TCGV_LOW(dest), low);
2838         tcg_gen_mov_i32(TCGV_HIGH(dest), high);
2839         return;
2840     }
2841 
2842     tmp = tcg_temp_ebb_new_i64();
2843     /* These extensions are only needed for type correctness.
2844        We may be able to do better given target specific information.  */
2845     tcg_gen_extu_i32_i64(tmp, high);
2846     tcg_gen_extu_i32_i64(dest, low);
2847     /* If deposit is available, use it.  Otherwise use the extra
2848        knowledge that we have of the zero-extensions above.  */
2849     if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
2850         tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
2851     } else {
2852         tcg_gen_shli_i64(tmp, tmp, 32);
2853         tcg_gen_or_i64(dest, dest, tmp);
2854     }
2855     tcg_temp_free_i64(tmp);
2856 }
2857 
2858 void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
2859 {
2860     if (TCG_TARGET_REG_BITS == 32) {
2861         tcg_gen_mov_i32(lo, TCGV_LOW(arg));
2862         tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
2863     } else {
2864         tcg_gen_extrl_i64_i32(lo, arg);
2865         tcg_gen_extrh_i64_i32(hi, arg);
2866     }
2867 }
2868 
/*
 * Split a 64-bit value into 32-bit halves held in i64 temps:
 * lo = arg & 0xffffffff, hi = arg >> 32 (logical shift).
 * NOTE(review): if lo aliases arg, the zero-extension clobbers arg
 * before hi is computed -- presumably callers avoid that aliasing.
 */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
2874 
/* Split a 128-bit value into its 64-bit low and high halves. */
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}
2880 
/* Build a 128-bit value from two 64-bit halves: ret = hi:lo. */
void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}
2886 
2887 void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
2888 {
2889     if (dst != src) {
2890         tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
2891         tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
2892     }
2893 }
2894 
2895 void tcg_gen_ld_i128(TCGv_i128 ret, TCGv_ptr base, tcg_target_long offset)
2896 {
2897     if (HOST_BIG_ENDIAN) {
2898         tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset);
2899         tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset + 8);
2900     } else {
2901         tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset);
2902         tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset + 8);
2903     }
2904 }
2905 
2906 void tcg_gen_st_i128(TCGv_i128 val, TCGv_ptr base, tcg_target_long offset)
2907 {
2908     if (HOST_BIG_ENDIAN) {
2909         tcg_gen_st_i64(TCGV128_HIGH(val), base, offset);
2910         tcg_gen_st_i64(TCGV128_LOW(val), base, offset + 8);
2911     } else {
2912         tcg_gen_st_i64(TCGV128_LOW(val), base, offset);
2913         tcg_gen_st_i64(TCGV128_HIGH(val), base, offset + 8);
2914     }
2915 }
2916 
2917 /* QEMU specific operations.  */
2918 
/*
 * Emit an exit_tb op returning (tb | idx) to the main loop.
 * tb == NULL with idx == 0 means "no chaining information".
 */
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        /* No TB: only the "no exit index" form is valid. */
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
2949 
/*
 * Emit a goto_tb op for direct TB chaining via exit slot idx.
 * In debug builds, records the slot so the matching tcg_gen_exit_tb
 * can verify pairing.
 */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
2964 
/*
 * Look up the next TB by guest state and jump to it indirectly,
 * or fall back to a plain exit when indirect chaining is disabled
 * (CF_NO_GOTO_PTR).
 */
void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        /* Indirect chaining disabled: return to the main loop instead. */
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    /* Helper resolves the current guest PC to a host code pointer. */
    gen_helper_lookup_tb_ptr(ptr, tcg_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}
2980