xref: /openbmc/qemu/tcg/tcg-op.c (revision 4ae7d11b70a840eec7aa27269093b15d04ebc84e)
1  /*
2   * Tiny Code Generator for QEMU
3   *
4   * Copyright (c) 2008 Fabrice Bellard
5   *
6   * Permission is hereby granted, free of charge, to any person obtaining a copy
7   * of this software and associated documentation files (the "Software"), to deal
8   * in the Software without restriction, including without limitation the rights
9   * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10   * copies of the Software, and to permit persons to whom the Software is
11   * furnished to do so, subject to the following conditions:
12   *
13   * The above copyright notice and this permission notice shall be included in
14   * all copies or substantial portions of the Software.
15   *
16   * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17   * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18   * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19   * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20   * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21   * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22   * THE SOFTWARE.
23   */
24  
25  #include "qemu/osdep.h"
26  #include "tcg/tcg.h"
27  #include "tcg/tcg-temp-internal.h"
28  #include "tcg/tcg-op-common.h"
29  #include "exec/translation-block.h"
30  #include "exec/plugin-gen.h"
31  #include "tcg-internal.h"
32  
33  
34  /*
35   * Encourage the compiler to tail-call to a function, rather than inlining.
36   * Minimizes code size across 99 bottles of beer on the wall.
37   */
38  #define NI  __attribute__((noinline))
39  
tcg_gen_op1(TCGOpcode opc,TCGArg a1)40  TCGOp * NI tcg_gen_op1(TCGOpcode opc, TCGArg a1)
41  {
42      TCGOp *op = tcg_emit_op(opc, 1);
43      op->args[0] = a1;
44      return op;
45  }
46  
tcg_gen_op2(TCGOpcode opc,TCGArg a1,TCGArg a2)47  TCGOp * NI tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
48  {
49      TCGOp *op = tcg_emit_op(opc, 2);
50      op->args[0] = a1;
51      op->args[1] = a2;
52      return op;
53  }
54  
tcg_gen_op3(TCGOpcode opc,TCGArg a1,TCGArg a2,TCGArg a3)55  TCGOp * NI tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
56  {
57      TCGOp *op = tcg_emit_op(opc, 3);
58      op->args[0] = a1;
59      op->args[1] = a2;
60      op->args[2] = a3;
61      return op;
62  }
63  
tcg_gen_op4(TCGOpcode opc,TCGArg a1,TCGArg a2,TCGArg a3,TCGArg a4)64  TCGOp * NI tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2,
65                         TCGArg a3, TCGArg a4)
66  {
67      TCGOp *op = tcg_emit_op(opc, 4);
68      op->args[0] = a1;
69      op->args[1] = a2;
70      op->args[2] = a3;
71      op->args[3] = a4;
72      return op;
73  }
74  
tcg_gen_op5(TCGOpcode opc,TCGArg a1,TCGArg a2,TCGArg a3,TCGArg a4,TCGArg a5)75  TCGOp * NI tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2,
76                         TCGArg a3, TCGArg a4, TCGArg a5)
77  {
78      TCGOp *op = tcg_emit_op(opc, 5);
79      op->args[0] = a1;
80      op->args[1] = a2;
81      op->args[2] = a3;
82      op->args[3] = a4;
83      op->args[4] = a5;
84      return op;
85  }
86  
tcg_gen_op6(TCGOpcode opc,TCGArg a1,TCGArg a2,TCGArg a3,TCGArg a4,TCGArg a5,TCGArg a6)87  TCGOp * NI tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
88                         TCGArg a4, TCGArg a5, TCGArg a6)
89  {
90      TCGOp *op = tcg_emit_op(opc, 6);
91      op->args[0] = a1;
92      op->args[1] = a2;
93      op->args[2] = a3;
94      op->args[3] = a4;
95      op->args[4] = a5;
96      op->args[5] = a6;
97      return op;
98  }
99  
100  /*
101   * With CONFIG_DEBUG_TCG, tcgv_*_tmp via tcgv_*_arg, is an out-of-line
102   * assertion check.  Force tail calls to avoid too much code expansion.
103   */
104  #ifdef CONFIG_DEBUG_TCG
105  # define DNI NI
106  #else
107  # define DNI
108  #endif
109  
tcg_gen_op1_i32(TCGOpcode opc,TCGv_i32 a1)110  static void DNI tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 a1)
111  {
112      tcg_gen_op1(opc, tcgv_i32_arg(a1));
113  }
114  
tcg_gen_op1_i64(TCGOpcode opc,TCGv_i64 a1)115  static void DNI tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 a1)
116  {
117      tcg_gen_op1(opc, tcgv_i64_arg(a1));
118  }
119  
tcg_gen_op1i(TCGOpcode opc,TCGArg a1)120  static TCGOp * DNI tcg_gen_op1i(TCGOpcode opc, TCGArg a1)
121  {
122      return tcg_gen_op1(opc, a1);
123  }
124  
tcg_gen_op2_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2)125  static void DNI tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2)
126  {
127      tcg_gen_op2(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2));
128  }
129  
tcg_gen_op2_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2)130  static void DNI tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2)
131  {
132      tcg_gen_op2(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2));
133  }
134  
tcg_gen_op3_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3)135  static void DNI tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 a1,
136                                  TCGv_i32 a2, TCGv_i32 a3)
137  {
138      tcg_gen_op3(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), tcgv_i32_arg(a3));
139  }
140  
tcg_gen_op3_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3)141  static void DNI tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 a1,
142                                  TCGv_i64 a2, TCGv_i64 a3)
143  {
144      tcg_gen_op3(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), tcgv_i64_arg(a3));
145  }
146  
tcg_gen_op3i_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGArg a3)147  static void DNI tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 a1,
148                                   TCGv_i32 a2, TCGArg a3)
149  {
150      tcg_gen_op3(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3);
151  }
152  
tcg_gen_op3i_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGArg a3)153  static void DNI tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 a1,
154                                   TCGv_i64 a2, TCGArg a3)
155  {
156      tcg_gen_op3(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3);
157  }
158  
tcg_gen_ldst_op_i32(TCGOpcode opc,TCGv_i32 val,TCGv_ptr base,TCGArg offset)159  static void DNI tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
160                                      TCGv_ptr base, TCGArg offset)
161  {
162      tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_ptr_arg(base), offset);
163  }
164  
tcg_gen_ldst_op_i64(TCGOpcode opc,TCGv_i64 val,TCGv_ptr base,TCGArg offset)165  static void DNI tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
166                                      TCGv_ptr base, TCGArg offset)
167  {
168      tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_ptr_arg(base), offset);
169  }
170  
tcg_gen_op4_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGv_i32 a4)171  static void DNI tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
172                                  TCGv_i32 a3, TCGv_i32 a4)
173  {
174      tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
175                  tcgv_i32_arg(a3), tcgv_i32_arg(a4));
176  }
177  
tcg_gen_op4_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3,TCGv_i64 a4)178  static void DNI tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
179                                  TCGv_i64 a3, TCGv_i64 a4)
180  {
181      tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
182                  tcgv_i64_arg(a3), tcgv_i64_arg(a4));
183  }
184  
tcg_gen_op4i_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGArg a4)185  static void DNI tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
186                                   TCGv_i32 a3, TCGArg a4)
187  {
188      tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
189                  tcgv_i32_arg(a3), a4);
190  }
191  
tcg_gen_op4i_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3,TCGArg a4)192  static void DNI tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
193                                   TCGv_i64 a3, TCGArg a4)
194  {
195      tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
196                  tcgv_i64_arg(a3), a4);
197  }
198  
tcg_gen_op4ii_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGArg a3,TCGArg a4)199  static TCGOp * DNI tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
200                                       TCGArg a3, TCGArg a4)
201  {
202      return tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3, a4);
203  }
204  
tcg_gen_op4ii_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGArg a3,TCGArg a4)205  static TCGOp * DNI tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
206                                       TCGArg a3, TCGArg a4)
207  {
208      return tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3, a4);
209  }
210  
tcg_gen_op5_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGv_i32 a4,TCGv_i32 a5)211  static void DNI tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
212                                  TCGv_i32 a3, TCGv_i32 a4, TCGv_i32 a5)
213  {
214      tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
215                  tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5));
216  }
217  
tcg_gen_op5_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3,TCGv_i64 a4,TCGv_i64 a5)218  static void DNI tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
219                                  TCGv_i64 a3, TCGv_i64 a4, TCGv_i64 a5)
220  {
221      tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
222                  tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5));
223  }
224  
tcg_gen_op5ii_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGArg a4,TCGArg a5)225  static void DNI tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
226                                    TCGv_i32 a3, TCGArg a4, TCGArg a5)
227  {
228      tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
229                  tcgv_i32_arg(a3), a4, a5);
230  }
231  
tcg_gen_op5ii_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3,TCGArg a4,TCGArg a5)232  static void DNI tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
233                                    TCGv_i64 a3, TCGArg a4, TCGArg a5)
234  {
235      tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
236                  tcgv_i64_arg(a3), a4, a5);
237  }
238  
tcg_gen_op6_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGv_i32 a4,TCGv_i32 a5,TCGv_i32 a6)239  static void DNI tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
240                                  TCGv_i32 a3, TCGv_i32 a4,
241                                  TCGv_i32 a5, TCGv_i32 a6)
242  {
243      tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
244                  tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5),
245                  tcgv_i32_arg(a6));
246  }
247  
tcg_gen_op6_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3,TCGv_i64 a4,TCGv_i64 a5,TCGv_i64 a6)248  static void DNI tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
249                                  TCGv_i64 a3, TCGv_i64 a4,
250                                  TCGv_i64 a5, TCGv_i64 a6)
251  {
252      tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
253                  tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5),
254                  tcgv_i64_arg(a6));
255  }
256  
tcg_gen_op6i_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGv_i32 a4,TCGv_i32 a5,TCGArg a6)257  static void DNI tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
258                                   TCGv_i32 a3, TCGv_i32 a4,
259                                   TCGv_i32 a5, TCGArg a6)
260  {
261      tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
262                  tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5), a6);
263  }
264  
tcg_gen_op6i_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3,TCGv_i64 a4,TCGv_i64 a5,TCGArg a6)265  static void DNI tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
266                                   TCGv_i64 a3, TCGv_i64 a4,
267                                   TCGv_i64 a5, TCGArg a6)
268  {
269      tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
270                  tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5), a6);
271  }
272  
tcg_gen_op6ii_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGv_i32 a4,TCGArg a5,TCGArg a6)273  static TCGOp * DNI tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
274                                       TCGv_i32 a3, TCGv_i32 a4,
275                                       TCGArg a5, TCGArg a6)
276  {
277      return tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
278                         tcgv_i32_arg(a3), tcgv_i32_arg(a4), a5, a6);
279  }
280  
281  /* Generic ops.  */
282  
void gen_set_label(TCGLabel *l)
{
    /* Mark @l as present and emit it at the current point in the stream. */
    l->present = 1;
    tcg_gen_op1(INDEX_op_set_label, label_arg(l));
}
288  
static void add_as_label_use(TCGLabel *l, TCGOp *op)
{
    /* Record @op in the list of branches that target @l. */
    TCGLabelUse *use = tcg_malloc(sizeof(*use));

    use->op = op;
    QSIMPLEQ_INSERT_TAIL(&l->branches, use, next);
}
296  
void tcg_gen_br(TCGLabel *l)
{
    /* Emit an unconditional branch to @l and register it as a use. */
    TCGOp *op = tcg_gen_op1(INDEX_op_br, label_arg(l));

    add_as_label_use(l, op);
}
301  
void tcg_gen_mb(TCGBar mb_type)
{
    /* Emit a memory barrier of kind @mb_type when one is required. */
#ifdef CONFIG_USER_ONLY
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single cpu we have i/o threads running in
     * parallel, and lack of memory order can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (!parallel) {
        return;
    }
    tcg_gen_op1(INDEX_op_mb, mb_type);
}
320  
tcg_gen_plugin_cb(unsigned from)321  void tcg_gen_plugin_cb(unsigned from)
322  {
323      tcg_gen_op1(INDEX_op_plugin_cb, from);
324  }
325  
void tcg_gen_plugin_mem_cb(TCGv_i64 addr, unsigned meminfo)
{
    /* Emit a plugin memory-access callback for @addr with @meminfo. */
    TCGArg a = tcgv_i64_arg(addr);

    tcg_gen_op2(INDEX_op_plugin_mem_cb, a, meminfo);
}
330  
331  /* 32 bit ops */
332  
void tcg_gen_discard_i32(TCGv_i32 arg)
{
    /* Declare that the current value of @arg is dead. */
    tcg_gen_op1_i32(INDEX_op_discard, arg);
}
337  
void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Copy @arg into @ret; a self-move emits nothing. */
    if (ret == arg) {
        return;
    }
    tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
}
344  
void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    /* Load the constant @arg into @ret via the constant-temp pool. */
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}
349  
void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 + arg2 */
    tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
}
354  
void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* ret = arg1 + arg2; adding zero degenerates to a move. */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
        return;
    }
    tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
}
364  
void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 - arg2 */
    tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
}
369  
void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    /* ret = arg1 - arg2; subtracting from zero is just negation. */
    if (arg1 == 0) {
        tcg_gen_neg_i32(ret, arg2);
        return;
    }
    tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
}
378  
void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* ret = arg1 - arg2, reusing the addi strength reductions. */
    tcg_gen_addi_i32(ret, arg1, -arg2);
}
383  
void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* ret = -arg */
    tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
}
388  
void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 & arg2 */
    tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
}
393  
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* ret = arg1 & arg2, strength-reducing well-known masks. */
    switch (arg2) {
    case 0:
        /* x & 0 == 0 */
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        /* x & -1 == x */
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Emit the opcode directly: don't recurse with tcg_gen_ext8u_i32. */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}
421  
void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 | arg2 */
    tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
}
426  
void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* ret = arg1 | arg2, folding the all-ones and zero masks. */
    if (arg2 == -1) {
        /* x | -1 == -1 */
        tcg_gen_movi_i32(ret, -1);
        return;
    }
    if (arg2 == 0) {
        /* x | 0 == x */
        tcg_gen_mov_i32(ret, arg1);
        return;
    }
    tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
}
438  
void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 ^ arg2 */
    tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
}
443  
void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* ret = arg1 ^ arg2, folding the zero and all-ones cases. */
    if (arg2 == 0) {
        /* x ^ 0 == x */
        tcg_gen_mov_i32(ret, arg1);
        return;
    }
    if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* x ^ -1 == ~x; emit directly, don't recurse with tcg_gen_not_i32. */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
        return;
    }
    tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
}
456  
void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* ret = ~arg, falling back to xor with -1 without a native not. */
    if (TCG_TARGET_HAS_not_i32) {
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
        return;
    }
    tcg_gen_xori_i32(ret, arg, -1);
}
465  
void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 << arg2 */
    tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
}
470  
void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* ret = arg1 << arg2, with the shift count restricted to [0, 31]. */
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
        return;
    }
    tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
}
480  
void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 >> arg2 (logical) */
    tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
}
485  
void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* ret = arg1 >> arg2 (logical), shift count restricted to [0, 31]. */
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
        return;
    }
    tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
}
495  
void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 >> arg2 (arithmetic) */
    tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
}
500  
void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* ret = arg1 >> arg2 (arithmetic), shift count restricted to [0, 31]. */
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
        return;
    }
    tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
}
510  
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    /* Branch to @l if "arg1 cond arg2"; fold the trivial conditions. */
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
        return;
    }
    if (cond == TCG_COND_NEVER) {
        /* Never taken: emit nothing. */
        return;
    }
    TCGOp *op = tcg_gen_op4ii_i32(INDEX_op_brcond_i32,
                                  arg1, arg2, cond, label_arg(l));
    add_as_label_use(l, op);
}
521  
void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    /*
     * Branch to @l if "arg1 cond arg2".  Handle ALWAYS/NEVER here so
     * that no constant temp is allocated for the trivial conditions.
     */
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}
530  
void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = (arg1 cond arg2) ? 1 : 0, folding the trivial conditions. */
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
        return;
    }
    if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
        return;
    }
    tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
}
542  
void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    /* ret = (arg1 cond arg2) ? 1 : 0 with a constant second operand. */
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
548  
void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
                            TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = (arg1 cond arg2) ? -1 : 0, folding the trivial conditions. */
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, -1);
        return;
    }
    if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
        return;
    }
    if (TCG_TARGET_HAS_negsetcond_i32) {
        tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
        return;
    }
    /* Fallback: compute the 0/1 result, then negate it to 0/-1. */
    tcg_gen_setcond_i32(cond, ret, arg1, arg2);
    tcg_gen_neg_i32(ret, ret);
}
563  
void tcg_gen_negsetcondi_i32(TCGCond cond, TCGv_i32 ret,
                             TCGv_i32 arg1, int32_t arg2)
{
    /* ret = (arg1 cond arg2) ? -1 : 0 with a constant second operand. */
    tcg_gen_negsetcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
569  
void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 * arg2 */
    tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
}
574  
void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* ret = arg1 * arg2; fold zero and power-of-two multipliers. */
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
        return;
    }
    if (is_power_of_2(arg2)) {
        /* Replace multiplication by 2**n with a left shift by n. */
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
        return;
    }
    tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
}
585  
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 / arg2 (signed), by whatever means the host provides. */
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* div2 divides the (high:low) pair; sign-extend arg1 into high. */
        TCGv_i32 hi = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(hi, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, hi, arg1, hi, arg2);
        tcg_temp_free_i32(hi);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
599  
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 % arg2 (signed), by whatever means the host provides. */
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i32 q = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, q, arg1, arg2);
        tcg_gen_mul_i32(q, q, arg2);
        tcg_gen_sub_i32(ret, arg1, q);
        tcg_temp_free_i32(q);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* div2 yields quotient and remainder; keep the remainder in ret. */
        TCGv_i32 hi = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(hi, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, hi, ret, arg1, hi, arg2);
        tcg_temp_free_i32(hi);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
619  
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 / arg2 (unsigned), by whatever means the host provides. */
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* divu2 divides the (high:low) pair; the unsigned high part is 0. */
        TCGv_i32 discard = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, discard, arg1, zero, arg2);
        tcg_temp_free_i32(discard);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
633  
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 % arg2 (unsigned), by whatever means the host provides. */
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i32 q = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, q, arg1, arg2);
        tcg_gen_mul_i32(q, q, arg2);
        tcg_gen_sub_i32(ret, arg1, q);
        tcg_temp_free_i32(q);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* divu2 yields quotient and remainder; keep the remainder in ret. */
        TCGv_i32 discard = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, discard, ret, arg1, zero, arg2);
        tcg_temp_free_i32(discard);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
653  
void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 & ~arg2 */
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
        return;
    }
    /* Expand as not + and on hosts without a native andc. */
    TCGv_i32 inv = tcg_temp_ebb_new_i32();
    tcg_gen_not_i32(inv, arg2);
    tcg_gen_and_i32(ret, arg1, inv);
    tcg_temp_free_i32(inv);
}
665  
void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = ~(arg1 ^ arg2) */
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
        return;
    }
    tcg_gen_xor_i32(ret, arg1, arg2);
    tcg_gen_not_i32(ret, ret);
}
675  
void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = ~(arg1 & arg2) */
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
        return;
    }
    tcg_gen_and_i32(ret, arg1, arg2);
    tcg_gen_not_i32(ret, ret);
}
685  
void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = ~(arg1 | arg2) */
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
        return;
    }
    tcg_gen_or_i32(ret, arg1, arg2);
    tcg_gen_not_i32(ret, ret);
}
695  
void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = arg1 | ~arg2 */
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
        return;
    }
    /* Expand as not + or on hosts without a native orc. */
    TCGv_i32 inv = tcg_temp_ebb_new_i32();
    tcg_gen_not_i32(inv, arg2);
    tcg_gen_or_i32(ret, arg1, inv);
    tcg_temp_free_i32(inv);
}
707  
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = clz32(arg1), or arg2 when arg1 == 0. */
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        /*
         * Widen to 64 bits: the zero-extended value has 32 extra leading
         * zeros, so bias the default by +32 and the result back by -32.
         */
        TCGv_i64 v = tcg_temp_ebb_new_i64();
        TCGv_i64 d = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(v, arg1);
        tcg_gen_extu_i32_i64(d, arg2);
        tcg_gen_addi_i64(d, d, 32);
        tcg_gen_clz_i64(v, v, d);
        tcg_gen_extrl_i64_i32(ret, v);
        tcg_temp_free_i64(v);
        tcg_temp_free_i64(d);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}
727  
void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    /* ret = clz32(arg1), or the constant @arg2 when arg1 == 0. */
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}
732  
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* ret = ctz32(arg1), or arg2 when arg1 == 0. */
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        /* Widen to 64 bits; ctz of the zero-extended value is unchanged. */
        TCGv_i64 v = tcg_temp_ebb_new_i64();
        TCGv_i64 d = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(v, arg1);
        tcg_gen_extu_i32_i64(d, arg2);
        tcg_gen_ctz_i64(v, v, d);
        tcg_gen_extrl_i64_i32(ret, v);
        tcg_temp_free_i64(v);
        tcg_temp_free_i64(d);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            /* ctz(x) == popcount((x - 1) & ~x) for x != 0. */
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        /* Select the default value when arg1 == 0. */
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
770  
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    /* ret = ctz32(arg1), or the constant @arg2 when arg1 == 0. */
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /*
         * With a default of 32, popcount((x - 1) & ~x) is already correct
         * for x == 0, so no movcond fixup is needed.
         */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
784  
/* Count leading redundant sign bits, i.e. sign-bit copies below bit 31. */
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        /* XOR with a broadcast of the sign turns redundant sign bits into
           leading zeros; clz then counts them plus bit 31, hence the -1.  */
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
798  
/* Population count: number of set bits in ARG1. */
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        /* Zero-extend to 64 bits, count there, truncate the result.  */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}
813  
/* Rotate ARG1 left by ARG2 bits. */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        /* Compose from shifts: (arg1 << arg2) | (arg1 >> (32 - arg2)). */
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
831  
/* Rotate ARG1 left by the immediate ARG2, which must be in [0, 32). */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        /* Expand with immediate shifts; arg2 != 0 here, so both shift
           counts are valid.  */
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
851  
/* Rotate ARG1 right by ARG2 bits. */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        /* Compose from shifts: (arg1 >> arg2) | (arg1 << (32 - arg2)). */
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
869  
/* Rotate ARG1 right by the immediate ARG2, which must be in [0, 32). */
void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 != 0) {
        /* A right rotate by N is a left rotate by 32 - N.  */
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    } else {
        tcg_gen_mov_i32(ret, arg1);
    }
}
880  
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at bit position OFS:
 * ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs).
 */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* A full-width deposit is simply a move of arg2.  This also keeps
       the mask computation below free of a 1u << 32 shift.  */
    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        /* Deposit into the most significant bits.  */
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        /* Deposit into the least significant bits.  */
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    /* Generic expansion: clear the field in arg1, OR in the masked arg2.  */
    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* The field abuts bit 31; the shift itself discards high bits.  */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
928  
/*
 * Deposit ARG into a zero background:
 * ret = (arg & ((1u << len) - 1)) << ofs.
 */
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        /* The shift discards everything above the field.  */
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: mask then shift.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
985  
/* Extract LEN bits from ARG starting at bit OFS, zero-extended. */
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* The field reaches bit 31: a plain right shift suffices.  */
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        /* Left-align the field then shift back down, zero-filling.  */
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
1042  
/* Extract LEN bits from ARG starting at bit OFS, sign-extended. */
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* The field reaches bit 31: an arithmetic shift suffices.  */
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    /* Last resort: left-align the field, then shift back arithmetically.  */
    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
1110  
1111  /*
1112   * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
1113   * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
1114   */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        /* Equal halves degenerate to a rotate.  */
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        /* Fallback: high bits of al, then deposit the low OFS bits
           of ah into the top of the result.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
1134  
/* ret = cond(c1, c2) ? v1 : v2; constant-fold the degenerate conditions. */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    switch (cond) {
    case TCG_COND_ALWAYS:
        tcg_gen_mov_i32(ret, v1);
        break;
    case TCG_COND_NEVER:
        tcg_gen_mov_i32(ret, v2);
        break;
    default:
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
        break;
    }
}
1146  
/* 64-bit add of 32-bit pairs: rh:rl = ah:al + bh:bl, carry included. */
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback: concatenate into i64 temporaries, add, and split.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1163  
/* 64-bit subtract of 32-bit pairs: rh:rl = ah:al - bh:bl, borrow included. */
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback: concatenate into i64 temporaries, subtract, and split.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1180  
/* Unsigned 32x32 -> 64 multiply: rh:rl = arg1 * arg2. */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        /* Compute low and high halves separately; stage the low half in a
           temp so that rl may alias the inputs.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        /* Widen, multiply in 64 bits, and split the product.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        /* 32-bit hosts are required to implement mulu2 or muluh.  */
        qemu_build_not_reached();
    }
}
1204  
/* Signed 32x32 -> 64 multiply: rh:rl = arg1 * arg2. */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        /* Stage the low half in a temp so that rl may alias the inputs.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        /* The signed high part equals the unsigned high part minus
           (arg1 < 0 ? arg2 : 0) minus (arg2 < 0 ? arg1 : 0); the low
           part is identical.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* Sign-extend, multiply in 64 bits, and split the product.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1244  
/* Mixed-sign 32x32 -> 64 multiply: rh:rl = (signed)arg1 * (unsigned)arg2. */
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        /* High part = unsigned high part - (arg1 < 0 ? arg2 : 0).  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        /* Sign-extend arg1, zero-extend arg2, multiply in 64 bits.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1271  
/* Sign-extend the low 8 bits of ARG to 32 bits. */
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        /* Shift the byte to the top, then arithmetic-shift back down.  */
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}
1281  
/* Sign-extend the low 16 bits of ARG to 32 bits. */
void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        /* Shift the halfword to the top, then arithmetic-shift back down.  */
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}
1291  
/* Zero-extend the low 8 bits of ARG to 32 bits. */
void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}
1300  
/* Zero-extend the low 16 bits of ARG to 32 bits. */
void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}
1309  
1310  /*
1311   * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
1312   *
1313   * Byte pattern: xxab -> yyba
1314   *
1315   * With TCG_BSWAP_IZ, x == zero, else undefined.
1316   * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1317   */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        /* Open-coded expansion; the diagrams track one byte per letter,
           with '.' for zero, 's' for sign, and 'x' for unknown.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

                                            /* arg = ..ab (IZ) xxab (!IZ) */
        tcg_gen_shri_i32(t0, arg, 8);       /*  t0 = ...a (IZ) .xxa (!IZ) */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);      /*  t0 = ...a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);  /*  t1 = b... */
            tcg_gen_sari_i32(t1, t1, 16);   /*  t1 = ssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);     /*  t1 = ...b */
            tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = ..b. */
        } else {
            tcg_gen_shli_i32(t1, arg, 8);   /*  t1 = xab. */
        }

        tcg_gen_or_i32(ret, t0, t1);        /* ret = ..ba (OZ) */
                                            /*     = ssba (OS) */
                                            /*     = xaba (no flag) */
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1352  
1353  /*
1354   * bswap32_i32: 32-bit byte swap on a 32-bit value.
1355   *
1356   * Byte pattern: abcd -> dcba
1357   */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        /* Swap adjacent bytes with the 0x00ff00ff mask, then swap the
           two halfwords with a pair of 16-bit shifts.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1382  
1383  /*
1384   * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
1385   *
1386   * Byte pattern: abcd -> cdab
1387   */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    /* Rotating by half the width exchanges the two halves exactly.  */
    tcg_gen_rotli_i32(ret, arg, 16);
}
1393  
/* ret = min(a, b), signed comparison. */
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}
1398  
/* ret = min(a, b), unsigned comparison. */
void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}
1403  
/* ret = max(a, b), signed comparison. */
void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}
1408  
/* ret = max(a, b), unsigned comparison. */
void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
1413  
/* ret = |a|, using the branch-free sign-mask identity. */
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    /* t = (a < 0 ? -1 : 0); (a ^ t) - t negates a exactly when t == -1.  */
    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
1423  
/* Load an 8-bit value from arg2 + offset, zero-extended to 32 bits. */
void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
}
1428  
/* Load an 8-bit value from arg2 + offset, sign-extended to 32 bits. */
void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
}
1433  
/* Load a 16-bit value from arg2 + offset, zero-extended to 32 bits. */
void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
}
1438  
/* Load a 16-bit value from arg2 + offset, sign-extended to 32 bits. */
void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
}
1443  
/* Load a full 32-bit value from arg2 + offset. */
void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
}
1448  
/* Store the low 8 bits of arg1 to arg2 + offset. */
void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
}
1453  
/* Store the low 16 bits of arg1 to arg2 + offset. */
void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
}
1458  
/* Store the full 32-bit arg1 to arg2 + offset. */
void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
}
1463  
1464  
1465  /* 64-bit ops */
1466  
/* Discard ARG: its current value is no longer needed. */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op1_i64(INDEX_op_discard, arg);
    } else {
        /* On 32-bit hosts an i64 is a pair of i32 halves.  */
        tcg_gen_discard_i32(TCGV_LOW(arg));
        tcg_gen_discard_i32(TCGV_HIGH(arg));
    }
}
1476  
/* ret = arg; a self-move is elided. */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (ret == arg) {
        return;
    }
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
    } else {
        TCGTemp *ts = tcgv_i64_temp(arg);

        /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
        if (ts->kind == TEMP_CONST) {
            tcg_gen_movi_i64(ret, ts->val);
        } else {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
        }
    }
}
1496  
/* ret = the 64-bit constant arg. */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
    } else {
        /* Split the constant into its low and high 32-bit halves.  */
        tcg_gen_movi_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
    }
}
1506  
/* Load an 8-bit value from arg2 + offset, zero-extended to 64 bits. */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
    } else {
        /* 32-bit host: load into the low half, zero the high half.  */
        tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1516  
/* Load an 8-bit value from arg2 + offset, sign-extended to 64 bits. */
void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
    } else {
        /* 32-bit host: sign-extended load, then replicate the sign.  */
        tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}
1526  
/* Load a 16-bit value from arg2 + offset, zero-extended to 64 bits. */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
    } else {
        /* 32-bit host: load into the low half, zero the high half.  */
        tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1536  
/* Load a 16-bit value from arg2 + offset, sign-extended to 64 bits. */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
    } else {
        /* 32-bit host: sign-extended load, then replicate the sign.  */
        tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}
1546  
/* Load a 32-bit value from arg2 + offset, zero-extended to 64 bits. */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
    } else {
        /* 32-bit host: load into the low half, zero the high half.  */
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1556  
/* Load a 32-bit value from arg2 + offset, sign-extended to 64 bits. */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
    } else {
        /* 32-bit host: load the low half, then replicate its sign.  */
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}
1566  
/* Load a full 64-bit value from arg2 + offset. */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /*
     * For 32-bit host, since arg2 and ret have different types,
     * they cannot be the same temporary -- no chance of overlap.
     */
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        /* Big-endian host keeps the high half at the lower address.  */
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
    }
}
1583  
/* Store the low 8 bits of arg1 to arg2 + offset. */
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
    } else {
        /* Only the low half contributes to a narrow store.  */
        tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
    }
}
1592  
/* Store the low 16 bits of arg1 to arg2 + offset. */
void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
    } else {
        /* Only the low half contributes to a narrow store.  */
        tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
    }
}
1601  
/* Store the low 32 bits of arg1 at arg2+offset. */
void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Only the low half contributes to a word store. */
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    } else {
        tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
    }
}
1610  
/* Store the full 64-bit value arg1 at arg2+offset. */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Pick per-half offsets according to host byte order. */
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2,
                       offset + (HOST_BIG_ENDIAN ? 4 : 0));
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2,
                       offset + (HOST_BIG_ENDIAN ? 0 : 4));
    } else {
        tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
    }
}
1623  
/* ret = arg1 + arg2 (64-bit). */
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Double-word add with carry between the halves. */
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    } else {
        tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
    }
}
1633  
/* ret = arg1 - arg2 (64-bit). */
void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Double-word subtract with borrow between the halves. */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    } else {
        tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
    }
}
1643  
/* ret = arg1 & arg2 (64-bit). */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Bitwise ops operate on each half independently. */
        tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else {
        tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
    }
}
1653  
/* ret = arg1 | arg2 (64-bit). */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Bitwise ops operate on each half independently. */
        tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else {
        tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
    }
}
1663  
/* ret = arg1 ^ arg2 (64-bit). */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Bitwise ops operate on each half independently. */
        tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else {
        tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
    }
}
1673  
/* ret = arg1 << arg2 (64-bit, variable shift count). */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* No inline double-word variable shift; call the helper. */
        gen_helper_shl_i64(ret, arg1, arg2);
    } else {
        tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
    }
}
1682  
/* ret = arg1 >> arg2 (64-bit logical, variable shift count). */
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* No inline double-word variable shift; call the helper. */
        gen_helper_shr_i64(ret, arg1, arg2);
    } else {
        tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
    }
}
1691  
/* ret = arg1 >> arg2 (64-bit arithmetic, variable shift count). */
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* No inline double-word variable shift; call the helper. */
        gen_helper_sar_i64(ret, arg1, arg2);
    } else {
        tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
    }
}
1700  
/*
 * ret = arg1 * arg2 (64-bit).
 *
 * On a 32-bit host the product is built from 32-bit halves:
 *   (aH:aL) * (bH:bL) mod 2^64
 *     = aL*bL + ((aL*bH + aH*bL) << 32)
 * aH*bH only affects bits >= 64 and so is dropped.
 */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
        return;
    }

    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    /* Full 64-bit product of the two low halves. */
    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    /* Fold both cross products into the high half. */
    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    /* Accumulate into a temp so that ret may alias arg1/arg2. */
    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
1727  
/* ret = arg1 + arg2 (64-bit, immediate addend). */
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* Adding zero degenerates to a move. */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
        return;
    }
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    } else {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1741  
/* ret = arg1 - arg2 (64-bit, immediate minuend). */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    /* Subtracting from zero is a negation. */
    if (arg1 == 0) {
        tcg_gen_neg_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    } else {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    }
}
1754  
/* ret = arg1 - arg2 (64-bit, immediate subtrahend). */
void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /*
     * Negate in unsigned arithmetic: -INT64_MIN overflows int64_t,
     * which is undefined behavior unless built with -fwrapv.  The
     * unsigned negation has the same bit pattern for all values.
     */
    tcg_gen_addi_i64(ret, arg1, -(uint64_t)arg2);
}
1759  
/* ret = -arg (64-bit two's complement negation). */
void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Negation is 0 - arg, done as a double-word subtract. */
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         zero, zero, TCGV_LOW(arg), TCGV_HIGH(arg));
    } else {
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
    }
}
1770  
/* ret = arg1 & arg2 (64-bit, immediate mask), with strength reduction. */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Apply each half of the mask to the matching half. */
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* Mask of all zeros: result is constant zero. */
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        /* Mask of all ones: result is the input unchanged. */
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        /* Likewise, emit ext16u directly to avoid mutual recursion. */
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        /* Likewise, emit ext32u directly to avoid mutual recursion. */
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    /* General case: materialize the mask as a constant. */
    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}
1810  
/* ret = arg1 | arg2 (64-bit, immediate operand), with strength reduction. */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Apply each half of the immediate to the matching half. */
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* OR with all-ones saturates; OR with zero is a move. */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
        return;
    }
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
        return;
    }
    tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
}
1827  
/* ret = arg1 ^ arg2 (64-bit, immediate operand), with strength reduction. */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Apply each half of the immediate to the matching half. */
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* XOR with zero is a move. */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
        return;
    }
    if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Emit the opcode directly; tcg_gen_not_i64 would recurse here. */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
        return;
    }
    tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
}
1845  
/*
 * Common implementation of the 32-bit-host double-word shifts by an
 * immediate count: shl (right=0), shr (right=1, arith=0), and
 * sar (right=1, arith=1).
 */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        /* Shift by zero: just copy both halves. */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /*
         * Shift of a word or more: one output half comes entirely from
         * the opposite input half, shifted by the remainder; the other
         * is filled with zero or sign bits.
         */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        /*
         * Right shift by 1..31: the low result mixes bits of both input
         * halves; extract2 does this in one op when available.
         */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        /* High half is a plain 32-bit shift, arithmetic or logical. */
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        /* Left shift by 1..31: the high result mixes both input halves. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        /* Write the low half last, in case ret aliases arg1. */
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
1895  
/* ret = arg1 << arg2 (64-bit, immediate count 0..63). */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
        return;
    }
    /* Shift by zero is a move; otherwise emit with a constant count. */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1907  
/* ret = arg1 >> arg2 (64-bit logical, immediate count 0..63). */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
        return;
    }
    /* Shift by zero is a move; otherwise emit with a constant count. */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1919  
/* ret = arg1 >> arg2 (64-bit arithmetic, immediate count 0..63). */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
        return;
    }
    /* Shift by zero is a move; otherwise emit with a constant count. */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1931  
/* Branch to label l if (arg1 cond arg2) holds, for 64-bit operands. */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        /* Unconditional: degrade to a plain branch. */
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* TCG_COND_NEVER emits nothing at all. */
        TCGOp *op;
        if (TCG_TARGET_REG_BITS == 32) {
            /* Compare the two register pairs with brcond2. */
            op = tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                                   TCGV_HIGH(arg1), TCGV_LOW(arg2),
                                   TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            op = tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                                   label_arg(l));
        }
        /* Record this op as a user of the label for later fixup. */
        add_as_label_use(l, op);
    }
}
1949  
/* Branch to label l if (arg1 cond arg2) holds, with an immediate arg2. */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        /* 64-bit host: materialize the constant and reuse brcond. */
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* 32-bit host: compare the pair against two constant halves. */
        TCGOp *op = tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                                      TCGV_LOW(arg1), TCGV_HIGH(arg1),
                                      tcg_constant_i32(arg2),
                                      tcg_constant_i32(arg2 >> 32),
                                      cond, label_arg(l));
        /* Record this op as a user of the label for later fixup. */
        add_as_label_use(l, op);
    }
}
1965  
/* ret = (arg1 cond arg2) ? 1 : 0, for 64-bit operands. */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Constant-fold trivial conditions. */
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
        return;
    }
    if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
        return;
    }
    if (TCG_TARGET_REG_BITS == 32) {
        /* setcond2 produces a 32-bit 0/1; zero the high half. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
    }
}
1984  
/* ret = (arg1 cond arg2) ? 1 : 0, with an immediate arg2. */
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        /* Materialize the constant and reuse the register form. */
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
        return;
    }
    /* Constant-fold trivial conditions. */
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
        return;
    }
    if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
        return;
    }
    /* setcond2 against the two constant halves; zero the high half. */
    tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                     TCGV_LOW(arg1), TCGV_HIGH(arg1),
                     tcg_constant_i32(arg2),
                     tcg_constant_i32(arg2 >> 32), cond);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
2002  
/* ret = (arg1 cond arg2) ? -1 : 0, with an immediate arg2. */
void tcg_gen_negsetcondi_i64(TCGCond cond, TCGv_i64 ret,
                             TCGv_i64 arg1, int64_t arg2)
{
    /* Materialize the constant and defer to the register form. */
    tcg_gen_negsetcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
}
2008  
/* ret = (arg1 cond arg2) ? -1 : 0, for 64-bit operands. */
void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
                            TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i64) {
        /* Direct backend support; checked before the 32-bit split. */
        tcg_gen_op4i_i64(INDEX_op_negsetcond_i64, ret, arg1, arg2, cond);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Compute the 0/1 result, negate it, and copy into both halves. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
        tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
    } else {
        /* Generic fallback: setcond then negate 0/1 to 0/-1. */
        tcg_gen_setcond_i64(cond, ret, arg1, arg2);
        tcg_gen_neg_i64(ret, ret);
    }
}
2029  
/* ret = arg1 * arg2 (64-bit, immediate multiplier), with strength reduction. */
void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* Multiplying by zero yields a constant zero. */
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
        return;
    }
    /* A power-of-two multiplier reduces to a left shift. */
    if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
        return;
    }
    tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
}
2040  
/* ret = arg1 / arg2 (signed 64-bit division). */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /*
         * Backend provides only the double-word divide: feed it arg1
         * sign-extended to 128 bits (t0 holds the high word).
         */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        /* No backend support at all: call the out-of-line helper. */
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
2054  
/* ret = arg1 % arg2 (signed 64-bit remainder). */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* Synthesize: rem = arg1 - (arg1 / arg2) * arg2. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /*
         * Double-word divide with arg1 sign-extended; the remainder is
         * the second output (written to ret), the quotient discarded.
         */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        /* No backend support at all: call the out-of-line helper. */
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
2074  
/* ret = arg1 / arg2 (unsigned 64-bit division). */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /*
         * Backend provides only the double-word divide: feed it arg1
         * zero-extended to 128 bits (constant zero high word).
         */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, zero, arg2);
        tcg_temp_free_i64(t0);
    } else {
        /* No backend support at all: call the out-of-line helper. */
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
2088  
/* ret = arg1 % arg2 (unsigned 64-bit remainder). */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* Synthesize: rem = arg1 - (arg1 / arg2) * arg2. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /*
         * Double-word divide with arg1 zero-extended; the remainder is
         * the second output (written to ret), the quotient discarded.
         */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i64(t0);
    } else {
        /* No backend support at all: call the out-of-line helper. */
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
2108  
/* Sign-extend the low 8 bits of arg into the 64-bit ret. */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low word, then copy its sign to the high. */
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }
    if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
        return;
    }
    /* Fallback: shift the byte to the top, arithmetic-shift back. */
    tcg_gen_shli_i64(ret, arg, 56);
    tcg_gen_sari_i64(ret, ret, 56);
}
2121  
/* Sign-extend the low 16 bits of arg into the 64-bit ret. */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low word, then copy its sign to the high. */
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }
    if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
        return;
    }
    /* Fallback: shift the halfword to the top, arithmetic-shift back. */
    tcg_gen_shli_i64(ret, arg, 48);
    tcg_gen_sari_i64(ret, ret, 48);
}
2134  
/* Sign-extend the low 32 bits of arg into the 64-bit ret. */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the low word, then fill the high word with its sign. */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }
    if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
        return;
    }
    /* Fallback: shift the word to the top, arithmetic-shift back. */
    tcg_gen_shli_i64(ret, arg, 32);
    tcg_gen_sari_i64(ret, ret, 32);
}
2147  
/* Zero-extend the low 8 bits of arg into the 64-bit ret. */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low word; the high word becomes zero. */
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        return;
    }
    if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
        return;
    }
    /* Fallback: mask to the low byte. */
    tcg_gen_andi_i64(ret, arg, 0xffu);
}
2159  
/* Zero-extend the low 16 bits of arg into the 64-bit ret. */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low word; the high word becomes zero. */
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        return;
    }
    if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
        return;
    }
    /* Fallback: mask to the low halfword. */
    tcg_gen_andi_i64(ret, arg, 0xffffu);
}
2171  
/* Zero-extend the low 32 bits of arg into the 64-bit ret. */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the low word; the high word becomes zero. */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        return;
    }
    if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
        return;
    }
    /* Fallback: mask to the low word. */
    tcg_gen_andi_i64(ret, arg, 0xffffffffu);
}
2183  
2184  /*
2185   * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
2186   *
2187   * Byte pattern: xxxxxxxxab -> yyyyyyyyba
2188   *
2189   * With TCG_BSWAP_IZ, x == zero, else undefined.
2190   * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
2191   */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap within the low word; the high word is sign or zero fill. */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        /* Fallback: assemble the two bytes with shifts and an OR. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

                                            /* arg = ......ab or xxxxxxab */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .......a or .xxxxxxa */
        if (!(flags & TCG_BSWAP_IZ)) {
            /* Input not known zero-extended: clear the junk above byte a. */
            tcg_gen_ext8u_i64(t0, t0);      /*  t0 = .......a */
        }

        if (flags & TCG_BSWAP_OS) {
            /* Place b at bit 8 with sign bits above it. */
            tcg_gen_shli_i64(t1, arg, 56);  /*  t1 = b....... */
            tcg_gen_sari_i64(t1, t1, 48);   /*  t1 = ssssssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            /* Place b at bit 8 with zeros above it. */
            tcg_gen_ext8u_i64(t1, arg);     /*  t1 = .......b */
            tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = ......b. */
        } else {
            /* No output guarantee: high bits may hold garbage. */
            tcg_gen_shli_i64(t1, arg, 8);   /*  t1 = xxxxxab. */
        }

        tcg_gen_or_i64(ret, t0, t1);        /* ret = ......ba (OZ) */
                                            /*       ssssssba (OS) */
                                            /*       xxxxxaba (no flag) */
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2233  
2234  /*
2235   * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
2236   *
2237   * Byte pattern: xxxxabcd -> yyyydcba
2238   *
2239   * With TCG_BSWAP_IZ, x == zero, else undefined.
2240   * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
2241   */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap within the low word; the high word is sign or zero fill. */
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
    } else {
        /* Fallback: swap byte pairs with a mask, then swap the pairs. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        /* Swap the 16-bit halves, choosing sign or zero for the top. */
        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba (OS) */
                                            /*       ....dcba (else) */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2282  
2283  /*
2284   * bswap64_i64: 64-bit byte swap on a 64-bit value.
2285   *
2286   * Byte pattern: abcdefgh -> hgfedcba
2287   */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap bytes within each word, then exchange the two words. */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
    } else {
        /*
         * Fallback: three swap stages of doubling width --
         * bytes, then 16-bit halfwords, then 32-bit words.
         */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}
2332  
2333  /*
2334   * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
2335   * See also include/qemu/bitops.h, hswap64.
2336   *
2337   * Byte pattern: abcdefgh -> ghefcdab
2338   */
void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /*
     * Rotate by 32 to swap the words, then swap the halfwords within
     * each word using a mask and two 16-bit shifts.
     */
    uint64_t m = 0x0000ffff0000ffffull;
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
    tcg_gen_rotli_i64(t1, arg, 32);     /*  t1 = efghabcd */
    tcg_gen_andi_i64(t0, t1, m);        /*  t0 = ..gh..cd */
    tcg_gen_shli_i64(t0, t0, 16);       /*  t0 = gh..cd.. */
    tcg_gen_shri_i64(t1, t1, 16);       /*  t1 = ..efghab */
    tcg_gen_andi_i64(t1, t1, m);        /*  t1 = ..ef..ab */
    tcg_gen_or_i64(ret, t0, t1);        /* ret = ghefcdab */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
2356  
2357  /*
2358   * wswap_i64: Swap 32-bit words within a 64-bit value.
2359   *
2360   * Byte pattern: abcdefgh -> efghabcd
2361   */
void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Swapping 2 32-bit elements is a rotate. */
    /* No fallback needed here: tcg_gen_rotli_i64 itself expands to
       shift/or when the host lacks a rotate op. */
    tcg_gen_rotli_i64(ret, arg, 32);
}
2367  
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* ret = ~arg */
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: invert the two halves independently. */
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        /* Fallback: ~x == x ^ -1. */
        tcg_gen_xori_i64(ret, arg, -1);
    }
}
2379  
void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = arg1 & ~arg2 */
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: apply the operation to each half independently. */
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        /* Expand via an explicit complement of arg2. */
        TCGv_i64 inv = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(inv, arg2);
        tcg_gen_and_i64(ret, arg1, inv);
        tcg_temp_free_i64(inv);
    }
}
2394  
void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = ~(arg1 ^ arg2) */
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: operate on the two halves independently. */
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        /* Fallback: xor then invert, accumulating in ret. */
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
2407  
void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = ~(arg1 & arg2) */
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: operate on the two halves independently. */
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        /* Fallback: and then invert, accumulating in ret. */
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
2420  
void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = ~(arg1 | arg2) */
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: operate on the two halves independently. */
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        /* Fallback: or then invert, accumulating in ret. */
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
2433  
void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = arg1 | ~arg2 */
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: apply the operation to each half independently. */
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        /* Expand via an explicit complement of arg2. */
        TCGv_i64 inv = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(inv, arg2);
        tcg_gen_or_i64(ret, arg1, inv);
        tcg_temp_free_i64(inv);
    }
}
2448  
void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Count leading zeros of arg1; arg2 supplies the result when
       arg1 == 0. */
    if (TCG_TARGET_HAS_clz_i64) {
        tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
    } else {
        /* No host support at all: out-of-line helper. */
        gen_helper_clz_i64(ret, arg1, arg2);
    }
}
2457  
void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    /* As tcg_gen_clz_i64, with a constant zero-input value arg2. */
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        /*
         * Count the low word first, biased by -32 so that the +32
         * below yields 32 + clz32(low) -- or, when the low word is
         * also zero, arg2 itself (the bias cancels mod 2^32).  That
         * result then serves as the zero-input fallback for counting
         * the high word.
         */
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2473  
void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Count trailing zeros of arg1; arg2 supplies the result when
       arg1 == 0. */
    if (TCG_TARGET_HAS_ctz_i64) {
        tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 z, t = tcg_temp_ebb_new_i64();

        if (TCG_TARGET_HAS_ctpop_i64) {
            /* (arg1 - 1) & ~arg1 sets exactly the trailing-zero bits,
               so their population count equals ctz. */
            tcg_gen_subi_i64(t, arg1, 1);
            tcg_gen_andc_i64(t, t, arg1);
            tcg_gen_ctpop_i64(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
            /* -arg1 & arg1 isolates the lowest set bit; its clz is
               63 - ctz, and xor with 63 recovers ctz. */
            tcg_gen_neg_i64(t, arg1);
            tcg_gen_and_i64(t, t, arg1);
            tcg_gen_clzi_i64(t, t, 64);
            tcg_gen_xori_i64(t, t, 63);
        }
        /* Select arg2 for the zero-input case. */
        z = tcg_constant_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i64(t);
        tcg_temp_free_i64(z);
    } else {
        gen_helper_ctz_i64(ret, arg1, arg2);
    }
}
2500  
void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    /* As tcg_gen_ctz_i64, with a constant zero-input value arg2. */
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        /* Mirror of the clzi_i64 expansion: count the high word first,
           biased by -32 so that the +32 yields 32 + ctz32(high), or
           arg2 when the high word is also zero; then count the low
           word with that as the zero-input fallback. */
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        /* popcount((arg1 - 1) & ~arg1) == ctz(arg1), including
           64 for arg1 == 0, which matches arg2 here. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2525  
void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Count leading redundant sign bits (the sign bit itself is not
       counted). */
    if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        /* XOR with the replicated sign bit maps redundant sign bits to
           leading zeros; clz - 1 then discounts the sign bit. */
        tcg_gen_sari_i64(t, arg, 63);
        tcg_gen_xor_i64(t, t, arg);
        tcg_gen_clzi_i64(t, t, 64);
        tcg_gen_subi_i64(ret, t, 1);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_clrsb_i64(ret, arg);
    }
}
2539  
void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    /* Population count: ret = number of set bits in arg1. */
    if (TCG_TARGET_HAS_ctpop_i64) {
        tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
        /* Sum the counts of the two halves; the total (at most 64)
           fits in the low word, so the high word is zeroed. */
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        gen_helper_ctpop_i64(ret, arg1);
    }
}
2553  
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Rotate arg1 left by arg2 bits. */
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        /* Expand as (arg1 << arg2) | (arg1 >> (64 - arg2)). */
        TCGv_i64 hi = tcg_temp_ebb_new_i64();
        TCGv_i64 lo = tcg_temp_ebb_new_i64();

        tcg_gen_shl_i64(hi, arg1, arg2);
        tcg_gen_subfi_i64(lo, 64, arg2);
        tcg_gen_shr_i64(lo, arg1, lo);
        tcg_gen_or_i64(ret, hi, lo);
        tcg_temp_free_i64(hi);
        tcg_temp_free_i64(lo);
    }
}
2570  
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* Rotate arg1 left by the constant arg2, 0 <= arg2 < 64. */
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        /* Rotation by zero is a plain move. */
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* Expand as (arg1 << arg2) | (arg1 >> (64 - arg2)). */
        TCGv_i64 hi = tcg_temp_ebb_new_i64();
        TCGv_i64 lo = tcg_temp_ebb_new_i64();

        tcg_gen_shli_i64(hi, arg1, arg2);
        tcg_gen_shri_i64(lo, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, hi, lo);
        tcg_temp_free_i64(hi);
        tcg_temp_free_i64(lo);
    }
}
2590  
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Rotate arg1 right by arg2 bits. */
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        /* Expand as (arg1 >> arg2) | (arg1 << (64 - arg2)). */
        TCGv_i64 lo = tcg_temp_ebb_new_i64();
        TCGv_i64 hi = tcg_temp_ebb_new_i64();

        tcg_gen_shr_i64(lo, arg1, arg2);
        tcg_gen_subfi_i64(hi, 64, arg2);
        tcg_gen_shl_i64(hi, arg1, hi);
        tcg_gen_or_i64(ret, lo, hi);
        tcg_temp_free_i64(lo);
        tcg_temp_free_i64(hi);
    }
}
2607  
void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* Rotate arg1 right by the constant arg2, 0 <= arg2 < 64. */
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* A right rotate by N is a left rotate by 64 - N; zero is a move. */
    if (arg2 != 0) {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    } else {
        tcg_gen_mov_i64(ret, arg1);
    }
}
2618  
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS,
 * leaving the remaining bits of ARG1 unchanged:
 *   ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs)
 */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        /* Full-width deposit replaces arg1 entirely. */
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* If the field lies wholly within one word, deposit there
           and copy the other word through. */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        if (ofs + len == 64) {
            /* Field abuts the MSB: extract2 of (arg1 << len):arg2 at
               position len keeps arg1's low bits below arg2's field. */
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            /* Field at bit 0: extract2 then rotate the result back. */
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    /* Generic mask/shift/or expansion. */
    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* Field abuts the MSB: the shift discards the high bits. */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
2681  
/*
 * Deposit the low LEN bits of ARG at bit offset OFS into an
 * otherwise-zero result:  ret = (arg & ((1 << len) - 1)) << ofs.
 */
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        /* Field abuts the MSB: a single shift suffices. */
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at bit 0: a single mask suffices. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        /* Deposit into a zero constant. */
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* If the field lies wholly within one word, deposit there
               and zero the other word. */
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: explicit mask and shift. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}
2765  
/*
 * Extract LEN bits from ARG starting at bit OFS, zero-extending
 * the result to 64 bits:  ret = (arg >> ofs) & ((1 << len) - 1).
 */
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        /* Field abuts the MSB: a plain unsigned shift suffices. */
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field starts at bit 0: a plain mask suffices. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        /* Shift the field up against the MSB, then back down with an
           unsigned shift to zero-extend. */
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
2847  
/*
 * Extract LEN bits from ARG starting at bit OFS, sign-extending
 * the result to 64 bits.
 */
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        /* Field abuts the MSB: an arithmetic shift suffices. */
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            /* Field covers the whole low word plus part of the high. */
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally.  Bit ofs+32
               of ARG is bit ofs of TCGV_HIGH(arg), so the offset within
               the high word is OFS, not OFS + 32 (which would also
               violate sextract_i32's ofs < 32 precondition).  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    /* Generic fallback: shift the field up against the MSB, then back
       down with an arithmetic shift to sign-extend. */
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}
2963  
2964  /*
2965   * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2966   * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2967   */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        /* Window is exactly the low half. */
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        /* Window is exactly the high half. */
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        /* Both halves identical: the extract is a rotate right. */
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i64) {
        tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
    } else {
        /* Fallback: al's upper bits shifted down, ah's low OFS bits
           deposited above them. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}
2987  
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    /* ret = (c1 cond c2) ? v1 : v2 */
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* 32-bit host: evaluate the 64-bit comparison into a flag
           with setcond2, then select each half with 32-bit movcond. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);

        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, zero,
                            TCGV_LOW(v1), TCGV_LOW(v2));
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, zero,
                            TCGV_HIGH(v1), TCGV_HIGH(v2));

        tcg_temp_free_i32(t0);
    }
}
3013  
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    /* 128-bit add: (rh:rl) = (ah:al) + (bh:bl). */
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        /* The carry out of the low half is (al + bl) < al, unsigned.
           Write rl last so the inputs stay live throughout. */
        TCGv_i64 sum = tcg_temp_ebb_new_i64();
        TCGv_i64 carry = tcg_temp_ebb_new_i64();

        tcg_gen_add_i64(sum, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, carry, sum, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, carry);
        tcg_gen_mov_i64(rl, sum);
        tcg_temp_free_i64(sum);
        tcg_temp_free_i64(carry);
    }
}
3031  
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    /* 128-bit subtract: (rh:rl) = (ah:al) - (bh:bl). */
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        /* A borrow out of the low half occurs exactly when al < bl,
           unsigned.  Write rl last so the inputs stay live. */
        TCGv_i64 diff = tcg_temp_ebb_new_i64();
        TCGv_i64 borrow = tcg_temp_ebb_new_i64();

        tcg_gen_sub_i64(diff, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, borrow, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, borrow);
        tcg_gen_mov_i64(rl, diff);
        tcg_temp_free_i64(diff);
        tcg_temp_free_i64(borrow);
    }
}
3049  
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Unsigned 64x64 -> 128 multiply; rl = low half, rh = high half. */
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Separate low and high multiplies; compute the low part into
           a temp first so rl may alias one of the inputs. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* No host support: compute the high half via a helper call. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
3068  
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Signed 64x64 -> 128 multiply; rl = low half, rh = high half. */
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Separate low and high multiplies; compute the low part into
           a temp first so rl may alias one of the inputs. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        /* The signed high half equals the unsigned high half minus
           arg2 when arg1 < 0 and minus arg1 when arg2 < 0:
           rh = t1 - (arg1<0 ? arg2 : 0) - (arg2<0 ? arg1 : 0).
           The sari-63 results are the all-ones/all-zeros masks used
           to select those subtrahends. */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* No host support: compute the high half via a helper call. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
3105  
void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /*
     * Signed (arg1) x unsigned (arg2) -> 128-bit multiply: do an
     * unsigned multiply, then subtract arg2 from the high half iff
     * arg1 is negative (sari-63 yields the all-ones mask).
     */
    TCGv_i64 lo = tcg_temp_ebb_new_i64();
    TCGv_i64 hi = tcg_temp_ebb_new_i64();
    TCGv_i64 adj = tcg_temp_ebb_new_i64();

    tcg_gen_mulu2_i64(lo, hi, arg1, arg2);
    tcg_gen_sari_i64(adj, arg1, 63);
    tcg_gen_and_i64(adj, adj, arg2);
    tcg_gen_sub_i64(rh, hi, adj);
    tcg_gen_mov_i64(rl, lo);
    tcg_temp_free_i64(lo);
    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(adj);
}
3121  
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    /* Signed minimum: ret = (a < b) ? a : b. */
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}
3126  
/* Set ret to the unsigned minimum of a and b.  */
void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    /* ret = (a < b unsigned) ? a : b */
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}
3131  
/* Set ret to the signed maximum of a and b.  */
void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    /* ret = (a < b signed) ? b : a */
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}
3136  
/* Set ret to the unsigned maximum of a and b.  */
void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    /* ret = (a < b unsigned) ? b : a */
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
3141  
/*
 * Set ret to the absolute value of a, using the branchless identity
 * abs(a) == (a ^ (a >> 63)) - (a >> 63), where >> is arithmetic.
 */
void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
{
    TCGv_i64 sign = tcg_temp_ebb_new_i64();

    /* sign = -1 when a is negative, 0 otherwise.  */
    tcg_gen_sari_i64(sign, a, 63);
    /* Conditionally complement all bits ...  */
    tcg_gen_xor_i64(ret, a, sign);
    /* ... then subtract -1 (i.e. add 1) to complete the negation.  */
    tcg_gen_sub_i64(ret, ret, sign);
    tcg_temp_free_i64(sign);
}
3151  
3152  /* Size changing operations.  */
3153  
/* Copy bits 31:0 of the i64 arg into the i32 ret.  */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* An i64 is a register pair on 32-bit hosts; take the low half.  */
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        /* Backend provides a dedicated extract-low opcode.  */
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* Reinterpret the i64 temp as an i32; presumably the mov only
           reads the low 32 bits here -- backend contract, not visible
           in this file.  */
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}
3165  
/* Copy bits 63:32 of the i64 arg into the i32 ret.  */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* An i64 is a register pair on 32-bit hosts; take the high half.  */
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        /* Backend provides a dedicated extract-high opcode.  */
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* Shift the wanted bits down, then move out the low half.  */
        TCGv_i64 shifted = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(shifted, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)shifted);
        tcg_temp_free_i64(shifted);
    }
}
3180  
/* Zero-extend the i32 arg into the i64 ret.  */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: copy the value low, clear the high word.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
3191  
/* Sign-extend the i32 arg into the i64 ret.  */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: copy low, then replicate its sign bit high.
           Reading TCGV_LOW(ret) (not arg) keeps this correct even if
           ret and arg overlap.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
3202  
/*
 * Pack two i32 values into the i64 dest: low occupies bits 31:0,
 * high occupies bits 63:32.
 */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 hi64;

    if (TCG_TARGET_REG_BITS == 32) {
        /* An i64 is already a register pair; fill each half directly.  */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    hi64 = tcg_temp_ebb_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(hi64, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, hi64, 32, 32);
    } else {
        tcg_gen_shli_i64(hi64, hi64, 32);
        tcg_gen_or_i64(dest, dest, hi64);
    }
    tcg_temp_free_i64(hi64);
}
3228  
/* Split the i64 arg into two i32s: bits 31:0 into lo, bits 63:32 into hi.  */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: copy each half directly.  */
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
3239  
/* Split the i64 arg into two i64s: lo = bits 31:0 zero-extended,
   hi = bits 63:32.
   NOTE(review): if lo aliases arg, arg is clobbered before hi is
   computed -- presumably callers must not pass lo == arg; confirm.  */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
3245  
/* Pack the low 32 bits of lo and the low 32 bits of hi into ret:
   lo into bits 31:0, hi into bits 63:32.  */
void tcg_gen_concat32_i64(TCGv_i64 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_deposit_i64(ret, lo, hi, 32, 32);
}
3250  
/* Split the i128 arg into two i64s: low half into lo, high half into hi.
   An i128 is represented as a pair of i64 temps.  */
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}
3256  
/* Pack two i64s into the i128 ret: lo into the low half, hi into the
   high half.  An i128 is represented as a pair of i64 temps.  */
void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}
3262  
/* Copy a 128-bit value, emitting nothing for a self-assignment.  */
void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
{
    if (dst == src) {
        return;
    }
    tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
    tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
}
3270  
/*
 * Load a 128-bit value from base+offset into ret.  The two 64-bit
 * halves sit in host-endian order in memory, so the half at the lower
 * address is the high half on a big-endian host.
 */
void tcg_gen_ld_i128(TCGv_i128 ret, TCGv_ptr base, tcg_target_long offset)
{
    if (!HOST_BIG_ENDIAN) {
        tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset);
        tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset + 8);
    } else {
        tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset);
        tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset + 8);
    }
}
3281  
/*
 * Store the 128-bit value val to base+offset.  The two 64-bit halves
 * are written in host-endian order, matching tcg_gen_ld_i128.
 */
void tcg_gen_st_i128(TCGv_i128 val, TCGv_ptr base, tcg_target_long offset)
{
    if (!HOST_BIG_ENDIAN) {
        tcg_gen_st_i64(TCGV128_LOW(val), base, offset);
        tcg_gen_st_i64(TCGV128_HIGH(val), base, offset + 8);
    } else {
        tcg_gen_st_i64(TCGV128_HIGH(val), base, offset);
        tcg_gen_st_i64(TCGV128_LOW(val), base, offset + 8);
    }
}
3292  
3293  /* QEMU specific operations.  */
3294  
/* Emit an exit from the current TB, returning tb combined with the
   exit index idx as the value delivered to the main loop.  Pass
   tb == NULL (with idx == 0) for a plain exit with no chaining.  */
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    /* The low bits of the TB pointer carry idx; presumably TB alignment
       guarantees they are free -- not visible from this file.  */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        /* Plain exit to the main loop; only index 0 is meaningful.  */
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
3325  
/* Emit a direct (chainable) jump slot numbered idx for the current TB.
   A matching tcg_gen_exit_tb(tb, idx) must follow.  */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    /* Record the exit so tcg_gen_exit_tb can validate it later.  */
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    /* Chained jumps leave the TB without returning to the main loop,
       so plugin memory helpers must not be live across them.  */
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
3340  
tcg_gen_lookup_and_goto_ptr(void)3341  void tcg_gen_lookup_and_goto_ptr(void)
3342  {
3343      TCGv_ptr ptr;
3344  
3345      if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
3346          tcg_gen_exit_tb(NULL, 0);
3347          return;
3348      }
3349  
3350      plugin_gen_disable_mem_helpers();
3351      ptr = tcg_temp_ebb_new_ptr();
3352      gen_helper_lookup_tb_ptr(ptr, tcg_env);
3353      tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
3354      tcg_temp_free_ptr(ptr);
3355  }
3356