/***                           VSX extension                               ***/

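/*
 * Access helpers for the two 64-bit halves of VSR n: "vsrh" is the most
 * significant doubleword, "vsrl" the least significant one.
 */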
static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, true));
}

static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, false));
}

static inline void set_cpu_vsrh(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, true));
}

static inline void set_cpu_vsrl(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, false));
}

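/*
 * Return a pointer into the CPU state to the full 128-bit VSR reg, for
 * passing vector operands to out-of-line helpers.
 */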
static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}

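/*
 * Indexed scalar loads (lxsdx & co): load one element into the upper
 * doubleword of VSR xT; the lower doubleword is left undefined.
 */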
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsrh(xT(ctx->opcode), t0);                        \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

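/* lxvd2x: load two doublewords into VSR xT, high doubleword first. */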
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

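/* lxvdsx: load one doubleword and splat it into both halves of VSR xT. */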
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

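/*
 * lxvw4x: load four words.  In little-endian mode the 64-bit LE loads
 * are followed by a word swap within each doubleword, so the VSR always
 * ends up in big-endian element order.
 */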
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

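/* Byte-swap each of the eight 16-bit halfwords held in the inh:inl pair. */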
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

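/* Byte-swap each of the four 32-bit words held in the inh:inl pair. */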
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}
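
/*
 * lxvh8x: load eight halfwords.  The two doublewords are loaded
 * big-endian; in little-endian mode each halfword is then byte-swapped
 * in place to restore element order.
 */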
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

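/* lxvb16x: load sixteen bytes; byte order needs no endian fixup. */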
static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

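/*
 * Vector loads lxv (DQ-form) and lxvx (indexed).  Both can target any of
 * the 64 VSRs: the lower half requires VSX, the upper half (the Altivec
 * VRs) requires Altivec, hence the split enable check.
 */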
#define VSX_VECTOR_LOAD(name, op, indexed)                  \
static void gen_##name(DisasContext *ctx)                   \
{                                                           \
    int xt;                                                 \
    TCGv EA;                                                \
    TCGv_i64 xth;                                           \
    TCGv_i64 xtl;                                           \
                                                            \
    if (indexed) {                                          \
        xt = xT(ctx->opcode);                               \
    } else {                                                \
        xt = DQxT(ctx->opcode);                             \
    }                                                       \
                                                            \
    if (xt < 32) {                                          \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
    } else {                                                \
        if (unlikely(!ctx->altivec_enabled)) {              \
            gen_exception(ctx, POWERPC_EXCP_VPU);           \
            return;                                         \
        }                                                   \
    }                                                       \
    xth = tcg_temp_new_i64();                               \
    xtl = tcg_temp_new_i64();                               \
    gen_set_access_type(ctx, ACCESS_INT);                   \
    EA = tcg_temp_new();                                    \
    if (indexed) {                                          \
        gen_addr_reg_index(ctx, EA);                        \
    } else {                                                \
        gen_addr_imm_index(ctx, EA, 0x0F);                  \
    }                                                       \
    if (ctx->le_mode) {                                     \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);   \
        set_cpu_vsrl(xt, xtl);                              \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);   \
        set_cpu_vsrh(xt, xth);                              \
    } else {                                                \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);   \
        set_cpu_vsrh(xt, xth);                              \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);   \
        set_cpu_vsrl(xt, xtl);                              \
    }                                                       \
    tcg_temp_free(EA);                                      \
    tcg_temp_free_i64(xth);                                 \
    tcg_temp_free_i64(xtl);                                 \
}

VSX_VECTOR_LOAD(lxv, ld_i64, 0)
VSX_VECTOR_LOAD(lxvx, ld_i64, 1)

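/* Vector stores stxv/stxvx, the mirror image of VSX_VECTOR_LOAD above. */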
#define VSX_VECTOR_STORE(name, op, indexed)                 \
static void gen_##name(DisasContext *ctx)                   \
{                                                           \
    int xt;                                                 \
    TCGv EA;                                                \
    TCGv_i64 xth;                                           \
    TCGv_i64 xtl;                                           \
                                                            \
    if (indexed) {                                          \
        xt = xT(ctx->opcode);                               \
    } else {                                                \
        xt = DQxT(ctx->opcode);                             \
    }                                                       \
                                                            \
    if (xt < 32) {                                          \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
    } else {                                                \
        if (unlikely(!ctx->altivec_enabled)) {              \
            gen_exception(ctx, POWERPC_EXCP_VPU);           \
            return;                                         \
        }                                                   \
    }                                                       \
    xth = tcg_temp_new_i64();                               \
    xtl = tcg_temp_new_i64();                               \
    get_cpu_vsrh(xth, xt);                                  \
    get_cpu_vsrl(xtl, xt);                                  \
    gen_set_access_type(ctx, ACCESS_INT);                   \
    EA = tcg_temp_new();                                    \
    if (indexed) {                                          \
        gen_addr_reg_index(ctx, EA);                        \
    } else {                                                \
        gen_addr_imm_index(ctx, EA, 0x0F);                  \
    }                                                       \
    if (ctx->le_mode) {                                     \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);   \
    } else {                                                \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);   \
    }                                                       \
    tcg_temp_free(EA);                                      \
    tcg_temp_free_i64(xth);                                 \
    tcg_temp_free_i64(xtl);                                 \
}

VSX_VECTOR_STORE(stxv, st_i64, 0)
VSX_VECTOR_STORE(stxvx, st_i64, 1)

#ifdef TARGET_PPC64
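/*
 * Loads and stores with a byte count taken from register rB (lxvl,
 * lxvll, stxvl, stxvll), implemented out of line: the helper receives
 * the effective address, a pointer to the VSR and the length register.
 */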
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

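/*
 * DS-form scalar loads lxsd and lxssp.  The target is a VR (hence the
 * +32 and the Altivec enable check); the lower doubleword is left
 * undefined.
 */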
#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);                      \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

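/*
 * Indexed scalar stores: store one element taken from the upper
 * doubleword of VSR xS.
 */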
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsrh(t0, xS(ctx->opcode));                        \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

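/* stxvd2x: store both doublewords of VSR xS, high doubleword first. */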
static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsrh(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsrl(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

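/*
 * stxvw4x: store four words.  In little-endian mode the words are
 * swapped within each doubleword before the LE store, preserving
 * element order.
 */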
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

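/*
 * stxvh8x: store eight halfwords, byte-swapping each one first when in
 * little-endian mode.
 */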
static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

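/*
 * DS-form scalar stores stxsd and stxssp: store the upper doubleword of
 * a VR (hence the +32 and the Altivec enable check).
 */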
#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                      \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

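/*
 * Word moves between GPRs and VSRs.  VSRs 0..31 overlap the FPRs and
 * VSRs 32..63 the Altivec VRs, hence the register-dependent enable
 * checks.
 */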
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
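/* Doubleword moves between GPRs and VSRs, 64-bit targets only. */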
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrl(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsrh(xT(ctx->opcode), t0);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

#endif

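/*
 * xxpermdi: pick the target's high doubleword from xA and its low
 * doubleword from xB according to the two DM bits.  When xT aliases a
 * source register, both doublewords are read before either is written.
 */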
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
        }

        set_cpu_vsrh(xT(ctx->opcode), xh);
        set_cpu_vsrl(xT(ctx->opcode), xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

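/*
 * Scalar sign-bit operations (xsabsdp & co): clear, set, flip or copy
 * the sign bit of the upper doubleword using the masks above.
 */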
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsrh(xb, xB(ctx->opcode));                        \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                get_cpu_vsrh(xa, xA(ctx->opcode));                \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        set_cpu_vsrh(xT(ctx->opcode), xb);                        \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

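/*
 * Quad-precision forms of the sign-bit operations.  The operands live
 * in VRs (hence the +32); only the most significant doubleword carries
 * the sign bit, so SGN_MASK_DP is reused.
 */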
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xbh, xb);                                        \
    get_cpu_vsrl(xbl, xb);                                        \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsrh(tmp, xa);                                    \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsrh(xt, xbh);                                        \
    set_cpu_vsrl(xt, xbl);                                        \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

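/*
 * Vector forms of the sign-bit operations, applied to both doublewords.
 * The SP mask has the sign bit of each 32-bit element set.
 */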
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext *ctx)                  \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        get_cpu_vsrh(xbh, xB(ctx->opcode));                      \
        get_cpu_vsrl(xbl, xB(ctx->opcode));                      \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                get_cpu_vsrh(xah, xA(ctx->opcode));              \
                get_cpu_vsrl(xal, xA(ctx->opcode));              \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        set_cpu_vsrh(xT(ctx->opcode), xbh);                      \
        set_cpu_vsrl(xT(ctx->opcode), xbl);                      \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

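/*
 * Vector compares.  The helper returns the CR6 value; it is written to
 * CR6 only when the instruction's record bit is set, and discarded
 * otherwise.
 */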
#define VSX_CMP(name, op1, op2, inval, type)                                  \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 ignored;                                                         \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);                   \
    } else {                                                                  \
        ignored = tcg_temp_new_i32();                                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);                      \
        tcg_temp_free_i32(ignored);                                           \
    }                                                                         \
    gen_helper_float_check_status(cpu_env);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_ISA300)

static void gen_xscvqpdp(DisasContext *ctx)
{
    TCGv_i32 opc;
    TCGv_ptr xt, xb;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    opc = tcg_const_i32(ctx->opcode);
    xt = gen_vsr_ptr(xT(ctx->opcode));
    xb = gen_vsr_ptr(xB(ctx->opcode));
    gen_helper_xscvqpdp(cpu_env, opc, xt, xb);
    tcg_temp_free_i32(opc);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);
}

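/*
 * Boilerplate generators for instructions implemented entirely by an
 * out-of-line helper.  The X-form variants pass pointers to the VSRs
 * named by the xT/xA/xB fields; some also pass the raw opcode so the
 * helper can decode extra fields itself.
 */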
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xb);                                       \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xb;                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xb);                                      \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xb);                                                    \
}

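/*
 * As above, but for forms whose operands live in the VRs, addressed
 * here as VSRs 32..63 (hence the +32 on the rD/rA/rB fields).
 */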
#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                              \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsrh(t0, xB(ctx->opcode));                        \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsrh(xT(ctx->opcode), t1);                        \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

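/*
 * Instantiations.  The op1/op2/inval arguments are not used by the
 * generator bodies above; they mirror the opcode table entries
 * elsewhere in the translator.
 */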
1173GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
1174GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
1175GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
1176GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
1177GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
1178GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
1179GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
1180GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
1181GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
1182GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
1183GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
1184GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
1185GEN_VSX_HELPER_X3(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
1186GEN_VSX_HELPER_X3(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
1187GEN_VSX_HELPER_X3(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
1188GEN_VSX_HELPER_X3(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
1189GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
1190GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
1191GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
1192GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
1193GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
1194GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_R3(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_R3(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_R3(xsminjdp, 0x00, 0x13, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

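/*
 * VSX fused multiply-add.  Each mnemonic covers two encodings: one where
 * VSR[XT] supplies the second multiplicand (A x T + B) and one where it
 * supplies the addend (A x B + T).  The opcode bit tested below selects
 * between them simply by reordering the b/c pointers passed to the
 * helper; aop/mop document the opcode table values of the two forms.
 */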
#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)             \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, b, c;                                                    \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    if (ctx->opcode & PPC_BIT32(25)) {                                        \
        /*                                                                    \
         * AxT + B                                                            \
         */                                                                   \
        b = gen_vsr_ptr(xT(ctx->opcode));                                     \
        c = gen_vsr_ptr(xB(ctx->opcode));                                     \
    } else {                                                                  \
        /*                                                                    \
         * AxB + T                                                            \
         */                                                                   \
        b = gen_vsr_ptr(xB(ctx->opcode));                                     \
        c = gen_vsr_ptr(xT(ctx->opcode));                                     \
    }                                                                         \
    gen_helper_##name(cpu_env, xt, xa, b, c);                                 \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_ptr(c);                                                     \
}

GEN_VSX_HELPER_VSX_MADD(xsmadddp, 0x04, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmsubdp, 0x04, 0x06, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmadddp, 0x04, 0x14, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmsubdp, 0x04, 0x16, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmaddsp, 0x04, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsmsubsp, 0x04, 0x02, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmaddsp, 0x04, 0x10, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmsubsp, 0x04, 0x12, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)

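/* xxbrd: byte-reverse each doubleword element of VSR[XB] into VSR[XT]. */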
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

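/* xxbrh: byte-reverse each halfword element of VSR[XB] into VSR[XT]. */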
static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

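/*
 * xxbrq: byte-reverse the full 128-bit quadword of VSR[XB]: the two
 * doublewords swap places and each one is byte-swapped individually.
 */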
static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

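/* xxbrw: byte-reverse each word element of VSR[XB] into VSR[XT]. */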
static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

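/*
 * 128-bit VSX logical operations.  These map directly onto the generic
 * TCG vector ops over the full 16-byte register, so no helper is needed.
 */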
#define VSX_LOGICAL(name, vece, tcg_op)                              \
static void glue(gen_, name)(DisasContext *ctx)                      \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),               \
               vsr_full_offset(xA(ctx->opcode)),                     \
               vsr_full_offset(xB(ctx->opcode)), 16, 16);            \
    }

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)

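/*
 * xxmrghw/xxmrglw: interleave the high (or low) words of VSR[XA] and
 * VSR[XB].  Each source doubleword is loaded twice so that one copy can
 * be shifted before the deposits build the result, which for the high
 * form is { A.w0, B.w0, A.w1, B.w1 }.
 */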
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext *ctx)             \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1, tmp;                       \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        tmp = tcg_temp_new_i64();                           \
        if (high) {                                         \
            get_cpu_vsrh(a0, xA(ctx->opcode));              \
            get_cpu_vsrh(a1, xA(ctx->opcode));              \
            get_cpu_vsrh(b0, xB(ctx->opcode));              \
            get_cpu_vsrh(b1, xB(ctx->opcode));              \
        } else {                                            \
            get_cpu_vsrl(a0, xA(ctx->opcode));              \
            get_cpu_vsrl(a1, xA(ctx->opcode));              \
            get_cpu_vsrl(b0, xB(ctx->opcode));              \
            get_cpu_vsrl(b1, xB(ctx->opcode));              \
        }                                                   \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);           \
        set_cpu_vsrh(xT(ctx->opcode), tmp);                 \
        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);           \
        set_cpu_vsrl(xT(ctx->opcode), tmp);                 \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
        tcg_temp_free_i64(tmp);                             \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

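/*
 * xxsel: bitwise select.  For each bit, the mask in VSR[XC] picks the
 * bit from VSR[XB] when set and from VSR[XA] when clear, matching the
 * operand order of tcg_gen_gvec_bitsel below.
 */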
static void gen_xxsel(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int ra = xA(ctx->opcode);
    int rb = xB(ctx->opcode);
    int rc = xC(ctx->opcode);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(rt), vsr_full_offset(rc),
                        vsr_full_offset(rb), vsr_full_offset(ra), 16, 16);
}

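/*
 * xxspltw: splat word UIM of VSR[XB] across all four words of VSR[XT].
 * The byte offset of the selected word is adjusted for host endianness
 * before tcg_gen_gvec_dup_mem replicates it.
 */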
static void gen_xxspltw(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int rb = xB(ctx->opcode);
    int uim = UIM(ctx->opcode);
    int tofs, bofs;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    tofs = vsr_full_offset(rt);
    bofs = vsr_full_offset(rb);
    bofs += uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

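/*
 * xxspltib: splat an 8-bit immediate across VSR[XT].  Targets 32..63
 * live in the Altivec register file, so those only require VEC to be
 * enabled, while targets 0..31 require VSX.
 */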
static void gen_xxspltib(DisasContext *ctx)
{
    uint8_t uim8 = IMM8(ctx->opcode);
    int rt = xT(ctx->opcode);

    if (rt < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(rt), 16, 16, uim8);
}

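/*
 * xxsldwi: shift left double by word immediate.  Conceptually VSR[XA] is
 * concatenated with VSR[XB] and VSR[XT] receives the four consecutive
 * words starting at word index SHW; each case below assembles the two
 * result doublewords with shifts and ORs.
 */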
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0:
        get_cpu_vsrh(xth, xA(ctx->opcode));
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        break;
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrh(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrl(t0, xA(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2:
        get_cpu_vsrl(xth, xA(ctx->opcode));
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        break;
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrl(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrl(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

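/*
 * xxextractuw/xxinsertw: extract or insert a word of VSR[XB] at byte
 * offset UIMM.  Offsets above 15 are out of bounds and zero VSR[XT]
 * here; in-range offsets, including the values above 12 that reach past
 * the end of the register, are handled by the helper as hardware does.
 */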
#define VSX_EXTRACT_INSERT(name)                                \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    TCGv_ptr xt, xb;                                            \
    TCGv_i32 t0;                                                \
    TCGv_i64 t1;                                                \
    uint8_t uimm = UIMM4(ctx->opcode);                          \
                                                                \
    if (unlikely(!ctx->vsx_enabled)) {                          \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
        return;                                                 \
    }                                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                          \
    xb = gen_vsr_ptr(xB(ctx->opcode));                          \
    t0 = tcg_temp_new_i32();                                    \
    t1 = tcg_temp_new_i64();                                    \
    /*                                                          \
     * uimm > 15 is out of bounds and zeroes VSR[XT]; uimm > 12 \
     * is handled as per hardware in the helper.                \
     */                                                         \
    if (uimm > 15) {                                            \
        tcg_gen_movi_i64(t1, 0);                                \
        set_cpu_vsrh(xT(ctx->opcode), t1);                      \
        set_cpu_vsrl(xT(ctx->opcode), t1);                      \
        /* Free the temporaries before the early return. */     \
        tcg_temp_free_ptr(xt);                                  \
        tcg_temp_free_ptr(xb);                                  \
        tcg_temp_free_i32(t0);                                  \
        tcg_temp_free_i64(t1);                                  \
        return;                                                 \
    }                                                           \
    tcg_gen_movi_i32(t0, uimm);                                 \
    gen_helper_##name(cpu_env, xt, xb, t0);                     \
    tcg_temp_free_ptr(xb);                                      \
    tcg_temp_free_ptr(xt);                                      \
    tcg_temp_free_i32(t0);                                      \
    tcg_temp_free_i64(t1);                                      \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
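/* xsxexpdp: copy the 11-bit DP exponent of VSR[XB] into GPR[RT]. */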
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xB(ctx->opcode));
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

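/*
 * xsxexpqp: copy the 15-bit QP exponent of VSR[rB + 32] into the upper
 * doubleword of VSR[rD + 32], clearing the lower doubleword.
 */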
static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

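/*
 * xsiexpdp: insert exponent.  Sign and fraction come from GPR[RA], the
 * low 11 bits of GPR[RB] become the exponent, and the result is placed
 * in the upper doubleword of VSR[XT].
 */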
static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

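/*
 * xsiexpqp: quad-precision variant of xsiexpdp, combining sign and
 * fraction from VSR[rA + 32] with the 15-bit exponent from VSR[rB + 32].
 */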
static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
    get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

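/*
 * xsxsigdp: extract significand.  The implicit integer bit (2^52) is
 * set above the 52-bit fraction unless the exponent is 0 (zero or
 * denormal) or all ones (infinity or NaN), in which case it stays clear.
 */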
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

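/* xsxsigqp: quad-precision xsxsigdp, with the implicit bit at 2^112. */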
static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif

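/*
 * xviexpsp: vector insert exponent single-precision.  Each word of the
 * result keeps sign and fraction from VSR[XA] and takes the low 8 bits
 * of the corresponding word of VSR[XB] as its exponent; both doubleword
 * halves use the same mask, shift and OR sequence.
 */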
static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

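/*
 * xviexpdp: vector insert exponent double-precision; one deposit per
 * doubleword places the low 11 bits of each VSR[XB] element into the
 * exponent field of the corresponding VSR[XA] element.
 */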
static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

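/*
 * xvxexpsp: vector extract exponent single-precision.  A shift and mask
 * leave the 8-bit exponent of each word right-justified in that word.
 */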
static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

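/*
 * xvxsigdp: vector extract significand double-precision, applying the
 * same implicit-bit rule as xsxsigdp to each doubleword element.
 */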
static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL
