/***                           VSX extension                               ***/

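/*
 * Accessors for the high/low 64-bit halves of a VSR, and for a pointer
 * to a full VSR in CPU state.
 */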
static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, true));
}

static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, false));
}

static inline void set_cpu_vsrh(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, true));
}

static inline void set_cpu_vsrl(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, false));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}

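/*
 * Scalar indexed loads (X-form): load into the high doubleword of
 * VSR[XT] with the access width named by "operation"; the low
 * doubleword is left undefined.
 */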
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsrh(xT(ctx->opcode), t0);                        \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

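/* lxvd2x: load two doublewords into VSR[XT] */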
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

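/*
 * lxvw4x: load four words into VSR[XT].  In little-endian mode the two
 * words within each doubleword are swapped after the 64-bit loads.
 */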
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

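/* lxvwsx: load one word and splat it across all four word elements */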
static void gen_lxvwsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i32 data;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i32(data);
}

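/* lxvdsx: load one doubleword and splat it across both doubleword elements */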
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 data;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));
    tcg_gen_gvec_dup_i64(MO_Q, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i64(data);
}

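/*
 * Byte-swap each of the eight halfwords held in the inh:inl pair,
 * writing the result to outh:outl.
 */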
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

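/*
 * Byte-swap each of the four words held in the inh:inl pair: bswap64
 * reverses all eight bytes of each half, then the shift/deposit swaps
 * the two words back into their original order.
 */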
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}
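
/*
 * lxvh8x: load sixteen bytes as eight halfwords, byte-swapping each
 * halfword in little-endian mode.
 */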
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

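/* lxvb16x: load sixteen bytes into VSR[XT] in byte-element order */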
static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

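/*
 * Whole-vector loads: lxv (DQ-form) and lxvx (X-form).  VSRs 32..63
 * overlay the Altivec registers, so the facility check depends on
 * which half of the register file is targeted.
 */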
#define VSX_VECTOR_LOAD(name, op, indexed)                  \
static void gen_##name(DisasContext *ctx)                   \
{                                                           \
    int xt;                                                 \
    TCGv EA;                                                \
    TCGv_i64 xth;                                           \
    TCGv_i64 xtl;                                           \
                                                            \
    if (indexed) {                                          \
        xt = xT(ctx->opcode);                               \
    } else {                                                \
        xt = DQxT(ctx->opcode);                             \
    }                                                       \
                                                            \
    if (xt < 32) {                                          \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
    } else {                                                \
        if (unlikely(!ctx->altivec_enabled)) {              \
            gen_exception(ctx, POWERPC_EXCP_VPU);           \
            return;                                         \
        }                                                   \
    }                                                       \
    xth = tcg_temp_new_i64();                               \
    xtl = tcg_temp_new_i64();                               \
    gen_set_access_type(ctx, ACCESS_INT);                   \
    EA = tcg_temp_new();                                    \
    if (indexed) {                                          \
        gen_addr_reg_index(ctx, EA);                        \
    } else {                                                \
        gen_addr_imm_index(ctx, EA, 0x0F);                  \
    }                                                       \
    if (ctx->le_mode) {                                     \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);   \
        set_cpu_vsrl(xt, xtl);                              \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);   \
        set_cpu_vsrh(xt, xth);                              \
    } else {                                                \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);   \
        set_cpu_vsrh(xt, xth);                              \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);   \
        set_cpu_vsrl(xt, xtl);                              \
    }                                                       \
    tcg_temp_free(EA);                                      \
    tcg_temp_free_i64(xth);                                 \
    tcg_temp_free_i64(xtl);                                 \
}

VSX_VECTOR_LOAD(lxv, ld_i64, 0)
VSX_VECTOR_LOAD(lxvx, ld_i64, 1)

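/* Whole-vector stores: stxv (DQ-form) and stxvx (X-form). */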
#define VSX_VECTOR_STORE(name, op, indexed)                 \
static void gen_##name(DisasContext *ctx)                   \
{                                                           \
    int xt;                                                 \
    TCGv EA;                                                \
    TCGv_i64 xth;                                           \
    TCGv_i64 xtl;                                           \
                                                            \
    if (indexed) {                                          \
        xt = xT(ctx->opcode);                               \
    } else {                                                \
        xt = DQxT(ctx->opcode);                             \
    }                                                       \
                                                            \
    if (xt < 32) {                                          \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
    } else {                                                \
        if (unlikely(!ctx->altivec_enabled)) {              \
            gen_exception(ctx, POWERPC_EXCP_VPU);           \
            return;                                         \
        }                                                   \
    }                                                       \
    xth = tcg_temp_new_i64();                               \
    xtl = tcg_temp_new_i64();                               \
    get_cpu_vsrh(xth, xt);                                  \
    get_cpu_vsrl(xtl, xt);                                  \
    gen_set_access_type(ctx, ACCESS_INT);                   \
    EA = tcg_temp_new();                                    \
    if (indexed) {                                          \
        gen_addr_reg_index(ctx, EA);                        \
    } else {                                                \
        gen_addr_imm_index(ctx, EA, 0x0F);                  \
    }                                                       \
    if (ctx->le_mode) {                                     \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);   \
    } else {                                                \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);   \
    }                                                       \
    tcg_temp_free(EA);                                      \
    tcg_temp_free_i64(xth);                                 \
    tcg_temp_free_i64(xtl);                                 \
}

VSX_VECTOR_STORE(stxv, st_i64, 0)
VSX_VECTOR_STORE(stxvx, st_i64, 1)

#ifdef TARGET_PPC64
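/*
 * Load/store vector with length: the byte count comes from GPR[RB] at
 * run time, so the access is done in a helper rather than inline.
 */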
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

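/*
 * Scalar DS-form loads: load into the high doubleword of VSR[VRT + 32]
 * (an Altivec register); the low doubleword is left undefined.
 */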
#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);                      \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

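/* Scalar indexed stores (X-form): store the high doubleword of VSR[XS]. */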
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsrh(t0, xS(ctx->opcode));                        \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

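/* stxvd2x: store two doublewords from VSR[XS] */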
static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsrh(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsrl(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

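/*
 * stxvw4x: store four words.  In little-endian mode the two words
 * within each doubleword are swapped before the 64-bit stores.
 */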
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

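/* stxvh8x: store eight halfwords, byte-swapping each one in LE mode */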
static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

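/* stxvb16x: store sixteen bytes in byte-element order */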
static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

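/* Scalar DS-form stores from the high doubleword of VSR[VRS + 32]. */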
#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                      \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

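/*
 * Moves between GPRs and VSR elements.  VSRs 0..31 overlay the FPRs,
 * so they are gated on the FP facility; VSRs 32..63 overlay the
 * Altivec registers and are gated on the vector facility.
 */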
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
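/* 64-bit moves between GPRs and VSR doublewords */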
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrl(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsrh(xT(ctx->opcode), t0);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

#endif

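/*
 * xxpermdi: select one doubleword from each of VSR[XA] and VSR[XB]
 * according to DM.  When XT aliases a source register, both halves are
 * read before either half of XT is written.
 */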
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
        }

        set_cpu_vsrh(xT(ctx->opcode), xh);
        set_cpu_vsrl(xT(ctx->opcode), xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

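/*
 * Scalar sign-manipulation ops (abs/nabs/neg/cpsgn) implemented as bit
 * operations on the high doubleword using the sign-bit mask.
 */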
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsrh(xb, xB(ctx->opcode));                        \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                get_cpu_vsrh(xa, xA(ctx->opcode));                \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        set_cpu_vsrh(xT(ctx->opcode), xb);                        \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

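/*
 * Quad-precision sign manipulation: only the high doubleword carries
 * the sign bit; the low doubleword passes through unchanged.
 */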
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xbh, xb);                                        \
    get_cpu_vsrl(xbl, xb);                                        \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsrh(tmp, xa);                                    \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsrh(xt, xbh);                                        \
    set_cpu_vsrl(xt, xbl);                                        \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

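/* Vector sign manipulation, applied to both doublewords of VSR[XB]. */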
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext *ctx)                  \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        get_cpu_vsrh(xbh, xB(ctx->opcode));                      \
        get_cpu_vsrl(xbl, xB(ctx->opcode));                      \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                get_cpu_vsrh(xah, xA(ctx->opcode));              \
                get_cpu_vsrl(xal, xA(ctx->opcode));              \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        set_cpu_vsrh(xT(ctx->opcode), xbh);                      \
        set_cpu_vsrl(xT(ctx->opcode), xbl);                      \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

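/*
 * Vector compares.  With the record bit set the helper's CR result is
 * written to CR6; otherwise it is computed into a dead temporary and
 * discarded.
 */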
#define VSX_CMP(name, op1, op2, inval, type)                                  \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 ignored;                                                         \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);                   \
    } else {                                                                  \
        ignored = tcg_temp_new_i32();                                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);                      \
        tcg_temp_free_i32(ignored);                                           \
    }                                                                         \
    gen_helper_float_check_status(cpu_env);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_ISA300)

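/* xscvqpdp: convert quad-precision to double-precision via helper */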
static void gen_xscvqpdp(DisasContext *ctx)
{
    TCGv_i32 opc;
    TCGv_ptr xt, xb;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    opc = tcg_const_i32(ctx->opcode);
    xt = gen_vsr_ptr(xT(ctx->opcode));
    xb = gen_vsr_ptr(xB(ctx->opcode));
    gen_helper_xscvqpdp(cpu_env, opc, xt, xb);
    tcg_temp_free_i32(opc);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);
}

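/*
 * Generators for helper-backed VSX instructions.  The X* variants use
 * X-form VSR numbers, the R* variants address VSRs 32..63 via VR-form
 * register fields, and XT_XB_ENV moves a 64-bit value through the high
 * doubleword.  Some variants also pass the raw opcode to the helper.
 */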
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xb);                                       \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xb;                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xb);                                      \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                              \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsrh(t0, xB(ctx->opcode));                        \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsrh(xT(ctx->opcode), t1);                        \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

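/* Helper-backed scalar and vector arithmetic, compare, and conversion ops */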
GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_R3(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_R3(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_R3(xsminjdp, 0x00, 0x13, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

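/*
 * The fused multiply-add helpers all compute xa * b + c.  Opcode bit 25
 * selects between the two register layouts of each madd-class mnemonic
 * (the "m" and "a" suffixed forms): either xT supplies the multiplicand
 * (AxT + B) or the addend (AxB + T).
 */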
#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)             \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, b, c;                                                    \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    if (ctx->opcode & PPC_BIT32(25)) {                                        \
        /*                                                                    \
         * AxT + B                                                            \
         */                                                                   \
        b = gen_vsr_ptr(xT(ctx->opcode));                                     \
        c = gen_vsr_ptr(xB(ctx->opcode));                                     \
    } else {                                                                  \
        /*                                                                    \
         * AxB + T                                                            \
         */                                                                   \
        b = gen_vsr_ptr(xB(ctx->opcode));                                     \
        c = gen_vsr_ptr(xT(ctx->opcode));                                     \
    }                                                                         \
    gen_helper_##name(cpu_env, xt, xa, b, c);                                 \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_ptr(c);                                                     \
}

GEN_VSX_HELPER_VSX_MADD(xsmadddp, 0x04, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmsubdp, 0x04, 0x06, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmadddp, 0x04, 0x14, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmsubdp, 0x04, 0x16, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmaddsp, 0x04, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsmsubsp, 0x04, 0x02, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmaddsp, 0x04, 0x10, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmsubsp, 0x04, 0x12, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)

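/*
 * xxbr[dhwq]: VSX vector byte-reverse.  Each function loads both 64-bit
 * halves of VSR[XB] into temporaries, reverses the bytes within elements
 * of the given width, and stores the result to VSR[XT].  For xxbrq the
 * two doublewords are also swapped, giving a full 128-bit reversal.
 */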
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

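/*
 * The VSX logical ops are expanded inline with the generic vector ops;
 * oprsz/maxsz of 16 cover the full 128-bit register, so no helper call
 * is needed.
 */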
#define VSX_LOGICAL(name, vece, tcg_op)                              \
static void glue(gen_, name)(DisasContext *ctx)                      \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),               \
               vsr_full_offset(xA(ctx->opcode)),                     \
               vsr_full_offset(xB(ctx->opcode)), 16, 16);            \
    }

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)

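/*
 * xxmrghw/xxmrglw interleave the word elements of one half of xA and xB:
 * for xxmrghw the result is { A.w[0], B.w[0], A.w[1], B.w[1] } taken
 * from the high doublewords, and xxmrglw does the same with the low
 * ones.  The a0/b0 pair is shifted down to select the even words and
 * the unshifted a1/b1 pair supplies the odd ones.
 */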
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext *ctx)             \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1, tmp;                       \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        tmp = tcg_temp_new_i64();                           \
        if (high) {                                         \
            get_cpu_vsrh(a0, xA(ctx->opcode));              \
            get_cpu_vsrh(a1, xA(ctx->opcode));              \
            get_cpu_vsrh(b0, xB(ctx->opcode));              \
            get_cpu_vsrh(b1, xB(ctx->opcode));              \
        } else {                                            \
            get_cpu_vsrl(a0, xA(ctx->opcode));              \
            get_cpu_vsrl(a1, xA(ctx->opcode));              \
            get_cpu_vsrl(b0, xB(ctx->opcode));              \
            get_cpu_vsrl(b1, xB(ctx->opcode));              \
        }                                                   \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);           \
        set_cpu_vsrh(xT(ctx->opcode), tmp);                 \
        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);           \
        set_cpu_vsrl(xT(ctx->opcode), tmp);                 \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
        tcg_temp_free_i64(tmp);                             \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

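/*
 * xxsel: bitwise select, xT = (xA & ~xC) | (xB & xC).  Note that the
 * gvec bitsel expansion takes the selector operand (xC here) first.
 */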
static void gen_xxsel(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int ra = xA(ctx->opcode);
    int rb = xB(ctx->opcode);
    int rc = xC(ctx->opcode);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(rt), vsr_full_offset(rc),
                        vsr_full_offset(rb), vsr_full_offset(ra), 16, 16);
}

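/*
 * xxspltw replicates word UIM of xB across xT.  The element offset is
 * uim << MO_32 (i.e. uim * 4) in big-endian element order; on a
 * little-endian host the register file stores the bytes in the opposite
 * order, so the offset of a 4-byte element is flipped with bofs ^= 12
 * (e.g. word 0 at offset 0 becomes host offset 12).
 */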
static void gen_xxspltw(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int rb = xB(ctx->opcode);
    int uim = UIM(ctx->opcode);
    int tofs, bofs;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    tofs = vsr_full_offset(rt);
    bofs = vsr_full_offset(rb);
    bofs += uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
}

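/*
 * pattern(x) replicates the low byte of x across a uint64_t:
 * ~(uint64_t)0 / 0xff is 0x0101010101010101, so e.g.
 * pattern(0xAB) == 0xABABABABABABABAB.
 */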
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

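/*
 * xxspltib carries its target in the combined 6-bit XT field, so
 * targets 0-31 require VSX to be enabled while targets 32-63 (the VRs)
 * only require Altivec.
 */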
static void gen_xxspltib(DisasContext *ctx)
{
    uint8_t uim8 = IMM8(ctx->opcode);
    int rt = xT(ctx->opcode);

    if (rt < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(rt), 16, 16, uim8);
}

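/*
 * xxsldwi shifts the 256-bit concatenation xA:xB left by SHW words and
 * keeps the upper 128 bits: SHW=0 copies xA, SHW=2 takes the low
 * doubleword of xA and the high doubleword of xB, and the odd shifts
 * stitch the halves together with 32-bit shifts and ors.
 */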
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            get_cpu_vsrh(xth, xA(ctx->opcode));
            get_cpu_vsrl(xtl, xA(ctx->opcode));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsrh(xth, xA(ctx->opcode));
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsrl(t0, xA(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsrl(xtl, xA(ctx->opcode));
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsrh(t0, xB(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            get_cpu_vsrl(xth, xA(ctx->opcode));
            get_cpu_vsrh(xtl, xB(ctx->opcode));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsrl(xth, xA(ctx->opcode));
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsrh(t0, xB(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsrh(xtl, xB(ctx->opcode));
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsrl(t0, xB(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

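/*
 * xxextractuw/xxinsertw move a word between VSR[XB] and VSR[XT] at a
 * byte offset taken from the UIM field.  The architecturally awkward
 * offsets (UIM > 12, where the word would straddle the end of the
 * register) are left to the helper to resolve as hardware does.
 */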
#define VSX_EXTRACT_INSERT(name)                                \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    TCGv_ptr xt, xb;                                            \
    TCGv_i32 t0;                                                \
    TCGv_i64 t1;                                                \
    uint8_t uimm = UIMM4(ctx->opcode);                          \
                                                                \
    if (unlikely(!ctx->vsx_enabled)) {                          \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
        return;                                                 \
    }                                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                          \
    xb = gen_vsr_ptr(xB(ctx->opcode));                          \
    t0 = tcg_temp_new_i32();                                    \
    t1 = tcg_temp_new_i64();                                    \
    /*                                                          \
     * uimm > 15 is out of bounds, so zero the target; for      \
     * uimm > 12 the access is handled as per hardware in       \
     * the helper.                                              \
     */                                                         \
    if (uimm > 15) {                                            \
        tcg_gen_movi_i64(t1, 0);                                \
        set_cpu_vsrh(xT(ctx->opcode), t1);                      \
        set_cpu_vsrl(xT(ctx->opcode), t1);                      \
        /* Free the temporaries on this early exit as well. */  \
        tcg_temp_free_ptr(xb);                                  \
        tcg_temp_free_ptr(xt);                                  \
        tcg_temp_free_i32(t0);                                  \
        tcg_temp_free_i64(t1);                                  \
        return;                                                 \
    }                                                           \
    tcg_gen_movi_i32(t0, uimm);                                 \
    gen_helper_##name(cpu_env, xt, xb, t0);                     \
    tcg_temp_free_ptr(xb);                                      \
    tcg_temp_free_ptr(xt);                                      \
    tcg_temp_free_i32(t0);                                      \
    tcg_temp_free_i64(t1);                                      \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
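/*
 * xsxexpdp extracts the 11-bit biased exponent (bits 62:52 of the IEEE
 * double in the upper doubleword of VSR[XB]) into GPR[RT].
 */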
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xB(ctx->opcode));
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

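/*
 * xsiexpdp assembles a double from GPR operands: the sign and 52-bit
 * fraction come from rA (mask 0x800FFFFFFFFFFFFF) and the 11-bit
 * exponent from the low bits of rB, shifted into bits 62:52.
 */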
static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
    get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

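/*
 * xsxsigdp extracts the 52-bit fraction and materialises the implicit
 * integer bit (0x0010000000000000) unless the exponent is 0 (zero or
 * denormal) or all-ones (infinity or NaN), in which case that bit is 0.
 */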
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif

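/*
 * xviexpsp handles two single-precision values per doubleword: the mask
 * 0x807FFFFF807FFFFF keeps each sign and 23-bit fraction from xA, and
 * 0xFF000000FF selects the two 8-bit exponents from xB before they are
 * shifted up into bits 30:23 and 62:55.
 */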
static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

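/*
 * xvxexpsp/xvxexpdp extract the biased exponent of every element: the
 * SP variant shifts each 32-bit lane right by 23 and masks with
 * 0xFF000000FF, while the DP variant extracts bits 62:52 of each
 * doubleword.
 */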
static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

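/*
 * xvxsigdp applies the same significand extraction as xsxsigdp above to
 * both doublewords of VSR[XB].
 */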
static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL
