1/***                           VSX extension                               ***/
2
3static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high)
4{
5    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high));
6}
7
8static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high)
9{
10    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high));
11}
12
13static inline TCGv_ptr gen_vsr_ptr(int reg)
14{
15    TCGv_ptr r = tcg_temp_new_ptr();
16    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
17    return r;
18}
19
/*
 * VSX_LOAD_SCALAR -- generate an X-form (indexed) scalar load into the
 * high doubleword of VSR xT.  "operation" names a gen_qemu_* load
 * helper, which fixes the access size and extension behaviour.
 */
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsr(xT(ctx->opcode), t0, true);                   \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
/* lxsiwax sign-extends the loaded word; the "z" forms zero-extend. */
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)
46
47static void gen_lxvd2x(DisasContext *ctx)
48{
49    TCGv EA;
50    TCGv_i64 t0;
51    if (unlikely(!ctx->vsx_enabled)) {
52        gen_exception(ctx, POWERPC_EXCP_VSXU);
53        return;
54    }
55    t0 = tcg_temp_new_i64();
56    gen_set_access_type(ctx, ACCESS_INT);
57    EA = tcg_temp_new();
58    gen_addr_reg_index(ctx, EA);
59    gen_qemu_ld64_i64(ctx, t0, EA);
60    set_cpu_vsr(xT(ctx->opcode), t0, true);
61    tcg_gen_addi_tl(EA, EA, 8);
62    gen_qemu_ld64_i64(ctx, t0, EA);
63    set_cpu_vsr(xT(ctx->opcode), t0, false);
64    tcg_temp_free(EA);
65    tcg_temp_free_i64(t0);
66}
67
/*
 * lxvw4x: load four words from EA into VSR xT.
 * The LE path loads each doubleword little-endian and then swaps its
 * two 32-bit halves so the words land in the expected register lanes.
 */
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;    /* high doubleword of the target */
    TCGv_i64 xtl;    /* low doubleword of the target */
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* xth = t0 rotated by 32: swap the two words of the dword */
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        /* Big-endian: register element order equals memory order. */
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
108
109static void gen_lxvwsx(DisasContext *ctx)
110{
111    TCGv EA;
112    TCGv_i32 data;
113
114    if (xT(ctx->opcode) < 32) {
115        if (unlikely(!ctx->vsx_enabled)) {
116            gen_exception(ctx, POWERPC_EXCP_VSXU);
117            return;
118        }
119    } else {
120        if (unlikely(!ctx->altivec_enabled)) {
121            gen_exception(ctx, POWERPC_EXCP_VPU);
122            return;
123        }
124    }
125
126    gen_set_access_type(ctx, ACCESS_INT);
127    EA = tcg_temp_new();
128
129    gen_addr_reg_index(ctx, EA);
130
131    data = tcg_temp_new_i32();
132    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
133    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);
134
135    tcg_temp_free(EA);
136    tcg_temp_free_i32(data);
137}
138
139static void gen_lxvdsx(DisasContext *ctx)
140{
141    TCGv EA;
142    TCGv_i64 data;
143
144    if (unlikely(!ctx->vsx_enabled)) {
145        gen_exception(ctx, POWERPC_EXCP_VSXU);
146        return;
147    }
148
149    gen_set_access_type(ctx, ACCESS_INT);
150    EA = tcg_temp_new();
151
152    gen_addr_reg_index(ctx, EA);
153
154    data = tcg_temp_new_i64();
155    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UQ));
156    tcg_gen_gvec_dup_i64(MO_UQ, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);
157
158    tcg_temp_free(EA);
159    tcg_temp_free_i64(data);
160}
161
162static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
163                          TCGv_i64 inh, TCGv_i64 inl)
164{
165    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
166    TCGv_i64 t0 = tcg_temp_new_i64();
167    TCGv_i64 t1 = tcg_temp_new_i64();
168
169    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
170    tcg_gen_and_i64(t0, inh, mask);
171    tcg_gen_shli_i64(t0, t0, 8);
172    tcg_gen_shri_i64(t1, inh, 8);
173    tcg_gen_and_i64(t1, t1, mask);
174    tcg_gen_or_i64(outh, t0, t1);
175
176    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
177    tcg_gen_and_i64(t0, inl, mask);
178    tcg_gen_shli_i64(t0, t0, 8);
179    tcg_gen_shri_i64(t1, inl, 8);
180    tcg_gen_and_i64(t1, t1, mask);
181    tcg_gen_or_i64(outl, t0, t1);
182
183    tcg_temp_free_i64(t0);
184    tcg_temp_free_i64(t1);
185    tcg_temp_free_i64(mask);
186}
187
188static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
189                          TCGv_i64 inh, TCGv_i64 inl)
190{
191    TCGv_i64 hi = tcg_temp_new_i64();
192    TCGv_i64 lo = tcg_temp_new_i64();
193
194    tcg_gen_bswap64_i64(hi, inh);
195    tcg_gen_bswap64_i64(lo, inl);
196    tcg_gen_shri_i64(outh, hi, 32);
197    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
198    tcg_gen_shri_i64(outl, lo, 32);
199    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);
200
201    tcg_temp_free_i64(hi);
202    tcg_temp_free_i64(lo);
203}
204static void gen_lxvh8x(DisasContext *ctx)
205{
206    TCGv EA;
207    TCGv_i64 xth;
208    TCGv_i64 xtl;
209
210    if (unlikely(!ctx->vsx_enabled)) {
211        gen_exception(ctx, POWERPC_EXCP_VSXU);
212        return;
213    }
214    xth = tcg_temp_new_i64();
215    xtl = tcg_temp_new_i64();
216    gen_set_access_type(ctx, ACCESS_INT);
217
218    EA = tcg_temp_new();
219    gen_addr_reg_index(ctx, EA);
220    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
221    tcg_gen_addi_tl(EA, EA, 8);
222    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
223    if (ctx->le_mode) {
224        gen_bswap16x8(xth, xtl, xth, xtl);
225    }
226    set_cpu_vsr(xT(ctx->opcode), xth, true);
227    set_cpu_vsr(xT(ctx->opcode), xtl, false);
228    tcg_temp_free(EA);
229    tcg_temp_free_i64(xth);
230    tcg_temp_free_i64(xtl);
231}
232
233static void gen_lxvb16x(DisasContext *ctx)
234{
235    TCGv EA;
236    TCGv_i64 xth;
237    TCGv_i64 xtl;
238
239    if (unlikely(!ctx->vsx_enabled)) {
240        gen_exception(ctx, POWERPC_EXCP_VSXU);
241        return;
242    }
243    xth = tcg_temp_new_i64();
244    xtl = tcg_temp_new_i64();
245    gen_set_access_type(ctx, ACCESS_INT);
246    EA = tcg_temp_new();
247    gen_addr_reg_index(ctx, EA);
248    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
249    tcg_gen_addi_tl(EA, EA, 8);
250    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
251    set_cpu_vsr(xT(ctx->opcode), xth, true);
252    set_cpu_vsr(xT(ctx->opcode), xtl, false);
253    tcg_temp_free(EA);
254    tcg_temp_free_i64(xth);
255    tcg_temp_free_i64(xtl);
256}
257
258#ifdef TARGET_PPC64
/*
 * VSX_VECTOR_LOAD_STORE_LENGTH -- lxvl/lxvll/stxvl/stxvll: vector
 * load/store with a run-time byte length taken from rB, so the work is
 * delegated to a per-instruction helper.  VSRs 0-31 are gated by VSX,
 * 32-63 by Altivec.
 */
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    /* EA comes from rA alone; rB carries the length operand. */   \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
289#endif
290
/*
 * VSX_LOAD_SCALAR_DS -- DS-form scalar load (lxsd/lxssp) into the high
 * doubleword of VSR rD+32.  These instructions address the upper VSR
 * bank only, hence the Altivec (not VSX) availability check.
 */
#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);                 \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)
314
/*
 * VSX_STORE_SCALAR -- X-form (indexed) scalar store: write the high
 * doubleword of VSR xS to memory via the given gen_qemu_* helper,
 * which fixes the access size.
 */
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsr(t0, xS(ctx->opcode), true);                   \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)
340
341static void gen_stxvd2x(DisasContext *ctx)
342{
343    TCGv EA;
344    TCGv_i64 t0;
345    if (unlikely(!ctx->vsx_enabled)) {
346        gen_exception(ctx, POWERPC_EXCP_VSXU);
347        return;
348    }
349    t0 = tcg_temp_new_i64();
350    gen_set_access_type(ctx, ACCESS_INT);
351    EA = tcg_temp_new();
352    gen_addr_reg_index(ctx, EA);
353    get_cpu_vsr(t0, xS(ctx->opcode), true);
354    gen_qemu_st64_i64(ctx, t0, EA);
355    tcg_gen_addi_tl(EA, EA, 8);
356    get_cpu_vsr(t0, xS(ctx->opcode), false);
357    gen_qemu_st64_i64(ctx, t0, EA);
358    tcg_temp_free(EA);
359    tcg_temp_free_i64(t0);
360}
361
/*
 * stxvw4x: store the four words of VSR xS.
 * The LE path swaps the two words inside each doubleword and then
 * stores little-endian, mirroring the load-side fix-up in gen_lxvw4x.
 */
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;    /* high doubleword of the source */
    TCGv_i64 xsl;    /* low doubleword of the source */

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* t1 = xsh rotated by 32: swap the two words of the dword */
        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        /* Big-endian: register element order equals memory order. */
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
401
402static void gen_stxvh8x(DisasContext *ctx)
403{
404    TCGv EA;
405    TCGv_i64 xsh;
406    TCGv_i64 xsl;
407
408    if (unlikely(!ctx->vsx_enabled)) {
409        gen_exception(ctx, POWERPC_EXCP_VSXU);
410        return;
411    }
412    xsh = tcg_temp_new_i64();
413    xsl = tcg_temp_new_i64();
414    get_cpu_vsr(xsh, xS(ctx->opcode), true);
415    get_cpu_vsr(xsl, xS(ctx->opcode), false);
416    gen_set_access_type(ctx, ACCESS_INT);
417    EA = tcg_temp_new();
418    gen_addr_reg_index(ctx, EA);
419    if (ctx->le_mode) {
420        TCGv_i64 outh = tcg_temp_new_i64();
421        TCGv_i64 outl = tcg_temp_new_i64();
422
423        gen_bswap16x8(outh, outl, xsh, xsl);
424        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEUQ);
425        tcg_gen_addi_tl(EA, EA, 8);
426        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEUQ);
427        tcg_temp_free_i64(outh);
428        tcg_temp_free_i64(outl);
429    } else {
430        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
431        tcg_gen_addi_tl(EA, EA, 8);
432        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
433    }
434    tcg_temp_free(EA);
435    tcg_temp_free_i64(xsh);
436    tcg_temp_free_i64(xsl);
437}
438
439static void gen_stxvb16x(DisasContext *ctx)
440{
441    TCGv EA;
442    TCGv_i64 xsh;
443    TCGv_i64 xsl;
444
445    if (unlikely(!ctx->vsx_enabled)) {
446        gen_exception(ctx, POWERPC_EXCP_VSXU);
447        return;
448    }
449    xsh = tcg_temp_new_i64();
450    xsl = tcg_temp_new_i64();
451    get_cpu_vsr(xsh, xS(ctx->opcode), true);
452    get_cpu_vsr(xsl, xS(ctx->opcode), false);
453    gen_set_access_type(ctx, ACCESS_INT);
454    EA = tcg_temp_new();
455    gen_addr_reg_index(ctx, EA);
456    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
457    tcg_gen_addi_tl(EA, EA, 8);
458    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
459    tcg_temp_free(EA);
460    tcg_temp_free_i64(xsh);
461    tcg_temp_free_i64(xsl);
462}
463
/*
 * VSX_STORE_SCALAR_DS -- DS-form scalar store (stxsd/stxssp) from the
 * high doubleword of VSR rD+32.  Despite its name, "xth" holds the
 * SOURCE value here.  Upper VSR bank only, hence the Altivec check.
 */
#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xth, rD(ctx->opcode) + 32, true);                 \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)
487
488static void gen_mfvsrwz(DisasContext *ctx)
489{
490    if (xS(ctx->opcode) < 32) {
491        if (unlikely(!ctx->fpu_enabled)) {
492            gen_exception(ctx, POWERPC_EXCP_FPU);
493            return;
494        }
495    } else {
496        if (unlikely(!ctx->altivec_enabled)) {
497            gen_exception(ctx, POWERPC_EXCP_VPU);
498            return;
499        }
500    }
501    TCGv_i64 tmp = tcg_temp_new_i64();
502    TCGv_i64 xsh = tcg_temp_new_i64();
503    get_cpu_vsr(xsh, xS(ctx->opcode), true);
504    tcg_gen_ext32u_i64(tmp, xsh);
505    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
506    tcg_temp_free_i64(tmp);
507    tcg_temp_free_i64(xsh);
508}
509
510static void gen_mtvsrwa(DisasContext *ctx)
511{
512    if (xS(ctx->opcode) < 32) {
513        if (unlikely(!ctx->fpu_enabled)) {
514            gen_exception(ctx, POWERPC_EXCP_FPU);
515            return;
516        }
517    } else {
518        if (unlikely(!ctx->altivec_enabled)) {
519            gen_exception(ctx, POWERPC_EXCP_VPU);
520            return;
521        }
522    }
523    TCGv_i64 tmp = tcg_temp_new_i64();
524    TCGv_i64 xsh = tcg_temp_new_i64();
525    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
526    tcg_gen_ext32s_i64(xsh, tmp);
527    set_cpu_vsr(xT(ctx->opcode), xsh, true);
528    tcg_temp_free_i64(tmp);
529    tcg_temp_free_i64(xsh);
530}
531
532static void gen_mtvsrwz(DisasContext *ctx)
533{
534    if (xS(ctx->opcode) < 32) {
535        if (unlikely(!ctx->fpu_enabled)) {
536            gen_exception(ctx, POWERPC_EXCP_FPU);
537            return;
538        }
539    } else {
540        if (unlikely(!ctx->altivec_enabled)) {
541            gen_exception(ctx, POWERPC_EXCP_VPU);
542            return;
543        }
544    }
545    TCGv_i64 tmp = tcg_temp_new_i64();
546    TCGv_i64 xsh = tcg_temp_new_i64();
547    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
548    tcg_gen_ext32u_i64(xsh, tmp);
549    set_cpu_vsr(xT(ctx->opcode), xsh, true);
550    tcg_temp_free_i64(tmp);
551    tcg_temp_free_i64(xsh);
552}
553
554#if defined(TARGET_PPC64)
555static void gen_mfvsrd(DisasContext *ctx)
556{
557    TCGv_i64 t0;
558    if (xS(ctx->opcode) < 32) {
559        if (unlikely(!ctx->fpu_enabled)) {
560            gen_exception(ctx, POWERPC_EXCP_FPU);
561            return;
562        }
563    } else {
564        if (unlikely(!ctx->altivec_enabled)) {
565            gen_exception(ctx, POWERPC_EXCP_VPU);
566            return;
567        }
568    }
569    t0 = tcg_temp_new_i64();
570    get_cpu_vsr(t0, xS(ctx->opcode), true);
571    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
572    tcg_temp_free_i64(t0);
573}
574
575static void gen_mtvsrd(DisasContext *ctx)
576{
577    TCGv_i64 t0;
578    if (xS(ctx->opcode) < 32) {
579        if (unlikely(!ctx->fpu_enabled)) {
580            gen_exception(ctx, POWERPC_EXCP_FPU);
581            return;
582        }
583    } else {
584        if (unlikely(!ctx->altivec_enabled)) {
585            gen_exception(ctx, POWERPC_EXCP_VPU);
586            return;
587        }
588    }
589    t0 = tcg_temp_new_i64();
590    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
591    set_cpu_vsr(xT(ctx->opcode), t0, true);
592    tcg_temp_free_i64(t0);
593}
594
595static void gen_mfvsrld(DisasContext *ctx)
596{
597    TCGv_i64 t0;
598    if (xS(ctx->opcode) < 32) {
599        if (unlikely(!ctx->vsx_enabled)) {
600            gen_exception(ctx, POWERPC_EXCP_VSXU);
601            return;
602        }
603    } else {
604        if (unlikely(!ctx->altivec_enabled)) {
605            gen_exception(ctx, POWERPC_EXCP_VPU);
606            return;
607        }
608    }
609    t0 = tcg_temp_new_i64();
610    get_cpu_vsr(t0, xS(ctx->opcode), false);
611    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
612    tcg_temp_free_i64(t0);
613}
614
615static void gen_mtvsrdd(DisasContext *ctx)
616{
617    TCGv_i64 t0;
618    if (xT(ctx->opcode) < 32) {
619        if (unlikely(!ctx->vsx_enabled)) {
620            gen_exception(ctx, POWERPC_EXCP_VSXU);
621            return;
622        }
623    } else {
624        if (unlikely(!ctx->altivec_enabled)) {
625            gen_exception(ctx, POWERPC_EXCP_VPU);
626            return;
627        }
628    }
629
630    t0 = tcg_temp_new_i64();
631    if (!rA(ctx->opcode)) {
632        tcg_gen_movi_i64(t0, 0);
633    } else {
634        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
635    }
636    set_cpu_vsr(xT(ctx->opcode), t0, true);
637
638    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
639    set_cpu_vsr(xT(ctx->opcode), t0, false);
640    tcg_temp_free_i64(t0);
641}
642
643static void gen_mtvsrws(DisasContext *ctx)
644{
645    TCGv_i64 t0;
646    if (xT(ctx->opcode) < 32) {
647        if (unlikely(!ctx->vsx_enabled)) {
648            gen_exception(ctx, POWERPC_EXCP_VSXU);
649            return;
650        }
651    } else {
652        if (unlikely(!ctx->altivec_enabled)) {
653            gen_exception(ctx, POWERPC_EXCP_VPU);
654            return;
655        }
656    }
657
658    t0 = tcg_temp_new_i64();
659    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
660                        cpu_gpr[rA(ctx->opcode)], 32, 32);
661    set_cpu_vsr(xT(ctx->opcode), t0, false);
662    set_cpu_vsr(xT(ctx->opcode), t0, true);
663    tcg_temp_free_i64(t0);
664}
665
666#endif
667
668static void gen_xxpermdi(DisasContext *ctx)
669{
670    TCGv_i64 xh, xl;
671
672    if (unlikely(!ctx->vsx_enabled)) {
673        gen_exception(ctx, POWERPC_EXCP_VSXU);
674        return;
675    }
676
677    xh = tcg_temp_new_i64();
678    xl = tcg_temp_new_i64();
679
680    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
681                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
682        get_cpu_vsr(xh, xA(ctx->opcode), (DM(ctx->opcode) & 2) == 0);
683        get_cpu_vsr(xl, xB(ctx->opcode), (DM(ctx->opcode) & 1) == 0);
684
685        set_cpu_vsr(xT(ctx->opcode), xh, true);
686        set_cpu_vsr(xT(ctx->opcode), xl, false);
687    } else {
688        if ((DM(ctx->opcode) & 2) == 0) {
689            get_cpu_vsr(xh, xA(ctx->opcode), true);
690            set_cpu_vsr(xT(ctx->opcode), xh, true);
691        } else {
692            get_cpu_vsr(xh, xA(ctx->opcode), false);
693            set_cpu_vsr(xT(ctx->opcode), xh, true);
694        }
695        if ((DM(ctx->opcode) & 1) == 0) {
696            get_cpu_vsr(xl, xB(ctx->opcode), true);
697            set_cpu_vsr(xT(ctx->opcode), xl, false);
698        } else {
699            get_cpu_vsr(xl, xB(ctx->opcode), false);
700            set_cpu_vsr(xT(ctx->opcode), xl, false);
701        }
702    }
703    tcg_temp_free_i64(xh);
704    tcg_temp_free_i64(xl);
705}
706
/* Operation selectors shared by the VSX scalar/vector move macros. */
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
/* Sign-bit masks: one per doubleword (DP), one per 32-bit word (SP). */
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull
713
/*
 * VSX_SCALAR_MOVE -- scalar double-precision sign-bit manipulation:
 *   OP_ABS    clear the sign bit          (xsabsdp)
 *   OP_NABS   set the sign bit            (xsnabsdp)
 *   OP_NEG    flip the sign bit           (xsnegdp)
 *   OP_CPSGN  copy the sign bit from xA   (xscpsgndp)
 * The result goes to the high doubleword of xT; the low doubleword is
 * written with zero.
 */
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xb, xB(ctx->opcode), true);                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                get_cpu_vsr(xa, xA(ctx->opcode), true);           \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xb, true);                   \
        set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
759
/*
 * VSX_SCALAR_MOVE_QP -- quad-precision sign-bit manipulation on the
 * upper VSR bank (registers rD/rA/rB + 32).  The sign bit of a 128-bit
 * value is the MSB of the high doubleword, so SGN_MASK_DP is reused
 * here and only xbh is modified; xbl passes through unchanged.
 */
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xbh, xb, true);                                   \
    get_cpu_vsr(xbl, xb, false);                                  \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsr(tmp, xa, true);                               \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsr(xt, xbh, true);                                   \
    set_cpu_vsr(xt, xbl, false);                                  \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)
811
/*
 * Vector sign-bit manipulation over both 64-bit halves of VSR[XB]:
 * OP_ABS clears the sign bits, OP_NABS sets them, OP_NEG flips them,
 * OP_CPSGN copies the sign bits from VSR[XA].  The result is written
 * to both halves of VSR[XT].  `op` is a compile-time constant, so the
 * switch is resolved per instantiation.
 */
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void gen_##name(DisasContext *ctx)                        \
{                                                                \
    TCGv_i64 bh, bl, mask;                                       \
    if (unlikely(!ctx->vsx_enabled)) {                           \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
        return;                                                  \
    }                                                            \
    bh = tcg_temp_new_i64();                                     \
    bl = tcg_temp_new_i64();                                     \
    mask = tcg_temp_new_i64();                                   \
    get_cpu_vsr(bh, xB(ctx->opcode), true);                      \
    get_cpu_vsr(bl, xB(ctx->opcode), false);                     \
    tcg_gen_movi_i64(mask, sgn_mask);                            \
    switch (op) {                                                \
    case OP_ABS:                                                 \
        tcg_gen_andc_i64(bh, bh, mask);                          \
        tcg_gen_andc_i64(bl, bl, mask);                          \
        break;                                                   \
    case OP_NABS:                                                \
        tcg_gen_or_i64(bh, bh, mask);                            \
        tcg_gen_or_i64(bl, bl, mask);                            \
        break;                                                   \
    case OP_NEG:                                                 \
        tcg_gen_xor_i64(bh, bh, mask);                           \
        tcg_gen_xor_i64(bl, bl, mask);                           \
        break;                                                   \
    case OP_CPSGN: {                                             \
        /* sign from A, magnitude from B */                      \
        TCGv_i64 ah = tcg_temp_new_i64();                        \
        TCGv_i64 al = tcg_temp_new_i64();                        \
        get_cpu_vsr(ah, xA(ctx->opcode), true);                  \
        get_cpu_vsr(al, xA(ctx->opcode), false);                 \
        tcg_gen_and_i64(ah, ah, mask);                           \
        tcg_gen_and_i64(al, al, mask);                           \
        tcg_gen_andc_i64(bh, bh, mask);                          \
        tcg_gen_andc_i64(bl, bl, mask);                          \
        tcg_gen_or_i64(bh, bh, ah);                              \
        tcg_gen_or_i64(bl, bl, al);                              \
        tcg_temp_free_i64(ah);                                   \
        tcg_temp_free_i64(al);                                   \
        break;                                                   \
    }                                                            \
    }                                                            \
    set_cpu_vsr(xT(ctx->opcode), bh, true);                      \
    set_cpu_vsr(xT(ctx->opcode), bl, false);                     \
    tcg_temp_free_i64(bh);                                       \
    tcg_temp_free_i64(bl);                                       \
    tcg_temp_free_i64(mask);                                     \
}
864
/* Vector sign-bit operations, double- and single-precision element masks. */
VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
873
/*
 * Vector compare: the helper computes the per-element result into xT and
 * returns a CR-style summary value.  When opcode bit 21 (IBM numbering)
 * is set the summary is written to cpu_crf[6]; otherwise it is discarded.
 */
#define VSX_CMP(name, op1, op2, inval, type)                                  \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 discard;                                                         \
    TCGv_ptr t, a, b;                                                         \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    t = gen_vsr_ptr(xT(ctx->opcode));                                         \
    a = gen_vsr_ptr(xA(ctx->opcode));                                         \
    b = gen_vsr_ptr(xB(ctx->opcode));                                         \
    if (!((ctx->opcode >> (31 - 21)) & 1)) {                                  \
        discard = tcg_temp_new_i32();                                         \
        gen_helper_##name(discard, cpu_env, t, a, b);                         \
        tcg_temp_free_i32(discard);                                           \
    } else {                                                                  \
        gen_helper_##name(cpu_crf[6], cpu_env, t, a, b);                      \
    }                                                                         \
    gen_helper_float_check_status(cpu_env);                                   \
    tcg_temp_free_ptr(t);                                                     \
    tcg_temp_free_ptr(a);                                                     \
    tcg_temp_free_ptr(b);                                                     \
}
898
/* Vector floating-point compares (dp/sp); record forms update cpu_crf[6]. */
VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
907
908static bool trans_XSCVQPDP(DisasContext *ctx, arg_X_tb_rc *a)
909{
910    TCGv_i32 ro;
911    TCGv_ptr xt, xb;
912
913    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
914    REQUIRE_VSX(ctx);
915
916    ro = tcg_const_i32(a->rc);
917
918    xt = gen_avr_ptr(a->rt);
919    xb = gen_avr_ptr(a->rb);
920    gen_helper_XSCVQPDP(cpu_env, ro, xt, xb);
921    tcg_temp_free_i32(ro);
922    tcg_temp_free_ptr(xt);
923    tcg_temp_free_ptr(xb);
924
925    return true;
926}
927
/*
 * Helper call that receives only the raw opcode; the helper decodes
 * the register fields itself.
 */
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 raw_opc;                                                         \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    raw_opc = tcg_const_i32(ctx->opcode);                                     \
    gen_helper_##name(cpu_env, raw_opc);                                      \
    tcg_temp_free_i32(raw_opc);                                               \
}
940
/* Helper call with pointers to VSR[XT], VSR[XA] and VSR[XB]. */
#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr t, a, b;                                                         \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    t = gen_vsr_ptr(xT(ctx->opcode));                                         \
    a = gen_vsr_ptr(xA(ctx->opcode));                                         \
    b = gen_vsr_ptr(xB(ctx->opcode));                                         \
    gen_helper_##name(cpu_env, t, a, b);                                      \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_ptr(a);                                                     \
    tcg_temp_free_ptr(t);                                                     \
}
957
/* Helper call with pointers to VSR[XT] and VSR[XB]. */
#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr t, b;                                                            \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    t = gen_vsr_ptr(xT(ctx->opcode));                                         \
    b = gen_vsr_ptr(xB(ctx->opcode));                                         \
    gen_helper_##name(cpu_env, t, b);                                         \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_ptr(t);                                                     \
}
972
/*
 * Helper call with the raw opcode plus pointers to VSR[XA] and VSR[XB];
 * no xT pointer is passed (the helper takes the opcode instead).
 */
#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 raw_opc;                                                         \
    TCGv_ptr a, b;                                                            \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    raw_opc = tcg_const_i32(ctx->opcode);                                     \
    a = gen_vsr_ptr(xA(ctx->opcode));                                         \
    b = gen_vsr_ptr(xB(ctx->opcode));                                         \
    gen_helper_##name(cpu_env, raw_opc, a, b);                                \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_ptr(a);                                                     \
    tcg_temp_free_i32(raw_opc);                                               \
}
990
/* Helper call with the raw opcode plus a pointer to VSR[XB] only. */
#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 raw_opc;                                                         \
    TCGv_ptr b;                                                               \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    raw_opc = tcg_const_i32(ctx->opcode);                                     \
    b = gen_vsr_ptr(xB(ctx->opcode));                                         \
    gen_helper_##name(cpu_env, raw_opc, b);                                   \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_i32(raw_opc);                                               \
}
1006
/*
 * Three-operand helper for instructions whose registers live in the
 * upper half of the VSR file (rD/rA/rB + 32).
 */
#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 raw_opc;                                                         \
    TCGv_ptr t, a, b;                                                         \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    raw_opc = tcg_const_i32(ctx->opcode);                                     \
    t = gen_vsr_ptr(rD(ctx->opcode) + 32);                                    \
    a = gen_vsr_ptr(rA(ctx->opcode) + 32);                                    \
    b = gen_vsr_ptr(rB(ctx->opcode) + 32);                                    \
    gen_helper_##name(cpu_env, raw_opc, t, a, b);                             \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_ptr(a);                                                     \
    tcg_temp_free_ptr(t);                                                     \
    tcg_temp_free_i32(raw_opc);                                               \
}
1026
/*
 * Two-operand helper for instructions whose registers live in the
 * upper half of the VSR file (rD/rB + 32).
 */
#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 raw_opc;                                                         \
    TCGv_ptr t, b;                                                            \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    raw_opc = tcg_const_i32(ctx->opcode);                                     \
    t = gen_vsr_ptr(rD(ctx->opcode) + 32);                                    \
    b = gen_vsr_ptr(rB(ctx->opcode) + 32);                                    \
    gen_helper_##name(cpu_env, raw_opc, t, b);                                \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_ptr(t);                                                     \
    tcg_temp_free_i32(raw_opc);                                               \
}
1044
/*
 * A/B-only helper for instructions whose registers live in the upper
 * half of the VSR file (rA/rB + 32); no target pointer is passed.
 */
#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 raw_opc;                                                         \
    TCGv_ptr a, b;                                                            \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    raw_opc = tcg_const_i32(ctx->opcode);                                     \
    a = gen_vsr_ptr(rA(ctx->opcode) + 32);                                    \
    b = gen_vsr_ptr(rB(ctx->opcode) + 32);                                    \
    gen_helper_##name(cpu_env, raw_opc, a, b);                                \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_ptr(a);                                                     \
    tcg_temp_free_i32(raw_opc);                                               \
}
1062
/*
 * Scalar helper operating on 64-bit values: reads the high doubleword
 * of VSR[XB], calls the helper, writes the result to the high doubleword
 * of VSR[XT] and zeroes the low doubleword.
 */
#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 src, dst;                                        \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    src = tcg_temp_new_i64();                                 \
    dst = tcg_temp_new_i64();                                 \
    get_cpu_vsr(src, xB(ctx->opcode), true);                  \
    gen_helper_##name(dst, cpu_env, src);                     \
    set_cpu_vsr(xT(ctx->opcode), dst, true);                  \
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
    tcg_temp_free_i64(src);                                   \
    tcg_temp_free_i64(dst);                                   \
}
1081
/* Scalar double/quad-precision arithmetic, compare, convert and round. */
GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

/* Vector double-precision arithmetic, compare, convert and round. */
GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

/* Vector single-precision arithmetic, compare, convert and round. */
GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)
1205
/*
 * Fused multiply-add family.  Opcode bit 25 selects which of xT/xB is
 * the second multiplicand and which is the addend; the helper receives
 * (xt, xa, multiplicand, addend).
 */
#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)             \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr t, a, m, addend;                                                 \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    t = gen_vsr_ptr(xT(ctx->opcode));                                         \
    a = gen_vsr_ptr(xA(ctx->opcode));                                         \
    if (ctx->opcode & PPC_BIT32(25)) {                                        \
        /* A x T + B */                                                       \
        m = gen_vsr_ptr(xT(ctx->opcode));                                     \
        addend = gen_vsr_ptr(xB(ctx->opcode));                                \
    } else {                                                                  \
        /* A x B + T */                                                       \
        m = gen_vsr_ptr(xB(ctx->opcode));                                     \
        addend = gen_vsr_ptr(xT(ctx->opcode));                                \
    }                                                                         \
    gen_helper_##name(cpu_env, t, a, m, addend);                              \
    tcg_temp_free_ptr(addend);                                                \
    tcg_temp_free_ptr(m);                                                     \
    tcg_temp_free_ptr(a);                                                     \
    tcg_temp_free_ptr(t);                                                     \
}
1235
/* Scalar and vector fused multiply-add/subtract (operand order from bit 25). */
GEN_VSX_HELPER_VSX_MADD(xsmadddp, 0x04, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmsubdp, 0x04, 0x06, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmadddp, 0x04, 0x14, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmsubdp, 0x04, 0x16, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmaddsp, 0x04, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsmsubsp, 0x04, 0x02, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmaddsp, 0x04, 0x10, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmsubsp, 0x04, 0x12, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)
1252
1253static void gen_xxbrd(DisasContext *ctx)
1254{
1255    TCGv_i64 xth;
1256    TCGv_i64 xtl;
1257    TCGv_i64 xbh;
1258    TCGv_i64 xbl;
1259
1260    if (unlikely(!ctx->vsx_enabled)) {
1261        gen_exception(ctx, POWERPC_EXCP_VSXU);
1262        return;
1263    }
1264    xth = tcg_temp_new_i64();
1265    xtl = tcg_temp_new_i64();
1266    xbh = tcg_temp_new_i64();
1267    xbl = tcg_temp_new_i64();
1268    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1269    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1270
1271    tcg_gen_bswap64_i64(xth, xbh);
1272    tcg_gen_bswap64_i64(xtl, xbl);
1273    set_cpu_vsr(xT(ctx->opcode), xth, true);
1274    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1275
1276    tcg_temp_free_i64(xth);
1277    tcg_temp_free_i64(xtl);
1278    tcg_temp_free_i64(xbh);
1279    tcg_temp_free_i64(xbl);
1280}
1281
1282static void gen_xxbrh(DisasContext *ctx)
1283{
1284    TCGv_i64 xth;
1285    TCGv_i64 xtl;
1286    TCGv_i64 xbh;
1287    TCGv_i64 xbl;
1288
1289    if (unlikely(!ctx->vsx_enabled)) {
1290        gen_exception(ctx, POWERPC_EXCP_VSXU);
1291        return;
1292    }
1293    xth = tcg_temp_new_i64();
1294    xtl = tcg_temp_new_i64();
1295    xbh = tcg_temp_new_i64();
1296    xbl = tcg_temp_new_i64();
1297    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1298    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1299
1300    gen_bswap16x8(xth, xtl, xbh, xbl);
1301    set_cpu_vsr(xT(ctx->opcode), xth, true);
1302    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1303
1304    tcg_temp_free_i64(xth);
1305    tcg_temp_free_i64(xtl);
1306    tcg_temp_free_i64(xbh);
1307    tcg_temp_free_i64(xbl);
1308}
1309
1310static void gen_xxbrq(DisasContext *ctx)
1311{
1312    TCGv_i64 xth;
1313    TCGv_i64 xtl;
1314    TCGv_i64 xbh;
1315    TCGv_i64 xbl;
1316    TCGv_i64 t0;
1317
1318    if (unlikely(!ctx->vsx_enabled)) {
1319        gen_exception(ctx, POWERPC_EXCP_VSXU);
1320        return;
1321    }
1322    xth = tcg_temp_new_i64();
1323    xtl = tcg_temp_new_i64();
1324    xbh = tcg_temp_new_i64();
1325    xbl = tcg_temp_new_i64();
1326    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1327    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1328    t0 = tcg_temp_new_i64();
1329
1330    tcg_gen_bswap64_i64(t0, xbl);
1331    tcg_gen_bswap64_i64(xtl, xbh);
1332    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1333    tcg_gen_mov_i64(xth, t0);
1334    set_cpu_vsr(xT(ctx->opcode), xth, true);
1335
1336    tcg_temp_free_i64(t0);
1337    tcg_temp_free_i64(xth);
1338    tcg_temp_free_i64(xtl);
1339    tcg_temp_free_i64(xbh);
1340    tcg_temp_free_i64(xbl);
1341}
1342
1343static void gen_xxbrw(DisasContext *ctx)
1344{
1345    TCGv_i64 xth;
1346    TCGv_i64 xtl;
1347    TCGv_i64 xbh;
1348    TCGv_i64 xbl;
1349
1350    if (unlikely(!ctx->vsx_enabled)) {
1351        gen_exception(ctx, POWERPC_EXCP_VSXU);
1352        return;
1353    }
1354    xth = tcg_temp_new_i64();
1355    xtl = tcg_temp_new_i64();
1356    xbh = tcg_temp_new_i64();
1357    xbl = tcg_temp_new_i64();
1358    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1359    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1360
1361    gen_bswap32x4(xth, xtl, xbh, xbl);
1362    set_cpu_vsr(xT(ctx->opcode), xth, true);
1363    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1364
1365    tcg_temp_free_i64(xth);
1366    tcg_temp_free_i64(xtl);
1367    tcg_temp_free_i64(xbh);
1368    tcg_temp_free_i64(xbl);
1369}
1370
/*
 * 128-bit VSX logical operations, expanded inline with the generic
 * vector (gvec) API over the full 16-byte VSR at 64-bit element size.
 */
#define VSX_LOGICAL(name, vece, tcg_op)                              \
static void glue(gen_, name)(DisasContext *ctx)                      \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),               \
               vsr_full_offset(xA(ctx->opcode)),                     \
               vsr_full_offset(xB(ctx->opcode)), 16, 16);            \
    }

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
1391
/*
 * VSX_XXMRG - merge words from VSR[XA] and VSR[XB].
 *
 * With high=1 (xxmrghw) both source doublewords come from the high half
 * of each VSR, with high=0 (xxmrglw) from the low half.  The result
 * interleaves the two 32-bit words of that doubleword:
 *   XT.hi = A.upper_word : B.upper_word
 *   XT.lo = A.lower_word : B.lower_word
 * Note a0/a1 (and b0/b1) intentionally load the *same* doubleword:
 * a0/b0 are shifted right to expose the upper word for the first
 * deposit, while a1/b1 keep the lower word for the second.
 */
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext *ctx)             \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1, tmp;                       \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        tmp = tcg_temp_new_i64();                           \
        get_cpu_vsr(a0, xA(ctx->opcode), high);             \
        get_cpu_vsr(a1, xA(ctx->opcode), high);             \
        get_cpu_vsr(b0, xB(ctx->opcode), high);             \
        get_cpu_vsr(b1, xB(ctx->opcode), high);             \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);           \
        set_cpu_vsr(xT(ctx->opcode), tmp, true);            \
        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);           \
        set_cpu_vsr(xT(ctx->opcode), tmp, false);           \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
        tcg_temp_free_i64(tmp);                             \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)
1424
1425static void gen_xxsel(DisasContext *ctx)
1426{
1427    int rt = xT(ctx->opcode);
1428    int ra = xA(ctx->opcode);
1429    int rb = xB(ctx->opcode);
1430    int rc = xC(ctx->opcode);
1431
1432    if (unlikely(!ctx->vsx_enabled)) {
1433        gen_exception(ctx, POWERPC_EXCP_VSXU);
1434        return;
1435    }
1436    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(rt), vsr_full_offset(rc),
1437                        vsr_full_offset(rb), vsr_full_offset(ra), 16, 16);
1438}
1439
static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2 *a)
{
    /* Splat word UIM of VSR[XB] into all four words of VSR[XT]. */
    int tofs, bofs;

    REQUIRE_VSX(ctx);

    tofs = vsr_full_offset(a->xt);
    bofs = vsr_full_offset(a->xb);
    /* Byte offset of the selected 32-bit element within the VSR. */
    bofs += a->uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    /* Little-endian host: flip the word's address within the 16 bytes. */
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
    return true;
}
1456
/* Replicate the low-order byte of x into all eight bytes of a uint64_t. */
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
1458
static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a)
{
    /*
     * Splat the 8-bit immediate into all 16 bytes of VSR[XT].  Targets
     * in the lower half of the register file require VSX; the upper
     * half (overlapping the Vector registers) only requires VMX.
     */
    if (a->xt < 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }
    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm);
    return true;
}
1469
static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a)
{
    /* ISA v3.1: splat the 32-bit immediate into all four words of VSR[XT]. */
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si);

    return true;
}
1479
static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a)
{
    /*
     * ISA v3.1: widen the 32-bit immediate to double-precision format
     * via helper_todouble() and splat it into both doublewords of
     * VSR[XT].
     */
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16,
                         helper_todouble(a->si));
    return true;
}
1489
static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a)
{
    TCGv_i32 imm;

    /*
     * ISA v3.1: write the 32-bit immediate into word IX of each
     * doubleword of VSR[XT] (i.e. words IX and IX+2); the other two
     * words are left untouched.
     */
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    imm = tcg_constant_i32(a->si);

    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix)));

    return true;
}
1506
static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a)
{
    /*
     * Load VSX Vector Special Value Quadword (ISA v3.1): write one of a
     * fixed set of quad-precision constants, selected by UIM, into
     * VSR[XT].  Every defined constant has a zero low doubleword, so a
     * zero table entry doubles as the "Unspecified" marker and is
     * treated as an invalid instruction form.
     */
    static const uint64_t values[32] = {
        0, /* Unspecified */
        0x3FFF000000000000llu, /* QP +1.0 */
        0x4000000000000000llu, /* QP +2.0 */
        0x4000800000000000llu, /* QP +3.0 */
        0x4001000000000000llu, /* QP +4.0 */
        0x4001400000000000llu, /* QP +5.0 */
        0x4001800000000000llu, /* QP +6.0 */
        0x4001C00000000000llu, /* QP +7.0 */
        0x7FFF000000000000llu, /* QP +Inf */
        0x7FFF800000000000llu, /* QP dQNaN */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0x8000000000000000llu, /* QP -0.0 */
        0xBFFF000000000000llu, /* QP -1.0 */
        0xC000000000000000llu, /* QP -2.0 */
        0xC000800000000000llu, /* QP -3.0 */
        0xC001000000000000llu, /* QP -4.0 */
        0xC001400000000000llu, /* QP -5.0 */
        0xC001800000000000llu, /* QP -6.0 */
        0xC001C00000000000llu, /* QP -7.0 */
        0xFFFF000000000000llu, /* QP -Inf */
    };

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (values[a->uim]) {
        /* The constant occupies only the high doubleword. */
        set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
        set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true);
    } else {
        gen_invalid(ctx);
    }

    return true;
}
1549
static void gen_xxsldwi(DisasContext *ctx)
{
    /*
     * xxsldwi: shift left double by word immediate.  Conceptually
     * concatenate VSR[XA]:VSR[XB] into a 256-bit value, shift it left
     * by SHW 32-bit words, and keep the high 128 bits in VSR[XT].
     * Each SHW value is open-coded with 64-bit shift/or sequences.
     */
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            /* No shift: XT = XA. */
            get_cpu_vsr(xth, xA(ctx->opcode), true);
            get_cpu_vsr(xtl, xA(ctx->opcode), false);
            break;
        }
        case 1: {
            /* Shift by one word: each half combines two source dwords. */
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsr(xth, xA(ctx->opcode), true);
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsr(t0, xA(ctx->opcode), false);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsr(xtl, xA(ctx->opcode), false);
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), true);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            /* Shift by two words: XT = XA.low : XB.high. */
            get_cpu_vsr(xth, xA(ctx->opcode), false);
            get_cpu_vsr(xtl, xB(ctx->opcode), true);
            break;
        }
        case 3: {
            /* Shift by three words. */
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsr(xth, xA(ctx->opcode), false);
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), true);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsr(xtl, xB(ctx->opcode), true);
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), false);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
1609
1610#define VSX_EXTRACT_INSERT(name)                                \
1611static void gen_##name(DisasContext *ctx)                       \
1612{                                                               \
1613    TCGv_ptr xt, xb;                                            \
1614    TCGv_i32 t0;                                                \
1615    TCGv_i64 t1;                                                \
1616    uint8_t uimm = UIMM4(ctx->opcode);                          \
1617                                                                \
1618    if (unlikely(!ctx->vsx_enabled)) {                          \
1619        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
1620        return;                                                 \
1621    }                                                           \
1622    xt = gen_vsr_ptr(xT(ctx->opcode));                          \
1623    xb = gen_vsr_ptr(xB(ctx->opcode));                          \
1624    t0 = tcg_temp_new_i32();                                    \
1625    t1 = tcg_temp_new_i64();                                    \
1626    /*                                                          \
1627     * uimm > 15 out of bound and for                           \
1628     * uimm > 12 handle as per hardware in helper               \
1629     */                                                         \
1630    if (uimm > 15) {                                            \
1631        tcg_gen_movi_i64(t1, 0);                                \
1632        set_cpu_vsr(xT(ctx->opcode), t1, true);                 \
1633        set_cpu_vsr(xT(ctx->opcode), t1, false);                \
1634        return;                                                 \
1635    }                                                           \
1636    tcg_gen_movi_i32(t0, uimm);                                 \
1637    gen_helper_##name(cpu_env, xt, xb, t0);                     \
1638    tcg_temp_free_ptr(xb);                                      \
1639    tcg_temp_free_ptr(xt);                                      \
1640    tcg_temp_free_i32(t0);                                      \
1641    tcg_temp_free_i64(t1);                                      \
1642}
1643
1644VSX_EXTRACT_INSERT(xxextractuw)
1645VSX_EXTRACT_INSERT(xxinsertw)
1646
1647#ifdef TARGET_PPC64
1648static void gen_xsxexpdp(DisasContext *ctx)
1649{
1650    TCGv rt = cpu_gpr[rD(ctx->opcode)];
1651    TCGv_i64 t0;
1652    if (unlikely(!ctx->vsx_enabled)) {
1653        gen_exception(ctx, POWERPC_EXCP_VSXU);
1654        return;
1655    }
1656    t0 = tcg_temp_new_i64();
1657    get_cpu_vsr(t0, xB(ctx->opcode), true);
1658    tcg_gen_extract_i64(rt, t0, 52, 11);
1659    tcg_temp_free_i64(t0);
1660}
1661
1662static void gen_xsxexpqp(DisasContext *ctx)
1663{
1664    TCGv_i64 xth;
1665    TCGv_i64 xtl;
1666    TCGv_i64 xbh;
1667
1668    if (unlikely(!ctx->vsx_enabled)) {
1669        gen_exception(ctx, POWERPC_EXCP_VSXU);
1670        return;
1671    }
1672    xth = tcg_temp_new_i64();
1673    xtl = tcg_temp_new_i64();
1674    xbh = tcg_temp_new_i64();
1675    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
1676
1677    tcg_gen_extract_i64(xth, xbh, 48, 15);
1678    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
1679    tcg_gen_movi_i64(xtl, 0);
1680    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);
1681
1682    tcg_temp_free_i64(xbh);
1683    tcg_temp_free_i64(xth);
1684    tcg_temp_free_i64(xtl);
1685}
1686
1687static void gen_xsiexpdp(DisasContext *ctx)
1688{
1689    TCGv_i64 xth;
1690    TCGv ra = cpu_gpr[rA(ctx->opcode)];
1691    TCGv rb = cpu_gpr[rB(ctx->opcode)];
1692    TCGv_i64 t0;
1693
1694    if (unlikely(!ctx->vsx_enabled)) {
1695        gen_exception(ctx, POWERPC_EXCP_VSXU);
1696        return;
1697    }
1698    t0 = tcg_temp_new_i64();
1699    xth = tcg_temp_new_i64();
1700    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
1701    tcg_gen_andi_i64(t0, rb, 0x7FF);
1702    tcg_gen_shli_i64(t0, t0, 52);
1703    tcg_gen_or_i64(xth, xth, t0);
1704    set_cpu_vsr(xT(ctx->opcode), xth, true);
1705    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);
1706    tcg_temp_free_i64(t0);
1707    tcg_temp_free_i64(xth);
1708}
1709
static void gen_xsiexpqp(DisasContext *ctx)
{
    /*
     * xsiexpqp: insert the low 15 bits of VSR[RB+32].hi as the QP
     * exponent of the value in VSR[RA+32]; result in VSR[RD+32].  The
     * low doubleword (fraction continuation) is copied from A unchanged.
     */
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsr(xah, rA(ctx->opcode) + 32, true);
    get_cpu_vsr(xal, rA(ctx->opcode) + 32, false);
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    t0 = tcg_temp_new_i64();

    /* Keep sign and high fraction bits of A, then OR in B's exponent. */
    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}
1748
1749static void gen_xsxsigdp(DisasContext *ctx)
1750{
1751    TCGv rt = cpu_gpr[rD(ctx->opcode)];
1752    TCGv_i64 t0, t1, zr, nan, exp;
1753
1754    if (unlikely(!ctx->vsx_enabled)) {
1755        gen_exception(ctx, POWERPC_EXCP_VSXU);
1756        return;
1757    }
1758    exp = tcg_temp_new_i64();
1759    t0 = tcg_temp_new_i64();
1760    t1 = tcg_temp_new_i64();
1761    zr = tcg_const_i64(0);
1762    nan = tcg_const_i64(2047);
1763
1764    get_cpu_vsr(t1, xB(ctx->opcode), true);
1765    tcg_gen_extract_i64(exp, t1, 52, 11);
1766    tcg_gen_movi_i64(t0, 0x0010000000000000);
1767    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1768    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1769    get_cpu_vsr(t1, xB(ctx->opcode), true);
1770    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);
1771
1772    tcg_temp_free_i64(t0);
1773    tcg_temp_free_i64(t1);
1774    tcg_temp_free_i64(exp);
1775    tcg_temp_free_i64(zr);
1776    tcg_temp_free_i64(nan);
1777}
1778
1779static void gen_xsxsigqp(DisasContext *ctx)
1780{
1781    TCGv_i64 t0, zr, nan, exp;
1782    TCGv_i64 xth;
1783    TCGv_i64 xtl;
1784    TCGv_i64 xbh;
1785    TCGv_i64 xbl;
1786
1787    if (unlikely(!ctx->vsx_enabled)) {
1788        gen_exception(ctx, POWERPC_EXCP_VSXU);
1789        return;
1790    }
1791    xth = tcg_temp_new_i64();
1792    xtl = tcg_temp_new_i64();
1793    xbh = tcg_temp_new_i64();
1794    xbl = tcg_temp_new_i64();
1795    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
1796    get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false);
1797    exp = tcg_temp_new_i64();
1798    t0 = tcg_temp_new_i64();
1799    zr = tcg_const_i64(0);
1800    nan = tcg_const_i64(32767);
1801
1802    tcg_gen_extract_i64(exp, xbh, 48, 15);
1803    tcg_gen_movi_i64(t0, 0x0001000000000000);
1804    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1805    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1806    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
1807    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
1808    tcg_gen_mov_i64(xtl, xbl);
1809    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);
1810
1811    tcg_temp_free_i64(t0);
1812    tcg_temp_free_i64(exp);
1813    tcg_temp_free_i64(zr);
1814    tcg_temp_free_i64(nan);
1815    tcg_temp_free_i64(xth);
1816    tcg_temp_free_i64(xtl);
1817    tcg_temp_free_i64(xbh);
1818    tcg_temp_free_i64(xbl);
1819}
1820#endif
1821
static void gen_xviexpsp(DisasContext *ctx)
{
    /*
     * xviexpsp: for each SP element, combine the sign and 23-bit
     * fraction from VSR[XA] with the 8-bit exponent taken from the low
     * byte of the corresponding word of VSR[XB].
     */
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    /* Sign + fraction mask for two SP words packed in one dword. */
    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    /* Low byte of each of B's words, shifted into the exponent field. */
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
1867
1868static void gen_xviexpdp(DisasContext *ctx)
1869{
1870    TCGv_i64 xth;
1871    TCGv_i64 xtl;
1872    TCGv_i64 xah;
1873    TCGv_i64 xal;
1874    TCGv_i64 xbh;
1875    TCGv_i64 xbl;
1876
1877    if (unlikely(!ctx->vsx_enabled)) {
1878        gen_exception(ctx, POWERPC_EXCP_VSXU);
1879        return;
1880    }
1881    xth = tcg_temp_new_i64();
1882    xtl = tcg_temp_new_i64();
1883    xah = tcg_temp_new_i64();
1884    xal = tcg_temp_new_i64();
1885    xbh = tcg_temp_new_i64();
1886    xbl = tcg_temp_new_i64();
1887    get_cpu_vsr(xah, xA(ctx->opcode), true);
1888    get_cpu_vsr(xal, xA(ctx->opcode), false);
1889    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1890    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1891
1892    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
1893    set_cpu_vsr(xT(ctx->opcode), xth, true);
1894
1895    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
1896    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1897
1898    tcg_temp_free_i64(xth);
1899    tcg_temp_free_i64(xtl);
1900    tcg_temp_free_i64(xah);
1901    tcg_temp_free_i64(xal);
1902    tcg_temp_free_i64(xbh);
1903    tcg_temp_free_i64(xbl);
1904}
1905
1906static void gen_xvxexpsp(DisasContext *ctx)
1907{
1908    TCGv_i64 xth;
1909    TCGv_i64 xtl;
1910    TCGv_i64 xbh;
1911    TCGv_i64 xbl;
1912
1913    if (unlikely(!ctx->vsx_enabled)) {
1914        gen_exception(ctx, POWERPC_EXCP_VSXU);
1915        return;
1916    }
1917    xth = tcg_temp_new_i64();
1918    xtl = tcg_temp_new_i64();
1919    xbh = tcg_temp_new_i64();
1920    xbl = tcg_temp_new_i64();
1921    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1922    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1923
1924    tcg_gen_shri_i64(xth, xbh, 23);
1925    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
1926    set_cpu_vsr(xT(ctx->opcode), xth, true);
1927    tcg_gen_shri_i64(xtl, xbl, 23);
1928    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
1929    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1930
1931    tcg_temp_free_i64(xth);
1932    tcg_temp_free_i64(xtl);
1933    tcg_temp_free_i64(xbh);
1934    tcg_temp_free_i64(xbl);
1935}
1936
1937static void gen_xvxexpdp(DisasContext *ctx)
1938{
1939    TCGv_i64 xth;
1940    TCGv_i64 xtl;
1941    TCGv_i64 xbh;
1942    TCGv_i64 xbl;
1943
1944    if (unlikely(!ctx->vsx_enabled)) {
1945        gen_exception(ctx, POWERPC_EXCP_VSXU);
1946        return;
1947    }
1948    xth = tcg_temp_new_i64();
1949    xtl = tcg_temp_new_i64();
1950    xbh = tcg_temp_new_i64();
1951    xbl = tcg_temp_new_i64();
1952    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1953    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1954
1955    tcg_gen_extract_i64(xth, xbh, 52, 11);
1956    set_cpu_vsr(xT(ctx->opcode), xth, true);
1957    tcg_gen_extract_i64(xtl, xbl, 52, 11);
1958    set_cpu_vsr(xT(ctx->opcode), xtl, false);
1959
1960    tcg_temp_free_i64(xth);
1961    tcg_temp_free_i64(xtl);
1962    tcg_temp_free_i64(xbh);
1963    tcg_temp_free_i64(xbl);
1964}
1965
/* xvxsigsp: translator generated by the GEN_VSX_HELPER_X2 macro (ISA 3.0). */
GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)
1967
1968static void gen_xvxsigdp(DisasContext *ctx)
1969{
1970    TCGv_i64 xth;
1971    TCGv_i64 xtl;
1972    TCGv_i64 xbh;
1973    TCGv_i64 xbl;
1974    TCGv_i64 t0, zr, nan, exp;
1975
1976    if (unlikely(!ctx->vsx_enabled)) {
1977        gen_exception(ctx, POWERPC_EXCP_VSXU);
1978        return;
1979    }
1980    xth = tcg_temp_new_i64();
1981    xtl = tcg_temp_new_i64();
1982    xbh = tcg_temp_new_i64();
1983    xbl = tcg_temp_new_i64();
1984    get_cpu_vsr(xbh, xB(ctx->opcode), true);
1985    get_cpu_vsr(xbl, xB(ctx->opcode), false);
1986    exp = tcg_temp_new_i64();
1987    t0 = tcg_temp_new_i64();
1988    zr = tcg_const_i64(0);
1989    nan = tcg_const_i64(2047);
1990
1991    tcg_gen_extract_i64(exp, xbh, 52, 11);
1992    tcg_gen_movi_i64(t0, 0x0010000000000000);
1993    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1994    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1995    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
1996    set_cpu_vsr(xT(ctx->opcode), xth, true);
1997
1998    tcg_gen_extract_i64(exp, xbl, 52, 11);
1999    tcg_gen_movi_i64(t0, 0x0010000000000000);
2000    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2001    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
2002    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
2003    set_cpu_vsr(xT(ctx->opcode), xtl, false);
2004
2005    tcg_temp_free_i64(t0);
2006    tcg_temp_free_i64(exp);
2007    tcg_temp_free_i64(zr);
2008    tcg_temp_free_i64(nan);
2009    tcg_temp_free_i64(xth);
2010    tcg_temp_free_i64(xtl);
2011    tcg_temp_free_i64(xbh);
2012    tcg_temp_free_i64(xbl);
2013}
2014
static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ,
                     int rt, bool store, bool paired)
{
    /*
     * Common body for the LXV/STXV family: move one VSR (or, when
     * 'paired', two consecutive VSRs) between registers and memory at
     * EA = GPR[ra] + displ, as a sequence of 64-bit accesses.
     */
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;
    int rt1, rt2;

    xt = tcg_temp_new_i64();

    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    /* In little-endian mode the paired registers are transferred in
     * reverse order. */
    if (paired && ctx->le_mode) {
        rt1 = rt + 1;
        rt2 = rt;
    } else {
        rt1 = rt;
        rt2 = rt + 1;
    }

    if (store) {
        /* '!le_mode' selects which doubleword of the VSR goes to the
         * lower address. */
        get_cpu_vsr(xt, rt1, !ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        gen_addr_add(ctx, ea, ea, 8);
        get_cpu_vsr(xt, rt1, ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, !ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        }
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, !ctx->le_mode);
        gen_addr_add(ctx, ea, ea, 8);
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, ctx->le_mode);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, !ctx->le_mode);
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, ctx->le_mode);
        }
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}
2072
static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired)
{
    /*
     * D-form LXV/STXV (and paired LXVP/STXVP, ISA v3.1).  Paired forms
     * always require VSX; single forms targeting the upper half of the
     * register file only require VMX.
     */
    if (paired) {
        REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    } else {
        REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    }

    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired);
}
2089
static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a,
                           bool store, bool paired)
{
    /* Prefixed PLXV/PSTXV (and paired variants), ISA v3.1. */
    arg_D d;
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    /* Bail out if resolve_PLS_D() already handled the instruction. */
    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired);
}
2103
static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired)
{
    /*
     * X-form (register-indexed) LXVX/STXVX and paired LXVPX/STXVPX:
     * same facility checks as the D-form, with the displacement taken
     * from GPR[RB].
     */
    if (paired) {
        REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    } else {
        REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    }

    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired);
}
2120
/* Decodetree bindings for the VSX vector load/store family. */
TRANS(STXV, do_lstxv_D, true, false)
TRANS(LXV, do_lstxv_D, false, false)
TRANS(STXVP, do_lstxv_D, true, true)
TRANS(LXVP, do_lstxv_D, false, true)
TRANS(STXVX, do_lstxv_X, true, false)
TRANS(LXVX, do_lstxv_X, false, false)
TRANS(STXVPX, do_lstxv_X, true, true)
TRANS(LXVPX, do_lstxv_X, false, true)
TRANS64(PSTXV, do_lstxv_PLS_D, true, false)
TRANS64(PLXV, do_lstxv_PLS_D, false, false)
TRANS64(PSTXVP, do_lstxv_PLS_D, true, true)
TRANS64(PLXVP, do_lstxv_PLS_D, false, true)
2133
2134static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
2135                             TCGv_vec c)
2136{
2137    TCGv_vec tmp = tcg_temp_new_vec_matching(c);
2138    tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1);
2139    tcg_gen_bitsel_vec(vece, t, tmp, b, a);
2140    tcg_temp_free_vec(tmp);
2141}
2142
static bool do_xxblendv(DisasContext *ctx, arg_XX4 *a, unsigned vece)
{
    /*
     * XXBLENDV*: element-wise XT = (sign bit of XC element set) ? XB : XA,
     * for element size 'vece'.  Host-vector path uses gen_xxblendv_vec
     * (requires sari_vec); otherwise the per-width out-of-line helpers
     * are used.
     */
    static const TCGOpcode vecop_list[] = {
        INDEX_op_sari_vec, 0
    };
    static const GVecGen4 ops[4] = {
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVB,
            .opt_opc = vecop_list,
            .vece = MO_8
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVH,
            .opt_opc = vecop_list,
            .vece = MO_16
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVW,
            .opt_opc = vecop_list,
            .vece = MO_32
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVD,
            .opt_opc = vecop_list,
            .vece = MO_64
        }
    };

    REQUIRE_VSX(ctx);

    tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
                   vsr_full_offset(a->xb), vsr_full_offset(a->xc),
                   16, 16, &ops[vece]);

    return true;
}

TRANS(XXBLENDVB, do_xxblendv, MO_8)
TRANS(XXBLENDVH, do_xxblendv, MO_16)
TRANS(XXBLENDVW, do_xxblendv, MO_32)
TRANS(XXBLENDVD, do_xxblendv, MO_64)
2188
static bool do_xsmaxmincjdp(DisasContext *ctx, arg_XX3 *a,
                            void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    /*
     * Common translator for the ISA v3.0 scalar DP max/min "c" and "j"
     * variants: build env-relative pointers to the three VSRs and defer
     * the comparison semantics to 'helper'.
     */
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    helper(cpu_env, xt, xa, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSMAXCDP, do_xsmaxmincjdp, gen_helper_xsmaxcdp)
TRANS(XSMINCDP, do_xsmaxmincjdp, gen_helper_xsmincdp)
TRANS(XSMAXJDP, do_xsmaxmincjdp, gen_helper_xsmaxjdp)
TRANS(XSMINJDP, do_xsmaxmincjdp, gen_helper_xsminjdp)
2214
2215#undef GEN_XX2FORM
2216#undef GEN_XX3FORM
2217#undef GEN_XX2IFORM
2218#undef GEN_XX3_RC_FORM
2219#undef GEN_XX3FORM_DM
2220#undef VSX_LOGICAL
2221