/***                           VSX extension                               ***/

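/*
 * Read or write one 64-bit half of VSR[n]; "high" selects the
 * most-significant doubleword. gen_vsr_ptr/gen_acc_ptr build host pointers
 * into CPUPPCState for helpers that take a whole VSR or accumulator.
 */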
static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high));
}

static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}

static inline TCGv_ptr gen_acc_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, acc_full_offset(reg));
    return r;
}

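/*
 * Scalar loads: the value lands in the high doubleword of VSR[XT];
 * the low doubleword is left undefined, as the architecture permits.
 */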
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsr(xT(ctx->opcode), t0, true);                   \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

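/* lxvd2x: load two doublewords into VSR[XT]. */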
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

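/*
 * lxvw4x: load four words. In little-endian mode each doubleword is loaded
 * LE and its two words are then swapped back into element order.
 */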
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

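/* lxvwsx: load one word and splat it across all four word elements. */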
static void gen_lxvwsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i32 data;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i32(data);
}

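/* lxvdsx: load one doubleword and splat it across both doubleword elements. */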
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 data;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UQ));
    tcg_gen_gvec_dup_i64(MO_UQ, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i64(data);
}

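/*
 * Byte-swap the eight halfwords (gen_bswap16x8) or the four words
 * (gen_bswap32x4) held in the 128-bit inh:inl pair.
 */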
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

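/* lxvb16x: load sixteen bytes in big-endian byte order regardless of mode. */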
static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#ifdef TARGET_PPC64
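/*
 * lxvl/lxvll/stxvl/stxvll: load/store vector with a byte count taken from
 * GPR[RB]; the byte shuffling is done in the out-of-line helper.
 */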
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

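/* Scalar stores: write the high doubleword of VSR[XS] with the given op. */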
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsr(t0, xS(ctx->opcode), true);                   \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEUQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

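/*
 * Moves between GPRs and VSRs. Access to VSRs 0-31 is gated by the FP
 * enable and to VSRs 32-63 by the Altivec enable, hence the check on the
 * register number.
 */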
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsr(xT(ctx->opcode), t0, true);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

#endif

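/*
 * Sign-manipulation primitives shared by the scalar, quad-precision and
 * vector move macros below. SGN_MASK_SP holds the sign bit of both word
 * elements packed into one doubleword.
 */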
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xb, xB(ctx->opcode), true);                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                get_cpu_vsr(xa, xA(ctx->opcode), true);           \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xb, true);                   \
        set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xbh, xb, true);                                   \
    get_cpu_vsr(xbl, xb, false);                                  \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsr(tmp, xa, true);                               \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsr(xt, xbh, true);                                   \
    set_cpu_vsr(xt, xbl, false);                                  \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext *ctx)                  \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        get_cpu_vsr(xbh, xB(ctx->opcode), true);                 \
        get_cpu_vsr(xbl, xB(ctx->opcode), false);                \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                get_cpu_vsr(xah, xA(ctx->opcode), true);         \
                get_cpu_vsr(xal, xA(ctx->opcode), false);        \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        set_cpu_vsr(xT(ctx->opcode), xbh, true);                 \
        set_cpu_vsr(xT(ctx->opcode), xbl, false);                \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

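/*
 * Vector compares. The helper computes the CR6-style result, which is
 * written to CR6 only when the Rc bit (opcode bit 21) is set and is
 * otherwise discarded.
 */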
#define VSX_CMP(name, op1, op2, inval, type)                                  \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 ignored;                                                         \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);                   \
    } else {                                                                  \
        ignored = tcg_temp_new_i32();                                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);                      \
        tcg_temp_free_i32(ignored);                                           \
    }                                                                         \
    gen_helper_float_check_status(cpu_env);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)

static bool trans_XSCVQPDP(DisasContext *ctx, arg_X_tb_rc *a)
{
    TCGv_i32 ro;
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    ro = tcg_const_i32(a->rc);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper_XSCVQPDP(cpu_env, ro, xt, xb);
    tcg_temp_free_i32(ro);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool do_helper_env_X_tb(DisasContext *ctx, arg_X_tb *a,
                               void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper(cpu_env, xt, xb);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSCVUQQP, do_helper_env_X_tb, gen_helper_XSCVUQQP)
TRANS(XSCVSQQP, do_helper_env_X_tb, gen_helper_XSCVSQQP)
TRANS(XSCVQPUQZ, do_helper_env_X_tb, gen_helper_XSCVQPUQZ)
TRANS(XSCVQPSQZ, do_helper_env_X_tb, gen_helper_XSCVQPSQZ)

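/*
 * Boilerplate for instructions implemented entirely in out-of-line helpers.
 * The X* variants pass pointers to VSR[XT]/VSR[XA]/VSR[XB]; the R* variants
 * address VSRs 32-63 (used by the quad-precision ops); several forms also
 * pass the raw opcode so the helper can decode the remaining fields.
 */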
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xb);                                       \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xb;                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xb);                                      \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                              \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsr(t0, xB(ctx->opcode), true);                   \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsr(xT(ctx->opcode), t1, true);                   \
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)

bool trans_XSCVSPDPN(DisasContext *ctx, arg_XX2 *a)
{
    TCGv_i64 tmp;

    REQUIRE_INSNS_FLAGS2(ctx, VSX207);
    REQUIRE_VSX(ctx);

    tmp = tcg_temp_new_i64();
    get_cpu_vsr(tmp, a->xb, true);

    gen_helper_XSCVSPDPN(tmp, tmp);

    set_cpu_vsr(a->xt, tmp, true);
    set_cpu_vsr(a->xt, tcg_constant_i64(0), false);

    tcg_temp_free_i64(tmp);

    return true;
}

GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)

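/* XXPERM and XXPERMR are implemented on top of the Altivec permute helpers. */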
static bool trans_XXPERM(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERM(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XXPERMR(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERMR(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

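/*
 * XXPERMDI: the 2-bit DM field selects one doubleword from each source.
 * When XT aliases a source, both halves are read before writing back.
 */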
static bool trans_XXPERMDI(DisasContext *ctx, arg_XX3_dm *a)
{
    TCGv_i64 t0, t1;

    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    t0 = tcg_temp_new_i64();

    if (unlikely(a->xt == a->xa || a->xt == a->xb)) {
        t1 = tcg_temp_new_i64();

        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        get_cpu_vsr(t1, a->xb, (a->dm & 1) == 0);

        set_cpu_vsr(a->xt, t0, true);
        set_cpu_vsr(a->xt, t1, false);

        tcg_temp_free_i64(t1);
    } else {
        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        set_cpu_vsr(a->xt, t0, true);

        get_cpu_vsr(t0, a->xb, (a->dm & 1) == 0);
        set_cpu_vsr(a->xt, t0, false);
    }

    tcg_temp_free_i64(t0);

    return true;
}

static bool trans_XXPERMX(DisasContext *ctx, arg_8RR_XX4_uim3 *a)
{
    TCGv_ptr xt, xa, xb, xc;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);
    xc = gen_vsr_ptr(a->xc);

    gen_helper_XXPERMX(xt, xa, xb, xc, tcg_constant_tl(a->uim3));

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);
    tcg_temp_free_ptr(xc);

    return true;
}

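/*
 * XXGENPCV*: the low two bits of IMM select among big/little-endian and
 * expanded/compressed result forms via a four-entry function table.
 */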
typedef void (*xxgenpcv_genfn)(TCGv_ptr, TCGv_ptr);

static bool do_xxgenpcv(DisasContext *ctx, arg_X_imm5 *a,
                        const xxgenpcv_genfn fn[4])
{
    TCGv_ptr xt, vrb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (a->imm & ~0x3) {
        gen_invalid(ctx);
        return true;
    }

    xt = gen_vsr_ptr(a->xt);
    vrb = gen_avr_ptr(a->vrb);

    fn[a->imm](xt, vrb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(vrb);

    return true;
}

#define XXGENPCV(NAME) \
    static bool trans_##NAME(DisasContext *ctx, arg_X_imm5 *a)  \
    {                                                           \
        static const xxgenpcv_genfn fn[4] = {                   \
            gen_helper_##NAME##_be_exp,                         \
            gen_helper_##NAME##_be_comp,                        \
            gen_helper_##NAME##_le_exp,                         \
            gen_helper_##NAME##_le_comp,                        \
        };                                                      \
        return do_xxgenpcv(ctx, a, fn);                         \
    }

XXGENPCV(XXGENPCVBM)
XXGENPCV(XXGENPCVHM)
XXGENPCV(XXGENPCVWM)
XXGENPCV(XXGENPCVDM)
#undef XXGENPCV

1300static bool do_xsmadd(DisasContext *ctx, int tgt, int src1, int src2, int src3,
1301        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
1302{
1303    TCGv_ptr t, s1, s2, s3;
1304
1305    t = gen_vsr_ptr(tgt);
1306    s1 = gen_vsr_ptr(src1);
1307    s2 = gen_vsr_ptr(src2);
1308    s3 = gen_vsr_ptr(src3);
1309
1310    gen_helper(cpu_env, t, s1, s2, s3);
1311
1312    tcg_temp_free_ptr(t);
1313    tcg_temp_free_ptr(s1);
1314    tcg_temp_free_ptr(s2);
1315    tcg_temp_free_ptr(s3);
1316
1317    return true;
1318}
1319
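/*
 * A-type multiply-adds compute XA * XT + XB, reusing the target as the
 * second multiplicand; M-type compute XA * XB + XT, reusing it as the
 * addend. Both forms map onto the same four-operand helper.
 */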
static bool do_xsmadd_XX3(DisasContext *ctx, arg_XX3 *a, bool type_a,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    REQUIRE_VSX(ctx);

    if (type_a) {
        return do_xsmadd(ctx, a->xt, a->xa, a->xt, a->xb, gen_helper);
    }
    return do_xsmadd(ctx, a->xt, a->xa, a->xb, a->xt, gen_helper);
}

TRANS_FLAGS2(VSX, XSMADDADP, do_xsmadd_XX3, true, gen_helper_XSMADDDP)
TRANS_FLAGS2(VSX, XSMADDMDP, do_xsmadd_XX3, false, gen_helper_XSMADDDP)
TRANS_FLAGS2(VSX, XSMSUBADP, do_xsmadd_XX3, true, gen_helper_XSMSUBDP)
TRANS_FLAGS2(VSX, XSMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSMSUBDP)
TRANS_FLAGS2(VSX, XSNMADDADP, do_xsmadd_XX3, true, gen_helper_XSNMADDDP)
TRANS_FLAGS2(VSX, XSNMADDMDP, do_xsmadd_XX3, false, gen_helper_XSNMADDDP)
TRANS_FLAGS2(VSX, XSNMSUBADP, do_xsmadd_XX3, true, gen_helper_XSNMSUBDP)
TRANS_FLAGS2(VSX, XSNMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSNMSUBDP)
TRANS_FLAGS2(VSX207, XSMADDASP, do_xsmadd_XX3, true, gen_helper_XSMADDSP)
TRANS_FLAGS2(VSX207, XSMADDMSP, do_xsmadd_XX3, false, gen_helper_XSMADDSP)
TRANS_FLAGS2(VSX207, XSMSUBASP, do_xsmadd_XX3, true, gen_helper_XSMSUBSP)
TRANS_FLAGS2(VSX207, XSMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSMSUBSP)
TRANS_FLAGS2(VSX207, XSNMADDASP, do_xsmadd_XX3, true, gen_helper_XSNMADDSP)
TRANS_FLAGS2(VSX207, XSNMADDMSP, do_xsmadd_XX3, false, gen_helper_XSNMADDSP)
TRANS_FLAGS2(VSX207, XSNMSUBASP, do_xsmadd_XX3, true, gen_helper_XSNMSUBSP)
TRANS_FLAGS2(VSX207, XSNMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSNMSUBSP)

static bool do_xsmadd_X(DisasContext *ctx, arg_X_rc *a,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr),
        void (*gen_helper_ro)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    int vrt, vra, vrb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    vrt = a->rt + 32;
    vra = a->ra + 32;
    vrb = a->rb + 32;

    if (a->rc) {
        return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper_ro);
    }

    return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper);
}

TRANS(XSMADDQP, do_xsmadd_X, gen_helper_XSMADDQP, gen_helper_XSMADDQPO)
TRANS(XSMSUBQP, do_xsmadd_X, gen_helper_XSMSUBQP, gen_helper_XSMSUBQPO)
TRANS(XSNMADDQP, do_xsmadd_X, gen_helper_XSNMADDQP, gen_helper_XSNMADDQPO)
TRANS(XSNMSUBQP, do_xsmadd_X, gen_helper_XSNMSUBQP, gen_helper_XSNMSUBQPO)

#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)             \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, s1, s2, s3;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    s1 = gen_vsr_ptr(xA(ctx->opcode));                                        \
    if (ctx->opcode & PPC_BIT32(25)) {                                        \
        /*                                                                    \
         * AxT + B                                                            \
         */                                                                   \
        s2 = gen_vsr_ptr(xB(ctx->opcode));                                    \
        s3 = gen_vsr_ptr(xT(ctx->opcode));                                    \
    } else {                                                                  \
        /*                                                                    \
         * AxB + T                                                            \
         */                                                                   \
        s2 = gen_vsr_ptr(xT(ctx->opcode));                                    \
        s3 = gen_vsr_ptr(xB(ctx->opcode));                                    \
    }                                                                         \
    gen_helper_##name(cpu_env, xt, s1, s2, s3);                               \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(s1);                                                    \
    tcg_temp_free_ptr(s2);                                                    \
    tcg_temp_free_ptr(s3);                                                    \
}

GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)

static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#define VSX_LOGICAL(name, vece, tcg_op)                              \
static void glue(gen_, name)(DisasContext *ctx)                      \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),               \
               vsr_full_offset(xA(ctx->opcode)),                     \
               vsr_full_offset(xB(ctx->opcode)), 16, 16);            \
    }

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)

#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext *ctx)             \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1, tmp;                       \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        tmp = tcg_temp_new_i64();                           \
        get_cpu_vsr(a0, xA(ctx->opcode), high);             \
        get_cpu_vsr(a1, xA(ctx->opcode), high);             \
        get_cpu_vsr(b0, xB(ctx->opcode), high);             \
        get_cpu_vsr(b1, xB(ctx->opcode), high);             \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);           \
        set_cpu_vsr(xT(ctx->opcode), tmp, true);            \
        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);           \
        set_cpu_vsr(xT(ctx->opcode), tmp, false);           \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
        tcg_temp_free_i64(tmp);                             \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

static bool trans_XXSEL(DisasContext *ctx, arg_XX4 *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(a->xt), vsr_full_offset(a->xc),
                        vsr_full_offset(a->xb), vsr_full_offset(a->xa), 16, 16);

    return true;
}

static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2_uim *a)
{
    int tofs, bofs;

    REQUIRE_VSX(ctx);

    tofs = vsr_full_offset(a->xt);
    bofs = vsr_full_offset(a->xb);
    bofs += a->uim << MO_32;
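    /*
     * uim indexes words in big-endian element order; on a little-endian
     * host the byte offset of that word within the vector is mirrored,
     * hence the XOR below.
     */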
#if !HOST_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
    return true;
}

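/*
 * pattern() replicates a byte across all eight bytes of a uint64_t,
 * e.g. pattern(0xAB) == 0xABABABABABABABABull, since
 * ~(uint64_t)0 / 0xff == 0x0101010101010101.
 */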
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a)
{
    if (a->xt < 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }
    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm);
    return true;
}

static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si);

    return true;
}

static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16,
                         helper_todouble(a->si));
    return true;
}

static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a)
{
    TCGv_i32 imm;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    imm = tcg_constant_i32(a->si);

    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix)));

    return true;
}

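/*
 * LXVKQ loads one of a fixed set of quad-precision constants. Only the
 * high 64 bits of each constant vary and the low doubleword is always
 * zero, so the table below stores just the high halves; a zero entry
 * marks an unspecified encoding, which is rejected as invalid.
 */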
static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a)
{
    static const uint64_t values[32] = {
        0, /* Unspecified */
        0x3FFF000000000000llu, /* QP +1.0 */
        0x4000000000000000llu, /* QP +2.0 */
        0x4000800000000000llu, /* QP +3.0 */
        0x4001000000000000llu, /* QP +4.0 */
        0x4001400000000000llu, /* QP +5.0 */
        0x4001800000000000llu, /* QP +6.0 */
        0x4001C00000000000llu, /* QP +7.0 */
        0x7FFF000000000000llu, /* QP +Inf */
        0x7FFF800000000000llu, /* QP dQNaN */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0x8000000000000000llu, /* QP -0.0 */
        0xBFFF000000000000llu, /* QP -1.0 */
        0xC000000000000000llu, /* QP -2.0 */
        0xC000800000000000llu, /* QP -3.0 */
        0xC001000000000000llu, /* QP -4.0 */
        0xC001400000000000llu, /* QP -5.0 */
        0xC001800000000000llu, /* QP -6.0 */
        0xC001C00000000000llu, /* QP -7.0 */
        0xFFFF000000000000llu, /* QP -Inf */
    };

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (values[a->uim]) {
        set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
        set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true);
    } else {
        gen_invalid(ctx);
    }

    return true;
}

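/*
 * XVTLSBB tests the least significant bit of each byte of VSR[XB] and
 * sets CR field BF: the shift by 3 puts "all LSBs set" into the LT bit
 * (value 8) and the shift by 1 puts "no LSB set" into the EQ bit
 * (value 2).
 */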
static bool trans_XVTLSBB(DisasContext *ctx, arg_XX2_bf_xb *a)
{
    TCGv_i64 xb, t0, t1, all_true, all_false, mask, zero;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xb = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    all_true = tcg_temp_new_i64();
    all_false = tcg_temp_new_i64();
    mask = tcg_constant_i64(dup_const(MO_8, 1));
    zero = tcg_constant_i64(0);

    get_cpu_vsr(xb, a->xb, true);
    tcg_gen_and_i64(t0, mask, xb);
    get_cpu_vsr(xb, a->xb, false);
    tcg_gen_and_i64(t1, mask, xb);

    tcg_gen_or_i64(all_false, t0, t1);
    tcg_gen_and_i64(all_true, t0, t1);

    tcg_gen_setcond_i64(TCG_COND_EQ, all_false, all_false, zero);
    tcg_gen_shli_i64(all_false, all_false, 1);
    tcg_gen_setcond_i64(TCG_COND_EQ, all_true, all_true, mask);
    tcg_gen_shli_i64(all_true, all_true, 3);

    tcg_gen_or_i64(t0, all_false, all_true);
    tcg_gen_extrl_i64_i32(cpu_crf[a->bf], t0);

    tcg_temp_free_i64(xb);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(all_true);
    tcg_temp_free_i64(all_false);

    return true;
}

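/*
 * xxsldwi concatenates XA:XB as eight 32-bit words and selects the four
 * words starting at word SHW, so SHW = 0 simply copies XA and the other
 * cases assemble the result from 32-bit halves of the source doublewords.
 */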
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            get_cpu_vsr(xth, xA(ctx->opcode), true);
            get_cpu_vsr(xtl, xA(ctx->opcode), false);
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsr(xth, xA(ctx->opcode), true);
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsr(t0, xA(ctx->opcode), false);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsr(xtl, xA(ctx->opcode), false);
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), true);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            get_cpu_vsr(xth, xA(ctx->opcode), false);
            get_cpu_vsr(xtl, xB(ctx->opcode), true);
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsr(xth, xA(ctx->opcode), false);
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), true);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsr(xtl, xB(ctx->opcode), true);
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), false);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static bool do_vsx_extract_insert(DisasContext *ctx, arg_XX2_uim *a,
    void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 zero = tcg_constant_i64(0);
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    /*
     * uim > 15 is out of bounds: the target is set to zero. Values
     * 12 < uim <= 15 are handled in the helper, matching hardware
     * behaviour.
     */
    if (a->uim > 15) {
        set_cpu_vsr(a->xt, zero, true);
        set_cpu_vsr(a->xt, zero, false);
    } else {
        xt = gen_vsr_ptr(a->xt);
        xb = gen_vsr_ptr(a->xb);
        gen_helper(xt, xb, tcg_constant_i32(a->uim));
        tcg_temp_free_ptr(xb);
        tcg_temp_free_ptr(xt);
    }

    return true;
}

TRANS(XXEXTRACTUW, do_vsx_extract_insert, gen_helper_XXEXTRACTUW)
TRANS(XXINSERTW, do_vsx_extract_insert, gen_helper_XXINSERTW)

#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xB(ctx->opcode), true);
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsr(xah, rA(ctx->opcode) + 32, true);
    get_cpu_vsr(xal, rA(ctx->opcode) + 32, false);
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

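/*
 * The significand helpers below prepend the implicit integer bit to the
 * fraction: it is 1 unless the exponent is all-zeroes (zero/denormal)
 * or all-ones (infinity/NaN), in which case the movconds clear it.
 */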
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif

static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static bool trans_XVXSIGSP(DisasContext *ctx, arg_XX2 *a)
{
    TCGv_ptr t, b;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    t = gen_vsr_ptr(a->xt);
    b = gen_vsr_ptr(a->xb);

    gen_helper_XVXSIGSP(t, b);

    tcg_temp_free_ptr(t);
    tcg_temp_free_ptr(b);

    return true;
}

static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

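/*
 * Paired load/store (lxvp/stxvp and friends) accesses two consecutive
 * VSRs. In little-endian mode the two registers of the pair swap places
 * in memory order, which the rt1/rt2 selection below accounts for.
 */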
static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ,
                     int rt, bool store, bool paired)
{
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;
    int rt1, rt2;

    xt = tcg_temp_new_i64();

    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (paired && ctx->le_mode) {
        rt1 = rt + 1;
        rt2 = rt;
    } else {
        rt1 = rt;
        rt2 = rt + 1;
    }

    if (store) {
        get_cpu_vsr(xt, rt1, !ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        gen_addr_add(ctx, ea, ea, 8);
        get_cpu_vsr(xt, rt1, ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, !ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        }
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, !ctx->le_mode);
        gen_addr_add(ctx, ea, ea, 8);
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, ctx->le_mode);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, !ctx->le_mode);
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, ctx->le_mode);
        }
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}

static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired)
{
    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired);
}

static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a,
                           bool store, bool paired)
{
    arg_D d;
    REQUIRE_VSX(ctx);

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired);
}

static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired)
{
    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired);
}

static bool do_lstxsd(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
{
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;

    if (store) {
        REQUIRE_VECTOR(ctx);
    } else {
        REQUIRE_VSX(ctx);
    }

    xt = tcg_temp_new_i64();
    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (store) {
        get_cpu_vsr(xt, rt + 32, true);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt + 32, xt, true);
        set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);

    return true;
}

static bool do_lstxsd_DS(DisasContext *ctx, arg_D *a, bool store)
{
    return do_lstxsd(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
}

static bool do_plstxsd_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
{
    arg_D d;

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxsd(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
}

static bool do_lstxssp(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
{
    TCGv ea;
    TCGv_i64 xt;

    REQUIRE_VECTOR(ctx);

    xt = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (store) {
        get_cpu_vsr(xt, rt + 32, true);
        gen_qemu_st32fs(ctx, xt, ea);
    } else {
        gen_qemu_ld32fs(ctx, xt, ea);
        set_cpu_vsr(rt + 32, xt, true);
        set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);

    return true;
}

static bool do_lstxssp_DS(DisasContext *ctx, arg_D *a, bool store)
{
    return do_lstxssp(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
}

static bool do_plstxssp_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
{
    arg_D d;

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxssp(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
}

TRANS_FLAGS2(ISA300, LXSD, do_lstxsd_DS, false)
TRANS_FLAGS2(ISA300, STXSD, do_lstxsd_DS, true)
TRANS_FLAGS2(ISA300, LXSSP, do_lstxssp_DS, false)
TRANS_FLAGS2(ISA300, STXSSP, do_lstxssp_DS, true)
TRANS_FLAGS2(ISA300, STXV, do_lstxv_D, true, false)
TRANS_FLAGS2(ISA300, LXV, do_lstxv_D, false, false)
TRANS_FLAGS2(ISA310, STXVP, do_lstxv_D, true, true)
TRANS_FLAGS2(ISA310, LXVP, do_lstxv_D, false, true)
TRANS_FLAGS2(ISA300, STXVX, do_lstxv_X, true, false)
TRANS_FLAGS2(ISA300, LXVX, do_lstxv_X, false, false)
TRANS_FLAGS2(ISA310, STXVPX, do_lstxv_X, true, true)
TRANS_FLAGS2(ISA310, LXVPX, do_lstxv_X, false, true)
TRANS64_FLAGS2(ISA310, PLXSD, do_plstxsd_PLS_D, false)
TRANS64_FLAGS2(ISA310, PSTXSD, do_plstxsd_PLS_D, true)
TRANS64_FLAGS2(ISA310, PLXSSP, do_plstxssp_PLS_D, false)
TRANS64_FLAGS2(ISA310, PSTXSSP, do_plstxssp_PLS_D, true)
TRANS64_FLAGS2(ISA310, PSTXV, do_lstxv_PLS_D, true, false)
TRANS64_FLAGS2(ISA310, PLXV, do_lstxv_PLS_D, false, false)
TRANS64_FLAGS2(ISA310, PSTXVP, do_lstxv_PLS_D, true, true)
TRANS64_FLAGS2(ISA310, PLXVP, do_lstxv_PLS_D, false, true)

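/*
 * "Rightmost element" loads and stores transfer a single element to or
 * from the low doubleword of the VSR; loads also zero the high
 * doubleword.
 */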
static bool do_lstrm(DisasContext *ctx, arg_X *a, MemOp mop, bool store)
{
    TCGv ea;
    TCGv_i64 xt;

    REQUIRE_VSX(ctx);

    xt = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, a->ra, cpu_gpr[a->rb]);

    if (store) {
        get_cpu_vsr(xt, a->rt, false);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(a->rt, xt, false);
        set_cpu_vsr(a->rt, tcg_constant_i64(0), true);
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}

TRANS_FLAGS2(ISA310, LXVRBX, do_lstrm, DEF_MEMOP(MO_UB), false)
TRANS_FLAGS2(ISA310, LXVRHX, do_lstrm, DEF_MEMOP(MO_UW), false)
TRANS_FLAGS2(ISA310, LXVRWX, do_lstrm, DEF_MEMOP(MO_UL), false)
TRANS_FLAGS2(ISA310, LXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), false)
TRANS_FLAGS2(ISA310, STXVRBX, do_lstrm, DEF_MEMOP(MO_UB), true)
TRANS_FLAGS2(ISA310, STXVRHX, do_lstrm, DEF_MEMOP(MO_UW), true)
TRANS_FLAGS2(ISA310, STXVRWX, do_lstrm, DEF_MEMOP(MO_UL), true)
TRANS_FLAGS2(ISA310, STXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), true)

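/*
 * XXEVAL's imm is an 8-entry truth table over (a, b, c), indexed in
 * PowerISA (big-endian) bit order. For example, imm = 0b00000011 has
 * entries 6 (a=1, b=1, c=0) and 7 (a=1, b=1, c=1) set, and ORing those
 * two minterms yields a AND b.
 */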
static void gen_xxeval_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b, TCGv_i64 c,
                           int64_t imm)
{
    /*
     * Instead of processing imm bit-by-bit, we'll skip the computation of
     * conjunctions whose corresponding bit is unset.
     */
    int bit;
    TCGv_i64 conj, disj;

    conj = tcg_temp_new_i64();
    disj = tcg_const_i64(0);

    /* Iterate over set bits from the least to the most significant bit */
    while (imm) {
        /*
         * Get the next bit to be processed with ctz64. Invert the result of
         * ctz64 to match the indexing used by PowerISA.
         */
        bit = 7 - ctz64(imm);
        if (bit & 0x4) {
            tcg_gen_mov_i64(conj, a);
        } else {
            tcg_gen_not_i64(conj, a);
        }
        if (bit & 0x2) {
            tcg_gen_and_i64(conj, conj, b);
        } else {
            tcg_gen_andc_i64(conj, conj, b);
        }
        if (bit & 0x1) {
            tcg_gen_and_i64(conj, conj, c);
        } else {
            tcg_gen_andc_i64(conj, conj, c);
        }
        tcg_gen_or_i64(disj, disj, conj);

        /* Unset the least significant bit that is set */
        imm &= imm - 1;
    }

    tcg_gen_mov_i64(t, disj);

    tcg_temp_free_i64(conj);
    tcg_temp_free_i64(disj);
}

static void gen_xxeval_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                           TCGv_vec c, int64_t imm)
{
    /*
     * Instead of processing imm bit-by-bit, we'll skip the computation of
     * conjunctions whose corresponding bit is unset.
     */
    int bit;
    TCGv_vec disj, conj;

    disj = tcg_const_zeros_vec_matching(t);
    conj = tcg_temp_new_vec_matching(t);

    /* Iterate over set bits from the least to the most significant bit */
    while (imm) {
        /*
         * Get the next bit to be processed with ctz64. Invert the result of
         * ctz64 to match the indexing used by PowerISA.
         */
        bit = 7 - ctz64(imm);
        if (bit & 0x4) {
            tcg_gen_mov_vec(conj, a);
        } else {
            tcg_gen_not_vec(vece, conj, a);
        }
        if (bit & 0x2) {
            tcg_gen_and_vec(vece, conj, conj, b);
        } else {
            tcg_gen_andc_vec(vece, conj, conj, b);
        }
        if (bit & 0x1) {
            tcg_gen_and_vec(vece, conj, conj, c);
        } else {
            tcg_gen_andc_vec(vece, conj, conj, c);
        }
        tcg_gen_or_vec(vece, disj, disj, conj);

        /* Unset the least significant bit that is set */
        imm &= imm - 1;
    }

    tcg_gen_mov_vec(t, disj);

    tcg_temp_free_vec(disj);
    tcg_temp_free_vec(conj);
}

static bool trans_XXEVAL(DisasContext *ctx, arg_8RR_XX4_imm *a)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_andc_vec, 0
    };
    static const GVecGen4i op = {
        .fniv = gen_xxeval_vec,
        .fno = gen_helper_XXEVAL,
        .fni8 = gen_xxeval_i64,
        .opt_opc = vecop_list,
        .vece = MO_64
    };
    int xt = vsr_full_offset(a->xt), xa = vsr_full_offset(a->xa),
        xb = vsr_full_offset(a->xb), xc = vsr_full_offset(a->xc);

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    /* Special-case immediates that map onto a single gvec operation */
    switch (a->imm) {
    case 0b00000000: /* false */
        set_cpu_vsr(a->xt, tcg_constant_i64(0), true);
        set_cpu_vsr(a->xt, tcg_constant_i64(0), false);
        break;
    case 0b00000011: /* and(B,A) */
        tcg_gen_gvec_and(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b00000101: /* and(C,A) */
        tcg_gen_gvec_and(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b00001111: /* A */
        tcg_gen_gvec_mov(MO_64, xt, xa, 16, 16);
        break;
    case 0b00010001: /* and(C,B) */
        tcg_gen_gvec_and(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b00011011: /* C?B:A */
        tcg_gen_gvec_bitsel(MO_64, xt, xc, xb, xa, 16, 16);
        break;
    case 0b00011101: /* B?C:A */
        tcg_gen_gvec_bitsel(MO_64, xt, xb, xc, xa, 16, 16);
        break;
    case 0b00100111: /* C?A:B */
        tcg_gen_gvec_bitsel(MO_64, xt, xc, xa, xb, 16, 16);
        break;
    case 0b00110011: /* B */
        tcg_gen_gvec_mov(MO_64, xt, xb, 16, 16);
        break;
    case 0b00110101: /* A?C:B */
        tcg_gen_gvec_bitsel(MO_64, xt, xa, xc, xb, 16, 16);
        break;
    case 0b00111100: /* xor(B,A) */
        tcg_gen_gvec_xor(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b00111111: /* or(B,A) */
        tcg_gen_gvec_or(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b01000111: /* B?A:C */
        tcg_gen_gvec_bitsel(MO_64, xt, xb, xa, xc, 16, 16);
        break;
    case 0b01010011: /* A?B:C */
        tcg_gen_gvec_bitsel(MO_64, xt, xa, xb, xc, 16, 16);
        break;
    case 0b01010101: /* C */
        tcg_gen_gvec_mov(MO_64, xt, xc, 16, 16);
        break;
    case 0b01011010: /* xor(C,A) */
        tcg_gen_gvec_xor(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b01011111: /* or(C,A) */
        tcg_gen_gvec_or(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b01100110: /* xor(C,B) */
        tcg_gen_gvec_xor(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b01110111: /* or(C,B) */
        tcg_gen_gvec_or(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10001000: /* nor(C,B) */
        tcg_gen_gvec_nor(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10011001: /* eqv(C,B) */
        tcg_gen_gvec_eqv(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10100000: /* nor(C,A) */
        tcg_gen_gvec_nor(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b10100101: /* eqv(C,A) */
        tcg_gen_gvec_eqv(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b10101010: /* not(C) */
        tcg_gen_gvec_not(MO_64, xt, xc, 16, 16);
        break;
    case 0b11000000: /* nor(B,A) */
        tcg_gen_gvec_nor(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11000011: /* eqv(B,A) */
        tcg_gen_gvec_eqv(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11001100: /* not(B) */
        tcg_gen_gvec_not(MO_64, xt, xb, 16, 16);
        break;
    case 0b11101110: /* nand(C,B) */
        tcg_gen_gvec_nand(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b11110000: /* not(A) */
        tcg_gen_gvec_not(MO_64, xt, xa, 16, 16);
        break;
    case 0b11111010: /* nand(C,A) */
        tcg_gen_gvec_nand(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b11111100: /* nand(B,A) */
        tcg_gen_gvec_nand(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11111111: /* true */
        set_cpu_vsr(a->xt, tcg_constant_i64(-1), true);
        set_cpu_vsr(a->xt, tcg_constant_i64(-1), false);
        break;
    default:
        /* Fallback to compute all conjunctions/disjunctions */
        tcg_gen_gvec_4i(xt, xa, xb, xc, 16, 16, a->imm, &op);
    }

    return true;
}

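/*
 * Blend: an arithmetic shift replicates the sign bit of each element of
 * c across the whole element, and the resulting mask selects b where set
 * and a where clear.
 */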
static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                             TCGv_vec c)
{
    TCGv_vec tmp = tcg_temp_new_vec_matching(c);
    tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1);
    tcg_gen_bitsel_vec(vece, t, tmp, b, a);
    tcg_temp_free_vec(tmp);
}

static bool do_xxblendv(DisasContext *ctx, arg_8RR_XX4 *a, unsigned vece)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_sari_vec, 0
    };
    static const GVecGen4 ops[4] = {
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVB,
            .opt_opc = vecop_list,
            .vece = MO_8
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVH,
            .opt_opc = vecop_list,
            .vece = MO_16
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVW,
            .opt_opc = vecop_list,
            .vece = MO_32
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVD,
            .opt_opc = vecop_list,
            .vece = MO_64
        }
    };

    REQUIRE_VSX(ctx);

    tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
                   vsr_full_offset(a->xb), vsr_full_offset(a->xc),
                   16, 16, &ops[vece]);

    return true;
}

TRANS(XXBLENDVB, do_xxblendv, MO_8)
TRANS(XXBLENDVH, do_xxblendv, MO_16)
TRANS(XXBLENDVW, do_xxblendv, MO_32)
TRANS(XXBLENDVD, do_xxblendv, MO_64)

static bool do_helper_XX3(DisasContext *ctx, arg_XX3 *a,
    void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    helper(cpu_env, xt, xa, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSCMPEQDP, do_helper_XX3, gen_helper_XSCMPEQDP)
TRANS(XSCMPGEDP, do_helper_XX3, gen_helper_XSCMPGEDP)
TRANS(XSCMPGTDP, do_helper_XX3, gen_helper_XSCMPGTDP)
TRANS(XSMAXCDP, do_helper_XX3, gen_helper_XSMAXCDP)
TRANS(XSMINCDP, do_helper_XX3, gen_helper_XSMINCDP)
TRANS(XSMAXJDP, do_helper_XX3, gen_helper_XSMAXJDP)
TRANS(XSMINJDP, do_helper_XX3, gen_helper_XSMINJDP)

static bool do_helper_X(arg_X *a,
    void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr rt, ra, rb;

    rt = gen_avr_ptr(a->rt);
    ra = gen_avr_ptr(a->ra);
    rb = gen_avr_ptr(a->rb);

    helper(cpu_env, rt, ra, rb);

    tcg_temp_free_ptr(rt);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);

    return true;
}

static bool do_xscmpqp(DisasContext *ctx, arg_X *a,
    void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    return do_helper_X(a, helper);
}

TRANS(XSCMPEQQP, do_xscmpqp, gen_helper_XSCMPEQQP)
TRANS(XSCMPGEQP, do_xscmpqp, gen_helper_XSCMPGEQP)
TRANS(XSCMPGTQP, do_xscmpqp, gen_helper_XSCMPGTQP)
TRANS(XSMAXCQP, do_xscmpqp, gen_helper_XSMAXCQP)
TRANS(XSMINCQP, do_xscmpqp, gen_helper_XSMINCQP)

static bool trans_XVCVSPBF16(DisasContext *ctx, arg_XX2 *a)
{
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_XVCVSPBF16(cpu_env, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XVCVBF16SPN(DisasContext *ctx, arg_XX2 *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_shli(MO_32, vsr_full_offset(a->xt), vsr_full_offset(a->xb),
                      16, 16, 16);

    return true;
}

/*
 * PowerISA 3.1 states that, for the current version of the architecture,
 * "the hardware implementation provides the effect of ACC[i] and VSRs
 * 4*i to 4*i + 3 logically containing the same data" and "The
 * Accumulators introduce no new logical state at this time" (page 501).
 * For now it seems unnecessary to create new structures, so ACC[i] is
 * the same as VSRs 4*i to 4*i + 3 and moves to and from the accumulators
 * are therefore no-ops.
 */
static bool trans_XXMFACC(DisasContext *ctx, arg_X_a *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);
    return true;
}

static bool trans_XXMTACC(DisasContext *ctx, arg_X_a *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);
    return true;
}

static bool trans_XXSETACCZ(DisasContext *ctx, arg_X_a *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);
    tcg_gen_gvec_dup_imm(MO_64, acc_full_offset(a->ra), 64, 64, 0);
    return true;
}

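/*
 * GER (outer product) instructions update a 512-bit accumulator from two
 * VSR sources. A source VSR that lies inside the target accumulator
 * (VSRs 4*xt to 4*xt + 3) is invalid, hence the xa / 4 and xb / 4
 * checks below.
 */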
static bool do_ger(DisasContext *ctx, arg_MMIRR_XX3 *a,
    void (*helper)(TCGv_env, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_i32))
{
    uint32_t mask;
    TCGv_ptr xt, xa, xb;
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);
    if (unlikely((a->xa / 4 == a->xt) || (a->xb / 4 == a->xt))) {
        gen_invalid(ctx);
        return true;
    }

    xt = gen_acc_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    mask = ger_pack_masks(a->pmsk, a->ymsk, a->xmsk);
    helper(cpu_env, xa, xb, xt, tcg_constant_i32(mask));
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);
    return true;
}

TRANS(XVI4GER8, do_ger, gen_helper_XVI4GER8)
TRANS(XVI4GER8PP, do_ger, gen_helper_XVI4GER8PP)
TRANS(XVI8GER4, do_ger, gen_helper_XVI8GER4)
TRANS(XVI8GER4PP, do_ger, gen_helper_XVI8GER4PP)
TRANS(XVI8GER4SPP, do_ger, gen_helper_XVI8GER4SPP)
TRANS(XVI16GER2, do_ger, gen_helper_XVI16GER2)
TRANS(XVI16GER2PP, do_ger, gen_helper_XVI16GER2PP)
TRANS(XVI16GER2S, do_ger, gen_helper_XVI16GER2S)
TRANS(XVI16GER2SPP, do_ger, gen_helper_XVI16GER2SPP)

TRANS64(PMXVI4GER8, do_ger, gen_helper_XVI4GER8)
TRANS64(PMXVI4GER8PP, do_ger, gen_helper_XVI4GER8PP)
TRANS64(PMXVI8GER4, do_ger, gen_helper_XVI8GER4)
TRANS64(PMXVI8GER4PP, do_ger, gen_helper_XVI8GER4PP)
TRANS64(PMXVI8GER4SPP, do_ger, gen_helper_XVI8GER4SPP)
TRANS64(PMXVI16GER2, do_ger, gen_helper_XVI16GER2)
TRANS64(PMXVI16GER2PP, do_ger, gen_helper_XVI16GER2PP)
TRANS64(PMXVI16GER2S, do_ger, gen_helper_XVI16GER2S)
TRANS64(PMXVI16GER2SPP, do_ger, gen_helper_XVI16GER2SPP)

TRANS(XVBF16GER2, do_ger, gen_helper_XVBF16GER2)
TRANS(XVBF16GER2PP, do_ger, gen_helper_XVBF16GER2PP)
TRANS(XVBF16GER2PN, do_ger, gen_helper_XVBF16GER2PN)
TRANS(XVBF16GER2NP, do_ger, gen_helper_XVBF16GER2NP)
TRANS(XVBF16GER2NN, do_ger, gen_helper_XVBF16GER2NN)

TRANS(XVF16GER2, do_ger, gen_helper_XVF16GER2)
TRANS(XVF16GER2PP, do_ger, gen_helper_XVF16GER2PP)
TRANS(XVF16GER2PN, do_ger, gen_helper_XVF16GER2PN)
TRANS(XVF16GER2NP, do_ger, gen_helper_XVF16GER2NP)
TRANS(XVF16GER2NN, do_ger, gen_helper_XVF16GER2NN)

TRANS(XVF32GER, do_ger, gen_helper_XVF32GER)
TRANS(XVF32GERPP, do_ger, gen_helper_XVF32GERPP)
TRANS(XVF32GERPN, do_ger, gen_helper_XVF32GERPN)
TRANS(XVF32GERNP, do_ger, gen_helper_XVF32GERNP)
TRANS(XVF32GERNN, do_ger, gen_helper_XVF32GERNN)

TRANS(XVF64GER, do_ger, gen_helper_XVF64GER)
TRANS(XVF64GERPP, do_ger, gen_helper_XVF64GERPP)
TRANS(XVF64GERPN, do_ger, gen_helper_XVF64GERPN)
TRANS(XVF64GERNP, do_ger, gen_helper_XVF64GERNP)
TRANS(XVF64GERNN, do_ger, gen_helper_XVF64GERNN)

TRANS64(PMXVBF16GER2, do_ger, gen_helper_XVBF16GER2)
TRANS64(PMXVBF16GER2PP, do_ger, gen_helper_XVBF16GER2PP)
TRANS64(PMXVBF16GER2PN, do_ger, gen_helper_XVBF16GER2PN)
TRANS64(PMXVBF16GER2NP, do_ger, gen_helper_XVBF16GER2NP)
TRANS64(PMXVBF16GER2NN, do_ger, gen_helper_XVBF16GER2NN)

TRANS64(PMXVF16GER2, do_ger, gen_helper_XVF16GER2)
TRANS64(PMXVF16GER2PP, do_ger, gen_helper_XVF16GER2PP)
TRANS64(PMXVF16GER2PN, do_ger, gen_helper_XVF16GER2PN)
TRANS64(PMXVF16GER2NP, do_ger, gen_helper_XVF16GER2NP)
TRANS64(PMXVF16GER2NN, do_ger, gen_helper_XVF16GER2NN)

TRANS64(PMXVF32GER, do_ger, gen_helper_XVF32GER)
TRANS64(PMXVF32GERPP, do_ger, gen_helper_XVF32GERPP)
TRANS64(PMXVF32GERPN, do_ger, gen_helper_XVF32GERPN)
TRANS64(PMXVF32GERNP, do_ger, gen_helper_XVF32GERNP)
TRANS64(PMXVF32GERNN, do_ger, gen_helper_XVF32GERNN)

TRANS64(PMXVF64GER, do_ger, gen_helper_XVF64GER)
TRANS64(PMXVF64GERPP, do_ger, gen_helper_XVF64GERPP)
TRANS64(PMXVF64GERPN, do_ger, gen_helper_XVF64GERPN)
TRANS64(PMXVF64GERNP, do_ger, gen_helper_XVF64GERNP)
TRANS64(PMXVF64GERNN, do_ger, gen_helper_XVF64GERNN)

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL