1 /* SPDX-License-Identifier: GPL-2.0-or-later */
2
3 #include "../multiarch/test-aes-main.c.inc"
4
/*
 * A system header pulled in via the multiarch include may already define
 * BIG_ENDIAN (e.g. as an unconditional constant from <endian.h>), so drop
 * that and redefine it as a compile-time test of the host byte order.
 */
#undef BIG_ENDIAN
#define BIG_ENDIAN (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)

/*
 * vperm control vector selecting bytes 8..15 then 0..7, i.e. swapping the
 * two 8-byte halves of a vector register.  Used on little-endian hosts to
 * fix up the element order produced by lxvd2x before/after the AES
 * instructions.  NOTE(review): relies on lxvd2x's doubleword load
 * semantics — confirm against the Power ISA if this table is changed.
 */
static unsigned char bswap_le[16] __attribute__((aligned(16))) = {
    8,9,10,11,12,13,14,15,
    0,1,2,3,4,5,6,7
};
12
/*
 * SubBytes + ShiftRows only.
 *
 * Power has no SubBytes/ShiftRows-only instruction: vcipherlast performs
 * the AES final round (SubBytes, ShiftRows, AddRoundKey), so supplying a
 * zero round key in v1 (vspltisb 1,0) makes the AddRoundKey a no-op.
 *
 * VSR numbering: VSR 32+n overlays vector register VRn, so lxvd2x/stxvd2x
 * on VSRs 32/34 move data for the v0/v2 operands of the crypto insns.
 *
 * On little-endian hosts the element order lxvd2x produces does not match
 * the byte layout the AES instructions expect, so the state is permuted
 * with bswap_le (loaded into v2) before and after the vcipherlast.
 */
bool test_SB_SR(uint8_t *o, const uint8_t *i)
{
    /* vcipherlast also adds round key, so supply zero. */
    if (BIG_ENDIAN) {
        asm("lxvd2x 32,0,%1\n\t"        /* v0 = input state */
            "vspltisb 1,0\n\t"          /* v1 = zero round key */
            "vcipherlast 0,0,1\n\t"     /* final AES round, key = 0 */
            "stxvd2x 32,0,%0"           /* store v0 to output */
            : : "r"(o), "r"(i) : "memory", "v0", "v1");
    } else {
        asm("lxvd2x 32,0,%1\n\t"        /* v0 = input (element order off) */
            "lxvd2x 34,0,%2\n\t"        /* v2 = bswap_le permutation */
            "vspltisb 1,0\n\t"          /* v1 = zero round key */
            "vperm 0,0,0,2\n\t"         /* fix element order for AES */
            "vcipherlast 0,0,1\n\t"
            "vperm 0,0,0,2\n\t"         /* back to memory order */
            "stxvd2x 32,0,%0"
            : : "r"(o), "r"(i), "r"(bswap_le) : "memory", "v0", "v1", "v2");
    }
    return true;
}
34
/*
 * MixColumns in isolation is not implemented: the vcipher instruction
 * used elsewhere in this file fuses MixColumns with the other round
 * steps, and there is no MixColumns-only instruction to test here.
 */
bool test_MC(uint8_t *o, const uint8_t *i)
{
    (void)o;
    (void)i;
    return false;
}
39
/*
 * Full AES encryption round: SubBytes, ShiftRows, MixColumns, AddRoundKey,
 * performed in one vcipher instruction with round key k in v1.
 *
 * As in test_SB_SR, VSRs 32/33/34 alias v0/v1/v2, and on little-endian
 * hosts both the state and the round key are permuted with bswap_le (v2)
 * into the layout the AES instruction expects, with the result permuted
 * back before the store.
 */
bool test_SB_SR_MC_AK(uint8_t *o, const uint8_t *i, const uint8_t *k)
{
    if (BIG_ENDIAN) {
        asm("lxvd2x 32,0,%1\n\t"        /* v0 = input state */
            "lxvd2x 33,0,%2\n\t"        /* v1 = round key */
            "vcipher 0,0,1\n\t"         /* one full encryption round */
            "stxvd2x 32,0,%0"
            : : "r"(o), "r"(i), "r"(k) : "memory", "v0", "v1");
    } else {
        asm("lxvd2x 32,0,%1\n\t"        /* v0 = input state */
            "lxvd2x 33,0,%2\n\t"        /* v1 = round key */
            "lxvd2x 34,0,%3\n\t"        /* v2 = bswap_le permutation */
            "vperm 0,0,0,2\n\t"         /* fix state element order */
            "vperm 1,1,1,2\n\t"         /* fix key element order */
            "vcipher 0,0,1\n\t"
            "vperm 0,0,0,2\n\t"         /* back to memory order */
            "stxvd2x 32,0,%0"
            : : "r"(o), "r"(i), "r"(k), "r"(bswap_le)
            : "memory", "v0", "v1", "v2");
    }
    return true;
}
62
/*
 * InvSubBytes + InvShiftRows only.
 *
 * Mirror of test_SB_SR for decryption: vncipherlast performs the inverse
 * final round including AddRoundKey, so a zero round key in v1 reduces it
 * to InvSubBytes + InvShiftRows.  Register aliasing (VSR 32+n = VRn) and
 * the little-endian bswap_le fixup are the same as in test_SB_SR.
 */
bool test_ISB_ISR(uint8_t *o, const uint8_t *i)
{
    /* vcipherlast also adds round key, so supply zero. */
    if (BIG_ENDIAN) {
        asm("lxvd2x 32,0,%1\n\t"        /* v0 = input state */
            "vspltisb 1,0\n\t"          /* v1 = zero round key */
            "vncipherlast 0,0,1\n\t"    /* inverse final round, key = 0 */
            "stxvd2x 32,0,%0"
            : : "r"(o), "r"(i) : "memory", "v0", "v1");
    } else {
        asm("lxvd2x 32,0,%1\n\t"        /* v0 = input (element order off) */
            "lxvd2x 34,0,%2\n\t"        /* v2 = bswap_le permutation */
            "vspltisb 1,0\n\t"          /* v1 = zero round key */
            "vperm 0,0,0,2\n\t"         /* fix element order for AES */
            "vncipherlast 0,0,1\n\t"
            "vperm 0,0,0,2\n\t"         /* back to memory order */
            "stxvd2x 32,0,%0"
            : : "r"(o), "r"(i), "r"(bswap_le) : "memory", "v0", "v1", "v2");
    }
    return true;
}
84
/*
 * InvMixColumns in isolation is not implemented: vncipher, the decryption
 * round instruction used elsewhere in this file, fuses InvMixColumns with
 * the other inverse round steps.
 */
bool test_IMC(uint8_t *o, const uint8_t *i)
{
    (void)o;
    (void)i;
    return false;
}
89
/*
 * Inverse round in the AK-before-IMC ordering: InvSubBytes, InvShiftRows,
 * AddRoundKey, InvMixColumns — the ordering vncipher implements, with the
 * round key k in v1.  Register aliasing and the little-endian bswap_le
 * fixup of both state and key match test_SB_SR_MC_AK.
 */
bool test_ISB_ISR_AK_IMC(uint8_t *o, const uint8_t *i, const uint8_t *k)
{
    if (BIG_ENDIAN) {
        asm("lxvd2x 32,0,%1\n\t"        /* v0 = input state */
            "lxvd2x 33,0,%2\n\t"        /* v1 = round key */
            "vncipher 0,0,1\n\t"        /* one full decryption round */
            "stxvd2x 32,0,%0"
            : : "r"(o), "r"(i), "r"(k) : "memory", "v0", "v1");
    } else {
        asm("lxvd2x 32,0,%1\n\t"        /* v0 = input state */
            "lxvd2x 33,0,%2\n\t"        /* v1 = round key */
            "lxvd2x 34,0,%3\n\t"        /* v2 = bswap_le permutation */
            "vperm 0,0,0,2\n\t"         /* fix state element order */
            "vperm 1,1,1,2\n\t"         /* fix key element order */
            "vncipher 0,0,1\n\t"
            "vperm 0,0,0,2\n\t"         /* back to memory order */
            "stxvd2x 32,0,%0"
            : : "r"(o), "r"(i), "r"(k), "r"(bswap_le)
            : "memory", "v0", "v1", "v2");
    }
    return true;
}
112
/*
 * The IMC-before-AK ordering is not implemented: vncipher provides only
 * the AK-before-IMC ordering (see test_ISB_ISR_AK_IMC).
 */
bool test_ISB_ISR_IMC_AK(uint8_t *o, const uint8_t *i, const uint8_t *k)
{
    (void)o;
    (void)i;
    (void)k;
    return false;
}
117