/*
 * AES round fragments, generic version
 * SPDX-License-Identifier: GPL-2.0-or-later
 *
 * Copyright (C) 2023 Linaro, Ltd.
 */

#ifndef CRYPTO_AES_ROUND_H
#define CRYPTO_AES_ROUND_H

/* Hosts with acceleration will usually need a 16-byte vector type. */
typedef uint8_t AESStateVec __attribute__((vector_size(16)));

typedef union {
    uint8_t b[16];
    uint32_t w[4];
    uint64_t d[2];
    AESStateVec v;
} AESState;
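
/*
 * AESState overlays byte, 32-bit word, 64-bit doubleword and host
 * vector views of the same 128-bit state, so each implementation can
 * use whichever access width is most convenient.
 */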

#include "host/crypto/aes-round.h"
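
/*
 * The per-host header included above is expected to define
 * HAVE_AES_ACCEL and the aesenc_*_accel/aesdec_*_accel routines used
 * by the dispatch helpers below; the generic *_gen/*_genrev routines
 * serve as the fallback when no acceleration is available.
 */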

/*
 * Perform MixColumns.
 */
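
/*
 * A standalone MixColumns step is provided because some guest
 * instruction sets split an encryption round into separate sub-steps
 * rather than exposing a single fused round operation.
 */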

void aesenc_MC_gen(AESState *ret, const AESState *st);
void aesenc_MC_genrev(AESState *ret, const AESState *st);

static inline void aesenc_MC(AESState *r, const AESState *st, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesenc_MC_accel(r, st, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesenc_MC_gen(r, st);
    } else {
        aesenc_MC_genrev(r, st);
    }
}
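
/*
 * Every helper below dispatches the same way: use the host-accelerated
 * routine when HAVE_AES_ACCEL is set, otherwise fall back to the
 * generic implementation, selecting the byte-reversed variant (genrev)
 * when the requested element order 'be' does not match
 * HOST_BIG_ENDIAN.
 */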

/*
 * Perform SubBytes + ShiftRows + AddRoundKey.
 */
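
/*
 * In FIPS-197 terms this is the final-round combination: the last
 * encryption round applies SubBytes, ShiftRows and AddRoundKey but
 * omits MixColumns.
 */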

void aesenc_SB_SR_AK_gen(AESState *ret, const AESState *st,
                         const AESState *rk);
void aesenc_SB_SR_AK_genrev(AESState *ret, const AESState *st,
                            const AESState *rk);

static inline void aesenc_SB_SR_AK(AESState *r, const AESState *st,
                                   const AESState *rk, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesenc_SB_SR_AK_accel(r, st, rk, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesenc_SB_SR_AK_gen(r, st, rk);
    } else {
        aesenc_SB_SR_AK_genrev(r, st, rk);
    }
}

/*
 * Perform SubBytes + ShiftRows + MixColumns + AddRoundKey.
 */
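
/*
 * This is the full interior encryption round: all four transformations
 * are applied, with AddRoundKey last, as in FIPS-197.
 */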

void aesenc_SB_SR_MC_AK_gen(AESState *ret, const AESState *st,
                            const AESState *rk);
void aesenc_SB_SR_MC_AK_genrev(AESState *ret, const AESState *st,
                               const AESState *rk);

static inline void aesenc_SB_SR_MC_AK(AESState *r, const AESState *st,
                                      const AESState *rk, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesenc_SB_SR_MC_AK_accel(r, st, rk, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesenc_SB_SR_MC_AK_gen(r, st, rk);
    } else {
        aesenc_SB_SR_MC_AK_genrev(r, st, rk);
    }
}

/*
 * Perform InvMixColumns.
 */
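
/*
 * Besides its use inside a decryption round, a standalone
 * InvMixColumns step is typically what a guest needs to convert
 * encryption round keys into the transformed keys of the equivalent
 * inverse cipher.
 */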

void aesdec_IMC_gen(AESState *ret, const AESState *st);
void aesdec_IMC_genrev(AESState *ret, const AESState *st);

static inline void aesdec_IMC(AESState *r, const AESState *st, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesdec_IMC_accel(r, st, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesdec_IMC_gen(r, st);
    } else {
        aesdec_IMC_genrev(r, st);
    }
}

/*
 * Perform InvSubBytes + InvShiftRows + AddRoundKey.
 */
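
/*
 * As with encryption, the last decryption round omits the
 * InvMixColumns step, so this combination corresponds to a final
 * round.
 */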

void aesdec_ISB_ISR_AK_gen(AESState *ret, const AESState *st,
                           const AESState *rk);
void aesdec_ISB_ISR_AK_genrev(AESState *ret, const AESState *st,
                              const AESState *rk);

static inline void aesdec_ISB_ISR_AK(AESState *r, const AESState *st,
                                     const AESState *rk, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesdec_ISB_ISR_AK_accel(r, st, rk, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesdec_ISB_ISR_AK_gen(r, st, rk);
    } else {
        aesdec_ISB_ISR_AK_genrev(r, st, rk);
    }
}

/*
 * Perform InvSubBytes + InvShiftRows + AddRoundKey + InvMixColumns.
 */

void aesdec_ISB_ISR_AK_IMC_gen(AESState *ret, const AESState *st,
                               const AESState *rk);
void aesdec_ISB_ISR_AK_IMC_genrev(AESState *ret, const AESState *st,
                                  const AESState *rk);

static inline void aesdec_ISB_ISR_AK_IMC(AESState *r, const AESState *st,
                                         const AESState *rk, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesdec_ISB_ISR_AK_IMC_accel(r, st, rk, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesdec_ISB_ISR_AK_IMC_gen(r, st, rk);
    } else {
        aesdec_ISB_ISR_AK_IMC_genrev(r, st, rk);
    }
}

/*
 * Perform InvSubBytes + InvShiftRows + InvMixColumns + AddRoundKey.
 */
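
/*
 * Note the two orderings: the previous helper applies AddRoundKey
 * before InvMixColumns, as in the straightforward inverse cipher of
 * FIPS-197, while this one applies InvMixColumns first, matching the
 * "equivalent inverse cipher" formulation in which the round keys have
 * InvMixColumns pre-applied.  Different guest instruction sets expose
 * one or the other decomposition.
 */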

void aesdec_ISB_ISR_IMC_AK_gen(AESState *ret, const AESState *st,
                               const AESState *rk);
void aesdec_ISB_ISR_IMC_AK_genrev(AESState *ret, const AESState *st,
                                  const AESState *rk);

static inline void aesdec_ISB_ISR_IMC_AK(AESState *r, const AESState *st,
                                         const AESState *rk, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesdec_ISB_ISR_IMC_AK_accel(r, st, rk, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesdec_ISB_ISR_IMC_AK_gen(r, st, rk);
    } else {
        aesdec_ISB_ISR_IMC_AK_genrev(r, st, rk);
    }
}
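
/*
 * Illustrative sketch only (not part of this header's API): assuming a
 * caller holds an expanded key schedule rk[0..10] in matching byte
 * order and that in-place operation is acceptable, an AES-128
 * encryption could be composed from these fragments roughly as:
 *
 *     st.v ^= rk[0].v;                          // initial AddRoundKey
 *     for (int i = 1; i < 10; i++) {
 *         aesenc_SB_SR_MC_AK(&st, &st, &rk[i], be);
 *     }
 *     aesenc_SB_SR_AK(&st, &st, &rk[10], be);   // final round
 */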

#endif /* CRYPTO_AES_ROUND_H */