// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

asmlinkage u32 ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				    u32 macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);

	return ce_aes_expandkey(ctx, in_key, key_len);
}

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *		  (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}
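
/*
 * Feed the associated data into the CBC-MAC. Per RFC 3610, the AAD is
 * prefixed with an encoding of its length: two big-endian octets for
 * lengths below 0xff00, or the marker 0xfffe followed by four big-endian
 * octets for anything larger. (assoclen is a u32, so the eight-octet form
 * is never needed here.)
 */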
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	macp = ce_aes_ccm_auth_data(mac, (u8 *)&ltag, ltag.len, macp,
				    ctx->key_enc, num_rounds(ctx));
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		n = min_t(u32, n, SZ_4K); /* yield NEON at least every 4k */
		p = scatterwalk_map(&walk);

		macp = ce_aes_ccm_auth_data(mac, p, n, macp, ctx->key_enc,
					    num_rounds(ctx));

		if (len / SZ_4K > (len - n) / SZ_4K) {
			kernel_neon_end();
			kernel_neon_begin();
		}
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);
	if (unlikely(err))
		return err;

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	do {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;

		if (walk.nbytes == walk.total)
			tail = 0;

		ce_aes_ccm_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   walk.nbytes - tail, ctx->key_enc,
				   num_rounds(ctx), mac, walk.iv);

		if (walk.nbytes == walk.total)
			ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

		kernel_neon_end();

		if (walk.nbytes) {
			err = skcipher_walk_done(&walk, tail);
			if (unlikely(err))
				return err;
			if (unlikely(walk.nbytes))
				kernel_neon_begin();
		}
	} while (walk.nbytes);

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}
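
/*
 * Decryption mirrors ccm_encrypt(): req->cryptlen covers both the ciphertext
 * and the appended auth tag, so the actual message length is
 * cryptlen - authsize. The MAC recomputed over the decrypted data is compared
 * against the stored tag with crypto_memneq() to avoid leaking timing
 * information, and -EBADMSG is returned on mismatch.
 */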
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);
	if (unlikely(err))
		return err;

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	do {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;

		if (walk.nbytes == walk.total)
			tail = 0;

		ce_aes_ccm_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   walk.nbytes - tail, ctx->key_enc,
				   num_rounds(ctx), mac, walk.iv);

		if (walk.nbytes == walk.total)
			ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

		kernel_neon_end();

		if (walk.nbytes) {
			err = skcipher_walk_done(&walk, tail);
			if (unlikely(err))
				return err;
			if (unlikely(walk.nbytes))
				kernel_neon_begin();
		}
	} while (walk.nbytes);

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");