/**
 * AES XTS routines supporting VMX In-core instructions on Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>

#include "aesp8-ppc.h"

struct p8_aes_xts_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
	struct aes_key tweak_key;
};

static int p8_aes_xts_init(struct crypto_tfm *tfm)
{
	const char *alg;
	struct crypto_skcipher *fallback;
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!(alg = crypto_tfm_alg_name(tfm))) {
		printk(KERN_ERR "Failed to get algorithm name.\n");
		return -ENOENT;
	}

	fallback = crypto_alloc_skcipher(alg, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
			"Failed to allocate transformation for '%s': %ld\n",
			alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}
	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
		crypto_skcipher_driver_name(fallback));

	crypto_skcipher_set_flags(
		fallback,
		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
	ctx->fallback = fallback;

	return 0;
}

static void p8_aes_xts_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_skcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

static int p8_aes_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	ret = xts_check_key(tfm, key, keylen);
	if (ret)
		return ret;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key + keylen/2, (keylen/2) * 8, &ctx->tweak_key);
	ret += aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
	ret += aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret += crypto_skcipher_setkey(ctx->fallback, key, keylen);
	return ret;
}
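
/*
 * Core XTS path. When called from interrupt context the VSX unit is not
 * touched; the request is bounced to the software fallback skcipher
 * allocated in p8_aes_xts_init(). Otherwise the caller-supplied IV is
 * encrypted with the tweak key to form the initial XTS tweak, and the
 * assembler routines aes_p8_xts_encrypt()/aes_p8_xts_decrypt() process
 * whole AES blocks while blkcipher_walk advances the scatterlists.
 */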
static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src,
			    unsigned int nbytes, int enc)
{
	int ret;
	u8 tweak[AES_BLOCK_SIZE];
	u8 *iv;
	struct blkcipher_walk walk;
	struct p8_aes_xts_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (in_interrupt()) {
		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = enc ? crypto_skcipher_encrypt(req) :
			    crypto_skcipher_decrypt(req);
		skcipher_request_zero(req);
	} else {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();

		blkcipher_walk_init(&walk, dst, src, nbytes);

		ret = blkcipher_walk_virt(desc, &walk);
		iv = walk.iv;
		memset(tweak, 0, AES_BLOCK_SIZE);
		aes_p8_encrypt(iv, tweak, &ctx->tweak_key);

		while ((nbytes = walk.nbytes)) {
			if (enc)
				aes_p8_xts_encrypt(walk.src.virt.addr,
						   walk.dst.virt.addr,
						   nbytes & AES_BLOCK_MASK,
						   &ctx->enc_key, NULL, tweak);
			else
				aes_p8_xts_decrypt(walk.src.virt.addr,
						   walk.dst.virt.addr,
						   nbytes & AES_BLOCK_MASK,
						   &ctx->dec_key, NULL, tweak);

			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}

		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();
	}
	return ret;
}

static int p8_aes_xts_encrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	return p8_aes_xts_crypt(desc, dst, src, nbytes, 1);
}

static int p8_aes_xts_decrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	return p8_aes_xts_crypt(desc, dst, src, nbytes, 0);
}

struct crypto_alg p8_aes_xts_alg = {
	.cra_name = "xts(aes)",
	.cra_driver_name = "p8_aes_xts",
	.cra_module = THIS_MODULE,
	.cra_priority = 2000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
	.cra_init = p8_aes_xts_init,
	.cra_exit = p8_aes_xts_exit,
	.cra_blkcipher = {
		.ivsize = AES_BLOCK_SIZE,
		.min_keysize = 2 * AES_MIN_KEY_SIZE,
		.max_keysize = 2 * AES_MAX_KEY_SIZE,
		.setkey = p8_aes_xts_setkey,
		.encrypt = p8_aes_xts_encrypt,
		.decrypt = p8_aes_xts_decrypt,
	}
};