/**
 * AES XTS routines supporting VMX In-core instructions on Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>

#include "aesp8-ppc.h"

struct p8_aes_xts_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
	struct aes_key tweak_key;
};

static int p8_aes_xts_init(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *fallback;
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Software fallback for contexts where the VSX unit is unusable */
	fallback = crypto_alloc_skcipher(alg, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	crypto_skcipher_set_flags(
		fallback,
		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
	ctx->fallback = fallback;

	return 0;
}

static void p8_aes_xts_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_skcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

static int p8_aes_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	ret = xts_check_key(tfm, key, keylen);
	if (ret)
		return ret;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	/* XTS uses two half-length keys: data key first, tweak key second */
	ret = aes_p8_set_encrypt_key(key + keylen/2, (keylen/2) * 8,
				     &ctx->tweak_key);
	ret += aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
	ret += aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret += crypto_skcipher_setkey(ctx->fallback, key, keylen);
	return ret;
}

static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src,
			    unsigned int nbytes, int enc)
{
	int ret;
	u8 tweak[AES_BLOCK_SIZE];
	u8 *iv;
	struct blkcipher_walk walk;
	struct p8_aes_xts_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (in_interrupt()) {
		/* VSX state must not be touched in interrupt context;
		 * hand the request to the software fallback instead.
		 */
		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = enc ?
			crypto_skcipher_encrypt(req) :
			crypto_skcipher_decrypt(req);
		skcipher_request_zero(req);
	} else {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();

		blkcipher_walk_init(&walk, dst, src, nbytes);

		ret = blkcipher_walk_virt(desc, &walk);
		iv = walk.iv;
		memset(tweak, 0, AES_BLOCK_SIZE);
		/* Derive the initial tweak by encrypting the IV with the
		 * tweak key, per the XTS construction.
		 */
		aes_p8_encrypt(iv, tweak, &ctx->tweak_key);

		while ((nbytes = walk.nbytes)) {
			if (enc)
				aes_p8_xts_encrypt(walk.src.virt.addr,
						   walk.dst.virt.addr,
						   nbytes & AES_BLOCK_MASK,
						   &ctx->enc_key, NULL, tweak);
			else
				aes_p8_xts_decrypt(walk.src.virt.addr,
						   walk.dst.virt.addr,
						   nbytes & AES_BLOCK_MASK,
						   &ctx->dec_key, NULL, tweak);

			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}

		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();
	}
	return ret;
}

static int p8_aes_xts_encrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	return p8_aes_xts_crypt(desc, dst, src, nbytes, 1);
}

static int p8_aes_xts_decrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	return p8_aes_xts_crypt(desc, dst, src, nbytes, 0);
}

struct crypto_alg p8_aes_xts_alg = {
	.cra_name = "xts(aes)",
	.cra_driver_name = "p8_aes_xts",
	.cra_module = THIS_MODULE,
	.cra_priority = 2000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
	.cra_init = p8_aes_xts_init,
	.cra_exit = p8_aes_xts_exit,
	.cra_blkcipher = {
		.ivsize = AES_BLOCK_SIZE,
		.min_keysize = 2 * AES_MIN_KEY_SIZE,
		.max_keysize = 2 * AES_MAX_KEY_SIZE,
		.setkey = p8_aes_xts_setkey,
		.encrypt = p8_aes_xts_encrypt,
		.decrypt = p8_aes_xts_decrypt,
	}
};
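
/*
 * Illustrative sketch only, not part of the original driver: how a kernel
 * consumer might drive this transform through the generic skcipher API.
 * The function name p8_aes_xts_example, the buffer size, and the zero IV
 * are hypothetical, and <linux/random.h>, <linux/scatterlist.h> and
 * <linux/slab.h> are assumed to be available. Masking with CRYPTO_ALG_ASYNC
 * requests a synchronous implementation so the encrypt call completes
 * inline; "xts(aes)" resolves to p8_aes_xts when its cra_priority wins.
 */
static int __maybe_unused p8_aes_xts_example(void)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	u8 key[2 * AES_KEYSIZE_128];	/* data key + tweak key */
	u8 iv[AES_BLOCK_SIZE] = { 0 };	/* e.g. the sector number */
	u8 *buf;
	int ret;

	tfm = crypto_alloc_skcipher("xts(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* XTS takes a double-length key; both halves are set at once */
	get_random_bytes(key, sizeof(key));
	ret = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (ret)
		goto out_free_tfm;

	buf = kzalloc(4 * AES_BLOCK_SIZE, GFP_KERNEL);
	if (!buf) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_buf;
	}

	/* In-place encryption: the same scatterlist is source and sink */
	sg_init_one(&sg, buf, 4 * AES_BLOCK_SIZE);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, &sg, &sg, 4 * AES_BLOCK_SIZE, iv);
	ret = crypto_skcipher_encrypt(req);

	skcipher_request_free(req);
out_free_buf:
	kfree(buf);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return ret;
}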