/**
 * AES XTS routines supporting VMX in-core instructions on Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>

#include "aesp8-ppc.h"

/*
 * Per-transform context: the VSX-expanded data and tweak keys, plus a
 * software fallback transform for contexts where VSX cannot be used.
 */
struct p8_aes_xts_ctx {
	struct crypto_sync_skcipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
	struct aes_key tweak_key;
};

static int p8_aes_xts_init(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct crypto_sync_skcipher *fallback;
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Allocate a software xts(aes) implementation to fall back on. */
	fallback = crypto_alloc_sync_skcipher(alg, 0,
					      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	crypto_sync_skcipher_set_flags(
		fallback,
		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
	ctx->fallback = fallback;

	return 0;
}

static void p8_aes_xts_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_sync_skcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

static int p8_aes_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	ret = xts_check_key(tfm, key, keylen);
	if (ret)
		return ret;

	/*
	 * An XTS key is two AES keys back to back: the first half is the
	 * data key, the second half the tweak key.  The aes_p8_* key
	 * schedule helpers use vector registers, so they must run inside
	 * an enable_kernel_vsx()/disable_kernel_vsx() bracket with
	 * preemption and page faults disabled.
	 */
	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key + keylen/2, (keylen/2) * 8,
				     &ctx->tweak_key);
	ret |= aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
	ret |= aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	/* Keep the fallback's key in sync so both paths agree. */
	ret |= crypto_sync_skcipher_setkey(ctx->fallback, key, keylen);

	return ret ? -EINVAL : 0;
}

static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src,
			    unsigned int nbytes, int enc)
{
	int ret;
	u8 tweak[AES_BLOCK_SIZE];
	u8 *iv;
	struct blkcipher_walk walk;
	struct p8_aes_xts_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (in_interrupt()) {
		/*
		 * The vector unit cannot be used from interrupt context,
		 * so hand the request to the software fallback.
		 */
		SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_sync_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = enc ? crypto_skcipher_encrypt(req) :
			    crypto_skcipher_decrypt(req);
		skcipher_request_zero(req);
	} else {
		blkcipher_walk_init(&walk, dst, src, nbytes);

		ret = blkcipher_walk_virt(desc, &walk);

		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();

		/* Derive the initial tweak by encrypting the IV. */
		iv = walk.iv;
		memset(tweak, 0, AES_BLOCK_SIZE);
		aes_p8_encrypt(iv, tweak, &ctx->tweak_key);

		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();

		while ((nbytes = walk.nbytes)) {
			preempt_disable();
			pagefault_disable();
			enable_kernel_vsx();
			/* Process only whole AES blocks in this pass. */
			if (enc)
				aes_p8_xts_encrypt(walk.src.virt.addr,
						   walk.dst.virt.addr,
						   nbytes & AES_BLOCK_MASK,
						   &ctx->enc_key, NULL, tweak);
			else
				aes_p8_xts_decrypt(walk.src.virt.addr,
						   walk.dst.virt.addr,
						   nbytes & AES_BLOCK_MASK,
						   &ctx->dec_key, NULL, tweak);
			disable_kernel_vsx();
			pagefault_enable();
			preempt_enable();

			/* Hand any partial trailing block back to the walker. */
			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}
	}
	return ret;
}

static int p8_aes_xts_encrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	return p8_aes_xts_crypt(desc, dst, src, nbytes, 1);
}

static int p8_aes_xts_decrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	return p8_aes_xts_crypt(desc, dst, src, nbytes, 0);
}

struct crypto_alg p8_aes_xts_alg = {
	.cra_name = "xts(aes)",
	.cra_driver_name = "p8_aes_xts",
	.cra_module = THIS_MODULE,
	.cra_priority = 2000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
	.cra_init = p8_aes_xts_init,
	.cra_exit = p8_aes_xts_exit,
	.cra_blkcipher = {
		.ivsize = AES_BLOCK_SIZE,
		.min_keysize = 2 * AES_MIN_KEY_SIZE,
		.max_keysize = 2 * AES_MAX_KEY_SIZE,
		.setkey = p8_aes_xts_setkey,
		.encrypt = p8_aes_xts_encrypt,
		.decrypt = p8_aes_xts_decrypt,
	}
};