/**
 * GHASH routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <crypto/b128ops.h>

#define IN_INTERRUPT in_interrupt()

#define GHASH_BLOCK_SIZE	(16)
#define GHASH_DIGEST_SIZE	(16)
#define GHASH_KEY_LEN		(16)

void gcm_init_p8(u128 htable[16], const u64 Xi[2]);
void gcm_gmult_p8(u64 Xi[2], const u128 htable[16]);
void gcm_ghash_p8(u64 Xi[2], const u128 htable[16],
		  const u8 *in, size_t len);

struct p8_ghash_ctx {
	u128 htable[16];
	struct crypto_shash *fallback;
};

struct p8_ghash_desc_ctx {
	u64 shash[2];
	u8 buffer[GHASH_DIGEST_SIZE];
	int bytes;
	struct shash_desc fallback_desc;
};

static int p8_ghash_init_tfm(struct crypto_tfm *tfm)
{
	const char *alg;
	struct crypto_shash *fallback;
	struct crypto_shash *shash_tfm = __crypto_shash_cast(tfm);
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!(alg = crypto_tfm_alg_name(tfm))) {
		printk(KERN_ERR "Failed to get algorithm name.\n");
		return -ENOENT;
	}

	fallback = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}
	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
	       crypto_tfm_alg_driver_name(crypto_shash_tfm(fallback)));

	crypto_shash_set_flags(fallback,
			       crypto_shash_get_flags((struct crypto_shash *) tfm));
	ctx->fallback = fallback;

	shash_tfm->descsize = sizeof(struct p8_ghash_desc_ctx)
		+ crypto_shash_descsize(fallback);

	return 0;
}

static void p8_ghash_exit_tfm(struct crypto_tfm *tfm)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_shash(ctx->fallback);
		ctx->fallback = NULL;
	}
}

static int p8_ghash_init(struct shash_desc *desc)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	dctx->bytes = 0;
	memset(dctx->shash, 0, GHASH_DIGEST_SIZE);
	dctx->fallback_desc.tfm = ctx->fallback;
	dctx->fallback_desc.flags = desc->flags;
	return crypto_shash_init(&dctx->fallback_desc);
}

static int p8_ghash_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(tfm));

	if (keylen != GHASH_KEY_LEN)
		return -EINVAL;
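
	/*
	 * The VMX GHASH routines use the AltiVec and FP register state,
	 * which the kernel does not keep live for kernel code by default:
	 * enable_kernel_altivec()/enable_kernel_fp() make that state
	 * available, and the pagefault_disable()/pagefault_enable() pair
	 * keeps the thread from taking a page fault while the vector unit
	 * is in use.  The same pattern is repeated around every
	 * gcm_*_p8() call below.
	 */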
	pagefault_disable();
	enable_kernel_altivec();
	enable_kernel_fp();
	gcm_init_p8(ctx->htable, (const u64 *) key);
	pagefault_enable();
	return crypto_shash_setkey(ctx->fallback, key, keylen);
}

static int p8_ghash_update(struct shash_desc *desc,
			   const u8 *src, unsigned int srclen)
{
	unsigned int len;
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	if (IN_INTERRUPT) {
		return crypto_shash_update(&dctx->fallback_desc, src, srclen);
	} else {
		if (dctx->bytes) {
			if (dctx->bytes + srclen < GHASH_DIGEST_SIZE) {
				memcpy(dctx->buffer + dctx->bytes, src, srclen);
				dctx->bytes += srclen;
				return 0;
			}
			memcpy(dctx->buffer + dctx->bytes, src,
			       GHASH_DIGEST_SIZE - dctx->bytes);
			pagefault_disable();
			enable_kernel_altivec();
			enable_kernel_fp();
			gcm_ghash_p8(dctx->shash, ctx->htable, dctx->buffer,
				     GHASH_DIGEST_SIZE);
			pagefault_enable();
			src += GHASH_DIGEST_SIZE - dctx->bytes;
			srclen -= GHASH_DIGEST_SIZE - dctx->bytes;
			dctx->bytes = 0;
		}
		len = srclen & ~(GHASH_DIGEST_SIZE - 1);
		if (len) {
			pagefault_disable();
			enable_kernel_altivec();
			enable_kernel_fp();
			gcm_ghash_p8(dctx->shash, ctx->htable, src, len);
			pagefault_enable();
			src += len;
			srclen -= len;
		}
		if (srclen) {
			memcpy(dctx->buffer, src, srclen);
			dctx->bytes = srclen;
		}
		return 0;
	}
}

static int p8_ghash_final(struct shash_desc *desc, u8 *out)
{
	int i;
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	if (IN_INTERRUPT) {
		return crypto_shash_final(&dctx->fallback_desc, out);
	} else {
		if (dctx->bytes) {
			for (i = dctx->bytes; i < GHASH_DIGEST_SIZE; i++)
				dctx->buffer[i] = 0;
			pagefault_disable();
			enable_kernel_altivec();
			enable_kernel_fp();
			gcm_ghash_p8(dctx->shash, ctx->htable, dctx->buffer,
				     GHASH_DIGEST_SIZE);
			pagefault_enable();
			dctx->bytes = 0;
		}
		memcpy(out, dctx->shash, GHASH_DIGEST_SIZE);
		return 0;
	}
}

struct shash_alg p8_ghash_alg = {
	.digestsize = GHASH_DIGEST_SIZE,
	.init = p8_ghash_init,
	.update = p8_ghash_update,
	.final = p8_ghash_final,
	.setkey = p8_ghash_setkey,
	.descsize = sizeof(struct p8_ghash_desc_ctx),
	.base = {
		.cra_name = "ghash",
		.cra_driver_name = "p8_ghash",
		.cra_priority = 1000,
		.cra_flags = CRYPTO_ALG_TYPE_SHASH | CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize = GHASH_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct p8_ghash_ctx),
		.cra_module = THIS_MODULE,
		.cra_init = p8_ghash_init_tfm,
		.cra_exit = p8_ghash_exit_tfm,
	},
};
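
/*
 * Usage sketch (editorial addition, not part of the original driver):
 * p8_ghash_alg is deliberately non-static, so the module's init/exit code,
 * which lives elsewhere in this driver, is expected to hand it to the
 * crypto API roughly like this:
 *
 *	extern struct shash_alg p8_ghash_alg;
 *
 *	static int __init p8_ghash_example_init(void)
 *	{
 *		// Registers "ghash"/"p8_ghash" so ghash requests can be
 *		// served by this VMX-accelerated implementation.
 *		return crypto_register_shash(&p8_ghash_alg);
 *	}
 *
 *	static void __exit p8_ghash_example_exit(void)
 *	{
 *		crypto_unregister_shash(&p8_ghash_alg);
 *	}
 *
 * The p8_ghash_example_* names are illustrative only; the actual module
 * entry points are defined in the file that registers the P8/VMX algorithms.
 */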