/**
 * AES routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>

#include "aesp8-ppc.h"

struct p8_aes_ctx {
	struct crypto_cipher *fallback;	/* software cipher used in interrupt context */
	struct aes_key enc_key;
	struct aes_key dec_key;
};

static int p8_aes_init(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct crypto_cipher *fallback;
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Allocate an "aes" implementation that does not itself need a fallback. */
	fallback = crypto_alloc_cipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	/*
	 * struct crypto_cipher embeds struct crypto_tfm as its first member,
	 * which is what makes the cast below work.
	 */
	crypto_cipher_set_flags(fallback,
				crypto_cipher_get_flags((struct crypto_cipher *) tfm));
	ctx->fallback = fallback;

	return 0;
}

static void p8_aes_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_cipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

static int p8_aes_setkey(struct crypto_tfm *tfm, const u8 *key,
			 unsigned int keylen)
{
	int ret;
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	/*
	 * The P8 key-schedule routines use VSX registers, so preemption and
	 * page faults must be disabled while VSX is enabled. The key length
	 * is passed to them in bits.
	 */
	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	/* Keep the fallback cipher's key in sync. */
	ret += crypto_cipher_setkey(ctx->fallback, key, keylen);
	return ret;
}

static void p8_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (in_interrupt()) {
		/* Enabling VSX is not safe in interrupt context; use the fallback. */
		crypto_cipher_encrypt_one(ctx->fallback, dst, src);
	} else {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		aes_p8_encrypt(src, dst, &ctx->enc_key);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();
	}
}

static void p8_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (in_interrupt()) {
		/* Enabling VSX is not safe in interrupt context; use the fallback. */
		crypto_cipher_decrypt_one(ctx->fallback, dst, src);
	} else {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		aes_p8_decrypt(src, dst, &ctx->dec_key);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();
	}
}

struct crypto_alg p8_aes_alg = {
	.cra_name = "aes",
	.cra_driver_name = "p8_aes",
	.cra_module = THIS_MODULE,
	.cra_priority = 1000,
	.cra_type = NULL,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct p8_aes_ctx),
	.cra_init = p8_aes_init,
	.cra_exit = p8_aes_exit,
	.cra_cipher = {
		.cia_min_keysize = AES_MIN_KEY_SIZE,
		.cia_max_keysize = AES_MAX_KEY_SIZE,
		.cia_setkey = p8_aes_setkey,
		.cia_encrypt = p8_aes_encrypt,
		.cia_decrypt = p8_aes_decrypt,
	},
};
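
/*
 * Registration note: p8_aes_alg is non-static, so the
 * crypto_register_alg()/crypto_unregister_alg() calls are expected to live in
 * a separate module-init file. A minimal, hypothetical sketch (the function
 * names below are assumptions, and a real driver would also verify that the
 * CPU provides the vector crypto instructions before registering):
 *
 *	static int __init p8_aes_mod_init(void)
 *	{
 *		return crypto_register_alg(&p8_aes_alg);
 *	}
 *
 *	static void __exit p8_aes_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&p8_aes_alg);
 *	}
 *
 *	module_init(p8_aes_mod_init);
 *	module_exit(p8_aes_mod_exit);
 */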