/**
 * AES CTR routines supporting the Power 7+ Nest Accelerators driver
 *
 * Copyright (C) 2011-2012 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Kent Yoder <yoder1@us.ibm.com>
 */

#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <asm/vio.h>

#include "nx_csbcpb.h"
#include "nx.h"

static int ctr_aes_nx_set_key(struct crypto_tfm *tfm,
			      const u8 *in_key,
			      unsigned int key_len)
{
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;

	nx_ctx_init(nx_ctx, HCOP_FC_AES);

	/* Select the coprocessor key size and the matching driver properties. */
	switch (key_len) {
	case AES_KEYSIZE_128:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];
		break;
	case AES_KEYSIZE_192:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_192);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192];
		break;
	case AES_KEYSIZE_256:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_256);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256];
		break;
	default:
		return -EINVAL;
	}

	csbcpb->cpb.hdr.mode = NX_MODE_AES_CTR;
	memcpy(csbcpb->cpb.aes_ctr.key, in_key, key_len);

	return 0;
}

static int ctr3686_aes_nx_set_key(struct crypto_tfm *tfm,
				  const u8 *in_key,
				  unsigned int key_len)
{
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);

	if (key_len < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	/* The RFC 3686 nonce is carried in the last bytes of the key. */
	memcpy(nx_ctx->priv.ctr.iv,
	       in_key + key_len - CTR_RFC3686_NONCE_SIZE,
	       CTR_RFC3686_NONCE_SIZE);

	key_len -= CTR_RFC3686_NONCE_SIZE;

	return ctr_aes_nx_set_key(tfm, in_key, key_len);
}

static int ctr_aes_nx_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src,
			    unsigned int nbytes)
{
	struct nx_crypto_ctx *nx_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	unsigned long irq_flags;
	unsigned int processed = 0, to_process;
	u32 max_sg_len;
	int rc;

	spin_lock_irqsave(&nx_ctx->lock, irq_flags);

	max_sg_len = min_t(u32, nx_driver.of.max_sg_len / sizeof(struct nx_sg),
			   nx_ctx->ap->sglen);

	do {
		/* Clamp each pass to the coprocessor's data and scatter/gather
		 * limits, rounded down to a whole number of AES blocks. */
		to_process = min_t(u64, nbytes - processed,
				   nx_ctx->ap->databytelen);
		to_process = min_t(u64, to_process,
				   NX_PAGE_SIZE * (max_sg_len - 1));
		to_process = to_process & ~(AES_BLOCK_SIZE - 1);

		rc = nx_build_sg_lists(nx_ctx, desc, dst, src, to_process,
				       processed, csbcpb->cpb.aes_ctr.iv);
		if (rc)
			goto out;

		if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) {
			rc = -EINVAL;
			goto out;
		}

		rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
				   desc->flags & CRYPTO_TFM_REQ_MAY_SLEEP);
		if (rc)
			goto out;

		/* Save the updated counter so the next pass continues from it. */
		memcpy(desc->info, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE);

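		/* Account this operation and the byte count the coprocessor
		 * reported as processed in the driver statistics. */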
		atomic_inc(&(nx_ctx->stats->aes_ops));
		atomic64_add(csbcpb->csb.processed_byte_count,
			     &(nx_ctx->stats->aes_bytes));

		processed += to_process;
	} while (processed < nbytes);
out:
	spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
	return rc;
}

static int ctr3686_aes_nx_crypt(struct blkcipher_desc *desc,
				struct scatterlist *dst,
				struct scatterlist *src,
				unsigned int nbytes)
{
	struct nx_crypto_ctx *nx_ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 *iv = nx_ctx->priv.ctr.iv;

	/* Assemble the RFC 3686 counter block behind the nonce saved at
	 * setkey time: copy in the per-request IV and set the low counter
	 * byte to 1. */
	memcpy(iv + CTR_RFC3686_NONCE_SIZE,
	       desc->info, CTR_RFC3686_IV_SIZE);
	iv[15] = 1;

	desc->info = nx_ctx->priv.ctr.iv;

	return ctr_aes_nx_crypt(desc, dst, src, nbytes);
}

struct crypto_alg nx_ctr_aes_alg = {
	.cra_name        = "ctr(aes)",
	.cra_driver_name = "ctr-aes-nx",
	.cra_priority    = 300,
	.cra_flags       = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize   = 1,
	.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
	.cra_type        = &crypto_blkcipher_type,
	.cra_module      = THIS_MODULE,
	.cra_init        = nx_crypto_ctx_aes_ctr_init,
	.cra_exit        = nx_crypto_ctx_exit,
	.cra_blkcipher = {
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize      = AES_BLOCK_SIZE,
		.setkey      = ctr_aes_nx_set_key,
		.encrypt     = ctr_aes_nx_crypt,
		.decrypt     = ctr_aes_nx_crypt,
	}
};

struct crypto_alg nx_ctr3686_aes_alg = {
	.cra_name        = "rfc3686(ctr(aes))",
	.cra_driver_name = "rfc3686-ctr-aes-nx",
	.cra_priority    = 300,
	.cra_flags       = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize   = 1,
	.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
	.cra_type        = &crypto_blkcipher_type,
	.cra_module      = THIS_MODULE,
	.cra_init        = nx_crypto_ctx_aes_ctr_init,
	.cra_exit        = nx_crypto_ctx_exit,
	.cra_blkcipher = {
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize      = CTR_RFC3686_IV_SIZE,
		.geniv       = "seqiv",
		.setkey      = ctr3686_aes_nx_set_key,
		.encrypt     = ctr3686_aes_nx_crypt,
		.decrypt     = ctr3686_aes_nx_crypt,
	}
};