// SPDX-License-Identifier: GPL-2.0-only
/**
 * AES ECB routines supporting the Power 7+ Nest Accelerators driver
 *
 * Copyright (C) 2011-2012 International Business Machines Inc.
 *
 * Author: Kent Yoder <yoder1@us.ibm.com>
 */

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <asm/vio.h>

#include "nx_csbcpb.h"
#include "nx.h"


/* Program the accelerator key size and copy the key into the CPB. */
static int ecb_aes_nx_set_key(struct crypto_tfm *tfm,
			      const u8          *in_key,
			      unsigned int       key_len)
{
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);
	struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;

	nx_ctx_init(nx_ctx, HCOP_FC_AES);

	switch (key_len) {
	case AES_KEYSIZE_128:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];
		break;
	case AES_KEYSIZE_192:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_192);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192];
		break;
	case AES_KEYSIZE_256:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_256);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256];
		break;
	default:
		return -EINVAL;
	}

	csbcpb->cpb.hdr.mode = NX_MODE_AES_ECB;
	memcpy(csbcpb->cpb.aes_ecb.key, in_key, key_len);

	return 0;
}

/*
 * Walk the request in accelerator-sized chunks, issuing one synchronous
 * hcall per chunk until all of nbytes has been processed.
 */
static int ecb_aes_nx_crypt(struct blkcipher_desc *desc,
			    struct scatterlist    *dst,
			    struct scatterlist    *src,
			    unsigned int           nbytes,
			    int                    enc)
{
	struct nx_crypto_ctx *nx_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	unsigned long irq_flags;
	unsigned int processed = 0, to_process;
	int rc;

	spin_lock_irqsave(&nx_ctx->lock, irq_flags);

	if (enc)
		NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;
	else
		NX_CPB_FDM(csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;

	do {
		to_process = nbytes - processed;

		rc = nx_build_sg_lists(nx_ctx, desc, dst, src, &to_process,
				       processed, NULL);
		if (rc)
			goto out;

		if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) {
			rc = -EINVAL;
			goto out;
		}

		rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
				   desc->flags & CRYPTO_TFM_REQ_MAY_SLEEP);
		if (rc)
			goto out;

		atomic_inc(&(nx_ctx->stats->aes_ops));
		atomic64_add(csbcpb->csb.processed_byte_count,
			     &(nx_ctx->stats->aes_bytes));

		processed += to_process;
	} while (processed < nbytes);

out:
	spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
	return rc;
}

static int ecb_aes_nx_encrypt(struct blkcipher_desc *desc,
			      struct scatterlist    *dst,
			      struct scatterlist    *src,
			      unsigned int           nbytes)
{
	return ecb_aes_nx_crypt(desc, dst, src, nbytes, 1);
}

static int ecb_aes_nx_decrypt(struct blkcipher_desc *desc,
			      struct scatterlist    *dst,
			      struct scatterlist    *src,
			      unsigned int           nbytes)
{
	return ecb_aes_nx_crypt(desc, dst, src, nbytes, 0);
}

struct crypto_alg nx_ecb_aes_alg = {
	.cra_name        = "ecb(aes)",
	.cra_driver_name = "ecb-aes-nx",
	.cra_priority    = 300,
	.cra_flags       = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize   = AES_BLOCK_SIZE,
	.cra_alignmask   = 0xf,
	.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
	.cra_type        = &crypto_blkcipher_type,
	.cra_module      = THIS_MODULE,
	.cra_init        = nx_crypto_ctx_aes_ecb_init,
	.cra_exit        = nx_crypto_ctx_exit,
	.cra_blkcipher = {
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey      = ecb_aes_nx_set_key,
		.encrypt     = ecb_aes_nx_encrypt,
		.decrypt     = ecb_aes_nx_decrypt,
	}
};