// SPDX-License-Identifier: GPL-2.0
/*
 * Xilinx ZynqMP SHA Driver.
 * Copyright (c) 2022 Xilinx Inc.
 */
#include <linux/cacheflush.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <crypto/sha3.h>
#include <linux/crypto.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/firmware/xlnx-zynqmp.h>
#include <linux/init.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

#define ZYNQMP_DMA_BIT_MASK		32U
#define ZYNQMP_DMA_ALLOC_FIXED_SIZE	0x1000U

enum zynqmp_sha_op {
	ZYNQMP_SHA3_INIT = 1,
	ZYNQMP_SHA3_UPDATE = 2,
	ZYNQMP_SHA3_FINAL = 4,
};

struct zynqmp_sha_drv_ctx {
	struct shash_alg sha3_384;
	struct device *dev;
};

struct zynqmp_sha_tfm_ctx {
	struct device *dev;
	struct crypto_shash *fbk_tfm;
};

struct zynqmp_sha_desc_ctx {
	struct shash_desc fbk_req;
};

static dma_addr_t update_dma_addr, final_dma_addr;
static char *ubuf, *fbuf;

static int zynqmp_sha_init_tfm(struct crypto_shash *hash)
{
	const char *fallback_driver_name = crypto_shash_alg_name(hash);
	struct zynqmp_sha_tfm_ctx *tfm_ctx = crypto_shash_ctx(hash);
	struct shash_alg *alg = crypto_shash_alg(hash);
	struct crypto_shash *fallback_tfm;
	struct zynqmp_sha_drv_ctx *drv_ctx;

	drv_ctx = container_of(alg, struct zynqmp_sha_drv_ctx, sha3_384);
	tfm_ctx->dev = drv_ctx->dev;

	/* Allocate a fallback and abort if it failed. */
	fallback_tfm = crypto_alloc_shash(fallback_driver_name, 0,
					  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback_tfm))
		return PTR_ERR(fallback_tfm);

	tfm_ctx->fbk_tfm = fallback_tfm;
	hash->descsize += crypto_shash_descsize(tfm_ctx->fbk_tfm);

	return 0;
}

static void zynqmp_sha_exit_tfm(struct crypto_shash *hash)
{
	struct zynqmp_sha_tfm_ctx *tfm_ctx = crypto_shash_ctx(hash);

	if (tfm_ctx->fbk_tfm) {
		crypto_free_shash(tfm_ctx->fbk_tfm);
		tfm_ctx->fbk_tfm = NULL;
	}

	memzero_explicit(tfm_ctx, sizeof(struct zynqmp_sha_tfm_ctx));
}

static int zynqmp_sha_init(struct shash_desc *desc)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);
	struct zynqmp_sha_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);

	dctx->fbk_req.tfm = tctx->fbk_tfm;
	return crypto_shash_init(&dctx->fbk_req);
}

static int zynqmp_sha_update(struct shash_desc *desc, const u8 *data, unsigned int length)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);

	return crypto_shash_update(&dctx->fbk_req, data, length);
}

static int zynqmp_sha_final(struct shash_desc *desc, u8 *out)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);

	return crypto_shash_final(&dctx->fbk_req, out);
}

static int zynqmp_sha_finup(struct shash_desc *desc, const u8 *data, unsigned int length, u8 *out)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);

	return crypto_shash_finup(&dctx->fbk_req, data, length, out);
}

static int zynqmp_sha_import(struct shash_desc *desc, const void *in)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);
	struct zynqmp_sha_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);

	dctx->fbk_req.tfm = tctx->fbk_tfm;
	return crypto_shash_import(&dctx->fbk_req, in);
}

static int zynqmp_sha_export(struct shash_desc *desc, void *out)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);

	return crypto_shash_export(&dctx->fbk_req, out);
}

/* Hash the whole message in one shot via the PMU firmware (init/update/final). */
static int zynqmp_sha_digest(struct shash_desc *desc, const u8 *data, unsigned int len, u8 *out)
{
	unsigned int remaining_len = len;
	int update_size;
	int ret;

	ret = zynqmp_pm_sha_hash(0, 0, ZYNQMP_SHA3_INIT);
	if (ret)
		return ret;

	/* Stream the input to the firmware one DMA bounce buffer at a time. */
	while (remaining_len != 0) {
		memzero_explicit(ubuf, ZYNQMP_DMA_ALLOC_FIXED_SIZE);
		if (remaining_len >= ZYNQMP_DMA_ALLOC_FIXED_SIZE) {
			update_size = ZYNQMP_DMA_ALLOC_FIXED_SIZE;
			remaining_len -= ZYNQMP_DMA_ALLOC_FIXED_SIZE;
		} else {
			update_size = remaining_len;
			remaining_len = 0;
		}
		memcpy(ubuf, data, update_size);
		flush_icache_range((unsigned long)ubuf, (unsigned long)ubuf + update_size);
		ret = zynqmp_pm_sha_hash(update_dma_addr, update_size, ZYNQMP_SHA3_UPDATE);
		if (ret)
			return ret;

		data += update_size;
	}

	/* Final request: the firmware writes the digest into fbuf. */
	ret = zynqmp_pm_sha_hash(final_dma_addr, SHA3_384_DIGEST_SIZE, ZYNQMP_SHA3_FINAL);
	memcpy(out, fbuf, SHA3_384_DIGEST_SIZE);
	memzero_explicit(fbuf, SHA3_384_DIGEST_SIZE);

	return ret;
}

static struct zynqmp_sha_drv_ctx sha3_drv_ctx = {
	.sha3_384 = {
		.init = zynqmp_sha_init,
		.update = zynqmp_sha_update,
		.final = zynqmp_sha_final,
		.finup = zynqmp_sha_finup,
		.digest = zynqmp_sha_digest,
		.export = zynqmp_sha_export,
		.import = zynqmp_sha_import,
		.init_tfm = zynqmp_sha_init_tfm,
		.exit_tfm = zynqmp_sha_exit_tfm,
		.descsize = sizeof(struct zynqmp_sha_desc_ctx),
		.statesize = sizeof(struct sha3_state),
		.digestsize = SHA3_384_DIGEST_SIZE,
		.base = {
			.cra_name = "sha3-384",
			.cra_driver_name = "zynqmp-sha3-384",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SHA3_384_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct zynqmp_sha_tfm_ctx),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
		}
	}
};

static int zynqmp_sha_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	int err;

	err = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(ZYNQMP_DMA_BIT_MASK));
	if (err < 0) {
		dev_err(dev, "No usable DMA configuration\n");
		return err;
	}

	err = crypto_register_shash(&sha3_drv_ctx.sha3_384);
	if (err < 0) {
		dev_err(dev, "Failed to register shash alg.\n");
		return err;
	}

	sha3_drv_ctx.dev = dev;
	platform_set_drvdata(pdev, &sha3_drv_ctx);

	/* DMA-coherent bounce buffers shared with the firmware for update and final requests. */
	ubuf = dma_alloc_coherent(dev, ZYNQMP_DMA_ALLOC_FIXED_SIZE, &update_dma_addr, GFP_KERNEL);
	if (!ubuf) {
		err = -ENOMEM;
		goto err_shash;
	}

	fbuf = dma_alloc_coherent(dev, SHA3_384_DIGEST_SIZE, &final_dma_addr, GFP_KERNEL);
	if (!fbuf) {
		err = -ENOMEM;
		goto err_mem;
	}

	return 0;

err_mem:
	dma_free_coherent(sha3_drv_ctx.dev, ZYNQMP_DMA_ALLOC_FIXED_SIZE, ubuf, update_dma_addr);

err_shash:
	crypto_unregister_shash(&sha3_drv_ctx.sha3_384);

	return err;
}

static int zynqmp_sha_remove(struct platform_device *pdev)
{
	sha3_drv_ctx.dev = platform_get_drvdata(pdev);

	dma_free_coherent(sha3_drv_ctx.dev, ZYNQMP_DMA_ALLOC_FIXED_SIZE, ubuf, update_dma_addr);
	dma_free_coherent(sha3_drv_ctx.dev, SHA3_384_DIGEST_SIZE, fbuf, final_dma_addr);
	crypto_unregister_shash(&sha3_drv_ctx.sha3_384);

	return 0;
}

static struct platform_driver zynqmp_sha_driver = {
	.probe = zynqmp_sha_probe,
	.remove = zynqmp_sha_remove,
	.driver = {
		.name = "zynqmp-sha3-384",
	},
};

static int __init sha_driver_init(void)
{
	struct platform_device *pdev;
	int ret;

	ret = platform_driver_register(&zynqmp_sha_driver);
	if (ret)
		return ret;

	pdev = platform_device_register_simple(zynqmp_sha_driver.driver.name,
					       0, NULL, 0);
	if (IS_ERR(pdev)) {
		ret = PTR_ERR(pdev);
		platform_driver_unregister(&zynqmp_sha_driver);
		pr_info("Failed to register ZynqMP SHA3 driver %d\n", ret);
	}

	return ret;
}

device_initcall(sha_driver_init);

static void __exit sha_driver_exit(void)
{
	platform_driver_unregister(&zynqmp_sha_driver);
}

MODULE_DESCRIPTION("ZynqMP SHA3 hardware acceleration support.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Harsha <harsha.harsha@xilinx.com>");