// SPDX-License-Identifier: GPL-2.0-only
/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

struct sha256_ce_state {
	struct sha256_state	sst;
	u32			finalize;
};

extern const u32 sha256_ce_offsetof_count;
extern const u32 sha256_ce_offsetof_finalize;

asmlinkage void sha2_ce_transform(struct sha256_ce_state *sst, u8 const *src,
				  int blocks);

static void __sha2_ce_transform(struct sha256_state *sst, u8 const *src,
				int blocks)
{
	sha2_ce_transform(container_of(sst, struct sha256_ce_state, sst), src,
			  blocks);
}

const u32 sha256_ce_offsetof_count = offsetof(struct sha256_ce_state,
					      sst.count);
const u32 sha256_ce_offsetof_finalize = offsetof(struct sha256_ce_state,
						 finalize);

asmlinkage void sha256_block_data_order(u32 *digest, u8 const *src, int blocks);

static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
				      int blocks)
{
	sha256_block_data_order(sst->state, src, blocks);
}

static int sha256_ce_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
					     __sha256_block_data_order);

	sctx->finalize = 0;
	kernel_neon_begin();
	sha256_base_do_update(desc, data, len, __sha2_ce_transform);
	kernel_neon_end();

	return 0;
}

static int sha256_ce_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int len, u8 *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);
	bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len;

	if (!crypto_simd_usable()) {
		if (len)
			sha256_base_do_update(desc, data, len,
					      __sha256_block_data_order);
		sha256_base_do_finalize(desc, __sha256_block_data_order);
		return sha256_base_finish(desc, out);
	}

	/*
	 * Allow the asm code to perform the finalization if there is no
	 * partial data and the input is a round multiple of the block size.
	 */
	sctx->finalize = finalize;

	kernel_neon_begin();
	sha256_base_do_update(desc, data, len, __sha2_ce_transform);
	if (!finalize)
		sha256_base_do_finalize(desc, __sha2_ce_transform);
	kernel_neon_end();
	return sha256_base_finish(desc, out);
}

static int sha256_ce_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable()) {
		sha256_base_do_finalize(desc, __sha256_block_data_order);
		return sha256_base_finish(desc, out);
	}

	sctx->finalize = 0;
	kernel_neon_begin();
	sha256_base_do_finalize(desc, __sha2_ce_transform);
	kernel_neon_end();
	return sha256_base_finish(desc, out);
}

static int sha256_ce_export(struct shash_desc *desc, void *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	memcpy(out, &sctx->sst, sizeof(struct sha256_state));
	return 0;
}

static int sha256_ce_import(struct shash_desc *desc, const void *in)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	memcpy(&sctx->sst, in, sizeof(struct sha256_state));
	sctx->finalize = 0;
	return 0;
}

static struct shash_alg algs[] = { {
	.init			= sha224_base_init,
	.update			= sha256_ce_update,
	.final			= sha256_ce_final,
	.finup			= sha256_ce_finup,
	.export			= sha256_ce_export,
	.import			= sha256_ce_import,
	.descsize		= sizeof(struct sha256_ce_state),
	.statesize		= sizeof(struct sha256_state),
	.digestsize		= SHA224_DIGEST_SIZE,
	.base			= {
		.cra_name		= "sha224",
		.cra_driver_name	= "sha224-ce",
		.cra_priority		= 200,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
}, {
	.init			= sha256_base_init,
	.update			= sha256_ce_update,
	.final			= sha256_ce_final,
	.finup			= sha256_ce_finup,
	.export			= sha256_ce_export,
	.import			= sha256_ce_import,
	.descsize		= sizeof(struct sha256_ce_state),
	.statesize		= sizeof(struct sha256_state),
	.digestsize		= SHA256_DIGEST_SIZE,
	.base			= {
		.cra_name		= "sha256",
		.cra_driver_name	= "sha256-ce",
		.cra_priority		= 200,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
} };

static int __init sha2_ce_mod_init(void)
{
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha2_ce_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_cpu_feature_match(SHA2, sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);
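
/*
 * Illustrative usage sketch, not part of the driver above: one way a kernel
 * caller might compute a digest through the shash API that these algorithms
 * are registered under.  Requesting "sha256" selects the highest-priority
 * implementation (this driver's "sha256-ce" when the CPU feature is present);
 * the driver name could be requested explicitly instead.  The function name
 * below is hypothetical and shown only for illustration.
 */
#include <linux/err.h>

static int __maybe_unused sha256_ce_usage_example(const u8 *data,
						  unsigned int len,
						  u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	/* out must point to at least SHA256_DIGEST_SIZE bytes */
	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		/* stack-allocated descriptor large enough for any shash */
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_digest(desc, data, len, out);
	}

	crypto_free_shash(tfm);
	return err;
}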