/*
 * Linux/arm64 port of the OpenSSL SHA256 implementation for AArch64
 *
 * Copyright (c) 2016 Linaro Ltd. <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <linux/cryptohash.h>
#include <linux/module.h>
#include <linux/types.h>
#include <linux/string.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash for arm64");
MODULE_AUTHOR("Andy Polyakov <appro@openssl.org>");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");

asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
					unsigned int num_blks);
EXPORT_SYMBOL(sha256_block_data_order);

asmlinkage void sha256_block_neon(u32 *digest, const void *data,
				  unsigned int num_blks);

static int sha256_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return sha256_base_do_update(desc, data, len,
				(sha256_block_fn *)sha256_block_data_order);
}

static int sha256_finup(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	if (len)
		sha256_base_do_update(desc, data, len,
				(sha256_block_fn *)sha256_block_data_order);
	sha256_base_do_finalize(desc,
				(sha256_block_fn *)sha256_block_data_order);

	return sha256_base_finish(desc, out);
}

static int sha256_final(struct shash_desc *desc, u8 *out)
{
	return sha256_finup(desc, NULL, 0, out);
}

static struct shash_alg algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= sha256_update,
	.final			= sha256_final,
	.finup			= sha256_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64",
	.base.cra_priority	= 100,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= sha256_update,
	.final			= sha256_final,
	.finup			= sha256_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64",
	.base.cra_priority	= 100,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
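
/*
 * NEON-accelerated variants. kernel_neon_begin()/kernel_neon_end()
 * bracket every use of the NEON register file: they preserve the
 * registers on behalf of the interrupted context and disable
 * preemption while NEON is in use. may_use_simd() reports whether
 * NEON may be used in the current context (it may not be in, e.g.,
 * hardirq context); when it may not, we fall back to the scalar
 * sha256_block_data_order() routine.
 */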

static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!may_use_simd())
		return sha256_base_do_update(desc, data, len,
				(sha256_block_fn *)sha256_block_data_order);

	while (len > 0) {
		unsigned int chunk = len;

		/*
		 * Don't hog the CPU for the entire time it takes to process all
		 * input when running on a preemptible kernel, but process the
		 * data block by block instead.
		 */
		if (IS_ENABLED(CONFIG_PREEMPT) &&
		    chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)
			chunk = SHA256_BLOCK_SIZE -
				sctx->count % SHA256_BLOCK_SIZE;

		kernel_neon_begin();
		sha256_base_do_update(desc, data, chunk,
				      (sha256_block_fn *)sha256_block_neon);
		kernel_neon_end();
		data += chunk;
		len -= chunk;
	}
	return 0;
}

static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	if (!may_use_simd()) {
		if (len)
			sha256_base_do_update(desc, data, len,
				(sha256_block_fn *)sha256_block_data_order);
		sha256_base_do_finalize(desc,
				(sha256_block_fn *)sha256_block_data_order);
	} else {
		if (len)
			sha256_update_neon(desc, data, len);
		kernel_neon_begin();
		sha256_base_do_finalize(desc,
				(sha256_block_fn *)sha256_block_neon);
		kernel_neon_end();
	}
	return sha256_base_finish(desc, out);
}

static int sha256_final_neon(struct shash_desc *desc, u8 *out)
{
	return sha256_finup_neon(desc, NULL, 0, out);
}

static struct shash_alg neon_algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };

static int __init sha256_mod_init(void)
{
	int ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));

	if (ret)
		return ret;

	if (elf_hwcap & HWCAP_ASIMD) {
		ret = crypto_register_shashes(neon_algs, ARRAY_SIZE(neon_algs));
		if (ret)
			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
	}
	return ret;
}

static void __exit sha256_mod_fini(void)
{
	if (elf_hwcap & HWCAP_ASIMD)
		crypto_unregister_shashes(neon_algs, ARRAY_SIZE(neon_algs));
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);
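
/*
 * Illustrative sketch, not part of the original driver: how another
 * part of the kernel would consume the hashes registered above via the
 * generic shash API. The crypto core transparently selects the
 * highest-priority "sha256" implementation, i.e. the NEON variant
 * (priority 150) when it is registered, else the scalar one (100).
 * The function name below is made up for the example.
 */
static int __maybe_unused sha256_digest_example(const u8 *data,
						unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int ret;

	/* Look up the best "sha256" shash known to the crypto core. */
	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		/* Stack-allocated descriptor sized for this transform. */
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		desc->flags = 0; /* shash_desc::flags exists in kernels of this vintage */

		/* One-shot init + update + final over the whole buffer. */
		ret = crypto_shash_digest(desc, data, len, out);
	}

	crypto_free_shash(tfm);
	return ret;
}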