author    Herbert Xu <herbert@gondor.apana.org.au>    2025-04-18 10:59:59 +0800
committer Herbert Xu <herbert@gondor.apana.org.au>    2025-04-23 15:52:46 +0800
commit    a417f16f885d753115bc6a3735b1d8afa5f3b7c6
tree      b7a3f82f9331a780c15def42d16c283dc743818c
parent    be32039547e4b367afe1a2738ded32b3ff390f3a
crypto: arm64/sha256 - Use API partial block handling
Use the Crypto API partial block handling.
Also remove the unnecessary SIMD fallback path.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
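
[Context, not part of the patch: the "partial block handling" adopted here is the shash core's block-only mode. With CRYPTO_AHASH_ALG_BLOCK_ONLY set, the API layer owns the partial-block buffer (which is why the descriptor shrinks from struct sha256_state to struct crypto_sha256_state below), hands the driver's update hook whole blocks, and the hook returns how many tail bytes it did not consume. The following is a minimal userspace model of that division of labour, not the kernel's actual shash code; the drv_/api_ names are hypothetical.]

    #include <stdio.h>
    #include <string.h>

    #define BLK 64u	/* SHA256_BLOCK_SIZE */

    /* Driver-side hook under the block-only contract: consume whole
     * blocks only (a real driver would run the compression function
     * here) and return the number of leftover tail bytes. */
    static unsigned int drv_update(const unsigned char *data,
                                   unsigned int len)
    {
            return len % BLK;
    }

    /* API-side state: the core, not the driver, buffers the tail. */
    struct api_ctx {
            unsigned char partial[BLK];
            unsigned int plen;
    };

    static void api_update(struct api_ctx *ctx, const unsigned char *data,
                           unsigned int len)
    {
            if (ctx->plen) {	/* top up a buffered partial block first */
                    unsigned int take = BLK - ctx->plen;

                    if (take > len)
                            take = len;
                    memcpy(ctx->partial + ctx->plen, data, take);
                    ctx->plen += take;
                    data += take;
                    len -= take;
                    if (ctx->plen == BLK) {
                            drv_update(ctx->partial, BLK);
                            ctx->plen = 0;
                    }
            }
            if (len >= BLK) {	/* hand whole blocks to the driver */
                    unsigned int rem = drv_update(data, len);

                    data += len - rem;
                    len = rem;
            }
            memcpy(ctx->partial + ctx->plen, data, len);	/* stash tail */
            ctx->plen += len;
    }

    int main(void)
    {
            static const unsigned char msg[100];
            struct api_ctx ctx = { .plen = 0 };

            api_update(&ctx, msg, sizeof(msg));	/* 1 block consumed */
            printf("buffered tail: %u bytes\n", ctx.plen);	/* prints 36 */
            return 0;
    }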
-rw-r--r--   arch/arm64/crypto/sha256-glue.c   97
1 file changed, 37 insertions(+), 60 deletions(-)
diff --git a/arch/arm64/crypto/sha256-glue.c b/arch/arm64/crypto/sha256-glue.c
index 35356987cc1e..26f9fdfae87b 100644
--- a/arch/arm64/crypto/sha256-glue.c
+++ b/arch/arm64/crypto/sha256-glue.c
@@ -5,16 +5,13 @@
  * Copyright (c) 2016 Linaro Ltd. <ard.biesheuvel@linaro.org>
  */
 
-#include <asm/hwcap.h>
 #include <asm/neon.h>
-#include <asm/simd.h>
 #include <crypto/internal/hash.h>
-#include <crypto/internal/simd.h>
 #include <crypto/sha2.h>
 #include <crypto/sha256_base.h>
+#include <linux/cpufeature.h>
+#include <linux/kernel.h>
 #include <linux/module.h>
-#include <linux/string.h>
-#include <linux/types.h>
 
 MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash for arm64");
 MODULE_AUTHOR("Andy Polyakov <appro@openssl.org>");
@@ -27,8 +24,8 @@ asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
 				       unsigned int num_blks);
 EXPORT_SYMBOL(sha256_block_data_order);
 
-static void sha256_arm64_transform(struct sha256_state *sst, u8 const *src,
-				   int blocks)
+static void sha256_arm64_transform(struct crypto_sha256_state *sst,
+				   u8 const *src, int blocks)
 {
 	sha256_block_data_order(sst->state, src, blocks);
 }
@@ -36,55 +33,52 @@ static void sha256_arm64_transform(struct sha256_state *sst, u8 const *src,
 asmlinkage void sha256_block_neon(u32 *digest, const void *data,
 				  unsigned int num_blks);
 
-static void sha256_neon_transform(struct sha256_state *sst, u8 const *src,
-				  int blocks)
+static void sha256_neon_transform(struct crypto_sha256_state *sst,
+				  u8 const *src, int blocks)
 {
+	kernel_neon_begin();
 	sha256_block_neon(sst->state, src, blocks);
+	kernel_neon_end();
 }
 
 static int crypto_sha256_arm64_update(struct shash_desc *desc, const u8 *data,
 				      unsigned int len)
 {
-	return sha256_base_do_update(desc, data, len, sha256_arm64_transform);
+	return sha256_base_do_update_blocks(desc, data, len,
+					    sha256_arm64_transform);
 }
 
 static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
 				     unsigned int len, u8 *out)
 {
-	if (len)
-		sha256_base_do_update(desc, data, len, sha256_arm64_transform);
-	sha256_base_do_finalize(desc, sha256_arm64_transform);
-
+	sha256_base_do_finup(desc, data, len, sha256_arm64_transform);
 	return sha256_base_finish(desc, out);
 }
 
-static int crypto_sha256_arm64_final(struct shash_desc *desc, u8 *out)
-{
-	return crypto_sha256_arm64_finup(desc, NULL, 0, out);
-}
-
 static struct shash_alg algs[] = { {
 	.digestsize		= SHA256_DIGEST_SIZE,
 	.init			= sha256_base_init,
 	.update			= crypto_sha256_arm64_update,
-	.final			= crypto_sha256_arm64_final,
 	.finup			= crypto_sha256_arm64_finup,
-	.descsize		= sizeof(struct sha256_state),
+	.descsize		= sizeof(struct crypto_sha256_state),
 	.base.cra_name		= "sha256",
 	.base.cra_driver_name	= "sha256-arm64",
 	.base.cra_priority	= 125,
+	.base.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY |
+				  CRYPTO_AHASH_ALG_FINUP_MAX,
 	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
 	.base.cra_module	= THIS_MODULE,
 }, {
 	.digestsize		= SHA224_DIGEST_SIZE,
 	.init			= sha224_base_init,
 	.update			= crypto_sha256_arm64_update,
-	.final			= crypto_sha256_arm64_final,
 	.finup			= crypto_sha256_arm64_finup,
-	.descsize		= sizeof(struct sha256_state),
+	.descsize		= sizeof(struct crypto_sha256_state),
 	.base.cra_name		= "sha224",
 	.base.cra_driver_name	= "sha224-arm64",
 	.base.cra_priority	= 125,
+	.base.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY |
+				  CRYPTO_AHASH_ALG_FINUP_MAX,
 	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
 	.base.cra_module	= THIS_MODULE,
 } };
@@ -92,13 +86,7 @@ static struct shash_alg algs[] = { {
 static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
 			      unsigned int len)
 {
-	struct sha256_state *sctx = shash_desc_ctx(desc);
-
-	if (!crypto_simd_usable())
-		return sha256_base_do_update(desc, data, len,
-					     sha256_arm64_transform);
-
-	while (len > 0) {
+	do {
 		unsigned int chunk = len;
 
 		/*
@@ -106,65 +94,54 @@ static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
 		 * input when running on a preemptible kernel, but process the
 		 * data block by block instead.
 		 */
-		if (IS_ENABLED(CONFIG_PREEMPTION) &&
-		    chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)
-			chunk = SHA256_BLOCK_SIZE -
-				sctx->count % SHA256_BLOCK_SIZE;
-
-		kernel_neon_begin();
-		sha256_base_do_update(desc, data, chunk, sha256_neon_transform);
-		kernel_neon_end();
+		if (IS_ENABLED(CONFIG_PREEMPTION))
+			chunk = SHA256_BLOCK_SIZE;
+
+		chunk -= sha256_base_do_update_blocks(desc, data, chunk,
+						      sha256_neon_transform);
 		data += chunk;
 		len -= chunk;
-	}
-	return 0;
+	} while (len >= SHA256_BLOCK_SIZE);
+	return len;
 }
 
 static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
 			     unsigned int len, u8 *out)
 {
-	if (!crypto_simd_usable()) {
-		if (len)
-			sha256_base_do_update(desc, data, len,
-					      sha256_arm64_transform);
-		sha256_base_do_finalize(desc, sha256_arm64_transform);
-	} else {
-		if (len)
-			sha256_update_neon(desc, data, len);
-		kernel_neon_begin();
-		sha256_base_do_finalize(desc, sha256_neon_transform);
-		kernel_neon_end();
+	if (len >= SHA256_BLOCK_SIZE) {
+		int remain = sha256_update_neon(desc, data, len);
+
+		data += len - remain;
+		len = remain;
 	}
+	sha256_base_do_finup(desc, data, len, sha256_neon_transform);
 	return sha256_base_finish(desc, out);
 }
 
-static int sha256_final_neon(struct shash_desc *desc, u8 *out)
-{
-	return sha256_finup_neon(desc, NULL, 0, out);
-}
-
 static struct shash_alg neon_algs[] = { {
 	.digestsize		= SHA256_DIGEST_SIZE,
 	.init			= sha256_base_init,
 	.update			= sha256_update_neon,
-	.final			= sha256_final_neon,
 	.finup			= sha256_finup_neon,
-	.descsize		= sizeof(struct sha256_state),
+	.descsize		= sizeof(struct crypto_sha256_state),
 	.base.cra_name		= "sha256",
 	.base.cra_driver_name	= "sha256-arm64-neon",
 	.base.cra_priority	= 150,
+	.base.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY |
+				  CRYPTO_AHASH_ALG_FINUP_MAX,
 	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
 	.base.cra_module	= THIS_MODULE,
 }, {
 	.digestsize		= SHA224_DIGEST_SIZE,
 	.init			= sha224_base_init,
 	.update			= sha256_update_neon,
-	.final			= sha256_final_neon,
 	.finup			= sha256_finup_neon,
-	.descsize		= sizeof(struct sha256_state),
+	.descsize		= sizeof(struct crypto_sha256_state),
 	.base.cra_name		= "sha224",
 	.base.cra_driver_name	= "sha224-arm64-neon",
 	.base.cra_priority	= 150,
+	.base.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY |
+				  CRYPTO_AHASH_ALG_FINUP_MAX,
 	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
 	.base.cra_module	= THIS_MODULE,
 } };
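
[Note on the reworked NEON update loop, not part of the patch: kernel_neon_begin()/kernel_neon_end() now live inside sha256_neon_transform(), and on preemptible kernels each loop pass caps the chunk at one block, so every NEON critical section covers at most one compression call. The loop advances by `chunk - remainder`, where sha256_base_do_update_blocks() returns the bytes it left over. A minimal userspace model of just that length arithmetic (data-pointer advance omitted); the toy_ names are hypothetical.]

    #include <stdio.h>

    #define BLK 64u	/* SHA256_BLOCK_SIZE */

    /* Stand-in for sha256_base_do_update_blocks(): consumes whole
     * blocks and returns the number of bytes it could not use. */
    static unsigned int toy_do_blocks(unsigned int len)
    {
            return len % BLK;
    }

    /* Mirrors the control flow of sha256_update_neon() above: when
     * "preemptible", cap each pass at one block so the NEON section
     * in the transform stays short; otherwise take everything. */
    static unsigned int toy_update_neon(unsigned int len, int preemptible)
    {
            do {
                    unsigned int chunk = len;

                    if (preemptible)
                            chunk = BLK;

                    chunk -= toy_do_blocks(chunk);	/* bytes consumed */
                    len -= chunk;
            } while (len >= BLK);

            return len;	/* partial tail, buffered by the API layer */
    }

    int main(void)
    {
            /* 200 bytes = 3 blocks + 8 tail; both paths leave 8 over. */
            printf("%u %u\n", toy_update_neon(200, 1),
                   toy_update_neon(200, 0));
            return 0;
    }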