-rw-r--r--  arch/powerpc/Kconfig                         |  1
-rw-r--r--  arch/powerpc/lib/crypto/Kconfig              |  2
-rw-r--r--  arch/powerpc/lib/crypto/poly1305-p10-glue.c  | 41
3 files changed, 4 insertions(+), 40 deletions(-)
diff --git a/arch/powerpc/Kconfig b/arch/powerpc/Kconfig
index 651e0c32957a..6722625a406a 100644
--- a/arch/powerpc/Kconfig
+++ b/arch/powerpc/Kconfig
@@ -173,7 +173,6 @@ config PPC
 	select ARCH_STACKWALK
 	select ARCH_SUPPORTS_ATOMIC_RMW
 	select ARCH_SUPPORTS_DEBUG_PAGEALLOC if PPC_BOOK3S || PPC_8xx
-	select ARCH_SUPPORTS_INT128 if PPC64 && CC_HAS_INT128
 	select ARCH_USE_BUILTIN_BSWAP
 	select ARCH_USE_CMPXCHG_LOCKREF if PPC64
 	select ARCH_USE_MEMTEST
diff --git a/arch/powerpc/lib/crypto/Kconfig b/arch/powerpc/lib/crypto/Kconfig
index 6761fdb6193c..ffa541ad6d5d 100644
--- a/arch/powerpc/lib/crypto/Kconfig
+++ b/arch/powerpc/lib/crypto/Kconfig
@@ -9,7 +9,7 @@ config CRYPTO_CHACHA20_P10
 
 config CRYPTO_POLY1305_P10
 	tristate
-	depends on PPC64 && CPU_LITTLE_ENDIAN && VSX && ARCH_SUPPORTS_INT128
+	depends on PPC64 && CPU_LITTLE_ENDIAN && VSX
 	default CRYPTO_LIB_POLY1305
 	select CRYPTO_ARCH_HAVE_LIB_POLY1305
 	select CRYPTO_LIB_POLY1305_GENERIC
diff --git a/arch/powerpc/lib/crypto/poly1305-p10-glue.c b/arch/powerpc/lib/crypto/poly1305-p10-glue.c
index 280c10c48c53..3f1664a724b6 100644
--- a/arch/powerpc/lib/crypto/poly1305-p10-glue.c
+++ b/arch/powerpc/lib/crypto/poly1305-p10-glue.c
@@ -6,7 +6,6 @@
  */
 #include <asm/switch_to.h>
 #include <crypto/internal/poly1305.h>
-#include <crypto/internal/simd.h>
 #include <linux/cpufeature.h>
 #include <linux/jump_label.h>
 #include <linux/kernel.h>
@@ -19,11 +18,6 @@ asmlinkage void poly1305_emit_64(const struct poly1305_state *state, const u32 n
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_p10);
 
-static inline bool is_state_base64(struct poly1305_block_state *state)
-{
-	return state->core_r.precomputed_s.r64[2];
-}
-
 static void vsx_begin(void)
 {
 	preempt_disable();
@@ -36,35 +30,12 @@ static void vsx_end(void)
 	preempt_enable();
 }
 
-static void convert_to_base2_44(struct poly1305_block_state *state)
-{
-	u8 raw_key[POLY1305_BLOCK_SIZE];
-	u64 h0, h1, h2;
-
-	if (!is_state_base64(state))
-		return;
-
-	state->core_r.precomputed_s.r64[2] = 0;
-	put_unaligned_le64(state->core_r.key.r64[0], raw_key + 0);
-	put_unaligned_le64(state->core_r.key.r64[1], raw_key + 8);
-	poly1305_core_setkey(&state->core_r, raw_key);
-
-	h0 = state->h.h64[0];
-	h1 = state->h.h64[1];
-	h2 = state->h.h64[2];
-	state->h.h64[0] = h0 & 0xfffffffffffULL;
-	state->h.h64[1] = h0 >> 44 | (h1 & 0xffffffULL) << 20;
-	state->h.h64[2] = h1 >> 24 | h2 << 40;
-}
-
 void poly1305_block_init_arch(struct poly1305_block_state *dctx,
 			      const u8 raw_key[POLY1305_BLOCK_SIZE])
 {
-	dctx->core_r.precomputed_s.r64[2] = 0;
-	if (!static_key_enabled(&have_p10) || !crypto_simd_usable())
+	if (!static_key_enabled(&have_p10))
 		return poly1305_block_init_generic(dctx, raw_key);
-	dctx->core_r.precomputed_s.r64[2] = 1;
 	dctx->h = (struct poly1305_state){};
 	dctx->core_r.key.r64[0] = get_unaligned_le64(raw_key + 0);
 	dctx->core_r.key.r64[1] = get_unaligned_le64(raw_key + 8);
@@ -74,11 +45,8 @@ EXPORT_SYMBOL_GPL(poly1305_block_init_arch);
 void poly1305_blocks_arch(struct poly1305_block_state *state, const u8 *src,
 			  unsigned int len, u32 padbit)
 {
-	if (!static_key_enabled(&have_p10) || !is_state_base64(state) ||
-	    !crypto_simd_usable()) {
-		convert_to_base2_44(state);
+	if (!static_key_enabled(&have_p10))
 		return poly1305_blocks_generic(state, src, len, padbit);
-	}
 	vsx_begin();
 	if (len >= POLY1305_BLOCK_SIZE * 4) {
 		poly1305_p10le_4blocks(state, src, len);
@@ -98,10 +66,7 @@ void poly1305_emit_arch(const struct poly1305_state *state,
 			u8 digest[POLY1305_DIGEST_SIZE],
 			const u32 nonce[4])
 {
-	struct poly1305_block_state *dctx =
-		container_of(state, struct poly1305_block_state, h);
-
-	if (!static_key_enabled(&have_p10) || !is_state_base64(dctx))
+	if (!static_key_enabled(&have_p10))
 		return poly1305_emit_generic(state, digest, nonce);
 	poly1305_emit_64(state, nonce, digest);
 }