path: root/lib/crc/sparc
author     Linus Torvalds <torvalds@linux-foundation.org>   2025-07-28 17:43:29 -0700
committer  Linus Torvalds <torvalds@linux-foundation.org>   2025-07-28 17:43:29 -0700
commit     a578dd095dfe8b56c167201d9aea43e47d27f807 (patch)
tree       6d4ee6e286b92ebad6d10572af74eb15fd31973f /lib/crc/sparc
parent     8e736a2eeaf261213b4557778e015699da1e1c8c (diff)
parent     118da22eb6fbd48f896d17411f942399283d600c (diff)
Merge tag 'crc-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/ebiggers/linux
Pull CRC updates from Eric Biggers:

 - Reorganize the architecture-optimized CRC code

   It now lives in lib/crc/$(SRCARCH)/ rather than arch/$(SRCARCH)/lib/,
   and it is no longer artificially split into separate generic and arch
   modules. This allows better inlining and dead code elimination.

   The generic CRC code is also no longer exported, simplifying the API.

   (This mirrors the similar changes to SHA-1 and SHA-2 in lib/crypto/,
   which can be found in the "Crypto library updates" pull request.)

 - Improve crc32c() performance on newer x86_64 CPUs on long messages by
   enabling the VPCLMULQDQ optimized code

 - Simplify the crypto_shash wrappers for crc32_le() and crc32c()

   Register just one shash algorithm for each that uses the (fully
   optimized) library functions, instead of unnecessarily providing
   direct access to the generic CRC code.

 - Remove unused and obsolete drivers for hardware CRC engines

 - Remove CRC-32 combination functions that are no longer used

 - Add kerneldoc for crc32_le(), crc32_be(), and crc32c()

 - Convert the crc32() macro to an inline function

* tag 'crc-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/ebiggers/linux: (26 commits)
  lib/crc: x86/crc32c: Enable VPCLMULQDQ optimization where beneficial
  lib/crc: x86: Reorganize crc-pclmul static_call initialization
  lib/crc: crc64: Add include/linux/crc64.h to kernel-api.rst
  lib/crc: crc32: Change crc32() from macro to inline function and remove cast
  nvmem: layouts: Switch from crc32() to crc32_le()
  lib/crc: crc32: Document crc32_le(), crc32_be(), and crc32c()
  lib/crc: Explicitly include <linux/export.h>
  lib/crc: Remove ARCH_HAS_* kconfig symbols
  lib/crc: x86: Migrate optimized CRC code into lib/crc/
  lib/crc: sparc: Migrate optimized CRC code into lib/crc/
  lib/crc: s390: Migrate optimized CRC code into lib/crc/
  lib/crc: riscv: Migrate optimized CRC code into lib/crc/
  lib/crc: powerpc: Migrate optimized CRC code into lib/crc/
  lib/crc: mips: Migrate optimized CRC code into lib/crc/
  lib/crc: loongarch: Migrate optimized CRC code into lib/crc/
  lib/crc: arm64: Migrate optimized CRC code into lib/crc/
  lib/crc: arm: Migrate optimized CRC code into lib/crc/
  lib/crc: Prepare for arch-optimized code in subdirs of lib/crc/
  lib/crc: Move files into lib/crc/
  lib/crc32: Remove unused combination support
  ...
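As a quick illustration of the library API this series consolidates (a usage sketch, not part of the pull request): crc32_le(), crc32_be(), and crc32c() are all declared in <linux/crc32.h>, and after the macro-to-inline conversion crc32() behaves like any other function. The module name, test buffer, and the ~0 seed with final inversion below are illustrative conventions, not something these patches mandate.

  #include <linux/crc32.h>   /* crc32_le(), crc32_be(), crc32c() */
  #include <linux/module.h>

  static int __init crc_demo_init(void)
  {
          static const u8 buf[] = "123456789";
          size_t len = sizeof(buf) - 1;
          /* CRC-32 (IEEE polynomial), least-significant-bit first */
          u32 ieee = crc32_le(~0u, buf, len) ^ ~0u;
          /* CRC-32C (Castagnoli polynomial), the variant accelerated in the sparc code below */
          u32 c = crc32c(~0u, buf, len) ^ ~0u;

          pr_info("crc32_le=%08x crc32c=%08x\n", ieee, c);
          return 0;
  }
  module_init(crc_demo_init);

  MODULE_DESCRIPTION("CRC library usage sketch");
  MODULE_LICENSE("GPL");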
Diffstat (limited to 'lib/crc/sparc')
-rw-r--r--   lib/crc/sparc/crc32.h        67
-rw-r--r--   lib/crc/sparc/crc32c_asm.S   20
2 files changed, 87 insertions, 0 deletions
diff --git a/lib/crc/sparc/crc32.h b/lib/crc/sparc/crc32.h
new file mode 100644
index 000000000000..60f2765ac015
--- /dev/null
+++ b/lib/crc/sparc/crc32.h
@@ -0,0 +1,67 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/* CRC32c (Castagnoli), sparc64 crc32c opcode accelerated
+ *
+ * This is based largely upon arch/x86/crypto/crc32c-intel.c
+ *
+ * Copyright (C) 2008 Intel Corporation
+ * Authors: Austin Zhang <austin_zhang@linux.intel.com>
+ * Kent Liu <kent.liu@intel.com>
+ */
+
+#include <asm/pstate.h>
+#include <asm/elf.h>
+
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_crc32c_opcode);
+
+#define crc32_le_arch crc32_le_base /* not implemented on this arch */
+#define crc32_be_arch crc32_be_base /* not implemented on this arch */
+
+void crc32c_sparc64(u32 *crcp, const u64 *data, size_t len);
+
+static inline u32 crc32c_arch(u32 crc, const u8 *data, size_t len)
+{
+ size_t n = -(uintptr_t)data & 7;
+
+ if (!static_branch_likely(&have_crc32c_opcode))
+ return crc32c_base(crc, data, len);
+
+ if (n) {
+ /* Data isn't 8-byte aligned. Align it. */
+ n = min(n, len);
+ crc = crc32c_base(crc, data, n);
+ data += n;
+ len -= n;
+ }
+ n = len & ~7U;
+ if (n) {
+ crc32c_sparc64(&crc, (const u64 *)data, n);
+ data += n;
+ len -= n;
+ }
+ if (len)
+ crc = crc32c_base(crc, data, len);
+ return crc;
+}
+
+#define crc32_mod_init_arch crc32_mod_init_arch
+static inline void crc32_mod_init_arch(void)
+{
+ unsigned long cfr;
+
+ if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
+ return;
+
+ __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
+ if (!(cfr & CFR_CRC32C))
+ return;
+
+ static_branch_enable(&have_crc32c_opcode);
+ pr_info("Using sparc64 crc32c opcode optimized CRC32C implementation\n");
+}
+
+static inline u32 crc32_optimizations_arch(void)
+{
+ if (static_key_enabled(&have_crc32c_opcode))
+ return CRC32C_OPTIMIZATION;
+ return 0;
+}
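A side note on the `-(uintptr_t)data & 7` idiom in crc32c_arch() above: for any pointer it yields the number of bytes up to the next 8-byte boundary (0 when already aligned), which is how the head chunk handed to crc32c_base() is sized. A small userspace illustration, purely explanatory and not part of the patch:

  #include <stdint.h>
  #include <stdio.h>

  int main(void)
  {
          /* Addresses 0x1000..0x1007 need 0,7,6,5,4,3,2,1 head bytes to reach 8-byte alignment. */
          for (uintptr_t p = 0x1000; p < 0x1008; p++)
                  printf("addr %#lx -> %zu head bytes\n",
                         (unsigned long)p, (size_t)(-p & 7));
          return 0;
  }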
diff --git a/lib/crc/sparc/crc32c_asm.S b/lib/crc/sparc/crc32c_asm.S
new file mode 100644
index 000000000000..4db873850f44
--- /dev/null
+++ b/lib/crc/sparc/crc32c_asm.S
@@ -0,0 +1,20 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+#include <linux/linkage.h>
+#include <asm/opcodes.h>
+#include <asm/visasm.h>
+#include <asm/asi.h>
+
+ENTRY(crc32c_sparc64)
+ /* %o0=crc32p, %o1=data_ptr, %o2=len */
+ VISEntryHalf
+ lda [%o0] ASI_PL, %f1
+1: ldd [%o1], %f2
+ CRC32C(0,2,0)
+ subcc %o2, 8, %o2
+ bne,pt %icc, 1b
+ add %o1, 0x8, %o1
+ sta %f1, [%o0] ASI_PL
+ VISExitHalf
+2: retl
+ nop
+ENDPROC(crc32c_sparc64)
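For reference, the checksum both paths above compute is CRC-32C (Castagnoli), i.e. the reflected polynomial 0x82F63B78, which a plain bit-at-a-time implementation computes as in the sketch below (illustrative only; the function name crc32c_bitwise is made up, and seeding/inverting is left to the caller, matching the kernel convention where crc32c() takes the running CRC as its first argument):

  #include <stddef.h>
  #include <stdint.h>

  /* Bit-at-a-time CRC-32C: fold in one byte per outer iteration, LSB first. */
  static uint32_t crc32c_bitwise(uint32_t crc, const uint8_t *data, size_t len)
  {
          while (len--) {
                  crc ^= *data++;
                  for (int i = 0; i < 8; i++)
                          crc = (crc & 1) ? (crc >> 1) ^ 0x82F63B78u : crc >> 1;
          }
          return crc;
  }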