lib/crc: x86: Migrate optimized CRC code into lib/crc/

Move the x86-optimized CRC code from arch/x86/lib/crc* into its new
location in lib/crc/x86/, and wire it up in the new way.  This new way
of organizing the CRC code eliminates the need to artificially split the
code for each CRC variant into separate arch and generic modules,
enabling better inlining and dead code elimination.  For more details,
see "lib/crc: Prepare for arch-optimized code in subdirs of lib/crc/".

Reviewed-by: "Martin K. Petersen" <martin.petersen@oracle.com>
Acked-by: Ingo Molnar <mingo@kernel.org>
Acked-by: "Jason A. Donenfeld" <Jason@zx2c4.com>
Link: https://lore.kernel.org/r/20250607200454.73587-12-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
This commit is contained in:
Eric Biggers 2025-06-07 13:04:53 -07:00
parent 9b2d720e8a
commit b10749d89f
14 changed files with 20 additions and 69 deletions

View File

@@ -79,9 +79,6 @@ config X86
select ARCH_HAS_CPU_CACHE_INVALIDATE_MEMREGION
select ARCH_HAS_CPU_FINALIZE_INIT
select ARCH_HAS_CPU_PASID if IOMMU_SVA
select ARCH_HAS_CRC32
select ARCH_HAS_CRC64 if X86_64
select ARCH_HAS_CRC_T10DIF
select ARCH_HAS_CURRENT_STACK_POINTER
select ARCH_HAS_DEBUG_VIRTUAL
select ARCH_HAS_DEBUG_VM_PGTABLE if !X86_PAE

View File

@@ -40,16 +40,6 @@ lib-$(CONFIG_RANDOMIZE_BASE) += kaslr.o
lib-$(CONFIG_FUNCTION_ERROR_INJECTION) += error-inject.o
lib-$(CONFIG_MITIGATION_RETPOLINE) += retpoline.o
obj-$(CONFIG_CRC32_ARCH) += crc32-x86.o
crc32-x86-y := crc32.o crc32-pclmul.o
crc32-x86-$(CONFIG_64BIT) += crc32c-3way.o
obj-$(CONFIG_CRC64_ARCH) += crc64-x86.o
crc64-x86-y := crc64.o crc64-pclmul.o
obj-$(CONFIG_CRC_T10DIF_ARCH) += crc-t10dif-x86.o
crc-t10dif-x86-y := crc-t10dif.o crc16-msb-pclmul.o
obj-y += msr.o msr-reg.o msr-reg-export.o hweight.o
obj-y += iomem.o

View File

@@ -54,6 +54,7 @@ config CRC_T10DIF_ARCH
default y if ARM64 && KERNEL_MODE_NEON
default y if PPC64 && ALTIVEC
default y if RISCV && RISCV_ISA_ZBC
default y if X86
config CRC32
tristate
@@ -76,6 +77,7 @@ config CRC32_ARCH
default y if RISCV && RISCV_ISA_ZBC
default y if S390
default y if SPARC64
default y if X86
config CRC64
tristate
@@ -90,6 +92,7 @@ config CRC64_ARCH
bool
depends on CRC64 && CRC_OPTIMIZATIONS
default y if RISCV && RISCV_ISA_ZBC && 64BIT
default y if X86_64
config CRC_OPTIMIZATIONS
bool "Enable optimized CRC implementations" if EXPERT

View File

@@ -17,6 +17,7 @@ crc-t10dif-$(CONFIG_ARM) += arm/crc-t10dif-core.o
crc-t10dif-$(CONFIG_ARM64) += arm64/crc-t10dif-core.o
crc-t10dif-$(CONFIG_PPC) += powerpc/crct10dif-vpmsum_asm.o
crc-t10dif-$(CONFIG_RISCV) += riscv/crc16_msb.o
crc-t10dif-$(CONFIG_X86) += x86/crc16-msb-pclmul.o
endif
obj-$(CONFIG_CRC32) += crc32.o
@@ -29,6 +30,8 @@ crc32-$(CONFIG_PPC) += powerpc/crc32c-vpmsum_asm.o
crc32-$(CONFIG_RISCV) += riscv/crc32_lsb.o riscv/crc32_msb.o
crc32-$(CONFIG_S390) += s390/crc32le-vx.o s390/crc32be-vx.o
crc32-$(CONFIG_SPARC) += sparc/crc32c_asm.o
crc32-$(CONFIG_X86) += x86/crc32-pclmul.o
crc32-$(CONFIG_X86_64) += x86/crc32c-3way.o
endif
obj-$(CONFIG_CRC64) += crc64.o
@@ -36,6 +39,7 @@ crc64-y := crc64-main.o
ifeq ($(CONFIG_CRC64_ARCH),y)
CFLAGS_crc64-main.o += -I$(src)/$(SRCARCH)
crc64-$(CONFIG_RISCV) += riscv/crc64_lsb.o riscv/crc64_msb.o
crc64-$(CONFIG_X86) += x86/crc64-pclmul.o
endif
obj-y += tests/

View File

@@ -5,36 +5,24 @@
* Copyright 2024 Google LLC
*/
#include <linux/crc-t10dif.h>
#include <linux/module.h>
#include "crc-pclmul-template.h"
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_pclmulqdq);
DECLARE_CRC_PCLMUL_FUNCS(crc16_msb, u16);
u16 crc_t10dif_arch(u16 crc, const u8 *p, size_t len)
static inline u16 crc_t10dif_arch(u16 crc, const u8 *p, size_t len)
{
CRC_PCLMUL(crc, p, len, crc16_msb, crc16_msb_0x8bb7_consts,
have_pclmulqdq);
return crc_t10dif_generic(crc, p, len);
}
EXPORT_SYMBOL(crc_t10dif_arch);
static int __init crc_t10dif_x86_init(void)
#define crc_t10dif_mod_init_arch crc_t10dif_mod_init_arch
static inline void crc_t10dif_mod_init_arch(void)
{
if (boot_cpu_has(X86_FEATURE_PCLMULQDQ)) {
static_branch_enable(&have_pclmulqdq);
INIT_CRC_PCLMUL(crc16_msb);
}
return 0;
}
subsys_initcall(crc_t10dif_x86_init);
static void __exit crc_t10dif_x86_exit(void)
{
}
module_exit(crc_t10dif_x86_exit);
MODULE_DESCRIPTION("CRC-T10DIF using [V]PCLMULQDQ instructions");
MODULE_LICENSE("GPL");

View File

@@ -7,8 +7,6 @@
* Copyright 2024 Google LLC
*/
#include <linux/crc32.h>
#include <linux/module.h>
#include "crc-pclmul-template.h"
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_crc32);
@@ -16,13 +14,12 @@ static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_pclmulqdq);
DECLARE_CRC_PCLMUL_FUNCS(crc32_lsb, u32);
u32 crc32_le_arch(u32 crc, const u8 *p, size_t len)
static inline u32 crc32_le_arch(u32 crc, const u8 *p, size_t len)
{
CRC_PCLMUL(crc, p, len, crc32_lsb, crc32_lsb_0xedb88320_consts,
have_pclmulqdq);
return crc32_le_base(crc, p, len);
}
EXPORT_SYMBOL(crc32_le_arch);
#ifdef CONFIG_X86_64
#define CRC32_INST "crc32q %1, %q0"
@@ -38,7 +35,7 @@ EXPORT_SYMBOL(crc32_le_arch);
asmlinkage u32 crc32c_x86_3way(u32 crc, const u8 *buffer, size_t len);
u32 crc32c_arch(u32 crc, const u8 *p, size_t len)
static inline u32 crc32c_arch(u32 crc, const u8 *p, size_t len)
{
size_t num_longs;
@@ -70,15 +67,11 @@ u32 crc32c_arch(u32 crc, const u8 *p, size_t len)
return crc;
}
EXPORT_SYMBOL(crc32c_arch);
u32 crc32_be_arch(u32 crc, const u8 *p, size_t len)
{
return crc32_be_base(crc, p, len);
}
EXPORT_SYMBOL(crc32_be_arch);
#define crc32_be_arch crc32_be_base /* not implemented on this arch */
static int __init crc32_x86_init(void)
#define crc32_mod_init_arch crc32_mod_init_arch
static inline void crc32_mod_init_arch(void)
{
if (boot_cpu_has(X86_FEATURE_XMM4_2))
static_branch_enable(&have_crc32);
@@ -86,16 +79,9 @@ static int __init crc32_x86_init(void)
static_branch_enable(&have_pclmulqdq);
INIT_CRC_PCLMUL(crc32_lsb);
}
return 0;
}
subsys_initcall(crc32_x86_init);
static void __exit crc32_x86_exit(void)
{
}
module_exit(crc32_x86_exit);
u32 crc32_optimizations(void)
static inline u32 crc32_optimizations_arch(void)
{
u32 optimizations = 0;
@@ -105,7 +91,3 @@ u32 crc32_optimizations(void)
optimizations |= CRC32_LE_OPTIMIZATION;
return optimizations;
}
EXPORT_SYMBOL(crc32_optimizations);
MODULE_DESCRIPTION("x86-optimized CRC32 functions");
MODULE_LICENSE("GPL");

View File

@@ -5,8 +5,6 @@
* Copyright 2025 Google LLC
*/
#include <linux/crc64.h>
#include <linux/module.h>
#include "crc-pclmul-template.h"
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_pclmulqdq);
@@ -14,37 +12,26 @@ static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_pclmulqdq);
DECLARE_CRC_PCLMUL_FUNCS(crc64_msb, u64);
DECLARE_CRC_PCLMUL_FUNCS(crc64_lsb, u64);
u64 crc64_be_arch(u64 crc, const u8 *p, size_t len)
static inline u64 crc64_be_arch(u64 crc, const u8 *p, size_t len)
{
CRC_PCLMUL(crc, p, len, crc64_msb, crc64_msb_0x42f0e1eba9ea3693_consts,
have_pclmulqdq);
return crc64_be_generic(crc, p, len);
}
EXPORT_SYMBOL_GPL(crc64_be_arch);
u64 crc64_nvme_arch(u64 crc, const u8 *p, size_t len)
static inline u64 crc64_nvme_arch(u64 crc, const u8 *p, size_t len)
{
CRC_PCLMUL(crc, p, len, crc64_lsb, crc64_lsb_0x9a6c9329ac4bc9b5_consts,
have_pclmulqdq);
return crc64_nvme_generic(crc, p, len);
}
EXPORT_SYMBOL_GPL(crc64_nvme_arch);
static int __init crc64_x86_init(void)
#define crc64_mod_init_arch crc64_mod_init_arch
static inline void crc64_mod_init_arch(void)
{
if (boot_cpu_has(X86_FEATURE_PCLMULQDQ)) {
static_branch_enable(&have_pclmulqdq);
INIT_CRC_PCLMUL(crc64_msb);
INIT_CRC_PCLMUL(crc64_lsb);
}
return 0;
}
subsys_initcall(crc64_x86_init);
static void __exit crc64_x86_exit(void)
{
}
module_exit(crc64_x86_exit);
MODULE_DESCRIPTION("CRC64 using [V]PCLMULQDQ instructions");
MODULE_LICENSE("GPL");