On Sun, 3 Nov 2024 at 23:34, Eric Biggers <ebiggers@kernel.org> wrote:
>
> From: Eric Biggers <ebiggers@google.com>
>
> Make the CRC32 library export a function crc32_optimizations() which
> returns flags that indicate which CRC32 functions are actually executing
> optimized code at runtime.
>
> This will be used to determine whether the crc32[c]-$arch shash
> algorithms should be registered in the crypto API. btrfs could also
> start using these flags instead of its current hack of parsing the
> crypto_shash_driver_name.
>
> Signed-off-by: Eric Biggers <ebiggers@google.com>
Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
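
One note for other users of this interface: the intended pattern is just
to test the returned flags before deciding whether an accelerated variant
is worth registering or relying on. A rough sketch of how the crc32c shash
registration could be gated on it (the alg struct and init function names
below are made up for illustration, not taken from this series):

#include <linux/crc32.h>
#include <linux/module.h>
#include <crypto/internal/hash.h>

static struct shash_alg crc32c_arch_alg = {
	/* .digestsize, .init, .update, .final, .base, ... as usual */
};

static int __init crc32c_arch_mod_init(void)
{
	/* Skip registration entirely if the library function is not
	 * actually accelerated on this CPU. */
	if (!(crc32_optimizations() & CRC32C_OPTIMIZATION))
		return 0;
	return crypto_register_shash(&crc32c_arch_alg);
}
module_init(crc32c_arch_mod_init);
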
> ---
>  arch/arm64/lib/crc32-glue.c  | 10 ++++++++++
>  arch/riscv/lib/crc32-riscv.c | 10 ++++++++++
>  include/linux/crc32.h        | 15 +++++++++++++++
>  3 files changed, 35 insertions(+)
>
> diff --git a/arch/arm64/lib/crc32-glue.c b/arch/arm64/lib/crc32-glue.c
> index d7f6e1cbf0d2..15c4c9db573e 100644
> --- a/arch/arm64/lib/crc32-glue.c
> +++ b/arch/arm64/lib/crc32-glue.c
> @@ -83,7 +83,17 @@ u32 __pure crc32_be_arch(u32 crc, const u8 *p, size_t len)
>
>  	return crc32_be_arm64(crc, p, len);
>  }
>  EXPORT_SYMBOL(crc32_be_arch);
>
> +u32 crc32_optimizations(void)
> +{
> +	if (alternative_has_cap_likely(ARM64_HAS_CRC32))
> +		return CRC32_LE_OPTIMIZATION |
> +		       CRC32_BE_OPTIMIZATION |
> +		       CRC32C_OPTIMIZATION;
> +	return 0;
> +}
> +EXPORT_SYMBOL(crc32_optimizations);
> +
>  MODULE_LICENSE("GPL");
>  MODULE_DESCRIPTION("arm64-optimized CRC32 functions");
> diff --git a/arch/riscv/lib/crc32-riscv.c b/arch/riscv/lib/crc32-riscv.c
> index a3ff7db2a1ce..53d56ab422c7 100644
> --- a/arch/riscv/lib/crc32-riscv.c
> +++ b/arch/riscv/lib/crc32-riscv.c
> @@ -295,7 +295,17 @@ u32 __pure crc32_be_arch(u32 crc, const u8 *p, size_t len)
>  legacy:
>  	return crc32_be_base(crc, p, len);
>  }
>  EXPORT_SYMBOL(crc32_be_arch);
>
> +u32 crc32_optimizations(void)
> +{
> +	if (riscv_has_extension_likely(RISCV_ISA_EXT_ZBC))
> +		return CRC32_LE_OPTIMIZATION |
> +		       CRC32_BE_OPTIMIZATION |
> +		       CRC32C_OPTIMIZATION;
> +	return 0;
> +}
> +EXPORT_SYMBOL(crc32_optimizations);
> +
>  MODULE_LICENSE("GPL");
>  MODULE_DESCRIPTION("Accelerated CRC32 implementation with Zbc extension");
> diff --git a/include/linux/crc32.h b/include/linux/crc32.h
> index 58c632533b08..e9bd40056687 100644
> --- a/include/linux/crc32.h
> +++ b/include/linux/crc32.h
> @@ -35,10 +35,25 @@ static inline u32 __pure __crc32c_le(u32 crc, const u8 *p, size_t len)
>  	if (IS_ENABLED(CONFIG_CRC32_ARCH))
>  		return crc32c_le_arch(crc, p, len);
>  	return crc32c_le_base(crc, p, len);
>  }
>
> +/*
> + * crc32_optimizations() returns flags that indicate which CRC32 library
> + * functions are using architecture-specific optimizations. Unlike
> + * IS_ENABLED(CONFIG_CRC32_ARCH) it takes into account the different CRC32
> + * variants and also whether any needed CPU features are available at runtime.
> + */
> +#define CRC32_LE_OPTIMIZATION BIT(0) /* crc32_le() is optimized */
> +#define CRC32_BE_OPTIMIZATION BIT(1) /* crc32_be() is optimized */
> +#define CRC32C_OPTIMIZATION BIT(2) /* __crc32c_le() is optimized */
> +#if IS_ENABLED(CONFIG_CRC32_ARCH)
> +u32 crc32_optimizations(void);
> +#else
> +static inline u32 crc32_optimizations(void) { return 0; }
> +#endif
> +
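
(As an aside on the btrfs point in the commit message: once this lands,
the crypto_shash_driver_name() parsing could presumably be replaced by a
direct check of the flag, along the lines of the sketch below; the helper
name is made up for illustration.)

#include <linux/crc32.h>

/* Illustrative helper only: report whether the crc32c library call is
 * actually accelerated on this system. */
static bool btrfs_crc32c_is_accelerated(void)
{
	return crc32_optimizations() & CRC32C_OPTIMIZATION;
}
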
>  /**
>   * crc32_le_combine - Combine two crc32 check values into one. For two
>   *		      sequences of bytes, seq1 and seq2 with lengths len1
>   *		      and len2, crc32_le() check values were calculated
>   *		      for each, crc1 and crc2.
> --
> 2.47.0