author     Linus Torvalds <torvalds@linux-foundation.org>  2025-03-25 18:33:04 -0700
committer  Linus Torvalds <torvalds@linux-foundation.org>  2025-03-25 18:33:04 -0700
commit     ee6740fd34eb53c5c76be01201c15310f461b69f
tree       09a51108245a2e8f644d1956a9e5b9f974bc23b8 /lib/tests
parent     a86c6d0b2ad12f6ce6560f735f4799cf1f631ab2
parent     acf9f8da5e19fc1cbf26f2ecb749369e13e7cd85
Merge tag 'crc-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/ebiggers/linux
Pull CRC updates from Eric Biggers:
"Another set of improvements to the kernel's CRC (cyclic redundancy
check) code:
- Rework the CRC64 library functions to be directly optimized, like
what I did last cycle for the CRC32 and CRC-T10DIF library
functions
- Rewrite the x86 PCLMULQDQ-optimized CRC code, and add VPCLMULQDQ
support and acceleration for crc64_be and crc64_nvme
- Rewrite the riscv Zbc-optimized CRC code, and add acceleration for
crc_t10dif, crc64_be, and crc64_nvme
- Remove crc_t10dif and crc64_rocksoft from the crypto API, since
they are no longer needed there
- Rename crc64_rocksoft to crc64_nvme, as the old name was incorrect
- Add kunit test cases for crc64_nvme and crc7
- Eliminate redundant functions for calculating the Castagnoli CRC32,
settling on just crc32c()
- Remove unnecessary prompts from some of the CRC kconfig options
- Further optimize the x86 crc32c code"
* tag 'crc-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/ebiggers/linux: (36 commits)
x86/crc: drop the avx10_256 functions and rename avx10_512 to avx512
lib/crc: remove unnecessary prompt for CONFIG_CRC64
lib/crc: remove unnecessary prompt for CONFIG_LIBCRC32C
lib/crc: remove unnecessary prompt for CONFIG_CRC8
lib/crc: remove unnecessary prompt for CONFIG_CRC7
lib/crc: remove unnecessary prompt for CONFIG_CRC4
lib/crc7: unexport crc7_be_syndrome_table
lib/crc_kunit.c: update comment in crc_benchmark()
lib/crc_kunit.c: add test and benchmark for crc7_be()
x86/crc32: optimize tail handling for crc32c short inputs
riscv/crc64: add Zbc optimized CRC64 functions
riscv/crc-t10dif: add Zbc optimized CRC-T10DIF function
riscv/crc32: reimplement the CRC32 functions using new template
riscv/crc: add "template" for Zbc optimized CRC functions
x86/crc: add ANNOTATE_NOENDBR to suppress objtool warnings
x86/crc32: improve crc32c_arch() code generation with clang
x86/crc64: implement crc64_be and crc64_nvme using new template
x86/crc-t10dif: implement crc_t10dif using new template
x86/crc32: implement crc32_le using new template
x86/crc: add "template" for [V]PCLMULQDQ based CRC functions
...
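As a rough illustration of the consolidated library interface described in the pull message above, here is a minimal usage sketch. It is not part of this merge: the crc32c() and crc64_nvme() signatures match the calls visible in the crc_kunit.c diff below, but the seed values, the __maybe_unused helper, and its name crc_usage_example are illustrative assumptions only; the exact initial-value conventions are documented in <linux/crc32.h> and <linux/crc64.h>.

#include <linux/compiler.h>
#include <linux/crc32.h>
#include <linux/crc64.h>
#include <linux/types.h>

/* Hypothetical helper, for illustration only. */
static void __maybe_unused crc_usage_example(const u8 *buf, size_t len)
{
        /* Castagnoli CRC32: crc32c() is now the single library entry point. */
        u32 c32 = crc32c(~0, buf, len);

        /*
         * CRC-64/NVME (formerly "crc64_rocksoft").  Calls can be chained to
         * process a buffer in pieces; the seed shown here is illustrative.
         */
        u64 c64 = crc64_nvme(0, buf, len / 2);

        c64 = crc64_nvme(c64, buf + len / 2, len - len / 2);

        (void)c32;
        (void)c64;
}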
Diffstat (limited to 'lib/tests')
-rw-r--r--  lib/tests/crc_kunit.c  68
1 file changed, 64 insertions(+), 4 deletions(-)
diff --git a/lib/tests/crc_kunit.c b/lib/tests/crc_kunit.c
index 6a61d4b5fd45..585c48b65cef 100644
--- a/lib/tests/crc_kunit.c
+++ b/lib/tests/crc_kunit.c
@@ -7,6 +7,7 @@
  * Author: Eric Biggers <ebiggers@google.com>
  */
 #include <kunit/test.h>
+#include <linux/crc7.h>
 #include <linux/crc16.h>
 #include <linux/crc-t10dif.h>
 #include <linux/crc32.h>
@@ -32,7 +33,9 @@ static size_t test_buflen;
  * @poly: The generator polynomial with the highest-order term omitted.
  *        Bit-reversed if @le is true.
  * @func: The function to compute a CRC.  The type signature uses u64 so that it
- *        can fit any CRC up to CRC-64.
+ *        can fit any CRC up to CRC-64.  The CRC is passed in, and is expected
+ *        to be returned in, the least significant bits of the u64.  The
+ *        function is expected to *not* invert the CRC at the beginning and end.
  * @combine_func: Optional function to combine two CRCs.
  */
 struct crc_variant {
@@ -223,8 +226,9 @@ crc_benchmark(struct kunit *test,
        };
        size_t len, i, j, num_iters;
        /*
-        * Some of the CRC library functions are marked as __pure, so use
-        * volatile to ensure that all calls are really made as intended.
+        * The CRC value that this function computes in a series of calls to
+        * crc_func is never actually used, so use volatile to ensure that the
+        * computations are done as intended and don't all get optimized out.
         */
        volatile u64 crc = 0;
        u64 t;
@@ -251,6 +255,33 @@ crc_benchmark(struct kunit *test,
        }
 }
 
+/* crc7_be */
+
+static u64 crc7_be_wrapper(u64 crc, const u8 *p, size_t len)
+{
+       /*
+        * crc7_be() left-aligns the 7-bit CRC in a u8, whereas the test wants a
+        * right-aligned CRC (in a u64).  Convert between the conventions.
+        */
+       return crc7_be(crc << 1, p, len) >> 1;
+}
+
+static const struct crc_variant crc_variant_crc7_be = {
+       .bits = 7,
+       .poly = 0x9,
+       .func = crc7_be_wrapper,
+};
+
+static void crc7_be_test(struct kunit *test)
+{
+       crc_test(test, &crc_variant_crc7_be);
+}
+
+static void crc7_be_benchmark(struct kunit *test)
+{
+       crc_benchmark(test, crc7_be_wrapper);
+}
+
 /* crc16 */
 
 static u64 crc16_wrapper(u64 crc, const u8 *p, size_t len)
@@ -362,7 +393,7 @@ static u64 crc32c_wrapper(u64 crc, const u8 *p, size_t len)
 
 static u64 crc32c_combine_wrapper(u64 crc1, u64 crc2, size_t len2)
 {
-       return __crc32c_le_combine(crc1, crc2, len2);
+       return crc32c_combine(crc1, crc2, len2);
 }
 
 static const struct crc_variant crc_variant_crc32c = {
@@ -407,7 +438,34 @@ static void crc64_be_benchmark(struct kunit *test)
        crc_benchmark(test, crc64_be_wrapper);
 }
 
+/* crc64_nvme */
+
+static u64 crc64_nvme_wrapper(u64 crc, const u8 *p, size_t len)
+{
+       /* The inversions that crc64_nvme() does have to be undone here. */
+       return ~crc64_nvme(~crc, p, len);
+}
+
+static const struct crc_variant crc_variant_crc64_nvme = {
+       .bits = 64,
+       .le = true,
+       .poly = 0x9a6c9329ac4bc9b5,
+       .func = crc64_nvme_wrapper,
+};
+
+static void crc64_nvme_test(struct kunit *test)
+{
+       crc_test(test, &crc_variant_crc64_nvme);
+}
+
+static void crc64_nvme_benchmark(struct kunit *test)
+{
+       crc_benchmark(test, crc64_nvme_wrapper);
+}
+
 static struct kunit_case crc_test_cases[] = {
+       KUNIT_CASE(crc7_be_test),
+       KUNIT_CASE(crc7_be_benchmark),
        KUNIT_CASE(crc16_test),
        KUNIT_CASE(crc16_benchmark),
        KUNIT_CASE(crc_t10dif_test),
@@ -420,6 +478,8 @@ static struct kunit_case crc_test_cases[] = {
        KUNIT_CASE(crc32c_benchmark),
        KUNIT_CASE(crc64_be_test),
        KUNIT_CASE(crc64_be_benchmark),
+       KUNIT_CASE(crc64_nvme_test),
+       KUNIT_CASE(crc64_nvme_benchmark),
        {},
 };
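The diff above establishes the pattern for hooking a CRC variant into this KUnit suite: a wrapper that normalizes the library call to the harness convention (a non-inverted CRC in the low bits of a u64), a crc_variant descriptor, and a pair of KUNIT_CASE() entries. Purely as a sketch that follows that in-file pattern, and using a hypothetical library function crc64_example() and a placeholder polynomial that are not part of this merge or of the kernel, a further variant would be wired up roughly like this:

/* crc64_example (hypothetical, illustration only) */

static u64 crc64_example_wrapper(u64 crc, const u8 *p, size_t len)
{
        /*
         * Per the crc_variant documentation, the wrapper passes a plain
         * (non-inverted) CRC in the low bits of the u64.  If the hypothetical
         * crc64_example() inverted its argument or result, that would have to
         * be undone here, as crc64_nvme_wrapper() does above.
         */
        return crc64_example(crc, p, len);
}

static const struct crc_variant crc_variant_crc64_example = {
        .bits = 64,
        .le = true,                 /* set only for a bit-reversed convention */
        .poly = 0x0,                /* placeholder polynomial */
        .func = crc64_example_wrapper,
};

static void crc64_example_test(struct kunit *test)
{
        crc_test(test, &crc_variant_crc64_example);
}

static void crc64_example_benchmark(struct kunit *test)
{
        crc_benchmark(test, crc64_example_wrapper);
}

/*
 * ...plus two entries in crc_test_cases[]:
 *      KUNIT_CASE(crc64_example_test),
 *      KUNIT_CASE(crc64_example_benchmark),
 */

Whether .le is set and which polynomial is used depend on the definition of the variant being added, as with the crc64_nvme entry in the diff above.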