Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
torvalds
GitHub Repository: torvalds/linux
Path: blob/master/lib/crc/tests/crc_kunit.c
26282 views
1
// SPDX-License-Identifier: GPL-2.0-or-later
2
/*
3
* Unit tests and benchmarks for the CRC library functions
4
*
5
* Copyright 2024 Google LLC
6
*
7
* Author: Eric Biggers <[email protected]>
8
*/
9
#include <kunit/test.h>
10
#include <linux/crc7.h>
11
#include <linux/crc16.h>
12
#include <linux/crc-t10dif.h>
13
#include <linux/crc32.h>
14
#include <linux/crc32c.h>
15
#include <linux/crc64.h>
16
#include <linux/prandom.h>
17
#include <linux/vmalloc.h>
18
19
/* Fixed PRNG seed so the "random" test data is reproducible across runs. */
#define CRC_KUNIT_SEED 42
/* Maximum message length, in bytes, that the tests and benchmarks use. */
#define CRC_KUNIT_MAX_LEN 16384
/* Number of random trials run per CRC variant in crc_test(). */
#define CRC_KUNIT_NUM_TEST_ITERS 1000

/* PRNG state shared by all test cases; seeded in crc_suite_init(). */
static struct rnd_state rng;
/* Random test data; vmalloc()'ed in crc_suite_init(), freed in crc_suite_exit(). */
static u8 *test_buffer;
/* Allocated length of test_buffer (CRC_KUNIT_MAX_LEN rounded up to a page). */
static size_t test_buflen;
26
27
/**
 * struct crc_variant - describes a CRC variant
 * @bits: Number of bits in the CRC, 1 <= @bits <= 64.
 * @le: true if it's a "little endian" CRC (reversed mapping between bits and
 *	polynomial coefficients in each byte), false if it's a "big endian" CRC
 *	(natural mapping between bits and polynomial coefficients in each byte)
 * @poly: The generator polynomial with the highest-order term omitted.
 *	  Bit-reversed if @le is true.
 * @func: The function to compute a CRC.  The type signature uses u64 so that it
 *	  can fit any CRC up to CRC-64.  The CRC is passed in, and is expected
 *	  to be returned in, the least significant bits of the u64.  The
 *	  function is expected to *not* invert the CRC at the beginning and end.
 */
struct crc_variant {
	int bits;
	bool le;	/* defaults to false (big endian) if omitted from an initializer */
	u64 poly;
	u64 (*func)(u64 crc, const u8 *p, size_t len);
};
46
47
/* Return the next 32-bit value from the suite's seeded PRNG state. */
static u32 rand32(void)
{
	return prandom_u32_state(&rng);
}
51
52
/*
 * Return the next 64-bit pseudorandom value, built from two consecutive
 * 32-bit draws (the first draw supplies the high half).
 */
static u64 rand64(void)
{
	u64 hi = rand32();
	u64 lo = rand32();

	return (hi << 32) | lo;
}
58
59
static u64 crc_mask(const struct crc_variant *v)
60
{
61
return (u64)-1 >> (64 - v->bits);
62
}
63
64
/*
 * Reference implementation of any CRC variant: a straightforward
 * bit-at-a-time computation driven purely by the struct crc_variant
 * parameters.  Slow, but obviously correct, so it serves as the oracle
 * that the optimized library implementations are checked against.
 */
static u64 crc_ref(const struct crc_variant *v,
		   u64 crc, const u8 *p, size_t len)
{
	size_t i, j;

	for (i = 0; i < len; i++) {
		for (j = 0; j < 8; j++) {
			if (v->le) {
				/*
				 * lsb-first: XOR the next data bit into the
				 * low end, then shift right, folding in the
				 * (bit-reversed) polynomial when the bit
				 * shifted out was 1.  Both operands below use
				 * the pre-shift value of crc.
				 */
				crc ^= (p[i] >> j) & 1;
				crc = (crc >> 1) ^ ((crc & 1) ? v->poly : 0);
			} else {
				/*
				 * msb-first: XOR the next data bit into the
				 * high end (bit v->bits - 1), then shift left,
				 * folding in the polynomial and re-masking to
				 * @bits when the top bit was 1.  The plain
				 * shift in the else-branch cannot overflow the
				 * mask, since the top bit was 0.
				 */
				crc ^= (u64)((p[i] >> (7 - j)) & 1) <<
				       (v->bits - 1);
				if (crc & (1ULL << (v->bits - 1)))
					crc = ((crc << 1) ^ v->poly) &
					      crc_mask(v);
				else
					crc <<= 1;
			}
		}
	}
	return crc;
}
88
89
/*
 * Suite-wide setup: allocate and fill the shared test buffer.
 * Returns 0 on success or -ENOMEM if the allocation fails.
 */
static int crc_suite_init(struct kunit_suite *suite)
{
	/*
	 * Allocate the test buffer using vmalloc() with a page-aligned length
	 * so that it is immediately followed by a guard page.  This allows
	 * buffer overreads to be detected, even in assembly code.
	 */
	test_buflen = round_up(CRC_KUNIT_MAX_LEN, PAGE_SIZE);
	test_buffer = vmalloc(test_buflen);
	if (!test_buffer)
		return -ENOMEM;

	/* Seed first, then fill, so the buffer contents are reproducible. */
	prandom_seed_state(&rng, CRC_KUNIT_SEED);
	prandom_bytes_state(&rng, test_buffer, test_buflen);
	return 0;
}
105
106
static void crc_suite_exit(struct kunit_suite *suite)
107
{
108
vfree(test_buffer);
109
test_buffer = NULL;
110
}
111
112
/* Generate a random initial CRC. */
113
static u64 generate_random_initial_crc(const struct crc_variant *v)
114
{
115
switch (rand32() % 4) {
116
case 0:
117
return 0;
118
case 1:
119
return crc_mask(v); /* All 1 bits */
120
default:
121
return rand64() & crc_mask(v);
122
}
123
}
124
125
/* Generate a random length, preferring small lengths. */
126
static size_t generate_random_length(size_t max_length)
127
{
128
size_t len;
129
130
switch (rand32() % 3) {
131
case 0:
132
len = rand32() % 128;
133
break;
134
case 1:
135
len = rand32() % 3072;
136
break;
137
default:
138
len = rand32();
139
break;
140
}
141
return len % (max_length + 1);
142
}
143
144
/*
 * Test that v->func gives the same CRCs as a reference implementation.
 *
 * Each iteration picks a random initial CRC, length, and buffer offset,
 * then compares v->func against the bit-at-a-time crc_ref() oracle.
 * Occasionally the computation runs with IRQs disabled, which on some
 * architectures forces the no-SIMD fallback code path to be exercised.
 */
static void crc_test(struct kunit *test, const struct crc_variant *v)
{
	size_t i;

	for (i = 0; i < CRC_KUNIT_NUM_TEST_ITERS; i++) {
		u64 init_crc, expected_crc, actual_crc;
		size_t len, offset;
		bool nosimd;

		init_crc = generate_random_initial_crc(v);
		len = generate_random_length(CRC_KUNIT_MAX_LEN);

		/* Generate a random offset. */
		if (rand32() % 2 == 0) {
			/* Use a random alignment mod 64 */
			offset = rand32() % 64;
			/* Clamp so [offset, offset + len) stays in bounds. */
			offset = min(offset, CRC_KUNIT_MAX_LEN - len);
		} else {
			/* Go up to the guard page, to catch buffer overreads */
			offset = test_buflen - len;
		}

		if (rand32() % 8 == 0)
			/* Refresh the data occasionally. */
			prandom_bytes_state(&rng, &test_buffer[offset], len);

		/* 1-in-8 chance of testing the no-SIMD path (IRQs off). */
		nosimd = rand32() % 8 == 0;

		/*
		 * Compute the CRC, and verify that it equals the CRC computed
		 * by a simple bit-at-a-time reference implementation.
		 */
		expected_crc = crc_ref(v, init_crc, &test_buffer[offset], len);
		if (nosimd)
			local_irq_disable();
		actual_crc = v->func(init_crc, &test_buffer[offset], len);
		if (nosimd)
			local_irq_enable();
		KUNIT_EXPECT_EQ_MSG(test, expected_crc, actual_crc,
				    "Wrong result with len=%zu offset=%zu nosimd=%d",
				    len, offset, nosimd);
	}
}
188
189
/*
 * Benchmark @crc_func over a range of message lengths and report the
 * throughput in MB/s for each.  Skipped unless CONFIG_CRC_BENCHMARK is
 * enabled.  __always_inline so each wrapper gets a direct-call copy rather
 * than benchmarking through a function pointer.
 */
static __always_inline void
crc_benchmark(struct kunit *test,
	      u64 (*crc_func)(u64 crc, const u8 *p, size_t len))
{
	static const size_t lens_to_test[] = {
		1, 16, 64, 127, 128, 200, 256, 511, 512, 1024, 3173, 4096, 16384,
	};
	size_t len, i, j, num_iters;
	/*
	 * The CRC value that this function computes in a series of calls to
	 * crc_func is never actually used, so use volatile to ensure that the
	 * computations are done as intended and don't all get optimized out.
	 */
	volatile u64 crc = 0;
	u64 t;

	if (!IS_ENABLED(CONFIG_CRC_BENCHMARK))
		kunit_skip(test, "not enabled");

	/* warm-up */
	for (i = 0; i < 10000000; i += CRC_KUNIT_MAX_LEN)
		crc = crc_func(crc, test_buffer, CRC_KUNIT_MAX_LEN);

	for (i = 0; i < ARRAY_SIZE(lens_to_test); i++) {
		len = lens_to_test[i];
		KUNIT_ASSERT_LE(test, len, CRC_KUNIT_MAX_LEN);
		/*
		 * Scale the iteration count so each length processes roughly
		 * the same total bytes; the +128 keeps per-call overhead from
		 * dominating at tiny lengths.
		 */
		num_iters = 10000000 / (len + 128);
		/* Time the loop with preemption off to reduce jitter. */
		preempt_disable();
		t = ktime_get_ns();
		for (j = 0; j < num_iters; j++)
			crc = crc_func(crc, test_buffer, len);
		t = ktime_get_ns() - t;
		preempt_enable();
		/* bytes * 1000 / ns == MB/s */
		kunit_info(test, "len=%zu: %llu MB/s\n",
			   len, div64_u64((u64)len * num_iters * 1000, t));
	}
}
226
227
/* crc7_be */

static u64 crc7_be_wrapper(u64 crc, const u8 *p, size_t len)
{
	/*
	 * crc7_be() left-aligns the 7-bit CRC in a u8, whereas the test wants a
	 * right-aligned CRC (in a u64).  Convert between the conventions.
	 */
	return crc7_be(crc << 1, p, len) >> 1;
}

/* CRC-7 parameters; .le is omitted (false), i.e. a "big endian" CRC */
static const struct crc_variant crc_variant_crc7_be = {
	.bits = 7,
	.poly = 0x9,
	.func = crc7_be_wrapper,
};

static void crc7_be_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc7_be);
}

static void crc7_be_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc7_be_wrapper);
}
253
254
/* crc16 */

static u64 crc16_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc16(crc, p, len);
}

/* CRC-16 parameters; .poly is the bit-reversed polynomial since .le is true */
static const struct crc_variant crc_variant_crc16 = {
	.bits = 16,
	.le = true,
	.poly = 0xa001,
	.func = crc16_wrapper,
};

static void crc16_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc16);
}

static void crc16_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc16_wrapper);
}
277
278
/* crc_t10dif */

static u64 crc_t10dif_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc_t10dif_update(crc, p, len);
}

/* CRC-T10DIF parameters: a 16-bit "big endian" (msb-first) CRC */
static const struct crc_variant crc_variant_crc_t10dif = {
	.bits = 16,
	.le = false,
	.poly = 0x8bb7,
	.func = crc_t10dif_wrapper,
};

static void crc_t10dif_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc_t10dif);
}

static void crc_t10dif_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc_t10dif_wrapper);
}
301
302
/* crc32_le */

static u64 crc32_le_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc32_le(crc, p, len);
}

/* CRC-32 parameters; .poly is the bit-reversed polynomial since .le is true */
static const struct crc_variant crc_variant_crc32_le = {
	.bits = 32,
	.le = true,
	.poly = 0xedb88320,
	.func = crc32_le_wrapper,
};

static void crc32_le_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc32_le);
}

static void crc32_le_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc32_le_wrapper);
}
325
326
/* crc32_be */

static u64 crc32_be_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc32_be(crc, p, len);
}

/* CRC-32/BE parameters: same generator as crc32_le but in natural bit order */
static const struct crc_variant crc_variant_crc32_be = {
	.bits = 32,
	.le = false,
	.poly = 0x04c11db7,
	.func = crc32_be_wrapper,
};

static void crc32_be_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc32_be);
}

static void crc32_be_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc32_be_wrapper);
}
349
350
/* crc32c */

static u64 crc32c_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc32c(crc, p, len);
}

/* CRC-32C (Castagnoli) parameters; bit-reversed polynomial since .le is true */
static const struct crc_variant crc_variant_crc32c = {
	.bits = 32,
	.le = true,
	.poly = 0x82f63b78,
	.func = crc32c_wrapper,
};

static void crc32c_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc32c);
}

static void crc32c_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc32c_wrapper);
}
373
374
/* crc64_be */

static u64 crc64_be_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc64_be(crc, p, len);
}

/* CRC-64/BE parameters: a 64-bit "big endian" (msb-first) CRC */
static const struct crc_variant crc_variant_crc64_be = {
	.bits = 64,
	.le = false,
	.poly = 0x42f0e1eba9ea3693,
	.func = crc64_be_wrapper,
};

static void crc64_be_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc64_be);
}

static void crc64_be_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc64_be_wrapper);
}
397
398
/* crc64_nvme */

static u64 crc64_nvme_wrapper(u64 crc, const u8 *p, size_t len)
{
	/*
	 * The inversions that crc64_nvme() does have to be undone here, since
	 * crc_ref() (and hence the test) expects a non-inverting CRC function.
	 */
	return ~crc64_nvme(~crc, p, len);
}

/* CRC-64/NVME parameters; bit-reversed polynomial since .le is true */
static const struct crc_variant crc_variant_crc64_nvme = {
	.bits = 64,
	.le = true,
	.poly = 0x9a6c9329ac4bc9b5,
	.func = crc64_nvme_wrapper,
};

static void crc64_nvme_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc64_nvme);
}

static void crc64_nvme_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc64_nvme_wrapper);
}
422
423
/* Each variant's correctness test is listed next to its benchmark. */
static struct kunit_case crc_test_cases[] = {
	KUNIT_CASE(crc7_be_test),
	KUNIT_CASE(crc7_be_benchmark),
	KUNIT_CASE(crc16_test),
	KUNIT_CASE(crc16_benchmark),
	KUNIT_CASE(crc_t10dif_test),
	KUNIT_CASE(crc_t10dif_benchmark),
	KUNIT_CASE(crc32_le_test),
	KUNIT_CASE(crc32_le_benchmark),
	KUNIT_CASE(crc32_be_test),
	KUNIT_CASE(crc32_be_benchmark),
	KUNIT_CASE(crc32c_test),
	KUNIT_CASE(crc32c_benchmark),
	KUNIT_CASE(crc64_be_test),
	KUNIT_CASE(crc64_be_benchmark),
	KUNIT_CASE(crc64_nvme_test),
	KUNIT_CASE(crc64_nvme_benchmark),
	{},
};
442
443
/* Suite definition: per-suite init/exit manage the shared test buffer. */
static struct kunit_suite crc_test_suite = {
	.name = "crc",
	.test_cases = crc_test_cases,
	.suite_init = crc_suite_init,
	.suite_exit = crc_suite_exit,
};
kunit_test_suite(crc_test_suite);

MODULE_DESCRIPTION("Unit tests and benchmarks for the CRC library functions");
MODULE_LICENSE("GPL");
453
454