GitHub Repository: torvalds/linux
Path: blob/master/arch/arm64/crypto/ghash-ce-glue.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with ARMv8 PMULL instructions.
 *
 * Copyright (C) 2014 - 2018 Linaro Ltd. <[email protected]>
 */

#include <crypto/aes.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/ghash.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/cpufeature.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/unaligned.h>

#include <asm/simd.h>

MODULE_DESCRIPTION("GHASH and AES-GCM using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <[email protected]>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

#define RFC4106_NONCE_SIZE 4

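/*
 * GHASH key: kept in its original form for the gf128mul based fallback and
 * as precomputed powers of H in the shifted form consumed by the PMULL
 * assembly. The flexible h[] array holds one power for the bare GHASH
 * shash and four powers (H..H^4) for the AES-GCM transforms; see the
 * cra_ctxsize values below.
 */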
struct ghash_key {
	be128			k;
	u64			h[][2];
};

struct arm_ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
};

struct gcm_aes_ctx {
	struct aes_enckey	aes_key;
	u8			nonce[RFC4106_NONCE_SIZE];
	struct ghash_key	ghash_key;
};

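/* GHASH/GCM primitives implemented in assembly (ghash-ce-core.S) */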
asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
				       u64 const h[][2], const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
				      u64 const h[][2], const char *head);

asmlinkage void pmull_gcm_encrypt(int bytes, u8 dst[], const u8 src[],
				  u64 const h[][2], u64 dg[], u8 ctr[],
				  u32 const rk[], int rounds, u8 tag[]);
asmlinkage int pmull_gcm_decrypt(int bytes, u8 dst[], const u8 src[],
				 u64 const h[][2], u64 dg[], u8 ctr[],
				 u32 const rk[], int rounds, const u8 l[],
				 const u8 tag[], u64 authsize);

static int ghash_init(struct shash_desc *desc)
{
	struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc);

	*ctx = (struct arm_ghash_desc_ctx){};
	return 0;
}

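/*
 * Call one of the assembly GHASH routines with kernel mode SIMD enabled for
 * the duration of the call.
 */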
static __always_inline
void ghash_do_simd_update(int blocks, u64 dg[], const char *src,
			  struct ghash_key *key, const char *head,
			  void (*simd_update)(int blocks, u64 dg[],
					      const char *src,
					      u64 const h[][2],
					      const char *head))
{
	scoped_ksimd()
		simd_update(blocks, dg, src, key->h, head);
}

/* avoid hogging the CPU for too long */
#define MAX_BLOCKS	(SZ_64K / GHASH_BLOCK_SIZE)

static int ghash_update(struct shash_desc *desc, const u8 *src,
			unsigned int len)
{
	struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	struct ghash_key *key = crypto_shash_ctx(desc->tfm);
	int blocks;

	blocks = len / GHASH_BLOCK_SIZE;
	len -= blocks * GHASH_BLOCK_SIZE;

	do {
		int chunk = min(blocks, MAX_BLOCKS);

		ghash_do_simd_update(chunk, ctx->digest, src, key, NULL,
				     pmull_ghash_update_p8);
		blocks -= chunk;
		src += chunk * GHASH_BLOCK_SIZE;
	} while (unlikely(blocks > 0));
	return len;
}

static int ghash_export(struct shash_desc *desc, void *out)
{
	struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	u8 *dst = out;

	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);
	return 0;
}

static int ghash_import(struct shash_desc *desc, const void *in)
{
	struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	const u8 *src = in;

	ctx->digest[1] = get_unaligned_be64(src);
	ctx->digest[0] = get_unaligned_be64(src + 8);
	return 0;
}

static int ghash_finup(struct shash_desc *desc, const u8 *src,
		       unsigned int len, u8 *dst)
{
	struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	struct ghash_key *key = crypto_shash_ctx(desc->tfm);

	if (len) {
		u8 buf[GHASH_BLOCK_SIZE] = {};

		memcpy(buf, src, len);
		ghash_do_simd_update(1, ctx->digest, buf, key, NULL,
				     pmull_ghash_update_p8);
		memzero_explicit(buf, sizeof(buf));
	}
	return ghash_export(desc, dst);
}

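/*
 * Convert a GHASH key into the pre-shifted form used by the PMULL code:
 * each half is shifted left by one bit, and a carry out of the top bit is
 * folded back in with the 0xc2... reduction constant of the GHASH field
 * polynomial.
 */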
static void ghash_reflect(u64 h[], const be128 *k)
{
	u64 carry = be64_to_cpu(k->a) & BIT(63) ? 1 : 0;

	h[0] = (be64_to_cpu(k->b) << 1) | carry;
	h[1] = (be64_to_cpu(k->a) << 1) | (be64_to_cpu(k->b) >> 63);

	if (carry)
		h[1] ^= 0xc200000000000000UL;
}

static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	/* needed for the fallback */
	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);

	ghash_reflect(key->h[0], &key->k);
	return 0;
}

static struct shash_alg ghash_alg = {
	.base.cra_name		= "ghash",
	.base.cra_driver_name	= "ghash-neon",
	.base.cra_priority	= 150,
	.base.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY,
	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ghash_key) + sizeof(u64[2]),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update,
	.finup			= ghash_finup,
	.setkey			= ghash_setkey,
	.export			= ghash_export,
	.import			= ghash_import,
	.descsize		= sizeof(struct arm_ghash_desc_ctx),
	.statesize		= sizeof(struct ghash_desc_ctx),
};

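/*
 * Expand the AES key, derive the hash key H by encrypting an all-zero
 * block, and precompute H, H^2, H^3 and H^4 in the form expected by the
 * PMULL assembly.
 */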
static int gcm_aes_setkey(struct crypto_aead *tfm, const u8 *inkey,
			  unsigned int keylen)
{
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
	u8 key[GHASH_BLOCK_SIZE];
	be128 h;
	int ret;

	ret = aes_prepareenckey(&ctx->aes_key, inkey, keylen);
	if (ret)
		return -EINVAL;

	aes_encrypt(&ctx->aes_key, key, (u8[AES_BLOCK_SIZE]){});

	/* needed for the fallback */
	memcpy(&ctx->ghash_key.k, key, GHASH_BLOCK_SIZE);

	ghash_reflect(ctx->ghash_key.h[0], &ctx->ghash_key.k);

	h = ctx->ghash_key.k;
	gf128mul_lle(&h, &ctx->ghash_key.k);
	ghash_reflect(ctx->ghash_key.h[1], &h);

	gf128mul_lle(&h, &ctx->ghash_key.k);
	ghash_reflect(ctx->ghash_key.h[2], &h);

	gf128mul_lle(&h, &ctx->ghash_key.k);
	ghash_reflect(ctx->ghash_key.h[3], &h);

	return 0;
}

static int gcm_aes_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

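/*
 * Fold a chunk of associated data into the GHASH state, buffering partial
 * blocks in buf[] until a full GHASH_BLOCK_SIZE block is available.
 */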
static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
			   int *buf_count, struct gcm_aes_ctx *ctx)
{
	if (*buf_count > 0) {
		int buf_added = min(count, GHASH_BLOCK_SIZE - *buf_count);

		memcpy(&buf[*buf_count], src, buf_added);

		*buf_count += buf_added;
		src += buf_added;
		count -= buf_added;
	}

	if (count >= GHASH_BLOCK_SIZE || *buf_count == GHASH_BLOCK_SIZE) {
		int blocks = count / GHASH_BLOCK_SIZE;

		ghash_do_simd_update(blocks, dg, src, &ctx->ghash_key,
				     *buf_count ? buf : NULL,
				     pmull_ghash_update_p64);

		src += blocks * GHASH_BLOCK_SIZE;
		count %= GHASH_BLOCK_SIZE;
		*buf_count = 0;
	}

	if (count > 0) {
		memcpy(buf, src, count);
		*buf_count = count;
	}
}

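/*
 * Hash the associated data by walking the request's source scatterlist,
 * zero-padding the final partial block if there is one.
 */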
static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[], u32 len)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	u8 buf[GHASH_BLOCK_SIZE];
	struct scatter_walk walk;
	int buf_count = 0;

	scatterwalk_start(&walk, req->src);

	do {
		unsigned int n;

		n = scatterwalk_next(&walk, len);
		gcm_update_mac(dg, walk.addr, n, buf, &buf_count, ctx);
		scatterwalk_done_src(&walk, n);
		len -= n;
	} while (len);

	if (buf_count) {
		memset(&buf[buf_count], 0, GHASH_BLOCK_SIZE - buf_count);
		ghash_do_simd_update(1, dg, buf, &ctx->ghash_key, NULL,
				     pmull_ghash_update_p64);
	}
}

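/*
 * Encrypt and authenticate the plaintext. The 32-bit counter appended to
 * the IV starts at 2 (counter block 1 is reserved for the tag), a final
 * partial block is bounced through the stack buffer buf[], and on the last
 * pass the lengths block is handed to the assembly via 'tag' so it can
 * finish the GHASH and produce the authentication tag in place.
 */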
static int gcm_encrypt(struct aead_request *req, char *iv, int assoclen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];
	u64 dg[2] = {};
	be128 lengths;
	u8 *tag;
	int err;

	lengths.a = cpu_to_be64(assoclen * 8);
	lengths.b = cpu_to_be64(req->cryptlen * 8);

	if (assoclen)
		gcm_calculate_auth_mac(req, dg, assoclen);

	put_unaligned_be32(2, iv + GCM_AES_IV_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	do {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		int nbytes = walk.nbytes;

		tag = (u8 *)&lengths;

		if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) {
			src = dst = memcpy(buf + sizeof(buf) - nbytes,
					   src, nbytes);
		} else if (nbytes < walk.total) {
			nbytes &= ~(AES_BLOCK_SIZE - 1);
			tag = NULL;
		}

		scoped_ksimd()
			pmull_gcm_encrypt(nbytes, dst, src, ctx->ghash_key.h,
					  dg, iv, ctx->aes_key.k.rndkeys,
					  ctx->aes_key.nrounds, tag);

		if (unlikely(!nbytes))
			break;

		if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))
			memcpy(walk.dst.virt.addr,
			       buf + sizeof(buf) - nbytes, nbytes);

		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	} while (walk.nbytes);

	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(tag, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

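/*
 * Decrypt and verify. This mirrors gcm_encrypt(), except that the expected
 * tag is copied out of the source scatterlist up front and checked by the
 * assembly; any mismatch is reported as -EBADMSG.
 */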
static int gcm_decrypt(struct aead_request *req, char *iv, int assoclen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 otag[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u64 dg[2] = {};
	be128 lengths;
	u8 *tag;
	int ret;
	int err;

	lengths.a = cpu_to_be64(assoclen * 8);
	lengths.b = cpu_to_be64((req->cryptlen - authsize) * 8);

	if (assoclen)
		gcm_calculate_auth_mac(req, dg, assoclen);

	put_unaligned_be32(2, iv + GCM_AES_IV_SIZE);

	scatterwalk_map_and_copy(otag, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	do {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		int nbytes = walk.nbytes;

		tag = (u8 *)&lengths;

		if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) {
			src = dst = memcpy(buf + sizeof(buf) - nbytes,
					   src, nbytes);
		} else if (nbytes < walk.total) {
			nbytes &= ~(AES_BLOCK_SIZE - 1);
			tag = NULL;
		}

		scoped_ksimd()
			ret = pmull_gcm_decrypt(nbytes, dst, src,
						ctx->ghash_key.h,
						dg, iv, ctx->aes_key.k.rndkeys,
						ctx->aes_key.nrounds, tag, otag,
						authsize);

		if (unlikely(!nbytes))
			break;

		if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))
			memcpy(walk.dst.virt.addr,
			       buf + sizeof(buf) - nbytes, nbytes);

		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	} while (walk.nbytes);

	if (err)
		return err;

	return ret ? -EBADMSG : 0;
}

static int gcm_aes_encrypt(struct aead_request *req)
{
	u8 iv[AES_BLOCK_SIZE];

	memcpy(iv, req->iv, GCM_AES_IV_SIZE);
	return gcm_encrypt(req, iv, req->assoclen);
}

static int gcm_aes_decrypt(struct aead_request *req)
{
	u8 iv[AES_BLOCK_SIZE];

	memcpy(iv, req->iv, GCM_AES_IV_SIZE);
	return gcm_decrypt(req, iv, req->assoclen);
}

static int rfc4106_setkey(struct crypto_aead *tfm, const u8 *inkey,
			  unsigned int keylen)
{
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
	int err;

	keylen -= RFC4106_NONCE_SIZE;
	err = gcm_aes_setkey(tfm, inkey, keylen);
	if (err)
		return err;

	memcpy(ctx->nonce, inkey + keylen, RFC4106_NONCE_SIZE);
	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

static int rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	u8 iv[AES_BLOCK_SIZE];

	memcpy(iv, ctx->nonce, RFC4106_NONCE_SIZE);
	memcpy(iv + RFC4106_NONCE_SIZE, req->iv, GCM_RFC4106_IV_SIZE);

	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       gcm_encrypt(req, iv, req->assoclen - GCM_RFC4106_IV_SIZE);
}

static int rfc4106_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	u8 iv[AES_BLOCK_SIZE];

	memcpy(iv, ctx->nonce, RFC4106_NONCE_SIZE);
	memcpy(iv + RFC4106_NONCE_SIZE, req->iv, GCM_RFC4106_IV_SIZE);

	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       gcm_decrypt(req, iv, req->assoclen - GCM_RFC4106_IV_SIZE);
}

static struct aead_alg gcm_aes_algs[] = {{
	.ivsize			= GCM_AES_IV_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.maxauthsize		= AES_BLOCK_SIZE,
	.setkey			= gcm_aes_setkey,
	.setauthsize		= gcm_aes_setauthsize,
	.encrypt		= gcm_aes_encrypt,
	.decrypt		= gcm_aes_decrypt,

	.base.cra_name		= "gcm(aes)",
	.base.cra_driver_name	= "gcm-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct gcm_aes_ctx) +
				  4 * sizeof(u64[2]),
	.base.cra_module	= THIS_MODULE,
}, {
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.maxauthsize		= AES_BLOCK_SIZE,
	.setkey			= rfc4106_setkey,
	.setauthsize		= rfc4106_setauthsize,
	.encrypt		= rfc4106_encrypt,
	.decrypt		= rfc4106_decrypt,

	.base.cra_name		= "rfc4106(gcm(aes))",
	.base.cra_driver_name	= "rfc4106-gcm-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct gcm_aes_ctx) +
				  4 * sizeof(u64[2]),
	.base.cra_module	= THIS_MODULE,
}};

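/*
 * Register the AES-GCM AEADs when the PMULL instructions are available;
 * otherwise fall back to the NEON based GHASH shash.
 */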
static int __init ghash_ce_mod_init(void)
{
	if (!cpu_have_named_feature(ASIMD))
		return -ENODEV;

	if (cpu_have_named_feature(PMULL))
		return crypto_register_aeads(gcm_aes_algs,
					     ARRAY_SIZE(gcm_aes_algs));

	return crypto_register_shash(&ghash_alg);
}

static void __exit ghash_ce_mod_exit(void)
{
	if (cpu_have_named_feature(PMULL))
		crypto_unregister_aeads(gcm_aes_algs, ARRAY_SIZE(gcm_aes_algs));
	else
		crypto_unregister_shash(&ghash_alg);
}

static const struct cpu_feature __maybe_unused ghash_cpu_feature[] = {
	{ cpu_feature(PMULL) }, { }
};
MODULE_DEVICE_TABLE(cpu, ghash_cpu_feature);

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);