GitHub Repository: awilliam/linux-vfio
Path: blob/master/crypto/ccm.c
/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <[email protected]>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

#include "internal.h"

struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_spawn cipher;
};

struct crypto_ccm_ctx {
	struct crypto_cipher *cipher;
	struct crypto_ablkcipher *ctr;
};

struct crypto_rfc4309_ctx {
	struct crypto_aead *child;
	u8 nonce[3];
};

struct crypto_ccm_req_priv_ctx {
	u8 odata[16];
	u8 idata[16];
	u8 auth_tag[16];
	u32 ilen;
	u32 flags;
	struct scatterlist src[2];
	struct scatterlist dst[2];
	struct ablkcipher_request abreq;
};

static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
	__be32 data;

	memset(block, 0, csize);
	block += csize;

	if (csize >= 4)
		csize = 4;
	else if (msglen > (1 << (8 * csize)))
		return -EOVERFLOW;

	data = cpu_to_be32(msglen);
	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

	return 0;
}
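
/*
 * Worked example for set_msg_len() above (illustrative, not from the
 * original source): the message length is written big-endian into the
 * last csize octets of the counter block.  With msglen = 0x020100 and
 * csize = 3, cpu_to_be32() yields the octets 00 02 01 00 and the copy
 * keeps the low three: 02 01 00.
 */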

static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ablkcipher *ctr = ctx->ctr;
	struct crypto_cipher *tfm = ctx->cipher;
	int err = 0;

	crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);
	err = crypto_ablkcipher_setkey(ctr, key, keylen);
	crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
			      CRYPTO_TFM_RES_MASK);
	if (err)
		goto out;

	crypto_cipher_clear_flags(tfm, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tfm, crypto_aead_get_flags(aead) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tfm, key, keylen);
	crypto_aead_set_flags(aead, crypto_cipher_get_flags(tfm) &
			      CRYPTO_TFM_RES_MASK);

out:
	return err;
}

static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int format_input(u8 *info, struct aead_request *req,
			unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int lp = req->iv[0];
	unsigned int l = lp + 1;
	unsigned int m;

	m = crypto_aead_authsize(aead);

	memcpy(info, req->iv, 16);

	/* format control info per RFC 3610 and
	 * NIST Special Publication 800-38C
	 */
	*info |= (8 * ((m - 2) / 2));
	if (req->assoclen)
		*info |= 64;

	return set_msg_len(info + 16 - l, cryptlen, l);
}
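
/*
 * Flags octet built by format_input() above (RFC 3610 section 2.2):
 * bit 6 = Adata present, bits 5..3 = (M - 2) / 2 where M is the tag
 * length, bits 2..0 = L' = L - 1 (already present in iv[0]).  As an
 * illustrative example, an 8-byte tag with associated data and
 * iv[0] = 2 gives 0x40 | 0x18 | 0x02 = 0x5a.
 */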

static int format_adata(u8 *adata, unsigned int a)
{
	int len = 0;

	/* add control info for associated data
	 * RFC 3610 and NIST Special Publication 800-38C
	 */
	if (a < 65280) {
		*(__be16 *)adata = cpu_to_be16(a);
		len = 2;
	} else {
		*(__be16 *)adata = cpu_to_be16(0xfffe);
		*(__be32 *)&adata[2] = cpu_to_be32(a);
		len = 6;
	}

	return len;
}
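
/*
 * Worked examples for the associated-data length encoding above:
 * a < 0xff00 is encoded as two big-endian octets (a = 24 -> 00 18);
 * larger values get the marker 0xff 0xfe followed by four length
 * octets (a = 0x10000 -> ff fe 00 01 00 00).
 */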

static void compute_mac(struct crypto_cipher *tfm, u8 *data, int n,
			struct crypto_ccm_req_priv_ctx *pctx)
{
	unsigned int bs = 16;
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int datalen, getlen;

	datalen = n;

	/* first time in here, block may be partially filled. */
	getlen = bs - pctx->ilen;
	if (datalen >= getlen) {
		memcpy(idata + pctx->ilen, data, getlen);
		crypto_xor(odata, idata, bs);
		crypto_cipher_encrypt_one(tfm, odata, odata);
		datalen -= getlen;
		data += getlen;
		pctx->ilen = 0;
	}

	/* now encrypt rest of data */
	while (datalen >= bs) {
		crypto_xor(odata, data, bs);
		crypto_cipher_encrypt_one(tfm, odata, odata);

		datalen -= bs;
		data += bs;
	}

	/* check and see if there's leftover data that wasn't
	 * enough to fill a block.
	 */
	if (datalen) {
		memcpy(idata + pctx->ilen, data, datalen);
		pctx->ilen += datalen;
	}
}
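
/*
 * compute_mac() above implements the CBC-MAC recurrence
 * odata = E_K(odata ^ B) for each 16-byte block B; idata buffers a
 * partial block between calls so input may arrive in arbitrary-sized
 * chunks.
 */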

static void get_data_to_compute(struct crypto_cipher *tfm,
				struct crypto_ccm_req_priv_ctx *pctx,
				struct scatterlist *sg, unsigned int len)
{
	struct scatter_walk walk;
	u8 *data_src;
	int n;

	scatterwalk_start(&walk, sg);

	while (len) {
		n = scatterwalk_clamp(&walk, len);
		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		data_src = scatterwalk_map(&walk, 0);

		compute_mac(tfm, data_src, n, pctx);
		len -= n;

		scatterwalk_unmap(data_src, 0);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
		if (len)
			crypto_yield(pctx->flags);
	}

	/* any leftover needs padding and then encrypted */
	if (pctx->ilen) {
		int padlen;
		u8 *odata = pctx->odata;
		u8 *idata = pctx->idata;

		padlen = 16 - pctx->ilen;
		memset(idata + pctx->ilen, 0, padlen);
		crypto_xor(odata, idata, 16);
		crypto_cipher_encrypt_one(tfm, odata, odata);
		pctx->ilen = 0;
	}
}

static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_cipher *cipher = ctx->cipher;
	unsigned int assoclen = req->assoclen;
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int err;

	/* format control data for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	/* encrypt first block to use as start in computing mac */
	crypto_cipher_encrypt_one(cipher, odata, odata);

	/* format associated data and compute into mac */
	if (assoclen) {
		pctx->ilen = format_adata(idata, assoclen);
		get_data_to_compute(cipher, pctx, req->assoc, req->assoclen);
	} else {
		pctx->ilen = 0;
	}

	/* compute plaintext into mac */
	get_data_to_compute(cipher, pctx, plain, cryptlen);

out:
	return err;
}

static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	u8 *odata = pctx->odata;

	if (!err)
		scatterwalk_map_and_copy(odata, req->dst, req->cryptlen,
					 crypto_aead_authsize(aead), 1);
	aead_request_complete(req, err);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (1 > iv[0] || iv[0] > 7)
		return -EINVAL;

	return 0;
}
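
/*
 * Counter-block layout used by the encrypt/decrypt paths below
 * (illustrative): req->iv holds A_0 = [ L' | nonce | counter ] per
 * RFC 3610.  With iv[0] = L' = 3, memset(iv + 15 - iv[0], 0,
 * iv[0] + 1) zeroes the last L = 4 octets, resetting the counter.
 */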

static int crypto_ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->abreq;
	struct scatterlist *dst;
	unsigned int cryptlen = req->cryptlen;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	err = crypto_ccm_auth(req, req->src, cryptlen);
	if (err)
		return err;

	/* Note: rfc 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 2);
	sg_set_buf(pctx->src, odata, 16);
	scatterwalk_sg_chain(pctx->src, 2, req->src);

	dst = pctx->src;
	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 2);
		sg_set_buf(pctx->dst, odata, 16);
		scatterwalk_sg_chain(pctx->dst, 2, req->dst);
		dst = pctx->dst;
	}

	ablkcipher_request_set_tfm(abreq, ctx->ctr);
	ablkcipher_request_set_callback(abreq, pctx->flags,
					crypto_ccm_encrypt_done, req);
	ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_ablkcipher_encrypt(abreq);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(odata, req->dst, cryptlen,
				 crypto_aead_authsize(aead), 1);
	return err;
}
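
/*
 * Note on cryptlen + 16 above: pctx->src chains the 16-byte CBC-MAC in
 * odata in front of the payload, so the first CTR keystream block
 * (counter zero) encrypts the MAC into the auth tag while the
 * remaining blocks encrypt the plaintext.
 */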

static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
				    int err)
{
	struct aead_request *req = areq->data;
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;

	if (!err) {
		err = crypto_ccm_auth(req, req->dst, cryptlen);
		if (!err && memcmp(pctx->auth_tag, pctx->odata, authsize))
			err = -EBADMSG;
	}
	aead_request_complete(req, err);
}

static int crypto_ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->abreq;
	struct scatterlist *dst;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u8 *authtag = pctx->auth_tag;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	if (cryptlen < authsize)
		return -EINVAL;
	cryptlen -= authsize;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	scatterwalk_map_and_copy(authtag, req->src, cryptlen, authsize, 0);

	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 2);
	sg_set_buf(pctx->src, authtag, 16);
	scatterwalk_sg_chain(pctx->src, 2, req->src);

	dst = pctx->src;
	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 2);
		sg_set_buf(pctx->dst, authtag, 16);
		scatterwalk_sg_chain(pctx->dst, 2, req->dst);
		dst = pctx->dst;
	}

	ablkcipher_request_set_tfm(abreq, ctx->ctr);
	ablkcipher_request_set_callback(abreq, pctx->flags,
					crypto_ccm_decrypt_done, req);
	ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_ablkcipher_decrypt(abreq);
	if (err)
		return err;

	err = crypto_ccm_auth(req, req->dst, cryptlen);
	if (err)
		return err;

	/* verify */
	if (memcmp(authtag, odata, authsize))
		return -EBADMSG;

	return err;
}

static int crypto_ccm_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct ccm_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;
	struct crypto_ablkcipher *ctr;
	unsigned long align;
	int err;

	cipher = crypto_spawn_cipher(&ictx->cipher);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_cipher;

	ctx->cipher = cipher;
	ctx->ctr = ctr;

	align = crypto_tfm_alg_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = align +
				sizeof(struct crypto_ccm_req_priv_ctx) +
				crypto_ablkcipher_reqsize(ctr);

	return 0;

err_free_cipher:
	crypto_free_cipher(cipher);
	return err;
}

static void crypto_ccm_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->cipher);
	crypto_free_ablkcipher(ctx->ctr);
}

static struct crypto_instance *crypto_ccm_alloc_common(struct rtattr **tb,
						       const char *full_name,
						       const char *ctr_name,
						       const char *cipher_name)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_alg *ctr;
	struct crypto_alg *cipher;
	struct ccm_instance_ctx *ictx;
	int err;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		return ERR_PTR(err);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return ERR_PTR(-EINVAL);

	cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER,
				       CRYPTO_ALG_TYPE_MASK);
	err = PTR_ERR(cipher);
	if (IS_ERR(cipher))
		return ERR_PTR(err);

	err = -EINVAL;
	if (cipher->cra_blocksize != 16)
		goto out_put_cipher;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_cipher;

	ictx = crypto_instance_ctx(inst);

	err = crypto_init_spawn(&ictx->cipher, cipher, inst,
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ictx->ctr, inst);
	err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_cipher;

	ctr = crypto_skcipher_spawn_alg(&ictx->ctr);

	/* Not a stream cipher? */
	err = -EINVAL;
	if (ctr->cra_blocksize != 1)
		goto err_drop_ctr;

	/* We want the real thing: CTR with a full 16-byte counter-block IV. */
	if (ctr->cra_ablkcipher.ivsize != 16)
		goto err_drop_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->cra_driver_name,
		     cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
	inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = cipher->cra_priority + ctr->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = cipher->cra_alignmask | ctr->cra_alignmask |
				  (__alignof__(u32) - 1);
	inst->alg.cra_type = &crypto_aead_type;
	inst->alg.cra_aead.ivsize = 16;
	inst->alg.cra_aead.maxauthsize = 16;
	inst->alg.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.cra_init = crypto_ccm_init_tfm;
	inst->alg.cra_exit = crypto_ccm_exit_tfm;
	inst->alg.cra_aead.setkey = crypto_ccm_setkey;
	inst->alg.cra_aead.setauthsize = crypto_ccm_setauthsize;
	inst->alg.cra_aead.encrypt = crypto_ccm_encrypt;
	inst->alg.cra_aead.decrypt = crypto_ccm_decrypt;

out:
	crypto_mod_put(cipher);
	return inst;

err_drop_ctr:
	crypto_drop_skcipher(&ictx->ctr);
err_drop_cipher:
	crypto_drop_spawn(&ictx->cipher);
err_free_inst:
	kfree(inst);
out_put_cipher:
	inst = ERR_PTR(err);
	goto out;
}

static struct crypto_instance *crypto_ccm_alloc(struct rtattr **tb)
{
	int err;
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];
	char full_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(cipher_name);
	if (IS_ERR(cipher_name))
		return ERR_PTR(err);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name);
}
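
/*
 * Instantiation example (illustrative): requesting "ccm(aes)" above
 * produces ctr_name "ctr(aes)" and full_name "ccm(aes)", which
 * crypto_ccm_alloc_common() resolves to a "ccm_base(...)" instance,
 * e.g. driver name "ccm_base(ctr(aes-generic),aes-generic)".
 */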

static void crypto_ccm_free(struct crypto_instance *inst)
{
	struct ccm_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_spawn(&ctx->cipher);
	crypto_drop_skcipher(&ctx->ctr);
	kfree(inst);
}

static struct crypto_template crypto_ccm_tmpl = {
	.name = "ccm",
	.alloc = crypto_ccm_alloc,
	.free = crypto_ccm_free,
	.module = THIS_MODULE,
};

static struct crypto_instance *crypto_ccm_base_alloc(struct rtattr **tb)
{
	int err;
	const char *ctr_name;
	const char *cipher_name;
	char full_name[CRYPTO_MAX_ALG_NAME];

	ctr_name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(ctr_name);
	if (IS_ERR(ctr_name))
		return ERR_PTR(err);

	cipher_name = crypto_attr_alg_name(tb[2]);
	err = PTR_ERR(cipher_name);
	if (IS_ERR(cipher_name))
		return ERR_PTR(err);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
		     ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name);
}

static struct crypto_template crypto_ccm_base_tmpl = {
	.name = "ccm_base",
	.alloc = crypto_ccm_base_alloc,
	.free = crypto_ccm_free,
	.module = THIS_MODULE,
};

static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 3)
		return -EINVAL;

	keylen -= 3;
	memcpy(ctx->nonce, key + keylen, 3);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
			      CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
			      CRYPTO_TFM_RES_MASK);

	return err;
}
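
/*
 * Key layout consumed by crypto_rfc4309_setkey() above (RFC 4309
 * section 7.1): the last three key octets are the nonce ("salt").
 * As an illustrative example, a 19-octet key is 16 octets of AES key
 * followed by the 3-octet salt stored in ctx->nonce.
 */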

static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);

	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->child, authsize);
}

static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;

	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv);
	aead_request_set_assoc(subreq, req->assoc, req->assoclen);

	return subreq;
}
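
/*
 * Inner IV assembled by crypto_rfc4309_crypt() above: iv[0] = L' = 3,
 * iv[1..3] = the salt from setkey, iv[4..11] = the 8-octet per-request
 * IV; the trailing 4 counter octets are zeroed later by the inner ccm
 * transform before use.
 */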

static int crypto_rfc4309_encrypt(struct aead_request *req)
{
	req = crypto_rfc4309_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4309_decrypt(struct aead_request *req)
{
	req = crypto_rfc4309_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4309_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = sizeof(struct aead_request) +
				ALIGN(crypto_aead_reqsize(aead),
				      crypto_tfm_ctx_alignment()) +
				align + 16;

	return 0;
}

static void crypto_rfc4309_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static struct crypto_instance *crypto_rfc4309_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct crypto_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		return ERR_PTR(err);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return ERR_PTR(-EINVAL);

	ccm_name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(ccm_name);
	if (IS_ERR(ccm_name))
		return ERR_PTR(err);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = crypto_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, inst);
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_aead_spawn_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (alg->cra_aead.ivsize != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
	inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_nivaead_type;

	inst->alg.cra_aead.ivsize = 8;
	inst->alg.cra_aead.maxauthsize = 16;

	inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

	inst->alg.cra_init = crypto_rfc4309_init_tfm;
	inst->alg.cra_exit = crypto_rfc4309_exit_tfm;

	inst->alg.cra_aead.setkey = crypto_rfc4309_setkey;
	inst->alg.cra_aead.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.cra_aead.encrypt = crypto_rfc4309_encrypt;
	inst->alg.cra_aead.decrypt = crypto_rfc4309_decrypt;

	inst->alg.cra_aead.geniv = "seqiv";

out:
	return inst;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}
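
/*
 * The "seqiv" default IV generator set above suits CCM: RFC 4309 only
 * requires IVs to be unique for a given key, not unpredictable, so a
 * sequence-number-based generator is sufficient.
 */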

static void crypto_rfc4309_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_rfc4309_tmpl = {
	.name = "rfc4309",
	.alloc = crypto_rfc4309_alloc,
	.free = crypto_rfc4309_free,
	.module = THIS_MODULE,
};

static int __init crypto_ccm_module_init(void)
{
	int err;

	err = crypto_register_template(&crypto_ccm_base_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_ccm_tmpl);
	if (err)
		goto out_undo_base;

	err = crypto_register_template(&crypto_rfc4309_tmpl);
	if (err)
		goto out_undo_ccm;

out:
	return err;

out_undo_ccm:
	crypto_unregister_template(&crypto_ccm_tmpl);
out_undo_base:
	crypto_unregister_template(&crypto_ccm_base_tmpl);
	goto out;
}

static void __exit crypto_ccm_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc4309_tmpl);
	crypto_unregister_template(&crypto_ccm_tmpl);
	crypto_unregister_template(&crypto_ccm_base_tmpl);
}

module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
MODULE_ALIAS("ccm_base");
MODULE_ALIAS("rfc4309");