GitHub Repository: torvalds/linux
Path: blob/master/crypto/algapi.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <[email protected]>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/workqueue.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;
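
	/*
	 * A valid cra_alignmask is one less than a power of two, so the
	 * mask and mask + 1 share no bits: 0x7 & 0x8 == 0 passes, while
	 * 0x6 & 0x7 != 0 fails.
	 */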
	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			      CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}

static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance_workfn(struct work_struct *w)
{
	struct crypto_template *tmpl = container_of(w, struct crypto_template,
						    free_work);
	struct crypto_instance *inst;
	struct hlist_node *n;
	HLIST_HEAD(list);

	down_write(&crypto_alg_sem);
	hlist_for_each_entry_safe(inst, n, &tmpl->dead, list) {
		if (refcount_read(&inst->alg.cra_refcnt) != -1)
			continue;
		hlist_del(&inst->list);
		hlist_add_head(&inst->list, &list);
	}
	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, &list, list)
		crypto_free_instance(inst);
}
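
/*
 * Called via cra_destroy when the last reference to an instance is
 * dropped. Freeing is deferred: the refcount is stamped with -1 to mark
 * the instance as ready to free, and the template's free_work then
 * collects all such instances from tmpl->dead and frees them outside
 * crypto_alg_sem.
 */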
static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = container_of(alg,
						    struct crypto_instance,
						    alg);
	struct crypto_template *tmpl = inst->tmpl;

	refcount_set(&alg->cra_refcnt, -1);
	schedule_work(&tmpl->free_work);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl)
		return;

	list_del_init(&inst->alg.cra_list);
	hlist_del(&inst->list);
	hlist_add_head(&inst->list, &tmpl->dead);

	BUG_ON(!list_empty(&inst->alg.cra_users));

	crypto_alg_put(&inst->alg);
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns. If nalg is not NULL, then exempt any algorithms
 * that are depended on by nalg. This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree. The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the instance
			 * itself may still be unregistered: an instance's
			 * spawns are set up before the instance is
			 * registered, and its registration may also have
			 * failed. An unregistered instance will have NULL
			 * ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration. But an
			 * unregistered instance cannot have any users, so
			 * treat it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead. Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);

static void crypto_alg_finish_registration(struct crypto_alg *alg,
					   struct list_head *algs_to_put)
{
	struct crypto_alg *q;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, algs_to_put, alg);
	}

	crypto_notify(CRYPTO_MSG_ALG_LOADED, alg);
}

static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
{
	struct crypto_larval *larval;

	if (!IS_ENABLED(CONFIG_CRYPTO_SELFTESTS) ||
	    (alg->cra_flags & CRYPTO_ALG_INTERNAL))
		return NULL; /* No self-test needed */

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		return larval;

	larval->adult = crypto_mod_get(alg);
	if (!larval->adult) {
		kfree(larval);
		return ERR_PTR(-ENOENT);
	}

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	return larval;
}

static struct crypto_larval *
__crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_driver_name, alg->cra_driver_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_alloc_test_larval(alg);
	if (IS_ERR(larval))
		goto out;

	list_add(&alg->cra_list, &crypto_alg_list);

	if (larval) {
		/* No cheating! */
		alg->cra_flags &= ~CRYPTO_ALG_TESTED;

		list_add(&larval->alg.cra_list, &crypto_alg_list);
	} else {
		alg->cra_flags |= CRYPTO_ALG_TESTED;
		crypto_alg_finish_registration(alg, algs_to_put);
	}

out:
	return larval;

err:
	larval = ERR_PTR(ret);
	goto out;
}
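
/*
 * Record the outcome of an algorithm's self-test: find the matching test
 * larval, kill it, and on success mark the underlying algorithm as tested
 * and finish its registration.
 */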
void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	up_write(&crypto_alg_sem);
	return;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;

	if (crypto_is_dead(alg))
		goto complete;

	if (err == -ECANCELED)
		alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
	else if (err)
		goto complete;
	else
		alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	crypto_alg_finish_registration(alg, &list);

complete:
	list_del_init(&test->alg.cra_list);
	complete_all(&test->completion);

	up_write(&crypto_alg_sem);

	crypto_alg_put(&test->alg);
	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_free_alg(struct crypto_alg *alg)
{
	unsigned int algsize = alg->cra_type->algsize;
	u8 *p = (u8 *)alg - algsize;

	crypto_destroy_alg(alg);
	kfree(p);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	bool test_started = false;
	LIST_HEAD(algs_to_put);
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;
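
	/*
	 * CRYPTO_ALG_DUP_FIRST: register a private duplicate of the
	 * algorithm (together with the type-specific object of size
	 * algsize that embeds it), so the caller's copy need not stay
	 * alive; crypto_free_alg() frees the duplicate on destruction.
	 */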
	if (alg->cra_flags & CRYPTO_ALG_DUP_FIRST &&
	    !WARN_ON_ONCE(alg->cra_destroy)) {
		unsigned int algsize = alg->cra_type->algsize;
		u8 *p = (u8 *)alg - algsize;

		p = kmemdup(p, algsize + sizeof(*alg), GFP_KERNEL);
		if (!p)
			return -ENOMEM;

		alg = (void *)(p + algsize);
		alg->cra_destroy = crypto_free_alg;
	}

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg, &algs_to_put);
	if (!IS_ERR_OR_NULL(larval)) {
		test_started = crypto_boot_test_finished();
		larval->test_started = test_started;
	}
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval)) {
		crypto_alg_put(alg);
		return PTR_ERR(larval);
	}

	if (test_started)
		crypto_schedule_test(larval);
	else
		crypto_remove_final(&algs_to_put);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
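
/*
 * Illustrative sketch (not part of this file): a driver typically
 * registers its algorithms from module init and unregisters them on
 * exit. The fields shown mirror the checks in crypto_check_alg() above;
 * the names and values are hypothetical.
 *
 *	static struct crypto_alg my_alg = {
 *		.cra_name		= "cipher_xyz",
 *		.cra_driver_name	= "cipher_xyz-generic",
 *		.cra_priority		= 100,
 *		.cra_blocksize		= 16,
 *		.cra_module		= THIS_MODULE,
 *	};
 *
 *	static int __init my_module_init(void)
 *	{
 *		return crypto_register_alg(&my_alg);
 *	}
 *
 *	static void __exit my_module_exit(void)
 *	{
 *		crypto_unregister_alg(&my_alg);
 *	}
 */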
static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	WARN_ON(!alg->cra_destroy && refcount_read(&alg->cra_refcnt) != 1);

	list_add(&alg->cra_list, &list);
	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	INIT_WORK(&tmpl->free_work, crypto_destroy_instance_workfn);

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);

	flush_work(&tmpl->free_work);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}
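
/*
 * Look up a template by name, autoloading the module that provides it via
 * its "crypto-<name>" module alias if it is not already registered.
 */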
struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	u32 fips_internal = 0;
	LIST_HEAD(algs_to_put);
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
	inst->alg.cra_destroy = crypto_destroy_instance;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		fips_internal |= spawn->alg->cra_flags;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);

	larval = __crypto_register_alg(&inst->alg, &algs_to_put);
	if (IS_ERR(larval))
		goto unlock;
	else if (larval)
		larval->test_started = true;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	if (larval)
		crypto_schedule_test(larval);
	else
		crypto_remove_final(&algs_to_put);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);
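
/*
 * A spawn records an instance's dependency on an underlying algorithm.
 * Grabbing it takes a reference on the algorithm and links the spawn into
 * alg->cra_users, so crypto_remove_spawns() can tear the instance down if
 * the algorithm is later unregistered.
 */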
int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend,
			      type | CRYPTO_ALG_FIPS_INTERNAL, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as. E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	*mask_ret = crypto_algt_inherited_mask(algt);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
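
/*
 * Build the instance's names from the template name and the inner
 * algorithm, e.g. wrapping "cbc" around "aes" yields cra_name
 * "cbc(aes)", and wrapping "cbc" around "aes-generic" yields
 * cra_driver_name "cbc(aes-generic)".
 */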
int __crypto_inst_setname(struct crypto_instance *inst, const char *name,
			  const char *driver, struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     driver, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(__crypto_inst_setname);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);
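
/*
 * Returns -EINPROGRESS if the request was queued, -EBUSY if the queue was
 * full but the request was placed on the backlog (the caller set
 * CRYPTO_TFM_REQ_MAY_BACKLOG), or -ENOSPC if the queue is full and
 * backlogging was not requested.
 */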
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	if (unlikely(queue->qlen >= queue->max_qlen))
		queue->backlog = queue->backlog->prev;

	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del_init(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}
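
/*
 * Increment a size-byte big-endian integer in place, as used for block
 * cipher counters (e.g. CTR mode IVs). The fast path steps 32 bits at a
 * time when the buffer is suitably aligned; crypto_inc_byte() finishes
 * any remainder one byte at a time.
 */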
void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
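
/*
 * Worst-case transform context size: cra_ctxsize plus whatever extra
 * padding may be needed to raise the context from the allocator's
 * crypto_tfm_ctx_alignment() guarantee up to cra_alignmask alignment.
 */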
unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

static void __init crypto_start_tests(void)
{
	if (!IS_BUILTIN(CONFIG_CRYPTO_ALGAPI))
		return;

	if (!IS_ENABLED(CONFIG_CRYPTO_SELFTESTS))
		return;

	set_crypto_boot_test_finished();

	for (;;) {
		struct crypto_larval *larval = NULL;
		struct crypto_alg *q;

		down_write(&crypto_alg_sem);

		list_for_each_entry(q, &crypto_alg_list, cra_list) {
			struct crypto_larval *l;

			if (!crypto_is_larval(q))
				continue;

			l = (void *)q;

			if (!crypto_is_test_larval(l))
				continue;

			if (l->test_started)
				continue;

			l->test_started = true;
			larval = l;
			break;
		}

		up_write(&crypto_alg_sem);

		if (!larval)
			break;

		crypto_schedule_test(larval);
	}
}

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	crypto_start_tests();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

/*
 * We run this at late_initcall so that all the built-in algorithms
 * have had a chance to register themselves first.
 */
late_initcall(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");