GitHub Repository: awilliam/linux-vfio
Path: blob/master/crypto/algapi.c
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <[email protected]>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static void crypto_remove_final(struct list_head *list);

static LIST_HEAD(crypto_template_list);

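/*
 * Called when instantiating the algorithm named @name has failed: if a
 * larval is still registered under that name, signal its completion so
 * that waiters wake up and see the lookup fail instead of blocking.
 */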
void crypto_larval_error(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_alg_lookup(name, type, mask);

	if (alg) {
		if (crypto_is_larval(alg)) {
			struct crypto_larval *larval = (void *)alg;
			complete_all(&larval->completion);
		}
		crypto_mod_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_larval_error);

static inline int crypto_set_driver_name(struct crypto_alg *alg)
{
	static const char suffix[] = "-generic";
	char *driver_name = alg->cra_driver_name;
	int len;

	if (*driver_name)
		return 0;

	len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
	if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(driver_name + len, suffix, sizeof(suffix));
	return 0;
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	if (alg->cra_blocksize > PAGE_SIZE / 8)
		return -EINVAL;

	if (alg->cra_priority < 0)
		return -EINVAL;

	return crypto_set_driver_name(alg);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	tmpl->free(inst);
	crypto_tmpl_put(tmpl);
}

static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	if (list_empty(stack))
		return NULL;

	spawn = list_first_entry(stack, struct crypto_spawn, list);
	n = list_entry(spawn->list.next, struct crypto_spawn, list);

	if (spawn->alg && &n->list != stack && !n->alg)
		n->alg = (n->list.next == stack) ? alg :
			 &list_entry(n->list.next, struct crypto_spawn,
				     list)->inst->alg;

	list_move(&spawn->list, secondary_spawns);

	return &n->list == stack ? top : &n->inst->alg.cra_users;
}

static void crypto_remove_spawn(struct crypto_spawn *spawn,
				struct list_head *list)
{
	struct crypto_instance *inst = spawn->inst;
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
	if (hlist_unhashed(&inst->list))
		return;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, &inst->alg);
	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

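/*
 * Remove the spawns (users) hanging off @alg so that the instances built
 * on top of it can be torn down.  Affected instances are marked dead and
 * collected on @list; @nalg, when non-NULL, is the algorithm that is
 * about to replace @alg.
 */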
static void crypto_remove_spawns(struct crypto_alg *alg,
				 struct list_head *list,
				 struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			BUG_ON(&inst->alg == alg);

			list_move(&spawn->list, &stack);

			if (&inst->alg == nalg)
				break;

			spawn->alg = NULL;
			spawns = &inst->alg.cra_users;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (spawn->alg)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else
			crypto_remove_spawn(spawn, list);
	}
}

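/*
 * Link @alg into crypto_alg_list together with a testing larval that
 * shadows it until the self-tests have run.  Called with crypto_alg_sem
 * held for writing; returns the larval or an ERR_PTR on failure.
 */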
static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	atomic_set(&alg->cra_refcnt, 1);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	atomic_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}

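/*
 * Invoked once the self-test for @name has finished (err holds the test
 * result).  On success the adult algorithm is marked CRYPTO_ALG_TESTED,
 * waiting larvals with a matching name are resolved, and same-name
 * algorithms that it supersedes have their spawns removed.
 */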
void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	printk(KERN_ERR "alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;
			if (!crypto_mod_get(alg))
				continue;

			larval->adult = alg;
			complete_all(&larval->completion);
			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

static void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}

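/*
 * Kick off testing for a freshly registered algorithm and sleep until its
 * larval is resolved.  If no test manager is listening, the algorithm is
 * marked tested immediately.  The larval is killed before returning.
 */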
static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_interruptible(&larval->completion);
	WARN_ON(err);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg);
	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

int crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (ret)
		return ret;

	BUG_ON(atomic_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	crypto_notify(CRYPTO_MSG_TMPL_REGISTER, tmpl);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *p, *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, p, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);
		BUG_ON(err);
	}

	crypto_notify(CRYPTO_MSG_TMPL_UNREGISTER, tmpl);

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, p, n, list, list) {
		BUG_ON(atomic_read(&inst->alg.cra_refcnt) != 1);
		tmpl->free(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name), name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

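/*
 * Register an instance created by @tmpl: validate it, link it into the
 * algorithm list under crypto_alg_sem, attach it to the template's
 * instance list, and wait for its self-test to complete.
 */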
int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		goto err;

	inst->alg.cra_module = tmpl->module;

	down_write(&crypto_alg_sem);

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

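/*
 * Record that instance @inst uses @alg: the spawn is added to
 * alg->cra_users so the instance can be torn down if @alg goes away.
 * Fails with -EAGAIN if @alg is already dying.
 */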
int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask)
{
	int err = -EAGAIN;

	spawn->inst = inst;
	spawn->mask = mask;

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		err = 0;
	}
	up_write(&crypto_alg_sem);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);

int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend)
{
	int err = -EINVAL;

	if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
		goto out;

	spawn->frontend = frontend;
	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn2);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg)
		return;

	down_write(&crypto_alg_sem);
	list_del(&spawn->list);
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_alg *alg2;

	down_read(&crypto_alg_sem);
	alg = spawn->alg;
	alg2 = alg;
	if (alg2)
		alg2 = crypto_mod_get(alg2);
	up_read(&crypto_alg_sem);

	if (!alg2) {
		if (alg)
			crypto_shoot_alg(alg);
		return ERR_PTR(-EAGAIN);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask)
{
	const char *name;
	int err;

	name = crypto_attr_alg_name(rta);
	err = PTR_ERR(name);
	if (IS_ERR(name))
		return ERR_PTR(err);

	return crypto_find_alg(name, frontend, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_attr_alg2);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head)
{
	struct crypto_instance *inst;
	char *p;
	int err;

	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
		    GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	return p;

err_free_inst:
	kfree(p);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance2);

struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg)
{
	struct crypto_instance *inst;
	struct crypto_spawn *spawn;
	int err;

	inst = crypto_alloc_instance2(name, alg, 0);
	if (IS_ERR(inst))
		goto out;

	spawn = crypto_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);

	if (err)
		goto err_free_inst;

	return inst;

err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);

out:
	return inst;
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

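/*
 * Add @request to @queue.  Returns -EINPROGRESS normally; once the queue
 * is full, requests that allow backlogging are still queued but -EBUSY is
 * returned, while requests that do not are rejected with -EBUSY.
 */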
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		err = -EBUSY;
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			goto out;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return (char *)list_entry(request, struct crypto_async_request, list) -
	       offset;
}
EXPORT_SYMBOL_GPL(__crypto_dequeue_request);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	return __crypto_dequeue_request(queue, 0);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);

int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
{
	struct crypto_async_request *req;

	list_for_each_entry(req, &queue->list, list) {
		if (req->tfm == tfm)
			return 1;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

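/*
 * Increment the big-endian counter stored in the @size bytes at @a,
 * working 32 bits at a time where possible and falling back to a
 * byte-wise carry for the remainder.
 */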
void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	for (; size >= 4; size -= 4) {
		c = be32_to_cpu(*--b) + 1;
		*b = cpu_to_be32(c);
		if (c)
			return;
	}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);

static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size)
{
	for (; size; size--)
		*a++ ^= *b++;
}

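/*
 * XOR @size bytes of @src into @dst: the bulk is processed a word at a
 * time (the word loop accesses both buffers as u32), with the tail
 * handled byte by byte.
 */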
void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
	u32 *a = (u32 *)dst;
	u32 *b = (u32 *)src;

	for (; size >= 4; size -= 4)
		*a++ ^= *b++;

	crypto_xor_byte((u8 *)a, (u8 *)b, size);
}
EXPORT_SYMBOL_GPL(crypto_xor);

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");