GitHub Repository: awilliam/linux-vfio
Path: blob/master/arch/s390/crypto/des_s390.c
/*
 * Cryptographic API.
 *
 * s390 implementation of the DES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2003,2011
 * Author(s): Thomas Spatzier
 *            Jan Glauber ([email protected])
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <linux/init.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/des.h>

#include "crypt_s390.h"

#define DES3_KEY_SIZE   (3 * DES_KEY_SIZE)

static u8 *ctrblk;

struct s390_des_ctx {
        u8 iv[DES_BLOCK_SIZE];
        u8 key[DES3_KEY_SIZE];
};

static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
                      unsigned int key_len)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        u32 tmp[DES_EXPKEY_WORDS];

        /* check for weak keys */
        if (!des_ekey(tmp, key) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                *flags |= CRYPTO_TFM_RES_WEAK_KEY;
                return -EINVAL;
        }

        memcpy(ctx->key, key, key_len);
        return 0;
}

static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

        crypt_s390_km(KM_DEA_ENCRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
}

static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

        crypt_s390_km(KM_DEA_DECRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
}

static struct crypto_alg des_alg = {
        .cra_name = "des",
        .cra_driver_name = "des-s390",
        .cra_priority = CRYPT_S390_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(des_alg.cra_list),
        .cra_u = {
                .cipher = {
                        .cia_min_keysize = DES_KEY_SIZE,
                        .cia_max_keysize = DES_KEY_SIZE,
                        .cia_setkey = des_setkey,
                        .cia_encrypt = des_encrypt,
                        .cia_decrypt = des_decrypt,
                }
        }
};

static int ecb_desall_crypt(struct blkcipher_desc *desc, long func,
                            u8 *key, struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes;

        while ((nbytes = walk->nbytes)) {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = crypt_s390_km(func, key, out, in, n);
                BUG_ON((ret < 0) || (ret != n));

                nbytes &= DES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }

        return ret;
}

static int cbc_desall_crypt(struct blkcipher_desc *desc, long func,
                            u8 *iv, struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;

        if (!nbytes)
                goto out;

        memcpy(iv, walk->iv, DES_BLOCK_SIZE);
        do {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = crypt_s390_kmc(func, iv, out, in, n);
                BUG_ON((ret < 0) || (ret != n));

                nbytes &= DES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
        memcpy(walk->iv, iv, DES_BLOCK_SIZE);

out:
        return ret;
}

static int ecb_des_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_desall_crypt(desc, KM_DEA_ENCRYPT, ctx->key, &walk);
}

static int ecb_des_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_desall_crypt(desc, KM_DEA_DECRYPT, ctx->key, &walk);
}

static struct crypto_alg ecb_des_alg = {
        .cra_name = "ecb(des)",
        .cra_driver_name = "ecb-des-s390",
        .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(ecb_des_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .setkey = des_setkey,
                        .encrypt = ecb_des_encrypt,
                        .decrypt = ecb_des_decrypt,
                }
        }
};

static int cbc_des_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_desall_crypt(desc, KMC_DEA_ENCRYPT, ctx->iv, &walk);
}

static int cbc_des_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_desall_crypt(desc, KMC_DEA_DECRYPT, ctx->iv, &walk);
}

static struct crypto_alg cbc_des_alg = {
        .cra_name = "cbc(des)",
        .cra_driver_name = "cbc-des-s390",
        .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(cbc_des_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = des_setkey,
                        .encrypt = cbc_des_encrypt,
                        .decrypt = cbc_des_decrypt,
                }
        }
};

/*
 * RFC2451:
 *
 *   For DES-EDE3, there is no known need to reject weak or
 *   complementation keys.  Any weakness is obviated by the use of
 *   multiple keys.
 *
 *   However, if the first two or last two independent 64-bit keys are
 *   equal (k1 == k2 or k2 == k3), then the DES3 operation is simply the
 *   same as DES.  Implementers MUST reject keys that exhibit this
 *   property.
 *
 */
static int des3_setkey(struct crypto_tfm *tfm, const u8 *key,
                       unsigned int key_len)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;

        if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
            memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
                   DES_KEY_SIZE)) &&
            (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                *flags |= CRYPTO_TFM_RES_WEAK_KEY;
                return -EINVAL;
        }
        memcpy(ctx->key, key, key_len);
        return 0;
}

static void des3_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

        crypt_s390_km(KM_TDEA_192_ENCRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
}

static void des3_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

        crypt_s390_km(KM_TDEA_192_DECRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
}

static struct crypto_alg des3_alg = {
        .cra_name = "des3_ede",
        .cra_driver_name = "des3_ede-s390",
        .cra_priority = CRYPT_S390_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(des3_alg.cra_list),
        .cra_u = {
                .cipher = {
                        .cia_min_keysize = DES3_KEY_SIZE,
                        .cia_max_keysize = DES3_KEY_SIZE,
                        .cia_setkey = des3_setkey,
                        .cia_encrypt = des3_encrypt,
                        .cia_decrypt = des3_decrypt,
                }
        }
};

static int ecb_des3_encrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_desall_crypt(desc, KM_TDEA_192_ENCRYPT, ctx->key, &walk);
}

static int ecb_des3_decrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_desall_crypt(desc, KM_TDEA_192_DECRYPT, ctx->key, &walk);
}

static struct crypto_alg ecb_des3_alg = {
        .cra_name = "ecb(des3_ede)",
        .cra_driver_name = "ecb-des3_ede-s390",
        .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(ecb_des3_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES3_KEY_SIZE,
                        .max_keysize = DES3_KEY_SIZE,
                        .setkey = des3_setkey,
                        .encrypt = ecb_des3_encrypt,
                        .decrypt = ecb_des3_decrypt,
                }
        }
};

static int cbc_des3_encrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_desall_crypt(desc, KMC_TDEA_192_ENCRYPT, ctx->iv, &walk);
}

static int cbc_des3_decrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_desall_crypt(desc, KMC_TDEA_192_DECRYPT, ctx->iv, &walk);
}

static struct crypto_alg cbc_des3_alg = {
        .cra_name = "cbc(des3_ede)",
        .cra_driver_name = "cbc-des3_ede-s390",
        .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = DES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(cbc_des3_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES3_KEY_SIZE,
                        .max_keysize = DES3_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = des3_setkey,
                        .encrypt = cbc_des3_encrypt,
                        .decrypt = cbc_des3_decrypt,
                }
        }
};

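/*
 * CTR mode helper: ctrblk (a single page allocated in des_s390_init) is
 * filled with consecutive counter values so that one CPACF KMCTR call can
 * process up to PAGE_SIZE bytes per iteration.  The last counter value
 * used is written back to walk->iv so that chained requests continue the
 * counter sequence.
 */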
static int ctr_desall_crypt(struct blkcipher_desc *desc, long func,
                            struct s390_des_ctx *ctx, struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt_block(desc, walk, DES_BLOCK_SIZE);
        unsigned int i, n, nbytes;
        u8 buf[DES_BLOCK_SIZE];
        u8 *out, *in;

        memcpy(ctrblk, walk->iv, DES_BLOCK_SIZE);
        while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                while (nbytes >= DES_BLOCK_SIZE) {
                        /* align to block size, max. PAGE_SIZE */
                        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
                                nbytes & ~(DES_BLOCK_SIZE - 1);
                        for (i = DES_BLOCK_SIZE; i < n; i += DES_BLOCK_SIZE) {
                                memcpy(ctrblk + i, ctrblk + i - DES_BLOCK_SIZE,
                                       DES_BLOCK_SIZE);
                                crypto_inc(ctrblk + i, DES_BLOCK_SIZE);
                        }
                        ret = crypt_s390_kmctr(func, ctx->key, out, in, n, ctrblk);
                        BUG_ON((ret < 0) || (ret != n));
                        if (n > DES_BLOCK_SIZE)
                                memcpy(ctrblk, ctrblk + n - DES_BLOCK_SIZE,
                                       DES_BLOCK_SIZE);
                        crypto_inc(ctrblk, DES_BLOCK_SIZE);
                        out += n;
                        in += n;
                        nbytes -= n;
                }
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }

        /* final block may be < DES_BLOCK_SIZE, copy only nbytes */
        if (nbytes) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                ret = crypt_s390_kmctr(func, ctx->key, buf, in,
                                       DES_BLOCK_SIZE, ctrblk);
                BUG_ON(ret < 0 || ret != DES_BLOCK_SIZE);
                memcpy(out, buf, nbytes);
                crypto_inc(ctrblk, DES_BLOCK_SIZE);
                ret = blkcipher_walk_done(desc, walk, 0);
        }
        memcpy(walk->iv, ctrblk, DES_BLOCK_SIZE);
        return ret;
}

static int ctr_des_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_desall_crypt(desc, KMCTR_DEA_ENCRYPT, ctx, &walk);
}

static int ctr_des_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_desall_crypt(desc, KMCTR_DEA_DECRYPT, ctx, &walk);
}

static struct crypto_alg ctr_des_alg = {
        .cra_name = "ctr(des)",
        .cra_driver_name = "ctr-des-s390",
        .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(ctr_des_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = des_setkey,
                        .encrypt = ctr_des_encrypt,
                        .decrypt = ctr_des_decrypt,
                }
        }
};

static int ctr_des3_encrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_desall_crypt(desc, KMCTR_TDEA_192_ENCRYPT, ctx, &walk);
}

static int ctr_des3_decrypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes)
{
        struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_desall_crypt(desc, KMCTR_TDEA_192_DECRYPT, ctx, &walk);
}

static struct crypto_alg ctr_des3_alg = {
        .cra_name = "ctr(des3_ede)",
        .cra_driver_name = "ctr-des3_ede-s390",
        .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct s390_des_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(ctr_des3_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize = DES3_KEY_SIZE,
                        .max_keysize = DES3_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = des3_setkey,
                        .encrypt = ctr_des3_encrypt,
                        .decrypt = ctr_des3_decrypt,
                }
        }
};

static int __init des_s390_init(void)
{
        int ret;

        if (!crypt_s390_func_available(KM_DEA_ENCRYPT, CRYPT_S390_MSA) ||
            !crypt_s390_func_available(KM_TDEA_192_ENCRYPT, CRYPT_S390_MSA))
                return -EOPNOTSUPP;

        ret = crypto_register_alg(&des_alg);
        if (ret)
                goto des_err;
        ret = crypto_register_alg(&ecb_des_alg);
        if (ret)
                goto ecb_des_err;
        ret = crypto_register_alg(&cbc_des_alg);
        if (ret)
                goto cbc_des_err;
        ret = crypto_register_alg(&des3_alg);
        if (ret)
                goto des3_err;
        ret = crypto_register_alg(&ecb_des3_alg);
        if (ret)
                goto ecb_des3_err;
        ret = crypto_register_alg(&cbc_des3_alg);
        if (ret)
                goto cbc_des3_err;

        if (crypt_s390_func_available(KMCTR_DEA_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
            crypt_s390_func_available(KMCTR_TDEA_192_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
                ret = crypto_register_alg(&ctr_des_alg);
                if (ret)
                        goto ctr_des_err;
                ret = crypto_register_alg(&ctr_des3_alg);
                if (ret)
                        goto ctr_des3_err;
                ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
                if (!ctrblk) {
                        ret = -ENOMEM;
                        goto ctr_mem_err;
                }
        }
out:
        return ret;

ctr_mem_err:
        crypto_unregister_alg(&ctr_des3_alg);
ctr_des3_err:
        crypto_unregister_alg(&ctr_des_alg);
ctr_des_err:
        crypto_unregister_alg(&cbc_des3_alg);
cbc_des3_err:
        crypto_unregister_alg(&ecb_des3_alg);
ecb_des3_err:
        crypto_unregister_alg(&des3_alg);
des3_err:
        crypto_unregister_alg(&cbc_des_alg);
cbc_des_err:
        crypto_unregister_alg(&ecb_des_alg);
ecb_des_err:
        crypto_unregister_alg(&des_alg);
des_err:
        goto out;
}

static void __exit des_s390_exit(void)
{
        if (ctrblk) {
                crypto_unregister_alg(&ctr_des_alg);
                crypto_unregister_alg(&ctr_des3_alg);
                free_page((unsigned long) ctrblk);
        }
        crypto_unregister_alg(&cbc_des3_alg);
        crypto_unregister_alg(&ecb_des3_alg);
        crypto_unregister_alg(&des3_alg);
        crypto_unregister_alg(&cbc_des_alg);
        crypto_unregister_alg(&ecb_des_alg);
        crypto_unregister_alg(&des_alg);
}

module_init(des_s390_init);
module_exit(des_s390_exit);

MODULE_ALIAS("des");
MODULE_ALIAS("des3_ede");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("DES & Triple DES EDE Cipher Algorithms");
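
The algorithms registered above are not called directly; other kernel code reaches them through the crypto API by name, and the s390 driver wins selection via its higher cra_priority when the CPACF instructions are available. The following is a minimal, hypothetical sketch of how a module of the same kernel generation might exercise "cbc(des3_ede)" through the synchronous blkcipher interface; the function name des3_cbc_demo and the buffer sizes are illustrative only, not part of this file.

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <crypto/des.h>

static int des3_cbc_demo(void)
{
        struct crypto_blkcipher *tfm;
        struct blkcipher_desc desc;
        struct scatterlist sg;
        u8 key[DES3_EDE_KEY_SIZE];      /* 24 bytes: k1 || k2 || k3 */
        u8 iv[DES_BLOCK_SIZE];
        u8 buf[4 * DES_BLOCK_SIZE];     /* length must be a block-size multiple */
        int ret;

        get_random_bytes(key, sizeof(key));
        get_random_bytes(iv, sizeof(iv));
        memset(buf, 0, sizeof(buf));

        /* By priority this should resolve to cbc-des3_ede-s390 when CPACF is present. */
        tfm = crypto_alloc_blkcipher("cbc(des3_ede)", 0, CRYPTO_ALG_ASYNC);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_blkcipher_setkey(tfm, key, sizeof(key));
        if (ret)
                goto out;

        crypto_blkcipher_set_iv(tfm, iv, sizeof(iv));

        desc.tfm = tfm;
        desc.flags = 0;

        /* Encrypt buf in place. */
        sg_init_one(&sg, buf, sizeof(buf));
        ret = crypto_blkcipher_encrypt(&desc, &sg, &sg, sizeof(buf));
out:
        crypto_free_blkcipher(tfm);
        return ret;
}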