Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
torvalds
GitHub Repository: torvalds/linux
Path: blob/master/drivers/crypto/allwinner/sun8i-ce/sun8i-ce-core.c
26296 views
1
// SPDX-License-Identifier: GPL-2.0
2
/*
3
* sun8i-ce-core.c - hardware cryptographic offloader for
4
* Allwinner H3/A64/H5/H2+/H6/R40 SoC
5
*
6
* Copyright (C) 2015-2019 Corentin Labbe <[email protected]>
7
*
8
* Core file which registers crypto algorithms supported by the CryptoEngine.
9
*
10
* You could find a link for the datasheet in Documentation/arch/arm/sunxi.rst
11
*/
12
13
#include <crypto/engine.h>
14
#include <crypto/internal/hash.h>
15
#include <crypto/internal/rng.h>
16
#include <crypto/internal/skcipher.h>
17
#include <linux/clk.h>
18
#include <linux/delay.h>
19
#include <linux/dma-mapping.h>
20
#include <linux/err.h>
21
#include <linux/interrupt.h>
22
#include <linux/io.h>
23
#include <linux/irq.h>
24
#include <linux/kernel.h>
25
#include <linux/module.h>
26
#include <linux/of.h>
27
#include <linux/platform_device.h>
28
#include <linux/pm_runtime.h>
29
#include <linux/reset.h>
30
31
#include "sun8i-ce.h"
32
33
/*
34
* mod clock is lower on H3 than other SoC due to some DMA timeout occurring
35
* with high value.
36
* If you want to tune mod clock, loading driver and passing selftest is
37
* insufficient, you need to test with some LUKS test (mount and write to it)
38
*/
39
/*
 * H3 variant: "mod" clock forced down to 50 MHz (see the note above about
 * DMA timeouts at higher rates), errors reported through a single global
 * status register (ESR_H3), no TRNG.
 * ce_clks entries are { name, rate to set (0 = leave as-is), max advised rate }.
 */
static const struct ce_variant ce_h3_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 50000000, 0 },
	},
	.esr = ESR_H3,
	.prng = CE_ALG_PRNG,
	.trng = CE_ID_NOTSUPP,
};
55
56
/*
 * H5 variant: no SHA384/SHA512 support, per-flow error nibbles (ESR_H5),
 * no TRNG. "mod" clock runs at the usual 300 MHz.
 */
static const struct ce_variant ce_h5_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ID_NOTSUPP, CE_ID_NOTSUPP
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
	},
	.esr = ESR_H5,
	.prng = CE_ALG_PRNG,
	.trng = CE_ID_NOTSUPP,
};
72
73
/*
 * H6 variant: full hash set, v2 PRNG/TRNG, and different length-field
 * semantics than older SoCs (cipher/PRNG/TRNG lengths in bytes, hash
 * length in bits). Needs an extra "ram" clock.
 */
static const struct ce_variant ce_h6_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.cipher_t_dlen_in_bytes = true,
	.hash_t_dlen_in_bits = true,
	.prng_t_dlen_in_bytes = true,
	.trng_t_dlen_in_bytes = true,
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
		{ "ram", 0, 400000000 },
	},
	.esr = ESR_H6,
	.prng = CE_ALG_PRNG_V2,
	.trng = CE_ALG_TRNG_V2,
};
94
95
/*
 * H616 variant: like H6 but with a dedicated "trng" clock and
 * needs_word_addresses set (presumably DMA addresses are programmed in
 * word units rather than bytes -- see desc_addr_val() in sun8i-ce.h).
 */
static const struct ce_variant ce_h616_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.cipher_t_dlen_in_bytes = true,
	.hash_t_dlen_in_bits = true,
	.prng_t_dlen_in_bytes = true,
	.trng_t_dlen_in_bytes = true,
	.needs_word_addresses = true,
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
		{ "ram", 0, 400000000 },
		{ "trng", 0, 0 },
	},
	.esr = ESR_H6,
	.prng = CE_ALG_PRNG_V2,
	.trng = CE_ALG_TRNG_V2,
};
118
119
/*
 * A64 variant: no SHA384/SHA512, no TRNG, per-flow error nibbles (ESR_A64).
 */
static const struct ce_variant ce_a64_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ID_NOTSUPP, CE_ID_NOTSUPP
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
	},
	.esr = ESR_A64,
	.prng = CE_ALG_PRNG,
	.trng = CE_ID_NOTSUPP,
};
135
136
/*
 * D1 variant: full hash set, v1 PRNG and TRNG, extra "ram" and "trng"
 * clocks, per-flow error reporting (ESR_D1).
 */
static const struct ce_variant ce_d1_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
		{ "ram", 0, 400000000 },
		{ "trng", 0, 0 },
	},
	.esr = ESR_D1,
	.prng = CE_ALG_PRNG,
	.trng = CE_ALG_TRNG,
};
154
155
/*
 * R40 variant: no SHA384/SHA512, no TRNG, per-flow error nibbles (ESR_R40).
 */
static const struct ce_variant ce_r40_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ID_NOTSUPP, CE_ID_NOTSUPP
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
	},
	.esr = ESR_R40,
	.prng = CE_ALG_PRNG,
	.trng = CE_ID_NOTSUPP,
};
171
172
/*
173
* sun8i_ce_get_engine_number() get the next channel slot
174
* This is a simple round-robin way of getting the next channel
175
* The flow 3 is reserve for xRNG operations
176
*/
177
int sun8i_ce_get_engine_number(struct sun8i_ce_dev *ce)
{
	/*
	 * Round-robin over flows 0..MAXFLOW-2; the last flow is never
	 * returned here since it is reserved for xRNG operations.
	 */
	return atomic_inc_return(&ce->flow) % (MAXFLOW - 1);
}
181
182
/*
 * sun8i_ce_run_task() - submit the prepared task descriptor of @flow to the
 * hardware and wait for its completion interrupt.
 * @ce:   the CryptoEngine device
 * @flow: channel whose task list (chanlist[flow].tl) is already filled in
 * @name: algorithm name, used only in error messages
 *
 * Returns 0 on success, -EFAULT on DMA timeout or when the hardware reports
 * an error in the status register. The register programming sequence below
 * (ICR -> TDQ -> wmb -> TLR) is order-sensitive; mlock serializes access to
 * the global task-queue registers across flows.
 */
int sun8i_ce_run_task(struct sun8i_ce_dev *ce, int flow, const char *name)
{
	u32 v;
	int err = 0;
	struct ce_task *cet = ce->chanlist[flow].tl;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	ce->chanlist[flow].stat_req++;
#endif

	mutex_lock(&ce->mlock);

	/* Enable the completion interrupt for this flow. */
	v = readl(ce->base + CE_ICR);
	v |= 1 << flow;
	writel(v, ce->base + CE_ICR);

	reinit_completion(&ce->chanlist[flow].complete);
	/* Point the task descriptor queue at this flow's descriptor. */
	writel(desc_addr_val(ce, ce->chanlist[flow].t_phy), ce->base + CE_TDQ);

	/* status is set to 1 by the IRQ handler on completion. */
	ce->chanlist[flow].status = 0;
	/* Be sure all data is written before enabling the task */
	wmb();

	/* Only H6 needs to write a part of t_common_ctl along with "1", but since it is ignored
	 * on older SoCs, we have no reason to complicate things.
	 */
	v = 1 | ((le32_to_cpu(ce->chanlist[flow].tl->t_common_ctl) & 0x7F) << 8);
	writel(v, ce->base + CE_TLR);
	mutex_unlock(&ce->mlock);

	wait_for_completion_interruptible_timeout(&ce->chanlist[flow].complete,
			msecs_to_jiffies(ce->chanlist[flow].timeout));

	/* status still 0 means the IRQ never fired within the timeout. */
	if (ce->chanlist[flow].status == 0) {
		dev_err(ce->dev, "DMA timeout for %s (tm=%d) on flow %d\n", name,
			ce->chanlist[flow].timeout, flow);
		err = -EFAULT;
	}
	/* No need to lock for this read, the channel is locked so
	 * nothing could modify the error value for this channel
	 */
	v = readl(ce->base + CE_ESR);
	switch (ce->variant->esr) {
	case ESR_H3:
		/* Sadly, the error bit is not per flow */
		if (v) {
			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
			err = -EFAULT;
			print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
				       cet, sizeof(struct ce_task), false);
		}
		if (v & CE_ERR_ALGO_NOTSUP)
			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
		if (v & CE_ERR_DATALEN)
			dev_err(ce->dev, "CE ERROR: data length error\n");
		if (v & CE_ERR_KEYSRAM)
			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
		break;
	case ESR_A64:
	case ESR_D1:
	case ESR_H5:
	case ESR_R40:
		/* These SoCs report one 4-bit error field per flow. */
		v >>= (flow * 4);
		v &= 0xF;
		if (v) {
			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
			err = -EFAULT;
			print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
				       cet, sizeof(struct ce_task), false);
		}
		if (v & CE_ERR_ALGO_NOTSUP)
			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
		if (v & CE_ERR_DATALEN)
			dev_err(ce->dev, "CE ERROR: data length error\n");
		if (v & CE_ERR_KEYSRAM)
			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
		break;
	case ESR_H6:
		/* H6-class SoCs use a wider, 8-bit error field per flow. */
		v >>= (flow * 8);
		v &= 0xFF;
		if (v) {
			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
			err = -EFAULT;
			print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
				       cet, sizeof(struct ce_task), false);
		}
		if (v & CE_ERR_ALGO_NOTSUP)
			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
		if (v & CE_ERR_DATALEN)
			dev_err(ce->dev, "CE ERROR: data length error\n");
		if (v & CE_ERR_KEYSRAM)
			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
		if (v & CE_ERR_ADDR_INVALID)
			dev_err(ce->dev, "CE ERROR: address invalid\n");
		if (v & CE_ERR_KEYLADDER)
			dev_err(ce->dev, "CE ERROR: key ladder configuration error\n");
		break;
	}

	return err;
}
283
284
static irqreturn_t ce_irq_handler(int irq, void *data)
285
{
286
struct sun8i_ce_dev *ce = (struct sun8i_ce_dev *)data;
287
int flow = 0;
288
u32 p;
289
290
p = readl(ce->base + CE_ISR);
291
for (flow = 0; flow < MAXFLOW; flow++) {
292
if (p & (BIT(flow))) {
293
writel(BIT(flow), ce->base + CE_ISR);
294
ce->chanlist[flow].status = 1;
295
complete(&ce->chanlist[flow].complete);
296
}
297
}
298
299
return IRQ_HANDLED;
300
}
301
302
/*
 * Table of every algorithm this driver can offer. Entries are registered by
 * sun8i_ce_register_algs(), which skips those the runtime variant does not
 * support (entry->ce is set to NULL for skipped/failed entries).
 */
static struct sun8i_ce_alg_template ce_algs[] = {
	/* AES-CBC */
	{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_AES,
	.ce_blockmode = CE_ID_OP_CBC,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "cbc-aes-sun8i-ce",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= sun8i_ce_aes_setkey,
		.encrypt	= sun8i_ce_skencrypt,
		.decrypt	= sun8i_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ce_cipher_do_one,
	},
	},
	/* AES-ECB (no IV) */
	{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_AES,
	.ce_blockmode = CE_ID_OP_ECB,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-sun8i-ce",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= sun8i_ce_aes_setkey,
		.encrypt	= sun8i_ce_skencrypt,
		.decrypt	= sun8i_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ce_cipher_do_one,
	},
	},
	/* 3DES-CBC */
	{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_DES3,
	.ce_blockmode = CE_ID_OP_CBC,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "cbc-des3-sun8i-ce",
			.cra_priority = 400,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.setkey		= sun8i_ce_des3_setkey,
		.encrypt	= sun8i_ce_skencrypt,
		.decrypt	= sun8i_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ce_cipher_do_one,
	},
	},
	/* 3DES-ECB (no IV) */
	{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_DES3,
	.ce_blockmode = CE_ID_OP_ECB,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "ecb-des3-sun8i-ce",
			.cra_priority = 400,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
		.setkey		= sun8i_ce_des3_setkey,
		.encrypt	= sun8i_ce_skencrypt,
		.decrypt	= sun8i_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ce_cipher_do_one,
	},
	},
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_HASH
	/* MD5 */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_MD5,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct md5_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "md5-sun8i-ce",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},

	},
	/* SHA1 */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA1,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct sha1_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-sun8i-ce",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
	},
	/* SHA224 (SHA256 state size) */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA224,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-sun8i-ce",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
	},
	/* SHA256 */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA256,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-sun8i-ce",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
	},
	/* SHA384 (SHA512 state size); skipped at runtime on variants without it */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA384,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct sha512_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "sha384-sun8i-ce",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
	},
	/* SHA512; skipped at runtime on variants without it */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA512,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct sha512_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "sha512-sun8i-ce",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
	},
#endif
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_PRNG
	/* Pseudo RNG exposed as "stdrng" */
	{
	.type = CRYPTO_ALG_TYPE_RNG,
	.alg.rng = {
		.base = {
			.cra_name		= "stdrng",
			.cra_driver_name	= "sun8i-ce-prng",
			.cra_priority		= 300,
			.cra_ctxsize		= sizeof(struct sun8i_ce_rng_tfm_ctx),
			.cra_module		= THIS_MODULE,
			.cra_init		= sun8i_ce_prng_init,
			.cra_exit		= sun8i_ce_prng_exit,
		},
		.generate               = sun8i_ce_prng_generate,
		.seed                   = sun8i_ce_prng_seed,
		.seedsize               = PRNG_SEED_SIZE,
	}
	},
#endif
};
636
637
/*
 * debugfs "stats" file: dump per-channel request counts and, for each
 * registered algorithm, request/fallback statistics. Most counters are only
 * maintained when CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG is set; without it the
 * per-channel count is printed as 0 (note the #ifdef inside the seq_printf
 * argument list below).
 */
static int sun8i_ce_debugfs_show(struct seq_file *seq, void *v)
{
	struct sun8i_ce_dev *ce __maybe_unused = seq->private;
	unsigned int i;

	for (i = 0; i < MAXFLOW; i++)
		seq_printf(seq, "Channel %d: nreq %lu\n", i,
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
			   ce->chanlist[i].stat_req);
#else
			   0ul);
#endif

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		/* ce == NULL means the entry was never registered; skip it. */
		if (!ce_algs[i].ce)
			continue;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ce_algs[i].alg.skcipher.base.base.cra_driver_name,
				   ce_algs[i].alg.skcipher.base.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_fb);
			seq_printf(seq, "\tLast fallback is: %s\n",
				   ce_algs[i].fbname);
			seq_printf(seq, "\tFallback due to 0 length: %lu\n",
				   ce_algs[i].stat_fb_len0);
			seq_printf(seq, "\tFallback due to length !mod16: %lu\n",
				   ce_algs[i].stat_fb_mod16);
			seq_printf(seq, "\tFallback due to length < IV: %lu\n",
				   ce_algs[i].stat_fb_leniv);
			seq_printf(seq, "\tFallback due to source alignment: %lu\n",
				   ce_algs[i].stat_fb_srcali);
			seq_printf(seq, "\tFallback due to dest alignment: %lu\n",
				   ce_algs[i].stat_fb_dstali);
			seq_printf(seq, "\tFallback due to source length: %lu\n",
				   ce_algs[i].stat_fb_srclen);
			seq_printf(seq, "\tFallback due to dest length: %lu\n",
				   ce_algs[i].stat_fb_dstlen);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ce_algs[i].stat_fb_maxsg);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ce_algs[i].alg.hash.base.halg.base.cra_driver_name,
				   ce_algs[i].alg.hash.base.halg.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_fb);
			seq_printf(seq, "\tLast fallback is: %s\n",
				   ce_algs[i].fbname);
			seq_printf(seq, "\tFallback due to 0 length: %lu\n",
				   ce_algs[i].stat_fb_len0);
			seq_printf(seq, "\tFallback due to length: %lu\n",
				   ce_algs[i].stat_fb_srclen);
			seq_printf(seq, "\tFallback due to alignment: %lu\n",
				   ce_algs[i].stat_fb_srcali);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ce_algs[i].stat_fb_maxsg);
			break;
		case CRYPTO_ALG_TYPE_RNG:
			seq_printf(seq, "%s %s reqs=%lu bytes=%lu\n",
				   ce_algs[i].alg.rng.base.cra_driver_name,
				   ce_algs[i].alg.rng.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_bytes);
			break;
		}
	}
#if defined(CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG) && \
    defined(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG)
	seq_printf(seq, "HWRNG %lu %lu\n",
		   ce->hwrng_stat_req, ce->hwrng_stat_bytes);
#endif
	return 0;
}
709
710
DEFINE_SHOW_ATTRIBUTE(sun8i_ce_debugfs);
711
712
static void sun8i_ce_free_chanlist(struct sun8i_ce_dev *ce, int i)
713
{
714
while (i >= 0) {
715
crypto_engine_exit(ce->chanlist[i].engine);
716
if (ce->chanlist[i].tl)
717
dma_free_coherent(ce->dev, sizeof(struct ce_task),
718
ce->chanlist[i].tl,
719
ce->chanlist[i].t_phy);
720
i--;
721
}
722
}
723
724
/*
725
* Allocate the channel list structure
726
*/
727
/*
 * Allocate the channel list structure: one crypto engine, one DMA-coherent
 * task descriptor and two small IV buffers per flow. On any failure the
 * already-initialized channels 0..i are torn down via
 * sun8i_ce_free_chanlist(); devm-managed allocations are released with the
 * device.
 */
static int sun8i_ce_allocate_chanlist(struct sun8i_ce_dev *ce)
{
	int i, err;

	ce->chanlist = devm_kcalloc(ce->dev, MAXFLOW,
				    sizeof(struct sun8i_ce_flow), GFP_KERNEL);
	if (!ce->chanlist)
		return -ENOMEM;

	for (i = 0; i < MAXFLOW; i++) {
		init_completion(&ce->chanlist[i].complete);

		ce->chanlist[i].engine = crypto_engine_alloc_init(ce->dev, true);
		if (!ce->chanlist[i].engine) {
			dev_err(ce->dev, "Cannot allocate engine\n");
			/* Channel i has no engine yet; only free up to i - 1. */
			i--;
			err = -ENOMEM;
			goto error_engine;
		}
		err = crypto_engine_start(ce->chanlist[i].engine);
		if (err) {
			dev_err(ce->dev, "Cannot start engine\n");
			goto error_engine;
		}
		ce->chanlist[i].tl = dma_alloc_coherent(ce->dev,
							sizeof(struct ce_task),
							&ce->chanlist[i].t_phy,
							GFP_KERNEL);
		if (!ce->chanlist[i].tl) {
			dev_err(ce->dev, "Cannot get DMA memory for task %d\n",
				i);
			err = -ENOMEM;
			goto error_engine;
		}
		/* bounce_iv must be DMA-able, hence GFP_DMA. */
		ce->chanlist[i].bounce_iv = devm_kmalloc(ce->dev, AES_BLOCK_SIZE,
							 GFP_KERNEL | GFP_DMA);
		if (!ce->chanlist[i].bounce_iv) {
			err = -ENOMEM;
			goto error_engine;
		}
		/* backup_iv is CPU-only, plain GFP_KERNEL is enough. */
		ce->chanlist[i].backup_iv = devm_kmalloc(ce->dev, AES_BLOCK_SIZE,
							 GFP_KERNEL);
		if (!ce->chanlist[i].backup_iv) {
			err = -ENOMEM;
			goto error_engine;
		}
	}
	return 0;
error_engine:
	sun8i_ce_free_chanlist(ce, i);
	return err;
}
779
780
/*
781
* Power management strategy: The device is suspended unless a TFM exists for
782
* one of the algorithms proposed by this driver.
783
*/
784
static int sun8i_ce_pm_suspend(struct device *dev)
785
{
786
struct sun8i_ce_dev *ce = dev_get_drvdata(dev);
787
int i;
788
789
reset_control_assert(ce->reset);
790
for (i = 0; i < CE_MAX_CLOCKS; i++)
791
clk_disable_unprepare(ce->ceclks[i]);
792
return 0;
793
}
794
795
static int sun8i_ce_pm_resume(struct device *dev)
796
{
797
struct sun8i_ce_dev *ce = dev_get_drvdata(dev);
798
int err, i;
799
800
for (i = 0; i < CE_MAX_CLOCKS; i++) {
801
if (!ce->variant->ce_clks[i].name)
802
continue;
803
err = clk_prepare_enable(ce->ceclks[i]);
804
if (err) {
805
dev_err(ce->dev, "Cannot prepare_enable %s\n",
806
ce->variant->ce_clks[i].name);
807
goto error;
808
}
809
}
810
err = reset_control_deassert(ce->reset);
811
if (err) {
812
dev_err(ce->dev, "Cannot deassert reset control\n");
813
goto error;
814
}
815
return 0;
816
error:
817
sun8i_ce_pm_suspend(dev);
818
return err;
819
}
820
821
/* Runtime PM only; no system sleep callbacks are provided. */
static const struct dev_pm_ops sun8i_ce_pm_ops = {
	SET_RUNTIME_PM_OPS(sun8i_ce_pm_suspend, sun8i_ce_pm_resume, NULL)
};
824
825
static int sun8i_ce_pm_init(struct sun8i_ce_dev *ce)
826
{
827
int err;
828
829
pm_runtime_use_autosuspend(ce->dev);
830
pm_runtime_set_autosuspend_delay(ce->dev, 2000);
831
832
err = pm_runtime_set_suspended(ce->dev);
833
if (err)
834
return err;
835
836
err = devm_pm_runtime_enable(ce->dev);
837
if (err)
838
return err;
839
840
return 0;
841
}
842
843
static int sun8i_ce_get_clks(struct sun8i_ce_dev *ce)
844
{
845
unsigned long cr;
846
int err, i;
847
848
for (i = 0; i < CE_MAX_CLOCKS; i++) {
849
if (!ce->variant->ce_clks[i].name)
850
continue;
851
ce->ceclks[i] = devm_clk_get(ce->dev, ce->variant->ce_clks[i].name);
852
if (IS_ERR(ce->ceclks[i])) {
853
err = PTR_ERR(ce->ceclks[i]);
854
dev_err(ce->dev, "Cannot get %s CE clock err=%d\n",
855
ce->variant->ce_clks[i].name, err);
856
return err;
857
}
858
cr = clk_get_rate(ce->ceclks[i]);
859
if (!cr)
860
return -EINVAL;
861
if (ce->variant->ce_clks[i].freq > 0 &&
862
cr != ce->variant->ce_clks[i].freq) {
863
dev_info(ce->dev, "Set %s clock to %lu (%lu Mhz) from %lu (%lu Mhz)\n",
864
ce->variant->ce_clks[i].name,
865
ce->variant->ce_clks[i].freq,
866
ce->variant->ce_clks[i].freq / 1000000,
867
cr, cr / 1000000);
868
err = clk_set_rate(ce->ceclks[i], ce->variant->ce_clks[i].freq);
869
if (err)
870
dev_err(ce->dev, "Fail to set %s clk speed to %lu hz\n",
871
ce->variant->ce_clks[i].name,
872
ce->variant->ce_clks[i].freq);
873
}
874
if (ce->variant->ce_clks[i].max_freq > 0 &&
875
cr > ce->variant->ce_clks[i].max_freq)
876
dev_warn(ce->dev, "Frequency for %s (%lu hz) is higher than datasheet's recommendation (%lu hz)",
877
ce->variant->ce_clks[i].name, cr,
878
ce->variant->ce_clks[i].max_freq);
879
}
880
return 0;
881
}
882
883
/*
 * Register every entry of ce_algs[] that the runtime variant supports.
 * Entries the hardware cannot handle (algorithm or block mode marked
 * CE_ID_NOTSUPP) are skipped by clearing their ->ce back-pointer, which is
 * also how unregister/debugfs later tell live entries from dead ones.
 * Registration failure aborts for skcipher/ahash but is only logged for
 * RNG (the driver keeps going without it).
 */
static int sun8i_ce_register_algs(struct sun8i_ce_dev *ce)
{
	int ce_method, err, id;
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		ce_algs[i].ce = ce;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			id = ce_algs[i].ce_algo_id;
			ce_method = ce->variant->alg_cipher[id];
			if (ce_method == CE_ID_NOTSUPP) {
				dev_dbg(ce->dev,
					"DEBUG: Algo of %s not supported\n",
					ce_algs[i].alg.skcipher.base.base.cra_name);
				ce_algs[i].ce = NULL;
				break;
			}
			id = ce_algs[i].ce_blockmode;
			ce_method = ce->variant->op_mode[id];
			if (ce_method == CE_ID_NOTSUPP) {
				dev_dbg(ce->dev, "DEBUG: Blockmode of %s not supported\n",
					ce_algs[i].alg.skcipher.base.base.cra_name);
				ce_algs[i].ce = NULL;
				break;
			}
			dev_info(ce->dev, "Register %s\n",
				 ce_algs[i].alg.skcipher.base.base.cra_name);
			err = crypto_engine_register_skcipher(&ce_algs[i].alg.skcipher);
			if (err) {
				dev_err(ce->dev, "ERROR: Fail to register %s\n",
					ce_algs[i].alg.skcipher.base.base.cra_name);
				ce_algs[i].ce = NULL;
				return err;
			}
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			id = ce_algs[i].ce_algo_id;
			ce_method = ce->variant->alg_hash[id];
			if (ce_method == CE_ID_NOTSUPP) {
				dev_info(ce->dev,
					 "DEBUG: Algo of %s not supported\n",
					 ce_algs[i].alg.hash.base.halg.base.cra_name);
				ce_algs[i].ce = NULL;
				break;
			}
			dev_info(ce->dev, "Register %s\n",
				 ce_algs[i].alg.hash.base.halg.base.cra_name);
			err = crypto_engine_register_ahash(&ce_algs[i].alg.hash);
			if (err) {
				dev_err(ce->dev, "ERROR: Fail to register %s\n",
					ce_algs[i].alg.hash.base.halg.base.cra_name);
				ce_algs[i].ce = NULL;
				return err;
			}
			break;
		case CRYPTO_ALG_TYPE_RNG:
			if (ce->variant->prng == CE_ID_NOTSUPP) {
				dev_info(ce->dev,
					 "DEBUG: Algo of %s not supported\n",
					 ce_algs[i].alg.rng.base.cra_name);
				ce_algs[i].ce = NULL;
				break;
			}
			dev_info(ce->dev, "Register %s\n",
				 ce_algs[i].alg.rng.base.cra_name);
			err = crypto_register_rng(&ce_algs[i].alg.rng);
			if (err) {
				/* Non-fatal: continue without the PRNG. */
				dev_err(ce->dev, "Fail to register %s\n",
					ce_algs[i].alg.rng.base.cra_name);
				ce_algs[i].ce = NULL;
			}
			break;
		default:
			ce_algs[i].ce = NULL;
			dev_err(ce->dev, "ERROR: tried to register an unknown algo\n");
		}
	}
	return 0;
}
963
964
static void sun8i_ce_unregister_algs(struct sun8i_ce_dev *ce)
965
{
966
unsigned int i;
967
968
for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
969
if (!ce_algs[i].ce)
970
continue;
971
switch (ce_algs[i].type) {
972
case CRYPTO_ALG_TYPE_SKCIPHER:
973
dev_info(ce->dev, "Unregister %d %s\n", i,
974
ce_algs[i].alg.skcipher.base.base.cra_name);
975
crypto_engine_unregister_skcipher(&ce_algs[i].alg.skcipher);
976
break;
977
case CRYPTO_ALG_TYPE_AHASH:
978
dev_info(ce->dev, "Unregister %d %s\n", i,
979
ce_algs[i].alg.hash.base.halg.base.cra_name);
980
crypto_engine_unregister_ahash(&ce_algs[i].alg.hash);
981
break;
982
case CRYPTO_ALG_TYPE_RNG:
983
dev_info(ce->dev, "Unregister %d %s\n", i,
984
ce_algs[i].alg.rng.base.cra_name);
985
crypto_unregister_rng(&ce_algs[i].alg.rng);
986
break;
987
}
988
}
989
}
990
991
/*
 * Probe: map registers, acquire clocks/reset/IRQ, allocate the per-flow
 * channel list, set up runtime PM, register algorithms, print the die ID
 * and optionally create the debugfs stats file. Runtime PM is initialized
 * suspended; the device is briefly resumed here only to read CE_CTR.
 */
static int sun8i_ce_probe(struct platform_device *pdev)
{
	struct sun8i_ce_dev *ce;
	int err, irq;
	u32 v;

	ce = devm_kzalloc(&pdev->dev, sizeof(*ce), GFP_KERNEL);
	if (!ce)
		return -ENOMEM;

	ce->dev = &pdev->dev;
	platform_set_drvdata(pdev, ce);

	ce->variant = of_device_get_match_data(&pdev->dev);
	if (!ce->variant) {
		dev_err(&pdev->dev, "Missing Crypto Engine variant\n");
		return -EINVAL;
	}

	ce->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(ce->base))
		return PTR_ERR(ce->base);

	err = sun8i_ce_get_clks(ce);
	if (err)
		return err;

	/* Get Non Secure IRQ */
	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ce->reset = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(ce->reset))
		return dev_err_probe(&pdev->dev, PTR_ERR(ce->reset),
				     "No reset control found\n");

	mutex_init(&ce->mlock);
	mutex_init(&ce->rnglock);

	err = sun8i_ce_allocate_chanlist(ce);
	if (err)
		return err;

	err = sun8i_ce_pm_init(ce);
	if (err)
		goto error_pm;

	err = devm_request_irq(&pdev->dev, irq, ce_irq_handler, 0,
			       "sun8i-ce-ns", ce);
	if (err) {
		dev_err(ce->dev, "Cannot request CryptoEngine Non-secure IRQ (err=%d)\n", err);
		goto error_pm;
	}

	err = sun8i_ce_register_algs(ce);
	if (err)
		goto error_alg;

	/* Power up just long enough to read the die ID below. */
	err = pm_runtime_resume_and_get(ce->dev);
	if (err < 0)
		goto error_alg;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG
	sun8i_ce_hwrng_register(ce);
#endif

	v = readl(ce->base + CE_CTR);
	v >>= CE_DIE_ID_SHIFT;
	v &= CE_DIE_ID_MASK;
	dev_info(&pdev->dev, "CryptoEngine Die ID %x\n", v);

	pm_runtime_put_sync(ce->dev);

	if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG)) {
		struct dentry *dbgfs_dir __maybe_unused;
		struct dentry *dbgfs_stats __maybe_unused;

		/* Ignore error of debugfs */
		dbgfs_dir = debugfs_create_dir("sun8i-ce", NULL);
		dbgfs_stats = debugfs_create_file("stats", 0444,
						  dbgfs_dir, ce,
						  &sun8i_ce_debugfs_fops);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
		ce->dbgfs_dir = dbgfs_dir;
		ce->dbgfs_stats = dbgfs_stats;
#endif
	}

	return 0;
error_alg:
	sun8i_ce_unregister_algs(ce);
error_pm:
	sun8i_ce_free_chanlist(ce, MAXFLOW - 1);
	return err;
}
1088
1089
/*
 * Remove: undo probe in reverse order -- hwrng, registered algorithms,
 * debugfs tree, then the per-flow channel list. devm-managed resources
 * (clocks, IRQ, mappings) are released automatically afterwards.
 */
static void sun8i_ce_remove(struct platform_device *pdev)
{
	struct sun8i_ce_dev *ce = platform_get_drvdata(pdev);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG
	sun8i_ce_hwrng_unregister(ce);
#endif

	sun8i_ce_unregister_algs(ce);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	debugfs_remove_recursive(ce->dbgfs_dir);
#endif

	sun8i_ce_free_chanlist(ce, MAXFLOW - 1);
}
1105
1106
/* Device-tree match table: maps each supported SoC compatible to its variant. */
static const struct of_device_id sun8i_ce_crypto_of_match_table[] = {
	{ .compatible = "allwinner,sun8i-h3-crypto",
	  .data = &ce_h3_variant },
	{ .compatible = "allwinner,sun8i-r40-crypto",
	  .data = &ce_r40_variant },
	{ .compatible = "allwinner,sun20i-d1-crypto",
	  .data = &ce_d1_variant },
	{ .compatible = "allwinner,sun50i-a64-crypto",
	  .data = &ce_a64_variant },
	{ .compatible = "allwinner,sun50i-h5-crypto",
	  .data = &ce_h5_variant },
	{ .compatible = "allwinner,sun50i-h6-crypto",
	  .data = &ce_h6_variant },
	{ .compatible = "allwinner,sun50i-h616-crypto",
	  .data = &ce_h616_variant },
	{}
};
1123
MODULE_DEVICE_TABLE(of, sun8i_ce_crypto_of_match_table);
1124
1125
/* Platform driver glue: probe/remove, runtime PM ops and OF match table. */
static struct platform_driver sun8i_ce_driver = {
	.probe		 = sun8i_ce_probe,
	.remove		 = sun8i_ce_remove,
	.driver		 = {
		.name		= "sun8i-ce",
		.pm		= &sun8i_ce_pm_ops,
		.of_match_table	= sun8i_ce_crypto_of_match_table,
	},
};
1134
1135
module_platform_driver(sun8i_ce_driver);
1136
1137
MODULE_DESCRIPTION("Allwinner Crypto Engine cryptographic offloader");
1138
MODULE_LICENSE("GPL");
1139
MODULE_AUTHOR("Corentin Labbe <[email protected]>");
1140
1141