Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
torvalds
GitHub Repository: torvalds/linux
Path: blob/master/drivers/crypto/allwinner/sun8i-ss/sun8i-ss-core.c
26292 views
1
// SPDX-License-Identifier: GPL-2.0
2
/*
3
* sun8i-ss-core.c - hardware cryptographic offloader for
4
* Allwinner A80/A83T SoC
5
*
6
* Copyright (C) 2015-2019 Corentin Labbe <[email protected]>
7
*
8
* Core file which registers crypto algorithms supported by the SecuritySystem
9
*
10
 * You can find a link to the datasheet in Documentation/arch/arm/sunxi.rst
11
*/
12
13
#include <crypto/engine.h>
14
#include <crypto/internal/rng.h>
15
#include <crypto/internal/skcipher.h>
16
#include <linux/clk.h>
17
#include <linux/delay.h>
18
#include <linux/dma-mapping.h>
19
#include <linux/err.h>
20
#include <linux/interrupt.h>
21
#include <linux/io.h>
22
#include <linux/irq.h>
23
#include <linux/kernel.h>
24
#include <linux/module.h>
25
#include <linux/of.h>
26
#include <linux/platform_device.h>
27
#include <linux/pm_runtime.h>
28
#include <linux/reset.h>
29
30
#include "sun8i-ss.h"
31
32
/*
 * Capabilities of the A80 (sun9i) SecuritySystem: ciphers only — all four
 * hash slots are SS_ID_NOTSUPP.  Arrays are indexed by the SS_ID_* values
 * looked up in sun8i_ss_register_algs().
 */
static const struct ss_variant ss_a80_variant = {
	.alg_cipher = { SS_ALG_AES, SS_ALG_DES, SS_ALG_3DES,
	},
	.alg_hash = { SS_ID_NOTSUPP, SS_ID_NOTSUPP, SS_ID_NOTSUPP, SS_ID_NOTSUPP,
	},
	.op_mode = { SS_OP_ECB, SS_OP_CBC,
	},
	.ss_clks = {
		/* { name, requested rate (0 = leave as-is), max recommended rate } */
		{ "bus", 0, 300 * 1000 * 1000 },
		{ "mod", 0, 300 * 1000 * 1000 },
	}
};
44
45
/*
 * Capabilities of the A83T SecuritySystem: same ciphers as the A80 plus
 * MD5/SHA1/SHA224/SHA256 hashing.  Arrays are indexed by the SS_ID_*
 * values looked up in sun8i_ss_register_algs().
 */
static const struct ss_variant ss_a83t_variant = {
	.alg_cipher = { SS_ALG_AES, SS_ALG_DES, SS_ALG_3DES,
	},
	.alg_hash = { SS_ALG_MD5, SS_ALG_SHA1, SS_ALG_SHA224, SS_ALG_SHA256,
	},
	.op_mode = { SS_OP_ECB, SS_OP_CBC,
	},
	.ss_clks = {
		/* { name, requested rate (0 = leave as-is), max recommended rate } */
		{ "bus", 0, 300 * 1000 * 1000 },
		{ "mod", 0, 300 * 1000 * 1000 },
	}
};
57
58
/*
59
* sun8i_ss_get_engine_number() get the next channel slot
60
* This is a simple round-robin way of getting the next channel
61
*/
62
int sun8i_ss_get_engine_number(struct sun8i_ss_dev *ss)
63
{
64
return atomic_inc_return(&ss->flow) % MAXFLOW;
65
}
66
67
/*
 * sun8i_ss_run_task() - program the SecuritySystem and run one cipher job.
 * @ss:   the device
 * @rctx: per-request context holding the pre-built task descriptors
 * @name: algorithm name, used only for diagnostics
 *
 * Each scatter-gather chunk in rctx->t_src/t_dst is submitted to the
 * hardware one at a time, serialized by ss->mlock, and completion is
 * signalled by the interrupt handler via ss->flows[flow].complete.
 *
 * Returns 0 on success, -EFAULT when the hardware did not signal
 * completion within the 2s timeout.
 */
int sun8i_ss_run_task(struct sun8i_ss_dev *ss, struct sun8i_cipher_req_ctx *rctx,
		      const char *name)
{
	int flow = rctx->flow;
	unsigned int ivlen = rctx->ivlen;
	u32 v = SS_START;
	int i;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	ss->flows[flow].stat_req++;
#endif

	/* choose between stream0/stream1 */
	if (flow)
		v |= SS_FLOW1;
	else
		v |= SS_FLOW0;

	/* Fold operation mode (ECB/CBC), algorithm and direction into the
	 * control word that will start the engine.
	 */
	v |= rctx->op_mode;
	v |= rctx->method;

	if (rctx->op_dir)
		v |= SS_DECRYPTION;

	/* AES key size field; DES/3DES key lengths fall through unchanged. */
	switch (rctx->keylen) {
	case 128 / 8:
		v |= SS_AES_128BITS << 7;
		break;
	case 192 / 8:
		v |= SS_AES_192BITS << 7;
		break;
	case 256 / 8:
		v |= SS_AES_256BITS << 7;
		break;
	}

	for (i = 0; i < MAX_SG; i++) {
		/* A zero destination address marks the end of the task list. */
		if (!rctx->t_dst[i].addr)
			break;

		/* The key/IV/address registers are shared between flows, so
		 * the whole programming sequence must hold the mutex.
		 */
		mutex_lock(&ss->mlock);
		writel(rctx->p_key, ss->base + SS_KEY_ADR_REG);

		if (ivlen) {
			if (rctx->op_dir == SS_ENCRYPTION) {
				/* CBC chaining: chunk 0 uses the request IV;
				 * every later chunk uses the last ciphertext
				 * block of the previous destination chunk.
				 * NOTE(review): t_dst[].len is multiplied by 4
				 * here, so it appears to be in 32-bit words —
				 * confirm against the descriptor builder.
				 */
				if (i == 0)
					writel(rctx->p_iv[0], ss->base + SS_IV_ADR_REG);
				else
					writel(rctx->t_dst[i - 1].addr + rctx->t_dst[i - 1].len * 4 - ivlen, ss->base + SS_IV_ADR_REG);
			} else {
				/* Decryption: use the per-chunk IV saved
				 * before the ciphertext was overwritten.
				 */
				writel(rctx->p_iv[i], ss->base + SS_IV_ADR_REG);
			}
		}

		dev_dbg(ss->dev,
			"Processing SG %d on flow %d %s ctl=%x %d to %d method=%x opmode=%x opdir=%x srclen=%d\n",
			i, flow, name, v,
			rctx->t_src[i].len, rctx->t_dst[i].len,
			rctx->method, rctx->op_mode,
			rctx->op_dir, rctx->t_src[i].len);

		writel(rctx->t_src[i].addr, ss->base + SS_SRC_ADR_REG);
		writel(rctx->t_dst[i].addr, ss->base + SS_DST_ADR_REG);
		writel(rctx->t_src[i].len, ss->base + SS_LEN_ADR_REG);

		/* Re-arm the completion and clear the status flag before the
		 * engine is started; the wmb() orders these stores ahead of
		 * the SS_CTL_REG write that kicks off the DMA.
		 */
		reinit_completion(&ss->flows[flow].complete);
		ss->flows[flow].status = 0;
		wmb();

		writel(v, ss->base + SS_CTL_REG);
		mutex_unlock(&ss->mlock);
		wait_for_completion_interruptible_timeout(&ss->flows[flow].complete,
							  msecs_to_jiffies(2000));
		/* status is set to 1 by ss_irq_handler(); if it is still 0
		 * the hardware never answered (timeout or interruption).
		 */
		if (ss->flows[flow].status == 0) {
			dev_err(ss->dev, "DMA timeout for %s\n", name);
			return -EFAULT;
		}
	}

	return 0;
}
148
149
static irqreturn_t ss_irq_handler(int irq, void *data)
150
{
151
struct sun8i_ss_dev *ss = (struct sun8i_ss_dev *)data;
152
int flow = 0;
153
u32 p;
154
155
p = readl(ss->base + SS_INT_STA_REG);
156
for (flow = 0; flow < MAXFLOW; flow++) {
157
if (p & (BIT(flow))) {
158
writel(BIT(flow), ss->base + SS_INT_STA_REG);
159
ss->flows[flow].status = 1;
160
complete(&ss->flows[flow].complete);
161
}
162
}
163
164
return IRQ_HANDLED;
165
}
166
167
/*
 * Table of every algorithm this driver can offer.  Entries are filtered at
 * probe time by sun8i_ss_register_algs() against the variant's capability
 * tables (alg_cipher/op_mode/alg_hash); unsupported entries get .ss = NULL.
 */
static struct sun8i_ss_alg_template ss_algs[] = {
/* AES-CBC skcipher, with software fallback for unaligned/oversized input. */
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ss_algo_id = SS_ID_CIPHER_AES,
	.ss_blockmode = SS_ID_OP_CBC,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "cbc-aes-sun8i-ss",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ss_cipher_init,
			.cra_exit = sun8i_ss_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= sun8i_ss_aes_setkey,
		.encrypt	= sun8i_ss_skencrypt,
		.decrypt	= sun8i_ss_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ss_handle_cipher_request,
	},
},
/* AES-ECB skcipher (no IV). */
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ss_algo_id = SS_ID_CIPHER_AES,
	.ss_blockmode = SS_ID_OP_ECB,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-sun8i-ss",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ss_cipher_init,
			.cra_exit = sun8i_ss_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= sun8i_ss_aes_setkey,
		.encrypt	= sun8i_ss_skencrypt,
		.decrypt	= sun8i_ss_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ss_handle_cipher_request,
	},
},
/* 3DES-CBC skcipher. */
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ss_algo_id = SS_ID_CIPHER_DES3,
	.ss_blockmode = SS_ID_OP_CBC,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "cbc-des3-sun8i-ss",
			.cra_priority = 400,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ss_cipher_init,
			.cra_exit = sun8i_ss_cipher_exit,
		},
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.setkey		= sun8i_ss_des3_setkey,
		.encrypt	= sun8i_ss_skencrypt,
		.decrypt	= sun8i_ss_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ss_handle_cipher_request,
	},
},
/* 3DES-ECB skcipher. */
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ss_algo_id = SS_ID_CIPHER_DES3,
	.ss_blockmode = SS_ID_OP_ECB,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "ecb-des3-sun8i-ss",
			.cra_priority = 400,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ss_cipher_init,
			.cra_exit = sun8i_ss_cipher_exit,
		},
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
		.setkey		= sun8i_ss_des3_setkey,
		.encrypt	= sun8i_ss_skencrypt,
		.decrypt	= sun8i_ss_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ss_handle_cipher_request,
	},
},
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_PRNG
/* Hardware-assisted pseudo random number generator. */
{
	.type = CRYPTO_ALG_TYPE_RNG,
	.alg.rng = {
		.base = {
			.cra_name		= "stdrng",
			.cra_driver_name	= "sun8i-ss-prng",
			.cra_priority		= 300,
			.cra_ctxsize = sizeof(struct sun8i_ss_rng_tfm_ctx),
			.cra_module		= THIS_MODULE,
			.cra_init		= sun8i_ss_prng_init,
			.cra_exit		= sun8i_ss_prng_exit,
		},
		.generate               = sun8i_ss_prng_generate,
		.seed                   = sun8i_ss_prng_seed,
		.seedsize               = PRNG_SEED_SIZE,
	}
},
#endif
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_HASH
/* MD5 ahash. */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_MD5,
	.alg.hash.base = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.init_tfm = sun8i_ss_hash_init_tfm,
		.exit_tfm = sun8i_ss_hash_exit_tfm,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct md5_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "md5-sun8i-ss",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ss_hash_run,
	},
},
/* SHA1 ahash. */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_SHA1,
	.alg.hash.base = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.init_tfm = sun8i_ss_hash_init_tfm,
		.exit_tfm = sun8i_ss_hash_exit_tfm,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct sha1_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-sun8i-ss",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ss_hash_run,
	},
},
/* SHA224 ahash (shares sha256_state for export/import). */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_SHA224,
	.alg.hash.base = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.init_tfm = sun8i_ss_hash_init_tfm,
		.exit_tfm = sun8i_ss_hash_exit_tfm,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-sun8i-ss",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ss_hash_run,
	},
},
/* SHA256 ahash. */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_SHA256,
	.alg.hash.base = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.init_tfm = sun8i_ss_hash_init_tfm,
		.exit_tfm = sun8i_ss_hash_exit_tfm,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-sun8i-ss",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ss_hash_run,
	},
},
/* HMAC-SHA1 ahash; only hash entry with a setkey. */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_SHA1,
	.alg.hash.base = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.init_tfm = sun8i_ss_hash_init_tfm,
		.exit_tfm = sun8i_ss_hash_exit_tfm,
		.setkey = sun8i_ss_hmac_setkey,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct sha1_state),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "hmac-sha1-sun8i-ss",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ss_hash_run,
	},
},
#endif
};
469
470
/*
 * sun8i_ss_debugfs_show() - dump per-flow and per-algorithm statistics.
 *
 * Per-flow request counters only exist when CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
 * is set; otherwise 0 is printed (hence the #ifdef inside the argument list
 * and the __maybe_unused on @ss).  Entries whose .ss pointer is NULL were
 * never registered and are skipped.
 */
static int sun8i_ss_debugfs_show(struct seq_file *seq, void *v)
{
	struct sun8i_ss_dev *ss __maybe_unused = seq->private;
	unsigned int i;

	for (i = 0; i < MAXFLOW; i++)
		seq_printf(seq, "Channel %d: nreq %lu\n", i,
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
			   ss->flows[i].stat_req);
#else
			   0ul);
#endif

	for (i = 0; i < ARRAY_SIZE(ss_algs); i++) {
		if (!ss_algs[i].ss)
			continue;
		switch (ss_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ss_algs[i].alg.skcipher.base.base.cra_driver_name,
				   ss_algs[i].alg.skcipher.base.base.cra_name,
				   ss_algs[i].stat_req, ss_algs[i].stat_fb);

			/* Break the fallback count down by cause. */
			seq_printf(seq, "\tLast fallback is: %s\n",
				   ss_algs[i].fbname);
			seq_printf(seq, "\tFallback due to length: %lu\n",
				   ss_algs[i].stat_fb_len);
			seq_printf(seq, "\tFallback due to SG length: %lu\n",
				   ss_algs[i].stat_fb_sglen);
			seq_printf(seq, "\tFallback due to alignment: %lu\n",
				   ss_algs[i].stat_fb_align);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ss_algs[i].stat_fb_sgnum);
			break;
		case CRYPTO_ALG_TYPE_RNG:
			seq_printf(seq, "%s %s reqs=%lu tsize=%lu\n",
				   ss_algs[i].alg.rng.base.cra_driver_name,
				   ss_algs[i].alg.rng.base.cra_name,
				   ss_algs[i].stat_req, ss_algs[i].stat_bytes);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ss_algs[i].alg.hash.base.halg.base.cra_driver_name,
				   ss_algs[i].alg.hash.base.halg.base.cra_name,
				   ss_algs[i].stat_req, ss_algs[i].stat_fb);
			seq_printf(seq, "\tLast fallback is: %s\n",
				   ss_algs[i].fbname);
			seq_printf(seq, "\tFallback due to length: %lu\n",
				   ss_algs[i].stat_fb_len);
			seq_printf(seq, "\tFallback due to SG length: %lu\n",
				   ss_algs[i].stat_fb_sglen);
			seq_printf(seq, "\tFallback due to alignment: %lu\n",
				   ss_algs[i].stat_fb_align);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ss_algs[i].stat_fb_sgnum);
			break;
		}
	}
	return 0;
}

DEFINE_SHOW_ATTRIBUTE(sun8i_ss_debugfs);
532
533
static void sun8i_ss_free_flows(struct sun8i_ss_dev *ss, int i)
534
{
535
while (i >= 0) {
536
crypto_engine_exit(ss->flows[i].engine);
537
i--;
538
}
539
}
540
541
/*
 * Allocate the flow list structure
 *
 * For each of the MAXFLOW channels this allocates (devm-managed, so freed
 * automatically with the device): a backup-IV buffer, one IV buffer per
 * scatter-gather slot, a padding buffer and a result buffer, then creates
 * and starts a crypto engine.  On any failure, every engine started so far
 * is torn down via sun8i_ss_free_flows().
 */
static int allocate_flows(struct sun8i_ss_dev *ss)
{
	int i, j, err;

	ss->flows = devm_kcalloc(ss->dev, MAXFLOW, sizeof(struct sun8i_ss_flow),
				 GFP_KERNEL);
	if (!ss->flows)
		return -ENOMEM;

	for (i = 0; i < MAXFLOW; i++) {
		init_completion(&ss->flows[i].complete);

		ss->flows[i].biv = devm_kmalloc(ss->dev, AES_BLOCK_SIZE,
						GFP_KERNEL);
		if (!ss->flows[i].biv) {
			err = -ENOMEM;
			goto error_engine;
		}

		for (j = 0; j < MAX_SG; j++) {
			ss->flows[i].iv[j] = devm_kmalloc(ss->dev, AES_BLOCK_SIZE,
							  GFP_KERNEL);
			if (!ss->flows[i].iv[j]) {
				err = -ENOMEM;
				goto error_engine;
			}
		}

		/* the padding could be up to two block. */
		ss->flows[i].pad = devm_kmalloc(ss->dev, MAX_PAD_SIZE,
						GFP_KERNEL);
		if (!ss->flows[i].pad) {
			err = -ENOMEM;
			goto error_engine;
		}
		/* Result buffer is sized for the largest digest, rounded up
		 * to the DMA cache alignment so it can be safely mapped.
		 */
		ss->flows[i].result =
			devm_kmalloc(ss->dev, max(SHA256_DIGEST_SIZE,
						  dma_get_cache_alignment()),
				     GFP_KERNEL);
		if (!ss->flows[i].result) {
			err = -ENOMEM;
			goto error_engine;
		}

		ss->flows[i].engine = crypto_engine_alloc_init(ss->dev, true);
		if (!ss->flows[i].engine) {
			dev_err(ss->dev, "Cannot allocate engine\n");
			/* Flow i has no engine: step back so the cleanup
			 * below only exits engines that were created.
			 */
			i--;
			err = -ENOMEM;
			goto error_engine;
		}
		err = crypto_engine_start(ss->flows[i].engine);
		if (err) {
			dev_err(ss->dev, "Cannot start engine\n");
			goto error_engine;
		}
	}
	return 0;
error_engine:
	sun8i_ss_free_flows(ss, i);
	return err;
}
606
607
/*
 * Power management strategy: The device is suspended unless a TFM exists for
 * one of the algorithms proposed by this driver.
 */
static int sun8i_ss_pm_suspend(struct device *dev)
{
	struct sun8i_ss_dev *ss = dev_get_drvdata(dev);
	int i;

	/* Assert reset first, then gate all clocks (mirror of resume order). */
	reset_control_assert(ss->reset);
	for (i = 0; i < SS_MAX_CLOCKS; i++)
		clk_disable_unprepare(ss->ssclks[i]);
	return 0;
}
621
622
static int sun8i_ss_pm_resume(struct device *dev)
623
{
624
struct sun8i_ss_dev *ss = dev_get_drvdata(dev);
625
int err, i;
626
627
for (i = 0; i < SS_MAX_CLOCKS; i++) {
628
if (!ss->variant->ss_clks[i].name)
629
continue;
630
err = clk_prepare_enable(ss->ssclks[i]);
631
if (err) {
632
dev_err(ss->dev, "Cannot prepare_enable %s\n",
633
ss->variant->ss_clks[i].name);
634
goto error;
635
}
636
}
637
err = reset_control_deassert(ss->reset);
638
if (err) {
639
dev_err(ss->dev, "Cannot deassert reset control\n");
640
goto error;
641
}
642
/* enable interrupts for all flows */
643
writel(BIT(0) | BIT(1), ss->base + SS_INT_CTL_REG);
644
645
return 0;
646
error:
647
sun8i_ss_pm_suspend(dev);
648
return err;
649
}
650
651
/* Runtime-PM callbacks only; no dedicated system sleep handlers. */
static const struct dev_pm_ops sun8i_ss_pm_ops = {
	SET_RUNTIME_PM_OPS(sun8i_ss_pm_suspend, sun8i_ss_pm_resume, NULL)
};
654
655
static int sun8i_ss_pm_init(struct sun8i_ss_dev *ss)
656
{
657
int err;
658
659
pm_runtime_use_autosuspend(ss->dev);
660
pm_runtime_set_autosuspend_delay(ss->dev, 2000);
661
662
err = pm_runtime_set_suspended(ss->dev);
663
if (err)
664
return err;
665
pm_runtime_enable(ss->dev);
666
return err;
667
}
668
669
/* Undo sun8i_ss_pm_init(): disable runtime PM for the device. */
static void sun8i_ss_pm_exit(struct sun8i_ss_dev *ss)
{
	pm_runtime_disable(ss->dev);
}
673
674
/*
 * sun8i_ss_register_algs() - register every algorithm the variant supports.
 *
 * For each ss_algs[] entry, the variant capability tables are consulted;
 * unsupported entries are marked with .ss = NULL so debugfs and the
 * unregister path skip them.  A skcipher/ahash registration failure aborts
 * with an error, while a failed RNG registration is only logged and the
 * loop continues (the RNG is treated as optional).
 */
static int sun8i_ss_register_algs(struct sun8i_ss_dev *ss)
{
	int ss_method, err, id;
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ss_algs); i++) {
		ss_algs[i].ss = ss;
		switch (ss_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			/* Check both the cipher algorithm and the block mode
			 * against the variant before registering.
			 */
			id = ss_algs[i].ss_algo_id;
			ss_method = ss->variant->alg_cipher[id];
			if (ss_method == SS_ID_NOTSUPP) {
				dev_info(ss->dev,
					 "DEBUG: Algo of %s not supported\n",
					 ss_algs[i].alg.skcipher.base.base.cra_name);
				ss_algs[i].ss = NULL;
				break;
			}
			id = ss_algs[i].ss_blockmode;
			ss_method = ss->variant->op_mode[id];
			if (ss_method == SS_ID_NOTSUPP) {
				dev_info(ss->dev, "DEBUG: Blockmode of %s not supported\n",
					 ss_algs[i].alg.skcipher.base.base.cra_name);
				ss_algs[i].ss = NULL;
				break;
			}
			dev_info(ss->dev, "DEBUG: Register %s\n",
				 ss_algs[i].alg.skcipher.base.base.cra_name);
			err = crypto_engine_register_skcipher(&ss_algs[i].alg.skcipher);
			if (err) {
				dev_err(ss->dev, "Fail to register %s\n",
					ss_algs[i].alg.skcipher.base.base.cra_name);
				ss_algs[i].ss = NULL;
				return err;
			}
			break;
		case CRYPTO_ALG_TYPE_RNG:
			/* Best effort: failure is logged but not fatal. */
			err = crypto_register_rng(&ss_algs[i].alg.rng);
			if (err) {
				dev_err(ss->dev, "Fail to register %s\n",
					ss_algs[i].alg.rng.base.cra_name);
				ss_algs[i].ss = NULL;
			}
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			id = ss_algs[i].ss_algo_id;
			ss_method = ss->variant->alg_hash[id];
			if (ss_method == SS_ID_NOTSUPP) {
				dev_info(ss->dev,
					 "DEBUG: Algo of %s not supported\n",
					 ss_algs[i].alg.hash.base.halg.base.cra_name);
				ss_algs[i].ss = NULL;
				break;
			}
			dev_info(ss->dev, "Register %s\n",
				 ss_algs[i].alg.hash.base.halg.base.cra_name);
			err = crypto_engine_register_ahash(&ss_algs[i].alg.hash);
			if (err) {
				dev_err(ss->dev, "ERROR: Fail to register %s\n",
					ss_algs[i].alg.hash.base.halg.base.cra_name);
				ss_algs[i].ss = NULL;
				return err;
			}
			break;
		default:
			ss_algs[i].ss = NULL;
			dev_err(ss->dev, "ERROR: tried to register an unknown algo\n");
		}
	}
	return 0;
}
745
746
static void sun8i_ss_unregister_algs(struct sun8i_ss_dev *ss)
747
{
748
unsigned int i;
749
750
for (i = 0; i < ARRAY_SIZE(ss_algs); i++) {
751
if (!ss_algs[i].ss)
752
continue;
753
switch (ss_algs[i].type) {
754
case CRYPTO_ALG_TYPE_SKCIPHER:
755
dev_info(ss->dev, "Unregister %d %s\n", i,
756
ss_algs[i].alg.skcipher.base.base.cra_name);
757
crypto_engine_unregister_skcipher(&ss_algs[i].alg.skcipher);
758
break;
759
case CRYPTO_ALG_TYPE_RNG:
760
dev_info(ss->dev, "Unregister %d %s\n", i,
761
ss_algs[i].alg.rng.base.cra_name);
762
crypto_unregister_rng(&ss_algs[i].alg.rng);
763
break;
764
case CRYPTO_ALG_TYPE_AHASH:
765
dev_info(ss->dev, "Unregister %d %s\n", i,
766
ss_algs[i].alg.hash.base.halg.base.cra_name);
767
crypto_engine_unregister_ahash(&ss_algs[i].alg.hash);
768
break;
769
}
770
}
771
}
772
773
/*
 * sun8i_ss_get_clks() - acquire and rate-check the variant's clocks.
 *
 * For each clock named by the variant: get it (devm-managed), reject a
 * zero rate, optionally retune it to the variant's requested frequency
 * (a failed clk_set_rate() is only logged, not fatal), and warn when the
 * rate exceeds the datasheet's recommended maximum.
 *
 * Returns 0 on success, a negative errno when a clock is missing or
 * reports a zero rate.
 */
static int sun8i_ss_get_clks(struct sun8i_ss_dev *ss)
{
	unsigned long cr;
	int err, i;

	for (i = 0; i < SS_MAX_CLOCKS; i++) {
		/* Empty slots in the variant table have no name. */
		if (!ss->variant->ss_clks[i].name)
			continue;
		ss->ssclks[i] = devm_clk_get(ss->dev, ss->variant->ss_clks[i].name);
		if (IS_ERR(ss->ssclks[i])) {
			err = PTR_ERR(ss->ssclks[i]);
			dev_err(ss->dev, "Cannot get %s SS clock err=%d\n",
				ss->variant->ss_clks[i].name, err);
			return err;
		}
		cr = clk_get_rate(ss->ssclks[i]);
		if (!cr)
			return -EINVAL;
		/* freq > 0 means the variant wants a specific rate. */
		if (ss->variant->ss_clks[i].freq > 0 &&
		    cr != ss->variant->ss_clks[i].freq) {
			dev_info(ss->dev, "Set %s clock to %lu (%lu Mhz) from %lu (%lu Mhz)\n",
				 ss->variant->ss_clks[i].name,
				 ss->variant->ss_clks[i].freq,
				 ss->variant->ss_clks[i].freq / 1000000,
				 cr, cr / 1000000);
			err = clk_set_rate(ss->ssclks[i], ss->variant->ss_clks[i].freq);
			if (err)
				dev_err(ss->dev, "Fail to set %s clk speed to %lu hz\n",
					ss->variant->ss_clks[i].name,
					ss->variant->ss_clks[i].freq);
		}
		if (ss->variant->ss_clks[i].max_freq > 0 &&
		    cr > ss->variant->ss_clks[i].max_freq)
			dev_warn(ss->dev, "Frequency for %s (%lu hz) is higher than datasheet's recommendation (%lu hz)",
				 ss->variant->ss_clks[i].name, cr,
				 ss->variant->ss_clks[i].max_freq);
	}
	return 0;
}
812
813
/*
 * sun8i_ss_probe() - bring up the SecuritySystem and register algorithms.
 *
 * Order matters: variant match -> MMIO -> clocks -> IRQ lookup -> reset
 * line -> flow allocation -> runtime PM -> IRQ handler -> algorithm
 * registration.  The error labels unwind in reverse, and error_pm always
 * frees all MAXFLOW flows since allocate_flows() succeeded before PM init.
 */
static int sun8i_ss_probe(struct platform_device *pdev)
{
	struct sun8i_ss_dev *ss;
	int err, irq;
	u32 v;

	ss = devm_kzalloc(&pdev->dev, sizeof(*ss), GFP_KERNEL);
	if (!ss)
		return -ENOMEM;

	ss->dev = &pdev->dev;
	platform_set_drvdata(pdev, ss);

	ss->variant = of_device_get_match_data(&pdev->dev);
	if (!ss->variant) {
		dev_err(&pdev->dev, "Missing Crypto Engine variant\n");
		return -EINVAL;
	}

	ss->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(ss->base))
		return PTR_ERR(ss->base);

	err = sun8i_ss_get_clks(ss);
	if (err)
		return err;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ss->reset = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(ss->reset))
		return dev_err_probe(&pdev->dev, PTR_ERR(ss->reset),
				     "No reset control found\n");

	/* Serializes access to the shared key/IV/address registers. */
	mutex_init(&ss->mlock);

	err = allocate_flows(ss);
	if (err)
		return err;

	err = sun8i_ss_pm_init(ss);
	if (err)
		goto error_pm;

	err = devm_request_irq(&pdev->dev, irq, ss_irq_handler, 0, "sun8i-ss", ss);
	if (err) {
		dev_err(ss->dev, "Cannot request SecuritySystem IRQ (err=%d)\n", err);
		goto error_irq;
	}

	err = sun8i_ss_register_algs(ss);
	if (err)
		goto error_alg;

	/* Power up briefly just to read and report the die ID. */
	err = pm_runtime_resume_and_get(ss->dev);
	if (err < 0)
		goto error_alg;

	v = readl(ss->base + SS_CTL_REG);
	v >>= SS_DIE_ID_SHIFT;
	v &= SS_DIE_ID_MASK;
	dev_info(&pdev->dev, "Security System Die ID %x\n", v);

	pm_runtime_put_sync(ss->dev);

	if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG)) {
		struct dentry *dbgfs_dir __maybe_unused;
		struct dentry *dbgfs_stats __maybe_unused;

		/* Ignore error of debugfs */
		dbgfs_dir = debugfs_create_dir("sun8i-ss", NULL);
		dbgfs_stats = debugfs_create_file("stats", 0444,
						  dbgfs_dir, ss,
						  &sun8i_ss_debugfs_fops);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
		ss->dbgfs_dir = dbgfs_dir;
		ss->dbgfs_stats = dbgfs_stats;
#endif
	}

	return 0;
error_alg:
	sun8i_ss_unregister_algs(ss);
error_irq:
	sun8i_ss_pm_exit(ss);
error_pm:
	sun8i_ss_free_flows(ss, MAXFLOW - 1);
	return err;
}
905
906
/*
 * sun8i_ss_remove() - tear down in reverse probe order: unregister the
 * algorithms, drop the debugfs tree (when built in), exit all flow
 * engines, then disable runtime PM.
 */
static void sun8i_ss_remove(struct platform_device *pdev)
{
	struct sun8i_ss_dev *ss = platform_get_drvdata(pdev);

	sun8i_ss_unregister_algs(ss);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	debugfs_remove_recursive(ss->dbgfs_dir);
#endif

	sun8i_ss_free_flows(ss, MAXFLOW - 1);

	sun8i_ss_pm_exit(ss);
}
920
921
/* Devicetree match table: each compatible selects its capability variant. */
static const struct of_device_id sun8i_ss_crypto_of_match_table[] = {
	{ .compatible = "allwinner,sun8i-a83t-crypto",
	  .data = &ss_a83t_variant },
	{ .compatible = "allwinner,sun9i-a80-crypto",
	  .data = &ss_a80_variant },
	{}
};
MODULE_DEVICE_TABLE(of, sun8i_ss_crypto_of_match_table);
929
930
/* Platform driver glue: probe/remove plus runtime-PM ops. */
static struct platform_driver sun8i_ss_driver = {
	.probe		 = sun8i_ss_probe,
	.remove		 = sun8i_ss_remove,
	.driver		 = {
		.name		= "sun8i-ss",
		.pm             = &sun8i_ss_pm_ops,
		.of_match_table	= sun8i_ss_crypto_of_match_table,
	},
};

module_platform_driver(sun8i_ss_driver);

MODULE_DESCRIPTION("Allwinner SecuritySystem cryptographic offloader");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Corentin Labbe <[email protected]>");
945
946