GitHub Repository: torvalds/linux
Path: blob/master/arch/sparc/crypto/camellia_glue.c
// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for CAMELLIA encryption optimized for sparc64 crypto opcodes.
 *
 * Copyright (C) 2012 David S. Miller <[email protected]>
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>

#include <asm/fpumacro.h>
#include <asm/opcodes.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#define CAMELLIA_MIN_KEY_SIZE        16
#define CAMELLIA_MAX_KEY_SIZE        32
#define CAMELLIA_BLOCK_SIZE          16
#define CAMELLIA_TABLE_BYTE_LEN     272

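/* Per-transform context: the expanded encryption and decryption key
 * schedules built by the sparc64 assembler helpers, plus the original
 * key length (16, 24 or 32 bytes).
 */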
struct camellia_sparc64_ctx {
        u64 encrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
        u64 decrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
        int key_len;
};

extern void camellia_sparc64_key_expand(const u32 *in_key, u64 *encrypt_key,
                                        unsigned int key_len, u64 *decrypt_key);

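/* Key setup: accept only 16-, 24- or 32-byte keys and let the assembler
 * helper expand both the encryption and decryption key schedules in one
 * pass.
 */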
static int camellia_set_key(struct crypto_tfm *tfm, const u8 *_in_key,
                            unsigned int key_len)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
        const u32 *in_key = (const u32 *) _in_key;

        if (key_len != 16 && key_len != 24 && key_len != 32)
                return -EINVAL;

        ctx->key_len = key_len;

        camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],
                                    key_len, &ctx->decrypt_key[0]);
        return 0;
}

static int camellia_set_key_skcipher(struct crypto_skcipher *tfm,
                                     const u8 *in_key, unsigned int key_len)
{
        return camellia_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
}

extern void camellia_sparc64_crypt(const u64 *key, const u32 *input,
                                   u32 *output, unsigned int key_len);

static void camellia_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

        camellia_sparc64_crypt(&ctx->encrypt_key[0],
                               (const u32 *) src,
                               (u32 *) dst, ctx->key_len);
}

static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

        camellia_sparc64_crypt(&ctx->decrypt_key[0],
                               (const u32 *) src,
                               (u32 *) dst, ctx->key_len);
}

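/* The bulk routines below load the expanded key schedule into the
 * floating-point register file once per request
 * (camellia_sparc64_load_keys) and then process data with the sparc64
 * camellia opcodes, so the per-block loops do not reload the keys.
 */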
extern void camellia_sparc64_load_keys(const u64 *key, unsigned int key_len);

typedef void ecb_crypt_op(const u64 *input, u64 *output, unsigned int len,
                          const u64 *key);

extern ecb_crypt_op camellia_sparc64_ecb_crypt_3_grand_rounds;
extern ecb_crypt_op camellia_sparc64_ecb_crypt_4_grand_rounds;

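/* ECB helper shared by the encrypt and decrypt entry points.  128-bit
 * keys take the 3-grand-round routine (18 rounds); 192- and 256-bit
 * keys take the 4-grand-round routine (24 rounds).  Only whole 16-byte
 * blocks are handed to the assembler; fprs_write(0) marks the FP
 * registers clean again once the walk is finished.
 */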
static int __ecb_crypt(struct skcipher_request *req, bool encrypt)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        ecb_crypt_op *op;
        const u64 *key;
        unsigned int nbytes;
        int err;

        op = camellia_sparc64_ecb_crypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_ecb_crypt_4_grand_rounds;

        err = skcipher_walk_virt(&walk, req, true);
        if (err)
                return err;

        if (encrypt)
                key = &ctx->encrypt_key[0];
        else
                key = &ctx->decrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes) != 0) {
                op(walk.src.virt.addr, walk.dst.virt.addr,
                   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key);
                err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
        }
        fprs_write(0);
        return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
        return __ecb_crypt(req, true);
}

static int ecb_decrypt(struct skcipher_request *req)
{
        return __ecb_crypt(req, false);
}

typedef void cbc_crypt_op(const u64 *input, u64 *output, unsigned int len,
                          const u64 *key, u64 *iv);

extern cbc_crypt_op camellia_sparc64_cbc_encrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_encrypt_4_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_4_grand_rounds;

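/* CBC mode: the IV is passed to the assembler routines via walk.iv and
 * chained there, so the C side only selects the 3- or 4-grand-round
 * variant and walks the request in full blocks, as in the ECB case.
 */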
static int cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        cbc_crypt_op *op;
        const u64 *key;
        unsigned int nbytes;
        int err;

        op = camellia_sparc64_cbc_encrypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_cbc_encrypt_4_grand_rounds;

        err = skcipher_walk_virt(&walk, req, true);
        if (err)
                return err;

        key = &ctx->encrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes) != 0) {
                op(walk.src.virt.addr, walk.dst.virt.addr,
                   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
                err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
        }
        fprs_write(0);
        return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        cbc_crypt_op *op;
        const u64 *key;
        unsigned int nbytes;
        int err;

        op = camellia_sparc64_cbc_decrypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_cbc_decrypt_4_grand_rounds;

        err = skcipher_walk_virt(&walk, req, true);
        if (err)
                return err;

        key = &ctx->decrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes) != 0) {
                op(walk.src.virt.addr, walk.dst.virt.addr,
                   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
                err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
        }
        fprs_write(0);
        return err;
}

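/* Plain single-block cipher, registered under "camellia" so generic
 * templates can wrap it; the mode-specific skcipher implementations
 * below register separately.
 */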
static struct crypto_alg cipher_alg = {
        .cra_name               = "camellia",
        .cra_driver_name        = "camellia-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = CAMELLIA_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct camellia_sparc64_ctx),
        .cra_alignmask          = 3,
        .cra_module             = THIS_MODULE,
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = CAMELLIA_MIN_KEY_SIZE,
                        .cia_max_keysize        = CAMELLIA_MAX_KEY_SIZE,
                        .cia_setkey             = camellia_set_key,
                        .cia_encrypt            = camellia_encrypt,
                        .cia_decrypt            = camellia_decrypt
                }
        }
};

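/* ECB and CBC skciphers backed by the assembler bulk routines.  The
 * alignmask of 7 asks the crypto API for 8-byte-aligned buffers, since
 * the assembler operates on u64 words.
 */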
static struct skcipher_alg skcipher_algs[] = {
        {
                .base.cra_name          = "ecb(camellia)",
                .base.cra_driver_name   = "ecb-camellia-sparc64",
                .base.cra_priority      = SPARC_CR_OPCODE_PRIORITY,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_sparc64_ctx),
                .base.cra_alignmask     = 7,
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .setkey                 = camellia_set_key_skcipher,
                .encrypt                = ecb_encrypt,
                .decrypt                = ecb_decrypt,
        }, {
                .base.cra_name          = "cbc(camellia)",
                .base.cra_driver_name   = "cbc-camellia-sparc64",
                .base.cra_priority      = SPARC_CR_OPCODE_PRIORITY,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_sparc64_ctx),
                .base.cra_alignmask     = 7,
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .ivsize                 = CAMELLIA_BLOCK_SIZE,
                .setkey                 = camellia_set_key_skcipher,
                .encrypt                = cbc_encrypt,
                .decrypt                = cbc_decrypt,
        }
};

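/* Probe for the camellia opcodes: the ELF hwcap must advertise the
 * crypto unit and the Crypto Features Register (%asr26) must have the
 * CAMELLIA bit set.
 */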
static bool __init sparc64_has_camellia_opcode(void)
{
        unsigned long cfr;

        if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
                return false;

        __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
        if (!(cfr & CFR_CAMELLIA))
                return false;

        return true;
}

static int __init camellia_sparc64_mod_init(void)
{
        int err;

        if (!sparc64_has_camellia_opcode()) {
                pr_info("sparc64 camellia opcodes not available.\n");
                return -ENODEV;
        }
        pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");
        err = crypto_register_alg(&cipher_alg);
        if (err)
                return err;
        err = crypto_register_skciphers(skcipher_algs,
                                        ARRAY_SIZE(skcipher_algs));
        if (err)
                crypto_unregister_alg(&cipher_alg);
        return err;
}

static void __exit camellia_sparc64_mod_fini(void)
{
        crypto_unregister_alg(&cipher_alg);
        crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}

module_init(camellia_sparc64_mod_init);
module_exit(camellia_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, sparc64 camellia opcode accelerated");

MODULE_ALIAS_CRYPTO("camellia");

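/* crop_devid.c provides the device ID table shared by the sparc64
 * crypto-opcode modules so this module can be autoloaded on matching
 * hardware.
 */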
#include "crop_devid.c"