GitHub Repository: awilliam/linux-vfio
Path: blob/master/arch/x86/crypto/ghash-clmulni-intel_glue.c
/*
 * Accelerated GHASH implementation with Intel PCLMULQDQ-NI
 * instructions. This file contains glue code.
 *
 * Copyright (c) 2009 Intel Corp.
 * Author: Huang Ying <[email protected]>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/cryptd.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/hash.h>
#include <asm/i387.h>

#define GHASH_BLOCK_SIZE 16
#define GHASH_DIGEST_SIZE 16

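/*
 * These routines are implemented in assembly (ghash-clmulni-intel_asm.S
 * in the kernel tree): a single carry-less GF(2^128) multiply of the
 * current state by the hash key, a bulk update over whole 16-byte
 * blocks, and key setup.
 */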
void clmul_ghash_mul(char *dst, const be128 *shash);

void clmul_ghash_update(char *dst, const char *src, unsigned int srclen,
                        const be128 *shash);

void clmul_ghash_setkey(be128 *shash, const u8 *key);

struct ghash_async_ctx {
        struct cryptd_ahash *cryptd_tfm;
};

struct ghash_ctx {
        be128 shash;
};

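/*
 * Per-request state: buffer holds the running GHASH value; input is
 * XORed into it and multiplied by the key once a full block has been
 * absorbed. bytes counts how many bytes are still needed to complete
 * the current block (0 means no partial block is pending).
 */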
struct ghash_desc_ctx {
        u8 buffer[GHASH_BLOCK_SIZE];
        u32 bytes;
};

static int ghash_init(struct shash_desc *desc)
{
        struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

        memset(dctx, 0, sizeof(*dctx));

        return 0;
}

static int ghash_setkey(struct crypto_shash *tfm,
                        const u8 *key, unsigned int keylen)
{
        struct ghash_ctx *ctx = crypto_shash_ctx(tfm);

        if (keylen != GHASH_BLOCK_SIZE) {
                crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        clmul_ghash_setkey(&ctx->shash, key);

        return 0;
}

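/*
 * Finish any partial block carried over from the previous call, then
 * hand all whole 16-byte blocks to the assembly routine. PCLMULQDQ
 * uses the XMM registers, so the SIMD work is bracketed by
 * kernel_fpu_begin()/kernel_fpu_end(); leftover tail bytes are XORed
 * into the buffer to await the next update or the final flush.
 */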
static int ghash_update(struct shash_desc *desc,
                        const u8 *src, unsigned int srclen)
{
        struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
        struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
        u8 *dst = dctx->buffer;

        kernel_fpu_begin();
        if (dctx->bytes) {
                int n = min(srclen, dctx->bytes);
                u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

                dctx->bytes -= n;
                srclen -= n;

                while (n--)
                        *pos++ ^= *src++;

                if (!dctx->bytes)
                        clmul_ghash_mul(dst, &ctx->shash);
        }

        clmul_ghash_update(dst, src, srclen, &ctx->shash);
        kernel_fpu_end();

        if (srclen & 0xf) {
                src += srclen - (srclen & 0xf);
                srclen &= 0xf;
                dctx->bytes = GHASH_BLOCK_SIZE - srclen;
                while (srclen--)
                        *dst++ ^= *src++;
        }

        return 0;
}

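/*
 * Complete a pending partial block before the digest is read out.
 * GHASH zero-pads the last block, and XOR with zero is a no-op, so
 * effectively only the closing multiply by the hash key is performed.
 */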
static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
{
        u8 *dst = dctx->buffer;

        if (dctx->bytes) {
                u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

                while (dctx->bytes--)
                        *tmp++ ^= 0;

                kernel_fpu_begin();
                clmul_ghash_mul(dst, &ctx->shash);
                kernel_fpu_end();
        }

        dctx->bytes = 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
        struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
        struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
        u8 *buf = dctx->buffer;

        ghash_flush(ctx, dctx);
        memcpy(dst, buf, GHASH_BLOCK_SIZE);

        return 0;
}

static struct shash_alg ghash_alg = {
        .digestsize = GHASH_DIGEST_SIZE,
        .init = ghash_init,
        .update = ghash_update,
        .final = ghash_final,
        .setkey = ghash_setkey,
        .descsize = sizeof(struct ghash_desc_ctx),
        .base = {
                .cra_name = "__ghash",
                .cra_driver_name = "__ghash-pclmulqdqni",
                .cra_priority = 0,
                .cra_flags = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize = GHASH_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct ghash_ctx),
                .cra_module = THIS_MODULE,
                .cra_list = LIST_HEAD_INIT(ghash_alg.base.cra_list),
        },
};

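/*
 * Async wrappers: PCLMULQDQ needs the FPU, which is not usable in every
 * context (e.g. in an interrupt that caught a task mid-FPU-use). When
 * irq_fpu_usable() fails, the request is queued to cryptd and processed
 * later in process context; otherwise the synchronous shash above is
 * invoked directly.
 */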
static int ghash_async_init(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

        if (!irq_fpu_usable()) {
                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_init(cryptd_req);
        } else {
                struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
                struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

                desc->tfm = child;
                desc->flags = req->base.flags;
                return crypto_shash_init(desc);
        }
}

static int ghash_async_update(struct ahash_request *req)
{
        struct ahash_request *cryptd_req = ahash_request_ctx(req);

        if (!irq_fpu_usable()) {
                struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
                struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
                struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_update(cryptd_req);
        } else {
                struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
                return shash_ahash_update(req, desc);
        }
}

static int ghash_async_final(struct ahash_request *req)
{
        struct ahash_request *cryptd_req = ahash_request_ctx(req);

        if (!irq_fpu_usable()) {
                struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
                struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
                struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_final(cryptd_req);
        } else {
                struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
                return crypto_shash_final(desc, req->result);
        }
}

static int ghash_async_digest(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

        if (!irq_fpu_usable()) {
                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_digest(cryptd_req);
        } else {
                struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
                struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

                desc->tfm = child;
                desc->flags = req->base.flags;
                return shash_ahash_digest(req, desc);
        }
}

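/*
 * Key setup is always delegated to the child transform: request flags
 * are propagated down, and result flags (e.g. a bad key length) are
 * copied back so the caller sees them on the outer tfm.
 */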
static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct crypto_ahash *child = &ctx->cryptd_tfm->base;
        int err;

        crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
                               & CRYPTO_TFM_REQ_MASK);
        err = crypto_ahash_setkey(child, key, keylen);
        crypto_ahash_set_flags(tfm, crypto_ahash_get_flags(child)
                               & CRYPTO_TFM_RES_MASK);

        return err;
}

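/*
 * Instantiate the internal "__ghash-pclmulqdqni" shash behind a cryptd
 * wrapper and size the request context so the cryptd request (or shash
 * descriptor) fits inside the caller's ahash request.
 */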
static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
        struct cryptd_ahash *cryptd_tfm;
        struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ctx->cryptd_tfm = cryptd_tfm;
        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct ahash_request) +
                                 crypto_ahash_reqsize(&cryptd_tfm->base));

        return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
        struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_free_ahash(ctx->cryptd_tfm);
}

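/*
 * The user-visible "ghash" algorithm; its priority of 400 lets it win
 * over lower-priority implementations such as the generic C one.
 */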
static struct ahash_alg ghash_async_alg = {
        .init = ghash_async_init,
        .update = ghash_async_update,
        .final = ghash_async_final,
        .setkey = ghash_async_setkey,
        .digest = ghash_async_digest,
        .halg = {
                .digestsize = GHASH_DIGEST_SIZE,
                .base = {
                        .cra_name = "ghash",
                        .cra_driver_name = "ghash-clmulni",
                        .cra_priority = 400,
                        .cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
                        .cra_blocksize = GHASH_BLOCK_SIZE,
                        .cra_type = &crypto_ahash_type,
                        .cra_module = THIS_MODULE,
                        .cra_list = LIST_HEAD_INIT(ghash_async_alg.halg.base.cra_list),
                        .cra_init = ghash_async_init_tfm,
                        .cra_exit = ghash_async_exit_tfm,
                },
        },
};

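/*
 * Refuse to load unless the CPU advertises PCLMULQDQ, then register the
 * internal shash first and the async wrapper on top of it.
 */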
static int __init ghash_pclmulqdqni_mod_init(void)
{
        int err;

        if (!cpu_has_pclmulqdq) {
                printk(KERN_INFO "Intel PCLMULQDQ-NI instructions are not"
                       " detected.\n");
                return -ENODEV;
        }

        err = crypto_register_shash(&ghash_alg);
        if (err)
                goto err_out;
        err = crypto_register_ahash(&ghash_async_alg);
        if (err)
                goto err_shash;

        return 0;

err_shash:
        crypto_unregister_shash(&ghash_alg);
err_out:
        return err;
}

static void __exit ghash_pclmulqdqni_mod_exit(void)
{
        crypto_unregister_ahash(&ghash_async_alg);
        crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_pclmulqdqni_mod_init);
module_exit(ghash_pclmulqdqni_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH Message Digest Algorithm, "
                   "accelerated by PCLMULQDQ-NI");
MODULE_ALIAS("ghash");