Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
torvalds
GitHub Repository: torvalds/linux
Path: blob/master/lib/crypto/sparc/aes.h
121833 views
1
/* SPDX-License-Identifier: GPL-2.0-only */
2
/*
3
* AES accelerated using the sparc64 aes opcodes
4
*
5
* Copyright (C) 2008, Intel Corp.
6
* Copyright (c) 2010, Intel Corporation.
7
* Copyright 2026 Google LLC
8
*/
9
10
#include <asm/fpumacro.h>
11
#include <asm/opcodes.h>
12
#include <asm/pstate.h>
13
#include <asm/elf.h>
14
15
/*
 * True when the CPU supports the sparc64 AES opcodes; enabled once at
 * init time by aes_mod_init_arch() below and never changed afterwards.
 */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_aes_opcodes);

/*
 * Assembly routines (key expansion, key loading, and the ECB/CBC/CTR
 * multi-block primitives), exported for use by other kernel crypto code.
 */
EXPORT_SYMBOL_GPL(aes_sparc64_key_expand);
EXPORT_SYMBOL_GPL(aes_sparc64_load_encrypt_keys_128);
EXPORT_SYMBOL_GPL(aes_sparc64_load_encrypt_keys_192);
EXPORT_SYMBOL_GPL(aes_sparc64_load_encrypt_keys_256);
EXPORT_SYMBOL_GPL(aes_sparc64_load_decrypt_keys_128);
EXPORT_SYMBOL_GPL(aes_sparc64_load_decrypt_keys_192);
EXPORT_SYMBOL_GPL(aes_sparc64_load_decrypt_keys_256);
EXPORT_SYMBOL_GPL(aes_sparc64_ecb_encrypt_128);
EXPORT_SYMBOL_GPL(aes_sparc64_ecb_encrypt_192);
EXPORT_SYMBOL_GPL(aes_sparc64_ecb_encrypt_256);
EXPORT_SYMBOL_GPL(aes_sparc64_ecb_decrypt_128);
EXPORT_SYMBOL_GPL(aes_sparc64_ecb_decrypt_192);
EXPORT_SYMBOL_GPL(aes_sparc64_ecb_decrypt_256);
EXPORT_SYMBOL_GPL(aes_sparc64_cbc_encrypt_128);
EXPORT_SYMBOL_GPL(aes_sparc64_cbc_encrypt_192);
EXPORT_SYMBOL_GPL(aes_sparc64_cbc_encrypt_256);
EXPORT_SYMBOL_GPL(aes_sparc64_cbc_decrypt_128);
EXPORT_SYMBOL_GPL(aes_sparc64_cbc_decrypt_192);
EXPORT_SYMBOL_GPL(aes_sparc64_cbc_decrypt_256);
EXPORT_SYMBOL_GPL(aes_sparc64_ctr_crypt_128);
EXPORT_SYMBOL_GPL(aes_sparc64_ctr_crypt_192);
EXPORT_SYMBOL_GPL(aes_sparc64_ctr_crypt_256);

/*
 * Single-block encrypt/decrypt assembly routines, one per key size.
 * NOTE(review): the assembly presumably requires 4-byte aligned
 * input/output (callers below bounce misaligned buffers) — confirm
 * against the .S file.
 */
void aes_sparc64_encrypt_128(const u64 *key, const u32 *input, u32 *output);
void aes_sparc64_encrypt_192(const u64 *key, const u32 *input, u32 *output);
void aes_sparc64_encrypt_256(const u64 *key, const u32 *input, u32 *output);
void aes_sparc64_decrypt_128(const u64 *key, const u32 *input, u32 *output);
void aes_sparc64_decrypt_192(const u64 *key, const u32 *input, u32 *output);
void aes_sparc64_decrypt_256(const u64 *key, const u32 *input, u32 *output);
46
47
static void aes_preparekey_arch(union aes_enckey_arch *k,
48
union aes_invkey_arch *inv_k,
49
const u8 *in_key, int key_len, int nrounds)
50
{
51
if (static_branch_likely(&have_aes_opcodes)) {
52
u32 aligned_key[AES_MAX_KEY_SIZE / 4];
53
54
if (IS_ALIGNED((uintptr_t)in_key, 4)) {
55
aes_sparc64_key_expand((const u32 *)in_key,
56
k->sparc_rndkeys, key_len);
57
} else {
58
memcpy(aligned_key, in_key, key_len);
59
aes_sparc64_key_expand(aligned_key,
60
k->sparc_rndkeys, key_len);
61
memzero_explicit(aligned_key, key_len);
62
}
63
/*
64
* Note that nothing needs to be written to inv_k (if it's
65
* non-NULL) here, since the SPARC64 assembly code uses
66
* k->sparc_rndkeys for both encryption and decryption.
67
*/
68
} else {
69
aes_expandkey_generic(k->rndkeys,
70
inv_k ? inv_k->inv_rndkeys : NULL,
71
in_key, key_len);
72
}
73
}
74
75
/* Encrypt one block, dispatching on key size to the matching asm routine. */
static void aes_sparc64_encrypt(const struct aes_enckey *key,
				const u32 *input, u32 *output)
{
	switch (key->len) {
	case AES_KEYSIZE_128:
		aes_sparc64_encrypt_128(key->k.sparc_rndkeys, input, output);
		break;
	case AES_KEYSIZE_192:
		aes_sparc64_encrypt_192(key->k.sparc_rndkeys, input, output);
		break;
	default:
		aes_sparc64_encrypt_256(key->k.sparc_rndkeys, input, output);
		break;
	}
}
85
86
static void aes_encrypt_arch(const struct aes_enckey *key,
87
u8 out[AES_BLOCK_SIZE],
88
const u8 in[AES_BLOCK_SIZE])
89
{
90
u32 bounce_buf[AES_BLOCK_SIZE / 4];
91
92
if (static_branch_likely(&have_aes_opcodes)) {
93
if (IS_ALIGNED((uintptr_t)in | (uintptr_t)out, 4)) {
94
aes_sparc64_encrypt(key, (const u32 *)in, (u32 *)out);
95
} else {
96
memcpy(bounce_buf, in, AES_BLOCK_SIZE);
97
aes_sparc64_encrypt(key, bounce_buf, bounce_buf);
98
memcpy(out, bounce_buf, AES_BLOCK_SIZE);
99
}
100
} else {
101
aes_encrypt_generic(key->k.rndkeys, key->nrounds, out, in);
102
}
103
}
104
105
/* Decrypt one block, dispatching on key size to the matching asm routine. */
static void aes_sparc64_decrypt(const struct aes_key *key,
				const u32 *input, u32 *output)
{
	switch (key->len) {
	case AES_KEYSIZE_128:
		aes_sparc64_decrypt_128(key->k.sparc_rndkeys, input, output);
		break;
	case AES_KEYSIZE_192:
		aes_sparc64_decrypt_192(key->k.sparc_rndkeys, input, output);
		break;
	default:
		aes_sparc64_decrypt_256(key->k.sparc_rndkeys, input, output);
		break;
	}
}
115
116
static void aes_decrypt_arch(const struct aes_key *key,
117
u8 out[AES_BLOCK_SIZE],
118
const u8 in[AES_BLOCK_SIZE])
119
{
120
u32 bounce_buf[AES_BLOCK_SIZE / 4];
121
122
if (static_branch_likely(&have_aes_opcodes)) {
123
if (IS_ALIGNED((uintptr_t)in | (uintptr_t)out, 4)) {
124
aes_sparc64_decrypt(key, (const u32 *)in, (u32 *)out);
125
} else {
126
memcpy(bounce_buf, in, AES_BLOCK_SIZE);
127
aes_sparc64_decrypt(key, bounce_buf, bounce_buf);
128
memcpy(out, bounce_buf, AES_BLOCK_SIZE);
129
}
130
} else {
131
aes_decrypt_generic(key->inv_k.inv_rndkeys, key->nrounds,
132
out, in);
133
}
134
}
135
136
#define aes_mod_init_arch aes_mod_init_arch
137
static void aes_mod_init_arch(void)
138
{
139
unsigned long cfr;
140
141
if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
142
return;
143
144
__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
145
if (!(cfr & CFR_AES))
146
return;
147
148
static_branch_enable(&have_aes_opcodes);
149
}
150
151