/* Path: drivers/crypto/ccp/ccp-crypto-aes-xts.c */
// SPDX-License-Identifier: GPL-2.0-only1/*2* AMD Cryptographic Coprocessor (CCP) AES XTS crypto API support3*4* Copyright (C) 2013,2017 Advanced Micro Devices, Inc.5*6* Author: Gary R Hook <[email protected]>7* Author: Tom Lendacky <[email protected]>8*/910#include <linux/module.h>11#include <linux/sched.h>12#include <linux/delay.h>13#include <linux/scatterlist.h>14#include <crypto/aes.h>15#include <crypto/xts.h>16#include <crypto/internal/skcipher.h>17#include <crypto/scatterwalk.h>1819#include "ccp-crypto.h"2021struct ccp_aes_xts_def {22const char *name;23const char *drv_name;24};2526static const struct ccp_aes_xts_def aes_xts_algs[] = {27{28.name = "xts(aes)",29.drv_name = "xts-aes-ccp",30},31};3233struct ccp_unit_size_map {34unsigned int size;35u32 value;36};3738static struct ccp_unit_size_map xts_unit_sizes[] = {39{40.size = 16,41.value = CCP_XTS_AES_UNIT_SIZE_16,42},43{44.size = 512,45.value = CCP_XTS_AES_UNIT_SIZE_512,46},47{48.size = 1024,49.value = CCP_XTS_AES_UNIT_SIZE_1024,50},51{52.size = 2048,53.value = CCP_XTS_AES_UNIT_SIZE_2048,54},55{56.size = 4096,57.value = CCP_XTS_AES_UNIT_SIZE_4096,58},59};6061static int ccp_aes_xts_complete(struct crypto_async_request *async_req, int ret)62{63struct skcipher_request *req = skcipher_request_cast(async_req);64struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req);6566if (ret)67return ret;6869memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);7071return 0;72}7374static int ccp_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,75unsigned int key_len)76{77struct ccp_ctx *ctx = crypto_skcipher_ctx_dma(tfm);78unsigned int ccpversion = ccp_version();79int ret;8081ret = xts_verify_key(tfm, key, key_len);82if (ret)83return ret;8485/* Version 3 devices support 128-bit keys; version 5 devices can86* accommodate 128- and 256-bit keys.87*/88switch (key_len) {89case AES_KEYSIZE_128 * 2:90memcpy(ctx->u.aes.key, key, key_len);91break;92case AES_KEYSIZE_256 * 2:93if (ccpversion > CCP_VERSION(3, 0))94memcpy(ctx->u.aes.key, 
key, key_len);95break;96}97ctx->u.aes.key_len = key_len / 2;98sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);99100return crypto_skcipher_setkey(ctx->u.aes.tfm_skcipher, key, key_len);101}102103static int ccp_aes_xts_crypt(struct skcipher_request *req,104unsigned int encrypt)105{106struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);107struct ccp_ctx *ctx = crypto_skcipher_ctx_dma(tfm);108struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req);109unsigned int ccpversion = ccp_version();110unsigned int fallback = 0;111unsigned int unit;112u32 unit_size;113int ret;114115if (!ctx->u.aes.key_len)116return -EINVAL;117118if (!req->iv)119return -EINVAL;120121/* Check conditions under which the CCP can fulfill a request. The122* device can handle input plaintext of a length that is a multiple123* of the unit_size, bug the crypto implementation only supports124* the unit_size being equal to the input length. This limits the125* number of scenarios we can handle.126*/127unit_size = CCP_XTS_AES_UNIT_SIZE__LAST;128for (unit = 0; unit < ARRAY_SIZE(xts_unit_sizes); unit++) {129if (req->cryptlen == xts_unit_sizes[unit].size) {130unit_size = unit;131break;132}133}134/* The CCP has restrictions on block sizes. 
Also, a version 3 device135* only supports AES-128 operations; version 5 CCPs support both136* AES-128 and -256 operations.137*/138if (unit_size == CCP_XTS_AES_UNIT_SIZE__LAST)139fallback = 1;140if ((ccpversion < CCP_VERSION(5, 0)) &&141(ctx->u.aes.key_len != AES_KEYSIZE_128))142fallback = 1;143if ((ctx->u.aes.key_len != AES_KEYSIZE_128) &&144(ctx->u.aes.key_len != AES_KEYSIZE_256))145fallback = 1;146if (fallback) {147/* Use the fallback to process the request for any148* unsupported unit sizes or key sizes149*/150skcipher_request_set_tfm(&rctx->fallback_req,151ctx->u.aes.tfm_skcipher);152skcipher_request_set_callback(&rctx->fallback_req,153req->base.flags,154req->base.complete,155req->base.data);156skcipher_request_set_crypt(&rctx->fallback_req, req->src,157req->dst, req->cryptlen, req->iv);158ret = encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :159crypto_skcipher_decrypt(&rctx->fallback_req);160return ret;161}162163memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);164sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);165166memset(&rctx->cmd, 0, sizeof(rctx->cmd));167INIT_LIST_HEAD(&rctx->cmd.entry);168rctx->cmd.engine = CCP_ENGINE_XTS_AES_128;169rctx->cmd.u.xts.type = CCP_AES_TYPE_128;170rctx->cmd.u.xts.action = (encrypt) ? 
CCP_AES_ACTION_ENCRYPT171: CCP_AES_ACTION_DECRYPT;172rctx->cmd.u.xts.unit_size = unit_size;173rctx->cmd.u.xts.key = &ctx->u.aes.key_sg;174rctx->cmd.u.xts.key_len = ctx->u.aes.key_len;175rctx->cmd.u.xts.iv = &rctx->iv_sg;176rctx->cmd.u.xts.iv_len = AES_BLOCK_SIZE;177rctx->cmd.u.xts.src = req->src;178rctx->cmd.u.xts.src_len = req->cryptlen;179rctx->cmd.u.xts.dst = req->dst;180181ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);182183return ret;184}185186static int ccp_aes_xts_encrypt(struct skcipher_request *req)187{188return ccp_aes_xts_crypt(req, 1);189}190191static int ccp_aes_xts_decrypt(struct skcipher_request *req)192{193return ccp_aes_xts_crypt(req, 0);194}195196static int ccp_aes_xts_init_tfm(struct crypto_skcipher *tfm)197{198struct ccp_ctx *ctx = crypto_skcipher_ctx_dma(tfm);199struct crypto_skcipher *fallback_tfm;200201ctx->complete = ccp_aes_xts_complete;202ctx->u.aes.key_len = 0;203204fallback_tfm = crypto_alloc_skcipher("xts(aes)", 0,205CRYPTO_ALG_NEED_FALLBACK);206if (IS_ERR(fallback_tfm)) {207pr_warn("could not load fallback driver xts(aes)\n");208return PTR_ERR(fallback_tfm);209}210ctx->u.aes.tfm_skcipher = fallback_tfm;211212crypto_skcipher_set_reqsize_dma(tfm,213sizeof(struct ccp_aes_req_ctx) +214crypto_skcipher_reqsize(fallback_tfm));215216return 0;217}218219static void ccp_aes_xts_exit_tfm(struct crypto_skcipher *tfm)220{221struct ccp_ctx *ctx = crypto_skcipher_ctx_dma(tfm);222223crypto_free_skcipher(ctx->u.aes.tfm_skcipher);224}225226static int ccp_register_aes_xts_alg(struct list_head *head,227const struct ccp_aes_xts_def *def)228{229struct ccp_crypto_skcipher_alg *ccp_alg;230struct skcipher_alg *alg;231int ret;232233ccp_alg = kzalloc(sizeof(*ccp_alg), GFP_KERNEL);234if (!ccp_alg)235return -ENOMEM;236237INIT_LIST_HEAD(&ccp_alg->entry);238239alg = &ccp_alg->alg;240241snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);242snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",243def->drv_name);244alg->base.cra_flags 
= CRYPTO_ALG_ASYNC |245CRYPTO_ALG_ALLOCATES_MEMORY |246CRYPTO_ALG_KERN_DRIVER_ONLY |247CRYPTO_ALG_NEED_FALLBACK;248alg->base.cra_blocksize = AES_BLOCK_SIZE;249alg->base.cra_ctxsize = sizeof(struct ccp_ctx) +250crypto_dma_padding();251alg->base.cra_priority = CCP_CRA_PRIORITY;252alg->base.cra_module = THIS_MODULE;253254alg->setkey = ccp_aes_xts_setkey;255alg->encrypt = ccp_aes_xts_encrypt;256alg->decrypt = ccp_aes_xts_decrypt;257alg->min_keysize = AES_MIN_KEY_SIZE * 2;258alg->max_keysize = AES_MAX_KEY_SIZE * 2;259alg->ivsize = AES_BLOCK_SIZE;260alg->init = ccp_aes_xts_init_tfm;261alg->exit = ccp_aes_xts_exit_tfm;262263ret = crypto_register_skcipher(alg);264if (ret) {265pr_err("%s skcipher algorithm registration error (%d)\n",266alg->base.cra_name, ret);267kfree(ccp_alg);268return ret;269}270271list_add(&ccp_alg->entry, head);272273return 0;274}275276int ccp_register_aes_xts_algs(struct list_head *head)277{278int i, ret;279280for (i = 0; i < ARRAY_SIZE(aes_xts_algs); i++) {281ret = ccp_register_aes_xts_alg(head, &aes_xts_algs[i]);282if (ret)283return ret;284}285286return 0;287}288289290