Path: blob/master/drivers/crypto/ccp/ccp-crypto-aes-cmac.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * AMD Cryptographic Coprocessor (CCP) AES CMAC crypto API support
 *
 * Copyright (C) 2013,2018 Advanced Micro Devices, Inc.
 *
 * Author: Tom Lendacky <[email protected]>
 */

#include <linux/module.h>
#include <linux/sched.h>
#include <linux/delay.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>

#include "ccp-crypto.h"

static int ccp_aes_cmac_complete(struct crypto_async_request *async_req,
				 int ret)
{
	struct ahash_request *req = ahash_request_cast(async_req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req);
	unsigned int digest_size = crypto_ahash_digestsize(tfm);

	if (ret)
		goto e_free;

	if (rctx->hash_rem) {
		/* Save remaining data to buffer */
		unsigned int offset = rctx->nbytes - rctx->hash_rem;

		scatterwalk_map_and_copy(rctx->buf, rctx->src,
					 offset, rctx->hash_rem, 0);
		rctx->buf_count = rctx->hash_rem;
	} else {
		rctx->buf_count = 0;
	}

	/* Update result area if supplied */
	if (req->result && rctx->final)
		memcpy(req->result, rctx->iv, digest_size);

e_free:
	sg_free_table(&rctx->data_sg);

	return ret;
}

static int ccp_do_cmac_update(struct ahash_request *req, unsigned int nbytes,
			      unsigned int final)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ccp_ctx *ctx = crypto_ahash_ctx_dma(tfm);
	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req);
	struct scatterlist *sg, *cmac_key_sg = NULL;
	unsigned int block_size =
		crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
	unsigned int need_pad, sg_count;
	gfp_t gfp;
	u64 len;
	int ret;

	if (!ctx->u.aes.key_len)
		return -EINVAL;

	if (nbytes)
		rctx->null_msg = 0;

	len = (u64)rctx->buf_count + (u64)nbytes;

	if (!final && (len <= block_size)) {
		scatterwalk_map_and_copy(rctx->buf + rctx->buf_count, req->src,
					 0, nbytes, 0);
		rctx->buf_count += nbytes;

		return 0;
	}

	rctx->src = req->src;
	rctx->nbytes = nbytes;

	rctx->final = final;
	rctx->hash_rem = final ? 0 : len & (block_size - 1);
	rctx->hash_cnt = len - rctx->hash_rem;
	if (!final && !rctx->hash_rem) {
		/* CCP can't do zero length final, so keep some data around */
		rctx->hash_cnt -= block_size;
		rctx->hash_rem = block_size;
	}

	if (final && (rctx->null_msg || (len & (block_size - 1))))
		need_pad = 1;
	else
		need_pad = 0;

	sg_init_one(&rctx->iv_sg, rctx->iv, sizeof(rctx->iv));

	/* Build the data scatterlist table - allocate enough entries for all
	 * possible data pieces (buffer, input data, padding)
	 */
	sg_count = (nbytes) ? sg_nents(req->src) + 2 : 2;
	gfp = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
		GFP_KERNEL : GFP_ATOMIC;
	ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp);
	if (ret)
		return ret;

	sg = NULL;
	if (rctx->buf_count) {
		sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);
		sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg);
		if (!sg) {
			ret = -EINVAL;
			goto e_free;
		}
	}

	if (nbytes) {
		sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src);
		if (!sg) {
			ret = -EINVAL;
			goto e_free;
		}
	}

	if (need_pad) {
		int pad_length = block_size - (len & (block_size - 1));

		rctx->hash_cnt += pad_length;

		memset(rctx->pad, 0, sizeof(rctx->pad));
		rctx->pad[0] = 0x80;
		sg_init_one(&rctx->pad_sg, rctx->pad, pad_length);
		sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->pad_sg);
		if (!sg) {
			ret = -EINVAL;
			goto e_free;
		}
	}
	if (sg) {
		sg_mark_end(sg);
		sg = rctx->data_sg.sgl;
	}

	/* Initialize the K1/K2 scatterlist */
	if (final)
		cmac_key_sg = (need_pad) ? &ctx->u.aes.k2_sg
					 : &ctx->u.aes.k1_sg;

	memset(&rctx->cmd, 0, sizeof(rctx->cmd));
	INIT_LIST_HEAD(&rctx->cmd.entry);
	rctx->cmd.engine = CCP_ENGINE_AES;
	rctx->cmd.u.aes.type = ctx->u.aes.type;
	rctx->cmd.u.aes.mode = ctx->u.aes.mode;
	rctx->cmd.u.aes.action = CCP_AES_ACTION_ENCRYPT;
	rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;
	rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;
	rctx->cmd.u.aes.iv = &rctx->iv_sg;
	rctx->cmd.u.aes.iv_len = AES_BLOCK_SIZE;
	rctx->cmd.u.aes.src = sg;
	rctx->cmd.u.aes.src_len = rctx->hash_cnt;
	rctx->cmd.u.aes.dst = NULL;
	rctx->cmd.u.aes.cmac_key = cmac_key_sg;
	rctx->cmd.u.aes.cmac_key_len = ctx->u.aes.kn_len;
	rctx->cmd.u.aes.cmac_final = final;

	ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);

	return ret;

e_free:
	sg_free_table(&rctx->data_sg);

	return ret;
}

static int ccp_aes_cmac_init(struct ahash_request *req)
{
	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req);

	memset(rctx, 0, sizeof(*rctx));

	rctx->null_msg = 1;

	return 0;
}

static int ccp_aes_cmac_update(struct ahash_request *req)
{
	return ccp_do_cmac_update(req, req->nbytes, 0);
}

static int ccp_aes_cmac_final(struct ahash_request *req)
{
	return ccp_do_cmac_update(req, 0, 1);
}

static int ccp_aes_cmac_finup(struct ahash_request *req)
{
	return ccp_do_cmac_update(req, req->nbytes, 1);
}

static int ccp_aes_cmac_digest(struct ahash_request *req)
{
	int ret;

	ret = ccp_aes_cmac_init(req);
	if (ret)
		return ret;

	return ccp_aes_cmac_finup(req);
}

static int ccp_aes_cmac_export(struct ahash_request *req, void *out)
{
	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req);
	struct ccp_aes_cmac_exp_ctx state;

	/* Don't let anything leak to 'out' */
	memset(&state, 0, sizeof(state));

	state.null_msg = rctx->null_msg;
	memcpy(state.iv, rctx->iv, sizeof(state.iv));
	state.buf_count = rctx->buf_count;
	memcpy(state.buf, rctx->buf, sizeof(state.buf));

	/* 'out' may not be aligned so memcpy from local variable */
	memcpy(out, &state, sizeof(state));

	return 0;
}

static int ccp_aes_cmac_import(struct ahash_request *req, const void *in)
{
	struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req);
	struct ccp_aes_cmac_exp_ctx state;

	/* 'in' may not be aligned so memcpy to local variable */
	memcpy(&state, in, sizeof(state));

	memset(rctx, 0, sizeof(*rctx));
	rctx->null_msg = state.null_msg;
	memcpy(rctx->iv, state.iv, sizeof(rctx->iv));
	rctx->buf_count = state.buf_count;
	memcpy(rctx->buf, state.buf, sizeof(rctx->buf));

	return 0;
}

static int ccp_aes_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
			       unsigned int key_len)
{
	struct ccp_ctx *ctx = crypto_ahash_ctx_dma(tfm);
	struct ccp_crypto_ahash_alg *alg =
		ccp_crypto_ahash_alg(crypto_ahash_tfm(tfm));
	u64 k0_hi, k0_lo, k1_hi, k1_lo, k2_hi, k2_lo;
	u64 rb_hi = 0x00, rb_lo = 0x87;
	struct crypto_aes_ctx aes;
	__be64 *gk;
	int ret;

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->u.aes.type = CCP_AES_TYPE_128;
		break;
	case AES_KEYSIZE_192:
		ctx->u.aes.type = CCP_AES_TYPE_192;
		break;
	case AES_KEYSIZE_256:
		ctx->u.aes.type = CCP_AES_TYPE_256;
		break;
	default:
		return -EINVAL;
	}
	ctx->u.aes.mode = alg->mode;

	/* Set to zero until complete */
	ctx->u.aes.key_len = 0;

	/* Set the key for the AES cipher used to generate the keys */
	ret = aes_expandkey(&aes, key, key_len);
	if (ret)
		return ret;

	/* Encrypt a block of zeroes - use key area in context */
	memset(ctx->u.aes.key, 0, sizeof(ctx->u.aes.key));
	aes_encrypt(&aes, ctx->u.aes.key, ctx->u.aes.key);
	memzero_explicit(&aes, sizeof(aes));

	/* Generate K1 and K2 */
	k0_hi = be64_to_cpu(*((__be64 *)ctx->u.aes.key));
	k0_lo = be64_to_cpu(*((__be64 *)ctx->u.aes.key + 1));

	k1_hi = (k0_hi << 1) | (k0_lo >> 63);
	k1_lo = k0_lo << 1;
	if (ctx->u.aes.key[0] & 0x80) {
		k1_hi ^= rb_hi;
		k1_lo ^= rb_lo;
	}
	gk = (__be64 *)ctx->u.aes.k1;
	*gk = cpu_to_be64(k1_hi);
	gk++;
	*gk = cpu_to_be64(k1_lo);

	k2_hi = (k1_hi << 1) | (k1_lo >> 63);
	k2_lo = k1_lo << 1;
	if (ctx->u.aes.k1[0] & 0x80) {
		k2_hi ^= rb_hi;
		k2_lo ^= rb_lo;
	}
	gk = (__be64 *)ctx->u.aes.k2;
	*gk = cpu_to_be64(k2_hi);
	gk++;
	*gk = cpu_to_be64(k2_lo);

	ctx->u.aes.kn_len = sizeof(ctx->u.aes.k1);
	sg_init_one(&ctx->u.aes.k1_sg, ctx->u.aes.k1, sizeof(ctx->u.aes.k1));
	sg_init_one(&ctx->u.aes.k2_sg, ctx->u.aes.k2, sizeof(ctx->u.aes.k2));

	/* Save the supplied key */
	memset(ctx->u.aes.key, 0, sizeof(ctx->u.aes.key));
	memcpy(ctx->u.aes.key, key, key_len);
	ctx->u.aes.key_len = key_len;
	sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);

	return ret;
}

static int ccp_aes_cmac_cra_init(struct crypto_tfm *tfm)
{
	struct ccp_ctx *ctx = crypto_tfm_ctx_dma(tfm);
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);

	ctx->complete = ccp_aes_cmac_complete;
	ctx->u.aes.key_len = 0;

	crypto_ahash_set_reqsize_dma(ahash,
				     sizeof(struct ccp_aes_cmac_req_ctx));

	return 0;
}

int ccp_register_aes_cmac_algs(struct list_head *head)
{
	struct ccp_crypto_ahash_alg *ccp_alg;
	struct ahash_alg *alg;
	struct hash_alg_common *halg;
	struct crypto_alg *base;
	int ret;

	ccp_alg = kzalloc(sizeof(*ccp_alg), GFP_KERNEL);
	if (!ccp_alg)
		return -ENOMEM;

	INIT_LIST_HEAD(&ccp_alg->entry);
	ccp_alg->mode = CCP_AES_MODE_CMAC;

	alg = &ccp_alg->alg;
	alg->init = ccp_aes_cmac_init;
	alg->update = ccp_aes_cmac_update;
	alg->final = ccp_aes_cmac_final;
	alg->finup = ccp_aes_cmac_finup;
	alg->digest = ccp_aes_cmac_digest;
	alg->export = ccp_aes_cmac_export;
	alg->import = ccp_aes_cmac_import;
	alg->setkey = ccp_aes_cmac_setkey;

	halg = &alg->halg;
	halg->digestsize = AES_BLOCK_SIZE;
	halg->statesize = sizeof(struct ccp_aes_cmac_exp_ctx);

	base = &halg->base;
	snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME, "cmac(aes)");
	snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME, "cmac-aes-ccp");
	base->cra_flags = CRYPTO_ALG_ASYNC |
			  CRYPTO_ALG_ALLOCATES_MEMORY |
			  CRYPTO_ALG_KERN_DRIVER_ONLY |
			  CRYPTO_ALG_NEED_FALLBACK;
	base->cra_blocksize = AES_BLOCK_SIZE;
	base->cra_ctxsize = sizeof(struct ccp_ctx) + crypto_dma_padding();
	base->cra_priority = CCP_CRA_PRIORITY;
	base->cra_init = ccp_aes_cmac_cra_init;
	base->cra_module = THIS_MODULE;

	ret = crypto_register_ahash(alg);
	if (ret) {
		pr_err("%s ahash algorithm registration error (%d)\n",
		       base->cra_name, ret);
		kfree(ccp_alg);
		return ret;
	}

	list_add(&ccp_alg->entry, head);

	return 0;
}