/* drivers/crypto/marvell/octeontx2/otx2_cptvf_algs.c */
// SPDX-License-Identifier: GPL-2.0-only1/* Copyright (C) 2020 Marvell. */23#include <crypto/aes.h>4#include <crypto/authenc.h>5#include <crypto/cryptd.h>6#include <crypto/des.h>7#include <crypto/internal/aead.h>8#include <crypto/sha1.h>9#include <crypto/sha2.h>10#include <crypto/xts.h>11#include <crypto/gcm.h>12#include <crypto/scatterwalk.h>13#include <linux/sort.h>14#include <linux/module.h>15#include "otx2_cptvf.h"16#include "otx2_cptvf_algs.h"17#include "otx2_cpt_reqmgr.h"18#include "cn10k_cpt.h"1920/* Size of salt in AES GCM mode */21#define AES_GCM_SALT_SIZE 422/* Size of IV in AES GCM mode */23#define AES_GCM_IV_SIZE 824/* Size of ICV (Integrity Check Value) in AES GCM mode */25#define AES_GCM_ICV_SIZE 1626/* Offset of IV in AES GCM mode */27#define AES_GCM_IV_OFFSET 828#define CONTROL_WORD_LEN 829#define KEY2_OFFSET 4830#define DMA_MODE_FLAG(dma_mode) \31(((dma_mode) == OTX2_CPT_DMA_MODE_SG) ? (1 << 7) : 0)3233/* Truncated SHA digest size */34#define SHA1_TRUNC_DIGEST_SIZE 1235#define SHA256_TRUNC_DIGEST_SIZE 1636#define SHA384_TRUNC_DIGEST_SIZE 2437#define SHA512_TRUNC_DIGEST_SIZE 323839static DEFINE_MUTEX(mutex);40static int is_crypto_registered;4142struct cpt_device_desc {43struct pci_dev *dev;44int num_queues;45};4647struct cpt_device_table {48atomic_t count;49struct cpt_device_desc desc[OTX2_CPT_MAX_LFS_NUM];50};5152static struct cpt_device_table se_devices = {53.count = ATOMIC_INIT(0)54};5556static struct otx2_cpt_sdesc *alloc_sdesc(struct crypto_shash *alg);5758static inline int get_se_device(struct pci_dev **pdev, int *cpu_num)59{60int count;6162count = atomic_read(&se_devices.count);63if (count < 1)64return -ENODEV;6566*cpu_num = get_cpu();67/*68* On OcteonTX2 platform CPT instruction queue is bound to each69* local function LF, in turn LFs can be attached to PF70* or VF therefore we always use first device. 
We get maximum71* performance if one CPT queue is available for each cpu72* otherwise CPT queues need to be shared between cpus.73*/74if (*cpu_num >= se_devices.desc[0].num_queues)75*cpu_num %= se_devices.desc[0].num_queues;76*pdev = se_devices.desc[0].dev;7778put_cpu();7980return 0;81}8283static inline int validate_hmac_cipher_null(struct otx2_cpt_req_info *cpt_req)84{85struct otx2_cpt_req_ctx *rctx;86struct aead_request *req;87struct crypto_aead *tfm;8889req = container_of(cpt_req->areq, struct aead_request, base);90tfm = crypto_aead_reqtfm(req);91rctx = aead_request_ctx_dma(req);92if (memcmp(rctx->fctx.hmac.s.hmac_calc,93rctx->fctx.hmac.s.hmac_recv,94crypto_aead_authsize(tfm)) != 0)95return -EBADMSG;9697return 0;98}99100static void otx2_cpt_aead_callback(int status, void *arg1, void *arg2)101{102struct otx2_cpt_inst_info *inst_info = arg2;103struct crypto_async_request *areq = arg1;104struct otx2_cpt_req_info *cpt_req;105struct pci_dev *pdev;106107if (inst_info) {108cpt_req = inst_info->req;109if (!status) {110/*111* When selected cipher is NULL we need to manually112* verify whether calculated hmac value matches113* received hmac value114*/115if (cpt_req->req_type ==116OTX2_CPT_AEAD_ENC_DEC_NULL_REQ &&117!cpt_req->is_enc)118status = validate_hmac_cipher_null(cpt_req);119}120pdev = inst_info->pdev;121otx2_cpt_info_destroy(pdev, inst_info);122}123if (areq)124crypto_request_complete(areq, status);125}126127static void output_iv_copyback(struct crypto_async_request *areq)128{129struct otx2_cpt_req_info *req_info;130struct otx2_cpt_req_ctx *rctx;131struct skcipher_request *sreq;132struct crypto_skcipher *stfm;133struct otx2_cpt_enc_ctx *ctx;134u32 start, ivsize;135136sreq = container_of(areq, struct skcipher_request, base);137stfm = crypto_skcipher_reqtfm(sreq);138ctx = crypto_skcipher_ctx(stfm);139if (ctx->cipher_type == OTX2_CPT_AES_CBC ||140ctx->cipher_type == OTX2_CPT_DES3_CBC) {141rctx = skcipher_request_ctx_dma(sreq);142req_info = &rctx->cpt_req;143ivsize = 
crypto_skcipher_ivsize(stfm);144start = sreq->cryptlen - ivsize;145146if (req_info->is_enc) {147scatterwalk_map_and_copy(sreq->iv, sreq->dst, start,148ivsize, 0);149} else {150if (sreq->src != sreq->dst) {151scatterwalk_map_and_copy(sreq->iv, sreq->src,152start, ivsize, 0);153} else {154memcpy(sreq->iv, req_info->iv_out, ivsize);155kfree(req_info->iv_out);156}157}158}159}160161static void otx2_cpt_skcipher_callback(int status, void *arg1, void *arg2)162{163struct otx2_cpt_inst_info *inst_info = arg2;164struct crypto_async_request *areq = arg1;165struct pci_dev *pdev;166167if (areq) {168if (!status)169output_iv_copyback(areq);170if (inst_info) {171pdev = inst_info->pdev;172otx2_cpt_info_destroy(pdev, inst_info);173}174crypto_request_complete(areq, status);175}176}177178static inline void update_input_data(struct otx2_cpt_req_info *req_info,179struct scatterlist *inp_sg,180u32 nbytes, u32 *argcnt)181{182req_info->req.dlen += nbytes;183184while (nbytes) {185u32 len = (nbytes < inp_sg->length) ? nbytes : inp_sg->length;186u8 *ptr = sg_virt(inp_sg);187188req_info->in[*argcnt].vptr = (void *)ptr;189req_info->in[*argcnt].size = len;190nbytes -= len;191++(*argcnt);192inp_sg = sg_next(inp_sg);193}194}195196static inline void update_output_data(struct otx2_cpt_req_info *req_info,197struct scatterlist *outp_sg,198u32 offset, u32 nbytes, u32 *argcnt)199{200u32 len, sg_len;201u8 *ptr;202203req_info->rlen += nbytes;204205while (nbytes) {206sg_len = outp_sg->length - offset;207len = (nbytes < sg_len) ? 
nbytes : sg_len;208ptr = sg_virt(outp_sg);209210req_info->out[*argcnt].vptr = (void *) (ptr + offset);211req_info->out[*argcnt].size = len;212nbytes -= len;213++(*argcnt);214offset = 0;215outp_sg = sg_next(outp_sg);216}217}218219static inline int create_ctx_hdr(struct skcipher_request *req, u32 enc,220u32 *argcnt)221{222struct crypto_skcipher *stfm = crypto_skcipher_reqtfm(req);223struct otx2_cpt_req_ctx *rctx = skcipher_request_ctx_dma(req);224struct otx2_cpt_enc_ctx *ctx = crypto_skcipher_ctx(stfm);225struct otx2_cpt_req_info *req_info = &rctx->cpt_req;226struct otx2_cpt_fc_ctx *fctx = &rctx->fctx;227int ivsize = crypto_skcipher_ivsize(stfm);228u32 start = req->cryptlen - ivsize;229gfp_t flags;230231flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?232GFP_KERNEL : GFP_ATOMIC;233req_info->ctrl.s.dma_mode = OTX2_CPT_DMA_MODE_SG;234req_info->ctrl.s.se_req = 1;235236req_info->req.opcode.s.major = OTX2_CPT_MAJOR_OP_FC |237DMA_MODE_FLAG(OTX2_CPT_DMA_MODE_SG);238if (enc) {239req_info->req.opcode.s.minor = 2;240} else {241req_info->req.opcode.s.minor = 3;242if ((ctx->cipher_type == OTX2_CPT_AES_CBC ||243ctx->cipher_type == OTX2_CPT_DES3_CBC) &&244req->src == req->dst) {245req_info->iv_out = kmalloc(ivsize, flags);246if (!req_info->iv_out)247return -ENOMEM;248249scatterwalk_map_and_copy(req_info->iv_out, req->src,250start, ivsize, 0);251}252}253/* Encryption data length */254req_info->req.param1 = req->cryptlen;255/* Authentication data length */256req_info->req.param2 = 0;257258fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type;259fctx->enc.enc_ctrl.e.aes_key = ctx->key_type;260fctx->enc.enc_ctrl.e.iv_source = OTX2_CPT_FROM_CPTR;261262if (ctx->cipher_type == OTX2_CPT_AES_XTS)263memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2);264else265memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len);266267memcpy(fctx->enc.encr_iv, req->iv, crypto_skcipher_ivsize(stfm));268269cpu_to_be64s(&fctx->enc.enc_ctrl.u);270271/*272* Storing Packet Data Information in 
offset273* Control Word First 8 bytes274*/275req_info->in[*argcnt].vptr = (u8 *)&rctx->ctrl_word;276req_info->in[*argcnt].size = CONTROL_WORD_LEN;277req_info->req.dlen += CONTROL_WORD_LEN;278++(*argcnt);279280req_info->in[*argcnt].vptr = (u8 *)fctx;281req_info->in[*argcnt].size = sizeof(struct otx2_cpt_fc_ctx);282req_info->req.dlen += sizeof(struct otx2_cpt_fc_ctx);283284++(*argcnt);285286return 0;287}288289static inline int create_input_list(struct skcipher_request *req, u32 enc,290u32 enc_iv_len)291{292struct otx2_cpt_req_ctx *rctx = skcipher_request_ctx_dma(req);293struct otx2_cpt_req_info *req_info = &rctx->cpt_req;294u32 argcnt = 0;295int ret;296297ret = create_ctx_hdr(req, enc, &argcnt);298if (ret)299return ret;300301update_input_data(req_info, req->src, req->cryptlen, &argcnt);302req_info->in_cnt = argcnt;303304return 0;305}306307static inline void create_output_list(struct skcipher_request *req,308u32 enc_iv_len)309{310struct otx2_cpt_req_ctx *rctx = skcipher_request_ctx_dma(req);311struct otx2_cpt_req_info *req_info = &rctx->cpt_req;312u32 argcnt = 0;313314/*315* OUTPUT Buffer Processing316* AES encryption/decryption output would be317* received in the following format318*319* ------IV--------|------ENCRYPTED/DECRYPTED DATA-----|320* [ 16 Bytes/ [ Request Enc/Dec/ DATA Len AES CBC ]321*/322update_output_data(req_info, req->dst, 0, req->cryptlen, &argcnt);323req_info->out_cnt = argcnt;324}325326static int skcipher_do_fallback(struct skcipher_request *req, bool is_enc)327{328struct crypto_skcipher *stfm = crypto_skcipher_reqtfm(req);329struct otx2_cpt_req_ctx *rctx = skcipher_request_ctx_dma(req);330struct otx2_cpt_enc_ctx *ctx = crypto_skcipher_ctx(stfm);331int ret;332333if (ctx->fbk_cipher) {334skcipher_request_set_tfm(&rctx->sk_fbk_req, ctx->fbk_cipher);335skcipher_request_set_callback(&rctx->sk_fbk_req,336req->base.flags,337req->base.complete,338req->base.data);339skcipher_request_set_crypt(&rctx->sk_fbk_req, req->src,340req->dst, req->cryptlen, 
req->iv);341ret = is_enc ? crypto_skcipher_encrypt(&rctx->sk_fbk_req) :342crypto_skcipher_decrypt(&rctx->sk_fbk_req);343} else {344ret = -EINVAL;345}346return ret;347}348349static inline int cpt_enc_dec(struct skcipher_request *req, u32 enc)350{351struct crypto_skcipher *stfm = crypto_skcipher_reqtfm(req);352struct otx2_cpt_req_ctx *rctx = skcipher_request_ctx_dma(req);353struct otx2_cpt_enc_ctx *ctx = crypto_skcipher_ctx(stfm);354struct otx2_cpt_req_info *req_info = &rctx->cpt_req;355u32 enc_iv_len = crypto_skcipher_ivsize(stfm);356struct pci_dev *pdev;357int status, cpu_num;358359if (req->cryptlen == 0)360return 0;361362if (!IS_ALIGNED(req->cryptlen, ctx->enc_align_len))363return -EINVAL;364365if (req->cryptlen > OTX2_CPT_MAX_REQ_SIZE)366return skcipher_do_fallback(req, enc);367368/* Clear control words */369rctx->ctrl_word.flags = 0;370rctx->fctx.enc.enc_ctrl.u = 0;371372status = create_input_list(req, enc, enc_iv_len);373if (status)374return status;375create_output_list(req, enc_iv_len);376377status = get_se_device(&pdev, &cpu_num);378if (status)379return status;380381req_info->callback = otx2_cpt_skcipher_callback;382req_info->areq = &req->base;383req_info->req_type = OTX2_CPT_ENC_DEC_REQ;384req_info->is_enc = enc;385req_info->is_trunc_hmac = false;386req_info->ctrl.s.grp = otx2_cpt_get_eng_grp_num(pdev,387OTX2_CPT_SE_TYPES);388389req_info->req.cptr = ctx->er_ctx.hw_ctx;390req_info->req.cptr_dma = ctx->er_ctx.cptr_dma;391392/*393* We perform an asynchronous send and once394* the request is completed the driver would395* intimate through registered call back functions396*/397status = otx2_cpt_do_request(pdev, req_info, cpu_num);398399return status;400}401402static int otx2_cpt_skcipher_encrypt(struct skcipher_request *req)403{404return cpt_enc_dec(req, true);405}406407static int otx2_cpt_skcipher_decrypt(struct skcipher_request *req)408{409return cpt_enc_dec(req, false);410}411412static int otx2_cpt_skcipher_xts_setkey(struct crypto_skcipher *tfm,413const u8 
*key, u32 keylen)414{415struct otx2_cpt_enc_ctx *ctx = crypto_skcipher_ctx(tfm);416const u8 *key2 = key + (keylen / 2);417const u8 *key1 = key;418int ret;419420ret = xts_verify_key(tfm, key, keylen);421if (ret)422return ret;423ctx->key_len = keylen;424ctx->enc_align_len = 1;425memcpy(ctx->enc_key, key1, keylen / 2);426memcpy(ctx->enc_key + KEY2_OFFSET, key2, keylen / 2);427ctx->cipher_type = OTX2_CPT_AES_XTS;428switch (ctx->key_len) {429case 2 * AES_KEYSIZE_128:430ctx->key_type = OTX2_CPT_AES_128_BIT;431break;432case 2 * AES_KEYSIZE_192:433ctx->key_type = OTX2_CPT_AES_192_BIT;434break;435case 2 * AES_KEYSIZE_256:436ctx->key_type = OTX2_CPT_AES_256_BIT;437break;438default:439return -EINVAL;440}441return crypto_skcipher_setkey(ctx->fbk_cipher, key, keylen);442}443444static int cpt_des_setkey(struct crypto_skcipher *tfm, const u8 *key,445u32 keylen, u8 cipher_type)446{447struct otx2_cpt_enc_ctx *ctx = crypto_skcipher_ctx(tfm);448449if (keylen != DES3_EDE_KEY_SIZE)450return -EINVAL;451452ctx->key_len = keylen;453ctx->cipher_type = cipher_type;454ctx->enc_align_len = 8;455456memcpy(ctx->enc_key, key, keylen);457458return crypto_skcipher_setkey(ctx->fbk_cipher, key, keylen);459}460461static int cpt_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,462u32 keylen, u8 cipher_type)463{464struct otx2_cpt_enc_ctx *ctx = crypto_skcipher_ctx(tfm);465466switch (keylen) {467case AES_KEYSIZE_128:468ctx->key_type = OTX2_CPT_AES_128_BIT;469break;470case AES_KEYSIZE_192:471ctx->key_type = OTX2_CPT_AES_192_BIT;472break;473case AES_KEYSIZE_256:474ctx->key_type = OTX2_CPT_AES_256_BIT;475break;476default:477return -EINVAL;478}479if (cipher_type == OTX2_CPT_AES_CBC || cipher_type == OTX2_CPT_AES_ECB)480ctx->enc_align_len = 16;481else482ctx->enc_align_len = 1;483484ctx->key_len = keylen;485ctx->cipher_type = cipher_type;486487memcpy(ctx->enc_key, key, keylen);488489return crypto_skcipher_setkey(ctx->fbk_cipher, key, keylen);490}491492static int otx2_cpt_skcipher_cbc_aes_setkey(struct 
crypto_skcipher *tfm,493const u8 *key, u32 keylen)494{495return cpt_aes_setkey(tfm, key, keylen, OTX2_CPT_AES_CBC);496}497498static int otx2_cpt_skcipher_ecb_aes_setkey(struct crypto_skcipher *tfm,499const u8 *key, u32 keylen)500{501return cpt_aes_setkey(tfm, key, keylen, OTX2_CPT_AES_ECB);502}503504static int otx2_cpt_skcipher_cbc_des3_setkey(struct crypto_skcipher *tfm,505const u8 *key, u32 keylen)506{507return cpt_des_setkey(tfm, key, keylen, OTX2_CPT_DES3_CBC);508}509510static int otx2_cpt_skcipher_ecb_des3_setkey(struct crypto_skcipher *tfm,511const u8 *key, u32 keylen)512{513return cpt_des_setkey(tfm, key, keylen, OTX2_CPT_DES3_ECB);514}515516static int cpt_skcipher_fallback_init(struct otx2_cpt_enc_ctx *ctx,517struct crypto_alg *alg)518{519if (alg->cra_flags & CRYPTO_ALG_NEED_FALLBACK) {520ctx->fbk_cipher =521crypto_alloc_skcipher(alg->cra_name, 0,522CRYPTO_ALG_ASYNC |523CRYPTO_ALG_NEED_FALLBACK);524if (IS_ERR(ctx->fbk_cipher)) {525pr_err("%s() failed to allocate fallback for %s\n",526__func__, alg->cra_name);527return PTR_ERR(ctx->fbk_cipher);528}529}530return 0;531}532533static int otx2_cpt_enc_dec_init(struct crypto_skcipher *stfm)534{535struct otx2_cpt_enc_ctx *ctx = crypto_skcipher_ctx(stfm);536struct crypto_tfm *tfm = crypto_skcipher_tfm(stfm);537struct crypto_alg *alg = tfm->__crt_alg;538struct pci_dev *pdev;539int ret, cpu_num;540541memset(ctx, 0, sizeof(*ctx));542/*543* Additional memory for skcipher_request is544* allocated since the cryptd daemon uses545* this memory for request_ctx information546*/547crypto_skcipher_set_reqsize_dma(548stfm, sizeof(struct otx2_cpt_req_ctx) +549sizeof(struct skcipher_request));550551ret = get_se_device(&pdev, &cpu_num);552if (ret)553return ret;554555ctx->pdev = pdev;556ret = cn10k_cpt_hw_ctx_init(pdev, &ctx->er_ctx);557if (ret)558return ret;559560return cpt_skcipher_fallback_init(ctx, alg);561}562563static void otx2_cpt_skcipher_exit(struct crypto_skcipher *tfm)564{565struct otx2_cpt_enc_ctx *ctx = 
crypto_skcipher_ctx(tfm);566567if (ctx->fbk_cipher) {568crypto_free_skcipher(ctx->fbk_cipher);569ctx->fbk_cipher = NULL;570}571cn10k_cpt_hw_ctx_clear(ctx->pdev, &ctx->er_ctx);572}573574static int cpt_aead_fallback_init(struct otx2_cpt_aead_ctx *ctx,575struct crypto_alg *alg)576{577if (alg->cra_flags & CRYPTO_ALG_NEED_FALLBACK) {578ctx->fbk_cipher =579crypto_alloc_aead(alg->cra_name, 0,580CRYPTO_ALG_ASYNC |581CRYPTO_ALG_NEED_FALLBACK);582if (IS_ERR(ctx->fbk_cipher)) {583pr_err("%s() failed to allocate fallback for %s\n",584__func__, alg->cra_name);585return PTR_ERR(ctx->fbk_cipher);586}587}588return 0;589}590591static int cpt_aead_init(struct crypto_aead *atfm, u8 cipher_type, u8 mac_type)592{593struct otx2_cpt_aead_ctx *ctx = crypto_aead_ctx_dma(atfm);594struct crypto_tfm *tfm = crypto_aead_tfm(atfm);595struct crypto_alg *alg = tfm->__crt_alg;596struct pci_dev *pdev;597int ret, cpu_num;598599ctx->cipher_type = cipher_type;600ctx->mac_type = mac_type;601602switch (ctx->mac_type) {603case OTX2_CPT_SHA1:604ctx->hashalg = crypto_alloc_shash("sha1", 0, 0);605break;606607case OTX2_CPT_SHA256:608ctx->hashalg = crypto_alloc_shash("sha256", 0, 0);609break;610611case OTX2_CPT_SHA384:612ctx->hashalg = crypto_alloc_shash("sha384", 0, 0);613break;614615case OTX2_CPT_SHA512:616ctx->hashalg = crypto_alloc_shash("sha512", 0, 0);617break;618}619620if (IS_ERR(ctx->hashalg))621return PTR_ERR(ctx->hashalg);622623if (ctx->hashalg) {624ctx->sdesc = alloc_sdesc(ctx->hashalg);625if (!ctx->sdesc) {626crypto_free_shash(ctx->hashalg);627return -ENOMEM;628}629}630631/*632* When selected cipher is NULL we use HMAC opcode instead of633* FLEXICRYPTO opcode therefore we don't need to use HASH algorithms634* for calculating ipad and opad635*/636if (ctx->cipher_type != OTX2_CPT_CIPHER_NULL && ctx->hashalg) {637int ss = crypto_shash_statesize(ctx->hashalg);638639ctx->ipad = kzalloc(ss, GFP_KERNEL);640if (!ctx->ipad) {641kfree(ctx->sdesc);642crypto_free_shash(ctx->hashalg);643return 
-ENOMEM;644}645646ctx->opad = kzalloc(ss, GFP_KERNEL);647if (!ctx->opad) {648kfree(ctx->ipad);649kfree(ctx->sdesc);650crypto_free_shash(ctx->hashalg);651return -ENOMEM;652}653}654switch (ctx->cipher_type) {655case OTX2_CPT_AES_CBC:656case OTX2_CPT_AES_ECB:657ctx->enc_align_len = 16;658break;659case OTX2_CPT_DES3_CBC:660case OTX2_CPT_DES3_ECB:661ctx->enc_align_len = 8;662break;663case OTX2_CPT_AES_GCM:664case OTX2_CPT_CIPHER_NULL:665ctx->enc_align_len = 1;666break;667}668crypto_aead_set_reqsize_dma(atfm, sizeof(struct otx2_cpt_req_ctx));669670ret = get_se_device(&pdev, &cpu_num);671if (ret)672return ret;673674ctx->pdev = pdev;675ret = cn10k_cpt_hw_ctx_init(pdev, &ctx->er_ctx);676if (ret)677return ret;678679return cpt_aead_fallback_init(ctx, alg);680}681682static int otx2_cpt_aead_cbc_aes_sha1_init(struct crypto_aead *tfm)683{684return cpt_aead_init(tfm, OTX2_CPT_AES_CBC, OTX2_CPT_SHA1);685}686687static int otx2_cpt_aead_cbc_aes_sha256_init(struct crypto_aead *tfm)688{689return cpt_aead_init(tfm, OTX2_CPT_AES_CBC, OTX2_CPT_SHA256);690}691692static int otx2_cpt_aead_cbc_aes_sha384_init(struct crypto_aead *tfm)693{694return cpt_aead_init(tfm, OTX2_CPT_AES_CBC, OTX2_CPT_SHA384);695}696697static int otx2_cpt_aead_cbc_aes_sha512_init(struct crypto_aead *tfm)698{699return cpt_aead_init(tfm, OTX2_CPT_AES_CBC, OTX2_CPT_SHA512);700}701702static int otx2_cpt_aead_ecb_null_sha1_init(struct crypto_aead *tfm)703{704return cpt_aead_init(tfm, OTX2_CPT_CIPHER_NULL, OTX2_CPT_SHA1);705}706707static int otx2_cpt_aead_ecb_null_sha256_init(struct crypto_aead *tfm)708{709return cpt_aead_init(tfm, OTX2_CPT_CIPHER_NULL, OTX2_CPT_SHA256);710}711712static int otx2_cpt_aead_ecb_null_sha384_init(struct crypto_aead *tfm)713{714return cpt_aead_init(tfm, OTX2_CPT_CIPHER_NULL, OTX2_CPT_SHA384);715}716717static int otx2_cpt_aead_ecb_null_sha512_init(struct crypto_aead *tfm)718{719return cpt_aead_init(tfm, OTX2_CPT_CIPHER_NULL, OTX2_CPT_SHA512);720}721722static int otx2_cpt_aead_gcm_aes_init(struct 
crypto_aead *tfm)723{724return cpt_aead_init(tfm, OTX2_CPT_AES_GCM, OTX2_CPT_MAC_NULL);725}726727static void otx2_cpt_aead_exit(struct crypto_aead *tfm)728{729struct otx2_cpt_aead_ctx *ctx = crypto_aead_ctx_dma(tfm);730731kfree(ctx->ipad);732kfree(ctx->opad);733crypto_free_shash(ctx->hashalg);734kfree(ctx->sdesc);735736if (ctx->fbk_cipher) {737crypto_free_aead(ctx->fbk_cipher);738ctx->fbk_cipher = NULL;739}740cn10k_cpt_hw_ctx_clear(ctx->pdev, &ctx->er_ctx);741}742743static int otx2_cpt_aead_gcm_set_authsize(struct crypto_aead *tfm,744unsigned int authsize)745{746struct otx2_cpt_aead_ctx *ctx = crypto_aead_ctx_dma(tfm);747748if (crypto_rfc4106_check_authsize(authsize))749return -EINVAL;750751tfm->authsize = authsize;752/* Set authsize for fallback case */753if (ctx->fbk_cipher)754ctx->fbk_cipher->authsize = authsize;755756return 0;757}758759static int otx2_cpt_aead_set_authsize(struct crypto_aead *tfm,760unsigned int authsize)761{762tfm->authsize = authsize;763764return 0;765}766767static int otx2_cpt_aead_null_set_authsize(struct crypto_aead *tfm,768unsigned int authsize)769{770struct otx2_cpt_aead_ctx *ctx = crypto_aead_ctx_dma(tfm);771772ctx->is_trunc_hmac = true;773tfm->authsize = authsize;774775return 0;776}777778static struct otx2_cpt_sdesc *alloc_sdesc(struct crypto_shash *alg)779{780struct otx2_cpt_sdesc *sdesc;781int size;782783size = sizeof(struct shash_desc) + crypto_shash_descsize(alg);784sdesc = kmalloc(size, GFP_KERNEL);785if (!sdesc)786return NULL;787788sdesc->shash.tfm = alg;789790return sdesc;791}792793static inline void swap_data32(void *buf, u32 len)794{795cpu_to_be32_array(buf, buf, len / 4);796}797798static inline void swap_data64(void *buf, u32 len)799{800u64 *src = buf;801int i = 0;802803for (i = 0 ; i < len / 8; i++, src++)804cpu_to_be64s(src);805}806807static int swap_pad(u8 mac_type, u8 *pad)808{809struct sha512_state *sha512;810struct sha256_state *sha256;811struct sha1_state *sha1;812813switch (mac_type) {814case OTX2_CPT_SHA1:815sha1 = 
(struct sha1_state *)pad;816swap_data32(sha1->state, SHA1_DIGEST_SIZE);817break;818819case OTX2_CPT_SHA256:820sha256 = (struct sha256_state *)pad;821swap_data32(sha256->state, SHA256_DIGEST_SIZE);822break;823824case OTX2_CPT_SHA384:825case OTX2_CPT_SHA512:826sha512 = (struct sha512_state *)pad;827swap_data64(sha512->state, SHA512_DIGEST_SIZE);828break;829830default:831return -EINVAL;832}833834return 0;835}836837static int aead_hmac_init(struct crypto_aead *cipher,838struct crypto_authenc_keys *keys)839{840struct otx2_cpt_aead_ctx *ctx = crypto_aead_ctx_dma(cipher);841int ds = crypto_shash_digestsize(ctx->hashalg);842int bs = crypto_shash_blocksize(ctx->hashalg);843int authkeylen = keys->authkeylen;844u8 *ipad = NULL, *opad = NULL;845int icount = 0;846int ret;847848if (authkeylen > bs) {849ret = crypto_shash_digest(&ctx->sdesc->shash, keys->authkey,850authkeylen, ctx->key);851if (ret)852goto calc_fail;853854authkeylen = ds;855} else856memcpy(ctx->key, keys->authkey, authkeylen);857858ctx->enc_key_len = keys->enckeylen;859ctx->auth_key_len = authkeylen;860861if (ctx->cipher_type == OTX2_CPT_CIPHER_NULL)862return keys->enckeylen ? 
-EINVAL : 0;863864switch (keys->enckeylen) {865case AES_KEYSIZE_128:866ctx->key_type = OTX2_CPT_AES_128_BIT;867break;868case AES_KEYSIZE_192:869ctx->key_type = OTX2_CPT_AES_192_BIT;870break;871case AES_KEYSIZE_256:872ctx->key_type = OTX2_CPT_AES_256_BIT;873break;874default:875/* Invalid key length */876return -EINVAL;877}878879memcpy(ctx->key + authkeylen, keys->enckey, keys->enckeylen);880881ipad = ctx->ipad;882opad = ctx->opad;883884memcpy(ipad, ctx->key, authkeylen);885memset(ipad + authkeylen, 0, bs - authkeylen);886memcpy(opad, ipad, bs);887888for (icount = 0; icount < bs; icount++) {889ipad[icount] ^= 0x36;890opad[icount] ^= 0x5c;891}892893/*894* Partial Hash calculated from the software895* algorithm is retrieved for IPAD & OPAD896*/897898/* IPAD Calculation */899crypto_shash_init(&ctx->sdesc->shash);900crypto_shash_update(&ctx->sdesc->shash, ipad, bs);901crypto_shash_export(&ctx->sdesc->shash, ipad);902ret = swap_pad(ctx->mac_type, ipad);903if (ret)904goto calc_fail;905906/* OPAD Calculation */907crypto_shash_init(&ctx->sdesc->shash);908crypto_shash_update(&ctx->sdesc->shash, opad, bs);909crypto_shash_export(&ctx->sdesc->shash, opad);910ret = swap_pad(ctx->mac_type, opad);911912calc_fail:913return ret;914}915916static int otx2_cpt_aead_cbc_aes_sha_setkey(struct crypto_aead *cipher,917const unsigned char *key,918unsigned int keylen)919{920struct crypto_authenc_keys authenc_keys;921922return crypto_authenc_extractkeys(&authenc_keys, key, keylen) ?:923aead_hmac_init(cipher, &authenc_keys);924}925926static int otx2_cpt_aead_ecb_null_sha_setkey(struct crypto_aead *cipher,927const unsigned char *key,928unsigned int keylen)929{930return otx2_cpt_aead_cbc_aes_sha_setkey(cipher, key, keylen);931}932933static int otx2_cpt_aead_gcm_aes_setkey(struct crypto_aead *cipher,934const unsigned char *key,935unsigned int keylen)936{937struct otx2_cpt_aead_ctx *ctx = crypto_aead_ctx_dma(cipher);938939/*940* For aes gcm we expect to get encryption key (16, 24, 32 bytes)941* and 
salt (4 bytes)942*/943switch (keylen) {944case AES_KEYSIZE_128 + AES_GCM_SALT_SIZE:945ctx->key_type = OTX2_CPT_AES_128_BIT;946ctx->enc_key_len = AES_KEYSIZE_128;947break;948case AES_KEYSIZE_192 + AES_GCM_SALT_SIZE:949ctx->key_type = OTX2_CPT_AES_192_BIT;950ctx->enc_key_len = AES_KEYSIZE_192;951break;952case AES_KEYSIZE_256 + AES_GCM_SALT_SIZE:953ctx->key_type = OTX2_CPT_AES_256_BIT;954ctx->enc_key_len = AES_KEYSIZE_256;955break;956default:957/* Invalid key and salt length */958return -EINVAL;959}960961/* Store encryption key and salt */962memcpy(ctx->key, key, keylen);963964return crypto_aead_setkey(ctx->fbk_cipher, key, keylen);965}966967static inline int create_aead_ctx_hdr(struct aead_request *req, u32 enc,968u32 *argcnt)969{970struct otx2_cpt_req_ctx *rctx = aead_request_ctx_dma(req);971struct crypto_aead *tfm = crypto_aead_reqtfm(req);972struct otx2_cpt_aead_ctx *ctx = crypto_aead_ctx_dma(tfm);973struct otx2_cpt_req_info *req_info = &rctx->cpt_req;974struct otx2_cpt_fc_ctx *fctx = &rctx->fctx;975int mac_len = crypto_aead_authsize(tfm);976int ds;977978rctx->ctrl_word.e.enc_data_offset = req->assoclen;979980switch (ctx->cipher_type) {981case OTX2_CPT_AES_CBC:982if (req->assoclen > 248 || !IS_ALIGNED(req->assoclen, 8))983return -EINVAL;984985fctx->enc.enc_ctrl.e.iv_source = OTX2_CPT_FROM_CPTR;986/* Copy encryption key to context */987memcpy(fctx->enc.encr_key, ctx->key + ctx->auth_key_len,988ctx->enc_key_len);989/* Copy IV to context */990memcpy(fctx->enc.encr_iv, req->iv, crypto_aead_ivsize(tfm));991992ds = crypto_shash_digestsize(ctx->hashalg);993if (ctx->mac_type == OTX2_CPT_SHA384)994ds = SHA512_DIGEST_SIZE;995if (ctx->ipad)996memcpy(fctx->hmac.e.ipad, ctx->ipad, ds);997if (ctx->opad)998memcpy(fctx->hmac.e.opad, ctx->opad, ds);999break;10001001case OTX2_CPT_AES_GCM:1002if (crypto_ipsec_check_assoclen(req->assoclen))1003return -EINVAL;10041005fctx->enc.enc_ctrl.e.iv_source = OTX2_CPT_FROM_DPTR;1006/* Copy encryption key to context 
*/1007memcpy(fctx->enc.encr_key, ctx->key, ctx->enc_key_len);1008/* Copy salt to context */1009memcpy(fctx->enc.encr_iv, ctx->key + ctx->enc_key_len,1010AES_GCM_SALT_SIZE);10111012rctx->ctrl_word.e.iv_offset = req->assoclen - AES_GCM_IV_OFFSET;1013break;10141015default:1016/* Unknown cipher type */1017return -EINVAL;1018}1019cpu_to_be64s(&rctx->ctrl_word.flags);10201021req_info->ctrl.s.dma_mode = OTX2_CPT_DMA_MODE_SG;1022req_info->ctrl.s.se_req = 1;1023req_info->req.opcode.s.major = OTX2_CPT_MAJOR_OP_FC |1024DMA_MODE_FLAG(OTX2_CPT_DMA_MODE_SG);1025if (enc) {1026req_info->req.opcode.s.minor = 2;1027req_info->req.param1 = req->cryptlen;1028req_info->req.param2 = req->cryptlen + req->assoclen;1029} else {1030req_info->req.opcode.s.minor = 3;1031req_info->req.param1 = req->cryptlen - mac_len;1032req_info->req.param2 = req->cryptlen + req->assoclen - mac_len;1033}10341035fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type;1036fctx->enc.enc_ctrl.e.aes_key = ctx->key_type;1037fctx->enc.enc_ctrl.e.mac_type = ctx->mac_type;1038fctx->enc.enc_ctrl.e.mac_len = mac_len;1039cpu_to_be64s(&fctx->enc.enc_ctrl.u);10401041/*1042* Storing Packet Data Information in offset1043* Control Word First 8 bytes1044*/1045req_info->in[*argcnt].vptr = (u8 *)&rctx->ctrl_word;1046req_info->in[*argcnt].size = CONTROL_WORD_LEN;1047req_info->req.dlen += CONTROL_WORD_LEN;1048++(*argcnt);10491050req_info->in[*argcnt].vptr = (u8 *)fctx;1051req_info->in[*argcnt].size = sizeof(struct otx2_cpt_fc_ctx);1052req_info->req.dlen += sizeof(struct otx2_cpt_fc_ctx);1053++(*argcnt);10541055return 0;1056}10571058static inline void create_hmac_ctx_hdr(struct aead_request *req, u32 *argcnt,1059u32 enc)1060{1061struct otx2_cpt_req_ctx *rctx = aead_request_ctx_dma(req);1062struct crypto_aead *tfm = crypto_aead_reqtfm(req);1063struct otx2_cpt_aead_ctx *ctx = crypto_aead_ctx_dma(tfm);1064struct otx2_cpt_req_info *req_info = &rctx->cpt_req;10651066req_info->ctrl.s.dma_mode = OTX2_CPT_DMA_MODE_SG;1067req_info->ctrl.s.se_req 
= 1;1068req_info->req.opcode.s.major = OTX2_CPT_MAJOR_OP_HMAC |1069DMA_MODE_FLAG(OTX2_CPT_DMA_MODE_SG);1070req_info->is_trunc_hmac = ctx->is_trunc_hmac;10711072req_info->req.opcode.s.minor = 0;1073req_info->req.param1 = ctx->auth_key_len;1074req_info->req.param2 = ctx->mac_type << 8;10751076/* Add authentication key */1077req_info->in[*argcnt].vptr = ctx->key;1078req_info->in[*argcnt].size = round_up(ctx->auth_key_len, 8);1079req_info->req.dlen += round_up(ctx->auth_key_len, 8);1080++(*argcnt);1081}10821083static inline int create_aead_input_list(struct aead_request *req, u32 enc)1084{1085struct otx2_cpt_req_ctx *rctx = aead_request_ctx_dma(req);1086struct otx2_cpt_req_info *req_info = &rctx->cpt_req;1087u32 inputlen = req->cryptlen + req->assoclen;1088u32 status, argcnt = 0;10891090status = create_aead_ctx_hdr(req, enc, &argcnt);1091if (status)1092return status;1093update_input_data(req_info, req->src, inputlen, &argcnt);1094req_info->in_cnt = argcnt;10951096return 0;1097}10981099static inline void create_aead_output_list(struct aead_request *req, u32 enc,1100u32 mac_len)1101{1102struct otx2_cpt_req_ctx *rctx = aead_request_ctx_dma(req);1103struct otx2_cpt_req_info *req_info = &rctx->cpt_req;1104u32 argcnt = 0, outputlen = 0;11051106if (enc)1107outputlen = req->cryptlen + req->assoclen + mac_len;1108else1109outputlen = req->cryptlen + req->assoclen - mac_len;11101111update_output_data(req_info, req->dst, 0, outputlen, &argcnt);1112req_info->out_cnt = argcnt;1113}11141115static inline void create_aead_null_input_list(struct aead_request *req,1116u32 enc, u32 mac_len)1117{1118struct otx2_cpt_req_ctx *rctx = aead_request_ctx_dma(req);1119struct otx2_cpt_req_info *req_info = &rctx->cpt_req;1120u32 inputlen, argcnt = 0;11211122if (enc)1123inputlen = req->cryptlen + req->assoclen;1124else1125inputlen = req->cryptlen + req->assoclen - mac_len;11261127create_hmac_ctx_hdr(req, &argcnt, enc);1128update_input_data(req_info, req->src, inputlen, &argcnt);1129req_info->in_cnt = 
argcnt;1130}11311132static inline int create_aead_null_output_list(struct aead_request *req,1133u32 enc, u32 mac_len)1134{1135struct otx2_cpt_req_ctx *rctx = aead_request_ctx_dma(req);1136struct otx2_cpt_req_info *req_info = &rctx->cpt_req;1137struct scatterlist *dst;1138u8 *ptr = NULL;1139int argcnt = 0, status, offset;1140u32 inputlen;11411142if (enc)1143inputlen = req->cryptlen + req->assoclen;1144else1145inputlen = req->cryptlen + req->assoclen - mac_len;11461147/*1148* If source and destination are different1149* then copy payload to destination1150*/1151if (req->src != req->dst) {11521153ptr = kmalloc(inputlen, (req_info->areq->flags &1154CRYPTO_TFM_REQ_MAY_SLEEP) ?1155GFP_KERNEL : GFP_ATOMIC);1156if (!ptr)1157return -ENOMEM;11581159status = sg_copy_to_buffer(req->src, sg_nents(req->src), ptr,1160inputlen);1161if (status != inputlen) {1162status = -EINVAL;1163goto error_free;1164}1165status = sg_copy_from_buffer(req->dst, sg_nents(req->dst), ptr,1166inputlen);1167if (status != inputlen) {1168status = -EINVAL;1169goto error_free;1170}1171kfree(ptr);1172}11731174if (enc) {1175/*1176* In an encryption scenario hmac needs1177* to be appended after payload1178*/1179dst = req->dst;1180offset = inputlen;1181while (offset >= dst->length) {1182offset -= dst->length;1183dst = sg_next(dst);1184if (!dst)1185return -ENOENT;1186}11871188update_output_data(req_info, dst, offset, mac_len, &argcnt);1189} else {1190/*1191* In a decryption scenario calculated hmac for received1192* payload needs to be compare with hmac received1193*/1194status = sg_copy_buffer(req->src, sg_nents(req->src),1195rctx->fctx.hmac.s.hmac_recv, mac_len,1196inputlen, true);1197if (status != mac_len)1198return -EINVAL;11991200req_info->out[argcnt].vptr = rctx->fctx.hmac.s.hmac_calc;1201req_info->out[argcnt].size = mac_len;1202argcnt++;1203}12041205req_info->out_cnt = argcnt;1206return 0;12071208error_free:1209kfree(ptr);1210return status;1211}12121213static int aead_do_fallback(struct aead_request *req, 
/*
 * Process an AEAD request through the software fallback cipher instead of
 * the CPT hardware.  Used when the request exceeds the engine's limits or
 * is not suitably aligned (see cpt_aead_enc_dec()).
 *
 * @req:    the original AEAD request
 * @is_enc: true to encrypt, false to decrypt
 *
 * Returns the fallback cipher's result, or -EINVAL if no fallback tfm was
 * allocated for this context.
 */
static int aead_do_fallback(struct aead_request *req, bool is_enc)
{
	struct otx2_cpt_req_ctx *rctx = aead_request_ctx_dma(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct otx2_cpt_aead_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct aead_request *subreq = &rctx->fbk_req;

	if (!ctx->fbk_cipher)
		return -EINVAL;

	/* Mirror the original request onto the fallback tfm's subrequest. */
	aead_request_set_tfm(subreq, ctx->fbk_cipher);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return is_enc ? crypto_aead_encrypt(subreq) :
			crypto_aead_decrypt(subreq);
}
-EINVAL;1278}1279if (!IS_ALIGNED(req_info->req.param1, ctx->enc_align_len))1280return -EINVAL;12811282if (!req_info->req.param2 ||1283(req_info->req.param1 > OTX2_CPT_MAX_REQ_SIZE) ||1284(req_info->req.param2 > OTX2_CPT_MAX_REQ_SIZE))1285return aead_do_fallback(req, enc);12861287status = get_se_device(&pdev, &cpu_num);1288if (status)1289return status;12901291req_info->ctrl.s.grp = otx2_cpt_get_eng_grp_num(pdev,1292OTX2_CPT_SE_TYPES);12931294/*1295* We perform an asynchronous send and once1296* the request is completed the driver would1297* intimate through registered call back functions1298*/1299return otx2_cpt_do_request(pdev, req_info, cpu_num);1300}13011302static int otx2_cpt_aead_encrypt(struct aead_request *req)1303{1304return cpt_aead_enc_dec(req, OTX2_CPT_AEAD_ENC_DEC_REQ, true);1305}13061307static int otx2_cpt_aead_decrypt(struct aead_request *req)1308{1309return cpt_aead_enc_dec(req, OTX2_CPT_AEAD_ENC_DEC_REQ, false);1310}13111312static int otx2_cpt_aead_null_encrypt(struct aead_request *req)1313{1314return cpt_aead_enc_dec(req, OTX2_CPT_AEAD_ENC_DEC_NULL_REQ, true);1315}13161317static int otx2_cpt_aead_null_decrypt(struct aead_request *req)1318{1319return cpt_aead_enc_dec(req, OTX2_CPT_AEAD_ENC_DEC_NULL_REQ, false);1320}13211322static struct skcipher_alg otx2_cpt_skciphers[] = { {1323.base.cra_name = "xts(aes)",1324.base.cra_driver_name = "cpt_xts_aes",1325.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,1326.base.cra_blocksize = AES_BLOCK_SIZE,1327.base.cra_ctxsize = sizeof(struct otx2_cpt_enc_ctx),1328.base.cra_alignmask = 7,1329.base.cra_priority = 4001,1330.base.cra_module = THIS_MODULE,13311332.init = otx2_cpt_enc_dec_init,1333.exit = otx2_cpt_skcipher_exit,1334.ivsize = AES_BLOCK_SIZE,1335.min_keysize = 2 * AES_MIN_KEY_SIZE,1336.max_keysize = 2 * AES_MAX_KEY_SIZE,1337.setkey = otx2_cpt_skcipher_xts_setkey,1338.encrypt = otx2_cpt_skcipher_encrypt,1339.decrypt = otx2_cpt_skcipher_decrypt,1340}, {1341.base.cra_name = 
"cbc(aes)",1342.base.cra_driver_name = "cpt_cbc_aes",1343.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,1344.base.cra_blocksize = AES_BLOCK_SIZE,1345.base.cra_ctxsize = sizeof(struct otx2_cpt_enc_ctx),1346.base.cra_alignmask = 7,1347.base.cra_priority = 4001,1348.base.cra_module = THIS_MODULE,13491350.init = otx2_cpt_enc_dec_init,1351.exit = otx2_cpt_skcipher_exit,1352.ivsize = AES_BLOCK_SIZE,1353.min_keysize = AES_MIN_KEY_SIZE,1354.max_keysize = AES_MAX_KEY_SIZE,1355.setkey = otx2_cpt_skcipher_cbc_aes_setkey,1356.encrypt = otx2_cpt_skcipher_encrypt,1357.decrypt = otx2_cpt_skcipher_decrypt,1358}, {1359.base.cra_name = "ecb(aes)",1360.base.cra_driver_name = "cpt_ecb_aes",1361.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,1362.base.cra_blocksize = AES_BLOCK_SIZE,1363.base.cra_ctxsize = sizeof(struct otx2_cpt_enc_ctx),1364.base.cra_alignmask = 7,1365.base.cra_priority = 4001,1366.base.cra_module = THIS_MODULE,13671368.init = otx2_cpt_enc_dec_init,1369.exit = otx2_cpt_skcipher_exit,1370.ivsize = 0,1371.min_keysize = AES_MIN_KEY_SIZE,1372.max_keysize = AES_MAX_KEY_SIZE,1373.setkey = otx2_cpt_skcipher_ecb_aes_setkey,1374.encrypt = otx2_cpt_skcipher_encrypt,1375.decrypt = otx2_cpt_skcipher_decrypt,1376}, {1377.base.cra_name = "cbc(des3_ede)",1378.base.cra_driver_name = "cpt_cbc_des3_ede",1379.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,1380.base.cra_blocksize = DES3_EDE_BLOCK_SIZE,1381.base.cra_ctxsize = sizeof(struct otx2_cpt_enc_ctx),1382.base.cra_alignmask = 7,1383.base.cra_priority = 4001,1384.base.cra_module = THIS_MODULE,13851386.init = otx2_cpt_enc_dec_init,1387.exit = otx2_cpt_skcipher_exit,1388.min_keysize = DES3_EDE_KEY_SIZE,1389.max_keysize = DES3_EDE_KEY_SIZE,1390.ivsize = DES_BLOCK_SIZE,1391.setkey = otx2_cpt_skcipher_cbc_des3_setkey,1392.encrypt = otx2_cpt_skcipher_encrypt,1393.decrypt = otx2_cpt_skcipher_decrypt,1394}, {1395.base.cra_name = "ecb(des3_ede)",1396.base.cra_driver_name = 
/*
 * AEAD algorithms backed by the CPT hardware: authenc(hmac(shaN),cbc(aes))
 * variants, HMAC-only authenc(hmac(shaN),ecb(cipher_null)) variants, and
 * rfc4106(gcm(aes)).  Contexts are padded with CRYPTO_DMA_PADDING for
 * DMA-safe placement of the request context.
 */
static struct aead_alg otx2_cpt_aeads[] = { {
	.base = {
		.cra_name = "authenc(hmac(sha1),cbc(aes))",
		.cra_driver_name = "cpt_hmac_sha1_cbc_aes",
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.cra_ctxsize = sizeof(struct otx2_cpt_aead_ctx) + CRYPTO_DMA_PADDING,
		.cra_priority = 4001,
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.init = otx2_cpt_aead_cbc_aes_sha1_init,
	.exit = otx2_cpt_aead_exit,
	.setkey = otx2_cpt_aead_cbc_aes_sha_setkey,
	.setauthsize = otx2_cpt_aead_set_authsize,
	.encrypt = otx2_cpt_aead_encrypt,
	.decrypt = otx2_cpt_aead_decrypt,
	.ivsize = AES_BLOCK_SIZE,
	.maxauthsize = SHA1_DIGEST_SIZE,
}, {
	.base = {
		.cra_name = "authenc(hmac(sha256),cbc(aes))",
		.cra_driver_name = "cpt_hmac_sha256_cbc_aes",
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.cra_ctxsize = sizeof(struct otx2_cpt_aead_ctx) + CRYPTO_DMA_PADDING,
		.cra_priority = 4001,
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.init = otx2_cpt_aead_cbc_aes_sha256_init,
	.exit = otx2_cpt_aead_exit,
	.setkey = otx2_cpt_aead_cbc_aes_sha_setkey,
	.setauthsize = otx2_cpt_aead_set_authsize,
	.encrypt = otx2_cpt_aead_encrypt,
	.decrypt = otx2_cpt_aead_decrypt,
	.ivsize = AES_BLOCK_SIZE,
	.maxauthsize = SHA256_DIGEST_SIZE,
}, {
	.base = {
		.cra_name = "authenc(hmac(sha384),cbc(aes))",
		.cra_driver_name = "cpt_hmac_sha384_cbc_aes",
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.cra_ctxsize = sizeof(struct otx2_cpt_aead_ctx) + CRYPTO_DMA_PADDING,
		.cra_priority = 4001,
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.init = otx2_cpt_aead_cbc_aes_sha384_init,
	.exit = otx2_cpt_aead_exit,
	.setkey = otx2_cpt_aead_cbc_aes_sha_setkey,
	.setauthsize = otx2_cpt_aead_set_authsize,
	.encrypt = otx2_cpt_aead_encrypt,
	.decrypt = otx2_cpt_aead_decrypt,
	.ivsize = AES_BLOCK_SIZE,
	.maxauthsize = SHA384_DIGEST_SIZE,
}, {
	.base = {
		.cra_name = "authenc(hmac(sha512),cbc(aes))",
		.cra_driver_name = "cpt_hmac_sha512_cbc_aes",
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.cra_ctxsize = sizeof(struct otx2_cpt_aead_ctx) + CRYPTO_DMA_PADDING,
		.cra_priority = 4001,
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.init = otx2_cpt_aead_cbc_aes_sha512_init,
	.exit = otx2_cpt_aead_exit,
	.setkey = otx2_cpt_aead_cbc_aes_sha_setkey,
	.setauthsize = otx2_cpt_aead_set_authsize,
	.encrypt = otx2_cpt_aead_encrypt,
	.decrypt = otx2_cpt_aead_decrypt,
	.ivsize = AES_BLOCK_SIZE,
	.maxauthsize = SHA512_DIGEST_SIZE,
}, {
	/* HMAC-only variants: NULL cipher, no fallback needed. */
	.base = {
		.cra_name = "authenc(hmac(sha1),ecb(cipher_null))",
		.cra_driver_name = "cpt_hmac_sha1_ecb_null",
		.cra_blocksize = 1,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_ctxsize = sizeof(struct otx2_cpt_aead_ctx) + CRYPTO_DMA_PADDING,
		.cra_priority = 4001,
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.init = otx2_cpt_aead_ecb_null_sha1_init,
	.exit = otx2_cpt_aead_exit,
	.setkey = otx2_cpt_aead_ecb_null_sha_setkey,
	.setauthsize = otx2_cpt_aead_null_set_authsize,
	.encrypt = otx2_cpt_aead_null_encrypt,
	.decrypt = otx2_cpt_aead_null_decrypt,
	.ivsize = 0,
	.maxauthsize = SHA1_DIGEST_SIZE,
}, {
	.base = {
		.cra_name = "authenc(hmac(sha256),ecb(cipher_null))",
		.cra_driver_name = "cpt_hmac_sha256_ecb_null",
		.cra_blocksize = 1,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_ctxsize = sizeof(struct otx2_cpt_aead_ctx) + CRYPTO_DMA_PADDING,
		.cra_priority = 4001,
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.init = otx2_cpt_aead_ecb_null_sha256_init,
	.exit = otx2_cpt_aead_exit,
	.setkey = otx2_cpt_aead_ecb_null_sha_setkey,
	.setauthsize = otx2_cpt_aead_null_set_authsize,
	.encrypt = otx2_cpt_aead_null_encrypt,
	.decrypt = otx2_cpt_aead_null_decrypt,
	.ivsize = 0,
	.maxauthsize = SHA256_DIGEST_SIZE,
}, {
	.base = {
		.cra_name = "authenc(hmac(sha384),ecb(cipher_null))",
		.cra_driver_name = "cpt_hmac_sha384_ecb_null",
		.cra_blocksize = 1,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_ctxsize = sizeof(struct otx2_cpt_aead_ctx) + CRYPTO_DMA_PADDING,
		.cra_priority = 4001,
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.init = otx2_cpt_aead_ecb_null_sha384_init,
	.exit = otx2_cpt_aead_exit,
	.setkey = otx2_cpt_aead_ecb_null_sha_setkey,
	.setauthsize = otx2_cpt_aead_null_set_authsize,
	.encrypt = otx2_cpt_aead_null_encrypt,
	.decrypt = otx2_cpt_aead_null_decrypt,
	.ivsize = 0,
	.maxauthsize = SHA384_DIGEST_SIZE,
}, {
	.base = {
		.cra_name = "authenc(hmac(sha512),ecb(cipher_null))",
		.cra_driver_name = "cpt_hmac_sha512_ecb_null",
		.cra_blocksize = 1,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_ctxsize = sizeof(struct otx2_cpt_aead_ctx) + CRYPTO_DMA_PADDING,
		.cra_priority = 4001,
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.init = otx2_cpt_aead_ecb_null_sha512_init,
	.exit = otx2_cpt_aead_exit,
	.setkey = otx2_cpt_aead_ecb_null_sha_setkey,
	.setauthsize = otx2_cpt_aead_null_set_authsize,
	.encrypt = otx2_cpt_aead_null_encrypt,
	.decrypt = otx2_cpt_aead_null_decrypt,
	.ivsize = 0,
	.maxauthsize = SHA512_DIGEST_SIZE,
}, {
	.base = {
		.cra_name = "rfc4106(gcm(aes))",
		.cra_driver_name = "cpt_rfc4106_gcm_aes",
		.cra_blocksize = 1,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.cra_ctxsize = sizeof(struct otx2_cpt_aead_ctx) + CRYPTO_DMA_PADDING,
		.cra_priority = 4001,
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.init = otx2_cpt_aead_gcm_aes_init,
	.exit = otx2_cpt_aead_exit,
	.setkey = otx2_cpt_aead_gcm_aes_setkey,
	.setauthsize = otx2_cpt_aead_gcm_set_authsize,
	.encrypt = otx2_cpt_aead_encrypt,
	.decrypt = otx2_cpt_aead_decrypt,
	.ivsize = AES_GCM_IV_SIZE,	/* 8-byte IV per RFC 4106 */
	.maxauthsize = AES_GCM_ICV_SIZE,
} };
/*
 * Register all CPT skcipher and AEAD algorithms with the crypto core.
 * If AEAD registration fails, the already-registered skciphers are rolled
 * back so the function either registers everything or nothing.
 *
 * Returns 0 on success or the negative errno from the crypto core.
 */
static inline int cpt_register_algs(void)
{
	int i, err = 0;

	/*
	 * Clear CRYPTO_ALG_DEAD left over from a previous unregistration so
	 * the static alg templates can be registered again.
	 */
	for (i = 0; i < ARRAY_SIZE(otx2_cpt_skciphers); i++)
		otx2_cpt_skciphers[i].base.cra_flags &= ~CRYPTO_ALG_DEAD;

	err = crypto_register_skciphers(otx2_cpt_skciphers,
					ARRAY_SIZE(otx2_cpt_skciphers));
	if (err)
		return err;

	for (i = 0; i < ARRAY_SIZE(otx2_cpt_aeads); i++)
		otx2_cpt_aeads[i].base.cra_flags &= ~CRYPTO_ALG_DEAD;

	err = crypto_register_aeads(otx2_cpt_aeads,
				    ARRAY_SIZE(otx2_cpt_aeads));
	if (err) {
		/* Roll back so registration is all-or-nothing. */
		crypto_unregister_skciphers(otx2_cpt_skciphers,
					    ARRAY_SIZE(otx2_cpt_skciphers));
		return err;
	}

	return 0;
}

/* Unregister every algorithm registered by cpt_register_algs(). */
static inline void cpt_unregister_algs(void)
{
	crypto_unregister_skciphers(otx2_cpt_skciphers,
				    ARRAY_SIZE(otx2_cpt_skciphers));
	crypto_unregister_aeads(otx2_cpt_aeads, ARRAY_SIZE(otx2_cpt_aeads));
}

/*
 * sort() comparator ordering cpt_device_desc entries by PCI devfn,
 * ascending.
 * NOTE(review): the casts drop const and re-add it on assignment; casting
 * to (const struct cpt_device_desc *) directly would be cleaner.
 */
static int compare_func(const void *lptr, const void *rptr)
{
	const struct cpt_device_desc *ldesc = (struct cpt_device_desc *) lptr;
	const struct cpt_device_desc *rdesc = (struct cpt_device_desc *) rptr;

	if (ldesc->dev->devfn < rdesc->dev->devfn)
		return -1;
	if (ldesc->dev->devfn > rdesc->dev->devfn)
		return 1;
	return 0;
}
num_devices)1634{1635int ret = 0;1636int count;16371638mutex_lock(&mutex);1639count = atomic_read(&se_devices.count);1640if (count >= OTX2_CPT_MAX_LFS_NUM) {1641dev_err(&pdev->dev, "No space to add a new device\n");1642ret = -ENOSPC;1643goto unlock;1644}1645se_devices.desc[count].num_queues = num_queues;1646se_devices.desc[count++].dev = pdev;1647atomic_inc(&se_devices.count);16481649if (atomic_read(&se_devices.count) == num_devices &&1650is_crypto_registered == false) {1651if (cpt_register_algs()) {1652dev_err(&pdev->dev,1653"Error in registering crypto algorithms\n");1654ret = -EINVAL;1655goto unlock;1656}1657try_module_get(mod);1658is_crypto_registered = true;1659}1660sort(se_devices.desc, count, sizeof(struct cpt_device_desc),1661compare_func, NULL);16621663unlock:1664mutex_unlock(&mutex);1665return ret;1666}16671668void otx2_cpt_crypto_exit(struct pci_dev *pdev, struct module *mod)1669{1670struct cpt_device_table *dev_tbl;1671bool dev_found = false;1672int i, j, count;16731674mutex_lock(&mutex);16751676dev_tbl = &se_devices;1677count = atomic_read(&dev_tbl->count);1678for (i = 0; i < count; i++) {1679if (pdev == dev_tbl->desc[i].dev) {1680for (j = i; j < count-1; j++)1681dev_tbl->desc[j] = dev_tbl->desc[j+1];1682dev_found = true;1683break;1684}1685}16861687if (!dev_found) {1688dev_err(&pdev->dev, "%s device not found\n", __func__);1689goto unlock;1690}1691if (atomic_dec_and_test(&se_devices.count)) {1692cpt_unregister_algs();1693module_put(mod);1694is_crypto_registered = false;1695}16961697unlock:1698mutex_unlock(&mutex);1699}170017011702