Path: drivers/crypto/gemini/sl3516-ce-cipher.c

// SPDX-License-Identifier: GPL-2.0
/*
 * sl3516-ce-cipher.c - hardware cryptographic offloader for Storlink SL3516 SoC
 *
 * Copyright (C) 2021 Corentin LABBE <[email protected]>
 *
 * This file adds support for AES cipher with 128,192,256 bits keysize in
 * ECB mode.
 */

#include <crypto/engine.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/dma-mapping.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "sl3516-ce.h"

/* sl3516_ce_need_fallback - check if a request can be handled by the CE */
static bool sl3516_ce_need_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sl3516_ce_dev *ce = op->ce;
	struct scatterlist *in_sg;
	struct scatterlist *out_sg;
	struct scatterlist *sg;

	if (areq->cryptlen == 0 || areq->cryptlen % 16) {
		ce->fallback_mod16++;
		return true;
	}

	/*
	 * check if we have enough descriptors for TX
	 * Note: TX need one control desc for each SG
	 */
	if (sg_nents(areq->src) > MAXDESC / 2) {
		ce->fallback_sg_count_tx++;
		return true;
	}
	/* check if we have enough descriptors for RX */
	if (sg_nents(areq->dst) > MAXDESC) {
		ce->fallback_sg_count_rx++;
		return true;
	}

	sg = areq->src;
	while (sg) {
		if ((sg->length % 16) != 0) {
			ce->fallback_mod16++;
			return true;
		}
		if ((sg_dma_len(sg) % 16) != 0) {
			ce->fallback_mod16++;
			return true;
		}
		if (!IS_ALIGNED(sg->offset, 16)) {
			ce->fallback_align16++;
			return true;
		}
		sg = sg_next(sg);
	}
	sg = areq->dst;
	while (sg) {
		if ((sg->length % 16) != 0) {
			ce->fallback_mod16++;
			return true;
		}
		if ((sg_dma_len(sg) % 16) != 0) {
			ce->fallback_mod16++;
			return true;
		}
		if (!IS_ALIGNED(sg->offset, 16)) {
			ce->fallback_align16++;
			return true;
		}
		sg = sg_next(sg);
	}

	/* need same numbers of SG (with same length) for source and destination */
	in_sg = areq->src;
	out_sg = areq->dst;
	while (in_sg && out_sg) {
		if (in_sg->length != out_sg->length) {
			ce->fallback_not_same_len++;
			return true;
		}
		in_sg = sg_next(in_sg);
		out_sg = sg_next(out_sg);
	}
	if (in_sg || out_sg)
		return true;

	return false;
}

static int sl3516_ce_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sl3516_ce_alg_template *algt;
	int err;

	algt = container_of(alg, struct sl3516_ce_alg_template, alg.skcipher.base);
	algt->stat_fb++;

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir == CE_DECRYPTION)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}
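
/*
 * sl3516_ce_cipher - DMA-map the request's scatterlists, fill the t_src/t_dst
 * descriptor tables and the ECB control packet, then hand the job to
 * sl3516_ce_run_task(). Only reached for requests that passed
 * sl3516_ce_need_fallback().
 */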
static int sl3516_ce_cipher(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sl3516_ce_dev *ce = op->ce;
	struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sl3516_ce_alg_template *algt;
	struct scatterlist *sg;
	unsigned int todo, len;
	struct pkt_control_ecb *ecb;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;
	int i;

	algt = container_of(alg, struct sl3516_ce_alg_template, alg.skcipher.base);

	dev_dbg(ce->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

	algt->stat_req++;

	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src),
				    DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > MAXDESC / 2) {
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src),
				    DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > MAXDESC / 2) {
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend;
		}
		nr_sgd = dma_map_sg(ce->dev, areq->dst, sg_nents(areq->dst),
				    DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > MAXDESC) {
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->src;
	while (i < nr_sgs && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgs_next;
		rctx->t_src[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_src[i].len = todo;
		dev_dbg(ce->dev, "%s total=%u SGS(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgs_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ce->dev, "remaining len %d/%u nr_sgs=%d\n", len, areq->cryptlen, nr_sgs);
		err = -EINVAL;
		goto theend_sgs;
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->dst;
	while (i < nr_sgd && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgd_next;
		rctx->t_dst[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_dst[i].len = todo;
		dev_dbg(ce->dev, "%s total=%u SGD(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);
		len -= todo;
		i++;

sgd_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ce->dev, "remaining len %d\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	switch (algt->mode) {
	case ECB_AES:
		rctx->pctrllen = sizeof(struct pkt_control_ecb);
		ecb = (struct pkt_control_ecb *)ce->pctrl;

		rctx->tqflag = TQ0_TYPE_CTRL;
		rctx->tqflag |= TQ1_CIPHER;
		ecb->control.op_mode = rctx->op_dir;
		ecb->control.cipher_algorithm = ECB_AES;
		ecb->cipher.header_len = 0;
		ecb->cipher.algorithm_len = areq->cryptlen;
		cpu_to_be32_array((__be32 *)ecb->key, (u32 *)op->key, op->keylen / 4);
		rctx->h = &ecb->cipher;

		rctx->tqflag |= TQ4_KEY0;
		rctx->tqflag |= TQ5_KEY4;
		rctx->tqflag |= TQ6_KEY6;
		ecb->control.aesnk = op->keylen / 4;
		break;
	}

	rctx->nr_sgs = nr_sgs;
	rctx->nr_sgd = nr_sgd;
	err = sl3516_ce_run_task(ce, rctx, crypto_tfm_alg_name(areq->base.tfm));

theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ce->dev, areq->src, sg_nents(areq->src),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(ce->dev, areq->src, sg_nents(areq->src),
			     DMA_TO_DEVICE);
		dma_unmap_sg(ce->dev, areq->dst, sg_nents(areq->dst),
			     DMA_FROM_DEVICE);
	}

theend:

	return err;
}
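
/*
 * sl3516_ce_handle_cipher_request - crypto_engine callback: run the request
 * on the CE and report its completion back to the engine.
 */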
int sl3516_ce_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
	int err;
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);

	err = sl3516_ce_cipher(breq);
	local_bh_disable();
	crypto_finalize_skcipher_request(engine, breq, err);
	local_bh_enable();

	return 0;
}

int sl3516_ce_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;

	memset(rctx, 0, sizeof(struct sl3516_ce_cipher_req_ctx));
	rctx->op_dir = CE_DECRYPTION;

	if (sl3516_ce_need_fallback(areq))
		return sl3516_ce_cipher_fallback(areq);

	engine = op->ce->engine;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

int sl3516_ce_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;

	memset(rctx, 0, sizeof(struct sl3516_ce_cipher_req_ctx));
	rctx->op_dir = CE_ENCRYPTION;

	if (sl3516_ce_need_fallback(areq))
		return sl3516_ce_cipher_fallback(areq);

	engine = op->ce->engine;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

int sl3516_ce_cipher_init(struct crypto_tfm *tfm)
{
	struct sl3516_ce_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sl3516_ce_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sl3516_ce_cipher_tfm_ctx));

	algt = container_of(alg, struct sl3516_ce_alg_template, alg.skcipher.base);
	op->ce = algt->ce;

	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ce->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	crypto_skcipher_set_reqsize(sktfm, sizeof(struct sl3516_ce_cipher_req_ctx) +
				    crypto_skcipher_reqsize(op->fallback_tfm));

	dev_info(op->ce->dev, "Fallback for %s is %s\n",
		 crypto_tfm_alg_driver_name(&sktfm->base),
		 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)));

	err = pm_runtime_get_sync(op->ce->dev);
	if (err < 0)
		goto error_pm;

	return 0;
error_pm:
	pm_runtime_put_noidle(op->ce->dev);
	crypto_free_skcipher(op->fallback_tfm);
	return err;
}

void sl3516_ce_cipher_exit(struct crypto_tfm *tfm)
{
	struct sl3516_ce_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	kfree_sensitive(op->key);
	crypto_free_skcipher(op->fallback_tfm);
	pm_runtime_put_sync_suspend(op->ce->dev);
}
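
/*
 * sl3516_ce_aes_setkey - accept 128/192/256-bit AES keys, keep a DMA-able
 * copy for the hardware and propagate the key to the fallback tfm.
 */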
int sl3516_ce_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sl3516_ce_dev *ce = op->ce;

	switch (keylen) {
	case 128 / 8:
		break;
	case 192 / 8:
		break;
	case 256 / 8:
		break;
	default:
		dev_dbg(ce->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}
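
For reference, a minimal usage sketch of the "ecb(aes)" skcipher exposed by this driver, written against the standard kernel crypto API; it is not part of the file above. The helper name and buffer handling are illustrative only, the caller is assumed to be synchronous (via crypto_wait_req()), and whether a given request is actually offloaded to the CE or served by the fallback depends on the constraints checked in sl3516_ce_need_fallback().

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

/*
 * Hypothetical helper: encrypt @len bytes of @buf in place with ECB-AES.
 * @buf should be a kmalloc'ed (DMA-able) buffer, not stack memory.
 */
static int example_ecb_aes_encrypt(u8 *buf, unsigned int len,
				   const u8 *key, unsigned int keylen)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	/* ECB handles only whole AES blocks. */
	if (!len || len % AES_BLOCK_SIZE)
		return -EINVAL;

	tfm = crypto_alloc_skcipher("ecb(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	/* ECB takes no IV, hence the NULL. */
	skcipher_request_set_crypt(req, &sg, &sg, len, NULL);

	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}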