Path: blob/master/drivers/crypto/allwinner/sun8i-ce/sun8i-ce-cipher.c
// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ce-cipher.c - hardware cryptographic offloader for
 * Allwinner H3/A64/H5/H2+/H6/R40 SoC
 *
 * Copyright (C) 2016-2019 Corentin LABBE <[email protected]>
 *
 * This file adds support for the AES cipher with 128, 192 and 256 bit keys
 * in CBC and ECB mode.
 *
 * You can find a link to the datasheet in Documentation/arch/arm/sunxi.rst
 */

#include <linux/bottom_half.h>
#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>
#include "sun8i-ce.h"

static int sun8i_ce_cipher_need_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct scatterlist *sg;
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ce_alg_template *algt;
	unsigned int todo, len;

	algt = container_of(alg, struct sun8i_ce_alg_template, alg.skcipher.base);

	if (sg_nents_for_len(areq->src, areq->cryptlen) > MAX_SG ||
	    sg_nents_for_len(areq->dst, areq->cryptlen) > MAX_SG) {
		if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG))
			algt->stat_fb_maxsg++;

		return true;
	}

	if (areq->cryptlen < crypto_skcipher_ivsize(tfm)) {
		if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG))
			algt->stat_fb_leniv++;

		return true;
	}

	if (areq->cryptlen == 0) {
		if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG))
			algt->stat_fb_len0++;

		return true;
	}

	if (areq->cryptlen % 16) {
		if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG))
			algt->stat_fb_mod16++;

		return true;
	}

	len = areq->cryptlen;
	sg = areq->src;
	while (sg) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32))) {
			if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG))
				algt->stat_fb_srcali++;

			return true;
		}
		todo = min(len, sg->length);
		if (todo % 4) {
			if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG))
				algt->stat_fb_srclen++;

			return true;
		}
		len -= todo;
		sg = sg_next(sg);
	}

	len = areq->cryptlen;
	sg = areq->dst;
	while (sg) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32))) {
			if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG))
				algt->stat_fb_dstali++;

			return true;
		}
		todo = min(len, sg->length);
		if (todo % 4) {
			if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG))
				algt->stat_fb_dstlen++;

			return true;
		}
		len -= todo;
		sg = sg_next(sg);
	}
	return false;
}

static int sun8i_ce_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG)) {
		struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
		struct sun8i_ce_alg_template *algt __maybe_unused;

		algt = container_of(alg, struct sun8i_ce_alg_template,
				    alg.skcipher.base);

		algt->stat_fb++;
	}

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir & CE_DECRYPTION)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}

static int sun8i_ce_cipher_prepare(struct crypto_engine *engine, void *async_req)
{
	struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ce_dev *ce = op->ce;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ce_alg_template *algt;
	struct sun8i_ce_flow *chan;
	struct ce_task *cet;
	struct scatterlist *sg;
	unsigned int todo, len, offset, ivsize;
	u32 common, sym;
	int flow, i;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;
	int ns = sg_nents_for_len(areq->src, areq->cryptlen);
	int nd = sg_nents_for_len(areq->dst, areq->cryptlen);

	algt = container_of(alg, struct sun8i_ce_alg_template, alg.skcipher.base);

	dev_dbg(ce->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

	if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG))
		algt->stat_req++;

	flow = rctx->flow;

	chan = &ce->chanlist[flow];

	cet = chan->tl;
	memset(cet, 0, sizeof(struct ce_task));

	cet->t_id = cpu_to_le32(flow);
	common = ce->variant->alg_cipher[algt->ce_algo_id];
	common |= rctx->op_dir | CE_COMM_INT;
	cet->t_common_ctl = cpu_to_le32(common);
	/* CTS and recent CE (H6) need length in bytes, in words otherwise */
	if (ce->variant->cipher_t_dlen_in_bytes)
		cet->t_dlen = cpu_to_le32(areq->cryptlen);
	else
		cet->t_dlen = cpu_to_le32(areq->cryptlen / 4);

	sym = ce->variant->op_mode[algt->ce_blockmode];
	len = op->keylen;
	switch (len) {
	case 128 / 8:
		sym |= CE_AES_128BITS;
		break;
	case 192 / 8:
		sym |= CE_AES_192BITS;
		break;
	case 256 / 8:
		sym |= CE_AES_256BITS;
		break;
	}

	cet->t_sym_ctl = cpu_to_le32(sym);
	cet->t_asym_ctl = 0;

	rctx->addr_key = dma_map_single(ce->dev, op->key, op->keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(ce->dev, rctx->addr_key)) {
		dev_err(ce->dev, "Cannot DMA MAP KEY\n");
		err = -EFAULT;
		goto theend;
	}
	cet->t_key = desc_addr_val_le32(ce, rctx->addr_key);

	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && ivsize > 0) {
		if (rctx->op_dir & CE_DECRYPTION) {
			offset = areq->cryptlen - ivsize;
			scatterwalk_map_and_copy(chan->backup_iv, areq->src,
						 offset, ivsize, 0);
		}
		memcpy(chan->bounce_iv, areq->iv, ivsize);
		rctx->addr_iv = dma_map_single(ce->dev, chan->bounce_iv, ivsize,
					       DMA_TO_DEVICE);
		if (dma_mapping_error(ce->dev, rctx->addr_iv)) {
			dev_err(ce->dev, "Cannot DMA MAP IV\n");
			err = -ENOMEM;
			goto theend_iv;
		}
		cet->t_iv = desc_addr_val_le32(ce, rctx->addr_iv);
	}

	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ce->dev, areq->src, ns, DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > MAX_SG) {
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ce->dev, areq->src, ns, DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > MAX_SG) {
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = dma_map_sg(ce->dev, areq->dst, nd, DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > MAX_SG) {
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}

	len = areq->cryptlen;
	for_each_sg(areq->src, sg, nr_sgs, i) {
		cet->t_src[i].addr = desc_addr_val_le32(ce, sg_dma_address(sg));
		todo = min(len, sg_dma_len(sg));
		cet->t_src[i].len = cpu_to_le32(todo / 4);
		dev_dbg(ce->dev, "%s total=%u SG(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, cet->t_src[i].len, sg->offset, todo);
		len -= todo;
	}
	if (len > 0) {
		dev_err(ce->dev, "remaining len %d\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	len = areq->cryptlen;
	for_each_sg(areq->dst, sg, nr_sgd, i) {
		cet->t_dst[i].addr = desc_addr_val_le32(ce, sg_dma_address(sg));
		todo = min(len, sg_dma_len(sg));
		cet->t_dst[i].len = cpu_to_le32(todo / 4);
		dev_dbg(ce->dev, "%s total=%u SG(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, cet->t_dst[i].len, sg->offset, todo);
		len -= todo;
	}
	if (len > 0) {
		dev_err(ce->dev, "remaining len %d\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	chan->timeout = areq->cryptlen;
	rctx->nr_sgs = ns;
	rctx->nr_sgd = nd;
	return 0;

theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ce->dev, areq->src, ns, DMA_BIDIRECTIONAL);
	} else {
		if (nr_sgs > 0)
			dma_unmap_sg(ce->dev, areq->src, ns, DMA_TO_DEVICE);

		if (nr_sgd > 0)
			dma_unmap_sg(ce->dev, areq->dst, nd, DMA_FROM_DEVICE);
	}

theend_iv:
	if (areq->iv && ivsize > 0) {
		if (!dma_mapping_error(ce->dev, rctx->addr_iv))
			dma_unmap_single(ce->dev, rctx->addr_iv, ivsize,
					 DMA_TO_DEVICE);

		offset = areq->cryptlen - ivsize;
		if (rctx->op_dir & CE_DECRYPTION) {
			memcpy(areq->iv, chan->backup_iv, ivsize);
			memzero_explicit(chan->backup_iv, ivsize);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
		memzero_explicit(chan->bounce_iv, ivsize);
	}

	dma_unmap_single(ce->dev, rctx->addr_key, op->keylen, DMA_TO_DEVICE);

theend:
	return err;
}

static void sun8i_ce_cipher_unprepare(struct crypto_engine *engine,
				      void *async_req)
{
	struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ce_dev *ce = op->ce;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct sun8i_ce_flow *chan;
	struct ce_task *cet;
	unsigned int ivsize, offset;
	int nr_sgs = rctx->nr_sgs;
	int nr_sgd = rctx->nr_sgd;
	int flow;

	flow = rctx->flow;
	chan = &ce->chanlist[flow];
	cet = chan->tl;
	ivsize = crypto_skcipher_ivsize(tfm);

	if (areq->src == areq->dst) {
		dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_BIDIRECTIONAL);
	} else {
		if (nr_sgs > 0)
			dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_TO_DEVICE);
		dma_unmap_sg(ce->dev, areq->dst, nr_sgd, DMA_FROM_DEVICE);
	}

	if (areq->iv && ivsize > 0) {
		if (cet->t_iv)
			dma_unmap_single(ce->dev, rctx->addr_iv, ivsize,
					 DMA_TO_DEVICE);
		offset = areq->cryptlen - ivsize;
		if (rctx->op_dir & CE_DECRYPTION) {
			memcpy(areq->iv, chan->backup_iv, ivsize);
			memzero_explicit(chan->backup_iv, ivsize);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
		memzero_explicit(chan->bounce_iv, ivsize);
	}

	dma_unmap_single(ce->dev, rctx->addr_key, op->keylen, DMA_TO_DEVICE);
}

static void sun8i_ce_cipher_run(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(breq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ce_dev *ce = op->ce;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(breq);
	int flow, err;

	flow = rctx->flow;
	err = sun8i_ce_run_task(ce, flow, crypto_tfm_alg_name(breq->base.tfm));
	sun8i_ce_cipher_unprepare(engine, areq);
	local_bh_disable();
	crypto_finalize_skcipher_request(engine, breq, err);
	local_bh_enable();
}

int sun8i_ce_cipher_do_one(struct crypto_engine *engine, void *areq)
{
	int err = sun8i_ce_cipher_prepare(engine, areq);

	if (err)
		return err;

	sun8i_ce_cipher_run(engine, areq);
	return 0;
}

int sun8i_ce_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	rctx->op_dir = CE_DECRYPTION;
	if (sun8i_ce_cipher_need_fallback(areq))
		return sun8i_ce_cipher_fallback(areq);

	e = sun8i_ce_get_engine_number(op->ce);
	rctx->flow = e;
	engine = op->ce->chanlist[e].engine;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

int sun8i_ce_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	rctx->op_dir = CE_ENCRYPTION;
	if (sun8i_ce_cipher_need_fallback(areq))
		return sun8i_ce_cipher_fallback(areq);

	e = sun8i_ce_get_engine_number(op->ce);
	rctx->flow = e;
	engine = op->ce->chanlist[e].engine;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

int sun8i_ce_cipher_init(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sun8i_ce_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));

	algt = container_of(alg, struct sun8i_ce_alg_template, alg.skcipher.base);
	op->ce = algt->ce;

	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ce->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	crypto_skcipher_set_reqsize(sktfm, sizeof(struct sun8i_cipher_req_ctx) +
				    crypto_skcipher_reqsize(op->fallback_tfm));

	if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG))
		memcpy(algt->fbname,
		       crypto_skcipher_driver_name(op->fallback_tfm),
		       CRYPTO_MAX_ALG_NAME);

	err = pm_runtime_resume_and_get(op->ce->dev);
	if (err < 0)
		goto error_pm;

	return 0;
error_pm:
	crypto_free_skcipher(op->fallback_tfm);
	return err;
}

void sun8i_ce_cipher_exit(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	kfree_sensitive(op->key);
	crypto_free_skcipher(op->fallback_tfm);
	pm_runtime_put_sync_suspend(op->ce->dev);
}

int sun8i_ce_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ce_dev *ce = op->ce;

	switch (keylen) {
	case 128 / 8:
		break;
	case 192 / 8:
		break;
	case 256 / 8:
		break;
	default:
		dev_dbg(ce->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}

int sun8i_ce_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	int err;

	err = verify_skcipher_des3_key(tfm, key);
	if (err)
		return err;

	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}
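For context, here is a minimal, hedged sketch (not part of this file) of how an in-kernel consumer might exercise this driver through the generic skcipher API. When the Crypto Engine registers "cbc(aes)", a request like the one below is routed to sun8i_ce_skencrypt() above, unless sun8i_ce_cipher_need_fallback() sends it to the software fallback. The function name example_cbc_aes_encrypt() and the buffer handling are illustrative assumptions, not code from this driver.

/*
 * Illustrative consumer of "cbc(aes)"; assumes buf points to DMA-able
 * memory (e.g. kmalloc'ed) and len is a multiple of 16 and >= the IV size,
 * so the hardware path is taken rather than the fallback.
 */
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <crypto/skcipher.h>

static int example_cbc_aes_encrypt(u8 *buf, unsigned int len,
				   const u8 *key, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	/* The crypto core picks the highest-priority "cbc(aes)" provider. */
	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, 16);	/* AES-128 */
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* In-place encryption: src == dst, so the driver maps it bidirectionally. */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* Wait for the crypto engine (or the software fallback) to complete. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}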