/* Source: drivers/crypto/allwinner/sun8i-ss/sun8i-ss-cipher.c */
// SPDX-License-Identifier: GPL-2.01/*2* sun8i-ss-cipher.c - hardware cryptographic offloader for3* Allwinner A80/A83T SoC4*5* Copyright (C) 2016-2019 Corentin LABBE <[email protected]>6*7* This file add support for AES cipher with 128,192,256 bits keysize in8* CBC and ECB mode.9*10* You could find a link for the datasheet in Documentation/arch/arm/sunxi.rst11*/1213#include <linux/bottom_half.h>14#include <linux/crypto.h>15#include <linux/dma-mapping.h>16#include <linux/io.h>17#include <linux/pm_runtime.h>18#include <crypto/scatterwalk.h>19#include <crypto/internal/skcipher.h>20#include "sun8i-ss.h"2122static bool sun8i_ss_need_fallback(struct skcipher_request *areq)23{24struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);25struct skcipher_alg *alg = crypto_skcipher_alg(tfm);26struct sun8i_ss_alg_template *algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher.base);27struct scatterlist *in_sg = areq->src;28struct scatterlist *out_sg = areq->dst;29struct scatterlist *sg;30unsigned int todo, len;3132if (areq->cryptlen == 0 || areq->cryptlen % 16) {33algt->stat_fb_len++;34return true;35}3637if (sg_nents_for_len(areq->src, areq->cryptlen) > 8 ||38sg_nents_for_len(areq->dst, areq->cryptlen) > 8) {39algt->stat_fb_sgnum++;40return true;41}4243len = areq->cryptlen;44sg = areq->src;45while (sg) {46todo = min(len, sg->length);47if ((todo % 16) != 0) {48algt->stat_fb_sglen++;49return true;50}51if (!IS_ALIGNED(sg->offset, 16)) {52algt->stat_fb_align++;53return true;54}55len -= todo;56sg = sg_next(sg);57}58len = areq->cryptlen;59sg = areq->dst;60while (sg) {61todo = min(len, sg->length);62if ((todo % 16) != 0) {63algt->stat_fb_sglen++;64return true;65}66if (!IS_ALIGNED(sg->offset, 16)) {67algt->stat_fb_align++;68return true;69}70len -= todo;71sg = sg_next(sg);72}7374/* SS need same numbers of SG (with same length) for source and destination */75in_sg = areq->src;76out_sg = areq->dst;77while (in_sg && out_sg) {78if (in_sg->length != out_sg->length)79return 
true;80in_sg = sg_next(in_sg);81out_sg = sg_next(out_sg);82}83if (in_sg || out_sg)84return true;85return false;86}8788static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)89{90struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);91struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);92struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);93int err;9495if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG)) {96struct skcipher_alg *alg = crypto_skcipher_alg(tfm);97struct sun8i_ss_alg_template *algt __maybe_unused;9899algt = container_of(alg, struct sun8i_ss_alg_template,100alg.skcipher.base);101102#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG103algt->stat_fb++;104#endif105}106107skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);108skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,109areq->base.complete, areq->base.data);110skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,111areq->cryptlen, areq->iv);112if (rctx->op_dir & SS_DECRYPTION)113err = crypto_skcipher_decrypt(&rctx->fallback_req);114else115err = crypto_skcipher_encrypt(&rctx->fallback_req);116return err;117}118119static int sun8i_ss_setup_ivs(struct skcipher_request *areq)120{121struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);122struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);123struct sun8i_ss_dev *ss = op->ss;124struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);125struct scatterlist *sg = areq->src;126unsigned int todo, offset;127unsigned int len = areq->cryptlen;128unsigned int ivsize = crypto_skcipher_ivsize(tfm);129struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];130int i = 0;131dma_addr_t a;132int err;133134rctx->ivlen = ivsize;135if (rctx->op_dir & SS_DECRYPTION) {136offset = areq->cryptlen - ivsize;137scatterwalk_map_and_copy(sf->biv, areq->src, offset,138ivsize, 0);139}140141/* we need to copy all IVs from source in case DMA is bi-directionnal */142while (sg && len) {143if (sg->length == 0) 
{144sg = sg_next(sg);145continue;146}147if (i == 0)148memcpy(sf->iv[0], areq->iv, ivsize);149a = dma_map_single(ss->dev, sf->iv[i], ivsize, DMA_TO_DEVICE);150if (dma_mapping_error(ss->dev, a)) {151memzero_explicit(sf->iv[i], ivsize);152dev_err(ss->dev, "Cannot DMA MAP IV\n");153err = -EFAULT;154goto dma_iv_error;155}156rctx->p_iv[i] = a;157/* we need to setup all others IVs only in the decrypt way */158if (rctx->op_dir == SS_ENCRYPTION)159return 0;160todo = min(len, sg_dma_len(sg));161len -= todo;162i++;163if (i < MAX_SG) {164offset = sg->length - ivsize;165scatterwalk_map_and_copy(sf->iv[i], sg, offset, ivsize, 0);166}167rctx->niv = i;168sg = sg_next(sg);169}170171return 0;172dma_iv_error:173i--;174while (i >= 0) {175dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);176memzero_explicit(sf->iv[i], ivsize);177i--;178}179return err;180}181182static int sun8i_ss_cipher(struct skcipher_request *areq)183{184struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);185struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);186struct sun8i_ss_dev *ss = op->ss;187struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);188struct skcipher_alg *alg = crypto_skcipher_alg(tfm);189struct sun8i_ss_alg_template *algt;190struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];191struct scatterlist *sg;192unsigned int todo, len, offset, ivsize;193int nr_sgs = 0;194int nr_sgd = 0;195int err = 0;196int nsgs = sg_nents_for_len(areq->src, areq->cryptlen);197int nsgd = sg_nents_for_len(areq->dst, areq->cryptlen);198int i;199200algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher.base);201202dev_dbg(ss->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,203crypto_tfm_alg_name(areq->base.tfm),204areq->cryptlen,205rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),206op->keylen);207208#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG209algt->stat_req++;210#endif211212rctx->op_mode = ss->variant->op_mode[algt->ss_blockmode];213rctx->method = 
ss->variant->alg_cipher[algt->ss_algo_id];214rctx->keylen = op->keylen;215216rctx->p_key = dma_map_single(ss->dev, op->key, op->keylen, DMA_TO_DEVICE);217if (dma_mapping_error(ss->dev, rctx->p_key)) {218dev_err(ss->dev, "Cannot DMA MAP KEY\n");219err = -EFAULT;220goto theend;221}222223ivsize = crypto_skcipher_ivsize(tfm);224if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {225err = sun8i_ss_setup_ivs(areq);226if (err)227goto theend_key;228}229if (areq->src == areq->dst) {230nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);231if (nr_sgs <= 0 || nr_sgs > 8) {232dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);233err = -EINVAL;234goto theend_iv;235}236nr_sgd = nr_sgs;237} else {238nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);239if (nr_sgs <= 0 || nr_sgs > 8) {240dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);241err = -EINVAL;242goto theend_iv;243}244nr_sgd = dma_map_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);245if (nr_sgd <= 0 || nr_sgd > 8) {246dev_err(ss->dev, "Invalid sg number %d\n", nr_sgd);247err = -EINVAL;248goto theend_sgs;249}250}251252len = areq->cryptlen;253i = 0;254sg = areq->src;255while (i < nr_sgs && sg && len) {256if (sg_dma_len(sg) == 0)257goto sgs_next;258rctx->t_src[i].addr = sg_dma_address(sg);259todo = min(len, sg_dma_len(sg));260rctx->t_src[i].len = todo / 4;261dev_dbg(ss->dev, "%s total=%u SGS(%d %u off=%d) todo=%u\n", __func__,262areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);263len -= todo;264i++;265sgs_next:266sg = sg_next(sg);267}268if (len > 0) {269dev_err(ss->dev, "remaining len %d\n", len);270err = -EINVAL;271goto theend_sgs;272}273274len = areq->cryptlen;275i = 0;276sg = areq->dst;277while (i < nr_sgd && sg && len) {278if (sg_dma_len(sg) == 0)279goto sgd_next;280rctx->t_dst[i].addr = sg_dma_address(sg);281todo = min(len, sg_dma_len(sg));282rctx->t_dst[i].len = todo / 4;283dev_dbg(ss->dev, "%s total=%u SGD(%d %u off=%d) todo=%u\n", __func__,284areq->cryptlen, i, rctx->t_dst[i].len, 
sg->offset, todo);285len -= todo;286i++;287sgd_next:288sg = sg_next(sg);289}290if (len > 0) {291dev_err(ss->dev, "remaining len %d\n", len);292err = -EINVAL;293goto theend_sgs;294}295296err = sun8i_ss_run_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));297298theend_sgs:299if (areq->src == areq->dst) {300dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);301} else {302dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);303dma_unmap_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);304}305306theend_iv:307if (areq->iv && ivsize > 0) {308for (i = 0; i < rctx->niv; i++) {309dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);310memzero_explicit(sf->iv[i], ivsize);311}312313offset = areq->cryptlen - ivsize;314if (rctx->op_dir & SS_DECRYPTION) {315memcpy(areq->iv, sf->biv, ivsize);316memzero_explicit(sf->biv, ivsize);317} else {318scatterwalk_map_and_copy(areq->iv, areq->dst, offset,319ivsize, 0);320}321}322323theend_key:324dma_unmap_single(ss->dev, rctx->p_key, op->keylen, DMA_TO_DEVICE);325326theend:327328return err;329}330331int sun8i_ss_handle_cipher_request(struct crypto_engine *engine, void *areq)332{333int err;334struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);335336err = sun8i_ss_cipher(breq);337local_bh_disable();338crypto_finalize_skcipher_request(engine, breq, err);339local_bh_enable();340341return 0;342}343344int sun8i_ss_skdecrypt(struct skcipher_request *areq)345{346struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);347struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);348struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);349struct crypto_engine *engine;350int e;351352memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));353rctx->op_dir = SS_DECRYPTION;354355if (sun8i_ss_need_fallback(areq))356return sun8i_ss_cipher_fallback(areq);357358e = sun8i_ss_get_engine_number(op->ss);359engine = op->ss->flows[e].engine;360rctx->flow = e;361362return 
crypto_transfer_skcipher_request_to_engine(engine, areq);363}364365int sun8i_ss_skencrypt(struct skcipher_request *areq)366{367struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);368struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);369struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);370struct crypto_engine *engine;371int e;372373memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));374rctx->op_dir = SS_ENCRYPTION;375376if (sun8i_ss_need_fallback(areq))377return sun8i_ss_cipher_fallback(areq);378379e = sun8i_ss_get_engine_number(op->ss);380engine = op->ss->flows[e].engine;381rctx->flow = e;382383return crypto_transfer_skcipher_request_to_engine(engine, areq);384}385386int sun8i_ss_cipher_init(struct crypto_tfm *tfm)387{388struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);389struct sun8i_ss_alg_template *algt;390const char *name = crypto_tfm_alg_name(tfm);391struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);392struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);393int err;394395memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));396397algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher.base);398op->ss = algt->ss;399400op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);401if (IS_ERR(op->fallback_tfm)) {402dev_err(op->ss->dev, "ERROR: Cannot allocate fallback for %s %ld\n",403name, PTR_ERR(op->fallback_tfm));404return PTR_ERR(op->fallback_tfm);405}406407crypto_skcipher_set_reqsize(sktfm, sizeof(struct sun8i_cipher_req_ctx) +408crypto_skcipher_reqsize(op->fallback_tfm));409410memcpy(algt->fbname,411crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)),412CRYPTO_MAX_ALG_NAME);413414err = pm_runtime_resume_and_get(op->ss->dev);415if (err < 0) {416dev_err(op->ss->dev, "pm error %d\n", err);417goto error_pm;418}419420return 0;421error_pm:422crypto_free_skcipher(op->fallback_tfm);423return err;424}425426void sun8i_ss_cipher_exit(struct crypto_tfm *tfm)427{428struct 
sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);429430kfree_sensitive(op->key);431crypto_free_skcipher(op->fallback_tfm);432pm_runtime_put_sync(op->ss->dev);433}434435int sun8i_ss_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,436unsigned int keylen)437{438struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);439struct sun8i_ss_dev *ss = op->ss;440441switch (keylen) {442case 128 / 8:443break;444case 192 / 8:445break;446case 256 / 8:447break;448default:449dev_dbg(ss->dev, "ERROR: Invalid keylen %u\n", keylen);450return -EINVAL;451}452kfree_sensitive(op->key);453op->keylen = keylen;454op->key = kmemdup(key, keylen, GFP_KERNEL);455if (!op->key)456return -ENOMEM;457458crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);459crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);460461return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);462}463464int sun8i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,465unsigned int keylen)466{467struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);468struct sun8i_ss_dev *ss = op->ss;469470if (unlikely(keylen != 3 * DES_KEY_SIZE)) {471dev_dbg(ss->dev, "Invalid keylen %u\n", keylen);472return -EINVAL;473}474475kfree_sensitive(op->key);476op->keylen = keylen;477op->key = kmemdup(key, keylen, GFP_KERNEL);478if (!op->key)479return -ENOMEM;480481crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);482crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);483484return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);485}486487488