Path: blob/master/drivers/crypto/gemini/sl3516-ce-core.c
// SPDX-License-Identifier: GPL-2.0
/*
 * sl3516-ce-core.c - hardware cryptographic offloader for Storlink SL3516 SoC
 *
 * Copyright (C) 2021 Corentin Labbe <[email protected]>
 *
 * Core file which registers crypto algorithms supported by the CryptoEngine
 */

#include <crypto/engine.h>
#include <crypto/internal/rng.h>
#include <crypto/internal/skcipher.h>
#include <linux/clk.h>
#include <linux/debugfs.h>
#include <linux/dev_printk.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/irq.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include "sl3516-ce.h"

static int sl3516_ce_desc_init(struct sl3516_ce_dev *ce)
{
	const size_t sz = sizeof(struct descriptor) * MAXDESC;
	int i;

	ce->tx = dma_alloc_coherent(ce->dev, sz, &ce->dtx, GFP_KERNEL);
	if (!ce->tx)
		return -ENOMEM;
	ce->rx = dma_alloc_coherent(ce->dev, sz, &ce->drx, GFP_KERNEL);
	if (!ce->rx)
		goto err_rx;

	for (i = 0; i < MAXDESC; i++) {
		ce->tx[i].frame_ctrl.bits.own = CE_CPU;
		ce->tx[i].next_desc.next_descriptor = ce->dtx + (i + 1) * sizeof(struct descriptor);
	}
	ce->tx[MAXDESC - 1].next_desc.next_descriptor = ce->dtx;

	for (i = 0; i < MAXDESC; i++) {
		ce->rx[i].frame_ctrl.bits.own = CE_CPU;
		ce->rx[i].next_desc.next_descriptor = ce->drx + (i + 1) * sizeof(struct descriptor);
	}
	ce->rx[MAXDESC - 1].next_desc.next_descriptor = ce->drx;

	ce->pctrl = dma_alloc_coherent(ce->dev, sizeof(struct pkt_control_ecb),
				       &ce->dctrl, GFP_KERNEL);
	if (!ce->pctrl)
		goto err_pctrl;

	return 0;
err_pctrl:
	dma_free_coherent(ce->dev, sz, ce->rx, ce->drx);
err_rx:
	dma_free_coherent(ce->dev, sz, ce->tx, ce->dtx);
	return -ENOMEM;
}

static void sl3516_ce_free_descs(struct sl3516_ce_dev *ce)
{
	const size_t sz = sizeof(struct descriptor) * MAXDESC;

	dma_free_coherent(ce->dev, sz, ce->tx, ce->dtx);
	dma_free_coherent(ce->dev, sz, ce->rx, ce->drx);
	dma_free_coherent(ce->dev, sizeof(struct pkt_control_ecb), ce->pctrl,
			  ce->dctrl);
}

static void start_dma_tx(struct sl3516_ce_dev *ce)
{
	u32 v;

	v = TXDMA_CTRL_START | TXDMA_CTRL_CHAIN_MODE | TXDMA_CTRL_CONTINUE | \
		TXDMA_CTRL_INT_FAIL | TXDMA_CTRL_INT_PERR | TXDMA_CTRL_BURST_UNK;

	writel(v, ce->base + IPSEC_TXDMA_CTRL);
}

static void start_dma_rx(struct sl3516_ce_dev *ce)
{
	u32 v;

	v = RXDMA_CTRL_START | RXDMA_CTRL_CHAIN_MODE | RXDMA_CTRL_CONTINUE | \
		RXDMA_CTRL_BURST_UNK | RXDMA_CTRL_INT_FINISH | \
		RXDMA_CTRL_INT_FAIL | RXDMA_CTRL_INT_PERR | \
		RXDMA_CTRL_INT_EOD | RXDMA_CTRL_INT_EOF;

	writel(v, ce->base + IPSEC_RXDMA_CTRL);
}

static struct descriptor *get_desc_tx(struct sl3516_ce_dev *ce)
{
	struct descriptor *dd;

	dd = &ce->tx[ce->ctx];
	ce->ctx++;
	if (ce->ctx >= MAXDESC)
		ce->ctx = 0;
	return dd;
}

static struct descriptor *get_desc_rx(struct sl3516_ce_dev *ce)
{
	struct descriptor *rdd;

	rdd = &ce->rx[ce->crx];
	ce->crx++;
	if (ce->crx >= MAXDESC)
		ce->crx = 0;
	return rdd;
}

int sl3516_ce_run_task(struct sl3516_ce_dev *ce, struct sl3516_ce_cipher_req_ctx *rctx,
		       const char *name)
{
	struct descriptor *dd, *rdd = NULL;
	u32 v;
	int i, err = 0;

	ce->stat_req++;

	reinit_completion(&ce->complete);
	ce->status = 0;

	for (i = 0; i < rctx->nr_sgd; i++) {
		dev_dbg(ce->dev, "%s handle DST SG %d/%d len=%d\n", __func__,
			i, rctx->nr_sgd, rctx->t_dst[i].len);
		rdd = get_desc_rx(ce);
		rdd->buf_adr = rctx->t_dst[i].addr;
		rdd->frame_ctrl.bits.buffer_size = rctx->t_dst[i].len;
		rdd->frame_ctrl.bits.own = CE_DMA;
	}
	rdd->next_desc.bits.eofie = 1;

	for (i = 0; i < rctx->nr_sgs; i++) {
		dev_dbg(ce->dev, "%s handle SRC SG %d/%d len=%d\n", __func__,
			i, rctx->nr_sgs, rctx->t_src[i].len);
		rctx->h->algorithm_len = rctx->t_src[i].len;

		dd = get_desc_tx(ce);
		dd->frame_ctrl.raw = 0;
		dd->flag_status.raw = 0;
		dd->frame_ctrl.bits.buffer_size = rctx->pctrllen;
		dd->buf_adr = ce->dctrl;
		dd->flag_status.tx_flag.tqflag = rctx->tqflag;
		dd->next_desc.bits.eofie = 0;
		dd->next_desc.bits.dec = 0;
		dd->next_desc.bits.sof_eof = DESC_FIRST | DESC_LAST;
		dd->frame_ctrl.bits.own = CE_DMA;

		dd = get_desc_tx(ce);
		dd->frame_ctrl.raw = 0;
		dd->flag_status.raw = 0;
		dd->frame_ctrl.bits.buffer_size = rctx->t_src[i].len;
		dd->buf_adr = rctx->t_src[i].addr;
		dd->flag_status.tx_flag.tqflag = 0;
		dd->next_desc.bits.eofie = 0;
		dd->next_desc.bits.dec = 0;
		dd->next_desc.bits.sof_eof = DESC_FIRST | DESC_LAST;
		dd->frame_ctrl.bits.own = CE_DMA;
		start_dma_tx(ce);
		start_dma_rx(ce);
	}
	wait_for_completion_interruptible_timeout(&ce->complete,
						  msecs_to_jiffies(5000));
	if (ce->status == 0) {
		dev_err(ce->dev, "DMA timeout for %s\n", name);
		err = -EFAULT;
	}
	v = readl(ce->base + IPSEC_STATUS_REG);
	if (v & 0xFFF) {
		dev_err(ce->dev, "IPSEC_STATUS_REG %x\n", v);
		err = -EFAULT;
	}

	return err;
}

static irqreturn_t ce_irq_handler(int irq, void *data)
{
	struct sl3516_ce_dev *ce = (struct sl3516_ce_dev *)data;
	u32 v;

	ce->stat_irq++;

	v = readl(ce->base + IPSEC_DMA_STATUS);
	writel(v, ce->base + IPSEC_DMA_STATUS);

	if (v & DMA_STATUS_TS_DERR)
		dev_err(ce->dev, "AHB bus Error While Tx !!!\n");
	if (v & DMA_STATUS_TS_PERR)
		dev_err(ce->dev, "Tx Descriptor Protocol Error !!!\n");
	if (v & DMA_STATUS_RS_DERR)
		dev_err(ce->dev, "AHB bus Error While Rx !!!\n");
	if (v & DMA_STATUS_RS_PERR)
		dev_err(ce->dev, "Rx Descriptor Protocol Error !!!\n");

	if (v & DMA_STATUS_TS_EOFI)
		ce->stat_irq_tx++;
	if (v & DMA_STATUS_RS_EOFI) {
		ce->status = 1;
		complete(&ce->complete);
		ce->stat_irq_rx++;
		return IRQ_HANDLED;
	}

	return IRQ_HANDLED;
}

static struct sl3516_ce_alg_template ce_algs[] = {
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.mode = ECB_AES,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-sl3516",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sl3516_ce_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sl3516_ce_cipher_init,
			.cra_exit = sl3516_ce_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= sl3516_ce_aes_setkey,
		.encrypt	= sl3516_ce_skencrypt,
		.decrypt	= sl3516_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sl3516_ce_handle_cipher_request,
	},
},
};

static int sl3516_ce_debugfs_show(struct seq_file *seq, void *v)
{
	struct sl3516_ce_dev *ce = seq->private;
	unsigned int i;

	seq_printf(seq, "HWRNG %lu %lu\n",
		   ce->hwrng_stat_req, ce->hwrng_stat_bytes);
	seq_printf(seq, "IRQ %lu\n", ce->stat_irq);
	seq_printf(seq, "IRQ TX %lu\n", ce->stat_irq_tx);
	seq_printf(seq, "IRQ RX %lu\n", ce->stat_irq_rx);
	seq_printf(seq, "nreq %lu\n", ce->stat_req);
	seq_printf(seq, "fallback SG count TX %lu\n", ce->fallback_sg_count_tx);
	seq_printf(seq, "fallback SG count RX %lu\n", ce->fallback_sg_count_rx);
	seq_printf(seq, "fallback modulo16 %lu\n", ce->fallback_mod16);
	seq_printf(seq, "fallback align16 %lu\n", ce->fallback_align16);
	seq_printf(seq, "fallback not same len %lu\n", ce->fallback_not_same_len);

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		if (!ce_algs[i].ce)
			continue;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ce_algs[i].alg.skcipher.base.base.cra_driver_name,
				   ce_algs[i].alg.skcipher.base.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_fb);
			break;
		}
	}
	return 0;
}

DEFINE_SHOW_ATTRIBUTE(sl3516_ce_debugfs);

static int sl3516_ce_register_algs(struct sl3516_ce_dev *ce)
{
	int err;
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		ce_algs[i].ce = ce;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			dev_info(ce->dev, "DEBUG: Register %s\n",
				 ce_algs[i].alg.skcipher.base.base.cra_name);
			err = crypto_engine_register_skcipher(&ce_algs[i].alg.skcipher);
			if (err) {
				dev_err(ce->dev, "Fail to register %s\n",
					ce_algs[i].alg.skcipher.base.base.cra_name);
				ce_algs[i].ce = NULL;
				return err;
			}
			break;
		default:
			ce_algs[i].ce = NULL;
			dev_err(ce->dev, "ERROR: tried to register an unknown algo\n");
		}
	}
	return 0;
}

static void sl3516_ce_unregister_algs(struct sl3516_ce_dev *ce)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		if (!ce_algs[i].ce)
			continue;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			dev_info(ce->dev, "Unregister %d %s\n", i,
				 ce_algs[i].alg.skcipher.base.base.cra_name);
			crypto_engine_unregister_skcipher(&ce_algs[i].alg.skcipher);
			break;
		}
	}
}

static void sl3516_ce_start(struct sl3516_ce_dev *ce)
{
	ce->ctx = 0;
	ce->crx = 0;
	writel(ce->dtx, ce->base + IPSEC_TXDMA_CURR_DESC);
	writel(ce->drx, ce->base + IPSEC_RXDMA_CURR_DESC);
	writel(0, ce->base + IPSEC_DMA_STATUS);
}

/*
 * Power management strategy: The device is suspended unless a TFM exists for
 * one of the algorithms proposed by this driver.
 */
static int sl3516_ce_pm_suspend(struct device *dev)
{
	struct sl3516_ce_dev *ce = dev_get_drvdata(dev);

	reset_control_assert(ce->reset);
	clk_disable_unprepare(ce->clks);
	return 0;
}

static int sl3516_ce_pm_resume(struct device *dev)
{
	struct sl3516_ce_dev *ce = dev_get_drvdata(dev);
	int err;

	err = clk_prepare_enable(ce->clks);
	if (err) {
		dev_err(ce->dev, "Cannot prepare_enable\n");
		goto error;
	}
	err = reset_control_deassert(ce->reset);
	if (err) {
		dev_err(ce->dev, "Cannot deassert reset control\n");
		goto error;
	}

	sl3516_ce_start(ce);

	return 0;
error:
	sl3516_ce_pm_suspend(dev);
	return err;
}

static const struct dev_pm_ops sl3516_ce_pm_ops = {
	SET_RUNTIME_PM_OPS(sl3516_ce_pm_suspend, sl3516_ce_pm_resume, NULL)
};

static int sl3516_ce_pm_init(struct sl3516_ce_dev *ce)
{
	int err;

	pm_runtime_use_autosuspend(ce->dev);
	pm_runtime_set_autosuspend_delay(ce->dev, 2000);

	err = pm_runtime_set_suspended(ce->dev);
	if (err)
		return err;
	pm_runtime_enable(ce->dev);
	return err;
}

static void sl3516_ce_pm_exit(struct sl3516_ce_dev *ce)
{
	pm_runtime_disable(ce->dev);
}

static int sl3516_ce_probe(struct platform_device *pdev)
{
	struct sl3516_ce_dev *ce;
	int err, irq;
	u32 v;

	ce = devm_kzalloc(&pdev->dev, sizeof(*ce), GFP_KERNEL);
	if (!ce)
		return -ENOMEM;

	ce->dev = &pdev->dev;
	platform_set_drvdata(pdev, ce);

	ce->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(ce->base))
		return PTR_ERR(ce->base);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	err = devm_request_irq(&pdev->dev, irq, ce_irq_handler, 0, "crypto", ce);
	if (err) {
		dev_err(ce->dev, "Cannot request Crypto Engine IRQ (err=%d)\n", err);
		return err;
	}

	ce->reset = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(ce->reset))
		return dev_err_probe(&pdev->dev, PTR_ERR(ce->reset),
				     "No reset control found\n");
	ce->clks = devm_clk_get(ce->dev, NULL);
	if (IS_ERR(ce->clks)) {
		err = PTR_ERR(ce->clks);
		dev_err(ce->dev, "Cannot get clock err=%d\n", err);
		return err;
	}

	err = sl3516_ce_desc_init(ce);
	if (err)
		return err;

	err = sl3516_ce_pm_init(ce);
	if (err)
		goto error_pm;

	init_completion(&ce->complete);

	ce->engine = crypto_engine_alloc_init(ce->dev, true);
	if (!ce->engine) {
		dev_err(ce->dev, "Cannot allocate engine\n");
		err = -ENOMEM;
		goto error_engine;
	}

	err = crypto_engine_start(ce->engine);
	if (err) {
		dev_err(ce->dev, "Cannot start engine\n");
		goto error_engine;
	}

	err = sl3516_ce_register_algs(ce);
	if (err)
		goto error_alg;

	err = sl3516_ce_rng_register(ce);
	if (err)
		goto error_rng;

	err = pm_runtime_resume_and_get(ce->dev);
	if (err < 0)
		goto error_pmuse;

	v = readl(ce->base + IPSEC_ID);
	dev_info(ce->dev, "SL3516 dev %lx rev %lx\n",
		 v & GENMASK(31, 4),
		 v & GENMASK(3, 0));
	v = readl(ce->base + IPSEC_DMA_DEVICE_ID);
	dev_info(ce->dev, "SL3516 DMA dev %lx rev %lx\n",
		 v & GENMASK(15, 4),
		 v & GENMASK(3, 0));

	pm_runtime_put_sync(ce->dev);

	if (IS_ENABLED(CONFIG_CRYPTO_DEV_SL3516_DEBUG)) {
		struct dentry *dbgfs_dir __maybe_unused;
		struct dentry *dbgfs_stats __maybe_unused;

		/* Ignore error of debugfs */
		dbgfs_dir = debugfs_create_dir("sl3516", NULL);
		dbgfs_stats = debugfs_create_file("stats", 0444,
						  dbgfs_dir, ce,
						  &sl3516_ce_debugfs_fops);
#ifdef CONFIG_CRYPTO_DEV_SL3516_DEBUG
		ce->dbgfs_dir = dbgfs_dir;
		ce->dbgfs_stats = dbgfs_stats;
#endif
	}

	return 0;
error_pmuse:
	sl3516_ce_rng_unregister(ce);
error_rng:
	sl3516_ce_unregister_algs(ce);
error_alg:
	crypto_engine_exit(ce->engine);
error_engine:
	sl3516_ce_pm_exit(ce);
error_pm:
	sl3516_ce_free_descs(ce);
	return err;
}

static void sl3516_ce_remove(struct platform_device *pdev)
{
	struct sl3516_ce_dev *ce = platform_get_drvdata(pdev);

	sl3516_ce_rng_unregister(ce);
	sl3516_ce_unregister_algs(ce);
	crypto_engine_exit(ce->engine);
	sl3516_ce_pm_exit(ce);
	sl3516_ce_free_descs(ce);

#ifdef CONFIG_CRYPTO_DEV_SL3516_DEBUG
	debugfs_remove_recursive(ce->dbgfs_dir);
#endif
}

static const struct of_device_id sl3516_ce_crypto_of_match_table[] = {
	{ .compatible = "cortina,sl3516-crypto"},
	{}
};
MODULE_DEVICE_TABLE(of, sl3516_ce_crypto_of_match_table);

static struct platform_driver sl3516_ce_driver = {
	.probe		 = sl3516_ce_probe,
	.remove		 = sl3516_ce_remove,
	.driver		 = {
		.name		= "sl3516-crypto",
		.pm		= &sl3516_ce_pm_ops,
		.of_match_table	= sl3516_ce_crypto_of_match_table,
	},
};

module_platform_driver(sl3516_ce_driver);

MODULE_DESCRIPTION("SL3516 cryptographic offloader");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Corentin Labbe <[email protected]>");