Path: blob/master/drivers/crypto/hisilicon/hpre/hpre_crypto.c
// SPDX-License-Identifier: GPL-2.0
/* Copyright (c) 2019 HiSilicon Limited. */
#include <crypto/akcipher.h>
#include <crypto/curve25519.h>
#include <crypto/dh.h>
#include <crypto/ecc_curve.h>
#include <crypto/ecdh.h>
#include <crypto/rng.h>
#include <crypto/internal/akcipher.h>
#include <crypto/internal/kpp.h>
#include <crypto/internal/rsa.h>
#include <crypto/kpp.h>
#include <crypto/scatterwalk.h>
#include <linux/dma-mapping.h>
#include <linux/fips.h>
#include <linux/module.h>
#include <linux/time.h>
#include "hpre.h"

struct hpre_ctx;

#define HPRE_CRYPTO_ALG_PRI	1000
#define HPRE_ALIGN_SZ		64
#define HPRE_BITS_2_BYTES_SHIFT	3
#define HPRE_RSA_512BITS_KSZ	64
#define HPRE_RSA_1536BITS_KSZ	192
#define HPRE_CRT_PRMS		5
#define HPRE_CRT_Q		2
#define HPRE_CRT_P		3
#define HPRE_CRT_INV		4
#define HPRE_DH_G_FLAG		0x02
#define HPRE_TRY_SEND_TIMES	100
#define HPRE_INVLD_REQ_ID	(-1)

#define HPRE_SQE_ALG_BITS	5
#define HPRE_SQE_DONE_SHIFT	30
#define HPRE_DH_MAX_P_SZ	512

#define HPRE_DFX_SEC_TO_US	1000000
#define HPRE_DFX_US_TO_NS	1000

#define HPRE_ENABLE_HPCORE_SHIFT 7

/* due to nist p521 */
#define HPRE_ECC_MAX_KSZ	66

/* size in bytes of the n prime */
#define HPRE_ECC_NIST_P192_N_SIZE	24
#define HPRE_ECC_NIST_P256_N_SIZE	32
#define HPRE_ECC_NIST_P384_N_SIZE	48

/* size in bytes */
#define HPRE_ECC_HW256_KSZ_B	32
#define HPRE_ECC_HW384_KSZ_B	48

/* capability register mask of driver */
#define HPRE_DRV_RSA_MASK_CAP		BIT(0)
#define HPRE_DRV_DH_MASK_CAP		BIT(1)
#define HPRE_DRV_ECDH_MASK_CAP		BIT(2)
#define HPRE_DRV_X25519_MASK_CAP	BIT(5)

static DEFINE_MUTEX(hpre_algs_lock);
static unsigned int hpre_available_devs;

typedef void (*hpre_cb)(struct hpre_ctx *ctx, void *sqe);

struct hpre_rsa_ctx {
	/* low address: e--->n */
	char *pubkey;
	dma_addr_t dma_pubkey;

	/* low address: d--->n */
	char *prikey;
	dma_addr_t dma_prikey;

	/* low address: dq->dp->q->p->qinv */
	char *crt_prikey;
	dma_addr_t dma_crt_prikey;

	struct crypto_akcipher *soft_tfm;
};

struct hpre_dh_ctx {
	/*
	 * If base is g we compute the public key
	 *	ya = g^xa mod p; [RFC2631 sec 2.1.1]
	 * else if base is the counterpart public key we
	 * compute the shared secret
	 *	ZZ = yb^xa mod p; [RFC2631 sec 2.1.1]
	 * low address: d--->n, please refer to Hisilicon HPRE UM
	 */
	char *xa_p;
	dma_addr_t dma_xa_p;

	char *g; /* m */
	dma_addr_t dma_g;
};

struct hpre_ecdh_ctx {
	/* low address: p->a->k->b */
	unsigned char *p;
	dma_addr_t dma_p;

	/* low address: x->y */
	unsigned char *g;
	dma_addr_t dma_g;
};

struct hpre_curve25519_ctx {
	/* low address: p->a->k */
	unsigned char *p;
	dma_addr_t dma_p;

	/* gx coordinate */
	unsigned char *g;
	dma_addr_t dma_g;
};

struct hpre_ctx {
	struct hisi_qp *qp;
	struct device *dev;
	struct hpre_asym_request **req_list;
	struct hpre *hpre;
	spinlock_t req_lock;
	unsigned int key_sz;
	bool crt_g2_mode;
	struct idr req_idr;
	union {
		struct hpre_rsa_ctx rsa;
		struct hpre_dh_ctx dh;
		struct hpre_ecdh_ctx ecdh;
		struct hpre_curve25519_ctx curve25519;
	};
	/* for ecc algorithms */
	unsigned int curve_id;
	/* for high performance core */
	u8 enable_hpcore;
};

struct hpre_asym_request {
	char *src;
	char *dst;
	struct hpre_sqe req;
	struct hpre_ctx *ctx;
	union {
		struct akcipher_request *rsa;
		struct kpp_request *dh;
		struct kpp_request *ecdh;
		struct kpp_request *curve25519;
	} areq;
	int err;
	int req_id;
	hpre_cb cb;
	struct timespec64 req_time;
};
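
/*
 * Worked example for the alignment helpers below: hpre_align_sz() rounds the
 * DMA alignment up to at least HPRE_ALIGN_SZ (64) bytes. If crypto_dma_align()
 * returns 8, ((8 - 1) | (64 - 1)) + 1 == 64; if it returns 128, the result is
 * 128. hpre_align_pd() is the padding added to the crypto request context size
 * so that the embedded hpre_asym_request can be re-aligned with PTR_ALIGN()
 * at request time.
 */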

static inline unsigned int hpre_align_sz(void)
{
	return ((crypto_dma_align() - 1) | (HPRE_ALIGN_SZ - 1)) + 1;
}

static inline unsigned int hpre_align_pd(void)
{
	return (hpre_align_sz() - 1) & ~(crypto_tfm_ctx_alignment() - 1);
}

static int hpre_alloc_req_id(struct hpre_ctx *ctx)
{
	unsigned long flags;
	int id;

	spin_lock_irqsave(&ctx->req_lock, flags);
	id = idr_alloc(&ctx->req_idr, NULL, 0, ctx->qp->sq_depth, GFP_ATOMIC);
	spin_unlock_irqrestore(&ctx->req_lock, flags);

	return id;
}

static void hpre_free_req_id(struct hpre_ctx *ctx, int req_id)
{
	unsigned long flags;

	spin_lock_irqsave(&ctx->req_lock, flags);
	idr_remove(&ctx->req_idr, req_id);
	spin_unlock_irqrestore(&ctx->req_lock, flags);
}

static int hpre_add_req_to_ctx(struct hpre_asym_request *hpre_req)
{
	struct hpre_ctx *ctx;
	struct hpre_dfx *dfx;
	int id;

	ctx = hpre_req->ctx;
	id = hpre_alloc_req_id(ctx);
	if (unlikely(id < 0))
		return -EINVAL;

	ctx->req_list[id] = hpre_req;
	hpre_req->req_id = id;

	dfx = ctx->hpre->debug.dfx;
	if (atomic64_read(&dfx[HPRE_OVERTIME_THRHLD].value))
		ktime_get_ts64(&hpre_req->req_time);

	return id;
}

static void hpre_rm_req_from_ctx(struct hpre_asym_request *hpre_req)
{
	struct hpre_ctx *ctx = hpre_req->ctx;
	int id = hpre_req->req_id;

	if (hpre_req->req_id >= 0) {
		hpre_req->req_id = HPRE_INVLD_REQ_ID;
		ctx->req_list[id] = NULL;
		hpre_free_req_id(ctx, id);
	}
}

static struct hisi_qp *hpre_get_qp_and_start(u8 type)
{
	struct hisi_qp *qp;
	int ret;

	qp = hpre_create_qp(type);
	if (!qp) {
		pr_err("Can not create hpre qp!\n");
		return ERR_PTR(-ENODEV);
	}

	ret = hisi_qm_start_qp(qp, 0);
	if (ret < 0) {
		hisi_qm_free_qps(&qp, 1);
		pci_err(qp->qm->pdev, "Can not start qp!\n");
		return ERR_PTR(-EINVAL);
	}

	return qp;
}

static int hpre_get_data_dma_addr(struct hpre_asym_request *hpre_req,
				  struct scatterlist *data, unsigned int len,
				  int is_src, dma_addr_t *tmp)
{
	struct device *dev = hpre_req->ctx->dev;
	enum dma_data_direction dma_dir;

	if (is_src) {
		hpre_req->src = NULL;
		dma_dir = DMA_TO_DEVICE;
	} else {
		hpre_req->dst = NULL;
		dma_dir = DMA_FROM_DEVICE;
	}
	*tmp = dma_map_single(dev, sg_virt(data), len, dma_dir);
	if (unlikely(dma_mapping_error(dev, *tmp))) {
		dev_err(dev, "dma map data err!\n");
		return -ENOMEM;
	}

	return 0;
}

static int hpre_prepare_dma_buf(struct hpre_asym_request *hpre_req,
				struct scatterlist *data, unsigned int len,
				int is_src, dma_addr_t *tmp)
{
	struct hpre_ctx *ctx = hpre_req->ctx;
	struct device *dev = ctx->dev;
	void *ptr;
	int shift;

	shift = ctx->key_sz - len;
	if (unlikely(shift < 0))
		return -EINVAL;

	ptr = dma_alloc_coherent(dev, ctx->key_sz, tmp, GFP_ATOMIC);
	if (unlikely(!ptr))
		return -ENOMEM;

	if (is_src) {
		scatterwalk_map_and_copy(ptr + shift, data, 0, len, 0);
		hpre_req->src = ptr;
	} else {
		hpre_req->dst = ptr;
	}

	return 0;
}

static int hpre_hw_data_init(struct hpre_asym_request *hpre_req,
			     struct scatterlist *data, unsigned int len,
			     int is_src, int is_dh)
{
	struct hpre_sqe *msg = &hpre_req->req;
	struct hpre_ctx *ctx = hpre_req->ctx;
	dma_addr_t tmp = 0;
	int ret;

	/* when the data is dh's source, we should format it */
	if ((sg_is_last(data) && len == ctx->key_sz) &&
	    ((is_dh && !is_src) || !is_dh))
		ret = hpre_get_data_dma_addr(hpre_req, data, len, is_src, &tmp);
	else
		ret = hpre_prepare_dma_buf(hpre_req, data, len, is_src, &tmp);

	if (unlikely(ret))
		return ret;

	if (is_src)
		msg->in = cpu_to_le64(tmp);
	else
		msg->out = cpu_to_le64(tmp);

	return 0;
}

static void hpre_hw_data_clr_all(struct hpre_ctx *ctx,
				 struct hpre_asym_request *req,
				 struct scatterlist *dst,
				 struct scatterlist *src)
{
	struct device *dev = ctx->dev;
	struct hpre_sqe *sqe = &req->req;
	dma_addr_t tmp;

	tmp = le64_to_cpu(sqe->in);
	if (unlikely(dma_mapping_error(dev, tmp)))
		return;

	if (src) {
		if (req->src)
			dma_free_coherent(dev, ctx->key_sz, req->src, tmp);
		else
			dma_unmap_single(dev, tmp, ctx->key_sz, DMA_TO_DEVICE);
	}

	tmp = le64_to_cpu(sqe->out);
	if (unlikely(dma_mapping_error(dev, tmp)))
		return;

	if (req->dst) {
		if (dst)
			scatterwalk_map_and_copy(req->dst, dst, 0,
						 ctx->key_sz, 1);
		dma_free_coherent(dev, ctx->key_sz, req->dst, tmp);
	} else {
		dma_unmap_single(dev, tmp, ctx->key_sz, DMA_FROM_DEVICE);
	}
}

static int hpre_alg_res_post_hf(struct hpre_ctx *ctx, struct hpre_sqe *sqe,
				void **kreq)
{
	struct hpre_asym_request *req;
	unsigned int err, done, alg;
	int id;

#define HPRE_NO_HW_ERR		0
#define HPRE_HW_TASK_DONE	3
#define HREE_HW_ERR_MASK	GENMASK(10, 0)
#define HREE_SQE_DONE_MASK	GENMASK(1, 0)
#define HREE_ALG_TYPE_MASK	GENMASK(4, 0)
	id = (int)le16_to_cpu(sqe->tag);
	req = ctx->req_list[id];
	hpre_rm_req_from_ctx(req);
	*kreq = req;

	err = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_ALG_BITS) &
		HREE_HW_ERR_MASK;

	done = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_DONE_SHIFT) &
		HREE_SQE_DONE_MASK;

	if (likely(err == HPRE_NO_HW_ERR && done == HPRE_HW_TASK_DONE))
		return 0;

	alg = le32_to_cpu(sqe->dw0) & HREE_ALG_TYPE_MASK;
	dev_err_ratelimited(ctx->dev, "alg[0x%x] error: done[0x%x], etype[0x%x]\n",
			    alg, done, err);

	return -EINVAL;
}

static int hpre_ctx_set(struct hpre_ctx *ctx, struct hisi_qp *qp, int qlen)
{
	struct hpre *hpre;

	if (!ctx || !qp || qlen < 0)
		return -EINVAL;

	spin_lock_init(&ctx->req_lock);
	ctx->qp = qp;
	ctx->dev = &qp->qm->pdev->dev;

	hpre = container_of(ctx->qp->qm, struct hpre, qm);
	ctx->hpre = hpre;
	ctx->req_list = kcalloc(qlen, sizeof(void *), GFP_KERNEL);
	if (!ctx->req_list)
		return -ENOMEM;
	ctx->key_sz = 0;
	ctx->crt_g2_mode = false;
	idr_init(&ctx->req_idr);

	return 0;
}

static void hpre_ctx_clear(struct hpre_ctx *ctx, bool is_clear_all)
{
	if (is_clear_all) {
		idr_destroy(&ctx->req_idr);
		kfree(ctx->req_list);
		hisi_qm_free_qps(&ctx->qp, 1);
	}

	ctx->crt_g2_mode = false;
	ctx->key_sz = 0;
}

static bool hpre_is_bd_timeout(struct hpre_asym_request *req,
			       u64 overtime_thrhld)
{
	struct timespec64 reply_time;
	u64 time_use_us;

	ktime_get_ts64(&reply_time);
	time_use_us = (reply_time.tv_sec - req->req_time.tv_sec) *
		HPRE_DFX_SEC_TO_US +
		(reply_time.tv_nsec - req->req_time.tv_nsec) /
		HPRE_DFX_US_TO_NS;

	if (time_use_us <= overtime_thrhld)
		return false;

	return true;
}

static void hpre_dh_cb(struct hpre_ctx *ctx, void *resp)
{
	struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
	struct hpre_asym_request *req;
	struct kpp_request *areq;
	u64 overtime_thrhld;
	int ret;

	ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);
	areq = req->areq.dh;
	areq->dst_len = ctx->key_sz;

	overtime_thrhld = atomic64_read(&dfx[HPRE_OVERTIME_THRHLD].value);
	if (overtime_thrhld && hpre_is_bd_timeout(req, overtime_thrhld))
		atomic64_inc(&dfx[HPRE_OVER_THRHLD_CNT].value);

	hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src);
	kpp_request_complete(areq, ret);
	atomic64_inc(&dfx[HPRE_RECV_CNT].value);
}

static void hpre_rsa_cb(struct hpre_ctx *ctx, void *resp)
{
	struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
	struct hpre_asym_request *req;
	struct akcipher_request *areq;
	u64 overtime_thrhld;
	int ret;

	ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);

	overtime_thrhld = atomic64_read(&dfx[HPRE_OVERTIME_THRHLD].value);
	if (overtime_thrhld && hpre_is_bd_timeout(req, overtime_thrhld))
		atomic64_inc(&dfx[HPRE_OVER_THRHLD_CNT].value);

	areq = req->areq.rsa;
	areq->dst_len = ctx->key_sz;
	hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src);
	akcipher_request_complete(areq, ret);
	atomic64_inc(&dfx[HPRE_RECV_CNT].value);
}

static void hpre_alg_cb(struct hisi_qp *qp, void *resp)
{
	struct hpre_ctx *ctx = qp->qp_ctx;
	struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
	struct hpre_sqe *sqe = resp;
	struct hpre_asym_request *req = ctx->req_list[le16_to_cpu(sqe->tag)];

	if (unlikely(!req)) {
		atomic64_inc(&dfx[HPRE_INVALID_REQ_CNT].value);
		return;
	}

	req->cb(ctx, resp);
}

static void hpre_stop_qp_and_put(struct hisi_qp *qp)
{
	hisi_qm_stop_qp(qp);
	hisi_qm_free_qps(&qp, 1);
}

static int hpre_ctx_init(struct hpre_ctx *ctx, u8 type)
{
	struct hisi_qp *qp;
	int ret;

	qp = hpre_get_qp_and_start(type);
	if (IS_ERR(qp))
		return PTR_ERR(qp);

	qp->qp_ctx = ctx;
	qp->req_cb = hpre_alg_cb;

	ret = hpre_ctx_set(ctx, qp, qp->sq_depth);
	if (ret)
		hpre_stop_qp_and_put(qp);

	return ret;
}

static int hpre_msg_request_set(struct hpre_ctx *ctx, void *req, bool is_rsa)
{
	struct hpre_asym_request *h_req;
	struct hpre_sqe *msg;
	int req_id;
	void *tmp;

	if (is_rsa) {
		struct akcipher_request *akreq = req;

		if (akreq->dst_len < ctx->key_sz) {
			akreq->dst_len = ctx->key_sz;
			return -EOVERFLOW;
		}

		tmp = akcipher_request_ctx(akreq);
		h_req = PTR_ALIGN(tmp, hpre_align_sz());
		h_req->cb = hpre_rsa_cb;
		h_req->areq.rsa = akreq;
		msg = &h_req->req;
		memset(msg, 0, sizeof(*msg));
	} else {
		struct kpp_request *kreq = req;

		if (kreq->dst_len < ctx->key_sz) {
			kreq->dst_len = ctx->key_sz;
			return -EOVERFLOW;
		}

		tmp = kpp_request_ctx(kreq);
		h_req = PTR_ALIGN(tmp, hpre_align_sz());
		h_req->cb = hpre_dh_cb;
		h_req->areq.dh = kreq;
		msg = &h_req->req;
		memset(msg, 0, sizeof(*msg));
		msg->key = cpu_to_le64(ctx->dh.dma_xa_p);
	}

	msg->in = cpu_to_le64(DMA_MAPPING_ERROR);
	msg->out = cpu_to_le64(DMA_MAPPING_ERROR);
	msg->dw0 |= cpu_to_le32(0x1 << HPRE_SQE_DONE_SHIFT);
	msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1;
	h_req->ctx = ctx;

	req_id = hpre_add_req_to_ctx(h_req);
	if (req_id < 0)
		return -EBUSY;

	msg->tag = cpu_to_le16((u16)req_id);

	return 0;
}

static int hpre_send(struct hpre_ctx *ctx, struct hpre_sqe *msg)
{
	struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
	int ctr = 0;
	int ret;

	do {
		atomic64_inc(&dfx[HPRE_SEND_CNT].value);
		spin_lock_bh(&ctx->req_lock);
		ret = hisi_qp_send(ctx->qp, msg);
		spin_unlock_bh(&ctx->req_lock);
		if (ret != -EBUSY)
			break;
		atomic64_inc(&dfx[HPRE_SEND_BUSY_CNT].value);
	} while (ctr++ < HPRE_TRY_SEND_TIMES);

	if (likely(!ret))
		return ret;

	if (ret != -EBUSY)
		atomic64_inc(&dfx[HPRE_SEND_FAIL_CNT].value);

	return ret;
}
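
/*
 * hpre_send() above pushes one SQE to the queue pair and retries up to
 * HPRE_TRY_SEND_TIMES (100) times while the queue reports -EBUSY, bumping
 * the SEND/SEND_BUSY/SEND_FAIL debugfs counters as it goes. Callers treat a
 * zero return as "request in flight": completion arrives asynchronously via
 * hpre_alg_cb() and the per-request callback stored in h_req->cb.
 */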

static int hpre_dh_compute_value(struct kpp_request *req)
{
	struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
	void *tmp = kpp_request_ctx(req);
	struct hpre_asym_request *hpre_req = PTR_ALIGN(tmp, hpre_align_sz());
	struct hpre_sqe *msg = &hpre_req->req;
	int ret;

	ret = hpre_msg_request_set(ctx, req, false);
	if (unlikely(ret))
		return ret;

	if (req->src) {
		ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 1);
		if (unlikely(ret))
			goto clear_all;
	} else {
		msg->in = cpu_to_le64(ctx->dh.dma_g);
	}

	ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 1);
	if (unlikely(ret))
		goto clear_all;

	if (ctx->crt_g2_mode && !req->src)
		msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH_G2);
	else
		msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH);

	/* success */
	ret = hpre_send(ctx, msg);
	if (likely(!ret))
		return -EINPROGRESS;

clear_all:
	hpre_rm_req_from_ctx(hpre_req);
	hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);

	return ret;
}

static int hpre_is_dh_params_length_valid(unsigned int key_sz)
{
#define _HPRE_DH_GRP1		768
#define _HPRE_DH_GRP2		1024
#define _HPRE_DH_GRP5		1536
#define _HPRE_DH_GRP14		2048
#define _HPRE_DH_GRP15		3072
#define _HPRE_DH_GRP16		4096
	switch (key_sz) {
	case _HPRE_DH_GRP1:
	case _HPRE_DH_GRP2:
	case _HPRE_DH_GRP5:
	case _HPRE_DH_GRP14:
	case _HPRE_DH_GRP15:
	case _HPRE_DH_GRP16:
		return 0;
	default:
		return -EINVAL;
	}
}

static int hpre_dh_set_params(struct hpre_ctx *ctx, struct dh *params)
{
	struct device *dev = ctx->dev;
	unsigned int sz;

	if (params->p_size > HPRE_DH_MAX_P_SZ)
		return -EINVAL;

	if (hpre_is_dh_params_length_valid(params->p_size <<
					   HPRE_BITS_2_BYTES_SHIFT))
		return -EINVAL;

	sz = ctx->key_sz = params->p_size;
	ctx->dh.xa_p = dma_alloc_coherent(dev, sz << 1,
					  &ctx->dh.dma_xa_p, GFP_KERNEL);
	if (!ctx->dh.xa_p)
		return -ENOMEM;

	memcpy(ctx->dh.xa_p + sz, params->p, sz);

	/* If g equals 2 don't copy it */
	if (params->g_size == 1 && *(char *)params->g == HPRE_DH_G_FLAG) {
		ctx->crt_g2_mode = true;
		return 0;
	}

	ctx->dh.g = dma_alloc_coherent(dev, sz, &ctx->dh.dma_g, GFP_KERNEL);
	if (!ctx->dh.g) {
		dma_free_coherent(dev, sz << 1, ctx->dh.xa_p,
				  ctx->dh.dma_xa_p);
		ctx->dh.xa_p = NULL;
		return -ENOMEM;
	}

	memcpy(ctx->dh.g + (sz - params->g_size), params->g, params->g_size);

	return 0;
}

static void hpre_dh_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all)
{
	struct device *dev = ctx->dev;
	unsigned int sz = ctx->key_sz;

	if (is_clear_all)
		hisi_qm_stop_qp(ctx->qp);

	if (ctx->dh.g) {
		dma_free_coherent(dev, sz, ctx->dh.g, ctx->dh.dma_g);
		ctx->dh.g = NULL;
	}

	if (ctx->dh.xa_p) {
		memzero_explicit(ctx->dh.xa_p, sz);
		dma_free_coherent(dev, sz << 1, ctx->dh.xa_p,
				  ctx->dh.dma_xa_p);
		ctx->dh.xa_p = NULL;
	}

	hpre_ctx_clear(ctx, is_clear_all);
}
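
/*
 * DH key material layout in ctx->dh.xa_p (one coherent buffer of 2 * key_sz
 * bytes): hpre_dh_set_secret() below copies the private value xa right-aligned
 * into the first key_sz bytes, and hpre_dh_set_params() above places the prime
 * p in the second key_sz bytes. The generator g, when it is not 2, lives in a
 * separate key_sz buffer ctx->dh.g, also right-aligned.
 */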
kpp_tfm_ctx(tfm);753754return ctx->key_sz;755}756757static int hpre_dh_init_tfm(struct crypto_kpp *tfm)758{759struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);760761kpp_set_reqsize(tfm, sizeof(struct hpre_asym_request) + hpre_align_pd());762763return hpre_ctx_init(ctx, HPRE_V2_ALG_TYPE);764}765766static void hpre_dh_exit_tfm(struct crypto_kpp *tfm)767{768struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);769770hpre_dh_clear_ctx(ctx, true);771}772773static void hpre_rsa_drop_leading_zeros(const char **ptr, size_t *len)774{775while (!**ptr && *len) {776(*ptr)++;777(*len)--;778}779}780781static bool hpre_rsa_key_size_is_support(unsigned int len)782{783unsigned int bits = len << HPRE_BITS_2_BYTES_SHIFT;784785#define _RSA_1024BITS_KEY_WDTH 1024786#define _RSA_2048BITS_KEY_WDTH 2048787#define _RSA_3072BITS_KEY_WDTH 3072788#define _RSA_4096BITS_KEY_WDTH 4096789790switch (bits) {791case _RSA_1024BITS_KEY_WDTH:792case _RSA_2048BITS_KEY_WDTH:793case _RSA_3072BITS_KEY_WDTH:794case _RSA_4096BITS_KEY_WDTH:795return true;796default:797return false;798}799}800801static int hpre_rsa_enc(struct akcipher_request *req)802{803struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);804struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);805void *tmp = akcipher_request_ctx(req);806struct hpre_asym_request *hpre_req = PTR_ALIGN(tmp, hpre_align_sz());807struct hpre_sqe *msg = &hpre_req->req;808int ret;809810/* For 512 and 1536 bits key size, use soft tfm instead */811if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||812ctx->key_sz == HPRE_RSA_1536BITS_KSZ) {813akcipher_request_set_tfm(req, ctx->rsa.soft_tfm);814ret = crypto_akcipher_encrypt(req);815akcipher_request_set_tfm(req, tfm);816return ret;817}818819if (unlikely(!ctx->rsa.pubkey))820return -EINVAL;821822ret = hpre_msg_request_set(ctx, req, true);823if (unlikely(ret))824return ret;825826msg->dw0 |= cpu_to_le32(HPRE_ALG_NC_NCRT);827msg->key = cpu_to_le64(ctx->rsa.dma_pubkey);828829ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0);830if (unlikely(ret))831goto clear_all;832833ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0);834if (unlikely(ret))835goto clear_all;836837/* success */838ret = hpre_send(ctx, msg);839if (likely(!ret))840return -EINPROGRESS;841842clear_all:843hpre_rm_req_from_ctx(hpre_req);844hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);845846return ret;847}848849static int hpre_rsa_dec(struct akcipher_request *req)850{851struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);852struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);853void *tmp = akcipher_request_ctx(req);854struct hpre_asym_request *hpre_req = PTR_ALIGN(tmp, hpre_align_sz());855struct hpre_sqe *msg = &hpre_req->req;856int ret;857858/* For 512 and 1536 bits key size, use soft tfm instead */859if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||860ctx->key_sz == HPRE_RSA_1536BITS_KSZ) {861akcipher_request_set_tfm(req, ctx->rsa.soft_tfm);862ret = crypto_akcipher_decrypt(req);863akcipher_request_set_tfm(req, tfm);864return ret;865}866867if (unlikely(!ctx->rsa.prikey))868return -EINVAL;869870ret = hpre_msg_request_set(ctx, req, true);871if (unlikely(ret))872return ret;873874if (ctx->crt_g2_mode) {875msg->key = cpu_to_le64(ctx->rsa.dma_crt_prikey);876msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) |877HPRE_ALG_NC_CRT);878} else {879msg->key = cpu_to_le64(ctx->rsa.dma_prikey);880msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) |881HPRE_ALG_NC_NCRT);882}883884ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0);885if (unlikely(ret))886goto clear_all;887888ret = hpre_hw_data_init(hpre_req, 
	ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0);
	if (unlikely(ret))
		goto clear_all;

	/* success */
	ret = hpre_send(ctx, msg);
	if (likely(!ret))
		return -EINPROGRESS;

clear_all:
	hpre_rm_req_from_ctx(hpre_req);
	hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);

	return ret;
}

static int hpre_rsa_set_n(struct hpre_ctx *ctx, const char *value,
			  size_t vlen, bool private)
{
	const char *ptr = value;

	hpre_rsa_drop_leading_zeros(&ptr, &vlen);

	ctx->key_sz = vlen;

	/* if invalid key size provided, we use software tfm */
	if (!hpre_rsa_key_size_is_support(ctx->key_sz))
		return 0;

	ctx->rsa.pubkey = dma_alloc_coherent(ctx->dev, vlen << 1,
					     &ctx->rsa.dma_pubkey,
					     GFP_KERNEL);
	if (!ctx->rsa.pubkey)
		return -ENOMEM;

	if (private) {
		ctx->rsa.prikey = dma_alloc_coherent(ctx->dev, vlen << 1,
						     &ctx->rsa.dma_prikey,
						     GFP_KERNEL);
		if (!ctx->rsa.prikey) {
			dma_free_coherent(ctx->dev, vlen << 1,
					  ctx->rsa.pubkey,
					  ctx->rsa.dma_pubkey);
			ctx->rsa.pubkey = NULL;
			return -ENOMEM;
		}
		memcpy(ctx->rsa.prikey + vlen, ptr, vlen);
	}
	memcpy(ctx->rsa.pubkey + vlen, ptr, vlen);

	/* Using hardware HPRE to do RSA */
	return 1;
}

static int hpre_rsa_set_e(struct hpre_ctx *ctx, const char *value,
			  size_t vlen)
{
	const char *ptr = value;

	hpre_rsa_drop_leading_zeros(&ptr, &vlen);

	if (!ctx->key_sz || !vlen || vlen > ctx->key_sz)
		return -EINVAL;

	memcpy(ctx->rsa.pubkey + ctx->key_sz - vlen, ptr, vlen);

	return 0;
}

static int hpre_rsa_set_d(struct hpre_ctx *ctx, const char *value,
			  size_t vlen)
{
	const char *ptr = value;

	hpre_rsa_drop_leading_zeros(&ptr, &vlen);

	if (!ctx->key_sz || !vlen || vlen > ctx->key_sz)
		return -EINVAL;

	memcpy(ctx->rsa.prikey + ctx->key_sz - vlen, ptr, vlen);

	return 0;
}

static int hpre_crt_para_get(char *para, size_t para_sz,
			     const char *raw, size_t raw_sz)
{
	const char *ptr = raw;
	size_t len = raw_sz;

	hpre_rsa_drop_leading_zeros(&ptr, &len);
	if (!len || len > para_sz)
		return -EINVAL;

	memcpy(para + para_sz - len, ptr, len);

	return 0;
}
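
/*
 * CRT private key layout built by hpre_rsa_setkey_crt() below: five
 * parameters of hlf_ksz (key_sz / 2) bytes each, packed back to back as
 * dq | dp | q | p | qinv, which is what the HPRE_CRT_Q/P/INV offsets index.
 * Each parameter is right-aligned into its slot by hpre_crt_para_get().
 */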

static int hpre_rsa_setkey_crt(struct hpre_ctx *ctx, struct rsa_key *rsa_key)
{
	unsigned int hlf_ksz = ctx->key_sz >> 1;
	struct device *dev = ctx->dev;
	u64 offset;
	int ret;

	ctx->rsa.crt_prikey = dma_alloc_coherent(dev, hlf_ksz * HPRE_CRT_PRMS,
						 &ctx->rsa.dma_crt_prikey,
						 GFP_KERNEL);
	if (!ctx->rsa.crt_prikey)
		return -ENOMEM;

	ret = hpre_crt_para_get(ctx->rsa.crt_prikey, hlf_ksz,
				rsa_key->dq, rsa_key->dq_sz);
	if (ret)
		goto free_key;

	offset = hlf_ksz;
	ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
				rsa_key->dp, rsa_key->dp_sz);
	if (ret)
		goto free_key;

	offset = hlf_ksz * HPRE_CRT_Q;
	ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
				rsa_key->q, rsa_key->q_sz);
	if (ret)
		goto free_key;

	offset = hlf_ksz * HPRE_CRT_P;
	ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
				rsa_key->p, rsa_key->p_sz);
	if (ret)
		goto free_key;

	offset = hlf_ksz * HPRE_CRT_INV;
	ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
				rsa_key->qinv, rsa_key->qinv_sz);
	if (ret)
		goto free_key;

	ctx->crt_g2_mode = true;

	return 0;

free_key:
	offset = hlf_ksz * HPRE_CRT_PRMS;
	memzero_explicit(ctx->rsa.crt_prikey, offset);
	dma_free_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, ctx->rsa.crt_prikey,
			  ctx->rsa.dma_crt_prikey);
	ctx->rsa.crt_prikey = NULL;
	ctx->crt_g2_mode = false;

	return ret;
}

/* If it is clear all, all the resources of the QP will be cleaned. */
static void hpre_rsa_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all)
{
	unsigned int half_key_sz = ctx->key_sz >> 1;
	struct device *dev = ctx->dev;

	if (is_clear_all)
		hisi_qm_stop_qp(ctx->qp);

	if (ctx->rsa.pubkey) {
		dma_free_coherent(dev, ctx->key_sz << 1,
				  ctx->rsa.pubkey, ctx->rsa.dma_pubkey);
		ctx->rsa.pubkey = NULL;
	}

	if (ctx->rsa.crt_prikey) {
		memzero_explicit(ctx->rsa.crt_prikey,
				 half_key_sz * HPRE_CRT_PRMS);
		dma_free_coherent(dev, half_key_sz * HPRE_CRT_PRMS,
				  ctx->rsa.crt_prikey, ctx->rsa.dma_crt_prikey);
		ctx->rsa.crt_prikey = NULL;
	}

	if (ctx->rsa.prikey) {
		memzero_explicit(ctx->rsa.prikey, ctx->key_sz);
		dma_free_coherent(dev, ctx->key_sz << 1, ctx->rsa.prikey,
				  ctx->rsa.dma_prikey);
		ctx->rsa.prikey = NULL;
	}

	hpre_ctx_clear(ctx, is_clear_all);
}

/*
 * we should judge if it is CRT or not,
 * CRT: return true, N-CRT: return false .
 */
static bool hpre_is_crt_key(struct rsa_key *key)
{
	u16 len = key->p_sz + key->q_sz + key->dp_sz + key->dq_sz +
		  key->qinv_sz;

#define LEN_OF_NCRT_PARA	5

	/* N-CRT less than 5 parameters */
	return len > LEN_OF_NCRT_PARA;
}

static int hpre_rsa_setkey(struct hpre_ctx *ctx, const void *key,
			   unsigned int keylen, bool private)
{
	struct rsa_key rsa_key;
	int ret;

	hpre_rsa_clear_ctx(ctx, false);

	if (private)
		ret = rsa_parse_priv_key(&rsa_key, key, keylen);
	else
		ret = rsa_parse_pub_key(&rsa_key, key, keylen);
	if (ret < 0)
		return ret;

	ret = hpre_rsa_set_n(ctx, rsa_key.n, rsa_key.n_sz, private);
	if (ret <= 0)
		return ret;

	if (private) {
		ret = hpre_rsa_set_d(ctx, rsa_key.d, rsa_key.d_sz);
		if (ret < 0)
			goto free;

		if (hpre_is_crt_key(&rsa_key)) {
			ret = hpre_rsa_setkey_crt(ctx, &rsa_key);
			if (ret < 0)
				goto free;
		}
	}

	ret = hpre_rsa_set_e(ctx, rsa_key.e, rsa_key.e_sz);
	if (ret < 0)
		goto free;

	if ((private && !ctx->rsa.prikey) || !ctx->rsa.pubkey) {
		ret = -EINVAL;
		goto free;
	}

	return 0;

free:
	hpre_rsa_clear_ctx(ctx, false);
	return ret;
}

static int hpre_rsa_setpubkey(struct crypto_akcipher *tfm, const void *key,
			      unsigned int keylen)
{
	struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
	int ret;

	ret = crypto_akcipher_set_pub_key(ctx->rsa.soft_tfm, key, keylen);
	if (ret)
		return ret;

	return hpre_rsa_setkey(ctx, key, keylen, false);
}

static int hpre_rsa_setprivkey(struct crypto_akcipher *tfm, const void *key,
			       unsigned int keylen)
{
	struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
	int ret;

	ret = crypto_akcipher_set_priv_key(ctx->rsa.soft_tfm, key, keylen);
	if (ret)
		return ret;

	return hpre_rsa_setkey(ctx, key, keylen, true);
}

static unsigned int hpre_rsa_max_size(struct crypto_akcipher *tfm)
{
	struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);

	/* For 512 and 1536 bits key size, use soft tfm instead */
	if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
	    ctx->key_sz == HPRE_RSA_1536BITS_KSZ)
		return crypto_akcipher_maxsize(ctx->rsa.soft_tfm);

	return ctx->key_sz;
}

static int hpre_rsa_init_tfm(struct crypto_akcipher *tfm)
{
	struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
	int ret;

	ctx->rsa.soft_tfm = crypto_alloc_akcipher("rsa-generic", 0, 0);
	if (IS_ERR(ctx->rsa.soft_tfm)) {
		pr_err("Can not alloc_akcipher!\n");
		return PTR_ERR(ctx->rsa.soft_tfm);
	}

	akcipher_set_reqsize(tfm, sizeof(struct hpre_asym_request) +
			     hpre_align_pd());

	ret = hpre_ctx_init(ctx, HPRE_V2_ALG_TYPE);
	if (ret)
		crypto_free_akcipher(ctx->rsa.soft_tfm);

	return ret;
}

static void hpre_rsa_exit_tfm(struct crypto_akcipher *tfm)
{
	struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);

	hpre_rsa_clear_ctx(ctx, true);
	crypto_free_akcipher(ctx->rsa.soft_tfm);
}

static void hpre_key_to_big_end(u8 *data, int len)
{
	int i, j;

	for (i = 0; i < len / 2; i++) {
		j = len - i - 1;
		swap(data[j], data[i]);
	}
}

static void hpre_ecc_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all,
			       bool is_ecdh)
{
	struct device *dev = ctx->dev;
	unsigned int sz = ctx->key_sz;
	unsigned int shift = sz << 1;

	if (is_clear_all)
		hisi_qm_stop_qp(ctx->qp);

	if (is_ecdh && ctx->ecdh.p) {
		/* ecdh: p->a->k->b */
		memzero_explicit(ctx->ecdh.p + shift, sz);
		dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p);
		ctx->ecdh.p = NULL;
	} else if (!is_ecdh && ctx->curve25519.p) {
		/* curve25519: p->a->k */
		memzero_explicit(ctx->curve25519.p + shift, sz);
		dma_free_coherent(dev, sz << 2, ctx->curve25519.p,
				  ctx->curve25519.dma_p);
		ctx->curve25519.p = NULL;
	}

	hpre_ctx_clear(ctx, is_clear_all);
}

/*
 * The bits of 192/224/256/384/521 are supported by HPRE,
 * and convert the bits like:
 * bits<=256, bits=256; 256<bits<=384, bits=384; 384<bits<=576, bits=576;
 * If the parameter bit width is insufficient, then we fill in the
 * high-order zeros by soft, so TASK_LENGTH1 is 0x3/0x5/0x8;
 */
static unsigned int hpre_ecdh_supported_curve(unsigned short id)
{
	switch (id) {
	case ECC_CURVE_NIST_P192:
	case ECC_CURVE_NIST_P256:
		return HPRE_ECC_HW256_KSZ_B;
	case ECC_CURVE_NIST_P384:
		return HPRE_ECC_HW384_KSZ_B;
	default:
		break;
	}

	return 0;
}

static void fill_curve_param(void *addr, u64 *param, unsigned int cur_sz, u8 ndigits)
{
	unsigned int sz = cur_sz - (ndigits - 1) * sizeof(u64);
	u8 i = 0;

	while (i < ndigits - 1) {
		memcpy(addr + sizeof(u64) * i, &param[i], sizeof(u64));
		i++;
	}

	memcpy(addr + sizeof(u64) * i, &param[ndigits - 1], sz);
	hpre_key_to_big_end((u8 *)addr, cur_sz);
}

static int hpre_ecdh_fill_curve(struct hpre_ctx *ctx, struct ecdh *params,
				unsigned int cur_sz)
{
	unsigned int shifta = ctx->key_sz << 1;
	unsigned int shiftb = ctx->key_sz << 2;
	void *p = ctx->ecdh.p + ctx->key_sz - cur_sz;
	void *a = ctx->ecdh.p + shifta - cur_sz;
	void *b = ctx->ecdh.p + shiftb - cur_sz;
	void *x = ctx->ecdh.g + ctx->key_sz - cur_sz;
	void *y = ctx->ecdh.g + shifta - cur_sz;
	const struct ecc_curve *curve = ecc_get_curve(ctx->curve_id);
	char *n;

	if (unlikely(!curve))
		return -EINVAL;

	n = kzalloc(ctx->key_sz, GFP_KERNEL);
	if (!n)
		return -ENOMEM;

	fill_curve_param(p, curve->p, cur_sz, curve->g.ndigits);
	fill_curve_param(a, curve->a, cur_sz, curve->g.ndigits);
	fill_curve_param(b, curve->b, cur_sz, curve->g.ndigits);
	fill_curve_param(x, curve->g.x, cur_sz, curve->g.ndigits);
	fill_curve_param(y, curve->g.y, cur_sz, curve->g.ndigits);
	fill_curve_param(n, curve->n, cur_sz, curve->g.ndigits);

	if (params->key_size == cur_sz && memcmp(params->key, n, cur_sz) >= 0) {
		kfree(n);
		return -EINVAL;
	}

	kfree(n);
	return 0;
}

static unsigned int hpre_ecdh_get_curvesz(unsigned short id)
{
	switch (id) {
	case ECC_CURVE_NIST_P192:
		return HPRE_ECC_NIST_P192_N_SIZE;
	case ECC_CURVE_NIST_P256:
		return HPRE_ECC_NIST_P256_N_SIZE;
	case ECC_CURVE_NIST_P384:
		return HPRE_ECC_NIST_P384_N_SIZE;
	default:
		break;
	}

	return 0;
}

static int hpre_ecdh_set_param(struct hpre_ctx *ctx, struct ecdh *params)
{
	struct device *dev = ctx->dev;
	unsigned int sz, shift, curve_sz;
	int ret;

	ctx->key_sz = hpre_ecdh_supported_curve(ctx->curve_id);
	if (!ctx->key_sz)
		return -EINVAL;

	curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id);
	if (!curve_sz || params->key_size > curve_sz)
		return -EINVAL;

	sz = ctx->key_sz;

	if (!ctx->ecdh.p) {
		ctx->ecdh.p = dma_alloc_coherent(dev, sz << 3, &ctx->ecdh.dma_p,
						 GFP_KERNEL);
		if (!ctx->ecdh.p)
			return -ENOMEM;
	}

	shift = sz << 2;
	ctx->ecdh.g = ctx->ecdh.p + shift;
	ctx->ecdh.dma_g = ctx->ecdh.dma_p + shift;

	ret = hpre_ecdh_fill_curve(ctx, params, curve_sz);
	if (ret) {
		dev_err(dev, "failed to fill curve_param, ret = %d!\n", ret);
		dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p);
		ctx->ecdh.p = NULL;
		return ret;
	}

	return 0;
}

static bool hpre_key_is_zero(char *key, unsigned short key_sz)
{
	int i;

	for (i = 0; i < key_sz; i++)
		if (key[i])
			return false;

	return true;
}

static int ecdh_gen_privkey(struct hpre_ctx *ctx, struct ecdh *params)
{
	struct device *dev = ctx->dev;
	int ret;

	ret = crypto_get_default_rng();
	if (ret) {
		dev_err(dev, "failed to get default rng, ret = %d!\n", ret);
		return ret;
	}

	ret = crypto_rng_get_bytes(crypto_default_rng, (u8 *)params->key,
				   params->key_size);
	crypto_put_default_rng();
	if (ret)
		dev_err(dev, "failed to get rng, ret = %d!\n", ret);

	return ret;
}

static int hpre_ecdh_set_secret(struct crypto_kpp *tfm, const void *buf,
				unsigned int len)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
	unsigned int sz, sz_shift, curve_sz;
	struct device *dev = ctx->dev;
	char key[HPRE_ECC_MAX_KSZ];
	struct ecdh params;
	int ret;

	if (crypto_ecdh_decode_key(buf, len, &params) < 0) {
		dev_err(dev, "failed to decode ecdh key!\n");
		return -EINVAL;
	}

	/* Use stdrng to generate private key */
	if (!params.key || !params.key_size) {
		params.key = key;
		curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id);
		if (!curve_sz) {
			dev_err(dev, "Invalid curve size!\n");
			return -EINVAL;
		}

		params.key_size = curve_sz - 1;
		ret = ecdh_gen_privkey(ctx, &params);
		if (ret)
			return ret;
	}

	if (hpre_key_is_zero(params.key, params.key_size)) {
		dev_err(dev, "Invalid hpre key!\n");
		return -EINVAL;
	}

	hpre_ecc_clear_ctx(ctx, false, true);

	ret = hpre_ecdh_set_param(ctx, &params);
	if (ret < 0) {
		dev_err(dev, "failed to set hpre param, ret = %d!\n", ret);
		return ret;
	}

	sz = ctx->key_sz;
	sz_shift = (sz << 1) + sz - params.key_size;
	memcpy(ctx->ecdh.p + sz_shift, params.key, params.key_size);

	return 0;
}
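
/*
 * ECDH DMA buffer layout, as set up by hpre_ecdh_set_param() and
 * hpre_ecdh_set_secret() above: one coherent allocation of 8 * key_sz bytes
 * holding p | a | k | b in the first four key_sz slots, followed by the base
 * point g as x | y. Every value is right-aligned (zero padded at the front)
 * to the hardware key size, 32 bytes for P-192/P-256 and 48 bytes for P-384.
 */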

static void hpre_ecdh_hw_data_clr_all(struct hpre_ctx *ctx,
				      struct hpre_asym_request *req,
				      struct scatterlist *dst,
				      struct scatterlist *src)
{
	struct device *dev = ctx->dev;
	struct hpre_sqe *sqe = &req->req;
	dma_addr_t dma;

	dma = le64_to_cpu(sqe->in);
	if (unlikely(dma_mapping_error(dev, dma)))
		return;

	if (src && req->src)
		dma_free_coherent(dev, ctx->key_sz << 2, req->src, dma);

	dma = le64_to_cpu(sqe->out);
	if (unlikely(dma_mapping_error(dev, dma)))
		return;

	if (req->dst)
		dma_free_coherent(dev, ctx->key_sz << 1, req->dst, dma);
	if (dst)
		dma_unmap_single(dev, dma, ctx->key_sz << 1, DMA_FROM_DEVICE);
}

static void hpre_ecdh_cb(struct hpre_ctx *ctx, void *resp)
{
	unsigned int curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id);
	struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
	struct hpre_asym_request *req = NULL;
	struct kpp_request *areq;
	u64 overtime_thrhld;
	char *p;
	int ret;

	ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);
	areq = req->areq.ecdh;
	areq->dst_len = ctx->key_sz << 1;

	overtime_thrhld = atomic64_read(&dfx[HPRE_OVERTIME_THRHLD].value);
	if (overtime_thrhld && hpre_is_bd_timeout(req, overtime_thrhld))
		atomic64_inc(&dfx[HPRE_OVER_THRHLD_CNT].value);

	/* Do unmap before data processing */
	hpre_ecdh_hw_data_clr_all(ctx, req, areq->dst, areq->src);

	p = sg_virt(areq->dst);
	memmove(p, p + ctx->key_sz - curve_sz, curve_sz);
	memmove(p + curve_sz, p + areq->dst_len - curve_sz, curve_sz);

	kpp_request_complete(areq, ret);

	atomic64_inc(&dfx[HPRE_RECV_CNT].value);
}

static int hpre_ecdh_msg_request_set(struct hpre_ctx *ctx,
				     struct kpp_request *req)
{
	struct hpre_asym_request *h_req;
	struct hpre_sqe *msg;
	int req_id;
	void *tmp;

	if (req->dst_len < ctx->key_sz << 1) {
		req->dst_len = ctx->key_sz << 1;
		return -EINVAL;
	}

	tmp = kpp_request_ctx(req);
	h_req = PTR_ALIGN(tmp, hpre_align_sz());
	h_req->cb = hpre_ecdh_cb;
	h_req->areq.ecdh = req;
	msg = &h_req->req;
	memset(msg, 0, sizeof(*msg));
	msg->in = cpu_to_le64(DMA_MAPPING_ERROR);
	msg->out = cpu_to_le64(DMA_MAPPING_ERROR);
	msg->key = cpu_to_le64(ctx->ecdh.dma_p);

	msg->dw0 |= cpu_to_le32(0x1U << HPRE_SQE_DONE_SHIFT);
	msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1;
	h_req->ctx = ctx;

	req_id = hpre_add_req_to_ctx(h_req);
	if (req_id < 0)
		return -EBUSY;

	msg->tag = cpu_to_le16((u16)req_id);
	return 0;
}
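
/*
 * For a peer public key, the generic kpp API hands the driver gx || gy packed
 * at curve size. hpre_ecdh_src_data_init() below stages the scatterlist into
 * a scratch area and re-copies the two coordinates right-aligned into
 * separate key_sz slots, which is the layout the hardware expects; the
 * completion path (hpre_ecdh_cb() above) packs the result back down to curve
 * size before completing the request.
 */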

static int hpre_ecdh_src_data_init(struct hpre_asym_request *hpre_req,
				   struct scatterlist *data, unsigned int len)
{
	struct hpre_sqe *msg = &hpre_req->req;
	struct hpre_ctx *ctx = hpre_req->ctx;
	struct device *dev = ctx->dev;
	unsigned int tmpshift;
	dma_addr_t dma = 0;
	void *ptr;
	int shift;

	/* Src_data include gx and gy. */
	shift = ctx->key_sz - (len >> 1);
	if (unlikely(shift < 0))
		return -EINVAL;

	ptr = dma_alloc_coherent(dev, ctx->key_sz << 2, &dma, GFP_KERNEL);
	if (unlikely(!ptr))
		return -ENOMEM;

	tmpshift = ctx->key_sz << 1;
	scatterwalk_map_and_copy(ptr + tmpshift, data, 0, len, 0);
	memcpy(ptr + shift, ptr + tmpshift, len >> 1);
	memcpy(ptr + ctx->key_sz + shift, ptr + tmpshift + (len >> 1), len >> 1);

	hpre_req->src = ptr;
	msg->in = cpu_to_le64(dma);
	return 0;
}

static int hpre_ecdh_dst_data_init(struct hpre_asym_request *hpre_req,
				   struct scatterlist *data, unsigned int len)
{
	struct hpre_sqe *msg = &hpre_req->req;
	struct hpre_ctx *ctx = hpre_req->ctx;
	struct device *dev = ctx->dev;
	dma_addr_t dma;

	if (unlikely(!data || !sg_is_last(data) || len != ctx->key_sz << 1)) {
		dev_err(dev, "data or data length is illegal!\n");
		return -EINVAL;
	}

	hpre_req->dst = NULL;
	dma = dma_map_single(dev, sg_virt(data), len, DMA_FROM_DEVICE);
	if (unlikely(dma_mapping_error(dev, dma))) {
		dev_err(dev, "dma map data err!\n");
		return -ENOMEM;
	}

	msg->out = cpu_to_le64(dma);
	return 0;
}

static int hpre_ecdh_compute_value(struct kpp_request *req)
{
	struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
	struct device *dev = ctx->dev;
	void *tmp = kpp_request_ctx(req);
	struct hpre_asym_request *hpre_req = PTR_ALIGN(tmp, hpre_align_sz());
	struct hpre_sqe *msg = &hpre_req->req;
	int ret;

	ret = hpre_ecdh_msg_request_set(ctx, req);
	if (unlikely(ret)) {
		dev_err(dev, "failed to set ecdh request, ret = %d!\n", ret);
		return ret;
	}

	if (req->src) {
		ret = hpre_ecdh_src_data_init(hpre_req, req->src, req->src_len);
		if (unlikely(ret)) {
			dev_err(dev, "failed to init src data, ret = %d!\n", ret);
			goto clear_all;
		}
	} else {
		msg->in = cpu_to_le64(ctx->ecdh.dma_g);
	}

	ret = hpre_ecdh_dst_data_init(hpre_req, req->dst, req->dst_len);
	if (unlikely(ret)) {
		dev_err(dev, "failed to init dst data, ret = %d!\n", ret);
		goto clear_all;
	}

	msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_ECC_MUL);
	msg->resv1 = ctx->enable_hpcore << HPRE_ENABLE_HPCORE_SHIFT;

	ret = hpre_send(ctx, msg);
	if (likely(!ret))
		return -EINPROGRESS;

clear_all:
	hpre_rm_req_from_ctx(hpre_req);
	hpre_ecdh_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);
	return ret;
}

static unsigned int hpre_ecdh_max_size(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	/* max size is the pub_key_size, include x and y */
	return ctx->key_sz << 1;
}

static int hpre_ecdh_nist_p192_init_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	ctx->curve_id = ECC_CURVE_NIST_P192;

	kpp_set_reqsize(tfm, sizeof(struct hpre_asym_request) + hpre_align_pd());

	return hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
}

static int hpre_ecdh_nist_p256_init_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	ctx->curve_id = ECC_CURVE_NIST_P256;
	ctx->enable_hpcore = 1;

	kpp_set_reqsize(tfm, sizeof(struct hpre_asym_request) + hpre_align_pd());

	return hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
}

static int hpre_ecdh_nist_p384_init_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	ctx->curve_id = ECC_CURVE_NIST_P384;

	kpp_set_reqsize(tfm, sizeof(struct hpre_asym_request) + hpre_align_pd());

	return hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
}

static void hpre_ecdh_exit_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	hpre_ecc_clear_ctx(ctx, true, true);
}

static void hpre_curve25519_fill_curve(struct hpre_ctx *ctx, const void *buf,
				       unsigned int len)
{
	u8 secret[CURVE25519_KEY_SIZE] = { 0 };
	unsigned int sz = ctx->key_sz;
	const struct ecc_curve *curve;
	unsigned int shift = sz << 1;
	void *p;

	/*
	 * The key from 'buf' is in little-endian, we should preprocess it as
	 * the description in rfc7748: "k[0] &= 248, k[31] &= 127, k[31] |= 64",
	 * then convert it to big endian. Only in this way, the result can be
	 * the same as the software curve-25519 that exists in crypto.
	 */
	memcpy(secret, buf, len);
	curve25519_clamp_secret(secret);
	hpre_key_to_big_end(secret, CURVE25519_KEY_SIZE);

	p = ctx->curve25519.p + sz - len;

	curve = ecc_get_curve25519();

	/* fill curve parameters */
	fill_curve_param(p, curve->p, len, curve->g.ndigits);
	fill_curve_param(p + sz, curve->a, len, curve->g.ndigits);
	memcpy(p + shift, secret, len);
	fill_curve_param(p + shift + sz, curve->g.x, len, curve->g.ndigits);
	memzero_explicit(secret, CURVE25519_KEY_SIZE);
}

static int hpre_curve25519_set_param(struct hpre_ctx *ctx, const void *buf,
				     unsigned int len)
{
	struct device *dev = ctx->dev;
	unsigned int sz = ctx->key_sz;
	unsigned int shift = sz << 1;

	/* p->a->k->gx */
	if (!ctx->curve25519.p) {
		ctx->curve25519.p = dma_alloc_coherent(dev, sz << 2,
						       &ctx->curve25519.dma_p,
						       GFP_KERNEL);
		if (!ctx->curve25519.p)
			return -ENOMEM;
	}

	ctx->curve25519.g = ctx->curve25519.p + shift + sz;
	ctx->curve25519.dma_g = ctx->curve25519.dma_p + shift + sz;

	hpre_curve25519_fill_curve(ctx, buf, len);

	return 0;
}

static int hpre_curve25519_set_secret(struct crypto_kpp *tfm, const void *buf,
				      unsigned int len)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
	struct device *dev = ctx->dev;
	int ret = -EINVAL;

	if (len != CURVE25519_KEY_SIZE ||
	    !crypto_memneq(buf, curve25519_null_point, CURVE25519_KEY_SIZE)) {
		dev_err(dev, "key is null or key len is not 32bytes!\n");
		return ret;
	}

	/* Free old secret if any */
	hpre_ecc_clear_ctx(ctx, false, false);

	ctx->key_sz = CURVE25519_KEY_SIZE;
	ret = hpre_curve25519_set_param(ctx, buf, CURVE25519_KEY_SIZE);
	if (ret) {
		dev_err(dev, "failed to set curve25519 param, ret = %d!\n", ret);
		hpre_ecc_clear_ctx(ctx, false, false);
		return ret;
	}

	return 0;
}

static void hpre_curve25519_hw_data_clr_all(struct hpre_ctx *ctx,
					    struct hpre_asym_request *req,
					    struct scatterlist *dst,
					    struct scatterlist *src)
{
	struct device *dev = ctx->dev;
	struct hpre_sqe *sqe = &req->req;
	dma_addr_t dma;

	dma = le64_to_cpu(sqe->in);
	if (unlikely(dma_mapping_error(dev, dma)))
		return;

	if (src && req->src)
		dma_free_coherent(dev, ctx->key_sz, req->src, dma);

	dma = le64_to_cpu(sqe->out);
	if (unlikely(dma_mapping_error(dev, dma)))
		return;

	if (req->dst)
		dma_free_coherent(dev, ctx->key_sz, req->dst, dma);
	if (dst)
		dma_unmap_single(dev, dma, ctx->key_sz, DMA_FROM_DEVICE);
}

static void hpre_curve25519_cb(struct hpre_ctx *ctx, void *resp)
{
	struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
	struct hpre_asym_request *req = NULL;
	struct kpp_request *areq;
	u64 overtime_thrhld;
	int ret;

	ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);
	areq = req->areq.curve25519;
	areq->dst_len = ctx->key_sz;

	overtime_thrhld = atomic64_read(&dfx[HPRE_OVERTIME_THRHLD].value);
	if (overtime_thrhld && hpre_is_bd_timeout(req, overtime_thrhld))
		atomic64_inc(&dfx[HPRE_OVER_THRHLD_CNT].value);

	/* Do unmap before data processing */
	hpre_curve25519_hw_data_clr_all(ctx, req, areq->dst, areq->src);

	hpre_key_to_big_end(sg_virt(areq->dst), CURVE25519_KEY_SIZE);

	kpp_request_complete(areq, ret);

	atomic64_inc(&dfx[HPRE_RECV_CNT].value);
}

static int hpre_curve25519_msg_request_set(struct hpre_ctx *ctx,
					   struct kpp_request *req)
{
	struct hpre_asym_request *h_req;
	struct hpre_sqe *msg;
	int req_id;
	void *tmp;

	if (unlikely(req->dst_len < ctx->key_sz)) {
		req->dst_len = ctx->key_sz;
		return -EINVAL;
	}

	tmp = kpp_request_ctx(req);
	h_req = PTR_ALIGN(tmp, hpre_align_sz());
	h_req->cb = hpre_curve25519_cb;
	h_req->areq.curve25519 = req;
	msg = &h_req->req;
	memset(msg, 0, sizeof(*msg));
	msg->in = cpu_to_le64(DMA_MAPPING_ERROR);
	msg->out = cpu_to_le64(DMA_MAPPING_ERROR);
	msg->key = cpu_to_le64(ctx->curve25519.dma_p);

	msg->dw0 |= cpu_to_le32(0x1U << HPRE_SQE_DONE_SHIFT);
	msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1;
	h_req->ctx = ctx;

	req_id = hpre_add_req_to_ctx(h_req);
	if (req_id < 0)
		return -EBUSY;

	msg->tag = cpu_to_le16((u16)req_id);
	return 0;
}

static void hpre_curve25519_src_modulo_p(u8 *ptr)
{
	int i;

	for (i = 0; i < CURVE25519_KEY_SIZE - 1; i++)
		ptr[i] = 0;

	/* The modulus is ptr's last byte minus '0xed'(last byte of p) */
	ptr[i] -= 0xed;
}

static int hpre_curve25519_src_init(struct hpre_asym_request *hpre_req,
				    struct scatterlist *data, unsigned int len)
{
	struct hpre_sqe *msg = &hpre_req->req;
	struct hpre_ctx *ctx = hpre_req->ctx;
	struct device *dev = ctx->dev;
	u8 p[CURVE25519_KEY_SIZE] = { 0 };
	const struct ecc_curve *curve;
	dma_addr_t dma = 0;
	u8 *ptr;

	if (len != CURVE25519_KEY_SIZE) {
		dev_err(dev, "sourc_data len is not 32bytes, len = %u!\n", len);
		return -EINVAL;
	}

	ptr = dma_alloc_coherent(dev, ctx->key_sz, &dma, GFP_KERNEL);
	if (unlikely(!ptr))
		return -ENOMEM;

	scatterwalk_map_and_copy(ptr, data, 0, len, 0);

	if (!crypto_memneq(ptr, curve25519_null_point, CURVE25519_KEY_SIZE)) {
		dev_err(dev, "gx is null!\n");
		goto err;
	}

	/*
	 * Src_data(gx) is in little-endian order, MSB in the final byte should
	 * be masked as described in RFC7748, then transform it to big-endian
	 * form, then hisi_hpre can use the data.
	 */
	ptr[31] &= 0x7f;
	hpre_key_to_big_end(ptr, CURVE25519_KEY_SIZE);

	curve = ecc_get_curve25519();

	fill_curve_param(p, curve->p, CURVE25519_KEY_SIZE, curve->g.ndigits);

	/*
	 * When src_data equals (2^255 - 19) ~ (2^255 - 1), it is out of p,
	 * we get its modulus to p, and then use it.
	 */
	if (memcmp(ptr, p, ctx->key_sz) == 0) {
		dev_err(dev, "gx is p!\n");
		goto err;
	} else if (memcmp(ptr, p, ctx->key_sz) > 0) {
		hpre_curve25519_src_modulo_p(ptr);
	}

	hpre_req->src = ptr;
	msg->in = cpu_to_le64(dma);
	return 0;

err:
	dma_free_coherent(dev, ctx->key_sz, ptr, dma);
	return -EINVAL;
}

static int hpre_curve25519_dst_init(struct hpre_asym_request *hpre_req,
				    struct scatterlist *data, unsigned int len)
{
	struct hpre_sqe *msg = &hpre_req->req;
	struct hpre_ctx *ctx = hpre_req->ctx;
	struct device *dev = ctx->dev;
	dma_addr_t dma;

	if (!data || !sg_is_last(data) || len != ctx->key_sz) {
		dev_err(dev, "data or data length is illegal!\n");
		return -EINVAL;
	}

	hpre_req->dst = NULL;
	dma = dma_map_single(dev, sg_virt(data), len, DMA_FROM_DEVICE);
	if (unlikely(dma_mapping_error(dev, dma))) {
		dev_err(dev, "dma map data err!\n");
		return -ENOMEM;
	}

	msg->out = cpu_to_le64(dma);
	return 0;
}

static int hpre_curve25519_compute_value(struct kpp_request *req)
{
	struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
	struct device *dev = ctx->dev;
	void *tmp = kpp_request_ctx(req);
	struct hpre_asym_request *hpre_req = PTR_ALIGN(tmp, hpre_align_sz());
	struct hpre_sqe *msg = &hpre_req->req;
	int ret;

	ret = hpre_curve25519_msg_request_set(ctx, req);
	if (unlikely(ret)) {
		dev_err(dev, "failed to set curve25519 request, ret = %d!\n", ret);
		return ret;
	}

	if (req->src) {
		ret = hpre_curve25519_src_init(hpre_req, req->src, req->src_len);
		if (unlikely(ret)) {
			dev_err(dev, "failed to init src data, ret = %d!\n",
				ret);
			goto clear_all;
		}
	} else {
		msg->in = cpu_to_le64(ctx->curve25519.dma_g);
	}

	ret = hpre_curve25519_dst_init(hpre_req, req->dst, req->dst_len);
	if (unlikely(ret)) {
		dev_err(dev, "failed to init dst data, ret = %d!\n", ret);
		goto clear_all;
	}

	msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_CURVE25519_MUL);
	ret = hpre_send(ctx, msg);
	if (likely(!ret))
		return -EINPROGRESS;

clear_all:
	hpre_rm_req_from_ctx(hpre_req);
	hpre_curve25519_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);
	return ret;
}

static unsigned int hpre_curve25519_max_size(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	return ctx->key_sz;
}

static int hpre_curve25519_init_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	kpp_set_reqsize(tfm, sizeof(struct hpre_asym_request) + hpre_align_pd());

	return hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
}

static void hpre_curve25519_exit_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	hpre_ecc_clear_ctx(ctx, true, false);
}

static struct akcipher_alg rsa = {
	.encrypt = hpre_rsa_enc,
	.decrypt = hpre_rsa_dec,
	.set_pub_key = hpre_rsa_setpubkey,
	.set_priv_key = hpre_rsa_setprivkey,
	.max_size = hpre_rsa_max_size,
	.init = hpre_rsa_init_tfm,
	.exit = hpre_rsa_exit_tfm,
	.base = {
		.cra_ctxsize = sizeof(struct hpre_ctx),
		.cra_priority = HPRE_CRYPTO_ALG_PRI,
		.cra_name = "rsa",
		.cra_driver_name = "hpre-rsa",
		.cra_module = THIS_MODULE,
	},
};

static struct kpp_alg dh = {
	.set_secret = hpre_dh_set_secret,
	.generate_public_key = hpre_dh_compute_value,
	.compute_shared_secret = hpre_dh_compute_value,
	.max_size = hpre_dh_max_size,
	.init = hpre_dh_init_tfm,
	.exit = hpre_dh_exit_tfm,
	.base = {
		.cra_ctxsize = sizeof(struct hpre_ctx),
		.cra_priority = HPRE_CRYPTO_ALG_PRI,
		.cra_name = "dh",
		.cra_driver_name = "hpre-dh",
		.cra_module = THIS_MODULE,
	},
};

static struct kpp_alg ecdh_curves[] = {
	{
		.set_secret = hpre_ecdh_set_secret,
		.generate_public_key = hpre_ecdh_compute_value,
		.compute_shared_secret = hpre_ecdh_compute_value,
		.max_size = hpre_ecdh_max_size,
		.init = hpre_ecdh_nist_p192_init_tfm,
		.exit = hpre_ecdh_exit_tfm,
		.base = {
			.cra_ctxsize = sizeof(struct hpre_ctx),
			.cra_priority = HPRE_CRYPTO_ALG_PRI,
			.cra_name = "ecdh-nist-p192",
			.cra_driver_name = "hpre-ecdh-nist-p192",
			.cra_module = THIS_MODULE,
		},
	}, {
		.set_secret = hpre_ecdh_set_secret,
		.generate_public_key = hpre_ecdh_compute_value,
		.compute_shared_secret = hpre_ecdh_compute_value,
		.max_size = hpre_ecdh_max_size,
		.init = hpre_ecdh_nist_p256_init_tfm,
		.exit = hpre_ecdh_exit_tfm,
		.base = {
			.cra_ctxsize = sizeof(struct hpre_ctx),
			.cra_priority = HPRE_CRYPTO_ALG_PRI,
			.cra_name = "ecdh-nist-p256",
			.cra_driver_name = "hpre-ecdh-nist-p256",
			.cra_module = THIS_MODULE,
		},
	}, {
		.set_secret = hpre_ecdh_set_secret,
		.generate_public_key = hpre_ecdh_compute_value,
		.compute_shared_secret = hpre_ecdh_compute_value,
		.max_size = hpre_ecdh_max_size,
		.init = hpre_ecdh_nist_p384_init_tfm,
		.exit = hpre_ecdh_exit_tfm,
		.base = {
			.cra_ctxsize = sizeof(struct hpre_ctx),
			.cra_priority = HPRE_CRYPTO_ALG_PRI,
			.cra_name = "ecdh-nist-p384",
			.cra_driver_name = "hpre-ecdh-nist-p384",
			.cra_module = THIS_MODULE,
		},
	}
};

static struct kpp_alg curve25519_alg = {
	.set_secret = hpre_curve25519_set_secret,
	.generate_public_key = hpre_curve25519_compute_value,
	.compute_shared_secret = hpre_curve25519_compute_value,
	.max_size = hpre_curve25519_max_size,
	.init = hpre_curve25519_init_tfm,
	.exit = hpre_curve25519_exit_tfm,
	.base = {
		.cra_ctxsize = sizeof(struct hpre_ctx),
		.cra_priority = HPRE_CRYPTO_ALG_PRI,
		.cra_name = "curve25519",
		.cra_driver_name = "hpre-curve25519",
		.cra_module = THIS_MODULE,
	},
};

static int hpre_register_rsa(struct hisi_qm *qm)
{
	int ret;

	if (!hpre_check_alg_support(qm, HPRE_DRV_RSA_MASK_CAP))
		return 0;

	rsa.base.cra_flags = 0;
	ret = crypto_register_akcipher(&rsa);
	if (ret)
		dev_err(&qm->pdev->dev, "failed to register rsa (%d)!\n", ret);

	return ret;
}

static void hpre_unregister_rsa(struct hisi_qm *qm)
{
	if (!hpre_check_alg_support(qm, HPRE_DRV_RSA_MASK_CAP))
		return;

	crypto_unregister_akcipher(&rsa);
}

static int hpre_register_dh(struct hisi_qm *qm)
{
	int ret;

	if (!hpre_check_alg_support(qm, HPRE_DRV_DH_MASK_CAP))
		return 0;

	ret = crypto_register_kpp(&dh);
	if (ret)
		dev_err(&qm->pdev->dev, "failed to register dh (%d)!\n", ret);

	return ret;
}

static void hpre_unregister_dh(struct hisi_qm *qm)
{
	if (!hpre_check_alg_support(qm, HPRE_DRV_DH_MASK_CAP))
		return;

	crypto_unregister_kpp(&dh);
}

static int hpre_register_ecdh(struct hisi_qm *qm)
{
	int ret, i;

	if (!hpre_check_alg_support(qm, HPRE_DRV_ECDH_MASK_CAP))
		return 0;

	for (i = 0; i < ARRAY_SIZE(ecdh_curves); i++) {
		ret = crypto_register_kpp(&ecdh_curves[i]);
		if (ret) {
			dev_err(&qm->pdev->dev, "failed to register %s (%d)!\n",
				ecdh_curves[i].base.cra_name, ret);
			goto unreg_kpp;
		}
	}

	return 0;

unreg_kpp:
	for (--i; i >= 0; --i)
		crypto_unregister_kpp(&ecdh_curves[i]);

	return ret;
}

static void hpre_unregister_ecdh(struct hisi_qm *qm)
{
	int i;

	if (!hpre_check_alg_support(qm, HPRE_DRV_ECDH_MASK_CAP))
		return;

	for (i = ARRAY_SIZE(ecdh_curves) - 1; i >= 0; --i)
		crypto_unregister_kpp(&ecdh_curves[i]);
}

static int hpre_register_x25519(struct hisi_qm *qm)
{
	int ret;

	if (!hpre_check_alg_support(qm, HPRE_DRV_X25519_MASK_CAP))
		return 0;

	ret = crypto_register_kpp(&curve25519_alg);
	if (ret)
		dev_err(&qm->pdev->dev, "failed to register x25519 (%d)!\n", ret);

	return ret;
}

static void hpre_unregister_x25519(struct hisi_qm *qm)
{
	if (!hpre_check_alg_support(qm, HPRE_DRV_X25519_MASK_CAP))
		return;

	crypto_unregister_kpp(&curve25519_alg);
}

int hpre_algs_register(struct hisi_qm *qm)
{
	int ret = 0;

	mutex_lock(&hpre_algs_lock);
	if (hpre_available_devs) {
		hpre_available_devs++;
		goto unlock;
	}

	ret = hpre_register_rsa(qm);
	if (ret)
		goto unlock;

	ret = hpre_register_dh(qm);
	if (ret)
		goto unreg_rsa;

	ret = hpre_register_ecdh(qm);
	if (ret)
		goto unreg_dh;

	ret = hpre_register_x25519(qm);
	if (ret)
		goto unreg_ecdh;

	hpre_available_devs++;
	mutex_unlock(&hpre_algs_lock);

	return ret;

unreg_ecdh:
	hpre_unregister_ecdh(qm);
unreg_dh:
	hpre_unregister_dh(qm);
unreg_rsa:
	hpre_unregister_rsa(qm);
unlock:
	mutex_unlock(&hpre_algs_lock);
	return ret;
}

void hpre_algs_unregister(struct hisi_qm *qm)
{
	mutex_lock(&hpre_algs_lock);
	if (--hpre_available_devs)
		goto unlock;

	hpre_unregister_x25519(qm);
	hpre_unregister_ecdh(qm);
	hpre_unregister_dh(qm);
	hpre_unregister_rsa(qm);

unlock:
	mutex_unlock(&hpre_algs_lock);
}
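
/*
 * Usage note: nothing calls these handlers directly. The algorithms are
 * reached through the generic crypto API once hpre_algs_register() has run,
 * e.g. crypto_alloc_akcipher("rsa", 0, 0) or crypto_alloc_kpp("dh", 0, 0),
 * crypto_alloc_kpp("ecdh-nist-p256", 0, 0), crypto_alloc_kpp("curve25519",
 * 0, 0); with cra_priority 1000 the "hpre-*" implementations normally win
 * over the software ones when a device is present. A minimal, illustrative
 * kpp sequence (not taken from this driver) would look like:
 *
 *	tfm = crypto_alloc_kpp("curve25519", 0, 0);
 *	crypto_kpp_set_secret(tfm, secret, CURVE25519_KEY_SIZE);
 *	req = kpp_request_alloc(tfm, GFP_KERNEL);
 *	kpp_request_set_input(req, NULL, 0);
 *	kpp_request_set_output(req, &dst_sg, crypto_kpp_maxsize(tfm));
 *	crypto_kpp_generate_public_key(req);
 */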