/*
 * Path: blob/master/drivers/crypto/hisilicon/hpre/hpre_crypto.c
 * (53157 views)
 */
// SPDX-License-Identifier: GPL-2.01/* Copyright (c) 2019 HiSilicon Limited. */2#include <crypto/akcipher.h>3#include <crypto/dh.h>4#include <crypto/ecc_curve.h>5#include <crypto/ecdh.h>6#include <crypto/rng.h>7#include <crypto/internal/akcipher.h>8#include <crypto/internal/kpp.h>9#include <crypto/internal/rsa.h>10#include <crypto/kpp.h>11#include <crypto/scatterwalk.h>12#include <linux/dma-mapping.h>13#include <linux/fips.h>14#include <linux/module.h>15#include <linux/time.h>16#include "hpre.h"1718struct hpre_ctx;1920#define HPRE_CRYPTO_ALG_PRI 100021#define HPRE_ALIGN_SZ 6422#define HPRE_BITS_2_BYTES_SHIFT 323#define HPRE_RSA_512BITS_KSZ 6424#define HPRE_RSA_1536BITS_KSZ 19225#define HPRE_CRT_PRMS 526#define HPRE_CRT_Q 227#define HPRE_CRT_P 328#define HPRE_CRT_INV 429#define HPRE_DH_G_FLAG 0x0230#define HPRE_TRY_SEND_TIMES 10031#define HPRE_INVLD_REQ_ID (-1)3233#define HPRE_SQE_ALG_BITS 534#define HPRE_SQE_DONE_SHIFT 3035#define HPRE_DH_MAX_P_SZ 5123637#define HPRE_DFX_SEC_TO_US 100000038#define HPRE_DFX_US_TO_NS 10003940#define HPRE_ENABLE_HPCORE_SHIFT 74142/* due to nist p521 */43#define HPRE_ECC_MAX_KSZ 664445/* size in bytes of the n prime */46#define HPRE_ECC_NIST_P192_N_SIZE 2447#define HPRE_ECC_NIST_P256_N_SIZE 3248#define HPRE_ECC_NIST_P384_N_SIZE 484950/* size in bytes */51#define HPRE_ECC_HW256_KSZ_B 3252#define HPRE_ECC_HW384_KSZ_B 485354/* capability register mask of driver */55#define HPRE_DRV_RSA_MASK_CAP BIT(0)56#define HPRE_DRV_DH_MASK_CAP BIT(1)57#define HPRE_DRV_ECDH_MASK_CAP BIT(2)58#define HPRE_DRV_X25519_MASK_CAP BIT(5)5960static DEFINE_MUTEX(hpre_algs_lock);61static unsigned int hpre_available_devs;6263typedef void (*hpre_cb)(struct hpre_ctx *ctx, void *sqe);6465struct hpre_rsa_ctx {66/* low address: e--->n */67char *pubkey;68dma_addr_t dma_pubkey;6970/* low address: d--->n */71char *prikey;72dma_addr_t dma_prikey;7374/* low address: dq->dp->q->p->qinv */75char *crt_prikey;76dma_addr_t dma_crt_prikey;7778struct crypto_akcipher 
*soft_tfm;79};8081struct hpre_dh_ctx {82/*83* If base is g we compute the public key84* ya = g^xa mod p; [RFC2631 sec 2.1.1]85* else if base if the counterpart public key we86* compute the shared secret87* ZZ = yb^xa mod p; [RFC2631 sec 2.1.1]88* low address: d--->n, please refer to Hisilicon HPRE UM89*/90char *xa_p;91dma_addr_t dma_xa_p;9293char *g; /* m */94dma_addr_t dma_g;95struct crypto_kpp *soft_tfm;96};9798struct hpre_ecdh_ctx {99/* low address: p->a->k->b */100unsigned char *p;101dma_addr_t dma_p;102103/* low address: x->y */104unsigned char *g;105dma_addr_t dma_g;106struct crypto_kpp *soft_tfm;107};108109struct hpre_ctx {110struct hisi_qp *qp;111struct device *dev;112struct hpre *hpre;113unsigned int key_sz;114bool crt_g2_mode;115union {116struct hpre_rsa_ctx rsa;117struct hpre_dh_ctx dh;118struct hpre_ecdh_ctx ecdh;119};120/* for ecc algorithms */121unsigned int curve_id;122/* for high performance core */123u8 enable_hpcore;124bool fallback;125};126127struct hpre_asym_request {128char *src;129char *dst;130struct hpre_sqe req;131struct hpre_ctx *ctx;132union {133struct akcipher_request *rsa;134struct kpp_request *dh;135struct kpp_request *ecdh;136} areq;137int err;138hpre_cb cb;139struct timespec64 req_time;140};141142static inline unsigned int hpre_align_sz(void)143{144return ((crypto_dma_align() - 1) | (HPRE_ALIGN_SZ - 1)) + 1;145}146147static inline unsigned int hpre_align_pd(void)148{149return (hpre_align_sz() - 1) & ~(crypto_tfm_ctx_alignment() - 1);150}151152static void hpre_dfx_add_req_time(struct hpre_asym_request *hpre_req)153{154struct hpre_ctx *ctx = hpre_req->ctx;155struct hpre_dfx *dfx = ctx->hpre->debug.dfx;156157if (atomic64_read(&dfx[HPRE_OVERTIME_THRHLD].value))158ktime_get_ts64(&hpre_req->req_time);159}160161static int hpre_get_data_dma_addr(struct hpre_asym_request *hpre_req,162struct scatterlist *data, unsigned int len,163int is_src, dma_addr_t *tmp)164{165struct device *dev = hpre_req->ctx->dev;166enum dma_data_direction 
dma_dir;167168if (is_src) {169hpre_req->src = NULL;170dma_dir = DMA_TO_DEVICE;171} else {172hpre_req->dst = NULL;173dma_dir = DMA_FROM_DEVICE;174}175*tmp = dma_map_single(dev, sg_virt(data), len, dma_dir);176if (unlikely(dma_mapping_error(dev, *tmp))) {177dev_err(dev, "dma map data err!\n");178return -ENOMEM;179}180181return 0;182}183184static int hpre_prepare_dma_buf(struct hpre_asym_request *hpre_req,185struct scatterlist *data, unsigned int len,186int is_src, dma_addr_t *tmp)187{188struct hpre_ctx *ctx = hpre_req->ctx;189struct device *dev = ctx->dev;190void *ptr;191int shift;192193shift = ctx->key_sz - len;194if (unlikely(shift < 0))195return -EINVAL;196197ptr = dma_alloc_coherent(dev, ctx->key_sz, tmp, GFP_ATOMIC);198if (unlikely(!ptr))199return -ENOMEM;200201if (is_src) {202scatterwalk_map_and_copy(ptr + shift, data, 0, len, 0);203hpre_req->src = ptr;204} else {205hpre_req->dst = ptr;206}207208return 0;209}210211static int hpre_hw_data_init(struct hpre_asym_request *hpre_req,212struct scatterlist *data, unsigned int len,213int is_src, int is_dh)214{215struct hpre_sqe *msg = &hpre_req->req;216struct hpre_ctx *ctx = hpre_req->ctx;217dma_addr_t tmp = 0;218int ret;219220/* when the data is dh's source, we should format it */221if ((sg_is_last(data) && len == ctx->key_sz) &&222((is_dh && !is_src) || !is_dh))223ret = hpre_get_data_dma_addr(hpre_req, data, len, is_src, &tmp);224else225ret = hpre_prepare_dma_buf(hpre_req, data, len, is_src, &tmp);226227if (unlikely(ret))228return ret;229230if (is_src)231msg->in = cpu_to_le64(tmp);232else233msg->out = cpu_to_le64(tmp);234235return 0;236}237238static void hpre_hw_data_clr_all(struct hpre_ctx *ctx,239struct hpre_asym_request *req,240struct scatterlist *dst,241struct scatterlist *src)242{243struct device *dev = ctx->dev;244struct hpre_sqe *sqe = &req->req;245dma_addr_t tmp;246247tmp = le64_to_cpu(sqe->in);248if (unlikely(dma_mapping_error(dev, tmp)))249return;250251if (src) {252if (req->src)253dma_free_coherent(dev, 
ctx->key_sz, req->src, tmp);254else255dma_unmap_single(dev, tmp, ctx->key_sz, DMA_TO_DEVICE);256}257258tmp = le64_to_cpu(sqe->out);259if (unlikely(dma_mapping_error(dev, tmp)))260return;261262if (req->dst) {263if (dst)264scatterwalk_map_and_copy(req->dst, dst, 0,265ctx->key_sz, 1);266dma_free_coherent(dev, ctx->key_sz, req->dst, tmp);267} else {268dma_unmap_single(dev, tmp, ctx->key_sz, DMA_FROM_DEVICE);269}270}271272static int hpre_alg_res_post_hf(struct hpre_ctx *ctx, struct hpre_sqe *sqe,273void **kreq)274{275unsigned int err, done, alg;276277#define HPRE_NO_HW_ERR 0278#define HPRE_HW_TASK_DONE 3279#define HREE_HW_ERR_MASK GENMASK(10, 0)280#define HREE_SQE_DONE_MASK GENMASK(1, 0)281#define HREE_ALG_TYPE_MASK GENMASK(4, 0)282*kreq = (void *)le64_to_cpu(sqe->tag);283284err = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_ALG_BITS) &285HREE_HW_ERR_MASK;286done = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_DONE_SHIFT) &287HREE_SQE_DONE_MASK;288if (likely(err == HPRE_NO_HW_ERR && done == HPRE_HW_TASK_DONE))289return 0;290291alg = le32_to_cpu(sqe->dw0) & HREE_ALG_TYPE_MASK;292dev_err_ratelimited(ctx->dev, "alg[0x%x] error: done[0x%x], etype[0x%x]\n",293alg, done, err);294295return -EINVAL;296}297298static void hpre_ctx_clear(struct hpre_ctx *ctx, bool is_clear_all)299{300if (is_clear_all)301hisi_qm_free_qps(&ctx->qp, 1);302303ctx->crt_g2_mode = false;304ctx->key_sz = 0;305}306307static bool hpre_is_bd_timeout(struct hpre_asym_request *req,308u64 overtime_thrhld)309{310struct timespec64 reply_time;311u64 time_use_us;312313ktime_get_ts64(&reply_time);314time_use_us = (reply_time.tv_sec - req->req_time.tv_sec) *315HPRE_DFX_SEC_TO_US +316(reply_time.tv_nsec - req->req_time.tv_nsec) /317HPRE_DFX_US_TO_NS;318319if (time_use_us <= overtime_thrhld)320return false;321322return true;323}324325static void hpre_dh_cb(struct hpre_ctx *ctx, void *resp)326{327struct hpre_dfx *dfx = ctx->hpre->debug.dfx;328struct hpre_asym_request *req;329struct kpp_request *areq;330u64 overtime_thrhld;331int 
ret;332333ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);334areq = req->areq.dh;335areq->dst_len = ctx->key_sz;336337overtime_thrhld = atomic64_read(&dfx[HPRE_OVERTIME_THRHLD].value);338if (overtime_thrhld && hpre_is_bd_timeout(req, overtime_thrhld))339atomic64_inc(&dfx[HPRE_OVER_THRHLD_CNT].value);340341hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src);342kpp_request_complete(areq, ret);343atomic64_inc(&dfx[HPRE_RECV_CNT].value);344}345346static void hpre_rsa_cb(struct hpre_ctx *ctx, void *resp)347{348struct hpre_dfx *dfx = ctx->hpre->debug.dfx;349struct hpre_asym_request *req;350struct akcipher_request *areq;351u64 overtime_thrhld;352int ret;353354ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);355356overtime_thrhld = atomic64_read(&dfx[HPRE_OVERTIME_THRHLD].value);357if (overtime_thrhld && hpre_is_bd_timeout(req, overtime_thrhld))358atomic64_inc(&dfx[HPRE_OVER_THRHLD_CNT].value);359360areq = req->areq.rsa;361areq->dst_len = ctx->key_sz;362hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src);363akcipher_request_complete(areq, ret);364atomic64_inc(&dfx[HPRE_RECV_CNT].value);365}366367static void hpre_alg_cb(struct hisi_qp *qp, void *resp)368{369struct hpre_asym_request *h_req;370struct hpre_sqe *sqe = resp;371372h_req = (struct hpre_asym_request *)le64_to_cpu(sqe->tag);373if (unlikely(!h_req)) {374pr_err("Failed to get request, and qp_id is %u\n", qp->qp_id);375return;376}377378h_req->cb(h_req->ctx, resp);379}380381static int hpre_ctx_init(struct hpre_ctx *ctx, u8 type)382{383struct hisi_qp *qp;384struct hpre *hpre;385386qp = hpre_create_qp(type);387if (!qp) {388ctx->qp = NULL;389return -ENODEV;390}391392qp->req_cb = hpre_alg_cb;393ctx->qp = qp;394ctx->dev = &qp->qm->pdev->dev;395hpre = container_of(ctx->qp->qm, struct hpre, qm);396ctx->hpre = hpre;397ctx->key_sz = 0;398ctx->crt_g2_mode = false;399400return 0;401}402403static int hpre_msg_request_set(struct hpre_ctx *ctx, void *req, bool is_rsa)404{405struct hpre_asym_request *h_req;406struct 
hpre_sqe *msg;407void *tmp;408409if (is_rsa) {410struct akcipher_request *akreq = req;411412if (akreq->dst_len < ctx->key_sz) {413akreq->dst_len = ctx->key_sz;414return -EOVERFLOW;415}416417tmp = akcipher_request_ctx(akreq);418h_req = PTR_ALIGN(tmp, hpre_align_sz());419h_req->cb = hpre_rsa_cb;420h_req->areq.rsa = akreq;421msg = &h_req->req;422memset(msg, 0, sizeof(*msg));423} else {424struct kpp_request *kreq = req;425426if (kreq->dst_len < ctx->key_sz) {427kreq->dst_len = ctx->key_sz;428return -EOVERFLOW;429}430431tmp = kpp_request_ctx(kreq);432h_req = PTR_ALIGN(tmp, hpre_align_sz());433h_req->cb = hpre_dh_cb;434h_req->areq.dh = kreq;435msg = &h_req->req;436memset(msg, 0, sizeof(*msg));437msg->key = cpu_to_le64(ctx->dh.dma_xa_p);438}439440msg->in = cpu_to_le64(DMA_MAPPING_ERROR);441msg->out = cpu_to_le64(DMA_MAPPING_ERROR);442msg->dw0 |= cpu_to_le32(0x1 << HPRE_SQE_DONE_SHIFT);443msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1;444h_req->ctx = ctx;445446hpre_dfx_add_req_time(h_req);447msg->tag = cpu_to_le64((uintptr_t)h_req);448449return 0;450}451452static int hpre_send(struct hpre_ctx *ctx, struct hpre_sqe *msg)453{454struct hpre_dfx *dfx = ctx->hpre->debug.dfx;455int ctr = 0;456int ret;457458do {459atomic64_inc(&dfx[HPRE_SEND_CNT].value);460ret = hisi_qp_send(ctx->qp, msg);461if (ret != -EBUSY)462break;463atomic64_inc(&dfx[HPRE_SEND_BUSY_CNT].value);464} while (ctr++ < HPRE_TRY_SEND_TIMES);465466if (likely(!ret))467return ret;468469if (ret != -EBUSY)470atomic64_inc(&dfx[HPRE_SEND_FAIL_CNT].value);471472return ret;473}474475static int hpre_dh_compute_value(struct kpp_request *req)476{477struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);478struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);479void *tmp = kpp_request_ctx(req);480struct hpre_asym_request *hpre_req = PTR_ALIGN(tmp, hpre_align_sz());481struct hpre_sqe *msg = &hpre_req->req;482int ret;483484ret = hpre_msg_request_set(ctx, req, false);485if (unlikely(ret))486return ret;487488if (req->src) {489ret = 
hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 1);490if (unlikely(ret))491goto clear_all;492} else {493msg->in = cpu_to_le64(ctx->dh.dma_g);494}495496ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 1);497if (unlikely(ret))498goto clear_all;499500if (ctx->crt_g2_mode && !req->src)501msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH_G2);502else503msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH);504505/* success */506ret = hpre_send(ctx, msg);507if (likely(!ret))508return -EINPROGRESS;509510clear_all:511hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);512513return ret;514}515516static struct kpp_request *hpre_dh_prepare_fb_req(struct kpp_request *req)517{518struct kpp_request *fb_req = kpp_request_ctx(req);519struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);520struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);521522kpp_request_set_tfm(fb_req, ctx->dh.soft_tfm);523kpp_request_set_callback(fb_req, req->base.flags, req->base.complete, req->base.data);524kpp_request_set_input(fb_req, req->src, req->src_len);525kpp_request_set_output(fb_req, req->dst, req->dst_len);526527return fb_req;528}529530static int hpre_dh_generate_public_key(struct kpp_request *req)531{532struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);533struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);534struct kpp_request *fb_req;535536if (ctx->fallback) {537fb_req = hpre_dh_prepare_fb_req(req);538return crypto_kpp_generate_public_key(fb_req);539}540541return hpre_dh_compute_value(req);542}543544static int hpre_dh_compute_shared_secret(struct kpp_request *req)545{546struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);547struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);548struct kpp_request *fb_req;549550if (ctx->fallback) {551fb_req = hpre_dh_prepare_fb_req(req);552return crypto_kpp_compute_shared_secret(fb_req);553}554555return hpre_dh_compute_value(req);556}557558static int hpre_is_dh_params_length_valid(unsigned int key_sz)559{560#define _HPRE_DH_GRP1 768561#define _HPRE_DH_GRP2 1024562#define 
_HPRE_DH_GRP5 1536563#define _HPRE_DH_GRP14 2048564#define _HPRE_DH_GRP15 3072565#define _HPRE_DH_GRP16 4096566switch (key_sz) {567case _HPRE_DH_GRP1:568case _HPRE_DH_GRP2:569case _HPRE_DH_GRP5:570case _HPRE_DH_GRP14:571case _HPRE_DH_GRP15:572case _HPRE_DH_GRP16:573return 0;574default:575return -EINVAL;576}577}578579static int hpre_dh_set_params(struct hpre_ctx *ctx, struct dh *params)580{581struct device *dev = ctx->dev;582unsigned int sz;583584sz = ctx->key_sz = params->p_size;585ctx->dh.xa_p = dma_alloc_coherent(dev, sz << 1,586&ctx->dh.dma_xa_p, GFP_KERNEL);587if (!ctx->dh.xa_p)588return -ENOMEM;589590memcpy(ctx->dh.xa_p + sz, params->p, sz);591592/* If g equals 2 don't copy it */593if (params->g_size == 1 && *(char *)params->g == HPRE_DH_G_FLAG) {594ctx->crt_g2_mode = true;595return 0;596}597598ctx->dh.g = dma_alloc_coherent(dev, sz, &ctx->dh.dma_g, GFP_KERNEL);599if (!ctx->dh.g) {600dma_free_coherent(dev, sz << 1, ctx->dh.xa_p,601ctx->dh.dma_xa_p);602ctx->dh.xa_p = NULL;603return -ENOMEM;604}605606memcpy(ctx->dh.g + (sz - params->g_size), params->g, params->g_size);607608return 0;609}610611static void hpre_dh_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all)612{613struct device *dev = ctx->dev;614unsigned int sz = ctx->key_sz;615616if (!ctx->qp)617return;618619if (ctx->dh.g) {620dma_free_coherent(dev, sz, ctx->dh.g, ctx->dh.dma_g);621ctx->dh.g = NULL;622}623624if (ctx->dh.xa_p) {625memzero_explicit(ctx->dh.xa_p, sz);626dma_free_coherent(dev, sz << 1, ctx->dh.xa_p,627ctx->dh.dma_xa_p);628ctx->dh.xa_p = NULL;629}630631hpre_ctx_clear(ctx, is_clear_all);632}633634static int hpre_dh_set_secret(struct crypto_kpp *tfm, const void *buf,635unsigned int len)636{637struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);638struct dh params;639int ret;640641if (crypto_dh_decode_key(buf, len, ¶ms) < 0)642return -EINVAL;643644if (!ctx->qp)645goto set_soft_secret;646647if (hpre_is_dh_params_length_valid(params.p_size <<648HPRE_BITS_2_BYTES_SHIFT))649goto set_soft_secret;650651/* Free old 
secret if any */652hpre_dh_clear_ctx(ctx, false);653654ret = hpre_dh_set_params(ctx, ¶ms);655if (ret < 0)656goto err_clear_ctx;657658memcpy(ctx->dh.xa_p + (ctx->key_sz - params.key_size), params.key,659params.key_size);660661ctx->fallback = false;662return 0;663664err_clear_ctx:665hpre_dh_clear_ctx(ctx, false);666return ret;667set_soft_secret:668ctx->fallback = true;669return crypto_kpp_set_secret(ctx->dh.soft_tfm, buf, len);670}671672static unsigned int hpre_dh_max_size(struct crypto_kpp *tfm)673{674struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);675676if (ctx->fallback)677return crypto_kpp_maxsize(ctx->dh.soft_tfm);678679return ctx->key_sz;680}681682static int hpre_dh_init_tfm(struct crypto_kpp *tfm)683{684struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);685const char *alg = kpp_alg_name(tfm);686unsigned int reqsize;687int ret;688689ctx->dh.soft_tfm = crypto_alloc_kpp(alg, 0, CRYPTO_ALG_NEED_FALLBACK);690if (IS_ERR(ctx->dh.soft_tfm)) {691pr_err("Failed to alloc dh tfm!\n");692return PTR_ERR(ctx->dh.soft_tfm);693}694695crypto_kpp_set_flags(ctx->dh.soft_tfm, crypto_kpp_get_flags(tfm));696697reqsize = max(sizeof(struct hpre_asym_request) + hpre_align_pd(),698sizeof(struct kpp_request) + crypto_kpp_reqsize(ctx->dh.soft_tfm));699kpp_set_reqsize(tfm, reqsize);700701ret = hpre_ctx_init(ctx, HPRE_V2_ALG_TYPE);702if (ret && ret != -ENODEV) {703crypto_free_kpp(ctx->dh.soft_tfm);704return ret;705} else if (ret == -ENODEV) {706ctx->fallback = true;707}708709return 0;710}711712static void hpre_dh_exit_tfm(struct crypto_kpp *tfm)713{714struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);715716hpre_dh_clear_ctx(ctx, true);717crypto_free_kpp(ctx->dh.soft_tfm);718}719720static void hpre_rsa_drop_leading_zeros(const char **ptr, size_t *len)721{722while (!**ptr && *len) {723(*ptr)++;724(*len)--;725}726}727728static bool hpre_rsa_key_size_is_support(unsigned int len)729{730unsigned int bits = len << HPRE_BITS_2_BYTES_SHIFT;731732#define _RSA_1024BITS_KEY_WDTH 1024733#define _RSA_2048BITS_KEY_WDTH 2048734#define 
_RSA_3072BITS_KEY_WDTH 3072735#define _RSA_4096BITS_KEY_WDTH 4096736737switch (bits) {738case _RSA_1024BITS_KEY_WDTH:739case _RSA_2048BITS_KEY_WDTH:740case _RSA_3072BITS_KEY_WDTH:741case _RSA_4096BITS_KEY_WDTH:742return true;743default:744return false;745}746}747748static int hpre_rsa_enc(struct akcipher_request *req)749{750struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);751struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);752void *tmp = akcipher_request_ctx(req);753struct hpre_asym_request *hpre_req = PTR_ALIGN(tmp, hpre_align_sz());754struct hpre_sqe *msg = &hpre_req->req;755int ret;756757/* For unsupported key size and unavailable devices, use soft tfm instead */758if (ctx->fallback) {759akcipher_request_set_tfm(req, ctx->rsa.soft_tfm);760ret = crypto_akcipher_encrypt(req);761akcipher_request_set_tfm(req, tfm);762return ret;763}764765if (unlikely(!ctx->rsa.pubkey))766return -EINVAL;767768ret = hpre_msg_request_set(ctx, req, true);769if (unlikely(ret))770return ret;771772msg->dw0 |= cpu_to_le32(HPRE_ALG_NC_NCRT);773msg->key = cpu_to_le64(ctx->rsa.dma_pubkey);774775ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0);776if (unlikely(ret))777goto clear_all;778779ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0);780if (unlikely(ret))781goto clear_all;782783/* success */784ret = hpre_send(ctx, msg);785if (likely(!ret))786return -EINPROGRESS;787788clear_all:789hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);790791return ret;792}793794static int hpre_rsa_dec(struct akcipher_request *req)795{796struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);797struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);798void *tmp = akcipher_request_ctx(req);799struct hpre_asym_request *hpre_req = PTR_ALIGN(tmp, hpre_align_sz());800struct hpre_sqe *msg = &hpre_req->req;801int ret;802803/* For unsupported key size and unavailable devices, use soft tfm instead */804if (ctx->fallback) {805akcipher_request_set_tfm(req, ctx->rsa.soft_tfm);806ret = 
crypto_akcipher_decrypt(req);807akcipher_request_set_tfm(req, tfm);808return ret;809}810811if (unlikely(!ctx->rsa.prikey))812return -EINVAL;813814ret = hpre_msg_request_set(ctx, req, true);815if (unlikely(ret))816return ret;817818if (ctx->crt_g2_mode) {819msg->key = cpu_to_le64(ctx->rsa.dma_crt_prikey);820msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) |821HPRE_ALG_NC_CRT);822} else {823msg->key = cpu_to_le64(ctx->rsa.dma_prikey);824msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) |825HPRE_ALG_NC_NCRT);826}827828ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0);829if (unlikely(ret))830goto clear_all;831832ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0);833if (unlikely(ret))834goto clear_all;835836/* success */837ret = hpre_send(ctx, msg);838if (likely(!ret))839return -EINPROGRESS;840841clear_all:842hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);843844return ret;845}846847static int hpre_rsa_set_n(struct hpre_ctx *ctx, const char *value,848size_t vlen, bool private)849{850const char *ptr = value;851852hpre_rsa_drop_leading_zeros(&ptr, &vlen);853854ctx->key_sz = vlen;855856/* if invalid key size provided, we use software tfm */857if (!hpre_rsa_key_size_is_support(ctx->key_sz)) {858ctx->fallback = true;859return 0;860}861862ctx->rsa.pubkey = dma_alloc_coherent(ctx->dev, vlen << 1,863&ctx->rsa.dma_pubkey,864GFP_KERNEL);865if (!ctx->rsa.pubkey)866return -ENOMEM;867868if (private) {869ctx->rsa.prikey = dma_alloc_coherent(ctx->dev, vlen << 1,870&ctx->rsa.dma_prikey,871GFP_KERNEL);872if (!ctx->rsa.prikey) {873dma_free_coherent(ctx->dev, vlen << 1,874ctx->rsa.pubkey,875ctx->rsa.dma_pubkey);876ctx->rsa.pubkey = NULL;877return -ENOMEM;878}879memcpy(ctx->rsa.prikey + vlen, ptr, vlen);880}881memcpy(ctx->rsa.pubkey + vlen, ptr, vlen);882883/* Using hardware HPRE to do RSA */884return 1;885}886887static int hpre_rsa_set_e(struct hpre_ctx *ctx, const char *value,888size_t vlen)889{890const char *ptr = value;891892hpre_rsa_drop_leading_zeros(&ptr, 
&vlen);893894if (!ctx->key_sz || !vlen || vlen > ctx->key_sz)895return -EINVAL;896897memcpy(ctx->rsa.pubkey + ctx->key_sz - vlen, ptr, vlen);898899return 0;900}901902static int hpre_rsa_set_d(struct hpre_ctx *ctx, const char *value,903size_t vlen)904{905const char *ptr = value;906907hpre_rsa_drop_leading_zeros(&ptr, &vlen);908909if (!ctx->key_sz || !vlen || vlen > ctx->key_sz)910return -EINVAL;911912memcpy(ctx->rsa.prikey + ctx->key_sz - vlen, ptr, vlen);913914return 0;915}916917static int hpre_crt_para_get(char *para, size_t para_sz,918const char *raw, size_t raw_sz)919{920const char *ptr = raw;921size_t len = raw_sz;922923hpre_rsa_drop_leading_zeros(&ptr, &len);924if (!len || len > para_sz)925return -EINVAL;926927memcpy(para + para_sz - len, ptr, len);928929return 0;930}931932static int hpre_rsa_setkey_crt(struct hpre_ctx *ctx, struct rsa_key *rsa_key)933{934unsigned int hlf_ksz = ctx->key_sz >> 1;935struct device *dev = ctx->dev;936u64 offset;937int ret;938939ctx->rsa.crt_prikey = dma_alloc_coherent(dev, hlf_ksz * HPRE_CRT_PRMS,940&ctx->rsa.dma_crt_prikey,941GFP_KERNEL);942if (!ctx->rsa.crt_prikey)943return -ENOMEM;944945ret = hpre_crt_para_get(ctx->rsa.crt_prikey, hlf_ksz,946rsa_key->dq, rsa_key->dq_sz);947if (ret)948goto free_key;949950offset = hlf_ksz;951ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,952rsa_key->dp, rsa_key->dp_sz);953if (ret)954goto free_key;955956offset = hlf_ksz * HPRE_CRT_Q;957ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,958rsa_key->q, rsa_key->q_sz);959if (ret)960goto free_key;961962offset = hlf_ksz * HPRE_CRT_P;963ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,964rsa_key->p, rsa_key->p_sz);965if (ret)966goto free_key;967968offset = hlf_ksz * HPRE_CRT_INV;969ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,970rsa_key->qinv, rsa_key->qinv_sz);971if (ret)972goto free_key;973974ctx->crt_g2_mode = true;975976return 0;977978free_key:979offset = hlf_ksz * 
HPRE_CRT_PRMS;980memzero_explicit(ctx->rsa.crt_prikey, offset);981dma_free_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, ctx->rsa.crt_prikey,982ctx->rsa.dma_crt_prikey);983ctx->rsa.crt_prikey = NULL;984ctx->crt_g2_mode = false;985986return ret;987}988989/* If it is clear all, all the resources of the QP will be cleaned. */990static void hpre_rsa_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all)991{992unsigned int half_key_sz = ctx->key_sz >> 1;993struct device *dev = ctx->dev;994995if (!ctx->qp)996return;997998if (ctx->rsa.pubkey) {999dma_free_coherent(dev, ctx->key_sz << 1,1000ctx->rsa.pubkey, ctx->rsa.dma_pubkey);1001ctx->rsa.pubkey = NULL;1002}10031004if (ctx->rsa.crt_prikey) {1005memzero_explicit(ctx->rsa.crt_prikey,1006half_key_sz * HPRE_CRT_PRMS);1007dma_free_coherent(dev, half_key_sz * HPRE_CRT_PRMS,1008ctx->rsa.crt_prikey, ctx->rsa.dma_crt_prikey);1009ctx->rsa.crt_prikey = NULL;1010}10111012if (ctx->rsa.prikey) {1013memzero_explicit(ctx->rsa.prikey, ctx->key_sz);1014dma_free_coherent(dev, ctx->key_sz << 1, ctx->rsa.prikey,1015ctx->rsa.dma_prikey);1016ctx->rsa.prikey = NULL;1017}10181019hpre_ctx_clear(ctx, is_clear_all);1020}10211022/*1023* we should judge if it is CRT or not,1024* CRT: return true, N-CRT: return false .1025*/1026static bool hpre_is_crt_key(struct rsa_key *key)1027{1028u16 len = key->p_sz + key->q_sz + key->dp_sz + key->dq_sz +1029key->qinv_sz;10301031#define LEN_OF_NCRT_PARA 510321033/* N-CRT less than 5 parameters */1034return len > LEN_OF_NCRT_PARA;1035}10361037static int hpre_rsa_setkey(struct hpre_ctx *ctx, const void *key,1038unsigned int keylen, bool private)1039{1040struct rsa_key rsa_key;1041int ret;10421043hpre_rsa_clear_ctx(ctx, false);10441045if (private)1046ret = rsa_parse_priv_key(&rsa_key, key, keylen);1047else1048ret = rsa_parse_pub_key(&rsa_key, key, keylen);1049if (ret < 0)1050return ret;10511052ret = hpre_rsa_set_n(ctx, rsa_key.n, rsa_key.n_sz, private);1053if (ret <= 0)1054return ret;10551056if (private) {1057ret = 
hpre_rsa_set_d(ctx, rsa_key.d, rsa_key.d_sz);1058if (ret < 0)1059goto free;10601061if (hpre_is_crt_key(&rsa_key)) {1062ret = hpre_rsa_setkey_crt(ctx, &rsa_key);1063if (ret < 0)1064goto free;1065}1066}10671068ret = hpre_rsa_set_e(ctx, rsa_key.e, rsa_key.e_sz);1069if (ret < 0)1070goto free;10711072if ((private && !ctx->rsa.prikey) || !ctx->rsa.pubkey) {1073ret = -EINVAL;1074goto free;1075}10761077ctx->fallback = false;1078return 0;10791080free:1081hpre_rsa_clear_ctx(ctx, false);1082return ret;1083}10841085static int hpre_rsa_setpubkey(struct crypto_akcipher *tfm, const void *key,1086unsigned int keylen)1087{1088struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);1089int ret;10901091ret = crypto_akcipher_set_pub_key(ctx->rsa.soft_tfm, key, keylen);1092if (ret)1093return ret;10941095if (!ctx->qp)1096return 0;10971098return hpre_rsa_setkey(ctx, key, keylen, false);1099}11001101static int hpre_rsa_setprivkey(struct crypto_akcipher *tfm, const void *key,1102unsigned int keylen)1103{1104struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);1105int ret;11061107ret = crypto_akcipher_set_priv_key(ctx->rsa.soft_tfm, key, keylen);1108if (ret)1109return ret;11101111if (!ctx->qp)1112return 0;11131114return hpre_rsa_setkey(ctx, key, keylen, true);1115}11161117static unsigned int hpre_rsa_max_size(struct crypto_akcipher *tfm)1118{1119struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);11201121/* For unsupported key size and unavailable devices, use soft tfm instead */1122if (ctx->fallback)1123return crypto_akcipher_maxsize(ctx->rsa.soft_tfm);11241125return ctx->key_sz;1126}11271128static int hpre_rsa_init_tfm(struct crypto_akcipher *tfm)1129{1130struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);1131int ret;11321133ctx->rsa.soft_tfm = crypto_alloc_akcipher("rsa-generic", 0, 0);1134if (IS_ERR(ctx->rsa.soft_tfm)) {1135pr_err("Can not alloc_akcipher!\n");1136return PTR_ERR(ctx->rsa.soft_tfm);1137}11381139akcipher_set_reqsize(tfm, sizeof(struct hpre_asym_request) +1140hpre_align_pd());11411142ret = 
hpre_ctx_init(ctx, HPRE_V2_ALG_TYPE);1143if (ret && ret != -ENODEV) {1144crypto_free_akcipher(ctx->rsa.soft_tfm);1145return ret;1146} else if (ret == -ENODEV) {1147ctx->fallback = true;1148}11491150return 0;1151}11521153static void hpre_rsa_exit_tfm(struct crypto_akcipher *tfm)1154{1155struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);11561157hpre_rsa_clear_ctx(ctx, true);1158crypto_free_akcipher(ctx->rsa.soft_tfm);1159}11601161static void hpre_key_to_big_end(u8 *data, int len)1162{1163int i, j;11641165for (i = 0; i < len / 2; i++) {1166j = len - i - 1;1167swap(data[j], data[i]);1168}1169}11701171static void hpre_ecc_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all)1172{1173struct device *dev = ctx->dev;1174unsigned int sz = ctx->key_sz;1175unsigned int shift = sz << 1;11761177if (ctx->ecdh.p) {1178/* ecdh: p->a->k->b */1179memzero_explicit(ctx->ecdh.p + shift, sz);1180dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p);1181ctx->ecdh.p = NULL;1182}11831184hpre_ctx_clear(ctx, is_clear_all);1185}11861187/*1188* The bits of 192/224/256/384/521 are supported by HPRE,1189* and convert the bits like:1190* bits<=256, bits=256; 256<bits<=384, bits=384; 384<bits<=576, bits=576;1191* If the parameter bit width is insufficient, then we fill in the1192* high-order zeros by soft, so TASK_LENGTH1 is 0x3/0x5/0x8;1193*/1194static unsigned int hpre_ecdh_supported_curve(unsigned short id)1195{1196switch (id) {1197case ECC_CURVE_NIST_P192:1198case ECC_CURVE_NIST_P256:1199return HPRE_ECC_HW256_KSZ_B;1200case ECC_CURVE_NIST_P384:1201return HPRE_ECC_HW384_KSZ_B;1202default:1203break;1204}12051206return 0;1207}12081209static void fill_curve_param(void *addr, u64 *param, unsigned int cur_sz, u8 ndigits)1210{1211unsigned int sz = cur_sz - (ndigits - 1) * sizeof(u64);1212u8 i = 0;12131214while (i < ndigits - 1) {1215memcpy(addr + sizeof(u64) * i, ¶m[i], sizeof(u64));1216i++;1217}12181219memcpy(addr + sizeof(u64) * i, ¶m[ndigits - 1], sz);1220hpre_key_to_big_end((u8 *)addr, 
			     cur_sz);
}

/*
 * Copy the curve parameters (p, a, b, Gx, Gy) into the pre-allocated
 * DMA-coherent parameter area at the fixed per-field offsets the HPRE
 * hardware expects, and reject a private key that is >= the curve order n.
 * Returns 0 on success, -EINVAL on bad curve/key, -ENOMEM on allocation
 * failure.
 */
static int hpre_ecdh_fill_curve(struct hpre_ctx *ctx, struct ecdh *params,
				unsigned int cur_sz)
{
	unsigned int shifta = ctx->key_sz << 1;
	unsigned int shiftb = ctx->key_sz << 2;
	/* each field occupies key_sz bytes; values are right-aligned in the slot */
	void *p = ctx->ecdh.p + ctx->key_sz - cur_sz;
	void *a = ctx->ecdh.p + shifta - cur_sz;
	void *b = ctx->ecdh.p + shiftb - cur_sz;
	void *x = ctx->ecdh.g + ctx->key_sz - cur_sz;
	void *y = ctx->ecdh.g + shifta - cur_sz;
	const struct ecc_curve *curve = ecc_get_curve(ctx->curve_id);
	char *n;

	if (unlikely(!curve))
		return -EINVAL;

	/* scratch copy of the curve order n, only needed for the range check */
	n = kzalloc(ctx->key_sz, GFP_KERNEL);
	if (!n)
		return -ENOMEM;

	fill_curve_param(p, curve->p, cur_sz, curve->g.ndigits);
	fill_curve_param(a, curve->a, cur_sz, curve->g.ndigits);
	fill_curve_param(b, curve->b, cur_sz, curve->g.ndigits);
	fill_curve_param(x, curve->g.x, cur_sz, curve->g.ndigits);
	fill_curve_param(y, curve->g.y, cur_sz, curve->g.ndigits);
	fill_curve_param(n, curve->n, cur_sz, curve->g.ndigits);

	/* a full-size private key must be numerically smaller than n */
	if (params->key_size == cur_sz && memcmp(params->key, n, cur_sz) >= 0) {
		kfree(n);
		return -EINVAL;
	}

	kfree(n);
	return 0;
}

/* Byte size of the curve order n for a NIST curve id; 0 if unsupported. */
static unsigned int hpre_ecdh_get_curvesz(unsigned short id)
{
	switch (id) {
	case ECC_CURVE_NIST_P192:
		return HPRE_ECC_NIST_P192_N_SIZE;
	case ECC_CURVE_NIST_P256:
		return HPRE_ECC_NIST_P256_N_SIZE;
	case ECC_CURVE_NIST_P384:
		return HPRE_ECC_NIST_P384_N_SIZE;
	default:
		break;
	}

	return 0;
}

/*
 * Validate the decoded ECDH parameters, (lazily) allocate the DMA-coherent
 * curve-parameter buffer and fill it.  On fill failure the buffer is freed
 * and ctx->ecdh.p reset to NULL so a later set_secret can retry cleanly.
 */
static int hpre_ecdh_set_param(struct hpre_ctx *ctx, struct ecdh *params)
{
	struct device *dev = ctx->dev;
	unsigned int sz, shift, curve_sz;
	int ret;

	ctx->key_sz = hpre_ecdh_supported_curve(ctx->curve_id);
	if (!ctx->key_sz)
		return -EINVAL;

	curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id);
	if (!curve_sz || params->key_size > curve_sz)
		return -EINVAL;

	sz = ctx->key_sz;

	if (!ctx->ecdh.p) {
		/* sz << 3: room for curve params plus the generator point G */
		ctx->ecdh.p = dma_alloc_coherent(dev, sz << 3, &ctx->ecdh.dma_p,
						 GFP_KERNEL);
		if (!ctx->ecdh.p)
			return -ENOMEM;
	}

	/* G lives at offset 4 * key_sz inside the same coherent buffer */
	shift = sz << 2;
	ctx->ecdh.g = ctx->ecdh.p + shift;
	ctx->ecdh.dma_g = ctx->ecdh.dma_p + shift;

	ret = hpre_ecdh_fill_curve(ctx, params, curve_sz);
	if (ret) {
		dev_err(dev, "failed to fill curve_param, ret = %d!\n", ret);
		dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p);
		ctx->ecdh.p = NULL;
		return ret;
	}

	return 0;
}

/* True iff the key buffer is all-zero bytes (an invalid private key). */
static bool hpre_key_is_zero(const char *key, unsigned short key_sz)
{
	int i;

	for (i = 0; i < key_sz; i++)
		if (key[i])
			return false;

	return true;
}

/* Generate a random private key into params->key via the default stdrng. */
static int ecdh_gen_privkey(struct hpre_ctx *ctx, struct ecdh *params)
{
	struct device *dev = ctx->dev;
	int ret;

	ret = crypto_get_default_rng();
	if (ret) {
		dev_err(dev, "failed to get default rng, ret = %d!\n", ret);
		return ret;
	}

	ret = crypto_rng_get_bytes(crypto_default_rng, (u8 *)params->key,
				   params->key_size);
	crypto_put_default_rng();
	if (ret)
		dev_err(dev, "failed to get rng, ret = %d!\n", ret);

	return ret;
}

/*
 * kpp set_secret handler: decode the caller's ECDH blob, generate a private
 * key if none was supplied, validate it, program the curve parameters into
 * the hardware buffer and stash the private key at offset 3 * key_sz
 * (right-aligned).  Delegates entirely to the software tfm in fallback mode.
 */
static int hpre_ecdh_set_secret(struct crypto_kpp *tfm, const void *buf,
				unsigned int len)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
	unsigned int sz, sz_shift, curve_sz;
	struct device *dev = ctx->dev;
	char key[HPRE_ECC_MAX_KSZ];
	struct ecdh params;
	int ret;

	if (ctx->fallback)
		return crypto_kpp_set_secret(ctx->ecdh.soft_tfm, buf, len);

	if (crypto_ecdh_decode_key(buf, len, &params) < 0) {
		dev_err(dev, "failed to decode ecdh key!\n");
		return -EINVAL;
	}

	/* Use stdrng to generate private key */
	if (!params.key || !params.key_size) {
		params.key = key;
		curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id);
		if (!curve_sz) {
			dev_err(dev, "Invalid curve size!\n");
			return -EINVAL;
		}

		/* one byte short of n's size keeps the key below the order n */
		params.key_size = curve_sz - 1;
		ret = ecdh_gen_privkey(ctx, &params);
		if (ret)
			return ret;
	}

	if (hpre_key_is_zero(params.key, params.key_size)) {
		dev_err(dev, "Invalid hpre key!\n");
		return -EINVAL;
	}

	/* drop any previously programmed key material before re-keying */
	hpre_ecc_clear_ctx(ctx, false);

	ret = hpre_ecdh_set_param(ctx, &params);
	if (ret < 0) {
		dev_err(dev, "failed to set hpre param, ret = %d!\n", ret);
		return ret;
	}

	sz = ctx->key_sz;
	/* private key slot starts at 2 * key_sz; value is right-aligned */
	sz_shift = (sz << 1) + sz - params.key_size;
	memcpy(ctx->ecdh.p + sz_shift, params.key, params.key_size);

	return 0;
}

/*
 * Release the per-request DMA resources recorded in the SQE: the coherent
 * source buffer (when one was allocated) and the streaming mapping of the
 * destination scatterlist.  DMA_MAPPING_ERROR sentinels mark fields that
 * were never successfully set up.
 */
static void hpre_ecdh_hw_data_clr_all(struct hpre_ctx *ctx,
				      struct hpre_asym_request *req,
				      struct scatterlist *dst,
				      struct scatterlist *src)
{
	struct device *dev = ctx->dev;
	struct hpre_sqe *sqe = &req->req;
	dma_addr_t dma;

	dma = le64_to_cpu(sqe->in);
	if (unlikely(dma_mapping_error(dev, dma)))
		return;

	if (src && req->src)
		dma_free_coherent(dev, ctx->key_sz << 2, req->src, dma);

	dma = le64_to_cpu(sqe->out);
	if (unlikely(dma_mapping_error(dev, dma)))
		return;

	if (req->dst)
		dma_free_coherent(dev, ctx->key_sz << 1, req->dst, dma);
	if (dst)
		dma_unmap_single(dev, dma, ctx->key_sz << 1, DMA_FROM_DEVICE);
}

/*
 * Completion callback for an ECDH SQE.  Unmaps the request's DMA buffers,
 * compacts the hardware's right-aligned (x, y) output into a packed
 * curve-sized point at the start of the destination buffer, completes the
 * kpp request and updates the DFX counters.
 */
static void hpre_ecdh_cb(struct hpre_ctx *ctx, void *resp)
{
	unsigned int curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id);
	struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
	struct hpre_asym_request *req = NULL;
	struct kpp_request *areq;
	u64 overtime_thrhld;
	char *p;
	int ret;

	ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);
	areq = req->areq.ecdh;
	areq->dst_len = ctx->key_sz << 1;

	overtime_thrhld = atomic64_read(&dfx[HPRE_OVERTIME_THRHLD].value);
	if (overtime_thrhld && hpre_is_bd_timeout(req, overtime_thrhld))
		atomic64_inc(&dfx[HPRE_OVER_THRHLD_CNT].value);

	/* Do unmap before data processing */
	hpre_ecdh_hw_data_clr_all(ctx, req, areq->dst, areq->src);

	/* pack x then y contiguously; each was right-aligned in a key_sz slot */
	p = sg_virt(areq->dst);
	memmove(p, p + ctx->key_sz - curve_sz, curve_sz);
	memmove(p + curve_sz, p + areq->dst_len - curve_sz, curve_sz);

	kpp_request_complete(areq, ret);

	atomic64_inc(&dfx[HPRE_RECV_CNT].value);
}

/*
 * Initialize the SQE embedded in the request context: completion callback,
 * curve-parameter key address, task length and tag.  in/out are preset to
 * DMA_MAPPING_ERROR so the cleanup path can tell what was actually mapped.
 * Fails with -EINVAL (reporting the required size) if dst is too small.
 */
static int hpre_ecdh_msg_request_set(struct hpre_ctx *ctx,
				     struct kpp_request *req)
{
	struct hpre_asym_request *h_req;
	struct hpre_sqe *msg;
	void *tmp;

	if (req->dst_len < ctx->key_sz << 1) {
		req->dst_len = ctx->key_sz << 1;
		return -EINVAL;
	}

	tmp = kpp_request_ctx(req);
	h_req = PTR_ALIGN(tmp, hpre_align_sz());
	h_req->cb = hpre_ecdh_cb;
	h_req->areq.ecdh = req;
	msg = &h_req->req;
	memset(msg, 0, sizeof(*msg));
	msg->in = cpu_to_le64(DMA_MAPPING_ERROR);
	msg->out = cpu_to_le64(DMA_MAPPING_ERROR);
	msg->key = cpu_to_le64(ctx->ecdh.dma_p);

	msg->dw0 |= cpu_to_le32(0x1U << HPRE_SQE_DONE_SHIFT);
	/* task length is expressed in 8-byte units, minus one */
	msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1;
	h_req->ctx = ctx;

	hpre_dfx_add_req_time(h_req);
	msg->tag = cpu_to_le64((uintptr_t)h_req);
	return 0;
}

/*
 * Copy the caller's public point (gx || gy) into a freshly allocated
 * coherent buffer, re-laying each coordinate right-aligned in its own
 * key_sz slot as the hardware requires, and record it as the SQE input.
 */
static int hpre_ecdh_src_data_init(struct hpre_asym_request *hpre_req,
				   struct scatterlist *data, unsigned int len)
{
	struct hpre_sqe *msg = &hpre_req->req;
	struct hpre_ctx *ctx = hpre_req->ctx;
	struct device *dev = ctx->dev;
	unsigned int tmpshift;
	dma_addr_t dma = 0;
	void *ptr;
	int shift;

	/* Src_data include gx and gy. */
	shift = ctx->key_sz - (len >> 1);
	if (unlikely(shift < 0))
		return -EINVAL;

	ptr = dma_alloc_coherent(dev, ctx->key_sz << 2, &dma, GFP_KERNEL);
	if (unlikely(!ptr))
		return -ENOMEM;

	/* stage the raw sg data in the upper half, then align into place */
	tmpshift = ctx->key_sz << 1;
	scatterwalk_map_and_copy(ptr + tmpshift, data, 0, len, 0);
	memcpy(ptr + shift, ptr + tmpshift, len >> 1);
	memcpy(ptr + ctx->key_sz + shift, ptr + tmpshift + (len >> 1), len >> 1);

	hpre_req->src = ptr;
	msg->in = cpu_to_le64(dma);
	return 0;
}

/*
 * Map the (single-entry, exactly 2 * key_sz bytes) destination scatterlist
 * for device writes and record it as the SQE output.
 */
static int hpre_ecdh_dst_data_init(struct hpre_asym_request *hpre_req,
				   struct scatterlist *data, unsigned int len)
{
	struct hpre_sqe *msg = &hpre_req->req;
	struct hpre_ctx *ctx = hpre_req->ctx;
	struct device *dev = ctx->dev;
	dma_addr_t dma;

	if (unlikely(!data || !sg_is_last(data) || len != ctx->key_sz << 1)) {
		dev_err(dev, "data or data length is illegal!\n");
		return -EINVAL;
	}

	hpre_req->dst = NULL;
	dma = dma_map_single(dev, sg_virt(data), len, DMA_FROM_DEVICE);
	if (unlikely(dma_mapping_error(dev, dma))) {
		dev_err(dev, "dma map data err!\n");
		return -ENOMEM;
	}

	msg->out = cpu_to_le64(dma);
	return 0;
}

/*
 * Common submit path for both ECDH operations: build the SQE, set up the
 * input (caller's point, or the generator G when req->src is NULL, i.e.
 * public-key generation), map the output and send the ECC point-multiply
 * to the hardware.  Returns -EINPROGRESS on successful submission;
 * completion is reported asynchronously via hpre_ecdh_cb().
 */
static int hpre_ecdh_compute_value(struct kpp_request *req)
{
	struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
	struct device *dev = ctx->dev;
	void *tmp = kpp_request_ctx(req);
	struct hpre_asym_request *hpre_req = PTR_ALIGN(tmp, hpre_align_sz());
	struct hpre_sqe *msg = &hpre_req->req;
	int ret;

	ret = hpre_ecdh_msg_request_set(ctx, req);
	if (unlikely(ret)) {
		dev_err(dev, "failed to set ecdh request, ret = %d!\n", ret);
		return ret;
	}

	if (req->src) {
		ret = hpre_ecdh_src_data_init(hpre_req, req->src, req->src_len);
		if (unlikely(ret)) {
			dev_err(dev, "failed to init src data, ret = %d!\n", ret);
			goto clear_all;
		}
	} else {
		msg->in = cpu_to_le64(ctx->ecdh.dma_g);
	}

	ret = hpre_ecdh_dst_data_init(hpre_req, req->dst, req->dst_len);
	if (unlikely(ret)) {
		dev_err(dev, "failed to init dst data, ret = %d!\n", ret);
		goto clear_all;
	}

	msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_ECC_MUL);
	/* route to the high-performance core when enabled for this curve */
	msg->resv1 = ctx->enable_hpcore << HPRE_ENABLE_HPCORE_SHIFT;

	ret = hpre_send(ctx, msg);
	if (likely(!ret))
		return -EINPROGRESS;

clear_all:
	hpre_ecdh_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);
	return ret;
}

/* kpp generate_public_key: software fallback or hardware point-multiply. */
static int hpre_ecdh_generate_public_key(struct kpp_request *req)
{
	struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
	int ret;

	if (ctx->fallback) {
		kpp_request_set_tfm(req, ctx->ecdh.soft_tfm);
		ret = crypto_kpp_generate_public_key(req);
		kpp_request_set_tfm(req, tfm);
		return ret;
	}

	return hpre_ecdh_compute_value(req);
}

/* kpp compute_shared_secret: software fallback or hardware point-multiply. */
static int hpre_ecdh_compute_shared_secret(struct kpp_request *req)
{
	struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
	int ret;

	if (ctx->fallback) {
		kpp_request_set_tfm(req, ctx->ecdh.soft_tfm);
		ret = crypto_kpp_compute_shared_secret(req);
		kpp_request_set_tfm(req, tfm);
		return ret;
	}

	return hpre_ecdh_compute_value(req);
}

static unsigned int hpre_ecdh_max_size(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	if (ctx->fallback)
		return crypto_kpp_maxsize(ctx->ecdh.soft_tfm);

	/* max size is the pub_key_size, include x and y */
	return ctx->key_sz << 1;
}

/*
 * Common tfm init: try to bind a hardware queue; if no device is available
 * (-ENODEV) fall back to the software kpp implementation of the same
 * algorithm, otherwise propagate the error.
 */
static int hpre_ecdh_init_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
	const char *alg = kpp_alg_name(tfm);
	int ret;

	ret = hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
	if (!ret) {
		kpp_set_reqsize(tfm, sizeof(struct hpre_asym_request) + hpre_align_pd());
		return 0;
	} else if (ret && ret != -ENODEV) {
		return ret;
	}

	ctx->ecdh.soft_tfm = crypto_alloc_kpp(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->ecdh.soft_tfm)) {
		pr_err("Failed to alloc %s tfm!\n", alg);
		return PTR_ERR(ctx->ecdh.soft_tfm);
	}

	crypto_kpp_set_flags(ctx->ecdh.soft_tfm, crypto_kpp_get_flags(tfm));
	ctx->fallback = true;

	return 0;
}

static int hpre_ecdh_nist_p192_init_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	ctx->curve_id = ECC_CURVE_NIST_P192;

	return hpre_ecdh_init_tfm(tfm);
}

static int hpre_ecdh_nist_p256_init_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	ctx->curve_id = ECC_CURVE_NIST_P256;
	/* P-256 is offloaded to the high-performance core */
	ctx->enable_hpcore = 1;

	return hpre_ecdh_init_tfm(tfm);
}

static int hpre_ecdh_nist_p384_init_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	ctx->curve_id = ECC_CURVE_NIST_P384;

	return hpre_ecdh_init_tfm(tfm);
}

/* tfm teardown: free the soft fallback tfm, or wipe and free key material. */
static void hpre_ecdh_exit_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_kpp(ctx->ecdh.soft_tfm);
		return;
	}

	hpre_ecc_clear_ctx(ctx, true);
}

static struct akcipher_alg rsa = {
	.encrypt = hpre_rsa_enc,
	.decrypt = hpre_rsa_dec,
	.set_pub_key = hpre_rsa_setpubkey,
	.set_priv_key = hpre_rsa_setprivkey,
	.max_size = hpre_rsa_max_size,
	.init = hpre_rsa_init_tfm,
	.exit = hpre_rsa_exit_tfm,
	.base = {
		.cra_ctxsize = sizeof(struct hpre_ctx),
		.cra_priority = HPRE_CRYPTO_ALG_PRI,
		.cra_name = "rsa",
		.cra_driver_name = "hpre-rsa",
		.cra_module = THIS_MODULE,
		.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
	},
};

static struct kpp_alg dh = {
	.set_secret = hpre_dh_set_secret,
	.generate_public_key = hpre_dh_generate_public_key,
	.compute_shared_secret = hpre_dh_compute_shared_secret,
	.max_size = hpre_dh_max_size,
	.init = hpre_dh_init_tfm,
	.exit = hpre_dh_exit_tfm,
	.base = {
		.cra_ctxsize = sizeof(struct hpre_ctx),
		.cra_priority = HPRE_CRYPTO_ALG_PRI,
		.cra_name = "dh",
		.cra_driver_name = "hpre-dh",
		.cra_module = THIS_MODULE,
		.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
	},
};

static struct kpp_alg ecdh_curves[] = {
	{
		.set_secret = hpre_ecdh_set_secret,
		.generate_public_key = hpre_ecdh_generate_public_key,
		.compute_shared_secret = hpre_ecdh_compute_shared_secret,
		.max_size = hpre_ecdh_max_size,
		.init = hpre_ecdh_nist_p192_init_tfm,
		.exit = hpre_ecdh_exit_tfm,
		.base = {
			.cra_ctxsize = sizeof(struct hpre_ctx),
			.cra_priority = HPRE_CRYPTO_ALG_PRI,
			.cra_name = "ecdh-nist-p192",
			.cra_driver_name = "hpre-ecdh-nist-p192",
			.cra_module = THIS_MODULE,
			.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
		},
	}, {
		.set_secret = hpre_ecdh_set_secret,
		.generate_public_key = hpre_ecdh_generate_public_key,
		.compute_shared_secret = hpre_ecdh_compute_shared_secret,
		.max_size = hpre_ecdh_max_size,
		.init = hpre_ecdh_nist_p256_init_tfm,
		.exit = hpre_ecdh_exit_tfm,
		.base = {
			.cra_ctxsize = sizeof(struct hpre_ctx),
			.cra_priority = HPRE_CRYPTO_ALG_PRI,
			.cra_name = "ecdh-nist-p256",
			.cra_driver_name = "hpre-ecdh-nist-p256",
			.cra_module = THIS_MODULE,
			.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
		},
	}, {
		.set_secret = hpre_ecdh_set_secret,
		.generate_public_key = hpre_ecdh_generate_public_key,
		.compute_shared_secret = hpre_ecdh_compute_shared_secret,
		.max_size = hpre_ecdh_max_size,
		.init = hpre_ecdh_nist_p384_init_tfm,
		.exit = hpre_ecdh_exit_tfm,
		.base = {
			.cra_ctxsize = sizeof(struct hpre_ctx),
			.cra_priority = HPRE_CRYPTO_ALG_PRI,
			.cra_name = "ecdh-nist-p384",
			.cra_driver_name = "hpre-ecdh-nist-p384",
			.cra_module = THIS_MODULE,
			.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
		},
	}
};

/* Register the RSA akcipher if this device advertises RSA capability. */
static int hpre_register_rsa(struct hisi_qm *qm)
{
	int ret;

	if (!hpre_check_alg_support(qm, HPRE_DRV_RSA_MASK_CAP))
		return 0;

	/*
	 * NOTE(review): NEED_FALLBACK is cleared before registration here,
	 * unlike dh/ecdh — presumably the hw path needs no generic fallback
	 * once the capability bit is set; confirm against driver history.
	 */
	rsa.base.cra_flags = 0;
	ret = crypto_register_akcipher(&rsa);
	if (ret)
		dev_err(&qm->pdev->dev, "failed to register rsa (%d)!\n", ret);

	return ret;
}

static void hpre_unregister_rsa(struct hisi_qm *qm)
{
	if (!hpre_check_alg_support(qm, HPRE_DRV_RSA_MASK_CAP))
		return;

	crypto_unregister_akcipher(&rsa);
}

/* Register the DH kpp if this device advertises DH capability. */
static int hpre_register_dh(struct hisi_qm *qm)
{
	int ret;

	if (!hpre_check_alg_support(qm, HPRE_DRV_DH_MASK_CAP))
		return 0;

	ret = crypto_register_kpp(&dh);
	if (ret)
		dev_err(&qm->pdev->dev, "failed to register dh (%d)!\n", ret);

	return ret;
}

static void hpre_unregister_dh(struct hisi_qm *qm)
{
	if (!hpre_check_alg_support(qm, HPRE_DRV_DH_MASK_CAP))
		return;

	crypto_unregister_kpp(&dh);
}

/*
 * Register every supported ECDH curve; on failure unwind the curves
 * registered so far and return the error.
 */
static int hpre_register_ecdh(struct hisi_qm *qm)
{
	int ret, i;

	if (!hpre_check_alg_support(qm, HPRE_DRV_ECDH_MASK_CAP))
		return 0;

	for (i = 0; i < ARRAY_SIZE(ecdh_curves); i++) {
		ret = crypto_register_kpp(&ecdh_curves[i]);
		if (ret) {
			dev_err(&qm->pdev->dev, "failed to register %s (%d)!\n",
				ecdh_curves[i].base.cra_name, ret);
			goto unreg_kpp;
		}
	}

	return 0;

unreg_kpp:
	for (--i; i >= 0; --i)
		crypto_unregister_kpp(&ecdh_curves[i]);

	return ret;
}

static void hpre_unregister_ecdh(struct hisi_qm *qm)
{
	int i;

	if (!hpre_check_alg_support(qm, HPRE_DRV_ECDH_MASK_CAP))
		return;

	for (i = ARRAY_SIZE(ecdh_curves) - 1; i >= 0; --i)
		crypto_unregister_kpp(&ecdh_curves[i]);
}

/*
 * Register all HPRE algorithms with the crypto API.  Registration happens
 * only for the first device; subsequent devices just bump the refcount
 * under hpre_algs_lock.  Unwinds in reverse order on partial failure.
 */
int hpre_algs_register(struct hisi_qm *qm)
{
	int ret = 0;

	mutex_lock(&hpre_algs_lock);
	if (hpre_available_devs) {
		hpre_available_devs++;
		goto unlock;
	}

	ret = hpre_register_rsa(qm);
	if (ret)
		goto unlock;

	ret = hpre_register_dh(qm);
	if (ret)
		goto unreg_rsa;

	ret = hpre_register_ecdh(qm);
	if (ret)
		goto unreg_dh;

	hpre_available_devs++;
	mutex_unlock(&hpre_algs_lock);

	return ret;

unreg_dh:
	hpre_unregister_dh(qm);
unreg_rsa:
	hpre_unregister_rsa(qm);
unlock:
	mutex_unlock(&hpre_algs_lock);
	return ret;
}

/* Drop a device reference; unregister the algorithms when the last one goes. */
void hpre_algs_unregister(struct hisi_qm *qm)
{
	mutex_lock(&hpre_algs_lock);
	if (--hpre_available_devs)
		goto unlock;

	hpre_unregister_ecdh(qm);
	hpre_unregister_dh(qm);
	hpre_unregister_rsa(qm);

unlock:
	mutex_unlock(&hpre_algs_lock);
}