/* Path: drivers/crypto/aspeed/aspeed-hace-hash.c */
// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (c) 2021 Aspeed Technology Inc.
 *
 * Driver for the hash function block of the Aspeed HACE (Hash and Crypto
 * Engine).  Implements SHA-1/SHA-224/SHA-256 (all supported SoCs) and
 * SHA-384/SHA-512 (AST2600 only) as asynchronous ahash algorithms driven
 * through the crypto_engine framework.
 */

#include "aspeed-hace.h"
#include <crypto/engine.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

#ifdef CONFIG_CRYPTO_DEV_ASPEED_DEBUG
#define AHASH_DBG(h, fmt, ...) \
	dev_info((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#else
#define AHASH_DBG(h, fmt, ...) \
	dev_dbg((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#endif

/* Initialization Vectors for SHA-family */
static const __be32 sha1_iv[8] = {
	/* SHA-1 uses only 5 words of state; the trailing 3 words are zero. */
	cpu_to_be32(SHA1_H0), cpu_to_be32(SHA1_H1),
	cpu_to_be32(SHA1_H2), cpu_to_be32(SHA1_H3),
	cpu_to_be32(SHA1_H4), 0, 0, 0
};

static const __be32 sha224_iv[8] = {
	cpu_to_be32(SHA224_H0), cpu_to_be32(SHA224_H1),
	cpu_to_be32(SHA224_H2), cpu_to_be32(SHA224_H3),
	cpu_to_be32(SHA224_H4), cpu_to_be32(SHA224_H5),
	cpu_to_be32(SHA224_H6), cpu_to_be32(SHA224_H7),
};

static const __be32 sha256_iv[8] = {
	cpu_to_be32(SHA256_H0), cpu_to_be32(SHA256_H1),
	cpu_to_be32(SHA256_H2), cpu_to_be32(SHA256_H3),
	cpu_to_be32(SHA256_H4), cpu_to_be32(SHA256_H5),
	cpu_to_be32(SHA256_H6), cpu_to_be32(SHA256_H7),
};

static const __be64 sha384_iv[8] = {
	cpu_to_be64(SHA384_H0), cpu_to_be64(SHA384_H1),
	cpu_to_be64(SHA384_H2), cpu_to_be64(SHA384_H3),
	cpu_to_be64(SHA384_H4), cpu_to_be64(SHA384_H5),
	cpu_to_be64(SHA384_H6), cpu_to_be64(SHA384_H7)
};

static const __be64 sha512_iv[8] = {
	cpu_to_be64(SHA512_H0), cpu_to_be64(SHA512_H1),
	cpu_to_be64(SHA512_H2), cpu_to_be64(SHA512_H3),
	cpu_to_be64(SHA512_H4), cpu_to_be64(SHA512_H5),
	cpu_to_be64(SHA512_H6), cpu_to_be64(SHA512_H7)
};

static int aspeed_sham_init(struct ahash_request *req);
static int aspeed_ahash_req_update(struct aspeed_hace_dev *hace_dev);

/*
 * Serialize the partial-hash state into @out: the raw digest words
 * (rctx->ivsize bytes) followed by the 64-bit byte counter(s).  The
 * second counter word is written only for the 64-byte-state (SHA-384/512)
 * algorithms, which track a 128-bit message length.
 */
static int aspeed_sham_export(struct ahash_request *req, void *out)
{
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	/* Walk @out as bytes, then as u64 slots, without casting twice. */
	union {
		u8 *u8;
		u64 *u64;
	} p = { .u8 = out };

	memcpy(out, rctx->digest, rctx->ivsize);
	p.u8 += rctx->ivsize;
	/* put_unaligned: @out carries no alignment guarantee. */
	put_unaligned(rctx->digcnt[0], p.u64++);
	if (rctx->ivsize == 64)
		put_unaligned(rctx->digcnt[1], p.u64);
	return 0;
}

/*
 * Restore a partial-hash state previously produced by aspeed_sham_export().
 * Re-runs aspeed_sham_init() first so cmd/flags/sizes are rebuilt for this
 * tfm, then overwrites digest and counters from @in.
 */
static int aspeed_sham_import(struct ahash_request *req, const void *in)
{
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	union {
		const u8 *u8;
		const u64 *u64;
	} p = { .u8 = in };
	int err;

	err = aspeed_sham_init(req);
	if (err)
		return err;

	memcpy(rctx->digest, in, rctx->ivsize);
	p.u8 += rctx->ivsize;
	rctx->digcnt[0] = get_unaligned(p.u64++);
	if (rctx->ivsize == 64)
		rctx->digcnt[1] = get_unaligned(p.u64);
	return 0;
}

/* The purpose of this padding is to ensure that the padded message is a
 * multiple of 512 bits (SHA1/SHA224/SHA256) or 1024 bits (SHA384/SHA512).
 * The bit "1" is appended at the end of the message followed by
 * "padlen-1" zero bits. Then a 64 bits block (SHA1/SHA224/SHA256) or
 * 128 bits block (SHA384/SHA512) equals to the message length in bits
 * is appended.
 *
 * For SHA1/SHA224/SHA256, padlen is calculated as followed:
 *  - if message length < 56 bytes then padlen = 56 - message length
 *  - else padlen = 64 + 56 - message length
 *
 * For SHA384/SHA512, padlen is calculated as followed:
 *  - if message length < 112 bytes then padlen = 112 - message length
 *  - else padlen = 128 + 112 - message length
 */
/*
 * Write the final MD-style padding for the message consumed so far
 * (rctx->digcnt) into @buf.  Returns the number of bytes written
 * (padlen + length field), so the caller can extend the last DMA chunk.
 */
static int aspeed_ahash_fill_padding(struct aspeed_hace_dev *hace_dev,
				     struct aspeed_sham_reqctx *rctx, u8 *buf)
{
	unsigned int index, padlen, bitslen;
	__be64 bits[2];

	AHASH_DBG(hace_dev, "rctx flags:0x%x\n", (u32)rctx->flags);

	switch (rctx->flags & SHA_FLAGS_MASK) {
	case SHA_FLAGS_SHA1:
	case SHA_FLAGS_SHA224:
	case SHA_FLAGS_SHA256:
		/* 64-bit big-endian bit count; block size 64 bytes. */
		bits[0] = cpu_to_be64(rctx->digcnt[0] << 3);
		index = rctx->digcnt[0] & 0x3f;
		padlen = (index < 56) ? (56 - index) : ((64 + 56) - index);
		bitslen = 8;
		break;
	default:
		/*
		 * SHA-384/512: 128-bit big-endian bit count.  bits[0] holds
		 * the high 64 bits (carry from digcnt[1] plus the bits
		 * shifted out of digcnt[0]); bits[1] the low 64 bits.
		 * memcpy below emits bits[0] first, i.e. most significant
		 * word first as the spec requires.
		 */
		bits[1] = cpu_to_be64(rctx->digcnt[0] << 3);
		bits[0] = cpu_to_be64(rctx->digcnt[1] << 3 |
				      rctx->digcnt[0] >> 61);
		index = rctx->digcnt[0] & 0x7f;
		padlen = (index < 112) ? (112 - index) : ((128 + 112) - index);
		bitslen = 16;
		break;
	}
	buf[0] = 0x80;	/* the mandatory "1" bit */
	memset(buf + 1, 0, padlen - 1);
	memcpy(buf + padlen, bits, bitslen);
	return padlen + bitslen;
}

/*
 * Account @len bytes as consumed: advance the scatterlist offset and the
 * 128-bit running message-length counter (digcnt[1] is the carry word).
 */
static void aspeed_ahash_update_counter(struct aspeed_sham_reqctx *rctx,
					unsigned int len)
{
	rctx->offset += len;
	rctx->digcnt[0] += len;
	if (rctx->digcnt[0] < len)	/* unsigned wrap => carry */
		rctx->digcnt[1]++;
}

/*
 * Prepare DMA buffer before hardware engine
 * processing.
 */
static int aspeed_ahash_dma_prepare(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	unsigned int length, remain;
	bool final = false;

	length = rctx->total - rctx->offset;
	/* Bytes past the last whole block; held back unless finalizing. */
	remain = length - round_down(length, rctx->block_size);

	AHASH_DBG(hace_dev, "length:0x%x, remain:0x%x\n", length, remain);

	if (length > ASPEED_HASH_SRC_DMA_BUF_LEN)
		length = ASPEED_HASH_SRC_DMA_BUF_LEN;
	else if (rctx->flags & SHA_FLAGS_FINUP) {
		/*
		 * Finalize only if data plus worst-case padding fits in the
		 * bounce buffer; otherwise process whole blocks now and
		 * finalize on a later pass.
		 * NOTE(review): this bound uses ASPEED_CRYPTO_SRC_DMA_BUF_LEN
		 * while the cap above uses ASPEED_HASH_SRC_DMA_BUF_LEN —
		 * presumably the two are equal; verify in aspeed-hace.h.
		 */
		if (round_up(length, rctx->block_size) + rctx->block_size >
		    ASPEED_CRYPTO_SRC_DMA_BUF_LEN)
			length = round_down(length - 1, rctx->block_size);
		else
			final = true;
	} else
		length -= remain;
	/* Copy the source scatterlist into the engine's bounce buffer. */
	scatterwalk_map_and_copy(hash_engine->ahash_src_addr, rctx->src_sg,
				 rctx->offset, length, 0);
	aspeed_ahash_update_counter(rctx, length);
	if (final)
		length += aspeed_ahash_fill_padding(
			hace_dev, rctx, hash_engine->ahash_src_addr + length);

	/*
	 * Always map the full SHA512_DIGEST_SIZE: the digest buffer doubles
	 * as the engine's read/write state area regardless of algorithm.
	 */
	rctx->digest_dma_addr = dma_map_single(hace_dev->dev, rctx->digest,
					       SHA512_DIGEST_SIZE,
					       DMA_BIDIRECTIONAL);
	if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) {
		dev_warn(hace_dev->dev, "dma_map() rctx digest error\n");
		return -ENOMEM;
	}

	hash_engine->src_length = length;
	hash_engine->src_dma = hash_engine->ahash_src_dma_addr;
	hash_engine->digest_dma = rctx->digest_dma_addr;

	return 0;
}

/*
 * Prepare DMA buffer as SG list buffer before
 * hardware engine processing.
 */
static int aspeed_ahash_dma_prepare_sg(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	bool final = rctx->flags & SHA_FLAGS_FINUP;
	int remain, sg_len, i, max_sg_nents;
	unsigned int length, offset, total;
	struct aspeed_sg_list *src_list;
	struct scatterlist *s;
	int rc = 0;

	offset = rctx->offset;
	length = rctx->total - offset;
	/* When finalizing, the tail need not be a whole block. */
	remain = final ? 0 : length - round_down(length, rctx->block_size);
	length -= remain;

	AHASH_DBG(hace_dev, "%s:0x%x, %s:0x%x, %s:0x%x\n",
		  "rctx total", rctx->total,
		  "length", length, "remain", remain);

	sg_len = dma_map_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents,
			    DMA_TO_DEVICE);
	if (!sg_len) {
		dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
		rc = -ENOMEM;
		goto end;
	}

	/* Reserve one descriptor slot for the padding block if finalizing. */
	max_sg_nents = ASPEED_HASH_SRC_DMA_BUF_LEN / sizeof(*src_list) - final;
	sg_len = min(sg_len, max_sg_nents);
	/* The bounce buffer is reused as the hardware SG descriptor table. */
	src_list = (struct aspeed_sg_list *)hash_engine->ahash_src_addr;
	rctx->digest_dma_addr = dma_map_single(hace_dev->dev, rctx->digest,
					       SHA512_DIGEST_SIZE,
					       DMA_BIDIRECTIONAL);
	if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) {
		dev_warn(hace_dev->dev, "dma_map() rctx digest error\n");
		rc = -ENOMEM;
		goto free_src_sg;
	}

	total = 0;
	for_each_sg(rctx->src_sg, s, sg_len, i) {
		u32 phy_addr = sg_dma_address(s);
		u32 len = sg_dma_len(s);

		/* Skip entries that lie entirely before rctx->offset. */
		if (len <= offset) {
			offset -= len;
			continue;
		}

		len -= offset;
		phy_addr += offset;
		offset = 0;

		if (length > len)
			length -= len;
		else {
			/* Last sg list */
			len = length;
			length = 0;
		}

		total += len;
		src_list[i].phy_addr = cpu_to_le32(phy_addr);
		src_list[i].len = cpu_to_le32(len);
	}

	if (length != 0) {
		/*
		 * Descriptor table was too small for all the data: process
		 * only whole blocks now and defer finalization.
		 */
		total = round_down(total, rctx->block_size);
		final = false;
	}

	aspeed_ahash_update_counter(rctx, total);
	if (final) {
		/* Append the padding block as one extra SG descriptor. */
		int len = aspeed_ahash_fill_padding(hace_dev, rctx,
						    rctx->buffer);

		total += len;
		rctx->buffer_dma_addr = dma_map_single(hace_dev->dev,
						       rctx->buffer,
						       sizeof(rctx->buffer),
						       DMA_TO_DEVICE);
		if (dma_mapping_error(hace_dev->dev, rctx->buffer_dma_addr)) {
			dev_warn(hace_dev->dev, "dma_map() rctx buffer error\n");
			rc = -ENOMEM;
			goto free_rctx_digest;
		}

		src_list[i].phy_addr = cpu_to_le32(rctx->buffer_dma_addr);
		src_list[i].len = cpu_to_le32(len);
		i++;
	}
	/* Mark the final descriptor so the engine stops there. */
	src_list[i - 1].len |= cpu_to_le32(HASH_SG_LAST_LIST);

	hash_engine->src_length = total;
	hash_engine->src_dma = hash_engine->ahash_src_dma_addr;
	hash_engine->digest_dma = rctx->digest_dma_addr;

	return 0;

free_rctx_digest:
	dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr,
			 SHA512_DIGEST_SIZE, DMA_BIDIRECTIONAL);
free_src_sg:
	dma_unmap_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents,
		     DMA_TO_DEVICE);
end:
	return rc;
}

/*
 * Completion path after one hardware pass: unmap the digest, either kick
 * off another pass (data left over) or copy out the result and hand the
 * request back to the crypto engine.
 */
static int aspeed_ahash_complete(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);

	AHASH_DBG(hace_dev, "\n");

	dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr,
			 SHA512_DIGEST_SIZE, DMA_BIDIRECTIONAL);

	/* More data pending (a full block, or any remainder on finup)? */
	if (rctx->total - rctx->offset >= rctx->block_size ||
	    (rctx->total != rctx->offset && rctx->flags & SHA_FLAGS_FINUP))
		return aspeed_ahash_req_update(hace_dev);

	hash_engine->flags &= ~CRYPTO_FLAGS_BUSY;

	if (rctx->flags & SHA_FLAGS_FINUP)
		memcpy(req->result, rctx->digest, rctx->digsize);

	/* Second argument reports unconsumed bytes back to the core. */
	crypto_finalize_hash_request(hace_dev->crypt_engine_hash, req,
				     rctx->total - rctx->offset);

	return 0;
}

/*
 * Trigger hardware engines to do the math.
 */
static int aspeed_hace_ahash_trigger(struct aspeed_hace_dev *hace_dev,
				     aspeed_hace_fn_t resume)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);

	AHASH_DBG(hace_dev, "src_dma:%pad, digest_dma:%pad, length:%zu\n",
		  &hash_engine->src_dma, &hash_engine->digest_dma,
		  hash_engine->src_length);

	rctx->cmd |= HASH_CMD_INT_ENABLE;
	hash_engine->resume = resume;	/* called from the IRQ path */

	ast_hace_write(hace_dev, hash_engine->src_dma, ASPEED_HACE_HASH_SRC);
	ast_hace_write(hace_dev, hash_engine->digest_dma,
		       ASPEED_HACE_HASH_DIGEST_BUFF);
	/* Key buffer doubles as the chaining-state input in accumulate mode. */
	ast_hace_write(hace_dev, hash_engine->digest_dma,
		       ASPEED_HACE_HASH_KEY_BUFF);
	ast_hace_write(hace_dev, hash_engine->src_length,
		       ASPEED_HACE_HASH_DATA_LEN);

	/* Memory barrier to ensure all data setup before engine starts */
	mb();

	ast_hace_write(hace_dev, rctx->cmd, ASPEED_HACE_HASH_CMD);

	return -EINPROGRESS;
}

/*
 * Resume handler for the SG-mode (AST2600) path: undo the SG and padding
 * DMA mappings made by aspeed_ahash_dma_prepare_sg(), then run the common
 * completion logic.
 */
static int aspeed_ahash_update_resume_sg(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);

	AHASH_DBG(hace_dev, "\n");

	dma_unmap_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents,
		     DMA_TO_DEVICE);

	/* The padding buffer was only mapped if this pass finalized. */
	if (rctx->flags & SHA_FLAGS_FINUP && rctx->total == rctx->offset)
		dma_unmap_single(hace_dev->dev, rctx->buffer_dma_addr,
				 sizeof(rctx->buffer), DMA_TO_DEVICE);

	rctx->cmd &= ~HASH_CMD_HASH_SRC_SG_CTRL;

	return aspeed_ahash_complete(hace_dev);
}

/*
 * Run one hardware pass over the pending request data, selecting the
 * SG-list path on AST2600 and the bounce-buffer path otherwise.
 */
static int aspeed_ahash_req_update(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	struct ahash_request *req = hash_engine->req;
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	aspeed_hace_fn_t resume;
	int ret;

	AHASH_DBG(hace_dev, "\n");

	if (hace_dev->version == AST2600_VERSION) {
		rctx->cmd |= HASH_CMD_HASH_SRC_SG_CTRL;
		resume = aspeed_ahash_update_resume_sg;

	} else {
		resume = aspeed_ahash_complete;
	}

	ret = hash_engine->dma_prepare(hace_dev);
	if (ret)
		return ret;

	return aspeed_hace_ahash_trigger(hace_dev, resume);
}

/* Queue the request on the crypto engine for asynchronous processing. */
static int aspeed_hace_hash_handle_queue(struct aspeed_hace_dev *hace_dev,
					 struct ahash_request *req)
{
	return crypto_transfer_hash_request_to_engine(
			hace_dev->crypt_engine_hash, req);
}

/*
 * Software fallback used when the hardware path fails: export the current
 * hardware state, import it into an on-stack fallback request, process the
 * remaining data there, and (for update-only) import the resulting state
 * back so the hardware can continue later.
 */
static noinline int aspeed_ahash_fallback(struct ahash_request *req)
{
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	HASH_FBREQ_ON_STACK(fbreq, req);
	u8 *state = rctx->buffer;	/* reuse as state scratch space */
	struct scatterlist sg[2];
	struct scatterlist *ssg;
	int ret;

	/* Skip the bytes the hardware already consumed. */
	ssg = scatterwalk_ffwd(sg, req->src, rctx->offset);
	ahash_request_set_crypt(fbreq, ssg, req->result,
				rctx->total - rctx->offset);

	ret = aspeed_sham_export(req, state) ?:
	      crypto_ahash_import_core(fbreq, state);

	if (rctx->flags & SHA_FLAGS_FINUP)
		ret = ret ?: crypto_ahash_finup(fbreq);
	else
		ret = ret ?: crypto_ahash_update(fbreq) ?:
			     crypto_ahash_export_core(fbreq, state) ?:
			     aspeed_sham_import(req, state);
	HASH_REQUEST_ZERO(fbreq);
	return ret;
}

/*
 * crypto_engine worker: run the request on the hardware, falling back to
 * the software implementation if the hardware path does not start
 * (any return other than -EINPROGRESS).
 */
static int aspeed_ahash_do_request(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = ahash_request_cast(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;
	struct aspeed_engine_hash *hash_engine;
	int ret;

	hash_engine = &hace_dev->hash_engine;
	hash_engine->flags |= CRYPTO_FLAGS_BUSY;

	ret = aspeed_ahash_req_update(hace_dev);
	if (ret != -EINPROGRESS)
		return aspeed_ahash_fallback(req);

	return 0;
}

/* Bind the request to the engine and pick the DMA-prepare strategy. */
static void aspeed_ahash_prepare_request(struct crypto_engine *engine,
					 void *areq)
{
	struct ahash_request *req = ahash_request_cast(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;
	struct aspeed_engine_hash *hash_engine;

	hash_engine = &hace_dev->hash_engine;
	hash_engine->req = req;

	if (hace_dev->version == AST2600_VERSION)
		hash_engine->dma_prepare = aspeed_ahash_dma_prepare_sg;
	else
		hash_engine->dma_prepare = aspeed_ahash_dma_prepare;
}

/* crypto_engine do_one_request callback: prepare, then run. */
static int aspeed_ahash_do_one(struct crypto_engine *engine, void *areq)
{
	aspeed_ahash_prepare_request(engine, areq);
	return aspeed_ahash_do_request(engine, areq);
}

/* .update entry point: record the new data span and queue the request. */
static int aspeed_sham_update(struct ahash_request *req)
{
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;

	AHASH_DBG(hace_dev, "req->nbytes: %d\n", req->nbytes);

	rctx->total = req->nbytes;
	rctx->src_sg = req->src;
	rctx->offset = 0;
	rctx->src_nents = sg_nents_for_len(req->src, req->nbytes);

	return aspeed_hace_hash_handle_queue(hace_dev, req);
}

/* .finup entry point: same as update, but with finalization requested. */
static int aspeed_sham_finup(struct ahash_request *req)
{
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;

	AHASH_DBG(hace_dev, "req->nbytes: %d\n", req->nbytes);

	rctx->flags |= SHA_FLAGS_FINUP;

	return aspeed_sham_update(req);
}

/*
 * .init entry point: select the algorithm by digest size and seed the
 * request context (command word, flags, block/digest/state sizes, IV).
 * Note ivsize is the engine state size, not the digest size: 32 bytes for
 * the SHA-256 family (SHA-1 included) and 64 bytes for SHA-384/512.
 */
static int aspeed_sham_init(struct ahash_request *req)
{
	struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = tctx->hace_dev;

	AHASH_DBG(hace_dev, "%s: digest size:%d\n",
		  crypto_tfm_alg_name(&tfm->base),
		  crypto_ahash_digestsize(tfm));

	rctx->cmd = HASH_CMD_ACC_MODE;
	rctx->flags = 0;

	switch (crypto_ahash_digestsize(tfm)) {
	case SHA1_DIGEST_SIZE:
		rctx->cmd |= HASH_CMD_SHA1 | HASH_CMD_SHA_SWAP;
		rctx->flags |= SHA_FLAGS_SHA1;
		rctx->digsize = SHA1_DIGEST_SIZE;
		rctx->block_size = SHA1_BLOCK_SIZE;
		rctx->ivsize = 32;
		memcpy(rctx->digest, sha1_iv, rctx->ivsize);
		break;
	case SHA224_DIGEST_SIZE:
		rctx->cmd |= HASH_CMD_SHA224 | HASH_CMD_SHA_SWAP;
		rctx->flags |= SHA_FLAGS_SHA224;
		rctx->digsize = SHA224_DIGEST_SIZE;
		rctx->block_size = SHA224_BLOCK_SIZE;
		rctx->ivsize = 32;
		memcpy(rctx->digest, sha224_iv, rctx->ivsize);
		break;
	case SHA256_DIGEST_SIZE:
		rctx->cmd |= HASH_CMD_SHA256 | HASH_CMD_SHA_SWAP;
		rctx->flags |= SHA_FLAGS_SHA256;
		rctx->digsize = SHA256_DIGEST_SIZE;
		rctx->block_size = SHA256_BLOCK_SIZE;
		rctx->ivsize = 32;
		memcpy(rctx->digest, sha256_iv, rctx->ivsize);
		break;
	case SHA384_DIGEST_SIZE:
		rctx->cmd |= HASH_CMD_SHA512_SER | HASH_CMD_SHA384 |
			     HASH_CMD_SHA_SWAP;
		rctx->flags |= SHA_FLAGS_SHA384;
		rctx->digsize = SHA384_DIGEST_SIZE;
		rctx->block_size = SHA384_BLOCK_SIZE;
		rctx->ivsize = 64;
		memcpy(rctx->digest, sha384_iv, rctx->ivsize);
		break;
	case SHA512_DIGEST_SIZE:
		rctx->cmd |= HASH_CMD_SHA512_SER | HASH_CMD_SHA512 |
			     HASH_CMD_SHA_SWAP;
		rctx->flags |= SHA_FLAGS_SHA512;
		rctx->digsize = SHA512_DIGEST_SIZE;
		rctx->block_size = SHA512_BLOCK_SIZE;
		rctx->ivsize = 64;
		memcpy(rctx->digest, sha512_iv, rctx->ivsize);
		break;
	default:
		dev_warn(tctx->hace_dev->dev, "digest size %d not support\n",
			 crypto_ahash_digestsize(tfm));
		return -EINVAL;
	}

	rctx->total = 0;
	rctx->digcnt[0] = 0;
	rctx->digcnt[1] = 0;

	return 0;
}

/* .digest entry point: one-shot init + finup. */
static int aspeed_sham_digest(struct ahash_request *req)
{
	return aspeed_sham_init(req) ?: aspeed_sham_finup(req);
}

/* .init_tfm: stash the device pointer from the registering alg wrapper. */
static int aspeed_sham_cra_init(struct crypto_ahash *tfm)
{
	struct ahash_alg *alg = crypto_ahash_alg(tfm);
	struct aspeed_sham_ctx *tctx = crypto_ahash_ctx(tfm);
	struct aspeed_hace_alg *ast_alg;

	ast_alg = container_of(alg, struct aspeed_hace_alg, alg.ahash.base);
	tctx->hace_dev = ast_alg->hace_dev;

	return 0;
}

/* Algorithms available on every supported HACE revision. */
static struct aspeed_hace_alg aspeed_ahash_algs[] = {
	{
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.init_tfm = aspeed_sham_cra_init,
			.halg = {
				.digestsize = SHA1_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "sha1",
					.cra_driver_name = "aspeed-sha1",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_AHASH_ALG_BLOCK_ONLY |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA1_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx),
					.cra_reqsize = sizeof(struct aspeed_sham_reqctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
	{
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.init_tfm = aspeed_sham_cra_init,
			.halg = {
				.digestsize = SHA256_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "sha256",
					.cra_driver_name = "aspeed-sha256",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_AHASH_ALG_BLOCK_ONLY |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA256_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx),
					.cra_reqsize = sizeof(struct aspeed_sham_reqctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
	{
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.init_tfm = aspeed_sham_cra_init,
			.halg = {
				.digestsize = SHA224_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "sha224",
					.cra_driver_name = "aspeed-sha224",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_AHASH_ALG_BLOCK_ONLY |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA224_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx),
					.cra_reqsize = sizeof(struct aspeed_sham_reqctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
};

/* Algorithms only available on AST2600 (6th-generation) hardware. */
static struct aspeed_hace_alg aspeed_ahash_algs_g6[] = {
	{
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.init_tfm = aspeed_sham_cra_init,
			.halg = {
				.digestsize = SHA384_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "sha384",
					.cra_driver_name = "aspeed-sha384",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_AHASH_ALG_BLOCK_ONLY |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA384_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx),
					.cra_reqsize = sizeof(struct aspeed_sham_reqctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
	{
		.alg.ahash.base = {
			.init = aspeed_sham_init,
			.update = aspeed_sham_update,
			.finup = aspeed_sham_finup,
			.digest = aspeed_sham_digest,
			.export = aspeed_sham_export,
			.import = aspeed_sham_import,
			.init_tfm = aspeed_sham_cra_init,
			.halg = {
				.digestsize = SHA512_DIGEST_SIZE,
				.statesize = sizeof(struct aspeed_sham_reqctx),
				.base = {
					.cra_name = "sha512",
					.cra_driver_name = "aspeed-sha512",
					.cra_priority = 300,
					.cra_flags = CRYPTO_ALG_TYPE_AHASH |
						     CRYPTO_ALG_ASYNC |
						     CRYPTO_AHASH_ALG_BLOCK_ONLY |
						     CRYPTO_ALG_KERN_DRIVER_ONLY,
					.cra_blocksize = SHA512_BLOCK_SIZE,
					.cra_ctxsize = sizeof(struct aspeed_sham_ctx),
					.cra_reqsize = sizeof(struct aspeed_sham_reqctx),
					.cra_alignmask = 0,
					.cra_module = THIS_MODULE,
				}
			}
		},
		.alg.ahash.op = {
			.do_one_request = aspeed_ahash_do_one,
		},
	},
};

/* Unregister all hash algorithms registered for this device. */
void aspeed_unregister_hace_hash_algs(struct aspeed_hace_dev *hace_dev)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aspeed_ahash_algs); i++)
		crypto_engine_unregister_ahash(&aspeed_ahash_algs[i].alg.ahash);

	if (hace_dev->version != AST2600_VERSION)
		return;

	for (i = 0; i < ARRAY_SIZE(aspeed_ahash_algs_g6); i++)
		crypto_engine_unregister_ahash(&aspeed_ahash_algs_g6[i].alg.ahash);
}

/*
 * Register the base hash algorithms, plus the SHA-384/512 set on AST2600.
 * Registration failures are logged (debug level) but not propagated.
 */
void aspeed_register_hace_hash_algs(struct aspeed_hace_dev *hace_dev)
{
	int rc, i;

	AHASH_DBG(hace_dev, "\n");

	for (i = 0; i < ARRAY_SIZE(aspeed_ahash_algs); i++) {
		aspeed_ahash_algs[i].hace_dev = hace_dev;
		rc = crypto_engine_register_ahash(&aspeed_ahash_algs[i].alg.ahash);
		if (rc) {
			AHASH_DBG(hace_dev, "Failed to register %s\n",
				  aspeed_ahash_algs[i].alg.ahash.base.halg.base.cra_name);
		}
	}

	if (hace_dev->version != AST2600_VERSION)
		return;

	for (i = 0; i < ARRAY_SIZE(aspeed_ahash_algs_g6); i++) {
		aspeed_ahash_algs_g6[i].hace_dev = hace_dev;
		rc = crypto_engine_register_ahash(&aspeed_ahash_algs_g6[i].alg.ahash);
		if (rc) {
			AHASH_DBG(hace_dev, "Failed to register %s\n",
				  aspeed_ahash_algs_g6[i].alg.ahash.base.halg.base.cra_name);
		}
	}
}