/* Source: arch/x86/crypto/aesni-intel_glue.c */
/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <[email protected]>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <[email protected]>
 *             Gabriele Paoloni <[email protected]>
 *             Tadeusz Struk ([email protected])
 *             Aidan O'Mahony ([email protected])
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <asm/i387.h>
#include <asm/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>

/* The generic CTR/LRW/PCBC/XTS templates may be built in or modular;
 * each HAS_* macro is set when the corresponding template exists at all. */
#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif

/* Per-tfm context for the async (cryptd-backed) block cipher wrappers. */
struct async_aes_ctx {
	struct cryptd_ablkcipher *cryptd_tfm;
};

/* This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16];
	struct crypto_aes_ctx aes_key_expanded;
	u8 nonce[4];
	struct cryptd_aead *cryptd_tfm;
};

/* Completion bookkeeping for the async GHASH-subkey derivation below. */
struct aesni_gcm_set_hash_subkey_result {
	int err;
	struct completion
	completion;
};

/* Scratch data for the one-off CTR request that derives the GHASH subkey. */
struct aesni_hash_subkey_req_data {
	u8 iv[16];
	struct aesni_gcm_set_hash_subkey_result result;
	struct scatterlist sg;
};

#define AESNI_ALIGN	(16)
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))
#define RFC4106_HASH_SUBKEY_SIZE 16

/* Assembler entry points; implemented in the companion .S file. */
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

/* Provided by the FPU-template helper (fpu.c). */
int crypto_fpu_init(void);
void crypto_fpu_exit(void);

#ifdef CONFIG_X86_64
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

/* asmlinkage void aesni_gcm_enc()
 * void *ctx,  AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 4 byte salt (from Security Association)
 *         concatenated with 8 byte Initialisation Vector (from IPSec ESP
 *         Payload) concatenated with 0x00000001. 16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this
 *         is going to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 4 byte salt (from Security Association)
 *         concatenated with 8 byte Initialisation Vector (from IPSec ESP
 *         Payload) concatenated with 0x00000001. 16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

/* Return the 16-byte-aligned RFC4106 context stored in the aead tfm. */
static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	return
		(struct aesni_rfc4106_gcm_ctx *)
		PTR_ALIGN((u8 *)
		crypto_tfm_ctx(crypto_aead_tfm(tfm)), AESNI_ALIGN);
}
#endif

/* Align the raw tfm context up to AESNI_ALIGN, unless the crypto layer
 * already guarantees at least that alignment. */
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

/* Expand the AES key schedule; uses the AES-NI instruction when the FPU
 * is usable in this context, and the C fallback otherwise. */
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}

/* setkey entry for the plain "aes" cipher algs below. */
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

/* Single-block encrypt; falls back to the x86 C implementation when the
 * FPU cannot be used (e.g. in interrupt context with user FPU state live). */
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void
aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	/* Single-block decrypt with C fallback when the FPU is unavailable. */
	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}

/* Public single-block "aes" cipher (with FPU fallback). */
static struct crypto_alg aesni_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aesni_alg.cra_list),
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
};

/* Raw AES-NI encrypt; callers must guarantee the FPU is usable. */
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

/* Raw AES-NI decrypt; callers must guarantee the FPU is usable. */
static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}

/* Internal-only cipher (no FPU fallback), used underneath templates. */
static struct crypto_alg __aesni_alg = {
	.cra_name		= "__aes-aesni",
	.cra_driver_name	= "__driver-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(__aesni_alg.cra_list),
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
};

/* ECB encrypt: walk the scatterlists and process whole blocks with AES-NI. */
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx =
		aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	/* Must not sleep between kernel_fpu_begin()/end(). */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

/* ECB decrypt counterpart of ecb_encrypt(). */
static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

/* Internal synchronous ECB alg, wrapped by the async "ecb(aes)" below. */
static struct crypto_alg blk_ecb_alg = {
	.cra_name		= "__ecb-aes-aesni",
	.cra_driver_name	= "__driver-ecb-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ecb_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
};

/* CBC encrypt using the chained AES-NI assembler routine. */
static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct
	       blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	/* Must not sleep between kernel_fpu_begin()/end(). */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

/* CBC decrypt counterpart of cbc_encrypt(). */
static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

/* Internal synchronous CBC alg, wrapped by the async "cbc(aes)" below. */
static struct crypto_alg blk_cbc_alg = {
	.cra_name		= "__cbc-aes-aesni",
	.cra_driver_name	= "__driver-cbc-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_cbc_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
};

#ifdef CONFIG_X86_64
/* Handle the final partial block of a CTR request: encrypt the counter
 * block and XOR just the remaining bytes of keystream into the output. */
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned
	         int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
	/* Bump the counter so a subsequent call continues the stream. */
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

/* CTR encrypt/decrypt (identical operations): bulk blocks via the
 * assembler routine, trailing partial block via ctr_crypt_final(). */
static int ctr_crypt(struct blkcipher_desc *desc,
		     struct scatterlist *dst, struct scatterlist *src,
		     unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_fpu_end();

	return err;
}

/* Internal synchronous CTR alg (blocksize 1 — stream-cipher semantics). */
static struct crypto_alg blk_ctr_alg = {
	.cra_name		= "__ctr-aes-aesni",
	.cra_driver_name	= "__driver-ctr-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ctr_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
};
#endif

/* Forward setkey to the cryptd child, mirroring request/result flags. */
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
			unsigned int key_len)
{
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(child,
				    crypto_ablkcipher_get_flags(tfm)
				    & CRYPTO_TFM_REQ_MASK);
	err = crypto_ablkcipher_setkey(child, key, key_len);
	crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
				    & CRYPTO_TFM_RES_MASK);
	return err;
}

/* Encrypt: run synchronously through the child blkcipher when the FPU is
 * usable, otherwise defer to the cryptd workqueue. */
static int ablk_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (!irq_fpu_usable()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_encrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;
		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->encrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}

/* Decrypt counterpart of ablk_encrypt(). */
static int ablk_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (!irq_fpu_usable()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_decrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;
		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->decrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}

/* Release the cryptd child on tfm teardown. */
static void ablk_exit(struct crypto_tfm *tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ablkcipher(ctx->cryptd_tfm);
}

/* Common init: remember the cryptd child and size the request context so
 * a deferred request can be embedded in the caller's request. */
static void ablk_init_common(struct crypto_tfm *tfm,
			     struct cryptd_ablkcipher *cryptd_tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->cryptd_tfm = cryptd_tfm;
	tfm->crt_ablkcipher.reqsize = sizeof(struct
					     ablkcipher_request) +
		crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}

/* Instantiate the async ECB wrapper over the internal driver alg. */
static int ablk_ecb_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

/* Public async "ecb(aes)". */
static struct crypto_alg ablk_ecb_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
	.cra_init		= ablk_ecb_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};

/* Instantiate the async CBC wrapper over the internal driver alg. */
static int ablk_cbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

/* Public async "cbc(aes)". */
static struct crypto_alg ablk_cbc_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
	.cra_init		= ablk_cbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	=
			  ablk_decrypt,
		},
	},
};

#ifdef CONFIG_X86_64
/* Instantiate the async CTR wrapper over the internal driver alg. */
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ctr-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

/* Public async "ctr(aes)". */
static struct crypto_alg ablk_ctr_alg = {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
	.cra_init		= ablk_ctr_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			/* NOTE(review): decrypt is wired to ablk_encrypt.
			 * For CTR mode both directions are the same keystream
			 * XOR, so behaviour matches ablk_decrypt — confirm
			 * this is intentional and not a copy/paste slip. */
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
};

#ifdef HAS_CTR
/* Instantiate the RFC3686 (CTR with nonce) wrapper via cryptd. */
static int ablk_rfc3686_ctr_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher(
		"rfc3686(__driver-ctr-aes-aesni)", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

/* Public async "rfc3686(ctr(aes))"; keys carry a trailing 4-byte nonce. */
static struct crypto_alg ablk_rfc3686_ctr_alg = {
	.cra_name		= "rfc3686(ctr(aes))",
	.cra_driver_name	= "rfc3686-ctr-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_rfc3686_ctr_alg.cra_list),
	.cra_init		= ablk_rfc3686_ctr_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
			.max_keysize =
				       AES_MAX_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
			.ivsize	     = CTR_RFC3686_IV_SIZE,
			.setkey	     = ablk_set_key,
			.encrypt     = ablk_encrypt,
			.decrypt     = ablk_decrypt,
			.geniv	     = "seqiv",
		},
	},
};
#endif
#endif

#ifdef HAS_LRW
/* Instantiate LRW over the raw AES-NI cipher, guarded by the fpu template. */
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

/* Public async "lrw(aes)"; the key carries an extra tweak block. */
static struct crypto_alg ablk_lrw_alg = {
	.cra_name		= "lrw(aes)",
	.cra_driver_name	= "lrw-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
	.cra_init		= ablk_lrw_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_PCBC
/* Instantiate PCBC over the raw AES-NI cipher, guarded by the fpu template. */
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

/* Public async "pcbc(aes)". */
static struct crypto_alg ablk_pcbc_alg = {
	.cra_name		= "pcbc(aes)",
	.cra_driver_name	= "pcbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
	.cra_init
				= ablk_pcbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_XTS
/* Instantiate XTS over the raw AES-NI cipher, guarded by the fpu template. */
static int ablk_xts_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

/* Public async "xts(aes)"; XTS uses two full AES keys. */
static struct crypto_alg ablk_xts_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_xts_alg.cra_list),
	.cra_init		= ablk_xts_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

#ifdef CONFIG_X86_64
/* Allocate the cryptd AEAD child for rfc4106 and mirror our context into
 * it; also size the request context for deferred (cryptd) requests. */
static int rfc4106_init(struct crypto_tfm *tfm)
{
	struct cryptd_aead *cryptd_tfm;
	struct aesni_rfc4106_gcm_ctx *ctx = (struct aesni_rfc4106_gcm_ctx *)
		PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
	struct crypto_aead *cryptd_child;
	struct aesni_rfc4106_gcm_ctx *child_ctx;
	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	cryptd_child = cryptd_aead_child(cryptd_tfm);
	child_ctx = aesni_rfc4106_gcm_ctx_get(cryptd_child);
	memcpy(child_ctx, ctx, sizeof(*ctx));
	ctx->cryptd_tfm = cryptd_tfm;
	tfm->crt_aead.reqsize = sizeof(struct aead_request)
		+
		  crypto_aead_reqsize(&cryptd_tfm->base);
	return 0;
}

/* Free the cryptd AEAD child on tfm teardown. */
static void rfc4106_exit(struct crypto_tfm *tfm)
{
	struct aesni_rfc4106_gcm_ctx *ctx =
		(struct aesni_rfc4106_gcm_ctx *)
		PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
	if (!IS_ERR(ctx->cryptd_tfm))
		cryptd_free_aead(ctx->cryptd_tfm);
	return;
}

/* Async completion callback for the hash-subkey CTR request: record the
 * final status and wake the waiter. */
static void
rfc4106_set_hash_subkey_done(struct crypto_async_request *req, int err)
{
	struct aesni_gcm_set_hash_subkey_result *result = req->data;

	if (err == -EINPROGRESS)
		return;
	result->err = err;
	complete(&result->completion);
}

/* Derive the GHASH subkey H = E_K(0^128) by running a one-off ctr(aes)
 * encryption of an all-zero block with a zero IV. */
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_ablkcipher *ctr_tfm;
	struct ablkcipher_request *req;
	int ret = -EINVAL;
	struct aesni_hash_subkey_req_data *req_data;

	ctr_tfm = crypto_alloc_ablkcipher("ctr(aes)", 0, 0);
	if (IS_ERR(ctr_tfm))
		return PTR_ERR(ctr_tfm);

	crypto_ablkcipher_clear_flags(ctr_tfm, ~0);

	ret = crypto_ablkcipher_setkey(ctr_tfm, key, key_len);
	if (ret)
		goto out_free_ablkcipher;

	ret = -ENOMEM;
	req = ablkcipher_request_alloc(ctr_tfm, GFP_KERNEL);
	if (!req)
		goto out_free_ablkcipher;

	req_data = kmalloc(sizeof(*req_data), GFP_KERNEL);
	if (!req_data)
		goto out_free_request;

	memset(req_data->iv, 0, sizeof(req_data->iv));

	/* Clear the data in the hash sub key container to zero.*/
	/* We want to cipher all zeros to create the hash sub key.
	 */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	init_completion(&req_data->result.completion);
	sg_init_one(&req_data->sg, hash_subkey, RFC4106_HASH_SUBKEY_SIZE);
	ablkcipher_request_set_tfm(req, ctr_tfm);
	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
					CRYPTO_TFM_REQ_MAY_BACKLOG,
					rfc4106_set_hash_subkey_done,
					&req_data->result);

	ablkcipher_request_set_crypt(req, &req_data->sg,
		&req_data->sg, RFC4106_HASH_SUBKEY_SIZE, req_data->iv);

	ret = crypto_ablkcipher_encrypt(req);
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		/* NOTE(review): an interruptible wait can return early on a
		 * signal while the request is still in flight, after which
		 * req_data is freed below — verify whether this needs an
		 * uninterruptible wait instead. */
		ret = wait_for_completion_interruptible
			(&req_data->result.completion);
		if (!ret)
			ret = req_data->result.err;
	}
	kfree(req_data);
out_free_request:
	ablkcipher_request_free(req);
out_free_ablkcipher:
	crypto_free_ablkcipher(ctr_tfm);
	return ret;
}

/* RFC4106 setkey: the supplied key is an AES-128 key followed by a 4-byte
 * nonce (salt). Expands the key, derives the GHASH subkey and mirrors the
 * context into the cryptd child. */
static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
			   unsigned int key_len)
{
	int ret = 0;
	struct crypto_tfm *tfm = crypto_aead_tfm(parent);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
	struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
	struct aesni_rfc4106_gcm_ctx *child_ctx =
		aesni_rfc4106_gcm_ctx_get(cryptd_child);
	u8 *new_key_mem = NULL;

	if (key_len < 4) {
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	/*Account for 4 byte nonce at the end.*/
	key_len -= 4;
	if (key_len != AES_KEYSIZE_128) {
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));
	/*This must be on a 16 byte boundary!*/
	if ((unsigned long)(&(ctx->aes_key_expanded.key_enc[0])) % AESNI_ALIGN)
		return -EINVAL;

	if ((unsigned long)key % AESNI_ALIGN) {
		/*key is not aligned: use an auxuliar aligned pointer*/
		new_key_mem = kmalloc(key_len+AESNI_ALIGN, GFP_KERNEL);
		if (!new_key_mem)
			return -ENOMEM;

		/* NOTE(review): new_key_mem is advanced past the kmalloc()
		 * return value here and the advanced pointer is later passed
		 * to kfree() — confirm kmalloc alignment makes PTR_ALIGN a
		 * no-op here, otherwise this frees an invalid pointer. */
		new_key_mem = PTR_ALIGN(new_key_mem,
AESNI_ALIGN);963memcpy(new_key_mem, key, key_len);964key = new_key_mem;965}966967if (!irq_fpu_usable())968ret = crypto_aes_expand_key(&(ctx->aes_key_expanded),969key, key_len);970else {971kernel_fpu_begin();972ret = aesni_set_key(&(ctx->aes_key_expanded), key, key_len);973kernel_fpu_end();974}975/*This must be on a 16 byte boundary!*/976if ((unsigned long)(&(ctx->hash_subkey[0])) % AESNI_ALIGN) {977ret = -EINVAL;978goto exit;979}980ret = rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);981memcpy(child_ctx, ctx, sizeof(*ctx));982exit:983kfree(new_key_mem);984return ret;985}986987/* This is the Integrity Check Value (aka the authentication tag length and can988* be 8, 12 or 16 bytes long. */989static int rfc4106_set_authsize(struct crypto_aead *parent,990unsigned int authsize)991{992struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);993struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);994995switch (authsize) {996case 8:997case 12:998case 16:999break;1000default:1001return -EINVAL;1002}1003crypto_aead_crt(parent)->authsize = authsize;1004crypto_aead_crt(cryptd_child)->authsize = authsize;1005return 0;1006}10071008static int rfc4106_encrypt(struct aead_request *req)1009{1010int ret;1011struct crypto_aead *tfm = crypto_aead_reqtfm(req);1012struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);10131014if (!irq_fpu_usable()) {1015struct aead_request *cryptd_req =1016(struct aead_request *) aead_request_ctx(req);1017memcpy(cryptd_req, req, sizeof(*req));1018aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);1019return crypto_aead_encrypt(cryptd_req);1020} else {1021struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);1022kernel_fpu_begin();1023ret = cryptd_child->base.crt_aead.encrypt(req);1024kernel_fpu_end();1025return ret;1026}1027}10281029static int rfc4106_decrypt(struct aead_request *req)1030{1031int ret;1032struct crypto_aead *tfm = crypto_aead_reqtfm(req);1033struct 
	       aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);

	if (!irq_fpu_usable()) {
		struct aead_request *cryptd_req =
			(struct aead_request *) aead_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_aead_decrypt(cryptd_req);
	} else {
		struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
		kernel_fpu_begin();
		ret = cryptd_child->base.crt_aead.decrypt(req);
		kernel_fpu_end();
		return ret;
	}
}

/* Public "rfc4106(gcm(aes))" AEAD. */
static struct crypto_alg rfc4106_alg = {
	.cra_name = "rfc4106(gcm(aes))",
	.cra_driver_name = "rfc4106-gcm-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) + AESNI_ALIGN,
	.cra_alignmask = 0,
	.cra_type = &crypto_nivaead_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(rfc4106_alg.cra_list),
	.cra_init = rfc4106_init,
	.cra_exit = rfc4106_exit,
	.cra_u = {
		.aead = {
			.setkey = rfc4106_set_key,
			.setauthsize = rfc4106_set_authsize,
			.encrypt = rfc4106_encrypt,
			.decrypt = rfc4106_decrypt,
			.geniv = "seqiv",
			.ivsize = 8,
			.maxauthsize = 16,
		},
	},
};

/* The actual GCM encrypt worker, run with the FPU already enabled: builds
 * the pre-counter block from nonce+IV, gathers src/assoc (mapping in place
 * for single-entry scatterlists, bounce-buffering otherwise) and calls the
 * assembler routine, appending the auth tag after the ciphertext. */
static int __driver_rfc4106_encrypt(struct aead_request *req)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	u8 iv_tab[16+AESNI_ALIGN];
	u8* iv = (u8 *) PTR_ALIGN((u8 *)iv_tab, AESNI_ALIGN);
	struct scatter_walk src_sg_walk;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk dst_sg_walk;
	unsigned int i;

	/* Assuming we are supporting rfc4106 64-bit extended */
	/* sequence numbers We need to have the AAD length
equal */1094/* to 8 or 12 bytes */1095if (unlikely(req->assoclen != 8 && req->assoclen != 12))1096return -EINVAL;1097/* IV below built */1098for (i = 0; i < 4; i++)1099*(iv+i) = ctx->nonce[i];1100for (i = 0; i < 8; i++)1101*(iv+4+i) = req->iv[i];1102*((__be32 *)(iv+12)) = counter;11031104if ((sg_is_last(req->src)) && (sg_is_last(req->assoc))) {1105one_entry_in_sg = 1;1106scatterwalk_start(&src_sg_walk, req->src);1107scatterwalk_start(&assoc_sg_walk, req->assoc);1108src = scatterwalk_map(&src_sg_walk, 0);1109assoc = scatterwalk_map(&assoc_sg_walk, 0);1110dst = src;1111if (unlikely(req->src != req->dst)) {1112scatterwalk_start(&dst_sg_walk, req->dst);1113dst = scatterwalk_map(&dst_sg_walk, 0);1114}11151116} else {1117/* Allocate memory for src, dst, assoc */1118src = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,1119GFP_ATOMIC);1120if (unlikely(!src))1121return -ENOMEM;1122assoc = (src + req->cryptlen + auth_tag_len);1123scatterwalk_map_and_copy(src, req->src, 0, req->cryptlen, 0);1124scatterwalk_map_and_copy(assoc, req->assoc, 0,1125req->assoclen, 0);1126dst = src;1127}11281129aesni_gcm_enc(aes_ctx, dst, src, (unsigned long)req->cryptlen, iv,1130ctx->hash_subkey, assoc, (unsigned long)req->assoclen, dst1131+ ((unsigned long)req->cryptlen), auth_tag_len);11321133/* The authTag (aka the Integrity Check Value) needs to be written1134* back to the packet. 
*/1135if (one_entry_in_sg) {1136if (unlikely(req->src != req->dst)) {1137scatterwalk_unmap(dst, 0);1138scatterwalk_done(&dst_sg_walk, 0, 0);1139}1140scatterwalk_unmap(src, 0);1141scatterwalk_unmap(assoc, 0);1142scatterwalk_done(&src_sg_walk, 0, 0);1143scatterwalk_done(&assoc_sg_walk, 0, 0);1144} else {1145scatterwalk_map_and_copy(dst, req->dst, 0,1146req->cryptlen + auth_tag_len, 1);1147kfree(src);1148}1149return 0;1150}11511152static int __driver_rfc4106_decrypt(struct aead_request *req)1153{1154u8 one_entry_in_sg = 0;1155u8 *src, *dst, *assoc;1156unsigned long tempCipherLen = 0;1157__be32 counter = cpu_to_be32(1);1158int retval = 0;1159struct crypto_aead *tfm = crypto_aead_reqtfm(req);1160struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);1161void *aes_ctx = &(ctx->aes_key_expanded);1162unsigned long auth_tag_len = crypto_aead_authsize(tfm);1163u8 iv_and_authTag[32+AESNI_ALIGN];1164u8 *iv = (u8 *) PTR_ALIGN((u8 *)iv_and_authTag, AESNI_ALIGN);1165u8 *authTag = iv + 16;1166struct scatter_walk src_sg_walk;1167struct scatter_walk assoc_sg_walk;1168struct scatter_walk dst_sg_walk;1169unsigned int i;11701171if (unlikely((req->cryptlen < auth_tag_len) ||1172(req->assoclen != 8 && req->assoclen != 12)))1173return -EINVAL;1174/* Assuming we are supporting rfc4106 64-bit extended */1175/* sequence numbers We need to have the AAD length */1176/* equal to 8 or 12 bytes */11771178tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);1179/* IV below built */1180for (i = 0; i < 4; i++)1181*(iv+i) = ctx->nonce[i];1182for (i = 0; i < 8; i++)1183*(iv+4+i) = req->iv[i];1184*((__be32 *)(iv+12)) = counter;11851186if ((sg_is_last(req->src)) && (sg_is_last(req->assoc))) {1187one_entry_in_sg = 1;1188scatterwalk_start(&src_sg_walk, req->src);1189scatterwalk_start(&assoc_sg_walk, req->assoc);1190src = scatterwalk_map(&src_sg_walk, 0);1191assoc = scatterwalk_map(&assoc_sg_walk, 0);1192dst = src;1193if (unlikely(req->src != req->dst)) 
{1194scatterwalk_start(&dst_sg_walk, req->dst);1195dst = scatterwalk_map(&dst_sg_walk, 0);1196}11971198} else {1199/* Allocate memory for src, dst, assoc */1200src = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);1201if (!src)1202return -ENOMEM;1203assoc = (src + req->cryptlen + auth_tag_len);1204scatterwalk_map_and_copy(src, req->src, 0, req->cryptlen, 0);1205scatterwalk_map_and_copy(assoc, req->assoc, 0,1206req->assoclen, 0);1207dst = src;1208}12091210aesni_gcm_dec(aes_ctx, dst, src, tempCipherLen, iv,1211ctx->hash_subkey, assoc, (unsigned long)req->assoclen,1212authTag, auth_tag_len);12131214/* Compare generated tag with passed in tag. */1215retval = memcmp(src + tempCipherLen, authTag, auth_tag_len) ?1216-EBADMSG : 0;12171218if (one_entry_in_sg) {1219if (unlikely(req->src != req->dst)) {1220scatterwalk_unmap(dst, 0);1221scatterwalk_done(&dst_sg_walk, 0, 0);1222}1223scatterwalk_unmap(src, 0);1224scatterwalk_unmap(assoc, 0);1225scatterwalk_done(&src_sg_walk, 0, 0);1226scatterwalk_done(&assoc_sg_walk, 0, 0);1227} else {1228scatterwalk_map_and_copy(dst, req->dst, 0, req->cryptlen, 1);1229kfree(src);1230}1231return retval;1232}12331234static struct crypto_alg __rfc4106_alg = {1235.cra_name = "__gcm-aes-aesni",1236.cra_driver_name = "__driver-gcm-aes-aesni",1237.cra_priority = 0,1238.cra_flags = CRYPTO_ALG_TYPE_AEAD,1239.cra_blocksize = 1,1240.cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) + AESNI_ALIGN,1241.cra_alignmask = 0,1242.cra_type = &crypto_aead_type,1243.cra_module = THIS_MODULE,1244.cra_list = LIST_HEAD_INIT(__rfc4106_alg.cra_list),1245.cra_u = {1246.aead = {1247.encrypt = __driver_rfc4106_encrypt,1248.decrypt = __driver_rfc4106_decrypt,1249},1250},1251};1252#endif12531254static int __init aesni_init(void)1255{1256int err;12571258if (!cpu_has_aes) {1259printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");1260return -ENODEV;1261}12621263if ((err = crypto_fpu_init()))1264goto fpu_err;1265if ((err = 
crypto_register_alg(&aesni_alg)))1266goto aes_err;1267if ((err = crypto_register_alg(&__aesni_alg)))1268goto __aes_err;1269if ((err = crypto_register_alg(&blk_ecb_alg)))1270goto blk_ecb_err;1271if ((err = crypto_register_alg(&blk_cbc_alg)))1272goto blk_cbc_err;1273if ((err = crypto_register_alg(&ablk_ecb_alg)))1274goto ablk_ecb_err;1275if ((err = crypto_register_alg(&ablk_cbc_alg)))1276goto ablk_cbc_err;1277#ifdef CONFIG_X86_641278if ((err = crypto_register_alg(&blk_ctr_alg)))1279goto blk_ctr_err;1280if ((err = crypto_register_alg(&ablk_ctr_alg)))1281goto ablk_ctr_err;1282if ((err = crypto_register_alg(&__rfc4106_alg)))1283goto __aead_gcm_err;1284if ((err = crypto_register_alg(&rfc4106_alg)))1285goto aead_gcm_err;1286#ifdef HAS_CTR1287if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg)))1288goto ablk_rfc3686_ctr_err;1289#endif1290#endif1291#ifdef HAS_LRW1292if ((err = crypto_register_alg(&ablk_lrw_alg)))1293goto ablk_lrw_err;1294#endif1295#ifdef HAS_PCBC1296if ((err = crypto_register_alg(&ablk_pcbc_alg)))1297goto ablk_pcbc_err;1298#endif1299#ifdef HAS_XTS1300if ((err = crypto_register_alg(&ablk_xts_alg)))1301goto ablk_xts_err;1302#endif1303return err;13041305#ifdef HAS_XTS1306ablk_xts_err:1307#endif1308#ifdef HAS_PCBC1309crypto_unregister_alg(&ablk_pcbc_alg);1310ablk_pcbc_err:1311#endif1312#ifdef HAS_LRW1313crypto_unregister_alg(&ablk_lrw_alg);1314ablk_lrw_err:1315#endif1316#ifdef CONFIG_X86_641317#ifdef 
HAS_CTR1318crypto_unregister_alg(&ablk_rfc3686_ctr_alg);1319ablk_rfc3686_ctr_err:1320#endif1321crypto_unregister_alg(&rfc4106_alg);1322aead_gcm_err:1323crypto_unregister_alg(&__rfc4106_alg);1324__aead_gcm_err:1325crypto_unregister_alg(&ablk_ctr_alg);1326ablk_ctr_err:1327crypto_unregister_alg(&blk_ctr_alg);1328blk_ctr_err:1329#endif1330crypto_unregister_alg(&ablk_cbc_alg);1331ablk_cbc_err:1332crypto_unregister_alg(&ablk_ecb_alg);1333ablk_ecb_err:1334crypto_unregister_alg(&blk_cbc_alg);1335blk_cbc_err:1336crypto_unregister_alg(&blk_ecb_alg);1337blk_ecb_err:1338crypto_unregister_alg(&__aesni_alg);1339__aes_err:1340crypto_unregister_alg(&aesni_alg);1341aes_err:1342fpu_err:1343return err;1344}13451346static void __exit aesni_exit(void)1347{1348#ifdef HAS_XTS1349crypto_unregister_alg(&ablk_xts_alg);1350#endif1351#ifdef HAS_PCBC1352crypto_unregister_alg(&ablk_pcbc_alg);1353#endif1354#ifdef HAS_LRW1355crypto_unregister_alg(&ablk_lrw_alg);1356#endif1357#ifdef CONFIG_X86_641358#ifdef HAS_CTR1359crypto_unregister_alg(&ablk_rfc3686_ctr_alg);1360#endif1361crypto_unregister_alg(&rfc4106_alg);1362crypto_unregister_alg(&__rfc4106_alg);1363crypto_unregister_alg(&ablk_ctr_alg);1364crypto_unregister_alg(&blk_ctr_alg);1365#endif1366crypto_unregister_alg(&ablk_cbc_alg);1367crypto_unregister_alg(&ablk_ecb_alg);1368crypto_unregister_alg(&blk_cbc_alg);1369crypto_unregister_alg(&blk_ecb_alg);1370crypto_unregister_alg(&__aesni_alg);1371crypto_unregister_alg(&aesni_alg);13721373crypto_fpu_exit();1374}13751376module_init(aesni_init);1377module_exit(aesni_exit);13781379MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");1380MODULE_LICENSE("GPL");1381MODULE_ALIAS("aes");138213831384