Path: blob/main/sys/crypto/openssl/ossl_chacha20.c
/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2020 Netflix, Inc
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer,
 *    without modification.
 * 2. Redistributions in binary form must reproduce at minimum a disclaimer
 *    similar to the "NO WARRANTY" disclaimer below ("Disclaimer") and any
 *    redistribution must be conditioned upon including a substantially
 *    similar Disclaimer requirement for further binary redistribution.
 *
 * NO WARRANTY
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF NONINFRINGEMENT, MERCHANTIBILITY
 * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR SPECIAL, EXEMPLARY,
 * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
 * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGES.
 */

#include <sys/types.h>
#include <sys/endian.h>
#include <sys/malloc.h>
#include <sys/time.h>

#include <opencrypto/cryptodev.h>

#include <crypto/openssl/ossl.h>
#include <crypto/openssl/ossl_chacha.h>
#include <crypto/openssl/ossl_cipher.h>
#include <crypto/openssl/ossl_poly1305.h>

static ossl_cipher_process_t ossl_chacha20;

struct ossl_cipher ossl_cipher_chacha20 = {
        .type = CRYPTO_CHACHA20,
        .blocksize = CHACHA_BLK_SIZE,
        .ivsize = CHACHA_CTR_SIZE,

        .set_encrypt_key = NULL,
        .set_decrypt_key = NULL,
        .process = ossl_chacha20
};

static int
ossl_chacha20(struct ossl_session_cipher *s, struct cryptop *crp,
    const struct crypto_session_params *csp)
{
        _Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
        unsigned int counter[CHACHA_CTR_SIZE / 4];
        unsigned char block[CHACHA_BLK_SIZE];
        struct crypto_buffer_cursor cc_in, cc_out;
        const unsigned char *in, *inseg, *cipher_key;
        unsigned char *out, *outseg;
        size_t resid, todo, inlen, outlen;
        uint32_t next_counter;
        u_int i;

        if (crp->crp_cipher_key != NULL)
                cipher_key = crp->crp_cipher_key;
        else
                cipher_key = csp->csp_cipher_key;
        for (i = 0; i < nitems(key); i++)
                key[i] = CHACHA_U8TOU32(cipher_key + i * 4);
        crypto_read_iv(crp, counter);
        for (i = 0; i < nitems(counter); i++)
                counter[i] = le32toh(counter[i]);

        resid = crp->crp_payload_length;
        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        inseg = crypto_cursor_segment(&cc_in, &inlen);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;
        outseg = crypto_cursor_segment(&cc_out, &outlen);
        while (resid >= CHACHA_BLK_SIZE) {
                if (inlen < CHACHA_BLK_SIZE) {
                        crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
                        in = block;
                        inlen = CHACHA_BLK_SIZE;
                } else
                        in = inseg;
                if (outlen < CHACHA_BLK_SIZE) {
                        out = block;
                        outlen = CHACHA_BLK_SIZE;
                } else
                        out = outseg;

                /* Figure out how many blocks we can encrypt/decrypt at once. */
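                /*
                 * "todo" is the number of bytes handed to the next
                 * ChaCha20_ctr32() call: the remaining payload, clamped to
                 * what is contiguous in both the input and output segments
                 * and rounded down to whole ChaCha20 blocks.  Segments
                 * shorter than one block were redirected through the
                 * on-stack bounce buffer above.
                 */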
                todo = rounddown(MIN(resid, MIN(inlen, outlen)),
                    CHACHA_BLK_SIZE);

#ifdef __LP64__
                /* ChaCha20_ctr32() assumes length is <= 4GB. */
                todo = (uint32_t)todo;
#endif

                /* Truncate if the 32-bit counter would roll over. */
                next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
                if (next_counter < counter[0]) {
                        todo -= next_counter * CHACHA_BLK_SIZE;
                        next_counter = 0;
                }

                ChaCha20_ctr32(out, in, todo, key, counter);

                counter[0] = next_counter;
                if (counter[0] == 0)
                        counter[1]++;

                if (out == block) {
                        crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE,
                            block);
                        outseg = crypto_cursor_segment(&cc_out, &outlen);
                } else {
                        crypto_cursor_advance(&cc_out, todo);
                        outseg += todo;
                        outlen -= todo;
                }
                if (in == block) {
                        inseg = crypto_cursor_segment(&cc_in, &inlen);
                } else {
                        crypto_cursor_advance(&cc_in, todo);
                        inseg += todo;
                        inlen -= todo;
                }
                resid -= todo;
        }

        if (resid > 0) {
                memset(block, 0, sizeof(block));
                crypto_cursor_copydata(&cc_in, resid, block);
                ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
                crypto_cursor_copyback(&cc_out, resid, block);
        }

        explicit_bzero(block, sizeof(block));
        explicit_bzero(counter, sizeof(counter));
        explicit_bzero(key, sizeof(key));
        return (0);
}

int
ossl_chacha20_poly1305_encrypt(struct cryptop *crp,
    const struct crypto_session_params *csp)
{
        _Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
        unsigned int counter[CHACHA_CTR_SIZE / 4];
        _Alignas(8) unsigned char block[CHACHA_BLK_SIZE];
        unsigned char tag[POLY1305_HASH_LEN];
        POLY1305 auth_ctx;
        struct crypto_buffer_cursor cc_in, cc_out;
        const unsigned char *in, *inseg, *cipher_key;
        unsigned char *out, *outseg;
        size_t resid, todo, inlen, outlen;
        uint32_t next_counter;
        u_int i;

        if (crp->crp_cipher_key != NULL)
                cipher_key = crp->crp_cipher_key;
        else
                cipher_key = csp->csp_cipher_key;
        for (i = 0; i < nitems(key); i++)
                key[i] = CHACHA_U8TOU32(cipher_key + i * 4);

        memset(counter, 0, sizeof(counter));
        crypto_read_iv(crp, counter + (CHACHA_CTR_SIZE - csp->csp_ivlen) / 4);
        for (i = 1; i < nitems(counter); i++)
                counter[i] = le32toh(counter[i]);

        /* Block 0 is used to generate the poly1305 key. */
        counter[0] = 0;

        memset(block, 0, sizeof(block));
        ChaCha20_ctr32(block, block, sizeof(block), key, counter);
        Poly1305_Init(&auth_ctx, block);

        /* MAC the AAD. */
        if (crp->crp_aad != NULL)
                Poly1305_Update(&auth_ctx, crp->crp_aad, crp->crp_aad_length);
        else
                crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
                    ossl_poly1305_update, &auth_ctx);
        if (crp->crp_aad_length % 16 != 0) {
                /* padding1 */
                memset(block, 0, 16);
                Poly1305_Update(&auth_ctx, block,
                    16 - crp->crp_aad_length % 16);
        }

        /* Encryption starts with block 1. */
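        /*
         * This follows the ChaCha20-Poly1305 AEAD layout of RFC 8439:
         * block counter 0 derived the one-time Poly1305 key above, the
         * payload is encrypted starting at block counter 1, and the MAC
         * covers the AAD and the ciphertext (each zero-padded to a 16-byte
         * boundary) followed by both lengths as 64-bit little-endian
         * values.
         */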
        counter[0] = 1;

        /* Do encryption with MAC */
        resid = crp->crp_payload_length;
        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        inseg = crypto_cursor_segment(&cc_in, &inlen);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;
        outseg = crypto_cursor_segment(&cc_out, &outlen);
        while (resid >= CHACHA_BLK_SIZE) {
                if (inlen < CHACHA_BLK_SIZE) {
                        crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
                        in = block;
                        inlen = CHACHA_BLK_SIZE;
                } else
                        in = inseg;
                if (outlen < CHACHA_BLK_SIZE) {
                        out = block;
                        outlen = CHACHA_BLK_SIZE;
                } else
                        out = outseg;

                /* Figure out how many blocks we can encrypt/decrypt at once. */
                todo = rounddown(MIN(resid, MIN(inlen, outlen)),
                    CHACHA_BLK_SIZE);

#ifdef __LP64__
                /* ChaCha20_ctr32() assumes length is <= 4GB. */
                todo = (uint32_t)todo;
#endif

                /* Truncate if the 32-bit counter would roll over. */
                next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
                if (csp->csp_ivlen == 8 && next_counter < counter[0]) {
                        todo -= next_counter * CHACHA_BLK_SIZE;
                        next_counter = 0;
                }

                ChaCha20_ctr32(out, in, todo, key, counter);
                Poly1305_Update(&auth_ctx, out, todo);

                counter[0] = next_counter;
                if (csp->csp_ivlen == 8 && counter[0] == 0)
                        counter[1]++;

                if (out == block) {
                        crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE,
                            block);
                        outseg = crypto_cursor_segment(&cc_out, &outlen);
                } else {
                        crypto_cursor_advance(&cc_out, todo);
                        outseg += todo;
                        outlen -= todo;
                }
                if (in == block) {
                        inseg = crypto_cursor_segment(&cc_in, &inlen);
                } else {
                        crypto_cursor_advance(&cc_in, todo);
                        inseg += todo;
                        inlen -= todo;
                }
                resid -= todo;
        }

        if (resid > 0) {
                memset(block, 0, sizeof(block));
                crypto_cursor_copydata(&cc_in, resid, block);
                ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
                crypto_cursor_copyback(&cc_out, resid, block);

                /* padding2 */
                todo = roundup2(resid, 16);
                memset(block + resid, 0, todo - resid);
                Poly1305_Update(&auth_ctx, block, todo);
        }

        /* lengths */
        le64enc(block, crp->crp_aad_length);
        le64enc(block + 8, crp->crp_payload_length);
        Poly1305_Update(&auth_ctx, block, sizeof(uint64_t) * 2);

        Poly1305_Final(&auth_ctx, tag);
        crypto_copyback(crp, crp->crp_digest_start, csp->csp_auth_mlen == 0 ?
            POLY1305_HASH_LEN : csp->csp_auth_mlen, tag);

        explicit_bzero(&auth_ctx, sizeof(auth_ctx));
        explicit_bzero(tag, sizeof(tag));
        explicit_bzero(block, sizeof(block));
        explicit_bzero(counter, sizeof(counter));
        explicit_bzero(key, sizeof(key));
        return (0);
}

int
ossl_chacha20_poly1305_decrypt(struct cryptop *crp,
    const struct crypto_session_params *csp)
{
        _Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
        unsigned int counter[CHACHA_CTR_SIZE / 4];
        _Alignas(8) unsigned char block[CHACHA_BLK_SIZE];
        unsigned char tag[POLY1305_HASH_LEN], tag2[POLY1305_HASH_LEN];
        struct poly1305_context auth_ctx;
        struct crypto_buffer_cursor cc_in, cc_out;
        const unsigned char *in, *inseg, *cipher_key;
        unsigned char *out, *outseg;
        size_t resid, todo, inlen, outlen;
        uint32_t next_counter;
        int error;
        u_int i, mlen;

        if (crp->crp_cipher_key != NULL)
                cipher_key = crp->crp_cipher_key;
        else
                cipher_key = csp->csp_cipher_key;
        for (i = 0; i < nitems(key); i++)
                key[i] = CHACHA_U8TOU32(cipher_key + i * 4);

        memset(counter, 0, sizeof(counter));
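        /*
         * The nonce occupies the tail of the 16-byte counter block: an
         * 8-byte nonce leaves counter[0] and counter[1] as a 64-bit block
         * counter, while a 12-byte nonce leaves only counter[0].  The
         * counter words not covered by the nonce stay zero.
         */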
        crypto_read_iv(crp, counter + (CHACHA_CTR_SIZE - csp->csp_ivlen) / 4);
        for (i = 1; i < nitems(counter); i++)
                counter[i] = le32toh(counter[i]);

        /* Block 0 is used to generate the poly1305 key. */
        counter[0] = 0;

        memset(block, 0, sizeof(block));
        ChaCha20_ctr32(block, block, sizeof(block), key, counter);
        Poly1305_Init(&auth_ctx, block);

        /* MAC the AAD. */
        if (crp->crp_aad != NULL)
                Poly1305_Update(&auth_ctx, crp->crp_aad, crp->crp_aad_length);
        else
                crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
                    ossl_poly1305_update, &auth_ctx);
        if (crp->crp_aad_length % 16 != 0) {
                /* padding1 */
                memset(block, 0, 16);
                Poly1305_Update(&auth_ctx, block,
                    16 - crp->crp_aad_length % 16);
        }

        /* MAC the ciphertext. */
        crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
            ossl_poly1305_update, &auth_ctx);
        if (crp->crp_payload_length % 16 != 0) {
                /* padding2 */
                memset(block, 0, 16);
                Poly1305_Update(&auth_ctx, block,
                    16 - crp->crp_payload_length % 16);
        }

        /* lengths */
        le64enc(block, crp->crp_aad_length);
        le64enc(block + 8, crp->crp_payload_length);
        Poly1305_Update(&auth_ctx, block, sizeof(uint64_t) * 2);

        Poly1305_Final(&auth_ctx, tag);
        mlen = csp->csp_auth_mlen == 0 ? POLY1305_HASH_LEN :
            csp->csp_auth_mlen;
        crypto_copydata(crp, crp->crp_digest_start, mlen, tag2);
        if (timingsafe_bcmp(tag, tag2, mlen) != 0) {
                error = EBADMSG;
                goto out;
        }

        /* Decryption starts with block 1. */
        counter[0] = 1;

        resid = crp->crp_payload_length;
        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        inseg = crypto_cursor_segment(&cc_in, &inlen);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;
        outseg = crypto_cursor_segment(&cc_out, &outlen);
        while (resid >= CHACHA_BLK_SIZE) {
                if (inlen < CHACHA_BLK_SIZE) {
                        crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
                        in = block;
                        inlen = CHACHA_BLK_SIZE;
                } else
                        in = inseg;
                if (outlen < CHACHA_BLK_SIZE) {
                        out = block;
                        outlen = CHACHA_BLK_SIZE;
                } else
                        out = outseg;

                /* Figure out how many blocks we can encrypt/decrypt at once. */
                todo = rounddown(MIN(resid, MIN(inlen, outlen)),
                    CHACHA_BLK_SIZE);

#ifdef __LP64__
                /* ChaCha20_ctr32() assumes length is <= 4GB. */
                todo = (uint32_t)todo;
#endif

                /* Truncate if the 32-bit counter would roll over. */
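                /*
                 * With an 8-byte nonce the block counter is 64 bits wide,
                 * but ChaCha20_ctr32() only advances the low 32-bit word,
                 * so the request is split at the wrap point and the carry
                 * into counter[1] is applied by hand below.
                 */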
                next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
                if (csp->csp_ivlen == 8 && next_counter < counter[0]) {
                        todo -= next_counter * CHACHA_BLK_SIZE;
                        next_counter = 0;
                }

                ChaCha20_ctr32(out, in, todo, key, counter);

                counter[0] = next_counter;
                if (csp->csp_ivlen == 8 && counter[0] == 0)
                        counter[1]++;

                if (out == block) {
                        crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE,
                            block);
                        outseg = crypto_cursor_segment(&cc_out, &outlen);
                } else {
                        crypto_cursor_advance(&cc_out, todo);
                        outseg += todo;
                        outlen -= todo;
                }
                if (in == block) {
                        inseg = crypto_cursor_segment(&cc_in, &inlen);
                } else {
                        crypto_cursor_advance(&cc_in, todo);
                        inseg += todo;
                        inlen -= todo;
                }
                resid -= todo;
        }

        if (resid > 0) {
                memset(block, 0, sizeof(block));
                crypto_cursor_copydata(&cc_in, resid, block);
                ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
                crypto_cursor_copyback(&cc_out, resid, block);
        }

        error = 0;
out:
        explicit_bzero(&auth_ctx, sizeof(auth_ctx));
        explicit_bzero(tag, sizeof(tag));
        explicit_bzero(block, sizeof(block));
        explicit_bzero(counter, sizeof(counter));
        explicit_bzero(key, sizeof(key));
        return (error);
}