Path: blob/master/thirdparty/libwebp/src/enc/vp8l_enc.c
21817 views
// Copyright 2012 Google Inc. All Rights Reserved.1//2// Use of this source code is governed by a BSD-style license3// that can be found in the COPYING file in the root of the source4// tree. An additional intellectual property rights grant can be found5// in the file PATENTS. All contributing project authors may6// be found in the AUTHORS file in the root of the source tree.7// -----------------------------------------------------------------------------8//9// main entry for the lossless encoder.10//11// Author: Vikas Arora ([email protected])12//1314#include <assert.h>15#include <stdlib.h>16#include <string.h>1718#include "src/dsp/lossless.h"19#include "src/dsp/lossless_common.h"20#include "src/enc/backward_references_enc.h"21#include "src/enc/histogram_enc.h"22#include "src/enc/vp8i_enc.h"23#include "src/enc/vp8li_enc.h"24#include "src/utils/bit_writer_utils.h"25#include "src/utils/huffman_encode_utils.h"26#include "src/utils/palette.h"27#include "src/utils/thread_utils.h"28#include "src/utils/utils.h"29#include "src/webp/encode.h"30#include "src/webp/format_constants.h"31#include "src/webp/types.h"3233// Maximum number of histogram images (sub-blocks).34#define MAX_HUFF_IMAGE_SIZE 260035#define MAX_HUFFMAN_BITS (MIN_HUFFMAN_BITS + (1 << NUM_HUFFMAN_BITS) - 1)36// Empirical value for which it becomes too computationally expensive to37// compute the best predictor image.38#define MAX_PREDICTOR_IMAGE_SIZE (1 << 14)3940// -----------------------------------------------------------------------------41// Palette4243// These five modes are evaluated and their respective entropy is computed.44typedef enum {45kDirect = 0,46kSpatial = 1,47kSubGreen = 2,48kSpatialSubGreen = 3,49kPalette = 4,50kPaletteAndSpatial = 5,51kNumEntropyIx = 652} EntropyIx;5354typedef enum {55kHistoAlpha = 0,56kHistoAlphaPred,57kHistoGreen,58kHistoGreenPred,59kHistoRed,60kHistoRedPred,61kHistoBlue,62kHistoBluePred,63kHistoRedSubGreen,64kHistoRedPredSubGreen,65kHistoBlueSubGreen,66kHistoBluePredSubGreen,67kHistoPalette,68kHistoTotal // Must be last.69} HistoIx;707172#define NUM_BUCKETS 2567374typedef uint32_t HistogramBuckets[NUM_BUCKETS];7576// Keeping track of histograms, indexed by HistoIx.77// Ideally, this would just be a struct with meaningful fields, but the78// calculation of `entropy_comp` uses the index. One refactoring at a time :)79typedef struct {80HistogramBuckets category[kHistoTotal];81} Histograms;8283static void AddSingleSubGreen(uint32_t p,84HistogramBuckets r, HistogramBuckets b) {85const int green = (int)p >> 8; // The upper bits are masked away later.86++r[(((int)p >> 16) - green) & 0xff];87++b[(((int)p >> 0) - green) & 0xff];88}8990static void AddSingle(uint32_t p,91HistogramBuckets a, HistogramBuckets r,92HistogramBuckets g, HistogramBuckets b) {93++a[(p >> 24) & 0xff];94++r[(p >> 16) & 0xff];95++g[(p >> 8) & 0xff];96++b[(p >> 0) & 0xff];97}9899static WEBP_INLINE uint8_t HashPix(uint32_t pix) {100// Note that masking with 0xffffffffu is for preventing an101// 'unsigned int overflow' warning. Doesn't impact the compiled code.102return ((((uint64_t)pix + (pix >> 19)) * 0x39c5fba7ull) & 0xffffffffu) >> 24;103}104105static int AnalyzeEntropy(const uint32_t* argb,106int width, int height, int argb_stride,107int use_palette,108int palette_size, int transform_bits,109EntropyIx* const min_entropy_ix,110int* const red_and_blue_always_zero) {111Histograms* histo;112113if (use_palette && palette_size <= 16) {114// In the case of small palettes, we pack 2, 4 or 8 pixels together. 
In115// practice, small palettes are better than any other transform.116*min_entropy_ix = kPalette;117*red_and_blue_always_zero = 1;118return 1;119}120121histo = (Histograms*)WebPSafeCalloc(1, sizeof(*histo));122if (histo != NULL) {123int i, x, y;124const uint32_t* prev_row = NULL;125const uint32_t* curr_row = argb;126uint32_t pix_prev = argb[0]; // Skip the first pixel.127for (y = 0; y < height; ++y) {128for (x = 0; x < width; ++x) {129const uint32_t pix = curr_row[x];130const uint32_t pix_diff = VP8LSubPixels(pix, pix_prev);131pix_prev = pix;132if ((pix_diff == 0) || (prev_row != NULL && pix == prev_row[x])) {133continue;134}135AddSingle(pix,136histo->category[kHistoAlpha],137histo->category[kHistoRed],138histo->category[kHistoGreen],139histo->category[kHistoBlue]);140AddSingle(pix_diff,141histo->category[kHistoAlphaPred],142histo->category[kHistoRedPred],143histo->category[kHistoGreenPred],144histo->category[kHistoBluePred]);145AddSingleSubGreen(pix,146histo->category[kHistoRedSubGreen],147histo->category[kHistoBlueSubGreen]);148AddSingleSubGreen(pix_diff,149histo->category[kHistoRedPredSubGreen],150histo->category[kHistoBluePredSubGreen]);151{152// Approximate the palette by the entropy of the multiplicative hash.153const uint8_t hash = HashPix(pix);154++histo->category[kHistoPalette][hash];155}156}157prev_row = curr_row;158curr_row += argb_stride;159}160{161uint64_t entropy_comp[kHistoTotal];162uint64_t entropy[kNumEntropyIx];163int k;164int last_mode_to_analyze = use_palette ? kPalette : kSpatialSubGreen;165int j;166// Let's add one zero to the predicted histograms. The zeros are removed167// too efficiently by the pix_diff == 0 comparison, at least one of the168// zeros is likely to exist.169++histo->category[kHistoRedPredSubGreen][0];170++histo->category[kHistoBluePredSubGreen][0];171++histo->category[kHistoRedPred][0];172++histo->category[kHistoGreenPred][0];173++histo->category[kHistoBluePred][0];174++histo->category[kHistoAlphaPred][0];175176for (j = 0; j < kHistoTotal; ++j) {177entropy_comp[j] = VP8LBitsEntropy(histo->category[j], NUM_BUCKETS);178}179entropy[kDirect] = entropy_comp[kHistoAlpha] +180entropy_comp[kHistoRed] +181entropy_comp[kHistoGreen] +182entropy_comp[kHistoBlue];183entropy[kSpatial] = entropy_comp[kHistoAlphaPred] +184entropy_comp[kHistoRedPred] +185entropy_comp[kHistoGreenPred] +186entropy_comp[kHistoBluePred];187entropy[kSubGreen] = entropy_comp[kHistoAlpha] +188entropy_comp[kHistoRedSubGreen] +189entropy_comp[kHistoGreen] +190entropy_comp[kHistoBlueSubGreen];191entropy[kSpatialSubGreen] = entropy_comp[kHistoAlphaPred] +192entropy_comp[kHistoRedPredSubGreen] +193entropy_comp[kHistoGreenPred] +194entropy_comp[kHistoBluePredSubGreen];195entropy[kPalette] = entropy_comp[kHistoPalette];196197// When including transforms, there is an overhead in bits from198// storing them. 
This overhead is small but matters for small images.199// For spatial, there are 14 transformations.200entropy[kSpatial] += (uint64_t)VP8LSubSampleSize(width, transform_bits) *201VP8LSubSampleSize(height, transform_bits) *202VP8LFastLog2(14);203// For color transforms: 24 as only 3 channels are considered in a204// ColorTransformElement.205entropy[kSpatialSubGreen] +=206(uint64_t)VP8LSubSampleSize(width, transform_bits) *207VP8LSubSampleSize(height, transform_bits) * VP8LFastLog2(24);208// For palettes, add the cost of storing the palette.209// We empirically estimate the cost of a compressed entry as 8 bits.210// The palette is differential-coded when compressed hence a much211// lower cost than sizeof(uint32_t)*8.212entropy[kPalette] += (palette_size * 8ull) << LOG_2_PRECISION_BITS;213214*min_entropy_ix = kDirect;215for (k = kDirect + 1; k <= last_mode_to_analyze; ++k) {216if (entropy[*min_entropy_ix] > entropy[k]) {217*min_entropy_ix = (EntropyIx)k;218}219}220assert((int)*min_entropy_ix <= last_mode_to_analyze);221*red_and_blue_always_zero = 1;222// Let's check if the histogram of the chosen entropy mode has223// non-zero red and blue values. If all are zero, we can later skip224// the cross color optimization.225{226static const uint8_t kHistoPairs[5][2] = {227{ kHistoRed, kHistoBlue },228{ kHistoRedPred, kHistoBluePred },229{ kHistoRedSubGreen, kHistoBlueSubGreen },230{ kHistoRedPredSubGreen, kHistoBluePredSubGreen },231{ kHistoRed, kHistoBlue }232};233const HistogramBuckets* const red_histo =234&histo->category[kHistoPairs[*min_entropy_ix][0]];235const HistogramBuckets* const blue_histo =236&histo->category[kHistoPairs[*min_entropy_ix][1]];237for (i = 1; i < NUM_BUCKETS; ++i) {238if (((*red_histo)[i] | (*blue_histo)[i]) != 0) {239*red_and_blue_always_zero = 0;240break;241}242}243}244}245WebPSafeFree(histo);246return 1;247} else {248return 0;249}250}251252// Clamp histogram and transform bits.253static int ClampBits(int width, int height, int bits, int min_bits,254int max_bits, int image_size_max) {255int image_size;256bits = (bits < min_bits) ? min_bits : (bits > max_bits) ? max_bits : bits;257image_size = VP8LSubSampleSize(width, bits) * VP8LSubSampleSize(height, bits);258while (bits < max_bits && image_size > image_size_max) {259++bits;260image_size =261VP8LSubSampleSize(width, bits) * VP8LSubSampleSize(height, bits);262}263// In case the bits reduce the image too much, choose the smallest value264// setting the histogram image size to 1.265while (bits > min_bits && image_size == 1) {266image_size = VP8LSubSampleSize(width, bits - 1) *267VP8LSubSampleSize(height, bits - 1);268if (image_size != 1) break;269--bits;270}271return bits;272}273274static int GetHistoBits(int method, int use_palette, int width, int height) {275// Make tile size a function of encoding method (Range: 0 to 6).276const int histo_bits = (use_palette ? 9 : 7) - method;277return ClampBits(width, height, histo_bits, MIN_HUFFMAN_BITS,278MAX_HUFFMAN_BITS, MAX_HUFF_IMAGE_SIZE);279}280281static int GetTransformBits(int method, int histo_bits) {282const int max_transform_bits = (method < 4) ? 6 : (method > 4) ? 4 : 5;283const int res =284(histo_bits > max_transform_bits) ? 
max_transform_bits : histo_bits;285assert(res <= MAX_TRANSFORM_BITS);286return res;287}288289// Set of parameters to be used in each iteration of the cruncher.290#define CRUNCH_SUBCONFIGS_MAX 2291typedef struct {292int lz77;293int do_no_cache;294} CrunchSubConfig;295typedef struct {296int entropy_idx;297PaletteSorting palette_sorting_type;298CrunchSubConfig sub_configs[CRUNCH_SUBCONFIGS_MAX];299int sub_configs_size;300} CrunchConfig;301302// +2 because we add a palette sorting configuration for kPalette and303// kPaletteAndSpatial.304#define CRUNCH_CONFIGS_MAX (kNumEntropyIx + 2 * kPaletteSortingNum)305306static int EncoderAnalyze(VP8LEncoder* const enc,307CrunchConfig crunch_configs[CRUNCH_CONFIGS_MAX],308int* const crunch_configs_size,309int* const red_and_blue_always_zero) {310const WebPPicture* const pic = enc->pic;311const int width = pic->width;312const int height = pic->height;313const WebPConfig* const config = enc->config;314const int method = config->method;315const int low_effort = (config->method == 0);316int i;317int use_palette, transform_bits;318int n_lz77s;319// If set to 0, analyze the cache with the computed cache value. If 1, also320// analyze with no-cache.321int do_no_cache = 0;322assert(pic != NULL && pic->argb != NULL);323324// Check whether a palette is possible.325enc->palette_size = GetColorPalette(pic, enc->palette_sorted);326use_palette = (enc->palette_size <= MAX_PALETTE_SIZE);327if (!use_palette) {328enc->palette_size = 0;329}330331// Empirical bit sizes.332enc->histo_bits = GetHistoBits(method, use_palette,333pic->width, pic->height);334transform_bits = GetTransformBits(method, enc->histo_bits);335enc->predictor_transform_bits = transform_bits;336enc->cross_color_transform_bits = transform_bits;337338if (low_effort) {339// AnalyzeEntropy is somewhat slow.340crunch_configs[0].entropy_idx = use_palette ? kPalette : kSpatialSubGreen;341crunch_configs[0].palette_sorting_type =342use_palette ? kSortedDefault : kUnusedPalette;343n_lz77s = 1;344*crunch_configs_size = 1;345} else {346EntropyIx min_entropy_ix;347// Try out multiple LZ77 on images with few colors.348n_lz77s = (enc->palette_size > 0 && enc->palette_size <= 16) ? 2 : 1;349if (!AnalyzeEntropy(pic->argb, width, height, pic->argb_stride, use_palette,350enc->palette_size, transform_bits, &min_entropy_ix,351red_and_blue_always_zero)) {352return 0;353}354if (method == 6 && config->quality == 100) {355do_no_cache = 1;356// Go brute force on all transforms.357*crunch_configs_size = 0;358for (i = 0; i < kNumEntropyIx; ++i) {359// We can only apply kPalette or kPaletteAndSpatial if we can indeed use360// a palette.361if ((i != kPalette && i != kPaletteAndSpatial) || use_palette) {362assert(*crunch_configs_size < CRUNCH_CONFIGS_MAX);363if (use_palette && (i == kPalette || i == kPaletteAndSpatial)) {364int sorting_method;365for (sorting_method = 0; sorting_method < kPaletteSortingNum;366++sorting_method) {367const PaletteSorting typed_sorting_method =368(PaletteSorting)sorting_method;369// TODO(vrabaud) kSortedDefault should be tested. 
It is omitted370// for now for backward compatibility.371if (typed_sorting_method == kUnusedPalette ||372typed_sorting_method == kSortedDefault) {373continue;374}375crunch_configs[(*crunch_configs_size)].entropy_idx = i;376crunch_configs[(*crunch_configs_size)].palette_sorting_type =377typed_sorting_method;378++*crunch_configs_size;379}380} else {381crunch_configs[(*crunch_configs_size)].entropy_idx = i;382crunch_configs[(*crunch_configs_size)].palette_sorting_type =383kUnusedPalette;384++*crunch_configs_size;385}386}387}388} else {389// Only choose the guessed best transform.390*crunch_configs_size = 1;391crunch_configs[0].entropy_idx = min_entropy_ix;392crunch_configs[0].palette_sorting_type =393use_palette ? kMinimizeDelta : kUnusedPalette;394if (config->quality >= 75 && method == 5) {395// Test with and without color cache.396do_no_cache = 1;397// If we have a palette, also check in combination with spatial.398if (min_entropy_ix == kPalette) {399*crunch_configs_size = 2;400crunch_configs[1].entropy_idx = kPaletteAndSpatial;401crunch_configs[1].palette_sorting_type = kMinimizeDelta;402}403}404}405}406// Fill in the different LZ77s.407assert(n_lz77s <= CRUNCH_SUBCONFIGS_MAX);408for (i = 0; i < *crunch_configs_size; ++i) {409int j;410for (j = 0; j < n_lz77s; ++j) {411assert(j < CRUNCH_SUBCONFIGS_MAX);412crunch_configs[i].sub_configs[j].lz77 =413(j == 0) ? kLZ77Standard | kLZ77RLE : kLZ77Box;414crunch_configs[i].sub_configs[j].do_no_cache = do_no_cache;415}416crunch_configs[i].sub_configs_size = n_lz77s;417}418return 1;419}420421static int EncoderInit(VP8LEncoder* const enc) {422const WebPPicture* const pic = enc->pic;423const int width = pic->width;424const int height = pic->height;425const int pix_cnt = width * height;426// we round the block size up, so we're guaranteed to have427// at most MAX_REFS_BLOCK_PER_IMAGE blocks used:428const int refs_block_size = (pix_cnt - 1) / MAX_REFS_BLOCK_PER_IMAGE + 1;429int i;430if (!VP8LHashChainInit(&enc->hash_chain, pix_cnt)) return 0;431432for (i = 0; i < 4; ++i) VP8LBackwardRefsInit(&enc->refs[i], refs_block_size);433434return 1;435}436437// Returns false in case of memory error.438static int GetHuffBitLengthsAndCodes(439const VP8LHistogramSet* const histogram_image,440HuffmanTreeCode* const huffman_codes) {441int i, k;442int ok = 0;443uint64_t total_length_size = 0;444uint8_t* mem_buf = NULL;445const int histogram_image_size = histogram_image->size;446int max_num_symbols = 0;447uint8_t* buf_rle = NULL;448HuffmanTree* huff_tree = NULL;449450// Iterate over all histograms and get the aggregate number of codes used.451for (i = 0; i < histogram_image_size; ++i) {452const VP8LHistogram* const histo = histogram_image->histograms[i];453HuffmanTreeCode* const codes = &huffman_codes[5 * i];454assert(histo != NULL);455for (k = 0; k < 5; ++k) {456const int num_symbols =457(k == 0) ? VP8LHistogramNumCodes(histo->palette_code_bits) :458(k == 4) ? 
NUM_DISTANCE_CODES : 256;459codes[k].num_symbols = num_symbols;460total_length_size += num_symbols;461}462}463464// Allocate and Set Huffman codes.465{466uint16_t* codes;467uint8_t* lengths;468mem_buf = (uint8_t*)WebPSafeCalloc(total_length_size,469sizeof(*lengths) + sizeof(*codes));470if (mem_buf == NULL) goto End;471472codes = (uint16_t*)mem_buf;473lengths = (uint8_t*)&codes[total_length_size];474for (i = 0; i < 5 * histogram_image_size; ++i) {475const int bit_length = huffman_codes[i].num_symbols;476huffman_codes[i].codes = codes;477huffman_codes[i].code_lengths = lengths;478codes += bit_length;479lengths += bit_length;480if (max_num_symbols < bit_length) {481max_num_symbols = bit_length;482}483}484}485486buf_rle = (uint8_t*)WebPSafeMalloc(1ULL, max_num_symbols);487huff_tree = (HuffmanTree*)WebPSafeMalloc(3ULL * max_num_symbols,488sizeof(*huff_tree));489if (buf_rle == NULL || huff_tree == NULL) goto End;490491// Create Huffman trees.492for (i = 0; i < histogram_image_size; ++i) {493HuffmanTreeCode* const codes = &huffman_codes[5 * i];494VP8LHistogram* const histo = histogram_image->histograms[i];495VP8LCreateHuffmanTree(histo->literal, 15, buf_rle, huff_tree, codes + 0);496VP8LCreateHuffmanTree(histo->red, 15, buf_rle, huff_tree, codes + 1);497VP8LCreateHuffmanTree(histo->blue, 15, buf_rle, huff_tree, codes + 2);498VP8LCreateHuffmanTree(histo->alpha, 15, buf_rle, huff_tree, codes + 3);499VP8LCreateHuffmanTree(histo->distance, 15, buf_rle, huff_tree, codes + 4);500}501ok = 1;502End:503WebPSafeFree(huff_tree);504WebPSafeFree(buf_rle);505if (!ok) {506WebPSafeFree(mem_buf);507memset(huffman_codes, 0, 5 * histogram_image_size * sizeof(*huffman_codes));508}509return ok;510}511512static void StoreHuffmanTreeOfHuffmanTreeToBitMask(513VP8LBitWriter* const bw, const uint8_t* code_length_bitdepth) {514// RFC 1951 will calm you down if you are worried about this funny sequence.515// This sequence is tuned from that, but more weighted for lower symbol count,516// and more spiking histograms.517static const uint8_t kStorageOrder[CODE_LENGTH_CODES] = {51817, 18, 0, 1, 2, 3, 4, 5, 16, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15519};520int i;521// Throw away trailing zeros:522int codes_to_store = CODE_LENGTH_CODES;523for (; codes_to_store > 4; --codes_to_store) {524if (code_length_bitdepth[kStorageOrder[codes_to_store - 1]] != 0) {525break;526}527}528VP8LPutBits(bw, codes_to_store - 4, 4);529for (i = 0; i < codes_to_store; ++i) {530VP8LPutBits(bw, code_length_bitdepth[kStorageOrder[i]], 3);531}532}533534static void ClearHuffmanTreeIfOnlyOneSymbol(535HuffmanTreeCode* const huffman_code) {536int k;537int count = 0;538for (k = 0; k < huffman_code->num_symbols; ++k) {539if (huffman_code->code_lengths[k] != 0) {540++count;541if (count > 1) return;542}543}544for (k = 0; k < huffman_code->num_symbols; ++k) {545huffman_code->code_lengths[k] = 0;546huffman_code->codes[k] = 0;547}548}549550static void StoreHuffmanTreeToBitMask(551VP8LBitWriter* const bw,552const HuffmanTreeToken* const tokens, const int num_tokens,553const HuffmanTreeCode* const huffman_code) {554int i;555for (i = 0; i < num_tokens; ++i) {556const int ix = tokens[i].code;557const int extra_bits = tokens[i].extra_bits;558VP8LPutBits(bw, huffman_code->codes[ix], huffman_code->code_lengths[ix]);559switch (ix) {560case 16:561VP8LPutBits(bw, extra_bits, 2);562break;563case 17:564VP8LPutBits(bw, extra_bits, 3);565break;566case 18:567VP8LPutBits(bw, extra_bits, 7);568break;569}570}571}572573// 'huff_tree' and 'tokens' are pre-alloacted buffers.574static void 
StoreFullHuffmanCode(VP8LBitWriter* const bw,575HuffmanTree* const huff_tree,576HuffmanTreeToken* const tokens,577const HuffmanTreeCode* const tree) {578uint8_t code_length_bitdepth[CODE_LENGTH_CODES] = { 0 };579uint16_t code_length_bitdepth_symbols[CODE_LENGTH_CODES] = { 0 };580const int max_tokens = tree->num_symbols;581int num_tokens;582HuffmanTreeCode huffman_code;583huffman_code.num_symbols = CODE_LENGTH_CODES;584huffman_code.code_lengths = code_length_bitdepth;585huffman_code.codes = code_length_bitdepth_symbols;586587VP8LPutBits(bw, 0, 1);588num_tokens = VP8LCreateCompressedHuffmanTree(tree, tokens, max_tokens);589{590uint32_t histogram[CODE_LENGTH_CODES] = { 0 };591uint8_t buf_rle[CODE_LENGTH_CODES] = { 0 };592int i;593for (i = 0; i < num_tokens; ++i) {594++histogram[tokens[i].code];595}596597VP8LCreateHuffmanTree(histogram, 7, buf_rle, huff_tree, &huffman_code);598}599600StoreHuffmanTreeOfHuffmanTreeToBitMask(bw, code_length_bitdepth);601ClearHuffmanTreeIfOnlyOneSymbol(&huffman_code);602{603int trailing_zero_bits = 0;604int trimmed_length = num_tokens;605int write_trimmed_length;606int length;607int i = num_tokens;608while (i-- > 0) {609const int ix = tokens[i].code;610if (ix == 0 || ix == 17 || ix == 18) {611--trimmed_length; // discount trailing zeros612trailing_zero_bits += code_length_bitdepth[ix];613if (ix == 17) {614trailing_zero_bits += 3;615} else if (ix == 18) {616trailing_zero_bits += 7;617}618} else {619break;620}621}622write_trimmed_length = (trimmed_length > 1 && trailing_zero_bits > 12);623length = write_trimmed_length ? trimmed_length : num_tokens;624VP8LPutBits(bw, write_trimmed_length, 1);625if (write_trimmed_length) {626if (trimmed_length == 2) {627VP8LPutBits(bw, 0, 3 + 2); // nbitpairs=1, trimmed_length=2628} else {629const int nbits = BitsLog2Floor(trimmed_length - 2);630const int nbitpairs = nbits / 2 + 1;631assert(trimmed_length > 2);632assert(nbitpairs - 1 < 8);633VP8LPutBits(bw, nbitpairs - 1, 3);634VP8LPutBits(bw, trimmed_length - 2, nbitpairs * 2);635}636}637StoreHuffmanTreeToBitMask(bw, tokens, length, &huffman_code);638}639}640641// 'huff_tree' and 'tokens' are pre-alloacted buffers.642static void StoreHuffmanCode(VP8LBitWriter* const bw,643HuffmanTree* const huff_tree,644HuffmanTreeToken* const tokens,645const HuffmanTreeCode* const huffman_code) {646int i;647int count = 0;648int symbols[2] = { 0, 0 };649const int kMaxBits = 8;650const int kMaxSymbol = 1 << kMaxBits;651652// Check whether it's a small tree.653for (i = 0; i < huffman_code->num_symbols && count < 3; ++i) {654if (huffman_code->code_lengths[i] != 0) {655if (count < 2) symbols[count] = i;656++count;657}658}659660if (count == 0) { // emit minimal tree for empty cases661// bits: small tree marker: 1, count-1: 0, large 8-bit code: 0, code: 0662VP8LPutBits(bw, 0x01, 4);663} else if (count <= 2 && symbols[0] < kMaxSymbol && symbols[1] < kMaxSymbol) {664VP8LPutBits(bw, 1, 1); // Small tree marker to encode 1 or 2 symbols.665VP8LPutBits(bw, count - 1, 1);666if (symbols[0] <= 1) {667VP8LPutBits(bw, 0, 1); // Code bit for small (1 bit) symbol value.668VP8LPutBits(bw, symbols[0], 1);669} else {670VP8LPutBits(bw, 1, 1);671VP8LPutBits(bw, symbols[0], 8);672}673if (count == 2) {674VP8LPutBits(bw, symbols[1], 8);675}676} else {677StoreFullHuffmanCode(bw, huff_tree, tokens, huffman_code);678}679}680681static WEBP_INLINE void WriteHuffmanCode(VP8LBitWriter* const bw,682const HuffmanTreeCode* const code,683int code_index) {684const int depth = code->code_lengths[code_index];685const int symbol = 
code->codes[code_index];686VP8LPutBits(bw, symbol, depth);687}688689static WEBP_INLINE void WriteHuffmanCodeWithExtraBits(690VP8LBitWriter* const bw,691const HuffmanTreeCode* const code,692int code_index,693int bits,694int n_bits) {695const int depth = code->code_lengths[code_index];696const int symbol = code->codes[code_index];697VP8LPutBits(bw, (bits << depth) | symbol, depth + n_bits);698}699700static int StoreImageToBitMask(VP8LBitWriter* const bw, int width,701int histo_bits,702const VP8LBackwardRefs* const refs,703const uint32_t* histogram_symbols,704const HuffmanTreeCode* const huffman_codes,705const WebPPicture* const pic) {706const int histo_xsize = histo_bits ? VP8LSubSampleSize(width, histo_bits) : 1;707const int tile_mask = (histo_bits == 0) ? 0 : -(1 << histo_bits);708// x and y trace the position in the image.709int x = 0;710int y = 0;711int tile_x = x & tile_mask;712int tile_y = y & tile_mask;713int histogram_ix = (histogram_symbols[0] >> 8) & 0xffff;714const HuffmanTreeCode* codes = huffman_codes + 5 * histogram_ix;715VP8LRefsCursor c = VP8LRefsCursorInit(refs);716while (VP8LRefsCursorOk(&c)) {717const PixOrCopy* const v = c.cur_pos;718if ((tile_x != (x & tile_mask)) || (tile_y != (y & tile_mask))) {719tile_x = x & tile_mask;720tile_y = y & tile_mask;721histogram_ix = (histogram_symbols[(y >> histo_bits) * histo_xsize +722(x >> histo_bits)] >>7238) &7240xffff;725codes = huffman_codes + 5 * histogram_ix;726}727if (PixOrCopyIsLiteral(v)) {728static const uint8_t order[] = { 1, 2, 0, 3 };729int k;730for (k = 0; k < 4; ++k) {731const int code = PixOrCopyLiteral(v, order[k]);732WriteHuffmanCode(bw, codes + k, code);733}734} else if (PixOrCopyIsCacheIdx(v)) {735const int code = PixOrCopyCacheIdx(v);736const int literal_ix = 256 + NUM_LENGTH_CODES + code;737WriteHuffmanCode(bw, codes, literal_ix);738} else {739int bits, n_bits;740int code;741742const int distance = PixOrCopyDistance(v);743VP8LPrefixEncode(v->len, &code, &n_bits, &bits);744WriteHuffmanCodeWithExtraBits(bw, codes, 256 + code, bits, n_bits);745746// Don't write the distance with the extra bits code since747// the distance can be up to 18 bits of extra bits, and the prefix748// 15 bits, totaling to 33, and our PutBits only supports up to 32 bits.749VP8LPrefixEncode(distance, &code, &n_bits, &bits);750WriteHuffmanCode(bw, codes + 4, code);751VP8LPutBits(bw, bits, n_bits);752}753x += PixOrCopyLength(v);754while (x >= width) {755x -= width;756++y;757}758VP8LRefsCursorNext(&c);759}760if (bw->error) {761return WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);762}763return 1;764}765766// Special case of EncodeImageInternal() for cache-bits=0, histo_bits=31.767// pic and percent are for progress.768static int EncodeImageNoHuffman(VP8LBitWriter* const bw,769const uint32_t* const argb,770VP8LHashChain* const hash_chain,771VP8LBackwardRefs* const refs_array, int width,772int height, int quality, int low_effort,773const WebPPicture* const pic, int percent_range,774int* const percent) {775int i;776int max_tokens = 0;777VP8LBackwardRefs* refs;778HuffmanTreeToken* tokens = NULL;779HuffmanTreeCode huffman_codes[5] = {{0, NULL, NULL}};780const uint32_t histogram_symbols[1] = {0}; // only one tree, one symbol781int cache_bits = 0;782VP8LHistogramSet* histogram_image = NULL;783HuffmanTree* const huff_tree = (HuffmanTree*)WebPSafeMalloc(7843ULL * CODE_LENGTH_CODES, sizeof(*huff_tree));785if (huff_tree == NULL) {786WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);787goto Error;788}789790// Calculate backward references from 
ARGB image.791if (!VP8LHashChainFill(hash_chain, quality, argb, width, height, low_effort,792pic, percent_range / 2, percent)) {793goto Error;794}795if (!VP8LGetBackwardReferences(width, height, argb, quality, /*low_effort=*/0,796kLZ77Standard | kLZ77RLE, cache_bits,797/*do_no_cache=*/0, hash_chain, refs_array,798&cache_bits, pic,799percent_range - percent_range / 2, percent)) {800goto Error;801}802refs = &refs_array[0];803histogram_image = VP8LAllocateHistogramSet(1, cache_bits);804if (histogram_image == NULL) {805WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);806goto Error;807}808VP8LHistogramSetClear(histogram_image);809810// Build histogram image and symbols from backward references.811VP8LHistogramStoreRefs(refs, /*distance_modifier=*/NULL,812/*distance_modifier_arg0=*/0,813histogram_image->histograms[0]);814815// Create Huffman bit lengths and codes for each histogram image.816assert(histogram_image->size == 1);817if (!GetHuffBitLengthsAndCodes(histogram_image, huffman_codes)) {818WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);819goto Error;820}821822// No color cache, no Huffman image.823VP8LPutBits(bw, 0, 1);824825// Find maximum number of symbols for the huffman tree-set.826for (i = 0; i < 5; ++i) {827HuffmanTreeCode* const codes = &huffman_codes[i];828if (max_tokens < codes->num_symbols) {829max_tokens = codes->num_symbols;830}831}832833tokens = (HuffmanTreeToken*)WebPSafeMalloc(max_tokens, sizeof(*tokens));834if (tokens == NULL) {835WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);836goto Error;837}838839// Store Huffman codes.840for (i = 0; i < 5; ++i) {841HuffmanTreeCode* const codes = &huffman_codes[i];842StoreHuffmanCode(bw, huff_tree, tokens, codes);843ClearHuffmanTreeIfOnlyOneSymbol(codes);844}845846// Store actual literals.847if (!StoreImageToBitMask(bw, width, 0, refs, histogram_symbols, huffman_codes,848pic)) {849goto Error;850}851852Error:853WebPSafeFree(tokens);854WebPSafeFree(huff_tree);855VP8LFreeHistogramSet(histogram_image);856WebPSafeFree(huffman_codes[0].codes);857return (pic->error_code == VP8_ENC_OK);858}859860// pic and percent are for progress.861static int EncodeImageInternal(862VP8LBitWriter* const bw, const uint32_t* const argb,863VP8LHashChain* const hash_chain, VP8LBackwardRefs refs_array[4], int width,864int height, int quality, int low_effort, const CrunchConfig* const config,865int* cache_bits, int histogram_bits_in, size_t init_byte_position,866int* const hdr_size, int* const data_size, const WebPPicture* const pic,867int percent_range, int* const percent) {868const uint32_t histogram_image_xysize =869VP8LSubSampleSize(width, histogram_bits_in) *870VP8LSubSampleSize(height, histogram_bits_in);871int remaining_percent = percent_range;872int percent_start = *percent;873VP8LHistogramSet* histogram_image = NULL;874VP8LHistogram* tmp_histo = NULL;875uint32_t i, histogram_image_size = 0;876size_t bit_array_size = 0;877HuffmanTree* const huff_tree = (HuffmanTree*)WebPSafeMalloc(8783ULL * CODE_LENGTH_CODES, sizeof(*huff_tree));879HuffmanTreeToken* tokens = NULL;880HuffmanTreeCode* huffman_codes = NULL;881uint32_t* const histogram_argb = (uint32_t*)WebPSafeMalloc(882histogram_image_xysize, sizeof(*histogram_argb));883int sub_configs_idx;884int cache_bits_init, write_histogram_image;885VP8LBitWriter bw_init = *bw, bw_best;886int hdr_size_tmp;887VP8LHashChain hash_chain_histogram; // histogram image hash chain888size_t bw_size_best = ~(size_t)0;889assert(histogram_bits_in >= MIN_HUFFMAN_BITS);890assert(histogram_bits_in <= 
MAX_HUFFMAN_BITS);891assert(hdr_size != NULL);892assert(data_size != NULL);893894memset(&hash_chain_histogram, 0, sizeof(hash_chain_histogram));895if (!VP8LBitWriterInit(&bw_best, 0)) {896WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);897goto Error;898}899900// Make sure we can allocate the different objects.901if (huff_tree == NULL || histogram_argb == NULL ||902!VP8LHashChainInit(&hash_chain_histogram, histogram_image_xysize)) {903WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);904goto Error;905}906907percent_range = remaining_percent / 5;908if (!VP8LHashChainFill(hash_chain, quality, argb, width, height,909low_effort, pic, percent_range, percent)) {910goto Error;911}912percent_start += percent_range;913remaining_percent -= percent_range;914915// If the value is different from zero, it has been set during the palette916// analysis.917cache_bits_init = (*cache_bits == 0) ? MAX_COLOR_CACHE_BITS : *cache_bits;918// If several iterations will happen, clone into bw_best.919if ((config->sub_configs_size > 1 || config->sub_configs[0].do_no_cache) &&920!VP8LBitWriterClone(bw, &bw_best)) {921WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);922goto Error;923}924925for (sub_configs_idx = 0; sub_configs_idx < config->sub_configs_size;926++sub_configs_idx) {927const CrunchSubConfig* const sub_config =928&config->sub_configs[sub_configs_idx];929int cache_bits_best, i_cache;930int i_remaining_percent = remaining_percent / config->sub_configs_size;931int i_percent_range = i_remaining_percent / 4;932i_remaining_percent -= i_percent_range;933934if (!VP8LGetBackwardReferences(935width, height, argb, quality, low_effort, sub_config->lz77,936cache_bits_init, sub_config->do_no_cache, hash_chain,937&refs_array[0], &cache_bits_best, pic, i_percent_range, percent)) {938goto Error;939}940941for (i_cache = 0; i_cache < (sub_config->do_no_cache ? 2 : 1); ++i_cache) {942const int cache_bits_tmp = (i_cache == 0) ? 
cache_bits_best : 0;943int histogram_bits = histogram_bits_in;944// Speed-up: no need to study the no-cache case if it was already studied945// in i_cache == 0.946if (i_cache == 1 && cache_bits_best == 0) break;947948// Reset the bit writer for this iteration.949VP8LBitWriterReset(&bw_init, bw);950951// Build histogram image and symbols from backward references.952histogram_image =953VP8LAllocateHistogramSet(histogram_image_xysize, cache_bits_tmp);954tmp_histo = VP8LAllocateHistogram(cache_bits_tmp);955if (histogram_image == NULL || tmp_histo == NULL) {956WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);957goto Error;958}959960i_percent_range = i_remaining_percent / 3;961i_remaining_percent -= i_percent_range;962if (!VP8LGetHistoImageSymbols(963width, height, &refs_array[i_cache], quality, low_effort,964histogram_bits, cache_bits_tmp, histogram_image, tmp_histo,965histogram_argb, pic, i_percent_range, percent)) {966goto Error;967}968// Create Huffman bit lengths and codes for each histogram image.969histogram_image_size = histogram_image->size;970bit_array_size = 5 * histogram_image_size;971huffman_codes = (HuffmanTreeCode*)WebPSafeCalloc(bit_array_size,972sizeof(*huffman_codes));973// Note: some histogram_image entries may point to tmp_histos[], so the974// latter need to outlive the following call to975// GetHuffBitLengthsAndCodes().976if (huffman_codes == NULL ||977!GetHuffBitLengthsAndCodes(histogram_image, huffman_codes)) {978WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);979goto Error;980}981// Free combined histograms.982VP8LFreeHistogramSet(histogram_image);983histogram_image = NULL;984985// Free scratch histograms.986VP8LFreeHistogram(tmp_histo);987tmp_histo = NULL;988989// Color Cache parameters.990if (cache_bits_tmp > 0) {991VP8LPutBits(bw, 1, 1);992VP8LPutBits(bw, cache_bits_tmp, 4);993} else {994VP8LPutBits(bw, 0, 1);995}996997// Huffman image + meta huffman.998histogram_image_size = 0;999for (i = 0; i < histogram_image_xysize; ++i) {1000if (histogram_argb[i] >= histogram_image_size) {1001histogram_image_size = histogram_argb[i] + 1;1002}1003histogram_argb[i] <<= 8;1004}10051006write_histogram_image = (histogram_image_size > 1);1007VP8LPutBits(bw, write_histogram_image, 1);1008if (write_histogram_image) {1009VP8LOptimizeSampling(histogram_argb, width, height, histogram_bits_in,1010MAX_HUFFMAN_BITS, &histogram_bits);1011VP8LPutBits(bw, histogram_bits - 2, 3);1012i_percent_range = i_remaining_percent / 2;1013i_remaining_percent -= i_percent_range;1014if (!EncodeImageNoHuffman(1015bw, histogram_argb, &hash_chain_histogram, &refs_array[2],1016VP8LSubSampleSize(width, histogram_bits),1017VP8LSubSampleSize(height, histogram_bits), quality, low_effort,1018pic, i_percent_range, percent)) {1019goto Error;1020}1021}10221023// Store Huffman codes.1024{1025int max_tokens = 0;1026// Find maximum number of symbols for the huffman tree-set.1027for (i = 0; i < 5 * histogram_image_size; ++i) {1028HuffmanTreeCode* const codes = &huffman_codes[i];1029if (max_tokens < codes->num_symbols) {1030max_tokens = codes->num_symbols;1031}1032}1033tokens = (HuffmanTreeToken*)WebPSafeMalloc(max_tokens, sizeof(*tokens));1034if (tokens == NULL) {1035WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);1036goto Error;1037}1038for (i = 0; i < 5 * histogram_image_size; ++i) {1039HuffmanTreeCode* const codes = &huffman_codes[i];1040StoreHuffmanCode(bw, huff_tree, tokens, codes);1041ClearHuffmanTreeIfOnlyOneSymbol(codes);1042}1043}1044// Store actual literals.1045hdr_size_tmp = 
(int)(VP8LBitWriterNumBytes(bw) - init_byte_position);1046if (!StoreImageToBitMask(bw, width, histogram_bits, &refs_array[i_cache],1047histogram_argb, huffman_codes, pic)) {1048goto Error;1049}1050// Keep track of the smallest image so far.1051if (VP8LBitWriterNumBytes(bw) < bw_size_best) {1052bw_size_best = VP8LBitWriterNumBytes(bw);1053*cache_bits = cache_bits_tmp;1054*hdr_size = hdr_size_tmp;1055*data_size =1056(int)(VP8LBitWriterNumBytes(bw) - init_byte_position - *hdr_size);1057VP8LBitWriterSwap(bw, &bw_best);1058}1059WebPSafeFree(tokens);1060tokens = NULL;1061if (huffman_codes != NULL) {1062WebPSafeFree(huffman_codes->codes);1063WebPSafeFree(huffman_codes);1064huffman_codes = NULL;1065}1066}1067}1068VP8LBitWriterSwap(bw, &bw_best);10691070if (!WebPReportProgress(pic, percent_start + remaining_percent, percent)) {1071goto Error;1072}10731074Error:1075WebPSafeFree(tokens);1076WebPSafeFree(huff_tree);1077VP8LFreeHistogramSet(histogram_image);1078VP8LFreeHistogram(tmp_histo);1079VP8LHashChainClear(&hash_chain_histogram);1080if (huffman_codes != NULL) {1081WebPSafeFree(huffman_codes->codes);1082WebPSafeFree(huffman_codes);1083}1084WebPSafeFree(histogram_argb);1085VP8LBitWriterWipeOut(&bw_best);1086return (pic->error_code == VP8_ENC_OK);1087}10881089// -----------------------------------------------------------------------------1090// Transforms10911092static void ApplySubtractGreen(VP8LEncoder* const enc, int width, int height,1093VP8LBitWriter* const bw) {1094VP8LPutBits(bw, TRANSFORM_PRESENT, 1);1095VP8LPutBits(bw, SUBTRACT_GREEN_TRANSFORM, 2);1096VP8LSubtractGreenFromBlueAndRed(enc->argb, width * height);1097}10981099static int ApplyPredictFilter(VP8LEncoder* const enc, int width, int height,1100int quality, int low_effort,1101int used_subtract_green, VP8LBitWriter* const bw,1102int percent_range, int* const percent,1103int* const best_bits) {1104const int near_lossless_strength =1105enc->use_palette ? 100 : enc->config->near_lossless;1106const int max_bits = ClampBits(width, height, enc->predictor_transform_bits,1107MIN_TRANSFORM_BITS, MAX_TRANSFORM_BITS,1108MAX_PREDICTOR_IMAGE_SIZE);1109const int min_bits = ClampBits(1110width, height,1111max_bits - 2 * (enc->config->method > 4 ? 
enc->config->method - 4 : 0),1112MIN_TRANSFORM_BITS, MAX_TRANSFORM_BITS, MAX_PREDICTOR_IMAGE_SIZE);11131114if (!VP8LResidualImage(width, height, min_bits, max_bits, low_effort,1115enc->argb, enc->argb_scratch, enc->transform_data,1116near_lossless_strength, enc->config->exact,1117used_subtract_green, enc->pic, percent_range / 2,1118percent, best_bits)) {1119return 0;1120}1121VP8LPutBits(bw, TRANSFORM_PRESENT, 1);1122VP8LPutBits(bw, PREDICTOR_TRANSFORM, 2);1123assert(*best_bits >= MIN_TRANSFORM_BITS && *best_bits <= MAX_TRANSFORM_BITS);1124VP8LPutBits(bw, *best_bits - MIN_TRANSFORM_BITS, NUM_TRANSFORM_BITS);1125return EncodeImageNoHuffman(1126bw, enc->transform_data, &enc->hash_chain, &enc->refs[0],1127VP8LSubSampleSize(width, *best_bits),1128VP8LSubSampleSize(height, *best_bits), quality, low_effort, enc->pic,1129percent_range - percent_range / 2, percent);1130}11311132static int ApplyCrossColorFilter(VP8LEncoder* const enc, int width, int height,1133int quality, int low_effort,1134VP8LBitWriter* const bw, int percent_range,1135int* const percent, int* const best_bits) {1136const int min_bits = enc->cross_color_transform_bits;11371138if (!VP8LColorSpaceTransform(width, height, min_bits, quality, enc->argb,1139enc->transform_data, enc->pic, percent_range / 2,1140percent, best_bits)) {1141return 0;1142}1143VP8LPutBits(bw, TRANSFORM_PRESENT, 1);1144VP8LPutBits(bw, CROSS_COLOR_TRANSFORM, 2);1145assert(*best_bits >= MIN_TRANSFORM_BITS && *best_bits <= MAX_TRANSFORM_BITS);1146VP8LPutBits(bw, *best_bits - MIN_TRANSFORM_BITS, NUM_TRANSFORM_BITS);1147return EncodeImageNoHuffman(1148bw, enc->transform_data, &enc->hash_chain, &enc->refs[0],1149VP8LSubSampleSize(width, *best_bits),1150VP8LSubSampleSize(height, *best_bits), quality, low_effort, enc->pic,1151percent_range - percent_range / 2, percent);1152}11531154// -----------------------------------------------------------------------------11551156static int WriteRiffHeader(const WebPPicture* const pic, size_t riff_size,1157size_t vp8l_size) {1158uint8_t riff[RIFF_HEADER_SIZE + CHUNK_HEADER_SIZE + VP8L_SIGNATURE_SIZE] = {1159'R', 'I', 'F', 'F', 0, 0, 0, 0, 'W', 'E', 'B', 'P',1160'V', 'P', '8', 'L', 0, 0, 0, 0, VP8L_MAGIC_BYTE,1161};1162PutLE32(riff + TAG_SIZE, (uint32_t)riff_size);1163PutLE32(riff + RIFF_HEADER_SIZE + TAG_SIZE, (uint32_t)vp8l_size);1164return pic->writer(riff, sizeof(riff), pic);1165}11661167static int WriteImageSize(const WebPPicture* const pic,1168VP8LBitWriter* const bw) {1169const int width = pic->width - 1;1170const int height = pic->height - 1;1171assert(width < WEBP_MAX_DIMENSION && height < WEBP_MAX_DIMENSION);11721173VP8LPutBits(bw, width, VP8L_IMAGE_SIZE_BITS);1174VP8LPutBits(bw, height, VP8L_IMAGE_SIZE_BITS);1175return !bw->error;1176}11771178static int WriteRealAlphaAndVersion(VP8LBitWriter* const bw, int has_alpha) {1179VP8LPutBits(bw, has_alpha, 1);1180VP8LPutBits(bw, VP8L_VERSION, VP8L_VERSION_BITS);1181return !bw->error;1182}11831184static int WriteImage(const WebPPicture* const pic, VP8LBitWriter* const bw,1185size_t* const coded_size) {1186const uint8_t* const webpll_data = VP8LBitWriterFinish(bw);1187const size_t webpll_size = VP8LBitWriterNumBytes(bw);1188const size_t vp8l_size = VP8L_SIGNATURE_SIZE + webpll_size;1189const size_t pad = vp8l_size & 1;1190const size_t riff_size = TAG_SIZE + CHUNK_HEADER_SIZE + vp8l_size + pad;1191*coded_size = 0;11921193if (bw->error) {1194return WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);1195}11961197if (!WriteRiffHeader(pic, riff_size, vp8l_size) 
||1198!pic->writer(webpll_data, webpll_size, pic)) {1199return WebPEncodingSetError(pic, VP8_ENC_ERROR_BAD_WRITE);1200}12011202if (pad) {1203const uint8_t pad_byte[1] = { 0 };1204if (!pic->writer(pad_byte, 1, pic)) {1205return WebPEncodingSetError(pic, VP8_ENC_ERROR_BAD_WRITE);1206}1207}1208*coded_size = CHUNK_HEADER_SIZE + riff_size;1209return 1;1210}12111212// -----------------------------------------------------------------------------12131214static void ClearTransformBuffer(VP8LEncoder* const enc) {1215WebPSafeFree(enc->transform_mem);1216enc->transform_mem = NULL;1217enc->transform_mem_size = 0;1218}12191220// Allocates the memory for argb (W x H) buffer, 2 rows of context for1221// prediction and transform data.1222// Flags influencing the memory allocated:1223// enc->transform_bits1224// enc->use_predict, enc->use_cross_color1225static int AllocateTransformBuffer(VP8LEncoder* const enc, int width,1226int height) {1227const uint64_t image_size = (uint64_t)width * height;1228// VP8LResidualImage needs room for 2 scanlines of uint32 pixels with an extra1229// pixel in each, plus 2 regular scanlines of bytes.1230// TODO(skal): Clean up by using arithmetic in bytes instead of words.1231const uint64_t argb_scratch_size =1232enc->use_predict ? (width + 1) * 2 + (width * 2 + sizeof(uint32_t) - 1) /1233sizeof(uint32_t)1234: 0;1235const uint64_t transform_data_size =1236(enc->use_predict || enc->use_cross_color)1237? (uint64_t)VP8LSubSampleSize(width, MIN_TRANSFORM_BITS) *1238VP8LSubSampleSize(height, MIN_TRANSFORM_BITS)1239: 0;1240const uint64_t max_alignment_in_words =1241(WEBP_ALIGN_CST + sizeof(uint32_t) - 1) / sizeof(uint32_t);1242const uint64_t mem_size = image_size + max_alignment_in_words +1243argb_scratch_size + max_alignment_in_words +1244transform_data_size;1245uint32_t* mem = enc->transform_mem;1246if (mem == NULL || mem_size > enc->transform_mem_size) {1247ClearTransformBuffer(enc);1248mem = (uint32_t*)WebPSafeMalloc(mem_size, sizeof(*mem));1249if (mem == NULL) {1250return WebPEncodingSetError(enc->pic, VP8_ENC_ERROR_OUT_OF_MEMORY);1251}1252enc->transform_mem = mem;1253enc->transform_mem_size = (size_t)mem_size;1254enc->argb_content = kEncoderNone;1255}1256enc->argb = mem;1257mem = (uint32_t*)WEBP_ALIGN(mem + image_size);1258enc->argb_scratch = mem;1259mem = (uint32_t*)WEBP_ALIGN(mem + argb_scratch_size);1260enc->transform_data = mem;12611262enc->current_width = width;1263return 1;1264}12651266static int MakeInputImageCopy(VP8LEncoder* const enc) {1267const WebPPicture* const picture = enc->pic;1268const int width = picture->width;1269const int height = picture->height;12701271if (!AllocateTransformBuffer(enc, width, height)) return 0;1272if (enc->argb_content == kEncoderARGB) return 1;12731274{1275uint32_t* dst = enc->argb;1276const uint32_t* src = picture->argb;1277int y;1278for (y = 0; y < height; ++y) {1279memcpy(dst, src, width * sizeof(*dst));1280dst += width;1281src += picture->argb_stride;1282}1283}1284enc->argb_content = kEncoderARGB;1285assert(enc->current_width == width);1286return 1;1287}12881289// -----------------------------------------------------------------------------12901291#define APPLY_PALETTE_GREEDY_MAX 412921293static WEBP_INLINE uint32_t SearchColorGreedy(const uint32_t palette[],1294int palette_size,1295uint32_t color) {1296(void)palette_size;1297assert(palette_size < APPLY_PALETTE_GREEDY_MAX);1298assert(3 == APPLY_PALETTE_GREEDY_MAX - 1);1299if (color == palette[0]) return 0;1300if (color == palette[1]) return 1;1301if (color == palette[2]) return 
2;1302return 3;1303}13041305static WEBP_INLINE uint32_t ApplyPaletteHash0(uint32_t color) {1306// Focus on the green color.1307return (color >> 8) & 0xff;1308}13091310#define PALETTE_INV_SIZE_BITS 111311#define PALETTE_INV_SIZE (1 << PALETTE_INV_SIZE_BITS)13121313static WEBP_INLINE uint32_t ApplyPaletteHash1(uint32_t color) {1314// Forget about alpha.1315return ((uint32_t)((color & 0x00ffffffu) * 4222244071ull)) >>1316(32 - PALETTE_INV_SIZE_BITS);1317}13181319static WEBP_INLINE uint32_t ApplyPaletteHash2(uint32_t color) {1320// Forget about alpha.1321return ((uint32_t)((color & 0x00ffffffu) * ((1ull << 31) - 1))) >>1322(32 - PALETTE_INV_SIZE_BITS);1323}13241325// Use 1 pixel cache for ARGB pixels.1326#define APPLY_PALETTE_FOR(COLOR_INDEX) do { \1327uint32_t prev_pix = palette[0]; \1328uint32_t prev_idx = 0; \1329for (y = 0; y < height; ++y) { \1330for (x = 0; x < width; ++x) { \1331const uint32_t pix = src[x]; \1332if (pix != prev_pix) { \1333prev_idx = COLOR_INDEX; \1334prev_pix = pix; \1335} \1336tmp_row[x] = prev_idx; \1337} \1338VP8LBundleColorMap(tmp_row, width, xbits, dst); \1339src += src_stride; \1340dst += dst_stride; \1341} \1342} while (0)13431344// Remap argb values in src[] to packed palettes entries in dst[]1345// using 'row' as a temporary buffer of size 'width'.1346// We assume that all src[] values have a corresponding entry in the palette.1347// Note: src[] can be the same as dst[]1348static int ApplyPalette(const uint32_t* src, uint32_t src_stride, uint32_t* dst,1349uint32_t dst_stride, const uint32_t* palette,1350int palette_size, int width, int height, int xbits,1351const WebPPicture* const pic) {1352// TODO(skal): this tmp buffer is not needed if VP8LBundleColorMap() can be1353// made to work in-place.1354uint8_t* const tmp_row = (uint8_t*)WebPSafeMalloc(width, sizeof(*tmp_row));1355int x, y;13561357if (tmp_row == NULL) {1358return WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);1359}13601361if (palette_size < APPLY_PALETTE_GREEDY_MAX) {1362APPLY_PALETTE_FOR(SearchColorGreedy(palette, palette_size, pix));1363} else {1364int i, j;1365uint16_t buffer[PALETTE_INV_SIZE];1366uint32_t (*const hash_functions[])(uint32_t) = {1367ApplyPaletteHash0, ApplyPaletteHash1, ApplyPaletteHash21368};13691370// Try to find a perfect hash function able to go from a color to an index1371// within 1 << PALETTE_INV_SIZE_BITS in order to build a hash map to go1372// from color to index in palette.1373for (i = 0; i < 3; ++i) {1374int use_LUT = 1;1375// Set each element in buffer to max uint16_t.1376memset(buffer, 0xff, sizeof(buffer));1377for (j = 0; j < palette_size; ++j) {1378const uint32_t ind = hash_functions[i](palette[j]);1379if (buffer[ind] != 0xffffu) {1380use_LUT = 0;1381break;1382} else {1383buffer[ind] = j;1384}1385}1386if (use_LUT) break;1387}13881389if (i == 0) {1390APPLY_PALETTE_FOR(buffer[ApplyPaletteHash0(pix)]);1391} else if (i == 1) {1392APPLY_PALETTE_FOR(buffer[ApplyPaletteHash1(pix)]);1393} else if (i == 2) {1394APPLY_PALETTE_FOR(buffer[ApplyPaletteHash2(pix)]);1395} else {1396uint32_t idx_map[MAX_PALETTE_SIZE];1397uint32_t palette_sorted[MAX_PALETTE_SIZE];1398PrepareMapToPalette(palette, palette_size, palette_sorted, idx_map);1399APPLY_PALETTE_FOR(1400idx_map[SearchColorNoIdx(palette_sorted, pix, palette_size)]);1401}1402}1403WebPSafeFree(tmp_row);1404return 1;1405}1406#undef APPLY_PALETTE_FOR1407#undef PALETTE_INV_SIZE_BITS1408#undef PALETTE_INV_SIZE1409#undef APPLY_PALETTE_GREEDY_MAX14101411// Note: Expects "enc->palette" to be set properly.1412static int 
MapImageFromPalette(VP8LEncoder* const enc) {1413const WebPPicture* const pic = enc->pic;1414const int width = pic->width;1415const int height = pic->height;1416const uint32_t* const palette = enc->palette;1417const int palette_size = enc->palette_size;1418int xbits;14191420// Replace each input pixel by corresponding palette index.1421// This is done line by line.1422if (palette_size <= 4) {1423xbits = (palette_size <= 2) ? 3 : 2;1424} else {1425xbits = (palette_size <= 16) ? 1 : 0;1426}14271428if (!AllocateTransformBuffer(enc, VP8LSubSampleSize(width, xbits), height)) {1429return 0;1430}1431if (!ApplyPalette(pic->argb, pic->argb_stride, enc->argb,1432enc->current_width, palette, palette_size, width, height,1433xbits, pic)) {1434return 0;1435}1436enc->argb_content = kEncoderPalette;1437return 1;1438}14391440// Save palette[] to bitstream.1441static int EncodePalette(VP8LBitWriter* const bw, int low_effort,1442VP8LEncoder* const enc, int percent_range,1443int* const percent) {1444int i;1445uint32_t tmp_palette[MAX_PALETTE_SIZE];1446const int palette_size = enc->palette_size;1447const uint32_t* const palette = enc->palette;1448// If the last element is 0, do not store it and count on automatic palette1449// 0-filling. This can only happen if there is no pixel packing, hence if1450// there are strictly more than 16 colors (after 0 is removed).1451const uint32_t encoded_palette_size =1452(enc->palette[palette_size - 1] == 0 && palette_size > 17)1453? palette_size - 11454: palette_size;1455VP8LPutBits(bw, TRANSFORM_PRESENT, 1);1456VP8LPutBits(bw, COLOR_INDEXING_TRANSFORM, 2);1457assert(palette_size >= 1 && palette_size <= MAX_PALETTE_SIZE);1458VP8LPutBits(bw, encoded_palette_size - 1, 8);1459for (i = encoded_palette_size - 1; i >= 1; --i) {1460tmp_palette[i] = VP8LSubPixels(palette[i], palette[i - 1]);1461}1462tmp_palette[0] = palette[0];1463return EncodeImageNoHuffman(1464bw, tmp_palette, &enc->hash_chain, &enc->refs[0], encoded_palette_size,14651, /*quality=*/20, low_effort, enc->pic, percent_range, percent);1466}14671468// -----------------------------------------------------------------------------1469// VP8LEncoder14701471static VP8LEncoder* VP8LEncoderNew(const WebPConfig* const config,1472const WebPPicture* const picture) {1473VP8LEncoder* const enc = (VP8LEncoder*)WebPSafeCalloc(1ULL, sizeof(*enc));1474if (enc == NULL) {1475WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);1476return NULL;1477}1478enc->config = config;1479enc->pic = picture;1480enc->argb_content = kEncoderNone;14811482VP8LEncDspInit();14831484return enc;1485}14861487static void VP8LEncoderDelete(VP8LEncoder* enc) {1488if (enc != NULL) {1489int i;1490VP8LHashChainClear(&enc->hash_chain);1491for (i = 0; i < 4; ++i) VP8LBackwardRefsClear(&enc->refs[i]);1492ClearTransformBuffer(enc);1493WebPSafeFree(enc);1494}1495}14961497// -----------------------------------------------------------------------------1498// Main call14991500typedef struct {1501const WebPConfig* config;1502const WebPPicture* picture;1503VP8LBitWriter* bw;1504VP8LEncoder* enc;1505CrunchConfig crunch_configs[CRUNCH_CONFIGS_MAX];1506int num_crunch_configs;1507int red_and_blue_always_zero;1508WebPAuxStats* stats;1509} StreamEncodeContext;15101511static int EncodeStreamHook(void* input, void* data2) {1512StreamEncodeContext* const params = (StreamEncodeContext*)input;1513const WebPConfig* const config = params->config;1514const WebPPicture* const picture = params->picture;1515VP8LBitWriter* const bw = params->bw;1516VP8LEncoder* const enc = 
params->enc;1517const CrunchConfig* const crunch_configs = params->crunch_configs;1518const int num_crunch_configs = params->num_crunch_configs;1519const int red_and_blue_always_zero = params->red_and_blue_always_zero;1520#if !defined(WEBP_DISABLE_STATS)1521WebPAuxStats* const stats = params->stats;1522#endif1523const int quality = (int)config->quality;1524const int low_effort = (config->method == 0);1525#if (WEBP_NEAR_LOSSLESS == 1)1526const int width = picture->width;1527#endif1528const int height = picture->height;1529const size_t byte_position = VP8LBitWriterNumBytes(bw);1530int percent = 2; // for WebPProgressHook1531#if (WEBP_NEAR_LOSSLESS == 1)1532int use_near_lossless = 0;1533#endif1534int hdr_size = 0;1535int data_size = 0;1536int idx;1537size_t best_size = ~(size_t)0;1538VP8LBitWriter bw_init = *bw, bw_best;1539(void)data2;15401541if (!VP8LBitWriterInit(&bw_best, 0) ||1542(num_crunch_configs > 1 && !VP8LBitWriterClone(bw, &bw_best))) {1543WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);1544goto Error;1545}15461547for (idx = 0; idx < num_crunch_configs; ++idx) {1548const int entropy_idx = crunch_configs[idx].entropy_idx;1549int remaining_percent = 97 / num_crunch_configs, percent_range;1550int predictor_transform_bits = 0, cross_color_transform_bits = 0;1551enc->use_palette =1552(entropy_idx == kPalette) || (entropy_idx == kPaletteAndSpatial);1553enc->use_subtract_green =1554(entropy_idx == kSubGreen) || (entropy_idx == kSpatialSubGreen);1555enc->use_predict = (entropy_idx == kSpatial) ||1556(entropy_idx == kSpatialSubGreen) ||1557(entropy_idx == kPaletteAndSpatial);1558// When using a palette, R/B==0, hence no need to test for cross-color.1559if (low_effort || enc->use_palette) {1560enc->use_cross_color = 0;1561} else {1562enc->use_cross_color = red_and_blue_always_zero ? 
0 : enc->use_predict;1563}1564// Reset any parameter in the encoder that is set in the previous iteration.1565enc->cache_bits = 0;1566VP8LBackwardRefsClear(&enc->refs[0]);1567VP8LBackwardRefsClear(&enc->refs[1]);15681569#if (WEBP_NEAR_LOSSLESS == 1)1570// Apply near-lossless preprocessing.1571use_near_lossless = (config->near_lossless < 100) && !enc->use_palette &&1572!enc->use_predict;1573if (use_near_lossless) {1574if (!AllocateTransformBuffer(enc, width, height)) goto Error;1575if ((enc->argb_content != kEncoderNearLossless) &&1576!VP8ApplyNearLossless(picture, config->near_lossless, enc->argb)) {1577WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);1578goto Error;1579}1580enc->argb_content = kEncoderNearLossless;1581} else {1582enc->argb_content = kEncoderNone;1583}1584#else1585enc->argb_content = kEncoderNone;1586#endif15871588// Encode palette1589if (enc->use_palette) {1590if (!PaletteSort(crunch_configs[idx].palette_sorting_type, enc->pic,1591enc->palette_sorted, enc->palette_size,1592enc->palette)) {1593WebPEncodingSetError(enc->pic, VP8_ENC_ERROR_OUT_OF_MEMORY);1594goto Error;1595}1596percent_range = remaining_percent / 4;1597if (!EncodePalette(bw, low_effort, enc, percent_range, &percent)) {1598goto Error;1599}1600remaining_percent -= percent_range;1601if (!MapImageFromPalette(enc)) goto Error;1602// If using a color cache, do not have it bigger than the number of1603// colors.1604if (enc->palette_size < (1 << MAX_COLOR_CACHE_BITS)) {1605enc->cache_bits = BitsLog2Floor(enc->palette_size) + 1;1606}1607}1608// In case image is not packed.1609if (enc->argb_content != kEncoderNearLossless &&1610enc->argb_content != kEncoderPalette) {1611if (!MakeInputImageCopy(enc)) goto Error;1612}16131614// -------------------------------------------------------------------------1615// Apply transforms and write transform data.16161617if (enc->use_subtract_green) {1618ApplySubtractGreen(enc, enc->current_width, height, bw);1619}16201621if (enc->use_predict) {1622percent_range = remaining_percent / 3;1623if (!ApplyPredictFilter(enc, enc->current_width, height, quality,1624low_effort, enc->use_subtract_green, bw,1625percent_range, &percent,1626&predictor_transform_bits)) {1627goto Error;1628}1629remaining_percent -= percent_range;1630}16311632if (enc->use_cross_color) {1633percent_range = remaining_percent / 2;1634if (!ApplyCrossColorFilter(enc, enc->current_width, height, quality,1635low_effort, bw, percent_range, &percent,1636&cross_color_transform_bits)) {1637goto Error;1638}1639remaining_percent -= percent_range;1640}16411642VP8LPutBits(bw, !TRANSFORM_PRESENT, 1); // No more transforms.16431644// -------------------------------------------------------------------------1645// Encode and write the transformed image.1646if (!EncodeImageInternal(1647bw, enc->argb, &enc->hash_chain, enc->refs, enc->current_width,1648height, quality, low_effort, &crunch_configs[idx],1649&enc->cache_bits, enc->histo_bits, byte_position, &hdr_size,1650&data_size, picture, remaining_percent, &percent)) {1651goto Error;1652}16531654// If we are better than what we already have.1655if (VP8LBitWriterNumBytes(bw) < best_size) {1656best_size = VP8LBitWriterNumBytes(bw);1657// Store the BitWriter.1658VP8LBitWriterSwap(bw, &bw_best);1659#if !defined(WEBP_DISABLE_STATS)1660// Update the stats.1661if (stats != NULL) {1662stats->lossless_features = 0;1663if (enc->use_predict) stats->lossless_features |= 1;1664if (enc->use_cross_color) stats->lossless_features |= 2;1665if (enc->use_subtract_green) stats->lossless_features 
    // If we are better than what we already have.
    if (VP8LBitWriterNumBytes(bw) < best_size) {
      best_size = VP8LBitWriterNumBytes(bw);
      // Store the BitWriter.
      VP8LBitWriterSwap(bw, &bw_best);
#if !defined(WEBP_DISABLE_STATS)
      // Update the stats.
      if (stats != NULL) {
        stats->lossless_features = 0;
        if (enc->use_predict) stats->lossless_features |= 1;
        if (enc->use_cross_color) stats->lossless_features |= 2;
        if (enc->use_subtract_green) stats->lossless_features |= 4;
        if (enc->use_palette) stats->lossless_features |= 8;
        stats->histogram_bits = enc->histo_bits;
        stats->transform_bits = predictor_transform_bits;
        stats->cross_color_transform_bits = cross_color_transform_bits;
        stats->cache_bits = enc->cache_bits;
        stats->palette_size = enc->palette_size;
        stats->lossless_size = (int)(best_size - byte_position);
        stats->lossless_hdr_size = hdr_size;
        stats->lossless_data_size = data_size;
      }
#endif
    }
    // Reset the bit writer for the following iteration if any.
    if (num_crunch_configs > 1) VP8LBitWriterReset(&bw_init, bw);
  }
  VP8LBitWriterSwap(&bw_best, bw);

 Error:
  VP8LBitWriterWipeOut(&bw_best);
  // The hook should return false in case of error.
  return (params->picture->error_code == VP8_ENC_OK);
}

int VP8LEncodeStream(const WebPConfig* const config,
                     const WebPPicture* const picture,
                     VP8LBitWriter* const bw_main) {
  VP8LEncoder* const enc_main = VP8LEncoderNew(config, picture);
  VP8LEncoder* enc_side = NULL;
  CrunchConfig crunch_configs[CRUNCH_CONFIGS_MAX];
  int num_crunch_configs_main, num_crunch_configs_side = 0;
  int idx;
  int red_and_blue_always_zero = 0;
  WebPWorker worker_main, worker_side;
  StreamEncodeContext params_main, params_side;
  // The main thread uses picture->stats, the side thread uses stats_side.
  WebPAuxStats stats_side;
  VP8LBitWriter bw_side;
  WebPPicture picture_side;
  const WebPWorkerInterface* const worker_interface = WebPGetWorkerInterface();
  int ok_main;

  if (enc_main == NULL || !VP8LBitWriterInit(&bw_side, 0)) {
    VP8LEncoderDelete(enc_main);
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
  }

  // Avoid "garbage value" error from Clang's static analysis tool.
  if (!WebPPictureInit(&picture_side)) {
    goto Error;
  }

  // Analyze image (entropy, num_palettes etc)
  if (!EncoderAnalyze(enc_main, crunch_configs, &num_crunch_configs_main,
                      &red_and_blue_always_zero) ||
      !EncoderInit(enc_main)) {
    WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  // Split the configs between the main and side threads (if any).
  if (config->thread_level > 0) {
    num_crunch_configs_side = num_crunch_configs_main / 2;
    for (idx = 0; idx < num_crunch_configs_side; ++idx) {
      params_side.crunch_configs[idx] =
          crunch_configs[num_crunch_configs_main - num_crunch_configs_side +
                         idx];
    }
    params_side.num_crunch_configs = num_crunch_configs_side;
  }
  num_crunch_configs_main -= num_crunch_configs_side;
  for (idx = 0; idx < num_crunch_configs_main; ++idx) {
    params_main.crunch_configs[idx] = crunch_configs[idx];
  }
  params_main.num_crunch_configs = num_crunch_configs_main;

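  // Each worker gets its own StreamEncodeContext: the main worker reuses
  // 'picture', 'bw_main' and 'enc_main' directly, while the side worker (if
  // any) operates on a picture view, a cloned bit writer and a separate
  // encoder so that no mutable state is shared between threads.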
  // Fill in the parameters for the thread workers.
  {
    const int params_size = (num_crunch_configs_side > 0) ? 2 : 1;
    for (idx = 0; idx < params_size; ++idx) {
      // Create the parameters for each worker.
      WebPWorker* const worker = (idx == 0) ? &worker_main : &worker_side;
      StreamEncodeContext* const param =
          (idx == 0) ? &params_main : &params_side;
      param->config = config;
      param->red_and_blue_always_zero = red_and_blue_always_zero;
      if (idx == 0) {
        param->picture = picture;
        param->stats = picture->stats;
        param->bw = bw_main;
        param->enc = enc_main;
      } else {
        // Create a side picture (error_code is not thread-safe).
        if (!WebPPictureView(picture, /*left=*/0, /*top=*/0, picture->width,
                             picture->height, &picture_side)) {
          assert(0);
        }
        picture_side.progress_hook = NULL;  // Progress hook is not thread-safe.
        param->picture = &picture_side;  // No need to free a view afterwards.
        param->stats = (picture->stats == NULL) ? NULL : &stats_side;
        // Create a side bit writer.
        if (!VP8LBitWriterClone(bw_main, &bw_side)) {
          WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
          goto Error;
        }
        param->bw = &bw_side;
        // Create a side encoder.
        enc_side = VP8LEncoderNew(config, &picture_side);
        if (enc_side == NULL || !EncoderInit(enc_side)) {
          WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
          goto Error;
        }
        // Copy the values that were computed for the main encoder.
        enc_side->histo_bits = enc_main->histo_bits;
        enc_side->predictor_transform_bits =
            enc_main->predictor_transform_bits;
        enc_side->cross_color_transform_bits =
            enc_main->cross_color_transform_bits;
        enc_side->palette_size = enc_main->palette_size;
        memcpy(enc_side->palette, enc_main->palette,
               sizeof(enc_main->palette));
        memcpy(enc_side->palette_sorted, enc_main->palette_sorted,
               sizeof(enc_main->palette_sorted));
        param->enc = enc_side;
      }
      // Create the workers.
      worker_interface->Init(worker);
      worker->data1 = param;
      worker->data2 = NULL;
      worker->hook = EncodeStreamHook;
    }
  }

  // Start the second thread if needed.
  if (num_crunch_configs_side != 0) {
    if (!worker_interface->Reset(&worker_side)) {
      WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
      goto Error;
    }
#if !defined(WEBP_DISABLE_STATS)
    // This line is here and not in the param initialization above to remove a
    // Clang static analyzer warning.
    if (picture->stats != NULL) {
      memcpy(&stats_side, picture->stats, sizeof(stats_side));
    }
#endif
    worker_interface->Launch(&worker_side);
  }
  // Execute the main thread.
  worker_interface->Execute(&worker_main);
  ok_main = worker_interface->Sync(&worker_main);
  worker_interface->End(&worker_main);
  if (num_crunch_configs_side != 0) {
    // Wait for the second thread.
    const int ok_side = worker_interface->Sync(&worker_side);
    worker_interface->End(&worker_side);
    if (!ok_main || !ok_side) {
      if (picture->error_code == VP8_ENC_OK) {
        assert(picture_side.error_code != VP8_ENC_OK);
        WebPEncodingSetError(picture, picture_side.error_code);
      }
      goto Error;
    }
    if (VP8LBitWriterNumBytes(&bw_side) < VP8LBitWriterNumBytes(bw_main)) {
      VP8LBitWriterSwap(bw_main, &bw_side);
#if !defined(WEBP_DISABLE_STATS)
      if (picture->stats != NULL) {
        memcpy(picture->stats, &stats_side, sizeof(*picture->stats));
      }
#endif
    }
  }

 Error:
  VP8LBitWriterWipeOut(&bw_side);
  VP8LEncoderDelete(enc_main);
  VP8LEncoderDelete(enc_side);
  return (picture->error_code == VP8_ENC_OK);
}

#undef CRUNCH_CONFIGS_MAX
#undef CRUNCH_SUBCONFIGS_MAX

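// Illustrative only: a minimal sketch of how this entry point is typically
// reached through the public API (error handling omitted). VP8LEncodeImage()
// is normally invoked by WebPEncode() when 'config->lossless' is set, rather
// than called directly:
//
//   WebPConfig config;
//   WebPPicture pic;
//   WebPMemoryWriter wrt;
//   WebPConfigInit(&config);
//   config.lossless = 1;
//   WebPPictureInit(&pic);
//   pic.use_argb = 1;            // lossless operates on ARGB input
//   pic.width = width;
//   pic.height = height;
//   WebPPictureImportRGBA(&pic, rgba, rgba_stride);
//   WebPMemoryWriterInit(&wrt);
//   pic.writer = WebPMemoryWrite;
//   pic.custom_ptr = &wrt;
//   WebPEncode(&config, &pic);   // dispatches to the lossless encoder
//   // ... use wrt.mem / wrt.size, then:
//   WebPMemoryWriterClear(&wrt);
//   WebPPictureFree(&pic);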
int VP8LEncodeImage(const WebPConfig* const config,
                    const WebPPicture* const picture) {
  int width, height;
  int has_alpha;
  size_t coded_size;
  int percent = 0;
  int initial_size;
  VP8LBitWriter bw;

  if (picture == NULL) return 0;

  if (config == NULL || picture->argb == NULL) {
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_NULL_PARAMETER);
  }

  width = picture->width;
  height = picture->height;
  // Initialize the BitWriter with a size corresponding to 16 bpp for photo
  // images and 8 bpp for graphical images.
  initial_size = (config->image_hint == WEBP_HINT_GRAPH) ?
      width * height : width * height * 2;
  if (!VP8LBitWriterInit(&bw, initial_size)) {
    WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  if (!WebPReportProgress(picture, 1, &percent)) {
 UserAbort:
    WebPEncodingSetError(picture, VP8_ENC_ERROR_USER_ABORT);
    goto Error;
  }
  // Reset stats (for pure lossless coding).
  if (picture->stats != NULL) {
    WebPAuxStats* const stats = picture->stats;
    memset(stats, 0, sizeof(*stats));
    stats->PSNR[0] = 99.f;
    stats->PSNR[1] = 99.f;
    stats->PSNR[2] = 99.f;
    stats->PSNR[3] = 99.f;
    stats->PSNR[4] = 99.f;
  }

  // Write image size.
  if (!WriteImageSize(picture, &bw)) {
    WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  has_alpha = WebPPictureHasTransparency(picture);
  // Write the non-trivial Alpha flag and lossless version.
  if (!WriteRealAlphaAndVersion(&bw, has_alpha)) {
    WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  if (!WebPReportProgress(picture, 2, &percent)) goto UserAbort;

  // Encode main image stream.
  if (!VP8LEncodeStream(config, picture, &bw)) goto Error;

  if (!WebPReportProgress(picture, 99, &percent)) goto UserAbort;

  // Finish the RIFF chunk.
  if (!WriteImage(picture, &bw, &coded_size)) goto Error;

  if (!WebPReportProgress(picture, 100, &percent)) goto UserAbort;

#if !defined(WEBP_DISABLE_STATS)
  // Save size.
  if (picture->stats != NULL) {
    picture->stats->coded_size += (int)coded_size;
    picture->stats->lossless_size = (int)coded_size;
  }
#endif

  if (picture->extra_info != NULL) {
    const int mb_w = (width + 15) >> 4;
    const int mb_h = (height + 15) >> 4;
    memset(picture->extra_info, 0, mb_w * mb_h * sizeof(*picture->extra_info));
  }

 Error:
  if (bw.error) {
    WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
  }
  VP8LBitWriterWipeOut(&bw);
  return (picture->error_code == VP8_ENC_OK);
}

//------------------------------------------------------------------------------