Path: thirdparty/libwebp/src/enc/vp8l_enc.c
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the COPYING file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
// -----------------------------------------------------------------------------
//
// main entry for the lossless encoder.
//
// Author: Vikas Arora ([email protected])
//

#include <assert.h>
#include <stdlib.h>

#include "src/dsp/lossless.h"
#include "src/dsp/lossless_common.h"
#include "src/enc/backward_references_enc.h"
#include "src/enc/histogram_enc.h"
#include "src/enc/vp8i_enc.h"
#include "src/enc/vp8li_enc.h"
#include "src/utils/bit_writer_utils.h"
#include "src/utils/huffman_encode_utils.h"
#include "src/utils/palette.h"
#include "src/utils/utils.h"
#include "src/webp/encode.h"
#include "src/webp/format_constants.h"

// Maximum number of histogram images (sub-blocks).
#define MAX_HUFF_IMAGE_SIZE 2600
#define MAX_HUFFMAN_BITS (MIN_HUFFMAN_BITS + (1 << NUM_HUFFMAN_BITS) - 1)
// Empirical value for which it becomes too computationally expensive to
// compute the best predictor image.
#define MAX_PREDICTOR_IMAGE_SIZE (1 << 14)

// -----------------------------------------------------------------------------
// Palette

// These five modes are evaluated and their respective entropy is computed.
typedef enum {
  kDirect = 0,
  kSpatial = 1,
  kSubGreen = 2,
  kSpatialSubGreen = 3,
  kPalette = 4,
  kPaletteAndSpatial = 5,
  kNumEntropyIx = 6
} EntropyIx;

typedef enum {
  kHistoAlpha = 0,
  kHistoAlphaPred,
  kHistoGreen,
  kHistoGreenPred,
  kHistoRed,
  kHistoRedPred,
  kHistoBlue,
  kHistoBluePred,
  kHistoRedSubGreen,
  kHistoRedPredSubGreen,
  kHistoBlueSubGreen,
  kHistoBluePredSubGreen,
  kHistoPalette,
  kHistoTotal // Must be last.
} HistoIx;

static void AddSingleSubGreen(uint32_t p,
                              uint32_t* const r, uint32_t* const b) {
  const int green = (int)p >> 8; // The upper bits are masked away later.
  ++r[(((int)p >> 16) - green) & 0xff];
  ++b[(((int)p >> 0) - green) & 0xff];
}

static void AddSingle(uint32_t p,
                      uint32_t* const a, uint32_t* const r,
                      uint32_t* const g, uint32_t* const b) {
  ++a[(p >> 24) & 0xff];
  ++r[(p >> 16) & 0xff];
  ++g[(p >> 8) & 0xff];
  ++b[(p >> 0) & 0xff];
}

static WEBP_INLINE uint32_t HashPix(uint32_t pix) {
  // Note that masking with 0xffffffffu is for preventing an
  // 'unsigned int overflow' warning. Doesn't impact the compiled code.
  return ((((uint64_t)pix + (pix >> 19)) * 0x39c5fba7ull) & 0xffffffffu) >> 24;
}

static int AnalyzeEntropy(const uint32_t* argb,
                          int width, int height, int argb_stride,
                          int use_palette,
                          int palette_size, int transform_bits,
                          EntropyIx* const min_entropy_ix,
                          int* const red_and_blue_always_zero) {
  // Allocate histogram set with cache_bits = 0.
  uint32_t* histo;

  if (use_palette && palette_size <= 16) {
    // In the case of small palettes, we pack 2, 4 or 8 pixels together. 
In101// practice, small palettes are better than any other transform.102*min_entropy_ix = kPalette;103*red_and_blue_always_zero = 1;104return 1;105}106histo = (uint32_t*)WebPSafeCalloc(kHistoTotal, sizeof(*histo) * 256);107if (histo != NULL) {108int i, x, y;109const uint32_t* prev_row = NULL;110const uint32_t* curr_row = argb;111uint32_t pix_prev = argb[0]; // Skip the first pixel.112for (y = 0; y < height; ++y) {113for (x = 0; x < width; ++x) {114const uint32_t pix = curr_row[x];115const uint32_t pix_diff = VP8LSubPixels(pix, pix_prev);116pix_prev = pix;117if ((pix_diff == 0) || (prev_row != NULL && pix == prev_row[x])) {118continue;119}120AddSingle(pix,121&histo[kHistoAlpha * 256],122&histo[kHistoRed * 256],123&histo[kHistoGreen * 256],124&histo[kHistoBlue * 256]);125AddSingle(pix_diff,126&histo[kHistoAlphaPred * 256],127&histo[kHistoRedPred * 256],128&histo[kHistoGreenPred * 256],129&histo[kHistoBluePred * 256]);130AddSingleSubGreen(pix,131&histo[kHistoRedSubGreen * 256],132&histo[kHistoBlueSubGreen * 256]);133AddSingleSubGreen(pix_diff,134&histo[kHistoRedPredSubGreen * 256],135&histo[kHistoBluePredSubGreen * 256]);136{137// Approximate the palette by the entropy of the multiplicative hash.138const uint32_t hash = HashPix(pix);139++histo[kHistoPalette * 256 + hash];140}141}142prev_row = curr_row;143curr_row += argb_stride;144}145{146uint64_t entropy_comp[kHistoTotal];147uint64_t entropy[kNumEntropyIx];148int k;149int last_mode_to_analyze = use_palette ? kPalette : kSpatialSubGreen;150int j;151// Let's add one zero to the predicted histograms. The zeros are removed152// too efficiently by the pix_diff == 0 comparison, at least one of the153// zeros is likely to exist.154++histo[kHistoRedPredSubGreen * 256];155++histo[kHistoBluePredSubGreen * 256];156++histo[kHistoRedPred * 256];157++histo[kHistoGreenPred * 256];158++histo[kHistoBluePred * 256];159++histo[kHistoAlphaPred * 256];160161for (j = 0; j < kHistoTotal; ++j) {162entropy_comp[j] = VP8LBitsEntropy(&histo[j * 256], 256);163}164entropy[kDirect] = entropy_comp[kHistoAlpha] +165entropy_comp[kHistoRed] +166entropy_comp[kHistoGreen] +167entropy_comp[kHistoBlue];168entropy[kSpatial] = entropy_comp[kHistoAlphaPred] +169entropy_comp[kHistoRedPred] +170entropy_comp[kHistoGreenPred] +171entropy_comp[kHistoBluePred];172entropy[kSubGreen] = entropy_comp[kHistoAlpha] +173entropy_comp[kHistoRedSubGreen] +174entropy_comp[kHistoGreen] +175entropy_comp[kHistoBlueSubGreen];176entropy[kSpatialSubGreen] = entropy_comp[kHistoAlphaPred] +177entropy_comp[kHistoRedPredSubGreen] +178entropy_comp[kHistoGreenPred] +179entropy_comp[kHistoBluePredSubGreen];180entropy[kPalette] = entropy_comp[kHistoPalette];181182// When including transforms, there is an overhead in bits from183// storing them. 
This overhead is small but matters for small images.184// For spatial, there are 14 transformations.185entropy[kSpatial] += (uint64_t)VP8LSubSampleSize(width, transform_bits) *186VP8LSubSampleSize(height, transform_bits) *187VP8LFastLog2(14);188// For color transforms: 24 as only 3 channels are considered in a189// ColorTransformElement.190entropy[kSpatialSubGreen] +=191(uint64_t)VP8LSubSampleSize(width, transform_bits) *192VP8LSubSampleSize(height, transform_bits) * VP8LFastLog2(24);193// For palettes, add the cost of storing the palette.194// We empirically estimate the cost of a compressed entry as 8 bits.195// The palette is differential-coded when compressed hence a much196// lower cost than sizeof(uint32_t)*8.197entropy[kPalette] += (palette_size * 8ull) << LOG_2_PRECISION_BITS;198199*min_entropy_ix = kDirect;200for (k = kDirect + 1; k <= last_mode_to_analyze; ++k) {201if (entropy[*min_entropy_ix] > entropy[k]) {202*min_entropy_ix = (EntropyIx)k;203}204}205assert((int)*min_entropy_ix <= last_mode_to_analyze);206*red_and_blue_always_zero = 1;207// Let's check if the histogram of the chosen entropy mode has208// non-zero red and blue values. If all are zero, we can later skip209// the cross color optimization.210{211static const uint8_t kHistoPairs[5][2] = {212{ kHistoRed, kHistoBlue },213{ kHistoRedPred, kHistoBluePred },214{ kHistoRedSubGreen, kHistoBlueSubGreen },215{ kHistoRedPredSubGreen, kHistoBluePredSubGreen },216{ kHistoRed, kHistoBlue }217};218const uint32_t* const red_histo =219&histo[256 * kHistoPairs[*min_entropy_ix][0]];220const uint32_t* const blue_histo =221&histo[256 * kHistoPairs[*min_entropy_ix][1]];222for (i = 1; i < 256; ++i) {223if ((red_histo[i] | blue_histo[i]) != 0) {224*red_and_blue_always_zero = 0;225break;226}227}228}229}230WebPSafeFree(histo);231return 1;232} else {233return 0;234}235}236237// Clamp histogram and transform bits.238static int ClampBits(int width, int height, int bits, int min_bits,239int max_bits, int image_size_max) {240int image_size;241bits = (bits < min_bits) ? min_bits : (bits > max_bits) ? max_bits : bits;242image_size = VP8LSubSampleSize(width, bits) * VP8LSubSampleSize(height, bits);243while (bits < max_bits && image_size > image_size_max) {244++bits;245image_size =246VP8LSubSampleSize(width, bits) * VP8LSubSampleSize(height, bits);247}248// In case the bits reduce the image too much, choose the smallest value249// setting the histogram image size to 1.250while (bits > min_bits && image_size == 1) {251image_size = VP8LSubSampleSize(width, bits - 1) *252VP8LSubSampleSize(height, bits - 1);253if (image_size != 1) break;254--bits;255}256return bits;257}258259static int GetHistoBits(int method, int use_palette, int width, int height) {260// Make tile size a function of encoding method (Range: 0 to 6).261const int histo_bits = (use_palette ? 9 : 7) - method;262return ClampBits(width, height, histo_bits, MIN_HUFFMAN_BITS,263MAX_HUFFMAN_BITS, MAX_HUFF_IMAGE_SIZE);264}265266static int GetTransformBits(int method, int histo_bits) {267const int max_transform_bits = (method < 4) ? 6 : (method > 4) ? 4 : 5;268const int res =269(histo_bits > max_transform_bits) ? 
max_transform_bits : histo_bits;270assert(res <= MAX_TRANSFORM_BITS);271return res;272}273274// Set of parameters to be used in each iteration of the cruncher.275#define CRUNCH_SUBCONFIGS_MAX 2276typedef struct {277int lz77_;278int do_no_cache_;279} CrunchSubConfig;280typedef struct {281int entropy_idx_;282PaletteSorting palette_sorting_type_;283CrunchSubConfig sub_configs_[CRUNCH_SUBCONFIGS_MAX];284int sub_configs_size_;285} CrunchConfig;286287// +2 because we add a palette sorting configuration for kPalette and288// kPaletteAndSpatial.289#define CRUNCH_CONFIGS_MAX (kNumEntropyIx + 2 * kPaletteSortingNum)290291static int EncoderAnalyze(VP8LEncoder* const enc,292CrunchConfig crunch_configs[CRUNCH_CONFIGS_MAX],293int* const crunch_configs_size,294int* const red_and_blue_always_zero) {295const WebPPicture* const pic = enc->pic_;296const int width = pic->width;297const int height = pic->height;298const WebPConfig* const config = enc->config_;299const int method = config->method;300const int low_effort = (config->method == 0);301int i;302int use_palette, transform_bits;303int n_lz77s;304// If set to 0, analyze the cache with the computed cache value. If 1, also305// analyze with no-cache.306int do_no_cache = 0;307assert(pic != NULL && pic->argb != NULL);308309// Check whether a palette is possible.310enc->palette_size_ = GetColorPalette(pic, enc->palette_sorted_);311use_palette = (enc->palette_size_ <= MAX_PALETTE_SIZE);312if (!use_palette) {313enc->palette_size_ = 0;314}315316// Empirical bit sizes.317enc->histo_bits_ = GetHistoBits(method, use_palette,318pic->width, pic->height);319transform_bits = GetTransformBits(method, enc->histo_bits_);320enc->predictor_transform_bits_ = transform_bits;321enc->cross_color_transform_bits_ = transform_bits;322323if (low_effort) {324// AnalyzeEntropy is somewhat slow.325crunch_configs[0].entropy_idx_ = use_palette ? kPalette : kSpatialSubGreen;326crunch_configs[0].palette_sorting_type_ =327use_palette ? kSortedDefault : kUnusedPalette;328n_lz77s = 1;329*crunch_configs_size = 1;330} else {331EntropyIx min_entropy_ix;332// Try out multiple LZ77 on images with few colors.333n_lz77s = (enc->palette_size_ > 0 && enc->palette_size_ <= 16) ? 2 : 1;334if (!AnalyzeEntropy(pic->argb, width, height, pic->argb_stride, use_palette,335enc->palette_size_, transform_bits, &min_entropy_ix,336red_and_blue_always_zero)) {337return 0;338}339if (method == 6 && config->quality == 100) {340do_no_cache = 1;341// Go brute force on all transforms.342*crunch_configs_size = 0;343for (i = 0; i < kNumEntropyIx; ++i) {344// We can only apply kPalette or kPaletteAndSpatial if we can indeed use345// a palette.346if ((i != kPalette && i != kPaletteAndSpatial) || use_palette) {347assert(*crunch_configs_size < CRUNCH_CONFIGS_MAX);348if (use_palette && (i == kPalette || i == kPaletteAndSpatial)) {349int sorting_method;350for (sorting_method = 0; sorting_method < kPaletteSortingNum;351++sorting_method) {352const PaletteSorting typed_sorting_method =353(PaletteSorting)sorting_method;354// TODO(vrabaud) kSortedDefault should be tested. 
It is omitted355// for now for backward compatibility.356if (typed_sorting_method == kUnusedPalette ||357typed_sorting_method == kSortedDefault) {358continue;359}360crunch_configs[(*crunch_configs_size)].entropy_idx_ = i;361crunch_configs[(*crunch_configs_size)].palette_sorting_type_ =362typed_sorting_method;363++*crunch_configs_size;364}365} else {366crunch_configs[(*crunch_configs_size)].entropy_idx_ = i;367crunch_configs[(*crunch_configs_size)].palette_sorting_type_ =368kUnusedPalette;369++*crunch_configs_size;370}371}372}373} else {374// Only choose the guessed best transform.375*crunch_configs_size = 1;376crunch_configs[0].entropy_idx_ = min_entropy_ix;377crunch_configs[0].palette_sorting_type_ =378use_palette ? kMinimizeDelta : kUnusedPalette;379if (config->quality >= 75 && method == 5) {380// Test with and without color cache.381do_no_cache = 1;382// If we have a palette, also check in combination with spatial.383if (min_entropy_ix == kPalette) {384*crunch_configs_size = 2;385crunch_configs[1].entropy_idx_ = kPaletteAndSpatial;386crunch_configs[1].palette_sorting_type_ = kMinimizeDelta;387}388}389}390}391// Fill in the different LZ77s.392assert(n_lz77s <= CRUNCH_SUBCONFIGS_MAX);393for (i = 0; i < *crunch_configs_size; ++i) {394int j;395for (j = 0; j < n_lz77s; ++j) {396assert(j < CRUNCH_SUBCONFIGS_MAX);397crunch_configs[i].sub_configs_[j].lz77_ =398(j == 0) ? kLZ77Standard | kLZ77RLE : kLZ77Box;399crunch_configs[i].sub_configs_[j].do_no_cache_ = do_no_cache;400}401crunch_configs[i].sub_configs_size_ = n_lz77s;402}403return 1;404}405406static int EncoderInit(VP8LEncoder* const enc) {407const WebPPicture* const pic = enc->pic_;408const int width = pic->width;409const int height = pic->height;410const int pix_cnt = width * height;411// we round the block size up, so we're guaranteed to have412// at most MAX_REFS_BLOCK_PER_IMAGE blocks used:413const int refs_block_size = (pix_cnt - 1) / MAX_REFS_BLOCK_PER_IMAGE + 1;414int i;415if (!VP8LHashChainInit(&enc->hash_chain_, pix_cnt)) return 0;416417for (i = 0; i < 4; ++i) VP8LBackwardRefsInit(&enc->refs_[i], refs_block_size);418419return 1;420}421422// Returns false in case of memory error.423static int GetHuffBitLengthsAndCodes(424const VP8LHistogramSet* const histogram_image,425HuffmanTreeCode* const huffman_codes) {426int i, k;427int ok = 0;428uint64_t total_length_size = 0;429uint8_t* mem_buf = NULL;430const int histogram_image_size = histogram_image->size;431int max_num_symbols = 0;432uint8_t* buf_rle = NULL;433HuffmanTree* huff_tree = NULL;434435// Iterate over all histograms and get the aggregate number of codes used.436for (i = 0; i < histogram_image_size; ++i) {437const VP8LHistogram* const histo = histogram_image->histograms[i];438HuffmanTreeCode* const codes = &huffman_codes[5 * i];439assert(histo != NULL);440for (k = 0; k < 5; ++k) {441const int num_symbols =442(k == 0) ? VP8LHistogramNumCodes(histo->palette_code_bits_) :443(k == 4) ? 
NUM_DISTANCE_CODES : 256;444codes[k].num_symbols = num_symbols;445total_length_size += num_symbols;446}447}448449// Allocate and Set Huffman codes.450{451uint16_t* codes;452uint8_t* lengths;453mem_buf = (uint8_t*)WebPSafeCalloc(total_length_size,454sizeof(*lengths) + sizeof(*codes));455if (mem_buf == NULL) goto End;456457codes = (uint16_t*)mem_buf;458lengths = (uint8_t*)&codes[total_length_size];459for (i = 0; i < 5 * histogram_image_size; ++i) {460const int bit_length = huffman_codes[i].num_symbols;461huffman_codes[i].codes = codes;462huffman_codes[i].code_lengths = lengths;463codes += bit_length;464lengths += bit_length;465if (max_num_symbols < bit_length) {466max_num_symbols = bit_length;467}468}469}470471buf_rle = (uint8_t*)WebPSafeMalloc(1ULL, max_num_symbols);472huff_tree = (HuffmanTree*)WebPSafeMalloc(3ULL * max_num_symbols,473sizeof(*huff_tree));474if (buf_rle == NULL || huff_tree == NULL) goto End;475476// Create Huffman trees.477for (i = 0; i < histogram_image_size; ++i) {478HuffmanTreeCode* const codes = &huffman_codes[5 * i];479VP8LHistogram* const histo = histogram_image->histograms[i];480VP8LCreateHuffmanTree(histo->literal_, 15, buf_rle, huff_tree, codes + 0);481VP8LCreateHuffmanTree(histo->red_, 15, buf_rle, huff_tree, codes + 1);482VP8LCreateHuffmanTree(histo->blue_, 15, buf_rle, huff_tree, codes + 2);483VP8LCreateHuffmanTree(histo->alpha_, 15, buf_rle, huff_tree, codes + 3);484VP8LCreateHuffmanTree(histo->distance_, 15, buf_rle, huff_tree, codes + 4);485}486ok = 1;487End:488WebPSafeFree(huff_tree);489WebPSafeFree(buf_rle);490if (!ok) {491WebPSafeFree(mem_buf);492memset(huffman_codes, 0, 5 * histogram_image_size * sizeof(*huffman_codes));493}494return ok;495}496497static void StoreHuffmanTreeOfHuffmanTreeToBitMask(498VP8LBitWriter* const bw, const uint8_t* code_length_bitdepth) {499// RFC 1951 will calm you down if you are worried about this funny sequence.500// This sequence is tuned from that, but more weighted for lower symbol count,501// and more spiking histograms.502static const uint8_t kStorageOrder[CODE_LENGTH_CODES] = {50317, 18, 0, 1, 2, 3, 4, 5, 16, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15504};505int i;506// Throw away trailing zeros:507int codes_to_store = CODE_LENGTH_CODES;508for (; codes_to_store > 4; --codes_to_store) {509if (code_length_bitdepth[kStorageOrder[codes_to_store - 1]] != 0) {510break;511}512}513VP8LPutBits(bw, codes_to_store - 4, 4);514for (i = 0; i < codes_to_store; ++i) {515VP8LPutBits(bw, code_length_bitdepth[kStorageOrder[i]], 3);516}517}518519static void ClearHuffmanTreeIfOnlyOneSymbol(520HuffmanTreeCode* const huffman_code) {521int k;522int count = 0;523for (k = 0; k < huffman_code->num_symbols; ++k) {524if (huffman_code->code_lengths[k] != 0) {525++count;526if (count > 1) return;527}528}529for (k = 0; k < huffman_code->num_symbols; ++k) {530huffman_code->code_lengths[k] = 0;531huffman_code->codes[k] = 0;532}533}534535static void StoreHuffmanTreeToBitMask(536VP8LBitWriter* const bw,537const HuffmanTreeToken* const tokens, const int num_tokens,538const HuffmanTreeCode* const huffman_code) {539int i;540for (i = 0; i < num_tokens; ++i) {541const int ix = tokens[i].code;542const int extra_bits = tokens[i].extra_bits;543VP8LPutBits(bw, huffman_code->codes[ix], huffman_code->code_lengths[ix]);544switch (ix) {545case 16:546VP8LPutBits(bw, extra_bits, 2);547break;548case 17:549VP8LPutBits(bw, extra_bits, 3);550break;551case 18:552VP8LPutBits(bw, extra_bits, 7);553break;554}555}556}557558// 'huff_tree' and 'tokens' are pre-alloacted buffers.559static 
void StoreFullHuffmanCode(VP8LBitWriter* const bw,560HuffmanTree* const huff_tree,561HuffmanTreeToken* const tokens,562const HuffmanTreeCode* const tree) {563uint8_t code_length_bitdepth[CODE_LENGTH_CODES] = { 0 };564uint16_t code_length_bitdepth_symbols[CODE_LENGTH_CODES] = { 0 };565const int max_tokens = tree->num_symbols;566int num_tokens;567HuffmanTreeCode huffman_code;568huffman_code.num_symbols = CODE_LENGTH_CODES;569huffman_code.code_lengths = code_length_bitdepth;570huffman_code.codes = code_length_bitdepth_symbols;571572VP8LPutBits(bw, 0, 1);573num_tokens = VP8LCreateCompressedHuffmanTree(tree, tokens, max_tokens);574{575uint32_t histogram[CODE_LENGTH_CODES] = { 0 };576uint8_t buf_rle[CODE_LENGTH_CODES] = { 0 };577int i;578for (i = 0; i < num_tokens; ++i) {579++histogram[tokens[i].code];580}581582VP8LCreateHuffmanTree(histogram, 7, buf_rle, huff_tree, &huffman_code);583}584585StoreHuffmanTreeOfHuffmanTreeToBitMask(bw, code_length_bitdepth);586ClearHuffmanTreeIfOnlyOneSymbol(&huffman_code);587{588int trailing_zero_bits = 0;589int trimmed_length = num_tokens;590int write_trimmed_length;591int length;592int i = num_tokens;593while (i-- > 0) {594const int ix = tokens[i].code;595if (ix == 0 || ix == 17 || ix == 18) {596--trimmed_length; // discount trailing zeros597trailing_zero_bits += code_length_bitdepth[ix];598if (ix == 17) {599trailing_zero_bits += 3;600} else if (ix == 18) {601trailing_zero_bits += 7;602}603} else {604break;605}606}607write_trimmed_length = (trimmed_length > 1 && trailing_zero_bits > 12);608length = write_trimmed_length ? trimmed_length : num_tokens;609VP8LPutBits(bw, write_trimmed_length, 1);610if (write_trimmed_length) {611if (trimmed_length == 2) {612VP8LPutBits(bw, 0, 3 + 2); // nbitpairs=1, trimmed_length=2613} else {614const int nbits = BitsLog2Floor(trimmed_length - 2);615const int nbitpairs = nbits / 2 + 1;616assert(trimmed_length > 2);617assert(nbitpairs - 1 < 8);618VP8LPutBits(bw, nbitpairs - 1, 3);619VP8LPutBits(bw, trimmed_length - 2, nbitpairs * 2);620}621}622StoreHuffmanTreeToBitMask(bw, tokens, length, &huffman_code);623}624}625626// 'huff_tree' and 'tokens' are pre-alloacted buffers.627static void StoreHuffmanCode(VP8LBitWriter* const bw,628HuffmanTree* const huff_tree,629HuffmanTreeToken* const tokens,630const HuffmanTreeCode* const huffman_code) {631int i;632int count = 0;633int symbols[2] = { 0, 0 };634const int kMaxBits = 8;635const int kMaxSymbol = 1 << kMaxBits;636637// Check whether it's a small tree.638for (i = 0; i < huffman_code->num_symbols && count < 3; ++i) {639if (huffman_code->code_lengths[i] != 0) {640if (count < 2) symbols[count] = i;641++count;642}643}644645if (count == 0) { // emit minimal tree for empty cases646// bits: small tree marker: 1, count-1: 0, large 8-bit code: 0, code: 0647VP8LPutBits(bw, 0x01, 4);648} else if (count <= 2 && symbols[0] < kMaxSymbol && symbols[1] < kMaxSymbol) {649VP8LPutBits(bw, 1, 1); // Small tree marker to encode 1 or 2 symbols.650VP8LPutBits(bw, count - 1, 1);651if (symbols[0] <= 1) {652VP8LPutBits(bw, 0, 1); // Code bit for small (1 bit) symbol value.653VP8LPutBits(bw, symbols[0], 1);654} else {655VP8LPutBits(bw, 1, 1);656VP8LPutBits(bw, symbols[0], 8);657}658if (count == 2) {659VP8LPutBits(bw, symbols[1], 8);660}661} else {662StoreFullHuffmanCode(bw, huff_tree, tokens, huffman_code);663}664}665666static WEBP_INLINE void WriteHuffmanCode(VP8LBitWriter* const bw,667const HuffmanTreeCode* const code,668int code_index) {669const int depth = code->code_lengths[code_index];670const int symbol = 
code->codes[code_index];671VP8LPutBits(bw, symbol, depth);672}673674static WEBP_INLINE void WriteHuffmanCodeWithExtraBits(675VP8LBitWriter* const bw,676const HuffmanTreeCode* const code,677int code_index,678int bits,679int n_bits) {680const int depth = code->code_lengths[code_index];681const int symbol = code->codes[code_index];682VP8LPutBits(bw, (bits << depth) | symbol, depth + n_bits);683}684685static int StoreImageToBitMask(VP8LBitWriter* const bw, int width,686int histo_bits,687const VP8LBackwardRefs* const refs,688const uint32_t* histogram_symbols,689const HuffmanTreeCode* const huffman_codes,690const WebPPicture* const pic) {691const int histo_xsize = histo_bits ? VP8LSubSampleSize(width, histo_bits) : 1;692const int tile_mask = (histo_bits == 0) ? 0 : -(1 << histo_bits);693// x and y trace the position in the image.694int x = 0;695int y = 0;696int tile_x = x & tile_mask;697int tile_y = y & tile_mask;698int histogram_ix = (histogram_symbols[0] >> 8) & 0xffff;699const HuffmanTreeCode* codes = huffman_codes + 5 * histogram_ix;700VP8LRefsCursor c = VP8LRefsCursorInit(refs);701while (VP8LRefsCursorOk(&c)) {702const PixOrCopy* const v = c.cur_pos;703if ((tile_x != (x & tile_mask)) || (tile_y != (y & tile_mask))) {704tile_x = x & tile_mask;705tile_y = y & tile_mask;706histogram_ix = (histogram_symbols[(y >> histo_bits) * histo_xsize +707(x >> histo_bits)] >>7088) &7090xffff;710codes = huffman_codes + 5 * histogram_ix;711}712if (PixOrCopyIsLiteral(v)) {713static const uint8_t order[] = { 1, 2, 0, 3 };714int k;715for (k = 0; k < 4; ++k) {716const int code = PixOrCopyLiteral(v, order[k]);717WriteHuffmanCode(bw, codes + k, code);718}719} else if (PixOrCopyIsCacheIdx(v)) {720const int code = PixOrCopyCacheIdx(v);721const int literal_ix = 256 + NUM_LENGTH_CODES + code;722WriteHuffmanCode(bw, codes, literal_ix);723} else {724int bits, n_bits;725int code;726727const int distance = PixOrCopyDistance(v);728VP8LPrefixEncode(v->len, &code, &n_bits, &bits);729WriteHuffmanCodeWithExtraBits(bw, codes, 256 + code, bits, n_bits);730731// Don't write the distance with the extra bits code since732// the distance can be up to 18 bits of extra bits, and the prefix733// 15 bits, totaling to 33, and our PutBits only supports up to 32 bits.734VP8LPrefixEncode(distance, &code, &n_bits, &bits);735WriteHuffmanCode(bw, codes + 4, code);736VP8LPutBits(bw, bits, n_bits);737}738x += PixOrCopyLength(v);739while (x >= width) {740x -= width;741++y;742}743VP8LRefsCursorNext(&c);744}745if (bw->error_) {746return WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);747}748return 1;749}750751// Special case of EncodeImageInternal() for cache-bits=0, histo_bits=31.752// pic and percent are for progress.753static int EncodeImageNoHuffman(VP8LBitWriter* const bw,754const uint32_t* const argb,755VP8LHashChain* const hash_chain,756VP8LBackwardRefs* const refs_array, int width,757int height, int quality, int low_effort,758const WebPPicture* const pic, int percent_range,759int* const percent) {760int i;761int max_tokens = 0;762VP8LBackwardRefs* refs;763HuffmanTreeToken* tokens = NULL;764HuffmanTreeCode huffman_codes[5] = {{0, NULL, NULL}};765const uint32_t histogram_symbols[1] = {0}; // only one tree, one symbol766int cache_bits = 0;767VP8LHistogramSet* histogram_image = NULL;768HuffmanTree* const huff_tree = (HuffmanTree*)WebPSafeMalloc(7693ULL * CODE_LENGTH_CODES, sizeof(*huff_tree));770if (huff_tree == NULL) {771WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);772goto Error;773}774775// Calculate backward references from 
ARGB image.776if (!VP8LHashChainFill(hash_chain, quality, argb, width, height, low_effort,777pic, percent_range / 2, percent)) {778goto Error;779}780if (!VP8LGetBackwardReferences(width, height, argb, quality, /*low_effort=*/0,781kLZ77Standard | kLZ77RLE, cache_bits,782/*do_no_cache=*/0, hash_chain, refs_array,783&cache_bits, pic,784percent_range - percent_range / 2, percent)) {785goto Error;786}787refs = &refs_array[0];788histogram_image = VP8LAllocateHistogramSet(1, cache_bits);789if (histogram_image == NULL) {790WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);791goto Error;792}793VP8LHistogramSetClear(histogram_image);794795// Build histogram image and symbols from backward references.796VP8LHistogramStoreRefs(refs, histogram_image->histograms[0]);797798// Create Huffman bit lengths and codes for each histogram image.799assert(histogram_image->size == 1);800if (!GetHuffBitLengthsAndCodes(histogram_image, huffman_codes)) {801WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);802goto Error;803}804805// No color cache, no Huffman image.806VP8LPutBits(bw, 0, 1);807808// Find maximum number of symbols for the huffman tree-set.809for (i = 0; i < 5; ++i) {810HuffmanTreeCode* const codes = &huffman_codes[i];811if (max_tokens < codes->num_symbols) {812max_tokens = codes->num_symbols;813}814}815816tokens = (HuffmanTreeToken*)WebPSafeMalloc(max_tokens, sizeof(*tokens));817if (tokens == NULL) {818WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);819goto Error;820}821822// Store Huffman codes.823for (i = 0; i < 5; ++i) {824HuffmanTreeCode* const codes = &huffman_codes[i];825StoreHuffmanCode(bw, huff_tree, tokens, codes);826ClearHuffmanTreeIfOnlyOneSymbol(codes);827}828829// Store actual literals.830if (!StoreImageToBitMask(bw, width, 0, refs, histogram_symbols, huffman_codes,831pic)) {832goto Error;833}834835Error:836WebPSafeFree(tokens);837WebPSafeFree(huff_tree);838VP8LFreeHistogramSet(histogram_image);839WebPSafeFree(huffman_codes[0].codes);840return (pic->error_code == VP8_ENC_OK);841}842843// pic and percent are for progress.844static int EncodeImageInternal(845VP8LBitWriter* const bw, const uint32_t* const argb,846VP8LHashChain* const hash_chain, VP8LBackwardRefs refs_array[4], int width,847int height, int quality, int low_effort, const CrunchConfig* const config,848int* cache_bits, int histogram_bits_in, size_t init_byte_position,849int* const hdr_size, int* const data_size, const WebPPicture* const pic,850int percent_range, int* const percent) {851const uint32_t histogram_image_xysize =852VP8LSubSampleSize(width, histogram_bits_in) *853VP8LSubSampleSize(height, histogram_bits_in);854int remaining_percent = percent_range;855int percent_start = *percent;856VP8LHistogramSet* histogram_image = NULL;857VP8LHistogram* tmp_histo = NULL;858uint32_t i, histogram_image_size = 0;859size_t bit_array_size = 0;860HuffmanTree* const huff_tree = (HuffmanTree*)WebPSafeMalloc(8613ULL * CODE_LENGTH_CODES, sizeof(*huff_tree));862HuffmanTreeToken* tokens = NULL;863HuffmanTreeCode* huffman_codes = NULL;864uint32_t* const histogram_argb = (uint32_t*)WebPSafeMalloc(865histogram_image_xysize, sizeof(*histogram_argb));866int sub_configs_idx;867int cache_bits_init, write_histogram_image;868VP8LBitWriter bw_init = *bw, bw_best;869int hdr_size_tmp;870VP8LHashChain hash_chain_histogram; // histogram image hash chain871size_t bw_size_best = ~(size_t)0;872assert(histogram_bits_in >= MIN_HUFFMAN_BITS);873assert(histogram_bits_in <= MAX_HUFFMAN_BITS);874assert(hdr_size != NULL);875assert(data_size != 
NULL);876877memset(&hash_chain_histogram, 0, sizeof(hash_chain_histogram));878if (!VP8LBitWriterInit(&bw_best, 0)) {879WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);880goto Error;881}882883// Make sure we can allocate the different objects.884if (huff_tree == NULL || histogram_argb == NULL ||885!VP8LHashChainInit(&hash_chain_histogram, histogram_image_xysize)) {886WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);887goto Error;888}889890percent_range = remaining_percent / 5;891if (!VP8LHashChainFill(hash_chain, quality, argb, width, height,892low_effort, pic, percent_range, percent)) {893goto Error;894}895percent_start += percent_range;896remaining_percent -= percent_range;897898// If the value is different from zero, it has been set during the palette899// analysis.900cache_bits_init = (*cache_bits == 0) ? MAX_COLOR_CACHE_BITS : *cache_bits;901// If several iterations will happen, clone into bw_best.902if ((config->sub_configs_size_ > 1 || config->sub_configs_[0].do_no_cache_) &&903!VP8LBitWriterClone(bw, &bw_best)) {904WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);905goto Error;906}907908for (sub_configs_idx = 0; sub_configs_idx < config->sub_configs_size_;909++sub_configs_idx) {910const CrunchSubConfig* const sub_config =911&config->sub_configs_[sub_configs_idx];912int cache_bits_best, i_cache;913int i_remaining_percent = remaining_percent / config->sub_configs_size_;914int i_percent_range = i_remaining_percent / 4;915i_remaining_percent -= i_percent_range;916917if (!VP8LGetBackwardReferences(918width, height, argb, quality, low_effort, sub_config->lz77_,919cache_bits_init, sub_config->do_no_cache_, hash_chain,920&refs_array[0], &cache_bits_best, pic, i_percent_range, percent)) {921goto Error;922}923924for (i_cache = 0; i_cache < (sub_config->do_no_cache_ ? 2 : 1); ++i_cache) {925const int cache_bits_tmp = (i_cache == 0) ? 
cache_bits_best : 0;926int histogram_bits = histogram_bits_in;927// Speed-up: no need to study the no-cache case if it was already studied928// in i_cache == 0.929if (i_cache == 1 && cache_bits_best == 0) break;930931// Reset the bit writer for this iteration.932VP8LBitWriterReset(&bw_init, bw);933934// Build histogram image and symbols from backward references.935histogram_image =936VP8LAllocateHistogramSet(histogram_image_xysize, cache_bits_tmp);937tmp_histo = VP8LAllocateHistogram(cache_bits_tmp);938if (histogram_image == NULL || tmp_histo == NULL) {939WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);940goto Error;941}942943i_percent_range = i_remaining_percent / 3;944i_remaining_percent -= i_percent_range;945if (!VP8LGetHistoImageSymbols(946width, height, &refs_array[i_cache], quality, low_effort,947histogram_bits, cache_bits_tmp, histogram_image, tmp_histo,948histogram_argb, pic, i_percent_range, percent)) {949goto Error;950}951// Create Huffman bit lengths and codes for each histogram image.952histogram_image_size = histogram_image->size;953bit_array_size = 5 * histogram_image_size;954huffman_codes = (HuffmanTreeCode*)WebPSafeCalloc(bit_array_size,955sizeof(*huffman_codes));956// Note: some histogram_image entries may point to tmp_histos[], so the957// latter need to outlive the following call to958// GetHuffBitLengthsAndCodes().959if (huffman_codes == NULL ||960!GetHuffBitLengthsAndCodes(histogram_image, huffman_codes)) {961WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);962goto Error;963}964// Free combined histograms.965VP8LFreeHistogramSet(histogram_image);966histogram_image = NULL;967968// Free scratch histograms.969VP8LFreeHistogram(tmp_histo);970tmp_histo = NULL;971972// Color Cache parameters.973if (cache_bits_tmp > 0) {974VP8LPutBits(bw, 1, 1);975VP8LPutBits(bw, cache_bits_tmp, 4);976} else {977VP8LPutBits(bw, 0, 1);978}979980// Huffman image + meta huffman.981histogram_image_size = 0;982for (i = 0; i < histogram_image_xysize; ++i) {983if (histogram_argb[i] >= histogram_image_size) {984histogram_image_size = histogram_argb[i] + 1;985}986histogram_argb[i] <<= 8;987}988989write_histogram_image = (histogram_image_size > 1);990VP8LPutBits(bw, write_histogram_image, 1);991if (write_histogram_image) {992VP8LOptimizeSampling(histogram_argb, width, height, histogram_bits_in,993MAX_HUFFMAN_BITS, &histogram_bits);994VP8LPutBits(bw, histogram_bits - 2, 3);995i_percent_range = i_remaining_percent / 2;996i_remaining_percent -= i_percent_range;997if (!EncodeImageNoHuffman(998bw, histogram_argb, &hash_chain_histogram, &refs_array[2],999VP8LSubSampleSize(width, histogram_bits),1000VP8LSubSampleSize(height, histogram_bits), quality, low_effort,1001pic, i_percent_range, percent)) {1002goto Error;1003}1004}10051006// Store Huffman codes.1007{1008int max_tokens = 0;1009// Find maximum number of symbols for the huffman tree-set.1010for (i = 0; i < 5 * histogram_image_size; ++i) {1011HuffmanTreeCode* const codes = &huffman_codes[i];1012if (max_tokens < codes->num_symbols) {1013max_tokens = codes->num_symbols;1014}1015}1016tokens = (HuffmanTreeToken*)WebPSafeMalloc(max_tokens, sizeof(*tokens));1017if (tokens == NULL) {1018WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);1019goto Error;1020}1021for (i = 0; i < 5 * histogram_image_size; ++i) {1022HuffmanTreeCode* const codes = &huffman_codes[i];1023StoreHuffmanCode(bw, huff_tree, tokens, codes);1024ClearHuffmanTreeIfOnlyOneSymbol(codes);1025}1026}1027// Store actual literals.1028hdr_size_tmp = (int)(VP8LBitWriterNumBytes(bw) - 
init_byte_position);1029if (!StoreImageToBitMask(bw, width, histogram_bits, &refs_array[i_cache],1030histogram_argb, huffman_codes, pic)) {1031goto Error;1032}1033// Keep track of the smallest image so far.1034if (VP8LBitWriterNumBytes(bw) < bw_size_best) {1035bw_size_best = VP8LBitWriterNumBytes(bw);1036*cache_bits = cache_bits_tmp;1037*hdr_size = hdr_size_tmp;1038*data_size =1039(int)(VP8LBitWriterNumBytes(bw) - init_byte_position - *hdr_size);1040VP8LBitWriterSwap(bw, &bw_best);1041}1042WebPSafeFree(tokens);1043tokens = NULL;1044if (huffman_codes != NULL) {1045WebPSafeFree(huffman_codes->codes);1046WebPSafeFree(huffman_codes);1047huffman_codes = NULL;1048}1049}1050}1051VP8LBitWriterSwap(bw, &bw_best);10521053if (!WebPReportProgress(pic, percent_start + remaining_percent, percent)) {1054goto Error;1055}10561057Error:1058WebPSafeFree(tokens);1059WebPSafeFree(huff_tree);1060VP8LFreeHistogramSet(histogram_image);1061VP8LFreeHistogram(tmp_histo);1062VP8LHashChainClear(&hash_chain_histogram);1063if (huffman_codes != NULL) {1064WebPSafeFree(huffman_codes->codes);1065WebPSafeFree(huffman_codes);1066}1067WebPSafeFree(histogram_argb);1068VP8LBitWriterWipeOut(&bw_best);1069return (pic->error_code == VP8_ENC_OK);1070}10711072// -----------------------------------------------------------------------------1073// Transforms10741075static void ApplySubtractGreen(VP8LEncoder* const enc, int width, int height,1076VP8LBitWriter* const bw) {1077VP8LPutBits(bw, TRANSFORM_PRESENT, 1);1078VP8LPutBits(bw, SUBTRACT_GREEN_TRANSFORM, 2);1079VP8LSubtractGreenFromBlueAndRed(enc->argb_, width * height);1080}10811082static int ApplyPredictFilter(VP8LEncoder* const enc, int width, int height,1083int quality, int low_effort,1084int used_subtract_green, VP8LBitWriter* const bw,1085int percent_range, int* const percent) {1086int best_bits;1087const int near_lossless_strength =1088enc->use_palette_ ? 100 : enc->config_->near_lossless;1089const int max_bits = ClampBits(width, height, enc->predictor_transform_bits_,1090MIN_TRANSFORM_BITS, MAX_TRANSFORM_BITS,1091MAX_PREDICTOR_IMAGE_SIZE);1092const int min_bits = ClampBits(1093width, height,1094max_bits - 2 * (enc->config_->method > 4 ? 
enc->config_->method - 4 : 0),1095MIN_TRANSFORM_BITS, MAX_TRANSFORM_BITS, MAX_PREDICTOR_IMAGE_SIZE);10961097if (!VP8LResidualImage(width, height, min_bits, max_bits, low_effort,1098enc->argb_, enc->argb_scratch_, enc->transform_data_,1099near_lossless_strength, enc->config_->exact,1100used_subtract_green, enc->pic_, percent_range / 2,1101percent, &best_bits)) {1102return 0;1103}1104VP8LPutBits(bw, TRANSFORM_PRESENT, 1);1105VP8LPutBits(bw, PREDICTOR_TRANSFORM, 2);1106assert(best_bits >= MIN_TRANSFORM_BITS && best_bits <= MAX_TRANSFORM_BITS);1107VP8LPutBits(bw, best_bits - MIN_TRANSFORM_BITS, NUM_TRANSFORM_BITS);1108enc->predictor_transform_bits_ = best_bits;1109return EncodeImageNoHuffman(1110bw, enc->transform_data_, &enc->hash_chain_, &enc->refs_[0],1111VP8LSubSampleSize(width, best_bits), VP8LSubSampleSize(height, best_bits),1112quality, low_effort, enc->pic_, percent_range - percent_range / 2,1113percent);1114}11151116static int ApplyCrossColorFilter(VP8LEncoder* const enc, int width, int height,1117int quality, int low_effort,1118VP8LBitWriter* const bw, int percent_range,1119int* const percent) {1120const int min_bits = enc->cross_color_transform_bits_;1121int best_bits;11221123if (!VP8LColorSpaceTransform(width, height, min_bits, quality, enc->argb_,1124enc->transform_data_, enc->pic_,1125percent_range / 2, percent, &best_bits)) {1126return 0;1127}1128VP8LPutBits(bw, TRANSFORM_PRESENT, 1);1129VP8LPutBits(bw, CROSS_COLOR_TRANSFORM, 2);1130assert(best_bits >= MIN_TRANSFORM_BITS && best_bits <= MAX_TRANSFORM_BITS);1131VP8LPutBits(bw, best_bits - MIN_TRANSFORM_BITS, NUM_TRANSFORM_BITS);1132enc->cross_color_transform_bits_ = best_bits;1133return EncodeImageNoHuffman(1134bw, enc->transform_data_, &enc->hash_chain_, &enc->refs_[0],1135VP8LSubSampleSize(width, best_bits), VP8LSubSampleSize(height, best_bits),1136quality, low_effort, enc->pic_, percent_range - percent_range / 2,1137percent);1138}11391140// -----------------------------------------------------------------------------11411142static int WriteRiffHeader(const WebPPicture* const pic, size_t riff_size,1143size_t vp8l_size) {1144uint8_t riff[RIFF_HEADER_SIZE + CHUNK_HEADER_SIZE + VP8L_SIGNATURE_SIZE] = {1145'R', 'I', 'F', 'F', 0, 0, 0, 0, 'W', 'E', 'B', 'P',1146'V', 'P', '8', 'L', 0, 0, 0, 0, VP8L_MAGIC_BYTE,1147};1148PutLE32(riff + TAG_SIZE, (uint32_t)riff_size);1149PutLE32(riff + RIFF_HEADER_SIZE + TAG_SIZE, (uint32_t)vp8l_size);1150return pic->writer(riff, sizeof(riff), pic);1151}11521153static int WriteImageSize(const WebPPicture* const pic,1154VP8LBitWriter* const bw) {1155const int width = pic->width - 1;1156const int height = pic->height - 1;1157assert(width < WEBP_MAX_DIMENSION && height < WEBP_MAX_DIMENSION);11581159VP8LPutBits(bw, width, VP8L_IMAGE_SIZE_BITS);1160VP8LPutBits(bw, height, VP8L_IMAGE_SIZE_BITS);1161return !bw->error_;1162}11631164static int WriteRealAlphaAndVersion(VP8LBitWriter* const bw, int has_alpha) {1165VP8LPutBits(bw, has_alpha, 1);1166VP8LPutBits(bw, VP8L_VERSION, VP8L_VERSION_BITS);1167return !bw->error_;1168}11691170static int WriteImage(const WebPPicture* const pic, VP8LBitWriter* const bw,1171size_t* const coded_size) {1172const uint8_t* const webpll_data = VP8LBitWriterFinish(bw);1173const size_t webpll_size = VP8LBitWriterNumBytes(bw);1174const size_t vp8l_size = VP8L_SIGNATURE_SIZE + webpll_size;1175const size_t pad = vp8l_size & 1;1176const size_t riff_size = TAG_SIZE + CHUNK_HEADER_SIZE + vp8l_size + pad;1177*coded_size = 0;11781179if (bw->error_) {1180return WebPEncodingSetError(pic, 
VP8_ENC_ERROR_OUT_OF_MEMORY);1181}11821183if (!WriteRiffHeader(pic, riff_size, vp8l_size) ||1184!pic->writer(webpll_data, webpll_size, pic)) {1185return WebPEncodingSetError(pic, VP8_ENC_ERROR_BAD_WRITE);1186}11871188if (pad) {1189const uint8_t pad_byte[1] = { 0 };1190if (!pic->writer(pad_byte, 1, pic)) {1191return WebPEncodingSetError(pic, VP8_ENC_ERROR_BAD_WRITE);1192}1193}1194*coded_size = CHUNK_HEADER_SIZE + riff_size;1195return 1;1196}11971198// -----------------------------------------------------------------------------11991200static void ClearTransformBuffer(VP8LEncoder* const enc) {1201WebPSafeFree(enc->transform_mem_);1202enc->transform_mem_ = NULL;1203enc->transform_mem_size_ = 0;1204}12051206// Allocates the memory for argb (W x H) buffer, 2 rows of context for1207// prediction and transform data.1208// Flags influencing the memory allocated:1209// enc->transform_bits_1210// enc->use_predict_, enc->use_cross_color_1211static int AllocateTransformBuffer(VP8LEncoder* const enc, int width,1212int height) {1213const uint64_t image_size = (uint64_t)width * height;1214// VP8LResidualImage needs room for 2 scanlines of uint32 pixels with an extra1215// pixel in each, plus 2 regular scanlines of bytes.1216// TODO(skal): Clean up by using arithmetic in bytes instead of words.1217const uint64_t argb_scratch_size =1218enc->use_predict_ ? (width + 1) * 2 + (width * 2 + sizeof(uint32_t) - 1) /1219sizeof(uint32_t)1220: 0;1221const uint64_t transform_data_size =1222(enc->use_predict_ || enc->use_cross_color_)1223? (uint64_t)VP8LSubSampleSize(width, MIN_TRANSFORM_BITS) *1224VP8LSubSampleSize(height, MIN_TRANSFORM_BITS)1225: 0;1226const uint64_t max_alignment_in_words =1227(WEBP_ALIGN_CST + sizeof(uint32_t) - 1) / sizeof(uint32_t);1228const uint64_t mem_size = image_size + max_alignment_in_words +1229argb_scratch_size + max_alignment_in_words +1230transform_data_size;1231uint32_t* mem = enc->transform_mem_;1232if (mem == NULL || mem_size > enc->transform_mem_size_) {1233ClearTransformBuffer(enc);1234mem = (uint32_t*)WebPSafeMalloc(mem_size, sizeof(*mem));1235if (mem == NULL) {1236return WebPEncodingSetError(enc->pic_, VP8_ENC_ERROR_OUT_OF_MEMORY);1237}1238enc->transform_mem_ = mem;1239enc->transform_mem_size_ = (size_t)mem_size;1240enc->argb_content_ = kEncoderNone;1241}1242enc->argb_ = mem;1243mem = (uint32_t*)WEBP_ALIGN(mem + image_size);1244enc->argb_scratch_ = mem;1245mem = (uint32_t*)WEBP_ALIGN(mem + argb_scratch_size);1246enc->transform_data_ = mem;12471248enc->current_width_ = width;1249return 1;1250}12511252static int MakeInputImageCopy(VP8LEncoder* const enc) {1253const WebPPicture* const picture = enc->pic_;1254const int width = picture->width;1255const int height = picture->height;12561257if (!AllocateTransformBuffer(enc, width, height)) return 0;1258if (enc->argb_content_ == kEncoderARGB) return 1;12591260{1261uint32_t* dst = enc->argb_;1262const uint32_t* src = picture->argb;1263int y;1264for (y = 0; y < height; ++y) {1265memcpy(dst, src, width * sizeof(*dst));1266dst += width;1267src += picture->argb_stride;1268}1269}1270enc->argb_content_ = kEncoderARGB;1271assert(enc->current_width_ == width);1272return 1;1273}12741275// -----------------------------------------------------------------------------12761277#define APPLY_PALETTE_GREEDY_MAX 412781279static WEBP_INLINE uint32_t SearchColorGreedy(const uint32_t palette[],1280int palette_size,1281uint32_t color) {1282(void)palette_size;1283assert(palette_size < APPLY_PALETTE_GREEDY_MAX);1284assert(3 == APPLY_PALETTE_GREEDY_MAX - 
1);1285if (color == palette[0]) return 0;1286if (color == palette[1]) return 1;1287if (color == palette[2]) return 2;1288return 3;1289}12901291static WEBP_INLINE uint32_t ApplyPaletteHash0(uint32_t color) {1292// Focus on the green color.1293return (color >> 8) & 0xff;1294}12951296#define PALETTE_INV_SIZE_BITS 111297#define PALETTE_INV_SIZE (1 << PALETTE_INV_SIZE_BITS)12981299static WEBP_INLINE uint32_t ApplyPaletteHash1(uint32_t color) {1300// Forget about alpha.1301return ((uint32_t)((color & 0x00ffffffu) * 4222244071ull)) >>1302(32 - PALETTE_INV_SIZE_BITS);1303}13041305static WEBP_INLINE uint32_t ApplyPaletteHash2(uint32_t color) {1306// Forget about alpha.1307return ((uint32_t)((color & 0x00ffffffu) * ((1ull << 31) - 1))) >>1308(32 - PALETTE_INV_SIZE_BITS);1309}13101311// Use 1 pixel cache for ARGB pixels.1312#define APPLY_PALETTE_FOR(COLOR_INDEX) do { \1313uint32_t prev_pix = palette[0]; \1314uint32_t prev_idx = 0; \1315for (y = 0; y < height; ++y) { \1316for (x = 0; x < width; ++x) { \1317const uint32_t pix = src[x]; \1318if (pix != prev_pix) { \1319prev_idx = COLOR_INDEX; \1320prev_pix = pix; \1321} \1322tmp_row[x] = prev_idx; \1323} \1324VP8LBundleColorMap(tmp_row, width, xbits, dst); \1325src += src_stride; \1326dst += dst_stride; \1327} \1328} while (0)13291330// Remap argb values in src[] to packed palettes entries in dst[]1331// using 'row' as a temporary buffer of size 'width'.1332// We assume that all src[] values have a corresponding entry in the palette.1333// Note: src[] can be the same as dst[]1334static int ApplyPalette(const uint32_t* src, uint32_t src_stride, uint32_t* dst,1335uint32_t dst_stride, const uint32_t* palette,1336int palette_size, int width, int height, int xbits,1337const WebPPicture* const pic) {1338// TODO(skal): this tmp buffer is not needed if VP8LBundleColorMap() can be1339// made to work in-place.1340uint8_t* const tmp_row = (uint8_t*)WebPSafeMalloc(width, sizeof(*tmp_row));1341int x, y;13421343if (tmp_row == NULL) {1344return WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);1345}13461347if (palette_size < APPLY_PALETTE_GREEDY_MAX) {1348APPLY_PALETTE_FOR(SearchColorGreedy(palette, palette_size, pix));1349} else {1350int i, j;1351uint16_t buffer[PALETTE_INV_SIZE];1352uint32_t (*const hash_functions[])(uint32_t) = {1353ApplyPaletteHash0, ApplyPaletteHash1, ApplyPaletteHash21354};13551356// Try to find a perfect hash function able to go from a color to an index1357// within 1 << PALETTE_INV_SIZE_BITS in order to build a hash map to go1358// from color to index in palette.1359for (i = 0; i < 3; ++i) {1360int use_LUT = 1;1361// Set each element in buffer to max uint16_t.1362memset(buffer, 0xff, sizeof(buffer));1363for (j = 0; j < palette_size; ++j) {1364const uint32_t ind = hash_functions[i](palette[j]);1365if (buffer[ind] != 0xffffu) {1366use_LUT = 0;1367break;1368} else {1369buffer[ind] = j;1370}1371}1372if (use_LUT) break;1373}13741375if (i == 0) {1376APPLY_PALETTE_FOR(buffer[ApplyPaletteHash0(pix)]);1377} else if (i == 1) {1378APPLY_PALETTE_FOR(buffer[ApplyPaletteHash1(pix)]);1379} else if (i == 2) {1380APPLY_PALETTE_FOR(buffer[ApplyPaletteHash2(pix)]);1381} else {1382uint32_t idx_map[MAX_PALETTE_SIZE];1383uint32_t palette_sorted[MAX_PALETTE_SIZE];1384PrepareMapToPalette(palette, palette_size, palette_sorted, idx_map);1385APPLY_PALETTE_FOR(1386idx_map[SearchColorNoIdx(palette_sorted, pix, palette_size)]);1387}1388}1389WebPSafeFree(tmp_row);1390return 1;1391}1392#undef APPLY_PALETTE_FOR1393#undef PALETTE_INV_SIZE_BITS1394#undef 
PALETTE_INV_SIZE1395#undef APPLY_PALETTE_GREEDY_MAX13961397// Note: Expects "enc->palette_" to be set properly.1398static int MapImageFromPalette(VP8LEncoder* const enc) {1399const WebPPicture* const pic = enc->pic_;1400const int width = pic->width;1401const int height = pic->height;1402const uint32_t* const palette = enc->palette_;1403const int palette_size = enc->palette_size_;1404int xbits;14051406// Replace each input pixel by corresponding palette index.1407// This is done line by line.1408if (palette_size <= 4) {1409xbits = (palette_size <= 2) ? 3 : 2;1410} else {1411xbits = (palette_size <= 16) ? 1 : 0;1412}14131414if (!AllocateTransformBuffer(enc, VP8LSubSampleSize(width, xbits), height)) {1415return 0;1416}1417if (!ApplyPalette(pic->argb, pic->argb_stride, enc->argb_,1418enc->current_width_, palette, palette_size, width, height,1419xbits, pic)) {1420return 0;1421}1422enc->argb_content_ = kEncoderPalette;1423return 1;1424}14251426// Save palette_[] to bitstream.1427static int EncodePalette(VP8LBitWriter* const bw, int low_effort,1428VP8LEncoder* const enc, int percent_range,1429int* const percent) {1430int i;1431uint32_t tmp_palette[MAX_PALETTE_SIZE];1432const int palette_size = enc->palette_size_;1433const uint32_t* const palette = enc->palette_;1434// If the last element is 0, do not store it and count on automatic palette1435// 0-filling. This can only happen if there is no pixel packing, hence if1436// there are strictly more than 16 colors (after 0 is removed).1437const uint32_t encoded_palette_size =1438(enc->palette_[palette_size - 1] == 0 && palette_size > 17)1439? palette_size - 11440: palette_size;1441VP8LPutBits(bw, TRANSFORM_PRESENT, 1);1442VP8LPutBits(bw, COLOR_INDEXING_TRANSFORM, 2);1443assert(palette_size >= 1 && palette_size <= MAX_PALETTE_SIZE);1444VP8LPutBits(bw, encoded_palette_size - 1, 8);1445for (i = encoded_palette_size - 1; i >= 1; --i) {1446tmp_palette[i] = VP8LSubPixels(palette[i], palette[i - 1]);1447}1448tmp_palette[0] = palette[0];1449return EncodeImageNoHuffman(1450bw, tmp_palette, &enc->hash_chain_, &enc->refs_[0], encoded_palette_size,14511, /*quality=*/20, low_effort, enc->pic_, percent_range, percent);1452}14531454// -----------------------------------------------------------------------------1455// VP8LEncoder14561457static VP8LEncoder* VP8LEncoderNew(const WebPConfig* const config,1458const WebPPicture* const picture) {1459VP8LEncoder* const enc = (VP8LEncoder*)WebPSafeCalloc(1ULL, sizeof(*enc));1460if (enc == NULL) {1461WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);1462return NULL;1463}1464enc->config_ = config;1465enc->pic_ = picture;1466enc->argb_content_ = kEncoderNone;14671468VP8LEncDspInit();14691470return enc;1471}14721473static void VP8LEncoderDelete(VP8LEncoder* enc) {1474if (enc != NULL) {1475int i;1476VP8LHashChainClear(&enc->hash_chain_);1477for (i = 0; i < 4; ++i) VP8LBackwardRefsClear(&enc->refs_[i]);1478ClearTransformBuffer(enc);1479WebPSafeFree(enc);1480}1481}14821483// -----------------------------------------------------------------------------1484// Main call14851486typedef struct {1487const WebPConfig* config_;1488const WebPPicture* picture_;1489VP8LBitWriter* bw_;1490VP8LEncoder* enc_;1491CrunchConfig crunch_configs_[CRUNCH_CONFIGS_MAX];1492int num_crunch_configs_;1493int red_and_blue_always_zero_;1494WebPAuxStats* stats_;1495} StreamEncodeContext;14961497static int EncodeStreamHook(void* input, void* data2) {1498StreamEncodeContext* const params = (StreamEncodeContext*)input;1499const WebPConfig* const config = 
params->config_;1500const WebPPicture* const picture = params->picture_;1501VP8LBitWriter* const bw = params->bw_;1502VP8LEncoder* const enc = params->enc_;1503const CrunchConfig* const crunch_configs = params->crunch_configs_;1504const int num_crunch_configs = params->num_crunch_configs_;1505const int red_and_blue_always_zero = params->red_and_blue_always_zero_;1506#if !defined(WEBP_DISABLE_STATS)1507WebPAuxStats* const stats = params->stats_;1508#endif1509const int quality = (int)config->quality;1510const int low_effort = (config->method == 0);1511#if (WEBP_NEAR_LOSSLESS == 1)1512const int width = picture->width;1513#endif1514const int height = picture->height;1515const size_t byte_position = VP8LBitWriterNumBytes(bw);1516int percent = 2; // for WebPProgressHook1517#if (WEBP_NEAR_LOSSLESS == 1)1518int use_near_lossless = 0;1519#endif1520int hdr_size = 0;1521int data_size = 0;1522int idx;1523size_t best_size = ~(size_t)0;1524VP8LBitWriter bw_init = *bw, bw_best;1525(void)data2;15261527if (!VP8LBitWriterInit(&bw_best, 0) ||1528(num_crunch_configs > 1 && !VP8LBitWriterClone(bw, &bw_best))) {1529WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);1530goto Error;1531}15321533for (idx = 0; idx < num_crunch_configs; ++idx) {1534const int entropy_idx = crunch_configs[idx].entropy_idx_;1535int remaining_percent = 97 / num_crunch_configs, percent_range;1536enc->use_palette_ =1537(entropy_idx == kPalette) || (entropy_idx == kPaletteAndSpatial);1538enc->use_subtract_green_ =1539(entropy_idx == kSubGreen) || (entropy_idx == kSpatialSubGreen);1540enc->use_predict_ = (entropy_idx == kSpatial) ||1541(entropy_idx == kSpatialSubGreen) ||1542(entropy_idx == kPaletteAndSpatial);1543// When using a palette, R/B==0, hence no need to test for cross-color.1544if (low_effort || enc->use_palette_) {1545enc->use_cross_color_ = 0;1546} else {1547enc->use_cross_color_ = red_and_blue_always_zero ? 
0 : enc->use_predict_;1548}1549// Reset any parameter in the encoder that is set in the previous iteration.1550enc->cache_bits_ = 0;1551VP8LBackwardRefsClear(&enc->refs_[0]);1552VP8LBackwardRefsClear(&enc->refs_[1]);15531554#if (WEBP_NEAR_LOSSLESS == 1)1555// Apply near-lossless preprocessing.1556use_near_lossless = (config->near_lossless < 100) && !enc->use_palette_ &&1557!enc->use_predict_;1558if (use_near_lossless) {1559if (!AllocateTransformBuffer(enc, width, height)) goto Error;1560if ((enc->argb_content_ != kEncoderNearLossless) &&1561!VP8ApplyNearLossless(picture, config->near_lossless, enc->argb_)) {1562WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);1563goto Error;1564}1565enc->argb_content_ = kEncoderNearLossless;1566} else {1567enc->argb_content_ = kEncoderNone;1568}1569#else1570enc->argb_content_ = kEncoderNone;1571#endif15721573// Encode palette1574if (enc->use_palette_) {1575if (!PaletteSort(crunch_configs[idx].palette_sorting_type_, enc->pic_,1576enc->palette_sorted_, enc->palette_size_,1577enc->palette_)) {1578WebPEncodingSetError(enc->pic_, VP8_ENC_ERROR_OUT_OF_MEMORY);1579goto Error;1580}1581percent_range = remaining_percent / 4;1582if (!EncodePalette(bw, low_effort, enc, percent_range, &percent)) {1583goto Error;1584}1585remaining_percent -= percent_range;1586if (!MapImageFromPalette(enc)) goto Error;1587// If using a color cache, do not have it bigger than the number of1588// colors.1589if (enc->palette_size_ < (1 << MAX_COLOR_CACHE_BITS)) {1590enc->cache_bits_ = BitsLog2Floor(enc->palette_size_) + 1;1591}1592}1593// In case image is not packed.1594if (enc->argb_content_ != kEncoderNearLossless &&1595enc->argb_content_ != kEncoderPalette) {1596if (!MakeInputImageCopy(enc)) goto Error;1597}15981599// -------------------------------------------------------------------------1600// Apply transforms and write transform data.16011602if (enc->use_subtract_green_) {1603ApplySubtractGreen(enc, enc->current_width_, height, bw);1604}16051606if (enc->use_predict_) {1607percent_range = remaining_percent / 3;1608if (!ApplyPredictFilter(enc, enc->current_width_, height, quality,1609low_effort, enc->use_subtract_green_, bw,1610percent_range, &percent)) {1611goto Error;1612}1613remaining_percent -= percent_range;1614}16151616if (enc->use_cross_color_) {1617percent_range = remaining_percent / 2;1618if (!ApplyCrossColorFilter(enc, enc->current_width_, height, quality,1619low_effort, bw, percent_range, &percent)) {1620goto Error;1621}1622remaining_percent -= percent_range;1623}16241625VP8LPutBits(bw, !TRANSFORM_PRESENT, 1); // No more transforms.16261627// -------------------------------------------------------------------------1628// Encode and write the transformed image.1629if (!EncodeImageInternal(1630bw, enc->argb_, &enc->hash_chain_, enc->refs_, enc->current_width_,1631height, quality, low_effort, &crunch_configs[idx],1632&enc->cache_bits_, enc->histo_bits_, byte_position, &hdr_size,1633&data_size, picture, remaining_percent, &percent)) {1634goto Error;1635}16361637// If we are better than what we already have.1638if (VP8LBitWriterNumBytes(bw) < best_size) {1639best_size = VP8LBitWriterNumBytes(bw);1640// Store the BitWriter.1641VP8LBitWriterSwap(bw, &bw_best);1642#if !defined(WEBP_DISABLE_STATS)1643// Update the stats.1644if (stats != NULL) {1645stats->lossless_features = 0;1646if (enc->use_predict_) stats->lossless_features |= 1;1647if (enc->use_cross_color_) stats->lossless_features |= 2;1648if (enc->use_subtract_green_) stats->lossless_features |= 4;1649if 
(enc->use_palette_) stats->lossless_features |= 8;1650stats->histogram_bits = enc->histo_bits_;1651stats->transform_bits = enc->predictor_transform_bits_;1652stats->cross_color_transform_bits = enc->cross_color_transform_bits_;1653stats->cache_bits = enc->cache_bits_;1654stats->palette_size = enc->palette_size_;1655stats->lossless_size = (int)(best_size - byte_position);1656stats->lossless_hdr_size = hdr_size;1657stats->lossless_data_size = data_size;1658}1659#endif1660}1661// Reset the bit writer for the following iteration if any.1662if (num_crunch_configs > 1) VP8LBitWriterReset(&bw_init, bw);1663}1664VP8LBitWriterSwap(&bw_best, bw);16651666Error:1667VP8LBitWriterWipeOut(&bw_best);1668// The hook should return false in case of error.1669return (params->picture_->error_code == VP8_ENC_OK);1670}16711672int VP8LEncodeStream(const WebPConfig* const config,1673const WebPPicture* const picture,1674VP8LBitWriter* const bw_main) {1675VP8LEncoder* const enc_main = VP8LEncoderNew(config, picture);1676VP8LEncoder* enc_side = NULL;1677CrunchConfig crunch_configs[CRUNCH_CONFIGS_MAX];1678int num_crunch_configs_main, num_crunch_configs_side = 0;1679int idx;1680int red_and_blue_always_zero = 0;1681WebPWorker worker_main, worker_side;1682StreamEncodeContext params_main, params_side;1683// The main thread uses picture->stats, the side thread uses stats_side.1684WebPAuxStats stats_side;1685VP8LBitWriter bw_side;1686WebPPicture picture_side;1687const WebPWorkerInterface* const worker_interface = WebPGetWorkerInterface();1688int ok_main;16891690if (enc_main == NULL || !VP8LBitWriterInit(&bw_side, 0)) {1691VP8LEncoderDelete(enc_main);1692return WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);1693}16941695// Avoid "garbage value" error from Clang's static analysis tool.1696if (!WebPPictureInit(&picture_side)) {1697goto Error;1698}16991700// Analyze image (entropy, num_palettes etc)1701if (!EncoderAnalyze(enc_main, crunch_configs, &num_crunch_configs_main,1702&red_and_blue_always_zero) ||1703!EncoderInit(enc_main)) {1704WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);1705goto Error;1706}17071708// Split the configs between the main and side threads (if any).1709if (config->thread_level > 0) {1710num_crunch_configs_side = num_crunch_configs_main / 2;1711for (idx = 0; idx < num_crunch_configs_side; ++idx) {1712params_side.crunch_configs_[idx] =1713crunch_configs[num_crunch_configs_main - num_crunch_configs_side +1714idx];1715}1716params_side.num_crunch_configs_ = num_crunch_configs_side;1717}1718num_crunch_configs_main -= num_crunch_configs_side;1719for (idx = 0; idx < num_crunch_configs_main; ++idx) {1720params_main.crunch_configs_[idx] = crunch_configs[idx];1721}1722params_main.num_crunch_configs_ = num_crunch_configs_main;17231724// Fill in the parameters for the thread workers.1725{1726const int params_size = (num_crunch_configs_side > 0) ? 2 : 1;1727for (idx = 0; idx < params_size; ++idx) {1728// Create the parameters for each worker.1729WebPWorker* const worker = (idx == 0) ? &worker_main : &worker_side;1730StreamEncodeContext* const param =1731(idx == 0) ? 
¶ms_main : ¶ms_side;1732param->config_ = config;1733param->red_and_blue_always_zero_ = red_and_blue_always_zero;1734if (idx == 0) {1735param->picture_ = picture;1736param->stats_ = picture->stats;1737param->bw_ = bw_main;1738param->enc_ = enc_main;1739} else {1740// Create a side picture (error_code is not thread-safe).1741if (!WebPPictureView(picture, /*left=*/0, /*top=*/0, picture->width,1742picture->height, &picture_side)) {1743assert(0);1744}1745picture_side.progress_hook = NULL; // Progress hook is not thread-safe.1746param->picture_ = &picture_side; // No need to free a view afterwards.1747param->stats_ = (picture->stats == NULL) ? NULL : &stats_side;1748// Create a side bit writer.1749if (!VP8LBitWriterClone(bw_main, &bw_side)) {1750WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);1751goto Error;1752}1753param->bw_ = &bw_side;1754// Create a side encoder.1755enc_side = VP8LEncoderNew(config, &picture_side);1756if (enc_side == NULL || !EncoderInit(enc_side)) {1757WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);1758goto Error;1759}1760// Copy the values that were computed for the main encoder.1761enc_side->histo_bits_ = enc_main->histo_bits_;1762enc_side->predictor_transform_bits_ =1763enc_main->predictor_transform_bits_;1764enc_side->cross_color_transform_bits_ =1765enc_main->cross_color_transform_bits_;1766enc_side->palette_size_ = enc_main->palette_size_;1767memcpy(enc_side->palette_, enc_main->palette_,1768sizeof(enc_main->palette_));1769memcpy(enc_side->palette_sorted_, enc_main->palette_sorted_,1770sizeof(enc_main->palette_sorted_));1771param->enc_ = enc_side;1772}1773// Create the workers.1774worker_interface->Init(worker);1775worker->data1 = param;1776worker->data2 = NULL;1777worker->hook = EncodeStreamHook;1778}1779}17801781// Start the second thread if needed.1782if (num_crunch_configs_side != 0) {1783if (!worker_interface->Reset(&worker_side)) {1784WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);1785goto Error;1786}1787#if !defined(WEBP_DISABLE_STATS)1788// This line is here and not in the param initialization above to remove a1789// Clang static analyzer warning.1790if (picture->stats != NULL) {1791memcpy(&stats_side, picture->stats, sizeof(stats_side));1792}1793#endif1794worker_interface->Launch(&worker_side);1795}1796// Execute the main thread.1797worker_interface->Execute(&worker_main);1798ok_main = worker_interface->Sync(&worker_main);1799worker_interface->End(&worker_main);1800if (num_crunch_configs_side != 0) {1801// Wait for the second thread.1802const int ok_side = worker_interface->Sync(&worker_side);1803worker_interface->End(&worker_side);1804if (!ok_main || !ok_side) {1805if (picture->error_code == VP8_ENC_OK) {1806assert(picture_side.error_code != VP8_ENC_OK);1807WebPEncodingSetError(picture, picture_side.error_code);1808}1809goto Error;1810}1811if (VP8LBitWriterNumBytes(&bw_side) < VP8LBitWriterNumBytes(bw_main)) {1812VP8LBitWriterSwap(bw_main, &bw_side);1813#if !defined(WEBP_DISABLE_STATS)1814if (picture->stats != NULL) {1815memcpy(picture->stats, &stats_side, sizeof(*picture->stats));1816}1817#endif1818}1819}18201821Error:1822VP8LBitWriterWipeOut(&bw_side);1823VP8LEncoderDelete(enc_main);1824VP8LEncoderDelete(enc_side);1825return (picture->error_code == VP8_ENC_OK);1826}18271828#undef CRUNCH_CONFIGS_MAX1829#undef CRUNCH_SUBCONFIGS_MAX18301831int VP8LEncodeImage(const WebPConfig* const config,1832const WebPPicture* const picture) {1833int width, height;1834int has_alpha;1835size_t coded_size;1836int percent = 0;1837int 

int VP8LEncodeImage(const WebPConfig* const config,
                    const WebPPicture* const picture) {
  int width, height;
  int has_alpha;
  size_t coded_size;
  int percent = 0;
  int initial_size;
  VP8LBitWriter bw;

  if (picture == NULL) return 0;

  if (config == NULL || picture->argb == NULL) {
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_NULL_PARAMETER);
  }

  width = picture->width;
  height = picture->height;
  // Initialize BitWriter with size corresponding to 16 bpp for photo images
  // and 8 bpp for graphical images.
  initial_size = (config->image_hint == WEBP_HINT_GRAPH) ?
      width * height : width * height * 2;
  if (!VP8LBitWriterInit(&bw, initial_size)) {
    WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  if (!WebPReportProgress(picture, 1, &percent)) {
 UserAbort:
    WebPEncodingSetError(picture, VP8_ENC_ERROR_USER_ABORT);
    goto Error;
  }
  // Reset stats (for pure lossless coding).
  if (picture->stats != NULL) {
    WebPAuxStats* const stats = picture->stats;
    memset(stats, 0, sizeof(*stats));
    stats->PSNR[0] = 99.f;
    stats->PSNR[1] = 99.f;
    stats->PSNR[2] = 99.f;
    stats->PSNR[3] = 99.f;
    stats->PSNR[4] = 99.f;
  }

  // Write image size.
  if (!WriteImageSize(picture, &bw)) {
    WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  has_alpha = WebPPictureHasTransparency(picture);
  // Write the non-trivial Alpha flag and lossless version.
  if (!WriteRealAlphaAndVersion(&bw, has_alpha)) {
    WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  if (!WebPReportProgress(picture, 2, &percent)) goto UserAbort;

  // Encode main image stream.
  if (!VP8LEncodeStream(config, picture, &bw)) goto Error;

  if (!WebPReportProgress(picture, 99, &percent)) goto UserAbort;

  // Finish the RIFF chunk.
  if (!WriteImage(picture, &bw, &coded_size)) goto Error;

  if (!WebPReportProgress(picture, 100, &percent)) goto UserAbort;

#if !defined(WEBP_DISABLE_STATS)
  // Save size.
  if (picture->stats != NULL) {
    picture->stats->coded_size += (int)coded_size;
    picture->stats->lossless_size = (int)coded_size;
  }
#endif

  if (picture->extra_info != NULL) {
    const int mb_w = (width + 15) >> 4;
    const int mb_h = (height + 15) >> 4;
    memset(picture->extra_info, 0, mb_w * mb_h * sizeof(*picture->extra_info));
  }

 Error:
  if (bw.error_) {
    WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
  }
  VP8LBitWriterWipeOut(&bw);
  return (picture->error_code == VP8_ENC_OK);
}

//------------------------------------------------------------------------------
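
// A minimal usage sketch (kept as a comment; not part of the library): how an
// application typically reaches the lossless path above through the public API
// declared in src/webp/encode.h. 'rgba', 'rgba_stride', 'width' and 'height'
// are placeholders supplied by the caller.
//
//   WebPConfig config;
//   WebPPicture pic;
//   WebPMemoryWriter wrt;
//   if (!WebPConfigInit(&config) || !WebPPictureInit(&pic)) return 0;
//   config.lossless = 1;      // request lossless (VP8L) coding
//   pic.use_argb = 1;         // the lossless encoder works on ARGB samples
//   pic.width = width;
//   pic.height = height;
//   if (!WebPPictureImportRGBA(&pic, rgba, rgba_stride)) return 0;
//   WebPMemoryWriterInit(&wrt);
//   pic.writer = WebPMemoryWrite;
//   pic.custom_ptr = &wrt;
//   if (WebPEncode(&config, &pic)) {
//     // wrt.mem / wrt.size hold the coded WebP bitstream.
//   }
//   WebPPictureFree(&pic);
//   WebPMemoryWriterClear(&wrt);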