Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
godotengine
GitHub Repository: godotengine/godot
Path: blob/master/thirdparty/libwebp/src/enc/vp8l_enc.c
21817 views
1
// Copyright 2012 Google Inc. All Rights Reserved.
2
//
3
// Use of this source code is governed by a BSD-style license
4
// that can be found in the COPYING file in the root of the source
5
// tree. An additional intellectual property rights grant can be found
6
// in the file PATENTS. All contributing project authors may
7
// be found in the AUTHORS file in the root of the source tree.
8
// -----------------------------------------------------------------------------
9
//
10
// main entry for the lossless encoder.
11
//
12
// Author: Vikas Arora ([email protected])
13
//
14
15
#include <assert.h>
16
#include <stdlib.h>
17
#include <string.h>
18
19
#include "src/dsp/lossless.h"
20
#include "src/dsp/lossless_common.h"
21
#include "src/enc/backward_references_enc.h"
22
#include "src/enc/histogram_enc.h"
23
#include "src/enc/vp8i_enc.h"
24
#include "src/enc/vp8li_enc.h"
25
#include "src/utils/bit_writer_utils.h"
26
#include "src/utils/huffman_encode_utils.h"
27
#include "src/utils/palette.h"
28
#include "src/utils/thread_utils.h"
29
#include "src/utils/utils.h"
30
#include "src/webp/encode.h"
31
#include "src/webp/format_constants.h"
32
#include "src/webp/types.h"
33
34
// Maximum number of histogram images (sub-blocks).
#define MAX_HUFF_IMAGE_SIZE 2600
// Upper bound on Huffman image bits: MIN_HUFFMAN_BITS plus the largest value
// representable in the NUM_HUFFMAN_BITS-wide bitstream field.
#define MAX_HUFFMAN_BITS (MIN_HUFFMAN_BITS + (1 << NUM_HUFFMAN_BITS) - 1)
// Empirical value for which it becomes too computationally expensive to
// compute the best predictor image.
#define MAX_PREDICTOR_IMAGE_SIZE (1 << 14)
40
41
// -----------------------------------------------------------------------------
42
// Palette
43
44
// These five modes are evaluated and their respective entropy is computed.
// (kPaletteAndSpatial is only used by the brute-force cruncher; kNumEntropyIx
// is the count, not a mode.)
typedef enum {
  kDirect = 0,             // No transform: raw ARGB.
  kSpatial = 1,            // Spatial (predictor) transform.
  kSubGreen = 2,           // Subtract-green transform.
  kSpatialSubGreen = 3,    // Spatial + subtract-green transforms.
  kPalette = 4,            // Color-indexing (palette) transform.
  kPaletteAndSpatial = 5,  // Palette combined with the spatial transform.
  kNumEntropyIx = 6        // Number of entries; must be last.
} EntropyIx;
54
55
// Indices of the per-channel histograms gathered by AnalyzeEntropy. The
// '*Pred' entries accumulate pixel differences (prediction residuals); the
// '*SubGreen' entries accumulate channels after green subtraction.
typedef enum {
  kHistoAlpha = 0,
  kHistoAlphaPred,
  kHistoGreen,
  kHistoGreenPred,
  kHistoRed,
  kHistoRedPred,
  kHistoBlue,
  kHistoBluePred,
  kHistoRedSubGreen,
  kHistoRedPredSubGreen,
  kHistoBlueSubGreen,
  kHistoBluePredSubGreen,
  kHistoPalette,  // Histogram of a pixel hash, approximating palette entropy.
  kHistoTotal  // Must be last.
} HistoIx;
71
72
73
// One bucket per possible 8-bit channel value.
#define NUM_BUCKETS 256

typedef uint32_t HistogramBuckets[NUM_BUCKETS];

// Keeping track of histograms, indexed by HistoIx.
// Ideally, this would just be a struct with meaningful fields, but the
// calculation of `entropy_comp` uses the index. One refactoring at a time :)
typedef struct {
  HistogramBuckets category[kHistoTotal];
} Histograms;
83
84
// Accumulates the red and blue channels of pixel 'p' into histograms 'r' and
// 'b' after subtracting the green channel (subtract-green transform).
static void AddSingleSubGreen(uint32_t p,
                              HistogramBuckets r, HistogramBuckets b) {
  const int signed_pix = (int)p;
  const int green = signed_pix >> 8;  // Upper bits are masked away below.
  const int red_minus_green = ((signed_pix >> 16) - green) & 0xff;
  const int blue_minus_green = ((signed_pix >> 0) - green) & 0xff;
  ++r[red_minus_green];
  ++b[blue_minus_green];
}
90
91
// Accumulates each channel of ARGB pixel 'p' into its per-channel histogram.
static void AddSingle(uint32_t p,
                      HistogramBuckets a, HistogramBuckets r,
                      HistogramBuckets g, HistogramBuckets b) {
  const uint32_t alpha = (p >> 24) & 0xffu;
  const uint32_t red = (p >> 16) & 0xffu;
  const uint32_t green = (p >> 8) & 0xffu;
  const uint32_t blue = p & 0xffu;
  a[alpha] += 1;
  r[red] += 1;
  g[green] += 1;
  b[blue] += 1;
}
99
100
// Multiplicative hash of a pixel value, reduced to 8 bits. Used to cheaply
// approximate the entropy of a palette over the image.
static WEBP_INLINE uint8_t HashPix(uint32_t pix) {
  // Mix the high bits into the low ones before multiplying. The 64-bit
  // arithmetic and the 0xffffffffu mask only silence an 'unsigned int
  // overflow' warning; they do not change the compiled code.
  const uint64_t mixed = (uint64_t)pix + (pix >> 19);
  const uint64_t low32 = (mixed * 0x39c5fba7ull) & 0xffffffffu;
  return (uint8_t)(low32 >> 24);
}
105
106
// Gathers per-channel histograms over the image (skipping pixels equal to
// their left or top neighbor), estimates the bit cost of each candidate
// transform mode and stores the cheapest in '*min_entropy_ix'. Also reports
// in '*red_and_blue_always_zero' whether the chosen mode's red/blue
// histograms are empty outside bucket 0, which lets the caller skip the
// cross-color transform. Returns 0 on allocation failure, 1 otherwise.
static int AnalyzeEntropy(const uint32_t* argb,
                          int width, int height, int argb_stride,
                          int use_palette,
                          int palette_size, int transform_bits,
                          EntropyIx* const min_entropy_ix,
                          int* const red_and_blue_always_zero) {
  Histograms* histo;

  if (use_palette && palette_size <= 16) {
    // In the case of small palettes, we pack 2, 4 or 8 pixels together. In
    // practice, small palettes are better than any other transform.
    *min_entropy_ix = kPalette;
    *red_and_blue_always_zero = 1;
    return 1;
  }
  histo = (Histograms*)WebPSafeCalloc(1, sizeof(*histo));
  if (histo != NULL) {
    int i, x, y;
    const uint32_t* prev_row = NULL;
    const uint32_t* curr_row = argb;
    uint32_t pix_prev = argb[0];  // Skip the first pixel.
    for (y = 0; y < height; ++y) {
      for (x = 0; x < width; ++x) {
        const uint32_t pix = curr_row[x];
        const uint32_t pix_diff = VP8LSubPixels(pix, pix_prev);
        pix_prev = pix;
        // Pixels identical to the left or top neighbor are cheap for every
        // mode; skip them so the histograms reflect the costly pixels only.
        if ((pix_diff == 0) || (prev_row != NULL && pix == prev_row[x])) {
          continue;
        }
        AddSingle(pix,
                  histo->category[kHistoAlpha],
                  histo->category[kHistoRed],
                  histo->category[kHistoGreen],
                  histo->category[kHistoBlue]);
        AddSingle(pix_diff,
                  histo->category[kHistoAlphaPred],
                  histo->category[kHistoRedPred],
                  histo->category[kHistoGreenPred],
                  histo->category[kHistoBluePred]);
        AddSingleSubGreen(pix,
                          histo->category[kHistoRedSubGreen],
                          histo->category[kHistoBlueSubGreen]);
        AddSingleSubGreen(pix_diff,
                          histo->category[kHistoRedPredSubGreen],
                          histo->category[kHistoBluePredSubGreen]);
        {
          // Approximate the palette by the entropy of the multiplicative hash.
          const uint8_t hash = HashPix(pix);
          ++histo->category[kHistoPalette][hash];
        }
      }
      prev_row = curr_row;
      curr_row += argb_stride;
    }
    {
      uint64_t entropy_comp[kHistoTotal];
      uint64_t entropy[kNumEntropyIx];
      int k;
      int last_mode_to_analyze = use_palette ? kPalette : kSpatialSubGreen;
      int j;
      // Let's add one zero to the predicted histograms. The zeros are removed
      // too efficiently by the pix_diff == 0 comparison, at least one of the
      // zeros is likely to exist.
      ++histo->category[kHistoRedPredSubGreen][0];
      ++histo->category[kHistoBluePredSubGreen][0];
      ++histo->category[kHistoRedPred][0];
      ++histo->category[kHistoGreenPred][0];
      ++histo->category[kHistoBluePred][0];
      ++histo->category[kHistoAlphaPred][0];

      for (j = 0; j < kHistoTotal; ++j) {
        entropy_comp[j] = VP8LBitsEntropy(histo->category[j], NUM_BUCKETS);
      }
      // Each mode's cost is the sum of the entropies of the channel
      // histograms that mode would actually encode.
      entropy[kDirect] = entropy_comp[kHistoAlpha] +
          entropy_comp[kHistoRed] +
          entropy_comp[kHistoGreen] +
          entropy_comp[kHistoBlue];
      entropy[kSpatial] = entropy_comp[kHistoAlphaPred] +
          entropy_comp[kHistoRedPred] +
          entropy_comp[kHistoGreenPred] +
          entropy_comp[kHistoBluePred];
      entropy[kSubGreen] = entropy_comp[kHistoAlpha] +
          entropy_comp[kHistoRedSubGreen] +
          entropy_comp[kHistoGreen] +
          entropy_comp[kHistoBlueSubGreen];
      entropy[kSpatialSubGreen] = entropy_comp[kHistoAlphaPred] +
          entropy_comp[kHistoRedPredSubGreen] +
          entropy_comp[kHistoGreenPred] +
          entropy_comp[kHistoBluePredSubGreen];
      entropy[kPalette] = entropy_comp[kHistoPalette];

      // When including transforms, there is an overhead in bits from
      // storing them. This overhead is small but matters for small images.
      // For spatial, there are 14 transformations.
      entropy[kSpatial] += (uint64_t)VP8LSubSampleSize(width, transform_bits) *
                           VP8LSubSampleSize(height, transform_bits) *
                           VP8LFastLog2(14);
      // For color transforms: 24 as only 3 channels are considered in a
      // ColorTransformElement.
      entropy[kSpatialSubGreen] +=
          (uint64_t)VP8LSubSampleSize(width, transform_bits) *
          VP8LSubSampleSize(height, transform_bits) * VP8LFastLog2(24);
      // For palettes, add the cost of storing the palette.
      // We empirically estimate the cost of a compressed entry as 8 bits.
      // The palette is differential-coded when compressed hence a much
      // lower cost than sizeof(uint32_t)*8.
      entropy[kPalette] += (palette_size * 8ull) << LOG_2_PRECISION_BITS;

      // Pick the cheapest mode among those applicable to this image.
      *min_entropy_ix = kDirect;
      for (k = kDirect + 1; k <= last_mode_to_analyze; ++k) {
        if (entropy[*min_entropy_ix] > entropy[k]) {
          *min_entropy_ix = (EntropyIx)k;
        }
      }
      assert((int)*min_entropy_ix <= last_mode_to_analyze);
      *red_and_blue_always_zero = 1;
      // Let's check if the histogram of the chosen entropy mode has
      // non-zero red and blue values. If all are zero, we can later skip
      // the cross color optimization.
      {
        // Maps each EntropyIx (kDirect..kPalette) to the red/blue histogram
        // pair that mode encodes; kPalette reuses the direct pair.
        static const uint8_t kHistoPairs[5][2] = {
          { kHistoRed, kHistoBlue },
          { kHistoRedPred, kHistoBluePred },
          { kHistoRedSubGreen, kHistoBlueSubGreen },
          { kHistoRedPredSubGreen, kHistoBluePredSubGreen },
          { kHistoRed, kHistoBlue }
        };
        const HistogramBuckets* const red_histo =
            &histo->category[kHistoPairs[*min_entropy_ix][0]];
        const HistogramBuckets* const blue_histo =
            &histo->category[kHistoPairs[*min_entropy_ix][1]];
        // Bucket 0 is ignored: a zero red/blue value lands there.
        for (i = 1; i < NUM_BUCKETS; ++i) {
          if (((*red_histo)[i] | (*blue_histo)[i]) != 0) {
            *red_and_blue_always_zero = 0;
            break;
          }
        }
      }
    }
    WebPSafeFree(histo);
    return 1;
  } else {
    return 0;
  }
}
252
253
// Clamp histogram and transform bits: keeps 'bits' within
// [min_bits, max_bits] while ensuring the sub-sampled image holds at most
// 'image_size_max' tiles, and avoids needlessly large 'bits' when the image
// already collapses to a single tile.
static int ClampBits(int width, int height, int bits, int min_bits,
                     int max_bits, int image_size_max) {
  int image_size;
  if (bits < min_bits) {
    bits = min_bits;
  } else if (bits > max_bits) {
    bits = max_bits;
  }
  image_size = VP8LSubSampleSize(width, bits) * VP8LSubSampleSize(height, bits);
  // Coarsen the tiling until the sub-sampled image fits the budget.
  while (image_size > image_size_max && bits < max_bits) {
    ++bits;
    image_size =
        VP8LSubSampleSize(width, bits) * VP8LSubSampleSize(height, bits);
  }
  // In case the bits reduce the image too much, choose the smallest value
  // setting the histogram image size to 1.
  while (image_size == 1 && bits > min_bits) {
    const int shrunk_size = VP8LSubSampleSize(width, bits - 1) *
                            VP8LSubSampleSize(height, bits - 1);
    if (shrunk_size != 1) break;
    --bits;
  }
  return bits;
}
274
275
static int GetHistoBits(int method, int use_palette, int width, int height) {
276
// Make tile size a function of encoding method (Range: 0 to 6).
277
const int histo_bits = (use_palette ? 9 : 7) - method;
278
return ClampBits(width, height, histo_bits, MIN_HUFFMAN_BITS,
279
MAX_HUFFMAN_BITS, MAX_HUFF_IMAGE_SIZE);
280
}
281
282
// Derives the transform tile bits from the encoding method, capped by the
// histogram bits so the transform image is never finer than the histograms.
static int GetTransformBits(int method, int histo_bits) {
  int max_transform_bits;
  int res;
  if (method < 4) {
    max_transform_bits = 6;
  } else if (method == 4) {
    max_transform_bits = 5;
  } else {
    max_transform_bits = 4;
  }
  res = (histo_bits < max_transform_bits) ? histo_bits : max_transform_bits;
  assert(res <= MAX_TRANSFORM_BITS);
  return res;
}
289
290
// Set of parameters to be used in each iteration of the cruncher.
#define CRUNCH_SUBCONFIGS_MAX 2
typedef struct {
  int lz77;         // Bitmask of LZ77 variants to try (kLZ77* values).
  int do_no_cache;  // If true, also analyze with the color cache disabled.
} CrunchSubConfig;
typedef struct {
  int entropy_idx;                      // EntropyIx mode to encode with.
  PaletteSorting palette_sorting_type;  // Ordering of palette entries.
  CrunchSubConfig sub_configs[CRUNCH_SUBCONFIGS_MAX];
  int sub_configs_size;                 // Number of valid 'sub_configs'.
} CrunchConfig;

// +2 because we add a palette sorting configuration for kPalette and
// kPaletteAndSpatial.
#define CRUNCH_CONFIGS_MAX (kNumEntropyIx + 2 * kPaletteSortingNum)
306
307
// Analyzes the picture and fills 'crunch_configs' with the transform /
// palette-sorting / LZ77 combinations the cruncher should try:
// - low effort (method 0): a single guessed configuration;
// - method 6 at quality 100: brute force over all applicable modes;
// - otherwise: the entropy-based best guess (plus a palette+spatial variant
//   at method 5, quality >= 75).
// Also sets enc->histo_bits, the transform bits and the palette fields.
// Returns 0 on allocation failure inside AnalyzeEntropy, 1 otherwise.
static int EncoderAnalyze(VP8LEncoder* const enc,
                          CrunchConfig crunch_configs[CRUNCH_CONFIGS_MAX],
                          int* const crunch_configs_size,
                          int* const red_and_blue_always_zero) {
  const WebPPicture* const pic = enc->pic;
  const int width = pic->width;
  const int height = pic->height;
  const WebPConfig* const config = enc->config;
  const int method = config->method;
  const int low_effort = (config->method == 0);
  int i;
  int use_palette, transform_bits;
  int n_lz77s;
  // If set to 0, analyze the cache with the computed cache value. If 1, also
  // analyze with no-cache.
  int do_no_cache = 0;
  assert(pic != NULL && pic->argb != NULL);

  // Check whether a palette is possible.
  enc->palette_size = GetColorPalette(pic, enc->palette_sorted);
  use_palette = (enc->palette_size <= MAX_PALETTE_SIZE);
  if (!use_palette) {
    enc->palette_size = 0;
  }

  // Empirical bit sizes.
  enc->histo_bits = GetHistoBits(method, use_palette,
                                 pic->width, pic->height);
  transform_bits = GetTransformBits(method, enc->histo_bits);
  enc->predictor_transform_bits = transform_bits;
  enc->cross_color_transform_bits = transform_bits;

  if (low_effort) {
    // AnalyzeEntropy is somewhat slow.
    crunch_configs[0].entropy_idx = use_palette ? kPalette : kSpatialSubGreen;
    crunch_configs[0].palette_sorting_type =
        use_palette ? kSortedDefault : kUnusedPalette;
    n_lz77s = 1;
    *crunch_configs_size = 1;
  } else {
    EntropyIx min_entropy_ix;
    // Try out multiple LZ77 on images with few colors.
    n_lz77s = (enc->palette_size > 0 && enc->palette_size <= 16) ? 2 : 1;
    if (!AnalyzeEntropy(pic->argb, width, height, pic->argb_stride, use_palette,
                        enc->palette_size, transform_bits, &min_entropy_ix,
                        red_and_blue_always_zero)) {
      return 0;
    }
    if (method == 6 && config->quality == 100) {
      do_no_cache = 1;
      // Go brute force on all transforms.
      *crunch_configs_size = 0;
      for (i = 0; i < kNumEntropyIx; ++i) {
        // We can only apply kPalette or kPaletteAndSpatial if we can indeed use
        // a palette.
        if ((i != kPalette && i != kPaletteAndSpatial) || use_palette) {
          assert(*crunch_configs_size < CRUNCH_CONFIGS_MAX);
          if (use_palette && (i == kPalette || i == kPaletteAndSpatial)) {
            // One configuration per palette sorting method.
            int sorting_method;
            for (sorting_method = 0; sorting_method < kPaletteSortingNum;
                 ++sorting_method) {
              const PaletteSorting typed_sorting_method =
                  (PaletteSorting)sorting_method;
              // TODO(vrabaud) kSortedDefault should be tested. It is omitted
              // for now for backward compatibility.
              if (typed_sorting_method == kUnusedPalette ||
                  typed_sorting_method == kSortedDefault) {
                continue;
              }
              crunch_configs[(*crunch_configs_size)].entropy_idx = i;
              crunch_configs[(*crunch_configs_size)].palette_sorting_type =
                  typed_sorting_method;
              ++*crunch_configs_size;
            }
          } else {
            crunch_configs[(*crunch_configs_size)].entropy_idx = i;
            crunch_configs[(*crunch_configs_size)].palette_sorting_type =
                kUnusedPalette;
            ++*crunch_configs_size;
          }
        }
      }
    } else {
      // Only choose the guessed best transform.
      *crunch_configs_size = 1;
      crunch_configs[0].entropy_idx = min_entropy_ix;
      crunch_configs[0].palette_sorting_type =
          use_palette ? kMinimizeDelta : kUnusedPalette;
      if (config->quality >= 75 && method == 5) {
        // Test with and without color cache.
        do_no_cache = 1;
        // If we have a palette, also check in combination with spatial.
        if (min_entropy_ix == kPalette) {
          *crunch_configs_size = 2;
          crunch_configs[1].entropy_idx = kPaletteAndSpatial;
          crunch_configs[1].palette_sorting_type = kMinimizeDelta;
        }
      }
    }
  }
  // Fill in the different LZ77s.
  assert(n_lz77s <= CRUNCH_SUBCONFIGS_MAX);
  for (i = 0; i < *crunch_configs_size; ++i) {
    int j;
    for (j = 0; j < n_lz77s; ++j) {
      assert(j < CRUNCH_SUBCONFIGS_MAX);
      crunch_configs[i].sub_configs[j].lz77 =
          (j == 0) ? kLZ77Standard | kLZ77RLE : kLZ77Box;
      crunch_configs[i].sub_configs[j].do_no_cache = do_no_cache;
    }
    crunch_configs[i].sub_configs_size = n_lz77s;
  }
  return 1;
}
421
422
// Initializes the encoder's hash chain and its four backward-reference
// buffers. Returns 0 on allocation failure, 1 on success.
static int EncoderInit(VP8LEncoder* const enc) {
  int i;
  const WebPPicture* const pic = enc->pic;
  const int pix_cnt = pic->width * pic->height;
  // Round the block size up, so at most MAX_REFS_BLOCK_PER_IMAGE blocks are
  // ever used.
  const int refs_block_size = (pix_cnt - 1) / MAX_REFS_BLOCK_PER_IMAGE + 1;

  if (!VP8LHashChainInit(&enc->hash_chain, pix_cnt)) return 0;
  for (i = 0; i < 4; ++i) {
    VP8LBackwardRefsInit(&enc->refs[i], refs_block_size);
  }
  return 1;
}
437
438
// Returns false in case of memory error.
// Builds, for every histogram in 'histogram_image', the five Huffman codes
// (literal/length, red, blue, alpha, distance) into 'huffman_codes'. All
// code arrays share one allocation ('mem_buf'), whose ownership passes to
// the caller through huffman_codes[0].codes on success.
static int GetHuffBitLengthsAndCodes(
    const VP8LHistogramSet* const histogram_image,
    HuffmanTreeCode* const huffman_codes) {
  int i, k;
  int ok = 0;
  uint64_t total_length_size = 0;
  uint8_t* mem_buf = NULL;
  const int histogram_image_size = histogram_image->size;
  int max_num_symbols = 0;
  uint8_t* buf_rle = NULL;
  HuffmanTree* huff_tree = NULL;

  // Iterate over all histograms and get the aggregate number of codes used.
  for (i = 0; i < histogram_image_size; ++i) {
    const VP8LHistogram* const histo = histogram_image->histograms[i];
    HuffmanTreeCode* const codes = &huffman_codes[5 * i];
    assert(histo != NULL);
    for (k = 0; k < 5; ++k) {
      // k == 0: literal/length alphabet (depends on the color cache size);
      // k == 4: distance alphabet; k == 1..3: one 256-entry ARGB channel.
      const int num_symbols =
          (k == 0) ? VP8LHistogramNumCodes(histo->palette_code_bits) :
          (k == 4) ? NUM_DISTANCE_CODES : 256;
      codes[k].num_symbols = num_symbols;
      total_length_size += num_symbols;
    }
  }

  // Allocate and Set Huffman codes.
  {
    uint16_t* codes;
    uint8_t* lengths;
    // Single allocation: all 'codes' arrays first, then all 'lengths'.
    mem_buf = (uint8_t*)WebPSafeCalloc(total_length_size,
                                       sizeof(*lengths) + sizeof(*codes));
    if (mem_buf == NULL) goto End;

    codes = (uint16_t*)mem_buf;
    lengths = (uint8_t*)&codes[total_length_size];
    // Carve per-tree slices out of the shared buffer.
    for (i = 0; i < 5 * histogram_image_size; ++i) {
      const int bit_length = huffman_codes[i].num_symbols;
      huffman_codes[i].codes = codes;
      huffman_codes[i].code_lengths = lengths;
      codes += bit_length;
      lengths += bit_length;
      if (max_num_symbols < bit_length) {
        max_num_symbols = bit_length;
      }
    }
  }

  // Scratch buffers sized for the largest alphabet.
  buf_rle = (uint8_t*)WebPSafeMalloc(1ULL, max_num_symbols);
  huff_tree = (HuffmanTree*)WebPSafeMalloc(3ULL * max_num_symbols,
                                           sizeof(*huff_tree));
  if (buf_rle == NULL || huff_tree == NULL) goto End;

  // Create Huffman trees.
  for (i = 0; i < histogram_image_size; ++i) {
    HuffmanTreeCode* const codes = &huffman_codes[5 * i];
    VP8LHistogram* const histo = histogram_image->histograms[i];
    VP8LCreateHuffmanTree(histo->literal, 15, buf_rle, huff_tree, codes + 0);
    VP8LCreateHuffmanTree(histo->red, 15, buf_rle, huff_tree, codes + 1);
    VP8LCreateHuffmanTree(histo->blue, 15, buf_rle, huff_tree, codes + 2);
    VP8LCreateHuffmanTree(histo->alpha, 15, buf_rle, huff_tree, codes + 3);
    VP8LCreateHuffmanTree(histo->distance, 15, buf_rle, huff_tree, codes + 4);
  }
  ok = 1;
 End:
  WebPSafeFree(huff_tree);
  WebPSafeFree(buf_rle);
  if (!ok) {
    // On failure, release the shared buffer and leave 'huffman_codes' with no
    // dangling pointers into it.
    WebPSafeFree(mem_buf);
    memset(huffman_codes, 0, 5 * histogram_image_size * sizeof(*huffman_codes));
  }
  return ok;
}
512
513
static void StoreHuffmanTreeOfHuffmanTreeToBitMask(
514
VP8LBitWriter* const bw, const uint8_t* code_length_bitdepth) {
515
// RFC 1951 will calm you down if you are worried about this funny sequence.
516
// This sequence is tuned from that, but more weighted for lower symbol count,
517
// and more spiking histograms.
518
static const uint8_t kStorageOrder[CODE_LENGTH_CODES] = {
519
17, 18, 0, 1, 2, 3, 4, 5, 16, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
520
};
521
int i;
522
// Throw away trailing zeros:
523
int codes_to_store = CODE_LENGTH_CODES;
524
for (; codes_to_store > 4; --codes_to_store) {
525
if (code_length_bitdepth[kStorageOrder[codes_to_store - 1]] != 0) {
526
break;
527
}
528
}
529
VP8LPutBits(bw, codes_to_store - 4, 4);
530
for (i = 0; i < codes_to_store; ++i) {
531
VP8LPutBits(bw, code_length_bitdepth[kStorageOrder[i]], 3);
532
}
533
}
534
535
static void ClearHuffmanTreeIfOnlyOneSymbol(
536
HuffmanTreeCode* const huffman_code) {
537
int k;
538
int count = 0;
539
for (k = 0; k < huffman_code->num_symbols; ++k) {
540
if (huffman_code->code_lengths[k] != 0) {
541
++count;
542
if (count > 1) return;
543
}
544
}
545
for (k = 0; k < huffman_code->num_symbols; ++k) {
546
huffman_code->code_lengths[k] = 0;
547
huffman_code->codes[k] = 0;
548
}
549
}
550
551
static void StoreHuffmanTreeToBitMask(
552
VP8LBitWriter* const bw,
553
const HuffmanTreeToken* const tokens, const int num_tokens,
554
const HuffmanTreeCode* const huffman_code) {
555
int i;
556
for (i = 0; i < num_tokens; ++i) {
557
const int ix = tokens[i].code;
558
const int extra_bits = tokens[i].extra_bits;
559
VP8LPutBits(bw, huffman_code->codes[ix], huffman_code->code_lengths[ix]);
560
switch (ix) {
561
case 16:
562
VP8LPutBits(bw, extra_bits, 2);
563
break;
564
case 17:
565
VP8LPutBits(bw, extra_bits, 3);
566
break;
567
case 18:
568
VP8LPutBits(bw, extra_bits, 7);
569
break;
570
}
571
}
572
}
573
574
// 'huff_tree' and 'tokens' are pre-allocated buffers.
// Writes the full (non-trivial) representation of 'tree': first a Huffman
// code over the code-length alphabet, then the RLE-tokenized code lengths,
// optionally trimmed of trailing zero tokens when that saves bits.
static void StoreFullHuffmanCode(VP8LBitWriter* const bw,
                                 HuffmanTree* const huff_tree,
                                 HuffmanTreeToken* const tokens,
                                 const HuffmanTreeCode* const tree) {
  uint8_t code_length_bitdepth[CODE_LENGTH_CODES] = { 0 };
  uint16_t code_length_bitdepth_symbols[CODE_LENGTH_CODES] = { 0 };
  const int max_tokens = tree->num_symbols;
  int num_tokens;
  HuffmanTreeCode huffman_code;
  huffman_code.num_symbols = CODE_LENGTH_CODES;
  huffman_code.code_lengths = code_length_bitdepth;
  huffman_code.codes = code_length_bitdepth_symbols;

  VP8LPutBits(bw, 0, 1);  // Not the small (1- or 2-symbol) representation.
  num_tokens = VP8LCreateCompressedHuffmanTree(tree, tokens, max_tokens);
  {
    // Build a Huffman code over the token alphabet itself.
    uint32_t histogram[CODE_LENGTH_CODES] = { 0 };
    uint8_t buf_rle[CODE_LENGTH_CODES] = { 0 };
    int i;
    for (i = 0; i < num_tokens; ++i) {
      ++histogram[tokens[i].code];
    }

    VP8LCreateHuffmanTree(histogram, 7, buf_rle, huff_tree, &huffman_code);
  }

  StoreHuffmanTreeOfHuffmanTreeToBitMask(bw, code_length_bitdepth);
  ClearHuffmanTreeIfOnlyOneSymbol(&huffman_code);
  {
    int trailing_zero_bits = 0;
    int trimmed_length = num_tokens;
    int write_trimmed_length;
    int length;
    int i = num_tokens;
    // Walk the tokens backwards, counting trailing zero-run tokens and the
    // bits they would cost if written out.
    while (i-- > 0) {
      const int ix = tokens[i].code;
      if (ix == 0 || ix == 17 || ix == 18) {
        --trimmed_length;   // discount trailing zeros
        trailing_zero_bits += code_length_bitdepth[ix];
        if (ix == 17) {
          trailing_zero_bits += 3;
        } else if (ix == 18) {
          trailing_zero_bits += 7;
        }
      } else {
        break;
      }
    }
    // Trim only when the trailing zeros would cost more than 12 bits.
    write_trimmed_length = (trimmed_length > 1 && trailing_zero_bits > 12);
    length = write_trimmed_length ? trimmed_length : num_tokens;
    VP8LPutBits(bw, write_trimmed_length, 1);
    if (write_trimmed_length) {
      if (trimmed_length == 2) {
        VP8LPutBits(bw, 0, 3 + 2);  // nbitpairs=1, trimmed_length=2
      } else {
        const int nbits = BitsLog2Floor(trimmed_length - 2);
        const int nbitpairs = nbits / 2 + 1;
        assert(trimmed_length > 2);
        assert(nbitpairs - 1 < 8);
        VP8LPutBits(bw, nbitpairs - 1, 3);
        VP8LPutBits(bw, trimmed_length - 2, nbitpairs * 2);
      }
    }
    StoreHuffmanTreeToBitMask(bw, tokens, length, &huffman_code);
  }
}
641
642
// 'huff_tree' and 'tokens' are pre-alloacted buffers.
643
static void StoreHuffmanCode(VP8LBitWriter* const bw,
644
HuffmanTree* const huff_tree,
645
HuffmanTreeToken* const tokens,
646
const HuffmanTreeCode* const huffman_code) {
647
int i;
648
int count = 0;
649
int symbols[2] = { 0, 0 };
650
const int kMaxBits = 8;
651
const int kMaxSymbol = 1 << kMaxBits;
652
653
// Check whether it's a small tree.
654
for (i = 0; i < huffman_code->num_symbols && count < 3; ++i) {
655
if (huffman_code->code_lengths[i] != 0) {
656
if (count < 2) symbols[count] = i;
657
++count;
658
}
659
}
660
661
if (count == 0) { // emit minimal tree for empty cases
662
// bits: small tree marker: 1, count-1: 0, large 8-bit code: 0, code: 0
663
VP8LPutBits(bw, 0x01, 4);
664
} else if (count <= 2 && symbols[0] < kMaxSymbol && symbols[1] < kMaxSymbol) {
665
VP8LPutBits(bw, 1, 1); // Small tree marker to encode 1 or 2 symbols.
666
VP8LPutBits(bw, count - 1, 1);
667
if (symbols[0] <= 1) {
668
VP8LPutBits(bw, 0, 1); // Code bit for small (1 bit) symbol value.
669
VP8LPutBits(bw, symbols[0], 1);
670
} else {
671
VP8LPutBits(bw, 1, 1);
672
VP8LPutBits(bw, symbols[0], 8);
673
}
674
if (count == 2) {
675
VP8LPutBits(bw, symbols[1], 8);
676
}
677
} else {
678
StoreFullHuffmanCode(bw, huff_tree, tokens, huffman_code);
679
}
680
}
681
682
static WEBP_INLINE void WriteHuffmanCode(VP8LBitWriter* const bw,
683
const HuffmanTreeCode* const code,
684
int code_index) {
685
const int depth = code->code_lengths[code_index];
686
const int symbol = code->codes[code_index];
687
VP8LPutBits(bw, symbol, depth);
688
}
689
690
static WEBP_INLINE void WriteHuffmanCodeWithExtraBits(
691
VP8LBitWriter* const bw,
692
const HuffmanTreeCode* const code,
693
int code_index,
694
int bits,
695
int n_bits) {
696
const int depth = code->code_lengths[code_index];
697
const int symbol = code->codes[code_index];
698
VP8LPutBits(bw, (bits << depth) | symbol, depth + n_bits);
699
}
700
701
// Writes the backward references 'refs' to the bitstream, selecting for each
// tile the Huffman code group its histogram symbol designates. Returns 0 and
// records VP8_ENC_ERROR_OUT_OF_MEMORY on 'pic' if the bit writer errored.
static int StoreImageToBitMask(VP8LBitWriter* const bw, int width,
                               int histo_bits,
                               const VP8LBackwardRefs* const refs,
                               const uint32_t* histogram_symbols,
                               const HuffmanTreeCode* const huffman_codes,
                               const WebPPicture* const pic) {
  const int histo_xsize = histo_bits ? VP8LSubSampleSize(width, histo_bits) : 1;
  const int tile_mask = (histo_bits == 0) ? 0 : -(1 << histo_bits);
  // x and y trace the position in the image.
  int x = 0;
  int y = 0;
  int tile_x = x & tile_mask;
  int tile_y = y & tile_mask;
  // Huffman group index: bits 8..23 of the tile's histogram symbol.
  int histogram_ix = (histogram_symbols[0] >> 8) & 0xffff;
  const HuffmanTreeCode* codes = huffman_codes + 5 * histogram_ix;
  VP8LRefsCursor c = VP8LRefsCursorInit(refs);
  while (VP8LRefsCursorOk(&c)) {
    const PixOrCopy* const v = c.cur_pos;
    // Re-select the Huffman codes whenever a new tile is entered.
    if ((tile_x != (x & tile_mask)) || (tile_y != (y & tile_mask))) {
      tile_x = x & tile_mask;
      tile_y = y & tile_mask;
      histogram_ix = (histogram_symbols[(y >> histo_bits) * histo_xsize +
                                        (x >> histo_bits)] >>
                      8) &
                     0xffff;
      codes = huffman_codes + 5 * histogram_ix;
    }
    if (PixOrCopyIsLiteral(v)) {
      // Channel emission order for literals; assumed to map to the G,R,B,A
      // code groups — confirm against PixOrCopyLiteral's channel indexing.
      static const uint8_t order[] = { 1, 2, 0, 3 };
      int k;
      for (k = 0; k < 4; ++k) {
        const int code = PixOrCopyLiteral(v, order[k]);
        WriteHuffmanCode(bw, codes + k, code);
      }
    } else if (PixOrCopyIsCacheIdx(v)) {
      // Cache indices live past the literal and length codes in alphabet 0.
      const int code = PixOrCopyCacheIdx(v);
      const int literal_ix = 256 + NUM_LENGTH_CODES + code;
      WriteHuffmanCode(bw, codes, literal_ix);
    } else {
      int bits, n_bits;
      int code;

      const int distance = PixOrCopyDistance(v);
      VP8LPrefixEncode(v->len, &code, &n_bits, &bits);
      WriteHuffmanCodeWithExtraBits(bw, codes, 256 + code, bits, n_bits);

      // Don't write the distance with the extra bits code since
      // the distance can be up to 18 bits of extra bits, and the prefix
      // 15 bits, totaling to 33, and our PutBits only supports up to 32 bits.
      VP8LPrefixEncode(distance, &code, &n_bits, &bits);
      WriteHuffmanCode(bw, codes + 4, code);
      VP8LPutBits(bw, bits, n_bits);
    }
    // Advance the (x, y) position by the number of pixels just coded.
    x += PixOrCopyLength(v);
    while (x >= width) {
      x -= width;
      ++y;
    }
    VP8LRefsCursorNext(&c);
  }
  if (bw->error) {
    return WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
  }
  return 1;
}
766
767
// Special case of EncodeImageInternal() for cache-bits=0, histo_bits=31.
// pic and percent are for progress.
// Encodes 'argb' with a single histogram (one Huffman code group), no color
// cache and no Huffman image. All resources are released through the common
// Error label; returns true iff no error was recorded on 'pic'.
static int EncodeImageNoHuffman(VP8LBitWriter* const bw,
                                const uint32_t* const argb,
                                VP8LHashChain* const hash_chain,
                                VP8LBackwardRefs* const refs_array, int width,
                                int height, int quality, int low_effort,
                                const WebPPicture* const pic, int percent_range,
                                int* const percent) {
  int i;
  int max_tokens = 0;
  VP8LBackwardRefs* refs;
  HuffmanTreeToken* tokens = NULL;
  HuffmanTreeCode huffman_codes[5] = {{0, NULL, NULL}};
  const uint32_t histogram_symbols[1] = {0};  // only one tree, one symbol
  int cache_bits = 0;
  VP8LHistogramSet* histogram_image = NULL;
  HuffmanTree* const huff_tree = (HuffmanTree*)WebPSafeMalloc(
      3ULL * CODE_LENGTH_CODES, sizeof(*huff_tree));
  if (huff_tree == NULL) {
    WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  // Calculate backward references from ARGB image.
  if (!VP8LHashChainFill(hash_chain, quality, argb, width, height, low_effort,
                         pic, percent_range / 2, percent)) {
    goto Error;
  }
  if (!VP8LGetBackwardReferences(width, height, argb, quality, /*low_effort=*/0,
                                 kLZ77Standard | kLZ77RLE, cache_bits,
                                 /*do_no_cache=*/0, hash_chain, refs_array,
                                 &cache_bits, pic,
                                 percent_range - percent_range / 2, percent)) {
    goto Error;
  }
  refs = &refs_array[0];
  histogram_image = VP8LAllocateHistogramSet(1, cache_bits);
  if (histogram_image == NULL) {
    WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }
  VP8LHistogramSetClear(histogram_image);

  // Build histogram image and symbols from backward references.
  VP8LHistogramStoreRefs(refs, /*distance_modifier=*/NULL,
                         /*distance_modifier_arg0=*/0,
                         histogram_image->histograms[0]);

  // Create Huffman bit lengths and codes for each histogram image.
  assert(histogram_image->size == 1);
  if (!GetHuffBitLengthsAndCodes(histogram_image, huffman_codes)) {
    WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  // No color cache, no Huffman image.
  VP8LPutBits(bw, 0, 1);

  // Find maximum number of symbols for the huffman tree-set.
  for (i = 0; i < 5; ++i) {
    HuffmanTreeCode* const codes = &huffman_codes[i];
    if (max_tokens < codes->num_symbols) {
      max_tokens = codes->num_symbols;
    }
  }

  tokens = (HuffmanTreeToken*)WebPSafeMalloc(max_tokens, sizeof(*tokens));
  if (tokens == NULL) {
    WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  // Store Huffman codes.
  for (i = 0; i < 5; ++i) {
    HuffmanTreeCode* const codes = &huffman_codes[i];
    StoreHuffmanCode(bw, huff_tree, tokens, codes);
    ClearHuffmanTreeIfOnlyOneSymbol(codes);
  }

  // Store actual literals.
  if (!StoreImageToBitMask(bw, width, 0, refs, histogram_symbols, huffman_codes,
                           pic)) {
    goto Error;
  }

 Error:
  // Shared cleanup for success and failure; huffman_codes[0].codes owns the
  // buffer shared by all five code arrays (see GetHuffBitLengthsAndCodes).
  WebPSafeFree(tokens);
  WebPSafeFree(huff_tree);
  VP8LFreeHistogramSet(histogram_image);
  WebPSafeFree(huffman_codes[0].codes);
  return (pic->error_code == VP8_ENC_OK);
}
860
861
// pic and percent are for progress.
// Writes the fully entropy-coded image to 'bw': color-cache parameters, the
// optional histogram (meta-Huffman) image, the Huffman code tables, and the
// LZ77/cache-coded pixel stream. Each crunch sub-configuration (and, when
// requested, the no-color-cache variant) is tried and the smallest resulting
// bitstream is kept. On success, *cache_bits, *hdr_size and *data_size
// describe the winning encoding.
static int EncodeImageInternal(
    VP8LBitWriter* const bw, const uint32_t* const argb,
    VP8LHashChain* const hash_chain, VP8LBackwardRefs refs_array[4], int width,
    int height, int quality, int low_effort, const CrunchConfig* const config,
    int* cache_bits, int histogram_bits_in, size_t init_byte_position,
    int* const hdr_size, int* const data_size, const WebPPicture* const pic,
    int percent_range, int* const percent) {
  const uint32_t histogram_image_xysize =
      VP8LSubSampleSize(width, histogram_bits_in) *
      VP8LSubSampleSize(height, histogram_bits_in);
  int remaining_percent = percent_range;
  int percent_start = *percent;
  VP8LHistogramSet* histogram_image = NULL;
  VP8LHistogram* tmp_histo = NULL;
  uint32_t i, histogram_image_size = 0;
  size_t bit_array_size = 0;
  // 3 trees of CODE_LENGTH_CODES nodes are enough for StoreHuffmanCode().
  HuffmanTree* const huff_tree = (HuffmanTree*)WebPSafeMalloc(
      3ULL * CODE_LENGTH_CODES, sizeof(*huff_tree));
  HuffmanTreeToken* tokens = NULL;
  HuffmanTreeCode* huffman_codes = NULL;
  uint32_t* const histogram_argb = (uint32_t*)WebPSafeMalloc(
      histogram_image_xysize, sizeof(*histogram_argb));
  int sub_configs_idx;
  int cache_bits_init, write_histogram_image;
  // bw_init snapshots the writer so each iteration can restart from the same
  // position; bw_best keeps the smallest bitstream produced so far.
  VP8LBitWriter bw_init = *bw, bw_best;
  int hdr_size_tmp;
  VP8LHashChain hash_chain_histogram;  // histogram image hash chain
  size_t bw_size_best = ~(size_t)0;
  assert(histogram_bits_in >= MIN_HUFFMAN_BITS);
  assert(histogram_bits_in <= MAX_HUFFMAN_BITS);
  assert(hdr_size != NULL);
  assert(data_size != NULL);

  // Zero the hash chain so that VP8LHashChainClear() in Error is always safe.
  memset(&hash_chain_histogram, 0, sizeof(hash_chain_histogram));
  if (!VP8LBitWriterInit(&bw_best, 0)) {
    WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  // Make sure we can allocate the different objects.
  if (huff_tree == NULL || histogram_argb == NULL ||
      !VP8LHashChainInit(&hash_chain_histogram, histogram_image_xysize)) {
    WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  percent_range = remaining_percent / 5;
  if (!VP8LHashChainFill(hash_chain, quality, argb, width, height,
                         low_effort, pic, percent_range, percent)) {
    goto Error;
  }
  percent_start += percent_range;
  remaining_percent -= percent_range;

  // If the value is different from zero, it has been set during the palette
  // analysis.
  cache_bits_init = (*cache_bits == 0) ? MAX_COLOR_CACHE_BITS : *cache_bits;
  // If several iterations will happen, clone into bw_best.
  if ((config->sub_configs_size > 1 || config->sub_configs[0].do_no_cache) &&
      !VP8LBitWriterClone(bw, &bw_best)) {
    WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  for (sub_configs_idx = 0; sub_configs_idx < config->sub_configs_size;
       ++sub_configs_idx) {
    const CrunchSubConfig* const sub_config =
        &config->sub_configs[sub_configs_idx];
    int cache_bits_best, i_cache;
    int i_remaining_percent = remaining_percent / config->sub_configs_size;
    int i_percent_range = i_remaining_percent / 4;
    i_remaining_percent -= i_percent_range;

    if (!VP8LGetBackwardReferences(
            width, height, argb, quality, low_effort, sub_config->lz77,
            cache_bits_init, sub_config->do_no_cache, hash_chain,
            &refs_array[0], &cache_bits_best, pic, i_percent_range, percent)) {
      goto Error;
    }

    // i_cache == 0 uses the best color-cache size; i_cache == 1 re-tries
    // without any color cache.
    for (i_cache = 0; i_cache < (sub_config->do_no_cache ? 2 : 1); ++i_cache) {
      const int cache_bits_tmp = (i_cache == 0) ? cache_bits_best : 0;
      int histogram_bits = histogram_bits_in;
      // Speed-up: no need to study the no-cache case if it was already studied
      // in i_cache == 0.
      if (i_cache == 1 && cache_bits_best == 0) break;

      // Reset the bit writer for this iteration.
      VP8LBitWriterReset(&bw_init, bw);

      // Build histogram image and symbols from backward references.
      histogram_image =
          VP8LAllocateHistogramSet(histogram_image_xysize, cache_bits_tmp);
      tmp_histo = VP8LAllocateHistogram(cache_bits_tmp);
      if (histogram_image == NULL || tmp_histo == NULL) {
        WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
        goto Error;
      }

      i_percent_range = i_remaining_percent / 3;
      i_remaining_percent -= i_percent_range;
      if (!VP8LGetHistoImageSymbols(
              width, height, &refs_array[i_cache], quality, low_effort,
              histogram_bits, cache_bits_tmp, histogram_image, tmp_histo,
              histogram_argb, pic, i_percent_range, percent)) {
        goto Error;
      }
      // Create Huffman bit lengths and codes for each histogram image.
      // Each histogram needs 5 Huffman codes (green+len, red, blue, alpha,
      // distance).
      histogram_image_size = histogram_image->size;
      bit_array_size = 5 * histogram_image_size;
      huffman_codes = (HuffmanTreeCode*)WebPSafeCalloc(bit_array_size,
                                                       sizeof(*huffman_codes));
      // Note: some histogram_image entries may point to tmp_histos[], so the
      // latter need to outlive the following call to
      // GetHuffBitLengthsAndCodes().
      if (huffman_codes == NULL ||
          !GetHuffBitLengthsAndCodes(histogram_image, huffman_codes)) {
        WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
        goto Error;
      }
      // Free combined histograms.
      VP8LFreeHistogramSet(histogram_image);
      histogram_image = NULL;

      // Free scratch histograms.
      VP8LFreeHistogram(tmp_histo);
      tmp_histo = NULL;

      // Color Cache parameters.
      if (cache_bits_tmp > 0) {
        VP8LPutBits(bw, 1, 1);
        VP8LPutBits(bw, cache_bits_tmp, 4);
      } else {
        VP8LPutBits(bw, 0, 1);
      }

      // Huffman image + meta huffman.
      // Recompute the number of distinct histograms and shift the indices
      // into the green channel, where the histogram image stores them.
      histogram_image_size = 0;
      for (i = 0; i < histogram_image_xysize; ++i) {
        if (histogram_argb[i] >= histogram_image_size) {
          histogram_image_size = histogram_argb[i] + 1;
        }
        histogram_argb[i] <<= 8;
      }

      write_histogram_image = (histogram_image_size > 1);
      VP8LPutBits(bw, write_histogram_image, 1);
      if (write_histogram_image) {
        VP8LOptimizeSampling(histogram_argb, width, height, histogram_bits_in,
                             MAX_HUFFMAN_BITS, &histogram_bits);
        VP8LPutBits(bw, histogram_bits - 2, 3);
        i_percent_range = i_remaining_percent / 2;
        i_remaining_percent -= i_percent_range;
        if (!EncodeImageNoHuffman(
                bw, histogram_argb, &hash_chain_histogram, &refs_array[2],
                VP8LSubSampleSize(width, histogram_bits),
                VP8LSubSampleSize(height, histogram_bits), quality, low_effort,
                pic, i_percent_range, percent)) {
          goto Error;
        }
      }

      // Store Huffman codes.
      {
        int max_tokens = 0;
        // Find maximum number of symbols for the huffman tree-set.
        for (i = 0; i < 5 * histogram_image_size; ++i) {
          HuffmanTreeCode* const codes = &huffman_codes[i];
          if (max_tokens < codes->num_symbols) {
            max_tokens = codes->num_symbols;
          }
        }
        tokens = (HuffmanTreeToken*)WebPSafeMalloc(max_tokens, sizeof(*tokens));
        if (tokens == NULL) {
          WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
          goto Error;
        }
        for (i = 0; i < 5 * histogram_image_size; ++i) {
          HuffmanTreeCode* const codes = &huffman_codes[i];
          StoreHuffmanCode(bw, huff_tree, tokens, codes);
          ClearHuffmanTreeIfOnlyOneSymbol(codes);
        }
      }
      // Store actual literals.
      hdr_size_tmp = (int)(VP8LBitWriterNumBytes(bw) - init_byte_position);
      if (!StoreImageToBitMask(bw, width, histogram_bits, &refs_array[i_cache],
                               histogram_argb, huffman_codes, pic)) {
        goto Error;
      }
      // Keep track of the smallest image so far.
      if (VP8LBitWriterNumBytes(bw) < bw_size_best) {
        bw_size_best = VP8LBitWriterNumBytes(bw);
        *cache_bits = cache_bits_tmp;
        *hdr_size = hdr_size_tmp;
        *data_size =
            (int)(VP8LBitWriterNumBytes(bw) - init_byte_position - *hdr_size);
        VP8LBitWriterSwap(bw, &bw_best);
      }
      WebPSafeFree(tokens);
      tokens = NULL;
      if (huffman_codes != NULL) {
        WebPSafeFree(huffman_codes->codes);
        WebPSafeFree(huffman_codes);
        huffman_codes = NULL;
      }
    }
  }
  // Move the overall best bitstream into the output writer.
  VP8LBitWriterSwap(bw, &bw_best);

  if (!WebPReportProgress(pic, percent_start + remaining_percent, percent)) {
    goto Error;
  }

 Error:
  WebPSafeFree(tokens);
  WebPSafeFree(huff_tree);
  VP8LFreeHistogramSet(histogram_image);
  VP8LFreeHistogram(tmp_histo);
  VP8LHashChainClear(&hash_chain_histogram);
  if (huffman_codes != NULL) {
    WebPSafeFree(huffman_codes->codes);
    WebPSafeFree(huffman_codes);
  }
  WebPSafeFree(histogram_argb);
  VP8LBitWriterWipeOut(&bw_best);
  return (pic->error_code == VP8_ENC_OK);
}
1089
1090
// -----------------------------------------------------------------------------
1091
// Transforms
1092
1093
static void ApplySubtractGreen(VP8LEncoder* const enc, int width, int height,
1094
VP8LBitWriter* const bw) {
1095
VP8LPutBits(bw, TRANSFORM_PRESENT, 1);
1096
VP8LPutBits(bw, SUBTRACT_GREEN_TRANSFORM, 2);
1097
VP8LSubtractGreenFromBlueAndRed(enc->argb, width * height);
1098
}
1099
1100
// Computes the predictor (spatial) transform, writes its header bits to 'bw'
// and stores the predictor image as an entropy-coded sub-image. On success,
// '*best_bits' holds the chosen transform sub-sampling exponent.
static int ApplyPredictFilter(VP8LEncoder* const enc, int width, int height,
                              int quality, int low_effort,
                              int used_subtract_green, VP8LBitWriter* const bw,
                              int percent_range, int* const percent,
                              int* const best_bits) {
  // Near-lossless is effectively disabled (strength 100) when a palette is
  // in use.
  const int near_lossless_strength =
      enc->use_palette ? 100 : enc->config->near_lossless;
  const int max_bits = ClampBits(width, height, enc->predictor_transform_bits,
                                 MIN_TRANSFORM_BITS, MAX_TRANSFORM_BITS,
                                 MAX_PREDICTOR_IMAGE_SIZE);
  // Methods above 4 widen the searched range of sub-sampling values.
  const int min_bits = ClampBits(
      width, height,
      max_bits - 2 * (enc->config->method > 4 ? enc->config->method - 4 : 0),
      MIN_TRANSFORM_BITS, MAX_TRANSFORM_BITS, MAX_PREDICTOR_IMAGE_SIZE);

  if (!VP8LResidualImage(width, height, min_bits, max_bits, low_effort,
                         enc->argb, enc->argb_scratch, enc->transform_data,
                         near_lossless_strength, enc->config->exact,
                         used_subtract_green, enc->pic, percent_range / 2,
                         percent, best_bits)) {
    return 0;
  }
  VP8LPutBits(bw, TRANSFORM_PRESENT, 1);
  VP8LPutBits(bw, PREDICTOR_TRANSFORM, 2);
  assert(*best_bits >= MIN_TRANSFORM_BITS && *best_bits <= MAX_TRANSFORM_BITS);
  VP8LPutBits(bw, *best_bits - MIN_TRANSFORM_BITS, NUM_TRANSFORM_BITS);
  // The predictor image itself is stored like a regular sub-image.
  return EncodeImageNoHuffman(
      bw, enc->transform_data, &enc->hash_chain, &enc->refs[0],
      VP8LSubSampleSize(width, *best_bits),
      VP8LSubSampleSize(height, *best_bits), quality, low_effort, enc->pic,
      percent_range - percent_range / 2, percent);
}
1132
1133
// Computes the cross-color transform, writes its header bits to 'bw' and
// stores the color-transform image as an entropy-coded sub-image. On success,
// '*best_bits' holds the chosen transform sub-sampling exponent.
static int ApplyCrossColorFilter(VP8LEncoder* const enc, int width, int height,
                                 int quality, int low_effort,
                                 VP8LBitWriter* const bw, int percent_range,
                                 int* const percent, int* const best_bits) {
  const int min_bits = enc->cross_color_transform_bits;

  if (!VP8LColorSpaceTransform(width, height, min_bits, quality, enc->argb,
                               enc->transform_data, enc->pic, percent_range / 2,
                               percent, best_bits)) {
    return 0;
  }
  VP8LPutBits(bw, TRANSFORM_PRESENT, 1);
  VP8LPutBits(bw, CROSS_COLOR_TRANSFORM, 2);
  assert(*best_bits >= MIN_TRANSFORM_BITS && *best_bits <= MAX_TRANSFORM_BITS);
  VP8LPutBits(bw, *best_bits - MIN_TRANSFORM_BITS, NUM_TRANSFORM_BITS);
  // The color-transform image is stored like a regular sub-image.
  return EncodeImageNoHuffman(
      bw, enc->transform_data, &enc->hash_chain, &enc->refs[0],
      VP8LSubSampleSize(width, *best_bits),
      VP8LSubSampleSize(height, *best_bits), quality, low_effort, enc->pic,
      percent_range - percent_range / 2, percent);
}
1154
1155
// -----------------------------------------------------------------------------
1156
1157
// Emits the RIFF/WEBP/VP8L container header through pic->writer, patching the
// RIFF and VP8L chunk sizes into the template.
static int WriteRiffHeader(const WebPPicture* const pic, size_t riff_size,
                           size_t vp8l_size) {
  uint8_t header[RIFF_HEADER_SIZE + CHUNK_HEADER_SIZE + VP8L_SIGNATURE_SIZE] = {
    'R', 'I', 'F', 'F', 0, 0, 0, 0, 'W', 'E', 'B', 'P',
    'V', 'P', '8', 'L', 0, 0, 0, 0, VP8L_MAGIC_BYTE,
  };
  // Fill the two little-endian size fields over their zero placeholders.
  PutLE32(header + TAG_SIZE, (uint32_t)riff_size);
  PutLE32(header + RIFF_HEADER_SIZE + TAG_SIZE, (uint32_t)vp8l_size);
  return pic->writer(header, sizeof(header), pic);
}
1167
1168
static int WriteImageSize(const WebPPicture* const pic,
1169
VP8LBitWriter* const bw) {
1170
const int width = pic->width - 1;
1171
const int height = pic->height - 1;
1172
assert(width < WEBP_MAX_DIMENSION && height < WEBP_MAX_DIMENSION);
1173
1174
VP8LPutBits(bw, width, VP8L_IMAGE_SIZE_BITS);
1175
VP8LPutBits(bw, height, VP8L_IMAGE_SIZE_BITS);
1176
return !bw->error;
1177
}
1178
1179
// Writes the has-alpha flag and the VP8L bitstream version to 'bw'.
// Returns 0 if the bit writer is in an error state.
static int WriteRealAlphaAndVersion(VP8LBitWriter* const bw, int has_alpha) {
  VP8LPutBits(bw, has_alpha, 1);
  VP8LPutBits(bw, VP8L_VERSION, VP8L_VERSION_BITS);
  return !bw->error;
}
1184
1185
// Flushes the bit writer and emits the complete WebP container through
// pic->writer: RIFF header, VP8L chunk header, the payload, and an optional
// padding byte. On success, '*coded_size' is the total number of bytes
// written.
static int WriteImage(const WebPPicture* const pic, VP8LBitWriter* const bw,
                      size_t* const coded_size) {
  const uint8_t* const webpll_data = VP8LBitWriterFinish(bw);
  const size_t webpll_size = VP8LBitWriterNumBytes(bw);
  const size_t vp8l_size = VP8L_SIGNATURE_SIZE + webpll_size;
  const size_t pad = vp8l_size & 1;  // RIFF chunks are 2-byte aligned.
  const size_t riff_size = TAG_SIZE + CHUNK_HEADER_SIZE + vp8l_size + pad;
  *coded_size = 0;

  if (bw->error) {
    return WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
  }

  if (!WriteRiffHeader(pic, riff_size, vp8l_size) ||
      !pic->writer(webpll_data, webpll_size, pic)) {
    return WebPEncodingSetError(pic, VP8_ENC_ERROR_BAD_WRITE);
  }

  if (pad) {
    const uint8_t pad_byte[1] = { 0 };
    if (!pic->writer(pad_byte, 1, pic)) {
      return WebPEncodingSetError(pic, VP8_ENC_ERROR_BAD_WRITE);
    }
  }
  *coded_size = CHUNK_HEADER_SIZE + riff_size;
  return 1;
}
1212
1213
// -----------------------------------------------------------------------------
1214
1215
// Releases the shared transform scratch memory and resets its bookkeeping.
static void ClearTransformBuffer(VP8LEncoder* const enc) {
  enc->transform_mem_size = 0;
  WebPSafeFree(enc->transform_mem);
  enc->transform_mem = NULL;
}
1220
1221
// Allocates the memory for argb (W x H) buffer, 2 rows of context for
// prediction and transform data.
// Flags influencing the memory allocated:
// enc->transform_bits
// enc->use_predict, enc->use_cross_color
// The single allocation is carved into three aligned sub-buffers:
// enc->argb, enc->argb_scratch and enc->transform_data. An existing
// allocation is reused when it is already large enough.
static int AllocateTransformBuffer(VP8LEncoder* const enc, int width,
                                   int height) {
  const uint64_t image_size = (uint64_t)width * height;
  // VP8LResidualImage needs room for 2 scanlines of uint32 pixels with an extra
  // pixel in each, plus 2 regular scanlines of bytes.
  // TODO(skal): Clean up by using arithmetic in bytes instead of words.
  const uint64_t argb_scratch_size =
      enc->use_predict ? (width + 1) * 2 + (width * 2 + sizeof(uint32_t) - 1) /
                                               sizeof(uint32_t)
                       : 0;
  const uint64_t transform_data_size =
      (enc->use_predict || enc->use_cross_color)
          ? (uint64_t)VP8LSubSampleSize(width, MIN_TRANSFORM_BITS) *
                VP8LSubSampleSize(height, MIN_TRANSFORM_BITS)
          : 0;
  // Extra words reserved so each sub-buffer can be aligned with WEBP_ALIGN.
  const uint64_t max_alignment_in_words =
      (WEBP_ALIGN_CST + sizeof(uint32_t) - 1) / sizeof(uint32_t);
  const uint64_t mem_size = image_size + max_alignment_in_words +
                            argb_scratch_size + max_alignment_in_words +
                            transform_data_size;
  uint32_t* mem = enc->transform_mem;
  if (mem == NULL || mem_size > enc->transform_mem_size) {
    // Existing buffer is absent or too small: reallocate.
    ClearTransformBuffer(enc);
    mem = (uint32_t*)WebPSafeMalloc(mem_size, sizeof(*mem));
    if (mem == NULL) {
      return WebPEncodingSetError(enc->pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
    }
    enc->transform_mem = mem;
    enc->transform_mem_size = (size_t)mem_size;
    // Fresh memory: the previous ARGB content (if any) is gone.
    enc->argb_content = kEncoderNone;
  }
  // Carve the allocation into the three aligned sub-buffers.
  enc->argb = mem;
  mem = (uint32_t*)WEBP_ALIGN(mem + image_size);
  enc->argb_scratch = mem;
  mem = (uint32_t*)WEBP_ALIGN(mem + argb_scratch_size);
  enc->transform_data = mem;

  enc->current_width = width;
  return 1;
}
1266
1267
// Copies the input picture's ARGB samples into the encoder's working buffer,
// row by row (the source may have a stride larger than the width). A no-op if
// the buffer already holds the picture's ARGB content.
static int MakeInputImageCopy(VP8LEncoder* const enc) {
  const WebPPicture* const picture = enc->pic;
  const int width = picture->width;
  const int height = picture->height;

  if (!AllocateTransformBuffer(enc, width, height)) return 0;
  if (enc->argb_content == kEncoderARGB) return 1;  // nothing to do

  {
    int y;
    const uint32_t* src_row = picture->argb;
    uint32_t* dst_row = enc->argb;
    for (y = 0; y < height; ++y) {
      memcpy(dst_row, src_row, width * sizeof(*dst_row));
      src_row += picture->argb_stride;
      dst_row += width;
    }
  }
  enc->argb_content = kEncoderARGB;
  assert(enc->current_width == width);
  return 1;
}
1289
1290
// -----------------------------------------------------------------------------
1291
1292
#define APPLY_PALETTE_GREEDY_MAX 4
1293
1294
// Linear search for 'color' among the first APPLY_PALETTE_GREEDY_MAX - 1
// palette entries; falls through to the last index when not found.
// Only valid for palettes smaller than APPLY_PALETTE_GREEDY_MAX.
static WEBP_INLINE uint32_t SearchColorGreedy(const uint32_t palette[],
                                              int palette_size,
                                              uint32_t color) {
  uint32_t idx;
  (void)palette_size;
  assert(palette_size < APPLY_PALETTE_GREEDY_MAX);
  assert(3 == APPLY_PALETTE_GREEDY_MAX - 1);
  for (idx = 0; idx < APPLY_PALETTE_GREEDY_MAX - 1; ++idx) {
    if (color == palette[idx]) return idx;
  }
  return APPLY_PALETTE_GREEDY_MAX - 1;
}
1305
1306
// Hash variant 0: simply use the green channel as the bucket index.
static WEBP_INLINE uint32_t ApplyPaletteHash0(uint32_t color) {
  const uint32_t green = (color >> 8) & 0xff;
  return green;
}
1310
1311
#define PALETTE_INV_SIZE_BITS 11
1312
#define PALETTE_INV_SIZE (1 << PALETTE_INV_SIZE_BITS)
1313
1314
// Hash variant 1: multiplicative hash of the RGB part (alpha is ignored),
// keeping the top PALETTE_INV_SIZE_BITS bits of the 32-bit product.
static WEBP_INLINE uint32_t ApplyPaletteHash1(uint32_t color) {
  const uint64_t kHashMul = 4222244071ull;
  return ((uint32_t)((color & 0x00ffffffu) * kHashMul)) >>
         (32 - PALETTE_INV_SIZE_BITS);
}
1319
1320
// Hash variant 2: multiplicative hash of the RGB part (alpha is ignored)
// using the Mersenne prime 2^31 - 1 as multiplier.
static WEBP_INLINE uint32_t ApplyPaletteHash2(uint32_t color) {
  const uint64_t kHashMul = (1ull << 31) - 1;
  return ((uint32_t)((color & 0x00ffffffu) * kHashMul)) >>
         (32 - PALETTE_INV_SIZE_BITS);
}
1325
1326
// Use 1 pixel cache for ARGB pixels.
// Converts every src pixel to its palette index (computed by the COLOR_INDEX
// expression, which may reference 'pix'), buffering one row of indices in
// 'tmp_row' before packing it into 'dst' via VP8LBundleColorMap(). Relies on
// the expanding function's locals: src, dst, src_stride, dst_stride, width,
// height, xbits, tmp_row, palette, and the loop variables x and y.
#define APPLY_PALETTE_FOR(COLOR_INDEX) do { \
  uint32_t prev_pix = palette[0]; \
  uint32_t prev_idx = 0; \
  for (y = 0; y < height; ++y) { \
    for (x = 0; x < width; ++x) { \
      const uint32_t pix = src[x]; \
      if (pix != prev_pix) { \
        prev_idx = COLOR_INDEX; \
        prev_pix = pix; \
      } \
      tmp_row[x] = prev_idx; \
    } \
    VP8LBundleColorMap(tmp_row, width, xbits, dst); \
    src += src_stride; \
    dst += dst_stride; \
  } \
} while (0)
1344
1345
// Remap argb values in src[] to packed palettes entries in dst[]
// using 'row' as a temporary buffer of size 'width'.
// We assume that all src[] values have a corresponding entry in the palette.
// Note: src[] can be the same as dst[]
// Returns 0 (and sets the picture error code) on allocation failure.
static int ApplyPalette(const uint32_t* src, uint32_t src_stride, uint32_t* dst,
                        uint32_t dst_stride, const uint32_t* palette,
                        int palette_size, int width, int height, int xbits,
                        const WebPPicture* const pic) {
  // TODO(skal): this tmp buffer is not needed if VP8LBundleColorMap() can be
  // made to work in-place.
  uint8_t* const tmp_row = (uint8_t*)WebPSafeMalloc(width, sizeof(*tmp_row));
  int x, y;

  if (tmp_row == NULL) {
    return WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
  }

  if (palette_size < APPLY_PALETTE_GREEDY_MAX) {
    // Tiny palette: a few direct comparisons beat any hash scheme.
    APPLY_PALETTE_FOR(SearchColorGreedy(palette, palette_size, pix));
  } else {
    int i, j;
    uint16_t buffer[PALETTE_INV_SIZE];
    uint32_t (*const hash_functions[])(uint32_t) = {
        ApplyPaletteHash0, ApplyPaletteHash1, ApplyPaletteHash2
    };

    // Try to find a perfect hash function able to go from a color to an index
    // within 1 << PALETTE_INV_SIZE_BITS in order to build a hash map to go
    // from color to index in palette.
    for (i = 0; i < 3; ++i) {
      int use_LUT = 1;
      // Set each element in buffer to max uint16_t.
      memset(buffer, 0xff, sizeof(buffer));
      for (j = 0; j < palette_size; ++j) {
        const uint32_t ind = hash_functions[i](palette[j]);
        if (buffer[ind] != 0xffffu) {
          // Collision: this hash function is not perfect for this palette.
          use_LUT = 0;
          break;
        } else {
          buffer[ind] = j;
        }
      }
      if (use_LUT) break;
    }

    if (i == 0) {
      APPLY_PALETTE_FOR(buffer[ApplyPaletteHash0(pix)]);
    } else if (i == 1) {
      APPLY_PALETTE_FOR(buffer[ApplyPaletteHash1(pix)]);
    } else if (i == 2) {
      APPLY_PALETTE_FOR(buffer[ApplyPaletteHash2(pix)]);
    } else {
      // No perfect hash found: fall back to binary search in a sorted copy of
      // the palette.
      uint32_t idx_map[MAX_PALETTE_SIZE];
      uint32_t palette_sorted[MAX_PALETTE_SIZE];
      PrepareMapToPalette(palette, palette_size, palette_sorted, idx_map);
      APPLY_PALETTE_FOR(
          idx_map[SearchColorNoIdx(palette_sorted, pix, palette_size)]);
    }
  }
  WebPSafeFree(tmp_row);
  return 1;
}
1407
#undef APPLY_PALETTE_FOR
1408
#undef PALETTE_INV_SIZE_BITS
1409
#undef PALETTE_INV_SIZE
1410
#undef APPLY_PALETTE_GREEDY_MAX
1411
1412
// Note: Expects "enc->palette" to be set properly.
1413
static int MapImageFromPalette(VP8LEncoder* const enc) {
1414
const WebPPicture* const pic = enc->pic;
1415
const int width = pic->width;
1416
const int height = pic->height;
1417
const uint32_t* const palette = enc->palette;
1418
const int palette_size = enc->palette_size;
1419
int xbits;
1420
1421
// Replace each input pixel by corresponding palette index.
1422
// This is done line by line.
1423
if (palette_size <= 4) {
1424
xbits = (palette_size <= 2) ? 3 : 2;
1425
} else {
1426
xbits = (palette_size <= 16) ? 1 : 0;
1427
}
1428
1429
if (!AllocateTransformBuffer(enc, VP8LSubSampleSize(width, xbits), height)) {
1430
return 0;
1431
}
1432
if (!ApplyPalette(pic->argb, pic->argb_stride, enc->argb,
1433
enc->current_width, palette, palette_size, width, height,
1434
xbits, pic)) {
1435
return 0;
1436
}
1437
enc->argb_content = kEncoderPalette;
1438
return 1;
1439
}
1440
1441
// Save palette[] to bitstream.
// Writes the color-indexing transform header, then stores the palette as a
// 1-pixel-high image of deltas between consecutive colors (deltas compress
// better than raw values).
static int EncodePalette(VP8LBitWriter* const bw, int low_effort,
                         VP8LEncoder* const enc, int percent_range,
                         int* const percent) {
  int i;
  uint32_t tmp_palette[MAX_PALETTE_SIZE];
  const int palette_size = enc->palette_size;
  const uint32_t* const palette = enc->palette;
  // If the last element is 0, do not store it and count on automatic palette
  // 0-filling. This can only happen if there is no pixel packing, hence if
  // there are strictly more than 16 colors (after 0 is removed).
  const uint32_t encoded_palette_size =
      (enc->palette[palette_size - 1] == 0 && palette_size > 17)
          ? palette_size - 1
          : palette_size;
  VP8LPutBits(bw, TRANSFORM_PRESENT, 1);
  VP8LPutBits(bw, COLOR_INDEXING_TRANSFORM, 2);
  assert(palette_size >= 1 && palette_size <= MAX_PALETTE_SIZE);
  VP8LPutBits(bw, encoded_palette_size - 1, 8);
  // Delta-code the palette, back to front so palette[] can be read in place.
  for (i = encoded_palette_size - 1; i >= 1; --i) {
    tmp_palette[i] = VP8LSubPixels(palette[i], palette[i - 1]);
  }
  tmp_palette[0] = palette[0];
  return EncodeImageNoHuffman(
      bw, tmp_palette, &enc->hash_chain, &enc->refs[0], encoded_palette_size,
      1, /*quality=*/20, low_effort, enc->pic, percent_range, percent);
}
1468
1469
// -----------------------------------------------------------------------------
1470
// VP8LEncoder
1471
1472
static VP8LEncoder* VP8LEncoderNew(const WebPConfig* const config,
1473
const WebPPicture* const picture) {
1474
VP8LEncoder* const enc = (VP8LEncoder*)WebPSafeCalloc(1ULL, sizeof(*enc));
1475
if (enc == NULL) {
1476
WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
1477
return NULL;
1478
}
1479
enc->config = config;
1480
enc->pic = picture;
1481
enc->argb_content = kEncoderNone;
1482
1483
VP8LEncDspInit();
1484
1485
return enc;
1486
}
1487
1488
// Frees the encoder and everything it owns. Safe to call with NULL.
static void VP8LEncoderDelete(VP8LEncoder* enc) {
  int k;
  if (enc == NULL) return;
  VP8LHashChainClear(&enc->hash_chain);
  for (k = 0; k < 4; ++k) {
    VP8LBackwardRefsClear(&enc->refs[k]);
  }
  ClearTransformBuffer(enc);
  WebPSafeFree(enc);
}
1497
1498
// -----------------------------------------------------------------------------
1499
// Main call
1500
1501
// Parameter bundle handed to EncodeStreamHook() through a WebPWorker.
typedef struct {
  const WebPConfig* config;    // encoding parameters
  const WebPPicture* picture;  // input picture (a view for the side thread)
  VP8LBitWriter* bw;           // destination bit writer
  VP8LEncoder* enc;            // encoder state to use
  CrunchConfig crunch_configs[CRUNCH_CONFIGS_MAX];  // configurations to try
  int num_crunch_configs;      // number of valid entries in crunch_configs[]
  int red_and_blue_always_zero;  // if set, cross-color transform is skipped
  WebPAuxStats* stats;         // optional stats destination (may be NULL)
} StreamEncodeContext;
1511
1512
// Worker hook: runs the full lossless pipeline (transforms + entropy coding)
// once per crunch configuration in 'input' and keeps the smallest bitstream
// in params->bw. 'data2' is unused. Returns 0 on error (the error code is
// stored in the picture).
static int EncodeStreamHook(void* input, void* data2) {
  StreamEncodeContext* const params = (StreamEncodeContext*)input;
  const WebPConfig* const config = params->config;
  const WebPPicture* const picture = params->picture;
  VP8LBitWriter* const bw = params->bw;
  VP8LEncoder* const enc = params->enc;
  const CrunchConfig* const crunch_configs = params->crunch_configs;
  const int num_crunch_configs = params->num_crunch_configs;
  const int red_and_blue_always_zero = params->red_and_blue_always_zero;
#if !defined(WEBP_DISABLE_STATS)
  WebPAuxStats* const stats = params->stats;
#endif
  const int quality = (int)config->quality;
  const int low_effort = (config->method == 0);
#if (WEBP_NEAR_LOSSLESS == 1)
  const int width = picture->width;
#endif
  const int height = picture->height;
  const size_t byte_position = VP8LBitWriterNumBytes(bw);
  int percent = 2;  // for WebPProgressHook
#if (WEBP_NEAR_LOSSLESS == 1)
  int use_near_lossless = 0;
#endif
  int hdr_size = 0;
  int data_size = 0;
  int idx;
  size_t best_size = ~(size_t)0;
  // bw_init snapshots the writer so each configuration restarts from the same
  // position; bw_best keeps the smallest bitstream produced so far.
  VP8LBitWriter bw_init = *bw, bw_best;
  (void)data2;

  if (!VP8LBitWriterInit(&bw_best, 0) ||
      (num_crunch_configs > 1 && !VP8LBitWriterClone(bw, &bw_best))) {
    WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
    goto Error;
  }

  for (idx = 0; idx < num_crunch_configs; ++idx) {
    const int entropy_idx = crunch_configs[idx].entropy_idx;
    int remaining_percent = 97 / num_crunch_configs, percent_range;
    int predictor_transform_bits = 0, cross_color_transform_bits = 0;
    // Derive which transforms this entropy mode uses.
    enc->use_palette =
        (entropy_idx == kPalette) || (entropy_idx == kPaletteAndSpatial);
    enc->use_subtract_green =
        (entropy_idx == kSubGreen) || (entropy_idx == kSpatialSubGreen);
    enc->use_predict = (entropy_idx == kSpatial) ||
                       (entropy_idx == kSpatialSubGreen) ||
                       (entropy_idx == kPaletteAndSpatial);
    // When using a palette, R/B==0, hence no need to test for cross-color.
    if (low_effort || enc->use_palette) {
      enc->use_cross_color = 0;
    } else {
      enc->use_cross_color = red_and_blue_always_zero ? 0 : enc->use_predict;
    }
    // Reset any parameter in the encoder that is set in the previous iteration.
    enc->cache_bits = 0;
    VP8LBackwardRefsClear(&enc->refs[0]);
    VP8LBackwardRefsClear(&enc->refs[1]);

#if (WEBP_NEAR_LOSSLESS == 1)
    // Apply near-lossless preprocessing.
    use_near_lossless = (config->near_lossless < 100) && !enc->use_palette &&
                        !enc->use_predict;
    if (use_near_lossless) {
      if (!AllocateTransformBuffer(enc, width, height)) goto Error;
      if ((enc->argb_content != kEncoderNearLossless) &&
          !VP8ApplyNearLossless(picture, config->near_lossless, enc->argb)) {
        WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
        goto Error;
      }
      enc->argb_content = kEncoderNearLossless;
    } else {
      enc->argb_content = kEncoderNone;
    }
#else
    enc->argb_content = kEncoderNone;
#endif

    // Encode palette
    if (enc->use_palette) {
      if (!PaletteSort(crunch_configs[idx].palette_sorting_type, enc->pic,
                       enc->palette_sorted, enc->palette_size,
                       enc->palette)) {
        WebPEncodingSetError(enc->pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
        goto Error;
      }
      percent_range = remaining_percent / 4;
      if (!EncodePalette(bw, low_effort, enc, percent_range, &percent)) {
        goto Error;
      }
      remaining_percent -= percent_range;
      if (!MapImageFromPalette(enc)) goto Error;
      // If using a color cache, do not have it bigger than the number of
      // colors.
      if (enc->palette_size < (1 << MAX_COLOR_CACHE_BITS)) {
        enc->cache_bits = BitsLog2Floor(enc->palette_size) + 1;
      }
    }
    // In case image is not packed.
    if (enc->argb_content != kEncoderNearLossless &&
        enc->argb_content != kEncoderPalette) {
      if (!MakeInputImageCopy(enc)) goto Error;
    }

    // -------------------------------------------------------------------------
    // Apply transforms and write transform data.

    if (enc->use_subtract_green) {
      ApplySubtractGreen(enc, enc->current_width, height, bw);
    }

    if (enc->use_predict) {
      percent_range = remaining_percent / 3;
      if (!ApplyPredictFilter(enc, enc->current_width, height, quality,
                              low_effort, enc->use_subtract_green, bw,
                              percent_range, &percent,
                              &predictor_transform_bits)) {
        goto Error;
      }
      remaining_percent -= percent_range;
    }

    if (enc->use_cross_color) {
      percent_range = remaining_percent / 2;
      if (!ApplyCrossColorFilter(enc, enc->current_width, height, quality,
                                 low_effort, bw, percent_range, &percent,
                                 &cross_color_transform_bits)) {
        goto Error;
      }
      remaining_percent -= percent_range;
    }

    VP8LPutBits(bw, !TRANSFORM_PRESENT, 1);  // No more transforms.

    // -------------------------------------------------------------------------
    // Encode and write the transformed image.
    if (!EncodeImageInternal(
            bw, enc->argb, &enc->hash_chain, enc->refs, enc->current_width,
            height, quality, low_effort, &crunch_configs[idx],
            &enc->cache_bits, enc->histo_bits, byte_position, &hdr_size,
            &data_size, picture, remaining_percent, &percent)) {
      goto Error;
    }

    // If we are better than what we already have.
    if (VP8LBitWriterNumBytes(bw) < best_size) {
      best_size = VP8LBitWriterNumBytes(bw);
      // Store the BitWriter.
      VP8LBitWriterSwap(bw, &bw_best);
#if !defined(WEBP_DISABLE_STATS)
      // Update the stats.
      if (stats != NULL) {
        stats->lossless_features = 0;
        if (enc->use_predict) stats->lossless_features |= 1;
        if (enc->use_cross_color) stats->lossless_features |= 2;
        if (enc->use_subtract_green) stats->lossless_features |= 4;
        if (enc->use_palette) stats->lossless_features |= 8;
        stats->histogram_bits = enc->histo_bits;
        stats->transform_bits = predictor_transform_bits;
        stats->cross_color_transform_bits = cross_color_transform_bits;
        stats->cache_bits = enc->cache_bits;
        stats->palette_size = enc->palette_size;
        stats->lossless_size = (int)(best_size - byte_position);
        stats->lossless_hdr_size = hdr_size;
        stats->lossless_data_size = data_size;
      }
#endif
    }
    // Reset the bit writer for the following iteration if any.
    if (num_crunch_configs > 1) VP8LBitWriterReset(&bw_init, bw);
  }
  // Hand the best bitstream back to the caller's writer.
  VP8LBitWriterSwap(&bw_best, bw);

 Error:
  VP8LBitWriterWipeOut(&bw_best);
  // The hook should return false in case of error.
  return (params->picture->error_code == VP8_ENC_OK);
}
1689
1690
int VP8LEncodeStream(const WebPConfig* const config,
1691
const WebPPicture* const picture,
1692
VP8LBitWriter* const bw_main) {
1693
VP8LEncoder* const enc_main = VP8LEncoderNew(config, picture);
1694
VP8LEncoder* enc_side = NULL;
1695
CrunchConfig crunch_configs[CRUNCH_CONFIGS_MAX];
1696
int num_crunch_configs_main, num_crunch_configs_side = 0;
1697
int idx;
1698
int red_and_blue_always_zero = 0;
1699
WebPWorker worker_main, worker_side;
1700
StreamEncodeContext params_main, params_side;
1701
// The main thread uses picture->stats, the side thread uses stats_side.
1702
WebPAuxStats stats_side;
1703
VP8LBitWriter bw_side;
1704
WebPPicture picture_side;
1705
const WebPWorkerInterface* const worker_interface = WebPGetWorkerInterface();
1706
int ok_main;
1707
1708
if (enc_main == NULL || !VP8LBitWriterInit(&bw_side, 0)) {
1709
VP8LEncoderDelete(enc_main);
1710
return WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
1711
}
1712
1713
// Avoid "garbage value" error from Clang's static analysis tool.
1714
if (!WebPPictureInit(&picture_side)) {
1715
goto Error;
1716
}
1717
1718
// Analyze image (entropy, num_palettes etc)
1719
if (!EncoderAnalyze(enc_main, crunch_configs, &num_crunch_configs_main,
1720
&red_and_blue_always_zero) ||
1721
!EncoderInit(enc_main)) {
1722
WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
1723
goto Error;
1724
}
1725
1726
// Split the configs between the main and side threads (if any).
1727
if (config->thread_level > 0) {
1728
num_crunch_configs_side = num_crunch_configs_main / 2;
1729
for (idx = 0; idx < num_crunch_configs_side; ++idx) {
1730
params_side.crunch_configs[idx] =
1731
crunch_configs[num_crunch_configs_main - num_crunch_configs_side +
1732
idx];
1733
}
1734
params_side.num_crunch_configs = num_crunch_configs_side;
1735
}
1736
num_crunch_configs_main -= num_crunch_configs_side;
1737
for (idx = 0; idx < num_crunch_configs_main; ++idx) {
1738
params_main.crunch_configs[idx] = crunch_configs[idx];
1739
}
1740
params_main.num_crunch_configs = num_crunch_configs_main;
1741
1742
// Fill in the parameters for the thread workers.
1743
{
1744
const int params_size = (num_crunch_configs_side > 0) ? 2 : 1;
1745
for (idx = 0; idx < params_size; ++idx) {
1746
// Create the parameters for each worker.
1747
WebPWorker* const worker = (idx == 0) ? &worker_main : &worker_side;
1748
StreamEncodeContext* const param =
1749
(idx == 0) ? &params_main : &params_side;
1750
param->config = config;
1751
param->red_and_blue_always_zero = red_and_blue_always_zero;
1752
if (idx == 0) {
1753
param->picture = picture;
1754
param->stats = picture->stats;
1755
param->bw = bw_main;
1756
param->enc = enc_main;
1757
} else {
1758
// Create a side picture (error_code is not thread-safe).
1759
if (!WebPPictureView(picture, /*left=*/0, /*top=*/0, picture->width,
1760
picture->height, &picture_side)) {
1761
assert(0);
1762
}
1763
picture_side.progress_hook = NULL; // Progress hook is not thread-safe.
1764
param->picture = &picture_side; // No need to free a view afterwards.
1765
param->stats = (picture->stats == NULL) ? NULL : &stats_side;
1766
// Create a side bit writer.
1767
if (!VP8LBitWriterClone(bw_main, &bw_side)) {
1768
WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
1769
goto Error;
1770
}
1771
param->bw = &bw_side;
1772
// Create a side encoder.
1773
enc_side = VP8LEncoderNew(config, &picture_side);
1774
if (enc_side == NULL || !EncoderInit(enc_side)) {
1775
WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
1776
goto Error;
1777
}
1778
// Copy the values that were computed for the main encoder.
1779
enc_side->histo_bits = enc_main->histo_bits;
1780
enc_side->predictor_transform_bits =
1781
enc_main->predictor_transform_bits;
1782
enc_side->cross_color_transform_bits =
1783
enc_main->cross_color_transform_bits;
1784
enc_side->palette_size = enc_main->palette_size;
1785
memcpy(enc_side->palette, enc_main->palette,
1786
sizeof(enc_main->palette));
1787
memcpy(enc_side->palette_sorted, enc_main->palette_sorted,
1788
sizeof(enc_main->palette_sorted));
1789
param->enc = enc_side;
1790
}
1791
// Create the workers.
1792
worker_interface->Init(worker);
1793
worker->data1 = param;
1794
worker->data2 = NULL;
1795
worker->hook = EncodeStreamHook;
1796
}
1797
}
1798
1799
// Start the second thread if needed.
1800
if (num_crunch_configs_side != 0) {
1801
if (!worker_interface->Reset(&worker_side)) {
1802
WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
1803
goto Error;
1804
}
1805
#if !defined(WEBP_DISABLE_STATS)
1806
// This line is here and not in the param initialization above to remove a
1807
// Clang static analyzer warning.
1808
if (picture->stats != NULL) {
1809
memcpy(&stats_side, picture->stats, sizeof(stats_side));
1810
}
1811
#endif
1812
worker_interface->Launch(&worker_side);
1813
}
1814
// Execute the main thread.
1815
worker_interface->Execute(&worker_main);
1816
ok_main = worker_interface->Sync(&worker_main);
1817
worker_interface->End(&worker_main);
1818
if (num_crunch_configs_side != 0) {
1819
// Wait for the second thread.
1820
const int ok_side = worker_interface->Sync(&worker_side);
1821
worker_interface->End(&worker_side);
1822
if (!ok_main || !ok_side) {
1823
if (picture->error_code == VP8_ENC_OK) {
1824
assert(picture_side.error_code != VP8_ENC_OK);
1825
WebPEncodingSetError(picture, picture_side.error_code);
1826
}
1827
goto Error;
1828
}
1829
if (VP8LBitWriterNumBytes(&bw_side) < VP8LBitWriterNumBytes(bw_main)) {
1830
VP8LBitWriterSwap(bw_main, &bw_side);
1831
#if !defined(WEBP_DISABLE_STATS)
1832
if (picture->stats != NULL) {
1833
memcpy(picture->stats, &stats_side, sizeof(*picture->stats));
1834
}
1835
#endif
1836
}
1837
}
1838
1839
Error:
1840
VP8LBitWriterWipeOut(&bw_side);
1841
VP8LEncoderDelete(enc_main);
1842
VP8LEncoderDelete(enc_side);
1843
return (picture->error_code == VP8_ENC_OK);
1844
}
1845
1846
#undef CRUNCH_CONFIGS_MAX
1847
#undef CRUNCH_SUBCONFIGS_MAX
1848
1849
int VP8LEncodeImage(const WebPConfig* const config,
1850
const WebPPicture* const picture) {
1851
int width, height;
1852
int has_alpha;
1853
size_t coded_size;
1854
int percent = 0;
1855
int initial_size;
1856
VP8LBitWriter bw;
1857
1858
if (picture == NULL) return 0;
1859
1860
if (config == NULL || picture->argb == NULL) {
1861
return WebPEncodingSetError(picture, VP8_ENC_ERROR_NULL_PARAMETER);
1862
}
1863
1864
width = picture->width;
1865
height = picture->height;
1866
// Initialize BitWriter with size corresponding to 16 bpp to photo images and
1867
// 8 bpp for graphical images.
1868
initial_size = (config->image_hint == WEBP_HINT_GRAPH) ?
1869
width * height : width * height * 2;
1870
if (!VP8LBitWriterInit(&bw, initial_size)) {
1871
WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
1872
goto Error;
1873
}
1874
1875
if (!WebPReportProgress(picture, 1, &percent)) {
1876
UserAbort:
1877
WebPEncodingSetError(picture, VP8_ENC_ERROR_USER_ABORT);
1878
goto Error;
1879
}
1880
// Reset stats (for pure lossless coding)
1881
if (picture->stats != NULL) {
1882
WebPAuxStats* const stats = picture->stats;
1883
memset(stats, 0, sizeof(*stats));
1884
stats->PSNR[0] = 99.f;
1885
stats->PSNR[1] = 99.f;
1886
stats->PSNR[2] = 99.f;
1887
stats->PSNR[3] = 99.f;
1888
stats->PSNR[4] = 99.f;
1889
}
1890
1891
// Write image size.
1892
if (!WriteImageSize(picture, &bw)) {
1893
WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
1894
goto Error;
1895
}
1896
1897
has_alpha = WebPPictureHasTransparency(picture);
1898
// Write the non-trivial Alpha flag and lossless version.
1899
if (!WriteRealAlphaAndVersion(&bw, has_alpha)) {
1900
WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
1901
goto Error;
1902
}
1903
1904
if (!WebPReportProgress(picture, 2, &percent)) goto UserAbort;
1905
1906
// Encode main image stream.
1907
if (!VP8LEncodeStream(config, picture, &bw)) goto Error;
1908
1909
if (!WebPReportProgress(picture, 99, &percent)) goto UserAbort;
1910
1911
// Finish the RIFF chunk.
1912
if (!WriteImage(picture, &bw, &coded_size)) goto Error;
1913
1914
if (!WebPReportProgress(picture, 100, &percent)) goto UserAbort;
1915
1916
#if !defined(WEBP_DISABLE_STATS)
1917
// Save size.
1918
if (picture->stats != NULL) {
1919
picture->stats->coded_size += (int)coded_size;
1920
picture->stats->lossless_size = (int)coded_size;
1921
}
1922
#endif
1923
1924
if (picture->extra_info != NULL) {
1925
const int mb_w = (width + 15) >> 4;
1926
const int mb_h = (height + 15) >> 4;
1927
memset(picture->extra_info, 0, mb_w * mb_h * sizeof(*picture->extra_info));
1928
}
1929
1930
Error:
1931
if (bw.error) {
1932
WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
1933
}
1934
VP8LBitWriterWipeOut(&bw);
1935
return (picture->error_code == VP8_ENC_OK);
1936
}
1937
1938
//------------------------------------------------------------------------------