Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/3rdparty/libwebp/src/enc/vp8l_enc.c
16344 views
1
// Copyright 2012 Google Inc. All Rights Reserved.
2
//
3
// Use of this source code is governed by a BSD-style license
4
// that can be found in the COPYING file in the root of the source
5
// tree. An additional intellectual property rights grant can be found
6
// in the file PATENTS. All contributing project authors may
7
// be found in the AUTHORS file in the root of the source tree.
8
// -----------------------------------------------------------------------------
9
//
10
// main entry for the lossless encoder.
11
//
12
// Author: Vikas Arora ([email protected])
13
//
14
15
#include <assert.h>
16
#include <stdlib.h>
17
18
#include "src/enc/backward_references_enc.h"
19
#include "src/enc/histogram_enc.h"
20
#include "src/enc/vp8i_enc.h"
21
#include "src/enc/vp8li_enc.h"
22
#include "src/dsp/lossless.h"
23
#include "src/dsp/lossless_common.h"
24
#include "src/utils/bit_writer_utils.h"
25
#include "src/utils/huffman_encode_utils.h"
26
#include "src/utils/utils.h"
27
#include "src/webp/format_constants.h"
28
29
// Maximum number of histogram images (sub-blocks).
30
#define MAX_HUFF_IMAGE_SIZE 2600
31
32
// Palette reordering for smaller sum of deltas (and for smaller storage).
33
34
// qsort() comparator: orders two packed ARGB palette entries by their
// 32-bit numeric value. Entries are unique, so equality never happens.
static int PaletteCompareColorsForQsort(const void* p1, const void* p2) {
  const uint32_t color1 = WebPMemToUint32((uint8_t*)p1);
  const uint32_t color2 = WebPMemToUint32((uint8_t*)p2);
  assert(color1 != color2);
  if (color1 < color2) return -1;
  return 1;
}
40
41
// Folds a byte-wise delta into [0, 128]: a small positive and a small
// negative difference (mod 256) both map to a small distance.
static WEBP_INLINE uint32_t PaletteComponentDistance(uint32_t v) {
  if (v > 128) return 256 - v;
  return v;
}
44
45
// Computes a value that is related to the entropy created by the
46
// palette entry diff.
47
//
48
// Note that the last & 0xff is a no-operation in the next statement, but
49
// removed by most compilers and is here only for regularity of the code.
50
static WEBP_INLINE uint32_t PaletteColorDistance(uint32_t col1, uint32_t col2) {
51
const uint32_t diff = VP8LSubPixels(col1, col2);
52
const int kMoreWeightForRGBThanForAlpha = 9;
53
uint32_t score;
54
score = PaletteComponentDistance((diff >> 0) & 0xff);
55
score += PaletteComponentDistance((diff >> 8) & 0xff);
56
score += PaletteComponentDistance((diff >> 16) & 0xff);
57
score *= kMoreWeightForRGBThanForAlpha;
58
score += PaletteComponentDistance((diff >> 24) & 0xff);
59
return score;
60
}
61
62
// Exchanges the contents of two palette slots.
static WEBP_INLINE void SwapColor(uint32_t* const col1, uint32_t* const col2) {
  const uint32_t saved = *col2;
  *col2 = *col1;
  *col1 = saved;
}
67
68
// Greedily reorders the palette so that each entry is the remaining color
// closest (in PaletteColorDistance terms) to its predecessor. This reduces
// storage needs since the palette is stored with delta encoding.
static void GreedyMinimizeDeltas(uint32_t palette[], int num_colors) {
  uint32_t predict = 0x00000000;
  int i;
  for (i = 0; i < num_colors; ++i) {
    // Among the not-yet-placed colors [i, num_colors), find the one
    // closest to the current prediction.
    int closest = i;
    uint32_t closest_score = ~0U;
    int k;
    for (k = i; k < num_colors; ++k) {
      const uint32_t score = PaletteColorDistance(palette[k], predict);
      if (score < closest_score) {
        closest_score = score;
        closest = k;
      }
    }
    // Move it into position i and predict from it next.
    SwapColor(&palette[closest], &palette[i]);
    predict = palette[i];
  }
}
88
89
// The palette has been sorted by alpha. This function checks if the other
// components of the palette have a monotonic development with regards to
// position in the palette. If all have monotonic development, there is
// no benefit to re-organize them greedily. A monotonic development
// would be spotted in green-only situations (like lossy alpha) or gray-scale
// images.
static int PaletteHasNonMonotonousDeltas(uint32_t palette[], int num_colors) {
  uint32_t predict = 0x000000;
  uint8_t sign_found = 0x00;
  int i;
  for (i = 0; i < num_colors; ++i) {
    const uint32_t diff = VP8LSubPixels(palette[i], predict);
    const uint8_t rd = (diff >> 16) & 0xff;
    const uint8_t gd = (diff >> 8) & 0xff;
    const uint8_t bd = (diff >> 0) & 0xff;
    // Each channel owns a pair of bits: one for a positive delta, one for
    // a negative delta (bytes < 0x80 count as positive).
    if (rd != 0x00) sign_found |= (rd < 0x80) ? 1 : 2;
    if (gd != 0x00) sign_found |= (gd < 0x80) ? 8 : 16;
    if (bd != 0x00) sign_found |= (bd < 0x80) ? 64 : 128;
    predict = palette[i];
  }
  // Both bits of some channel's pair set => that channel changed sign.
  return (sign_found & (sign_found << 1)) != 0;
}
117
118
// -----------------------------------------------------------------------------
119
// Palette
120
121
// If number of colors in the image is less than or equal to MAX_PALETTE_SIZE,
122
// creates a palette and returns true, else returns false.
123
static int AnalyzeAndCreatePalette(const WebPPicture* const pic,
124
int low_effort,
125
uint32_t palette[MAX_PALETTE_SIZE],
126
int* const palette_size) {
127
const int num_colors = WebPGetColorPalette(pic, palette);
128
if (num_colors > MAX_PALETTE_SIZE) {
129
*palette_size = 0;
130
return 0;
131
}
132
*palette_size = num_colors;
133
qsort(palette, num_colors, sizeof(*palette), PaletteCompareColorsForQsort);
134
if (!low_effort && PaletteHasNonMonotonousDeltas(palette, num_colors)) {
135
GreedyMinimizeDeltas(palette, num_colors);
136
}
137
return 1;
138
}
139
140
// These five modes are evaluated and their respective entropy is computed.
// (The names mirror the histogram variants accumulated in AnalyzeEntropy():
// raw samples, left-pixel-predicted deltas, subtract-green, both, palette.)
typedef enum {
  kDirect = 0,
  kSpatial = 1,
  kSubGreen = 2,
  kSpatialSubGreen = 3,
  kPalette = 4,
  kNumEntropyIx = 5
} EntropyIx;
149
150
// Indices of the 256-bin histograms gathered by AnalyzeEntropy(). The
// '...Pred' variants accumulate deltas against the previous pixel and the
// '...SubGreen' variants accumulate (channel - green) values; kHistoPalette
// accumulates a multiplicative hash of the pixel (see HashPix()).
typedef enum {
  kHistoAlpha = 0,
  kHistoAlphaPred,
  kHistoGreen,
  kHistoGreenPred,
  kHistoRed,
  kHistoRedPred,
  kHistoBlue,
  kHistoBluePred,
  kHistoRedSubGreen,
  kHistoRedPredSubGreen,
  kHistoBlueSubGreen,
  kHistoBluePredSubGreen,
  kHistoPalette,
  kHistoTotal  // Must be last.
} HistoIx;
166
167
// Bumps the red and blue histogram bins of pixel 'p' after subtracting its
// green channel from each.
static void AddSingleSubGreen(int p, uint32_t* const r, uint32_t* const b) {
  const int green = p >> 8;  // The upper bits are masked away later.
  const int red_minus_green = ((p >> 16) - green) & 0xff;
  const int blue_minus_green = ((p >> 0) - green) & 0xff;
  ++r[red_minus_green];
  ++b[blue_minus_green];
}
172
173
// Bumps one bin in each of the four per-channel histograms for pixel 'p'.
static void AddSingle(uint32_t p,
                      uint32_t* const a, uint32_t* const r,
                      uint32_t* const g, uint32_t* const b) {
  const uint32_t alpha = (p >> 24) & 0xff;
  const uint32_t red = (p >> 16) & 0xff;
  const uint32_t green = (p >> 8) & 0xff;
  const uint32_t blue = (p >> 0) & 0xff;
  ++a[alpha];
  ++r[red];
  ++g[green];
  ++b[blue];
}
181
182
// Multiplicative hash mapping a 32-bit pixel value to an 8-bit bucket.
static WEBP_INLINE uint32_t HashPix(uint32_t pix) {
  // Note that masking with 0xffffffffu is for preventing an
  // 'unsigned int overflow' warning. Doesn't impact the compiled code.
  const uint64_t mixed = ((uint64_t)pix + (pix >> 19)) * 0x39c5fba7ull;
  return (uint32_t)((mixed & 0xffffffffu) >> 24);
}
187
188
// Gathers per-channel histograms over the image (raw samples, left-pixel
// deltas, subtract-green variants and a palette-hash approximation),
// estimates the bit cost of each entropy mode and stores the cheapest in
// '*min_entropy_ix'. '*red_and_blue_always_zero' is set when the chosen
// mode's red and blue histograms only ever hit bin 0, which lets callers
// skip the cross-color transform. Returns 0 on allocation failure.
static int AnalyzeEntropy(const uint32_t* argb,
                          int width, int height, int argb_stride,
                          int use_palette,
                          int palette_size, int transform_bits,
                          EntropyIx* const min_entropy_ix,
                          int* const red_and_blue_always_zero) {
  // Allocate histogram set with cache_bits = 0.
  uint32_t* histo;

  if (use_palette && palette_size <= 16) {
    // In the case of small palettes, we pack 2, 4 or 8 pixels together. In
    // practice, small palettes are better than any other transform.
    *min_entropy_ix = kPalette;
    *red_and_blue_always_zero = 1;
    return 1;
  }
  // One flat array of kHistoTotal consecutive 256-bin histograms.
  histo = (uint32_t*)WebPSafeCalloc(kHistoTotal, sizeof(*histo) * 256);
  if (histo != NULL) {
    int i, x, y;
    const uint32_t* prev_row = NULL;
    const uint32_t* curr_row = argb;
    uint32_t pix_prev = argb[0];  // Skip the first pixel.
    for (y = 0; y < height; ++y) {
      for (x = 0; x < width; ++x) {
        const uint32_t pix = curr_row[x];
        const uint32_t pix_diff = VP8LSubPixels(pix, pix_prev);
        pix_prev = pix;
        // Pixels equal to their left or top neighbor are skipped; they are
        // assumed to be handled cheaply by the backward references.
        if ((pix_diff == 0) || (prev_row != NULL && pix == prev_row[x])) {
          continue;
        }
        AddSingle(pix,
                  &histo[kHistoAlpha * 256],
                  &histo[kHistoRed * 256],
                  &histo[kHistoGreen * 256],
                  &histo[kHistoBlue * 256]);
        AddSingle(pix_diff,
                  &histo[kHistoAlphaPred * 256],
                  &histo[kHistoRedPred * 256],
                  &histo[kHistoGreenPred * 256],
                  &histo[kHistoBluePred * 256]);
        AddSingleSubGreen(pix,
                          &histo[kHistoRedSubGreen * 256],
                          &histo[kHistoBlueSubGreen * 256]);
        AddSingleSubGreen(pix_diff,
                          &histo[kHistoRedPredSubGreen * 256],
                          &histo[kHistoBluePredSubGreen * 256]);
        {
          // Approximate the palette by the entropy of the multiplicative hash.
          const uint32_t hash = HashPix(pix);
          ++histo[kHistoPalette * 256 + hash];
        }
      }
      prev_row = curr_row;
      curr_row += argb_stride;
    }
    {
      double entropy_comp[kHistoTotal];
      double entropy[kNumEntropyIx];
      int k;
      int last_mode_to_analyze = use_palette ? kPalette : kSpatialSubGreen;
      int j;
      // Let's add one zero to the predicted histograms. The zeros are removed
      // too efficiently by the pix_diff == 0 comparison, at least one of the
      // zeros is likely to exist.
      ++histo[kHistoRedPredSubGreen * 256];
      ++histo[kHistoBluePredSubGreen * 256];
      ++histo[kHistoRedPred * 256];
      ++histo[kHistoGreenPred * 256];
      ++histo[kHistoBluePred * 256];
      ++histo[kHistoAlphaPred * 256];

      for (j = 0; j < kHistoTotal; ++j) {
        entropy_comp[j] = VP8LBitsEntropy(&histo[j * 256], 256);
      }
      // Each mode's cost is the sum of the entropies of the histograms it
      // would produce.
      entropy[kDirect] = entropy_comp[kHistoAlpha] +
          entropy_comp[kHistoRed] +
          entropy_comp[kHistoGreen] +
          entropy_comp[kHistoBlue];
      entropy[kSpatial] = entropy_comp[kHistoAlphaPred] +
          entropy_comp[kHistoRedPred] +
          entropy_comp[kHistoGreenPred] +
          entropy_comp[kHistoBluePred];
      entropy[kSubGreen] = entropy_comp[kHistoAlpha] +
          entropy_comp[kHistoRedSubGreen] +
          entropy_comp[kHistoGreen] +
          entropy_comp[kHistoBlueSubGreen];
      entropy[kSpatialSubGreen] = entropy_comp[kHistoAlphaPred] +
          entropy_comp[kHistoRedPredSubGreen] +
          entropy_comp[kHistoGreenPred] +
          entropy_comp[kHistoBluePredSubGreen];
      entropy[kPalette] = entropy_comp[kHistoPalette];

      // When including transforms, there is an overhead in bits from
      // storing them. This overhead is small but matters for small images.
      // For spatial, there are 14 transformations.
      entropy[kSpatial] += VP8LSubSampleSize(width, transform_bits) *
                           VP8LSubSampleSize(height, transform_bits) *
                           VP8LFastLog2(14);
      // For color transforms: 24 as only 3 channels are considered in a
      // ColorTransformElement.
      entropy[kSpatialSubGreen] += VP8LSubSampleSize(width, transform_bits) *
                                   VP8LSubSampleSize(height, transform_bits) *
                                   VP8LFastLog2(24);
      // For palettes, add the cost of storing the palette.
      // We empirically estimate the cost of a compressed entry as 8 bits.
      // The palette is differential-coded when compressed hence a much
      // lower cost than sizeof(uint32_t)*8.
      entropy[kPalette] += palette_size * 8;

      *min_entropy_ix = kDirect;
      for (k = kDirect + 1; k <= last_mode_to_analyze; ++k) {
        if (entropy[*min_entropy_ix] > entropy[k]) {
          *min_entropy_ix = (EntropyIx)k;
        }
      }
      assert((int)*min_entropy_ix <= last_mode_to_analyze);
      *red_and_blue_always_zero = 1;
      // Let's check if the histogram of the chosen entropy mode has
      // non-zero red and blue values. If all are zero, we can later skip
      // the cross color optimization.
      {
        // Red/blue histogram pair inspected for each EntropyIx mode.
        static const uint8_t kHistoPairs[5][2] = {
          { kHistoRed, kHistoBlue },
          { kHistoRedPred, kHistoBluePred },
          { kHistoRedSubGreen, kHistoBlueSubGreen },
          { kHistoRedPredSubGreen, kHistoBluePredSubGreen },
          { kHistoRed, kHistoBlue }
        };
        const uint32_t* const red_histo =
            &histo[256 * kHistoPairs[*min_entropy_ix][0]];
        const uint32_t* const blue_histo =
            &histo[256 * kHistoPairs[*min_entropy_ix][1]];
        // Bin 0 is ignored: only non-zero sample values matter here.
        for (i = 1; i < 256; ++i) {
          if ((red_histo[i] | blue_histo[i]) != 0) {
            *red_and_blue_always_zero = 0;
            break;
          }
        }
      }
    }
    WebPSafeFree(histo);
    return 1;
  } else {
    return 0;
  }
}
334
335
static int GetHistoBits(int method, int use_palette, int width, int height) {
336
// Make tile size a function of encoding method (Range: 0 to 6).
337
int histo_bits = (use_palette ? 9 : 7) - method;
338
while (1) {
339
const int huff_image_size = VP8LSubSampleSize(width, histo_bits) *
340
VP8LSubSampleSize(height, histo_bits);
341
if (huff_image_size <= MAX_HUFF_IMAGE_SIZE) break;
342
++histo_bits;
343
}
344
return (histo_bits < MIN_HUFFMAN_BITS) ? MIN_HUFFMAN_BITS :
345
(histo_bits > MAX_HUFFMAN_BITS) ? MAX_HUFFMAN_BITS : histo_bits;
346
}
347
348
// Caps 'histo_bits' by a per-method maximum to get the transform tile size.
static int GetTransformBits(int method, int histo_bits) {
  int max_transform_bits;
  int res;
  if (method < 4) {
    max_transform_bits = 6;
  } else if (method > 4) {
    max_transform_bits = 4;
  } else {
    max_transform_bits = 5;
  }
  res = (histo_bits > max_transform_bits) ? max_transform_bits : histo_bits;
  assert(res <= MAX_TRANSFORM_BITS);
  return res;
}
355
356
// Set of parameters to be used in each iteration of the cruncher.
#define CRUNCH_CONFIGS_LZ77_MAX 2
typedef struct {
  int entropy_idx_;   // EntropyIx mode to try (set in EncoderAnalyze()).
  // Bitmasks of LZ77 variants to try (kLZ77Standard|kLZ77RLE, kLZ77Box).
  int lz77s_types_to_try_[CRUNCH_CONFIGS_LZ77_MAX];
  int lz77s_types_to_try_size_;  // Number of valid entries above.
} CrunchConfig;

// At most one config per entropy mode.
#define CRUNCH_CONFIGS_MAX kNumEntropyIx
365
366
// Decides which transform/LZ77 combinations ('crunch configs') to try for
// this picture: creates the palette if possible, fixes the histogram and
// transform tile sizes, then either picks the single best entropy mode
// (via AnalyzeEntropy()) or, at method 6 / quality 100, queues all modes.
// Returns 0 if AnalyzeEntropy() fails (allocation error).
static int EncoderAnalyze(VP8LEncoder* const enc,
                          CrunchConfig crunch_configs[CRUNCH_CONFIGS_MAX],
                          int* const crunch_configs_size,
                          int* const red_and_blue_always_zero) {
  const WebPPicture* const pic = enc->pic_;
  const int width = pic->width;
  const int height = pic->height;
  const WebPConfig* const config = enc->config_;
  const int method = config->method;
  const int low_effort = (config->method == 0);
  int i;
  int use_palette;
  int n_lz77s;
  assert(pic != NULL && pic->argb != NULL);

  use_palette =
      AnalyzeAndCreatePalette(pic, low_effort,
                              enc->palette_, &enc->palette_size_);

  // Empirical bit sizes.
  enc->histo_bits_ = GetHistoBits(method, use_palette,
                                  pic->width, pic->height);
  enc->transform_bits_ = GetTransformBits(method, enc->histo_bits_);

  if (low_effort) {
    // AnalyzeEntropy is somewhat slow.
    crunch_configs[0].entropy_idx_ = use_palette ? kPalette : kSpatialSubGreen;
    n_lz77s = 1;
    *crunch_configs_size = 1;
  } else {
    EntropyIx min_entropy_ix;
    // Try out multiple LZ77 on images with few colors.
    n_lz77s = (enc->palette_size_ > 0 && enc->palette_size_ <= 16) ? 2 : 1;
    if (!AnalyzeEntropy(pic->argb, width, height, pic->argb_stride, use_palette,
                        enc->palette_size_, enc->transform_bits_,
                        &min_entropy_ix, red_and_blue_always_zero)) {
      return 0;
    }
    if (method == 6 && config->quality == 100) {
      // Go brute force on all transforms.
      *crunch_configs_size = 0;
      for (i = 0; i < kNumEntropyIx; ++i) {
        // kPalette is only meaningful when a palette actually exists.
        if (i != kPalette || use_palette) {
          assert(*crunch_configs_size < CRUNCH_CONFIGS_MAX);
          crunch_configs[(*crunch_configs_size)++].entropy_idx_ = i;
        }
      }
    } else {
      // Only choose the guessed best transform.
      *crunch_configs_size = 1;
      crunch_configs[0].entropy_idx_ = min_entropy_ix;
    }
  }
  // Fill in the different LZ77s.
  assert(n_lz77s <= CRUNCH_CONFIGS_LZ77_MAX);
  for (i = 0; i < *crunch_configs_size; ++i) {
    int j;
    for (j = 0; j < n_lz77s; ++j) {
      crunch_configs[i].lz77s_types_to_try_[j] =
          (j == 0) ? kLZ77Standard | kLZ77RLE : kLZ77Box;
    }
    crunch_configs[i].lz77s_types_to_try_size_ = n_lz77s;
  }
  return 1;
}
431
432
// Allocates the hash chain and initializes the three backward-refs buffers
// used by the encoder. Returns 0 on allocation failure.
static int EncoderInit(VP8LEncoder* const enc) {
  const WebPPicture* const pic = enc->pic_;
  const int pix_cnt = pic->width * pic->height;
  // we round the block size up, so we're guaranteed to have
  // at most MAX_REFS_BLOCK_PER_IMAGE blocks used:
  const int refs_block_size = (pix_cnt - 1) / MAX_REFS_BLOCK_PER_IMAGE + 1;
  int i;

  if (!VP8LHashChainInit(&enc->hash_chain_, pix_cnt)) return 0;

  for (i = 0; i < 3; ++i) {
    VP8LBackwardRefsInit(&enc->refs_[i], refs_block_size);
  }
  return 1;
}
447
448
// Builds the 5 Huffman codes (literal/length, red, blue, alpha, distance)
// for each histogram in 'histogram_image', storing them into
// 'huffman_codes' (5 consecutive entries per histogram). All code/length
// arrays live in one shared allocation owned by huffman_codes[0].codes.
// Returns false in case of memory error.
static int GetHuffBitLengthsAndCodes(
    const VP8LHistogramSet* const histogram_image,
    HuffmanTreeCode* const huffman_codes) {
  int i, k;
  int ok = 0;
  uint64_t total_length_size = 0;
  uint8_t* mem_buf = NULL;
  const int histogram_image_size = histogram_image->size;
  int max_num_symbols = 0;
  uint8_t* buf_rle = NULL;
  HuffmanTree* huff_tree = NULL;

  // Iterate over all histograms and get the aggregate number of codes used.
  for (i = 0; i < histogram_image_size; ++i) {
    const VP8LHistogram* const histo = histogram_image->histograms[i];
    HuffmanTreeCode* const codes = &huffman_codes[5 * i];
    for (k = 0; k < 5; ++k) {
      // k==0: literal/length alphabet (depends on color-cache bits);
      // k==4: distance alphabet; others: plain 256-entry byte alphabets.
      const int num_symbols =
          (k == 0) ? VP8LHistogramNumCodes(histo->palette_code_bits_) :
          (k == 4) ? NUM_DISTANCE_CODES : 256;
      codes[k].num_symbols = num_symbols;
      total_length_size += num_symbols;
    }
  }

  // Allocate and Set Huffman codes.
  {
    uint16_t* codes;
    uint8_t* lengths;
    // Single allocation: all uint16_t code arrays first, then all uint8_t
    // length arrays; each HuffmanTreeCode gets a slice of both.
    mem_buf = (uint8_t*)WebPSafeCalloc(total_length_size,
                                       sizeof(*lengths) + sizeof(*codes));
    if (mem_buf == NULL) goto End;

    codes = (uint16_t*)mem_buf;
    lengths = (uint8_t*)&codes[total_length_size];
    for (i = 0; i < 5 * histogram_image_size; ++i) {
      const int bit_length = huffman_codes[i].num_symbols;
      huffman_codes[i].codes = codes;
      huffman_codes[i].code_lengths = lengths;
      codes += bit_length;
      lengths += bit_length;
      if (max_num_symbols < bit_length) {
        max_num_symbols = bit_length;
      }
    }
  }

  // Scratch buffers sized for the largest alphabet.
  buf_rle = (uint8_t*)WebPSafeMalloc(1ULL, max_num_symbols);
  huff_tree = (HuffmanTree*)WebPSafeMalloc(3ULL * max_num_symbols,
                                           sizeof(*huff_tree));
  if (buf_rle == NULL || huff_tree == NULL) goto End;

  // Create Huffman trees.
  for (i = 0; i < histogram_image_size; ++i) {
    HuffmanTreeCode* const codes = &huffman_codes[5 * i];
    VP8LHistogram* const histo = histogram_image->histograms[i];
    VP8LCreateHuffmanTree(histo->literal_, 15, buf_rle, huff_tree, codes + 0);
    VP8LCreateHuffmanTree(histo->red_, 15, buf_rle, huff_tree, codes + 1);
    VP8LCreateHuffmanTree(histo->blue_, 15, buf_rle, huff_tree, codes + 2);
    VP8LCreateHuffmanTree(histo->alpha_, 15, buf_rle, huff_tree, codes + 3);
    VP8LCreateHuffmanTree(histo->distance_, 15, buf_rle, huff_tree, codes + 4);
  }
  ok = 1;
 End:
  WebPSafeFree(huff_tree);
  WebPSafeFree(buf_rle);
  if (!ok) {
    // On failure, release the shared buffer and leave huffman_codes zeroed
    // so no caller can see dangling slice pointers.
    WebPSafeFree(mem_buf);
    memset(huffman_codes, 0, 5 * histogram_image_size * sizeof(*huffman_codes));
  }
  return ok;
}
521
522
// Writes the bit depths of the code-length code in kStorageOrder, dropping
// trailing zeros (but always storing at least 4 entries).
static void StoreHuffmanTreeOfHuffmanTreeToBitMask(
    VP8LBitWriter* const bw, const uint8_t* code_length_bitdepth) {
  // RFC 1951 will calm you down if you are worried about this funny sequence.
  // This sequence is tuned from that, but more weighted for lower symbol count,
  // and more spiking histograms.
  static const uint8_t kStorageOrder[CODE_LENGTH_CODES] = {
    17, 18, 0, 1, 2, 3, 4, 5, 16, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
  };
  int i;
  // Throw away trailing zeros:
  int codes_to_store = CODE_LENGTH_CODES;
  for (; codes_to_store > 4; --codes_to_store) {
    if (code_length_bitdepth[kStorageOrder[codes_to_store - 1]] != 0) {
      break;
    }
  }
  // 4 bits: number of stored codes minus 4; then 3 bits per depth.
  VP8LPutBits(bw, codes_to_store - 4, 4);
  for (i = 0; i < codes_to_store; ++i) {
    VP8LPutBits(bw, code_length_bitdepth[kStorageOrder[i]], 3);
  }
}
543
544
static void ClearHuffmanTreeIfOnlyOneSymbol(
545
HuffmanTreeCode* const huffman_code) {
546
int k;
547
int count = 0;
548
for (k = 0; k < huffman_code->num_symbols; ++k) {
549
if (huffman_code->code_lengths[k] != 0) {
550
++count;
551
if (count > 1) return;
552
}
553
}
554
for (k = 0; k < huffman_code->num_symbols; ++k) {
555
huffman_code->code_lengths[k] = 0;
556
huffman_code->codes[k] = 0;
557
}
558
}
559
560
static void StoreHuffmanTreeToBitMask(
561
VP8LBitWriter* const bw,
562
const HuffmanTreeToken* const tokens, const int num_tokens,
563
const HuffmanTreeCode* const huffman_code) {
564
int i;
565
for (i = 0; i < num_tokens; ++i) {
566
const int ix = tokens[i].code;
567
const int extra_bits = tokens[i].extra_bits;
568
VP8LPutBits(bw, huffman_code->codes[ix], huffman_code->code_lengths[ix]);
569
switch (ix) {
570
case 16:
571
VP8LPutBits(bw, extra_bits, 2);
572
break;
573
case 17:
574
VP8LPutBits(bw, extra_bits, 3);
575
break;
576
case 18:
577
VP8LPutBits(bw, extra_bits, 7);
578
break;
579
}
580
}
581
}
582
583
// Stores 'tree' in its full (non-simple) form: the code-length code first,
// then the RLE-tokenized code lengths, optionally trimming trailing zeros
// when that saves bits. 'huff_tree' and 'tokens' are pre-alloacted buffers.
static void StoreFullHuffmanCode(VP8LBitWriter* const bw,
                                 HuffmanTree* const huff_tree,
                                 HuffmanTreeToken* const tokens,
                                 const HuffmanTreeCode* const tree) {
  uint8_t code_length_bitdepth[CODE_LENGTH_CODES] = { 0 };
  uint16_t code_length_bitdepth_symbols[CODE_LENGTH_CODES] = { 0 };
  const int max_tokens = tree->num_symbols;
  int num_tokens;
  HuffmanTreeCode huffman_code;
  huffman_code.num_symbols = CODE_LENGTH_CODES;
  huffman_code.code_lengths = code_length_bitdepth;
  huffman_code.codes = code_length_bitdepth_symbols;

  VP8LPutBits(bw, 0, 1);  // Not a simple (1- or 2-symbol) code.
  num_tokens = VP8LCreateCompressedHuffmanTree(tree, tokens, max_tokens);
  {
    // Build a Huffman code over the token alphabet itself (max depth 7).
    uint32_t histogram[CODE_LENGTH_CODES] = { 0 };
    uint8_t buf_rle[CODE_LENGTH_CODES] = { 0 };
    int i;
    for (i = 0; i < num_tokens; ++i) {
      ++histogram[tokens[i].code];
    }

    VP8LCreateHuffmanTree(histogram, 7, buf_rle, huff_tree, &huffman_code);
  }

  StoreHuffmanTreeOfHuffmanTreeToBitMask(bw, code_length_bitdepth);
  ClearHuffmanTreeIfOnlyOneSymbol(&huffman_code);
  {
    int trailing_zero_bits = 0;
    int trimmed_length = num_tokens;
    int write_trimmed_length;
    int length;
    int i = num_tokens;
    // Walk backwards over trailing zero-producing tokens (0/17/18) and
    // count how many bits storing them would cost.
    while (i-- > 0) {
      const int ix = tokens[i].code;
      if (ix == 0 || ix == 17 || ix == 18) {
        --trimmed_length;  // discount trailing zeros
        trailing_zero_bits += code_length_bitdepth[ix];
        if (ix == 17) {
          trailing_zero_bits += 3;
        } else if (ix == 18) {
          trailing_zero_bits += 7;
        }
      } else {
        break;
      }
    }
    // Only store the trimmed token count when the saved bits outweigh the
    // overhead of encoding the length itself.
    write_trimmed_length = (trimmed_length > 1 && trailing_zero_bits > 12);
    length = write_trimmed_length ? trimmed_length : num_tokens;
    VP8LPutBits(bw, write_trimmed_length, 1);
    if (write_trimmed_length) {
      if (trimmed_length == 2) {
        VP8LPutBits(bw, 0, 3 + 2);  // nbitpairs=1, trimmed_length=2
      } else {
        const int nbits = BitsLog2Floor(trimmed_length - 2);
        const int nbitpairs = nbits / 2 + 1;
        assert(trimmed_length > 2);
        assert(nbitpairs - 1 < 8);
        VP8LPutBits(bw, nbitpairs - 1, 3);
        VP8LPutBits(bw, trimmed_length - 2, nbitpairs * 2);
      }
    }
    StoreHuffmanTreeToBitMask(bw, tokens, length, &huffman_code);
  }
}
650
651
// Writes 'huffman_code' to the bitstream: a compact "small tree" form when
// at most two symbols are in use (and both fit in 8 bits), otherwise the
// full representation via StoreFullHuffmanCode().
// 'huff_tree' and 'tokens' are pre-alloacted buffers.
static void StoreHuffmanCode(VP8LBitWriter* const bw,
                             HuffmanTree* const huff_tree,
                             HuffmanTreeToken* const tokens,
                             const HuffmanTreeCode* const huffman_code) {
  int i;
  int count = 0;
  int symbols[2] = { 0, 0 };
  const int kMaxBits = 8;
  const int kMaxSymbol = 1 << kMaxBits;

  // Check whether it's a small tree: collect up to the first two used
  // symbols, and stop counting past three.
  for (i = 0; i < huffman_code->num_symbols && count < 3; ++i) {
    if (huffman_code->code_lengths[i] != 0) {
      if (count < 2) symbols[count] = i;
      ++count;
    }
  }

  if (count == 0) {   // emit minimal tree for empty cases
    // bits: small tree marker: 1, count-1: 0, large 8-bit code: 0, code: 0
    VP8LPutBits(bw, 0x01, 4);
  } else if (count <= 2 && symbols[0] < kMaxSymbol && symbols[1] < kMaxSymbol) {
    VP8LPutBits(bw, 1, 1);  // Small tree marker to encode 1 or 2 symbols.
    VP8LPutBits(bw, count - 1, 1);
    if (symbols[0] <= 1) {
      VP8LPutBits(bw, 0, 1);  // Code bit for small (1 bit) symbol value.
      VP8LPutBits(bw, symbols[0], 1);
    } else {
      VP8LPutBits(bw, 1, 1);
      VP8LPutBits(bw, symbols[0], 8);
    }
    if (count == 2) {
      VP8LPutBits(bw, symbols[1], 8);
    }
  } else {
    StoreFullHuffmanCode(bw, huff_tree, tokens, huffman_code);
  }
}
690
691
static WEBP_INLINE void WriteHuffmanCode(VP8LBitWriter* const bw,
692
const HuffmanTreeCode* const code,
693
int code_index) {
694
const int depth = code->code_lengths[code_index];
695
const int symbol = code->codes[code_index];
696
VP8LPutBits(bw, symbol, depth);
697
}
698
699
static WEBP_INLINE void WriteHuffmanCodeWithExtraBits(
700
VP8LBitWriter* const bw,
701
const HuffmanTreeCode* const code,
702
int code_index,
703
int bits,
704
int n_bits) {
705
const int depth = code->code_lengths[code_index];
706
const int symbol = code->codes[code_index];
707
VP8LPutBits(bw, (bits << depth) | symbol, depth + n_bits);
708
}
709
710
// Walks the backward references and emits each literal, cache index or
// (length, distance) copy with the Huffman code set of the histogram tile
// the current pixel falls into. Returns VP8_ENC_ERROR_OUT_OF_MEMORY if the
// bit writer ran out of memory along the way.
static WebPEncodingError StoreImageToBitMask(
    VP8LBitWriter* const bw, int width, int histo_bits,
    const VP8LBackwardRefs* const refs,
    const uint16_t* histogram_symbols,
    const HuffmanTreeCode* const huffman_codes) {
  const int histo_xsize = histo_bits ? VP8LSubSampleSize(width, histo_bits) : 1;
  const int tile_mask = (histo_bits == 0) ? 0 : -(1 << histo_bits);
  // x and y trace the position in the image.
  int x = 0;
  int y = 0;
  int tile_x = x & tile_mask;
  int tile_y = y & tile_mask;
  int histogram_ix = histogram_symbols[0];
  const HuffmanTreeCode* codes = huffman_codes + 5 * histogram_ix;
  VP8LRefsCursor c = VP8LRefsCursorInit(refs);
  while (VP8LRefsCursorOk(&c)) {
    const PixOrCopy* const v = c.cur_pos;
    // Refresh the code set whenever we cross a histogram tile boundary.
    if ((tile_x != (x & tile_mask)) || (tile_y != (y & tile_mask))) {
      tile_x = x & tile_mask;
      tile_y = y & tile_mask;
      histogram_ix = histogram_symbols[(y >> histo_bits) * histo_xsize +
                                       (x >> histo_bits)];
      codes = huffman_codes + 5 * histogram_ix;
    }
    if (PixOrCopyIsLiteral(v)) {
      // order[] maps tree k (literal/green, red, blue, alpha) to the
      // corresponding PixOrCopyLiteral() channel index.
      static const uint8_t order[] = { 1, 2, 0, 3 };
      int k;
      for (k = 0; k < 4; ++k) {
        const int code = PixOrCopyLiteral(v, order[k]);
        WriteHuffmanCode(bw, codes + k, code);
      }
    } else if (PixOrCopyIsCacheIdx(v)) {
      // Color-cache hits live past the literal and length symbols in the
      // first alphabet.
      const int code = PixOrCopyCacheIdx(v);
      const int literal_ix = 256 + NUM_LENGTH_CODES + code;
      WriteHuffmanCode(bw, codes, literal_ix);
    } else {
      int bits, n_bits;
      int code;

      const int distance = PixOrCopyDistance(v);
      VP8LPrefixEncode(v->len, &code, &n_bits, &bits);
      WriteHuffmanCodeWithExtraBits(bw, codes, 256 + code, bits, n_bits);

      // Don't write the distance with the extra bits code since
      // the distance can be up to 18 bits of extra bits, and the prefix
      // 15 bits, totaling to 33, and our PutBits only supports up to 32 bits.
      VP8LPrefixEncode(distance, &code, &n_bits, &bits);
      WriteHuffmanCode(bw, codes + 4, code);
      VP8LPutBits(bw, bits, n_bits);
    }
    // Advance the (x, y) raster position by the pixels just covered.
    x += PixOrCopyLength(v);
    while (x >= width) {
      x -= width;
      ++y;
    }
    VP8LRefsCursorNext(&c);
  }
  return bw->error_ ? VP8_ENC_ERROR_OUT_OF_MEMORY : VP8_ENC_OK;
}
769
770
// Special case of EncodeImageInternal() for cache-bits=0, histo_bits=31:
// a single histogram / Huffman code set for the whole image, no color
// cache and no Huffman image. Used for auxiliary images (transform data).
static WebPEncodingError EncodeImageNoHuffman(VP8LBitWriter* const bw,
                                              const uint32_t* const argb,
                                              VP8LHashChain* const hash_chain,
                                              VP8LBackwardRefs* const refs_tmp1,
                                              VP8LBackwardRefs* const refs_tmp2,
                                              int width, int height,
                                              int quality, int low_effort) {
  int i;
  int max_tokens = 0;
  WebPEncodingError err = VP8_ENC_OK;
  VP8LBackwardRefs* refs;
  HuffmanTreeToken* tokens = NULL;
  HuffmanTreeCode huffman_codes[5] = { { 0, NULL, NULL } };
  const uint16_t histogram_symbols[1] = { 0 };  // only one tree, one symbol
  int cache_bits = 0;
  VP8LHistogramSet* histogram_image = NULL;
  HuffmanTree* const huff_tree = (HuffmanTree*)WebPSafeMalloc(
      3ULL * CODE_LENGTH_CODES, sizeof(*huff_tree));
  if (huff_tree == NULL) {
    err = VP8_ENC_ERROR_OUT_OF_MEMORY;
    goto Error;
  }

  // Calculate backward references from ARGB image.
  if (!VP8LHashChainFill(hash_chain, quality, argb, width, height,
                         low_effort)) {
    err = VP8_ENC_ERROR_OUT_OF_MEMORY;
    goto Error;
  }
  refs = VP8LGetBackwardReferences(width, height, argb, quality, 0,
                                   kLZ77Standard | kLZ77RLE, &cache_bits,
                                   hash_chain, refs_tmp1, refs_tmp2);
  if (refs == NULL) {
    err = VP8_ENC_ERROR_OUT_OF_MEMORY;
    goto Error;
  }
  histogram_image = VP8LAllocateHistogramSet(1, cache_bits);
  if (histogram_image == NULL) {
    err = VP8_ENC_ERROR_OUT_OF_MEMORY;
    goto Error;
  }

  // Build histogram image and symbols from backward references.
  VP8LHistogramStoreRefs(refs, histogram_image->histograms[0]);

  // Create Huffman bit lengths and codes for each histogram image.
  assert(histogram_image->size == 1);
  if (!GetHuffBitLengthsAndCodes(histogram_image, huffman_codes)) {
    err = VP8_ENC_ERROR_OUT_OF_MEMORY;
    goto Error;
  }

  // No color cache, no Huffman image.
  VP8LPutBits(bw, 0, 1);

  // Find maximum number of symbols for the huffman tree-set.
  for (i = 0; i < 5; ++i) {
    HuffmanTreeCode* const codes = &huffman_codes[i];
    if (max_tokens < codes->num_symbols) {
      max_tokens = codes->num_symbols;
    }
  }

  tokens = (HuffmanTreeToken*)WebPSafeMalloc(max_tokens, sizeof(*tokens));
  if (tokens == NULL) {
    err = VP8_ENC_ERROR_OUT_OF_MEMORY;
    goto Error;
  }

  // Store Huffman codes.
  for (i = 0; i < 5; ++i) {
    HuffmanTreeCode* const codes = &huffman_codes[i];
    StoreHuffmanCode(bw, huff_tree, tokens, codes);
    ClearHuffmanTreeIfOnlyOneSymbol(codes);
  }

  // Store actual literals.
  err = StoreImageToBitMask(bw, width, 0, refs, histogram_symbols,
                            huffman_codes);

 Error:
  WebPSafeFree(tokens);
  WebPSafeFree(huff_tree);
  VP8LFreeHistogramSet(histogram_image);
  // All five codes share one allocation, anchored at huffman_codes[0].codes
  // (see GetHuffBitLengthsAndCodes()).
  WebPSafeFree(huffman_codes[0].codes);
  return err;
}
858
859
// Encodes the ARGB image using the full meta-Huffman scheme: builds backward
// references, clusters per-tile histograms into a "histogram image", then
// writes the color-cache parameters, the (optional) histogram image, the
// Huffman code tables and finally the LZ77-coded pixel data. Each LZ77
// variant listed in 'config' is tried and the smallest resulting bitstream
// is kept in 'bw'. On success, *hdr_size and *data_size receive the byte
// sizes of the header part and of the pixel data, both relative to
// 'init_byte_position'.
static WebPEncodingError EncodeImageInternal(
    VP8LBitWriter* const bw, const uint32_t* const argb,
    VP8LHashChain* const hash_chain, VP8LBackwardRefs refs_array[3], int width,
    int height, int quality, int low_effort, int use_cache,
    const CrunchConfig* const config, int* cache_bits, int histogram_bits,
    size_t init_byte_position, int* const hdr_size, int* const data_size) {
  WebPEncodingError err = VP8_ENC_OK;
  const uint32_t histogram_image_xysize =
      VP8LSubSampleSize(width, histogram_bits) *
      VP8LSubSampleSize(height, histogram_bits);
  VP8LHistogramSet* histogram_image = NULL;
  VP8LHistogram* tmp_histo = NULL;
  int histogram_image_size = 0;
  size_t bit_array_size = 0;
  // 3 trees are needed by StoreHuffmanCode() (code lengths + two RLE trees).
  HuffmanTree* const huff_tree = (HuffmanTree*)WebPSafeMalloc(
      3ULL * CODE_LENGTH_CODES, sizeof(*huff_tree));
  HuffmanTreeToken* tokens = NULL;
  HuffmanTreeCode* huffman_codes = NULL;
  VP8LBackwardRefs* refs_best;
  VP8LBackwardRefs* refs_tmp;
  uint16_t* const histogram_symbols =
      (uint16_t*)WebPSafeMalloc(histogram_image_xysize,
                                sizeof(*histogram_symbols));
  int lz77s_idx;
  // bw_init snapshots the writer so later iterations can restart from the
  // same position; bw_best accumulates the smallest bitstream seen so far.
  VP8LBitWriter bw_init = *bw, bw_best;
  int hdr_size_tmp;
  assert(histogram_bits >= MIN_HUFFMAN_BITS);
  assert(histogram_bits <= MAX_HUFFMAN_BITS);
  assert(hdr_size != NULL);
  assert(data_size != NULL);

  if (histogram_symbols == NULL) {
    err = VP8_ENC_ERROR_OUT_OF_MEMORY;
    goto Error;
  }

  if (use_cache) {
    // If the value is different from zero, it has been set during the
    // palette analysis.
    if (*cache_bits == 0) *cache_bits = MAX_COLOR_CACHE_BITS;
  } else {
    *cache_bits = 0;
  }
  // 'best_refs' is the reference to the best backward refs and points to one
  // of refs_array[0] or refs_array[1].
  // Calculate backward references from ARGB image.
  if (huff_tree == NULL ||
      !VP8LHashChainFill(hash_chain, quality, argb, width, height,
                         low_effort) ||
      !VP8LBitWriterInit(&bw_best, 0) ||
      (config->lz77s_types_to_try_size_ > 1 &&
       !VP8LBitWriterClone(bw, &bw_best))) {
    err = VP8_ENC_ERROR_OUT_OF_MEMORY;
    goto Error;
  }
  for (lz77s_idx = 0; lz77s_idx < config->lz77s_types_to_try_size_;
       ++lz77s_idx) {
    refs_best = VP8LGetBackwardReferences(
        width, height, argb, quality, low_effort,
        config->lz77s_types_to_try_[lz77s_idx], cache_bits, hash_chain,
        &refs_array[0], &refs_array[1]);
    if (refs_best == NULL) {
      err = VP8_ENC_ERROR_OUT_OF_MEMORY;
      goto Error;
    }
    // Keep the best references aside and use the other element from the first
    // two as a temporary for later usage.
    refs_tmp = &refs_array[refs_best == &refs_array[0] ? 1 : 0];

    histogram_image =
        VP8LAllocateHistogramSet(histogram_image_xysize, *cache_bits);
    tmp_histo = VP8LAllocateHistogram(*cache_bits);
    if (histogram_image == NULL || tmp_histo == NULL) {
      err = VP8_ENC_ERROR_OUT_OF_MEMORY;
      goto Error;
    }

    // Build histogram image and symbols from backward references.
    if (!VP8LGetHistoImageSymbols(width, height, refs_best, quality, low_effort,
                                  histogram_bits, *cache_bits, histogram_image,
                                  tmp_histo, histogram_symbols)) {
      err = VP8_ENC_ERROR_OUT_OF_MEMORY;
      goto Error;
    }
    // Create Huffman bit lengths and codes for each histogram image.
    histogram_image_size = histogram_image->size;
    // 5 Huffman code tables per histogram (literal/len, R, B, A, distance).
    bit_array_size = 5 * histogram_image_size;
    huffman_codes = (HuffmanTreeCode*)WebPSafeCalloc(bit_array_size,
                                                     sizeof(*huffman_codes));
    // Note: some histogram_image entries may point to tmp_histos[], so the
    // latter need to outlive the following call to GetHuffBitLengthsAndCodes().
    if (huffman_codes == NULL ||
        !GetHuffBitLengthsAndCodes(histogram_image, huffman_codes)) {
      err = VP8_ENC_ERROR_OUT_OF_MEMORY;
      goto Error;
    }
    // Free combined histograms.
    VP8LFreeHistogramSet(histogram_image);
    histogram_image = NULL;

    // Free scratch histograms.
    VP8LFreeHistogram(tmp_histo);
    tmp_histo = NULL;

    // Color Cache parameters.
    if (*cache_bits > 0) {
      VP8LPutBits(bw, 1, 1);
      VP8LPutBits(bw, *cache_bits, 4);
    } else {
      VP8LPutBits(bw, 0, 1);
    }

    // Huffman image + meta huffman.
    {
      const int write_histogram_image = (histogram_image_size > 1);
      VP8LPutBits(bw, write_histogram_image, 1);
      if (write_histogram_image) {
        uint32_t* const histogram_argb =
            (uint32_t*)WebPSafeMalloc(histogram_image_xysize,
                                      sizeof(*histogram_argb));
        int max_index = 0;
        uint32_t i;
        if (histogram_argb == NULL) {
          err = VP8_ENC_ERROR_OUT_OF_MEMORY;
          goto Error;
        }
        // Pack each histogram index into the green channel of a pixel; the
        // histogram image is then encoded like a regular sub-image.
        for (i = 0; i < histogram_image_xysize; ++i) {
          const int symbol_index = histogram_symbols[i] & 0xffff;
          histogram_argb[i] = (symbol_index << 8);
          if (symbol_index >= max_index) {
            max_index = symbol_index + 1;
          }
        }
        histogram_image_size = max_index;

        VP8LPutBits(bw, histogram_bits - 2, 3);
        err = EncodeImageNoHuffman(
            bw, histogram_argb, hash_chain, refs_tmp, &refs_array[2],
            VP8LSubSampleSize(width, histogram_bits),
            VP8LSubSampleSize(height, histogram_bits), quality, low_effort);
        WebPSafeFree(histogram_argb);
        if (err != VP8_ENC_OK) goto Error;
      }
    }

    // Store Huffman codes.
    {
      int i;
      int max_tokens = 0;
      // Find maximum number of symbols for the huffman tree-set.
      for (i = 0; i < 5 * histogram_image_size; ++i) {
        HuffmanTreeCode* const codes = &huffman_codes[i];
        if (max_tokens < codes->num_symbols) {
          max_tokens = codes->num_symbols;
        }
      }
      tokens = (HuffmanTreeToken*)WebPSafeMalloc(max_tokens, sizeof(*tokens));
      if (tokens == NULL) {
        err = VP8_ENC_ERROR_OUT_OF_MEMORY;
        goto Error;
      }
      for (i = 0; i < 5 * histogram_image_size; ++i) {
        HuffmanTreeCode* const codes = &huffman_codes[i];
        StoreHuffmanCode(bw, huff_tree, tokens, codes);
        ClearHuffmanTreeIfOnlyOneSymbol(codes);
      }
    }
    // Store actual literals.
    hdr_size_tmp = (int)(VP8LBitWriterNumBytes(bw) - init_byte_position);
    err = StoreImageToBitMask(bw, width, histogram_bits, refs_best,
                              histogram_symbols, huffman_codes);
    // Keep track of the smallest image so far.
    if (lz77s_idx == 0 ||
        VP8LBitWriterNumBytes(bw) < VP8LBitWriterNumBytes(&bw_best)) {
      *hdr_size = hdr_size_tmp;
      *data_size =
          (int)(VP8LBitWriterNumBytes(bw) - init_byte_position - *hdr_size);
      VP8LBitWriterSwap(bw, &bw_best);
    }
    // Reset the bit writer for the following iteration if any.
    if (config->lz77s_types_to_try_size_ > 1) VP8LBitWriterReset(&bw_init, bw);
    WebPSafeFree(tokens);
    tokens = NULL;
    if (huffman_codes != NULL) {
      WebPSafeFree(huffman_codes->codes);
      WebPSafeFree(huffman_codes);
      huffman_codes = NULL;
    }
  }
  // Move the overall winner back into the caller's bit writer.
  VP8LBitWriterSwap(bw, &bw_best);

 Error:
  WebPSafeFree(tokens);
  WebPSafeFree(huff_tree);
  VP8LFreeHistogramSet(histogram_image);
  VP8LFreeHistogram(tmp_histo);
  if (huffman_codes != NULL) {
    WebPSafeFree(huffman_codes->codes);
    WebPSafeFree(huffman_codes);
  }
  WebPSafeFree(histogram_symbols);
  VP8LBitWriterWipeOut(&bw_best);
  return err;
}
1063
1064
// -----------------------------------------------------------------------------
1065
// Transforms
1066
1067
static void ApplySubtractGreen(VP8LEncoder* const enc, int width, int height,
1068
VP8LBitWriter* const bw) {
1069
VP8LPutBits(bw, TRANSFORM_PRESENT, 1);
1070
VP8LPutBits(bw, SUBTRACT_GREEN, 2);
1071
VP8LSubtractGreenFromBlueAndRed(enc->argb_, width * height);
1072
}
1073
1074
static WebPEncodingError ApplyPredictFilter(const VP8LEncoder* const enc,
1075
int width, int height,
1076
int quality, int low_effort,
1077
int used_subtract_green,
1078
VP8LBitWriter* const bw) {
1079
const int pred_bits = enc->transform_bits_;
1080
const int transform_width = VP8LSubSampleSize(width, pred_bits);
1081
const int transform_height = VP8LSubSampleSize(height, pred_bits);
1082
// we disable near-lossless quantization if palette is used.
1083
const int near_lossless_strength = enc->use_palette_ ? 100
1084
: enc->config_->near_lossless;
1085
1086
VP8LResidualImage(width, height, pred_bits, low_effort, enc->argb_,
1087
enc->argb_scratch_, enc->transform_data_,
1088
near_lossless_strength, enc->config_->exact,
1089
used_subtract_green);
1090
VP8LPutBits(bw, TRANSFORM_PRESENT, 1);
1091
VP8LPutBits(bw, PREDICTOR_TRANSFORM, 2);
1092
assert(pred_bits >= 2);
1093
VP8LPutBits(bw, pred_bits - 2, 3);
1094
return EncodeImageNoHuffman(
1095
bw, enc->transform_data_, (VP8LHashChain*)&enc->hash_chain_,
1096
(VP8LBackwardRefs*)&enc->refs_[0], // cast const away
1097
(VP8LBackwardRefs*)&enc->refs_[1], transform_width, transform_height,
1098
quality, low_effort);
1099
}
1100
1101
static WebPEncodingError ApplyCrossColorFilter(const VP8LEncoder* const enc,
1102
int width, int height,
1103
int quality, int low_effort,
1104
VP8LBitWriter* const bw) {
1105
const int ccolor_transform_bits = enc->transform_bits_;
1106
const int transform_width = VP8LSubSampleSize(width, ccolor_transform_bits);
1107
const int transform_height = VP8LSubSampleSize(height, ccolor_transform_bits);
1108
1109
VP8LColorSpaceTransform(width, height, ccolor_transform_bits, quality,
1110
enc->argb_, enc->transform_data_);
1111
VP8LPutBits(bw, TRANSFORM_PRESENT, 1);
1112
VP8LPutBits(bw, CROSS_COLOR_TRANSFORM, 2);
1113
assert(ccolor_transform_bits >= 2);
1114
VP8LPutBits(bw, ccolor_transform_bits - 2, 3);
1115
return EncodeImageNoHuffman(
1116
bw, enc->transform_data_, (VP8LHashChain*)&enc->hash_chain_,
1117
(VP8LBackwardRefs*)&enc->refs_[0], // cast const away
1118
(VP8LBackwardRefs*)&enc->refs_[1], transform_width, transform_height,
1119
quality, low_effort);
1120
}
1121
1122
// -----------------------------------------------------------------------------
1123
1124
static WebPEncodingError WriteRiffHeader(const WebPPicture* const pic,
1125
size_t riff_size, size_t vp8l_size) {
1126
uint8_t riff[RIFF_HEADER_SIZE + CHUNK_HEADER_SIZE + VP8L_SIGNATURE_SIZE] = {
1127
'R', 'I', 'F', 'F', 0, 0, 0, 0, 'W', 'E', 'B', 'P',
1128
'V', 'P', '8', 'L', 0, 0, 0, 0, VP8L_MAGIC_BYTE,
1129
};
1130
PutLE32(riff + TAG_SIZE, (uint32_t)riff_size);
1131
PutLE32(riff + RIFF_HEADER_SIZE + TAG_SIZE, (uint32_t)vp8l_size);
1132
if (!pic->writer(riff, sizeof(riff), pic)) {
1133
return VP8_ENC_ERROR_BAD_WRITE;
1134
}
1135
return VP8_ENC_OK;
1136
}
1137
1138
static int WriteImageSize(const WebPPicture* const pic,
1139
VP8LBitWriter* const bw) {
1140
const int width = pic->width - 1;
1141
const int height = pic->height - 1;
1142
assert(width < WEBP_MAX_DIMENSION && height < WEBP_MAX_DIMENSION);
1143
1144
VP8LPutBits(bw, width, VP8L_IMAGE_SIZE_BITS);
1145
VP8LPutBits(bw, height, VP8L_IMAGE_SIZE_BITS);
1146
return !bw->error_;
1147
}
1148
1149
// Writes the 1-bit alpha-presence flag followed by the
// VP8L_VERSION_BITS-wide version field. Returns true if the bit writer is
// still in a valid (non-error) state.
static int WriteRealAlphaAndVersion(VP8LBitWriter* const bw, int has_alpha) {
  VP8LPutBits(bw, has_alpha, 1);
  VP8LPutBits(bw, VP8L_VERSION, VP8L_VERSION_BITS);
  return !bw->error_;
}
1154
1155
static WebPEncodingError WriteImage(const WebPPicture* const pic,
1156
VP8LBitWriter* const bw,
1157
size_t* const coded_size) {
1158
WebPEncodingError err = VP8_ENC_OK;
1159
const uint8_t* const webpll_data = VP8LBitWriterFinish(bw);
1160
const size_t webpll_size = VP8LBitWriterNumBytes(bw);
1161
const size_t vp8l_size = VP8L_SIGNATURE_SIZE + webpll_size;
1162
const size_t pad = vp8l_size & 1;
1163
const size_t riff_size = TAG_SIZE + CHUNK_HEADER_SIZE + vp8l_size + pad;
1164
1165
err = WriteRiffHeader(pic, riff_size, vp8l_size);
1166
if (err != VP8_ENC_OK) goto Error;
1167
1168
if (!pic->writer(webpll_data, webpll_size, pic)) {
1169
err = VP8_ENC_ERROR_BAD_WRITE;
1170
goto Error;
1171
}
1172
1173
if (pad) {
1174
const uint8_t pad_byte[1] = { 0 };
1175
if (!pic->writer(pad_byte, 1, pic)) {
1176
err = VP8_ENC_ERROR_BAD_WRITE;
1177
goto Error;
1178
}
1179
}
1180
*coded_size = CHUNK_HEADER_SIZE + riff_size;
1181
return VP8_ENC_OK;
1182
1183
Error:
1184
return err;
1185
}
1186
1187
// -----------------------------------------------------------------------------
1188
1189
// Frees the single allocation backing argb_/argb_scratch_/transform_data_
// (see AllocateTransformBuffer) and resets its bookkeeping.
static void ClearTransformBuffer(VP8LEncoder* const enc) {
  WebPSafeFree(enc->transform_mem_);
  enc->transform_mem_ = NULL;
  enc->transform_mem_size_ = 0;
}
1194
1195
// Allocates the memory for argb (W x H) buffer, 2 rows of context for
1196
// prediction and transform data.
1197
// Flags influencing the memory allocated:
1198
// enc->transform_bits_
1199
// enc->use_predict_, enc->use_cross_color_
1200
static WebPEncodingError AllocateTransformBuffer(VP8LEncoder* const enc,
                                                 int width, int height) {
  WebPEncodingError err = VP8_ENC_OK;
  const uint64_t image_size = width * height;
  // VP8LResidualImage needs room for 2 scanlines of uint32 pixels with an extra
  // pixel in each, plus 2 regular scanlines of bytes.
  // TODO(skal): Clean up by using arithmetic in bytes instead of words.
  const uint64_t argb_scratch_size =
      enc->use_predict_
          ? (width + 1) * 2 +
            (width * 2 + sizeof(uint32_t) - 1) / sizeof(uint32_t)
          : 0;
  const uint64_t transform_data_size =
      (enc->use_predict_ || enc->use_cross_color_)
          ? VP8LSubSampleSize(width, enc->transform_bits_) *
            VP8LSubSampleSize(height, enc->transform_bits_)
          : 0;
  // Padding (in uint32 units) reserved between sub-buffers so each can be
  // re-aligned with WEBP_ALIGN below.
  const uint64_t max_alignment_in_words =
      (WEBP_ALIGN_CST + sizeof(uint32_t) - 1) / sizeof(uint32_t);
  const uint64_t mem_size =
      image_size + max_alignment_in_words +
      argb_scratch_size + max_alignment_in_words +
      transform_data_size;
  uint32_t* mem = enc->transform_mem_;
  // Grow-only reuse: only reallocate when the current buffer is too small.
  if (mem == NULL || mem_size > enc->transform_mem_size_) {
    ClearTransformBuffer(enc);
    mem = (uint32_t*)WebPSafeMalloc(mem_size, sizeof(*mem));
    if (mem == NULL) {
      err = VP8_ENC_ERROR_OUT_OF_MEMORY;
      goto Error;
    }
    enc->transform_mem_ = mem;
    enc->transform_mem_size_ = (size_t)mem_size;
    // A fresh buffer no longer holds any previously prepared content.
    enc->argb_content_ = kEncoderNone;
  }
  // Slice the single allocation into three aligned sub-buffers.
  enc->argb_ = mem;
  mem = (uint32_t*)WEBP_ALIGN(mem + image_size);
  enc->argb_scratch_ = mem;
  mem = (uint32_t*)WEBP_ALIGN(mem + argb_scratch_size);
  enc->transform_data_ = mem;

  enc->current_width_ = width;
 Error:
  return err;
}
1245
1246
// Copies the source picture's ARGB rows into the encoder's working buffer
// (dropping any stride padding), unless the buffer already holds a copy.
static WebPEncodingError MakeInputImageCopy(VP8LEncoder* const enc) {
  const WebPPicture* const picture = enc->pic_;
  const int width = picture->width;
  const int height = picture->height;
  const uint32_t* src;
  uint32_t* dst;
  int y;
  const WebPEncodingError err = AllocateTransformBuffer(enc, width, height);
  if (err != VP8_ENC_OK) return err;
  // Nothing to do if the buffer already contains the picture's pixels.
  if (enc->argb_content_ == kEncoderARGB) return VP8_ENC_OK;

  src = picture->argb;
  dst = enc->argb_;
  for (y = 0; y < height; ++y) {
    memcpy(dst, src, width * sizeof(*dst));
    src += picture->argb_stride;
    dst += width;
  }
  enc->argb_content_ = kEncoderARGB;
  assert(enc->current_width_ == width);
  return VP8_ENC_OK;
}
1264
1265
// -----------------------------------------------------------------------------
1266
1267
// Binary-searches 'color' in the ascending array 'sorted' of size 'hi' and
// returns its index. 'color' is assumed to be present in the array.
static WEBP_INLINE int SearchColorNoIdx(const uint32_t sorted[], uint32_t color,
                                        int hi) {
  int low = 0;
  if (sorted[low] == color) return low;
  // From here on, the invariant sorted[low] != color holds, so 'mid' can
  // never collapse onto an unexamined 'low'.
  for (;;) {
    const int mid = (low + hi) >> 1;
    const uint32_t probe = sorted[mid];
    if (probe == color) return mid;
    if (probe < color) {
      low = mid;
    } else {
      hi = mid;
    }
  }
}
1282
1283
#define APPLY_PALETTE_GREEDY_MAX 4
1284
1285
// Looks up 'color' among the first three palette entries; a miss implies
// index 3 since the palette has fewer than APPLY_PALETTE_GREEDY_MAX colors.
static WEBP_INLINE uint32_t SearchColorGreedy(const uint32_t palette[],
                                              int palette_size,
                                              uint32_t color) {
  uint32_t k;
  (void)palette_size;
  assert(palette_size < APPLY_PALETTE_GREEDY_MAX);
  assert(3 == APPLY_PALETTE_GREEDY_MAX - 1);
  for (k = 0; k < 3; ++k) {
    if (color == palette[k]) return k;
  }
  return 3;
}
1296
1297
// Trivial palette hash: the green channel alone.
static WEBP_INLINE uint32_t ApplyPaletteHash0(uint32_t color) {
  const uint32_t green = (color >> 8) & 0xff;
  return green;
}
1301
1302
#define PALETTE_INV_SIZE_BITS 11
1303
#define PALETTE_INV_SIZE (1 << PALETTE_INV_SIZE_BITS)
1304
1305
// Multiplicative hash of the RGB bits: the low 32 bits of the 64-bit
// product are kept and the top PALETTE_INV_SIZE_BITS of those form the
// bucket index.
static WEBP_INLINE uint32_t ApplyPaletteHash1(uint32_t color) {
  // Forget about alpha.
  return ((uint32_t)((color & 0x00ffffffu) * 4222244071ull)) >>
         (32 - PALETTE_INV_SIZE_BITS);
}
1310
1311
// Second multiplicative hash over the RGB bits, using the Mersenne
// multiplier 2^31 - 1; same bucket extraction as ApplyPaletteHash1.
static WEBP_INLINE uint32_t ApplyPaletteHash2(uint32_t color) {
  // Forget about alpha.
  return ((uint32_t)((color & 0x00ffffffu) * ((1ull << 31) - 1))) >>
         (32 - PALETTE_INV_SIZE_BITS);
}
1316
1317
// Sort palette in increasing order and prepare an inverse mapping array.
1318
static void PrepareMapToPalette(const uint32_t palette[], int num_colors,
1319
uint32_t sorted[], uint32_t idx_map[]) {
1320
int i;
1321
memcpy(sorted, palette, num_colors * sizeof(*sorted));
1322
qsort(sorted, num_colors, sizeof(*sorted), PaletteCompareColorsForQsort);
1323
for (i = 0; i < num_colors; ++i) {
1324
idx_map[SearchColorNoIdx(sorted, palette[i], num_colors)] = i;
1325
}
1326
}
1327
1328
// Use 1 pixel cache for ARGB pixels.
1329
// Remaps every pixel of the image to its palette index, computed by the
// COLOR_INDEX expression (which may reference the current pixel as 'pix'),
// then bundles each index row into 'dst' via VP8LBundleColorMap.
// Relies on locals of the enclosing function being in scope: palette, src,
// dst, tmp_row, width, height, xbits, src_stride, dst_stride, x, y.
// A one-entry cache (prev_pix/prev_idx) skips the lookup when consecutive
// pixels repeat.
#define APPLY_PALETTE_FOR(COLOR_INDEX) do { \
uint32_t prev_pix = palette[0]; \
uint32_t prev_idx = 0; \
for (y = 0; y < height; ++y) { \
for (x = 0; x < width; ++x) { \
const uint32_t pix = src[x]; \
if (pix != prev_pix) { \
prev_idx = COLOR_INDEX; \
prev_pix = pix; \
} \
tmp_row[x] = prev_idx; \
} \
VP8LBundleColorMap(tmp_row, width, xbits, dst); \
src += src_stride; \
dst += dst_stride; \
} \
} while (0)
1346
1347
// Remap argb values in src[] to packed palettes entries in dst[]
1348
// using 'row' as a temporary buffer of size 'width'.
1349
// We assume that all src[] values have a corresponding entry in the palette.
1350
// Note: src[] can be the same as dst[]
1351
// Remaps the ARGB pixels of src[] to packed palette indices in dst[].
// Picks the cheapest lookup strategy available: a greedy linear probe for
// tiny palettes, a perfect-hash LUT when one of the three hash functions is
// collision-free on this palette, and a sorted-palette binary search as the
// fallback.
static WebPEncodingError ApplyPalette(const uint32_t* src, uint32_t src_stride,
                                      uint32_t* dst, uint32_t dst_stride,
                                      const uint32_t* palette, int palette_size,
                                      int width, int height, int xbits) {
  // TODO(skal): this tmp buffer is not needed if VP8LBundleColorMap() can be
  // made to work in-place.
  uint8_t* const tmp_row = (uint8_t*)WebPSafeMalloc(width, sizeof(*tmp_row));
  int x, y;

  if (tmp_row == NULL) return VP8_ENC_ERROR_OUT_OF_MEMORY;

  if (palette_size < APPLY_PALETTE_GREEDY_MAX) {
    APPLY_PALETTE_FOR(SearchColorGreedy(palette, palette_size, pix));
  } else {
    int i, j;
    uint16_t buffer[PALETTE_INV_SIZE];
    uint32_t (*const hash_functions[])(uint32_t) = {
        ApplyPaletteHash0, ApplyPaletteHash1, ApplyPaletteHash2
    };

    // Try to find a perfect hash function able to go from a color to an index
    // within 1 << PALETTE_INV_SIZE_BITS in order to build a hash map to go
    // from color to index in palette.
    for (i = 0; i < 3; ++i) {
      int use_LUT = 1;
      // Set each element in buffer to max uint16_t.
      memset(buffer, 0xff, sizeof(buffer));
      for (j = 0; j < palette_size; ++j) {
        const uint32_t ind = hash_functions[i](palette[j]);
        // 0xffff still marks an empty bucket; anything else is a collision.
        if (buffer[ind] != 0xffffu) {
          use_LUT = 0;
          break;
        } else {
          buffer[ind] = j;
        }
      }
      if (use_LUT) break;
    }

    // 'i' selects the hash that proved collision-free; i == 3 means none did.
    if (i == 0) {
      APPLY_PALETTE_FOR(buffer[ApplyPaletteHash0(pix)]);
    } else if (i == 1) {
      APPLY_PALETTE_FOR(buffer[ApplyPaletteHash1(pix)]);
    } else if (i == 2) {
      APPLY_PALETTE_FOR(buffer[ApplyPaletteHash2(pix)]);
    } else {
      uint32_t idx_map[MAX_PALETTE_SIZE];
      uint32_t palette_sorted[MAX_PALETTE_SIZE];
      PrepareMapToPalette(palette, palette_size, palette_sorted, idx_map);
      APPLY_PALETTE_FOR(
          idx_map[SearchColorNoIdx(palette_sorted, pix, palette_size)]);
    }
  }
  WebPSafeFree(tmp_row);
  return VP8_ENC_OK;
}
1407
#undef APPLY_PALETTE_FOR
1408
#undef PALETTE_INV_SIZE_BITS
1409
#undef PALETTE_INV_SIZE
1410
#undef APPLY_PALETTE_GREEDY_MAX
1411
1412
// Note: Expects "enc->palette_" to be set properly.
1413
// Note: Expects "enc->palette_" to be set properly.
// Converts the input image to packed palette indices stored in enc->argb_.
// Several indices are bundled per output pixel for small palettes (xbits
// sub-sampling), shrinking the effective width accordingly.
static WebPEncodingError MapImageFromPalette(VP8LEncoder* const enc,
                                             int in_place) {
  WebPEncodingError err = VP8_ENC_OK;
  const WebPPicture* const pic = enc->pic_;
  const int width = pic->width;
  const int height = pic->height;
  const uint32_t* const palette = enc->palette_;
  const uint32_t* src = in_place ? enc->argb_ : pic->argb;
  const int src_stride = in_place ? enc->current_width_ : pic->argb_stride;
  const int palette_size = enc->palette_size_;
  int xbits;

  // Replace each input pixel by corresponding palette index.
  // This is done line by line.
  if (palette_size <= 2) {
    xbits = 3;
  } else if (palette_size <= 4) {
    xbits = 2;
  } else if (palette_size <= 16) {
    xbits = 1;
  } else {
    xbits = 0;
  }

  err = AllocateTransformBuffer(enc, VP8LSubSampleSize(width, xbits), height);
  if (err != VP8_ENC_OK) return err;

  err = ApplyPalette(src, src_stride,
                     enc->argb_, enc->current_width_,
                     palette, palette_size, width, height, xbits);
  enc->argb_content_ = kEncoderPalette;
  return err;
}
1442
1443
// Save palette_[] to bitstream.
1444
static WebPEncodingError EncodePalette(VP8LBitWriter* const bw, int low_effort,
1445
VP8LEncoder* const enc) {
1446
int i;
1447
uint32_t tmp_palette[MAX_PALETTE_SIZE];
1448
const int palette_size = enc->palette_size_;
1449
const uint32_t* const palette = enc->palette_;
1450
VP8LPutBits(bw, TRANSFORM_PRESENT, 1);
1451
VP8LPutBits(bw, COLOR_INDEXING_TRANSFORM, 2);
1452
assert(palette_size >= 1 && palette_size <= MAX_PALETTE_SIZE);
1453
VP8LPutBits(bw, palette_size - 1, 8);
1454
for (i = palette_size - 1; i >= 1; --i) {
1455
tmp_palette[i] = VP8LSubPixels(palette[i], palette[i - 1]);
1456
}
1457
tmp_palette[0] = palette[0];
1458
return EncodeImageNoHuffman(bw, tmp_palette, &enc->hash_chain_,
1459
&enc->refs_[0], &enc->refs_[1], palette_size, 1,
1460
20 /* quality */, low_effort);
1461
}
1462
1463
// -----------------------------------------------------------------------------
1464
// VP8LEncoder
1465
1466
static VP8LEncoder* VP8LEncoderNew(const WebPConfig* const config,
1467
const WebPPicture* const picture) {
1468
VP8LEncoder* const enc = (VP8LEncoder*)WebPSafeCalloc(1ULL, sizeof(*enc));
1469
if (enc == NULL) {
1470
WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
1471
return NULL;
1472
}
1473
enc->config_ = config;
1474
enc->pic_ = picture;
1475
enc->argb_content_ = kEncoderNone;
1476
1477
VP8LEncDspInit();
1478
1479
return enc;
1480
}
1481
1482
// Releases everything owned by the encoder, then the encoder itself.
// Safe to call with NULL.
static void VP8LEncoderDelete(VP8LEncoder* enc) {
  int k;
  if (enc == NULL) return;
  VP8LHashChainClear(&enc->hash_chain_);
  for (k = 0; k < 3; ++k) VP8LBackwardRefsClear(&enc->refs_[k]);
  ClearTransformBuffer(enc);
  WebPSafeFree(enc);
}
1491
1492
// -----------------------------------------------------------------------------
1493
// Main call
1494
1495
// Per-worker encoding parameters, handed to EncodeStreamHook through
// WebPWorker::data1.
typedef struct {
  const WebPConfig* config_;      // encoding configuration (read-only)
  const WebPPicture* picture_;    // input picture (read-only)
  VP8LBitWriter* bw_;             // bit writer this worker emits into
  VP8LEncoder* enc_;              // encoder state owned by this worker
  int use_cache_;                 // whether a color cache may be used
  // Subset of crunch configurations this worker should try.
  CrunchConfig crunch_configs_[CRUNCH_CONFIGS_MAX];
  int num_crunch_configs_;
  int red_and_blue_always_zero_;  // if set, cross-color is skipped
  WebPEncodingError err_;         // out: status of this worker's encoding
  WebPAuxStats* stats_;           // out: stats destination (may be NULL)
} StreamEncodeContext;
1507
1508
// Worker hook: runs every crunch configuration assigned to this worker
// (palette / subtract-green / predictor combinations), encodes the picture
// for each, and keeps the smallest bitstream in params->bw_.
// Returns true on success; on failure, the error is stored in params->err_.
static int EncodeStreamHook(void* input, void* data2) {
  StreamEncodeContext* const params = (StreamEncodeContext*)input;
  const WebPConfig* const config = params->config_;
  const WebPPicture* const picture = params->picture_;
  VP8LBitWriter* const bw = params->bw_;
  VP8LEncoder* const enc = params->enc_;
  const int use_cache = params->use_cache_;
  const CrunchConfig* const crunch_configs = params->crunch_configs_;
  const int num_crunch_configs = params->num_crunch_configs_;
  const int red_and_blue_always_zero = params->red_and_blue_always_zero_;
#if !defined(WEBP_DISABLE_STATS)
  WebPAuxStats* const stats = params->stats_;
#endif
  WebPEncodingError err = VP8_ENC_OK;
  const int quality = (int)config->quality;
  const int low_effort = (config->method == 0);
#if (WEBP_NEAR_LOSSLESS == 1)
  const int width = picture->width;
#endif
  const int height = picture->height;
  const size_t byte_position = VP8LBitWriterNumBytes(bw);
#if (WEBP_NEAR_LOSSLESS == 1)
  int use_near_lossless = 0;
#endif
  int hdr_size = 0;
  int data_size = 0;
  int use_delta_palette = 0;
  int idx;
  size_t best_size = 0;
  // bw_init remembers the starting position so each configuration can
  // restart from scratch; bw_best holds the best bitstream so far.
  VP8LBitWriter bw_init = *bw, bw_best;
  (void)data2;

  if (!VP8LBitWriterInit(&bw_best, 0) ||
      (num_crunch_configs > 1 && !VP8LBitWriterClone(bw, &bw_best))) {
    err = VP8_ENC_ERROR_OUT_OF_MEMORY;
    goto Error;
  }

  for (idx = 0; idx < num_crunch_configs; ++idx) {
    const int entropy_idx = crunch_configs[idx].entropy_idx_;
    // Derive the transform set from the entropy mode of this configuration.
    enc->use_palette_ = (entropy_idx == kPalette);
    enc->use_subtract_green_ =
        (entropy_idx == kSubGreen) || (entropy_idx == kSpatialSubGreen);
    enc->use_predict_ =
        (entropy_idx == kSpatial) || (entropy_idx == kSpatialSubGreen);
    if (low_effort) {
      enc->use_cross_color_ = 0;
    } else {
      enc->use_cross_color_ = red_and_blue_always_zero ? 0 : enc->use_predict_;
    }
    // Reset any parameter in the encoder that is set in the previous iteration.
    enc->cache_bits_ = 0;
    VP8LBackwardRefsClear(&enc->refs_[0]);
    VP8LBackwardRefsClear(&enc->refs_[1]);

#if (WEBP_NEAR_LOSSLESS == 1)
    // Apply near-lossless preprocessing.
    use_near_lossless = (config->near_lossless < 100) && !enc->use_palette_ &&
                        !enc->use_predict_;
    if (use_near_lossless) {
      err = AllocateTransformBuffer(enc, width, height);
      if (err != VP8_ENC_OK) goto Error;
      if ((enc->argb_content_ != kEncoderNearLossless) &&
          !VP8ApplyNearLossless(picture, config->near_lossless, enc->argb_)) {
        err = VP8_ENC_ERROR_OUT_OF_MEMORY;
        goto Error;
      }
      enc->argb_content_ = kEncoderNearLossless;
    } else {
      enc->argb_content_ = kEncoderNone;
    }
#else
    enc->argb_content_ = kEncoderNone;
#endif

    // Encode palette
    if (enc->use_palette_) {
      err = EncodePalette(bw, low_effort, enc);
      if (err != VP8_ENC_OK) goto Error;
      err = MapImageFromPalette(enc, use_delta_palette);
      if (err != VP8_ENC_OK) goto Error;
      // If using a color cache, do not have it bigger than the number of
      // colors.
      if (use_cache && enc->palette_size_ < (1 << MAX_COLOR_CACHE_BITS)) {
        enc->cache_bits_ = BitsLog2Floor(enc->palette_size_) + 1;
      }
    }
    if (!use_delta_palette) {
      // In case image is not packed.
      if (enc->argb_content_ != kEncoderNearLossless &&
          enc->argb_content_ != kEncoderPalette) {
        err = MakeInputImageCopy(enc);
        if (err != VP8_ENC_OK) goto Error;
      }

      // -----------------------------------------------------------------------
      // Apply transforms and write transform data.

      if (enc->use_subtract_green_) {
        ApplySubtractGreen(enc, enc->current_width_, height, bw);
      }

      if (enc->use_predict_) {
        err = ApplyPredictFilter(enc, enc->current_width_, height, quality,
                                 low_effort, enc->use_subtract_green_, bw);
        if (err != VP8_ENC_OK) goto Error;
      }

      if (enc->use_cross_color_) {
        err = ApplyCrossColorFilter(enc, enc->current_width_, height, quality,
                                    low_effort, bw);
        if (err != VP8_ENC_OK) goto Error;
      }
    }

    VP8LPutBits(bw, !TRANSFORM_PRESENT, 1);  // No more transforms.

    // -------------------------------------------------------------------------
    // Encode and write the transformed image.
    err = EncodeImageInternal(bw, enc->argb_, &enc->hash_chain_, enc->refs_,
                              enc->current_width_, height, quality, low_effort,
                              use_cache, &crunch_configs[idx],
                              &enc->cache_bits_, enc->histo_bits_,
                              byte_position, &hdr_size, &data_size);
    if (err != VP8_ENC_OK) goto Error;

    // If we are better than what we already have.
    if (idx == 0 || VP8LBitWriterNumBytes(bw) < best_size) {
      best_size = VP8LBitWriterNumBytes(bw);
      // Store the BitWriter.
      VP8LBitWriterSwap(bw, &bw_best);
#if !defined(WEBP_DISABLE_STATS)
      // Update the stats.
      if (stats != NULL) {
        stats->lossless_features = 0;
        if (enc->use_predict_) stats->lossless_features |= 1;
        if (enc->use_cross_color_) stats->lossless_features |= 2;
        if (enc->use_subtract_green_) stats->lossless_features |= 4;
        if (enc->use_palette_) stats->lossless_features |= 8;
        stats->histogram_bits = enc->histo_bits_;
        stats->transform_bits = enc->transform_bits_;
        stats->cache_bits = enc->cache_bits_;
        stats->palette_size = enc->palette_size_;
        stats->lossless_size = (int)(best_size - byte_position);
        stats->lossless_hdr_size = hdr_size;
        stats->lossless_data_size = data_size;
      }
#endif
    }
    // Reset the bit writer for the following iteration if any.
    if (num_crunch_configs > 1) VP8LBitWriterReset(&bw_init, bw);
  }
  // Hand the best bitstream back to the caller's writer.
  VP8LBitWriterSwap(&bw_best, bw);

 Error:
  VP8LBitWriterWipeOut(&bw_best);
  params->err_ = err;
  // The hook should return false in case of error.
  return (err == VP8_ENC_OK);
}
1668
1669
// Top-level lossless stream encoder: analyzes the picture, splits the
// candidate crunch configurations between a main worker and (when
// config->thread_level > 0) a side worker running in a second thread, runs
// both through EncodeStreamHook, and keeps whichever bitstream is smaller
// in 'bw_main'.
WebPEncodingError VP8LEncodeStream(const WebPConfig* const config,
                                   const WebPPicture* const picture,
                                   VP8LBitWriter* const bw_main,
                                   int use_cache) {
  WebPEncodingError err = VP8_ENC_OK;
  VP8LEncoder* const enc_main = VP8LEncoderNew(config, picture);
  VP8LEncoder* enc_side = NULL;
  CrunchConfig crunch_configs[CRUNCH_CONFIGS_MAX];
  int num_crunch_configs_main, num_crunch_configs_side = 0;
  int idx;
  int red_and_blue_always_zero = 0;
  WebPWorker worker_main, worker_side;
  StreamEncodeContext params_main, params_side;
  // The main thread uses picture->stats, the side thread uses stats_side.
  WebPAuxStats stats_side;
  VP8LBitWriter bw_side;
  const WebPWorkerInterface* const worker_interface = WebPGetWorkerInterface();
  int ok_main;

  // Analyze image (entropy, num_palettes etc)
  if (enc_main == NULL ||
      !EncoderAnalyze(enc_main, crunch_configs, &num_crunch_configs_main,
                      &red_and_blue_always_zero) ||
      !EncoderInit(enc_main) || !VP8LBitWriterInit(&bw_side, 0)) {
    err = VP8_ENC_ERROR_OUT_OF_MEMORY;
    goto Error;
  }

  // Split the configs between the main and side threads (if any).
  if (config->thread_level > 0) {
    // The side thread takes the second half of the configurations.
    num_crunch_configs_side = num_crunch_configs_main / 2;
    for (idx = 0; idx < num_crunch_configs_side; ++idx) {
      params_side.crunch_configs_[idx] =
          crunch_configs[num_crunch_configs_main - num_crunch_configs_side +
                         idx];
    }
    params_side.num_crunch_configs_ = num_crunch_configs_side;
  }
  num_crunch_configs_main -= num_crunch_configs_side;
  for (idx = 0; idx < num_crunch_configs_main; ++idx) {
    params_main.crunch_configs_[idx] = crunch_configs[idx];
  }
  params_main.num_crunch_configs_ = num_crunch_configs_main;

  // Fill in the parameters for the thread workers.
  {
    const int params_size = (num_crunch_configs_side > 0) ? 2 : 1;
    for (idx = 0; idx < params_size; ++idx) {
      // Create the parameters for each worker.
      WebPWorker* const worker = (idx == 0) ? &worker_main : &worker_side;
      StreamEncodeContext* const param =
          (idx == 0) ? &params_main : &params_side;
      param->config_ = config;
      param->picture_ = picture;
      param->use_cache_ = use_cache;
      param->red_and_blue_always_zero_ = red_and_blue_always_zero;
      if (idx == 0) {
        param->stats_ = picture->stats;
        param->bw_ = bw_main;
        param->enc_ = enc_main;
      } else {
        param->stats_ = (picture->stats == NULL) ? NULL : &stats_side;
        // Create a side bit writer.
        if (!VP8LBitWriterClone(bw_main, &bw_side)) {
          err = VP8_ENC_ERROR_OUT_OF_MEMORY;
          goto Error;
        }
        param->bw_ = &bw_side;
        // Create a side encoder.
        enc_side = VP8LEncoderNew(config, picture);
        if (enc_side == NULL || !EncoderInit(enc_side)) {
          err = VP8_ENC_ERROR_OUT_OF_MEMORY;
          goto Error;
        }
        // Copy the values that were computed for the main encoder.
        enc_side->histo_bits_ = enc_main->histo_bits_;
        enc_side->transform_bits_ = enc_main->transform_bits_;
        enc_side->palette_size_ = enc_main->palette_size_;
        memcpy(enc_side->palette_, enc_main->palette_,
               sizeof(enc_main->palette_));
        param->enc_ = enc_side;
      }
      // Create the workers.
      worker_interface->Init(worker);
      worker->data1 = param;
      worker->data2 = NULL;
      worker->hook = EncodeStreamHook;
    }
  }

  // Start the second thread if needed.
  if (num_crunch_configs_side != 0) {
    if (!worker_interface->Reset(&worker_side)) {
      err = VP8_ENC_ERROR_OUT_OF_MEMORY;
      goto Error;
    }
#if !defined(WEBP_DISABLE_STATS)
    // This line is here and not in the param initialization above to remove a
    // Clang static analyzer warning.
    if (picture->stats != NULL) {
      memcpy(&stats_side, picture->stats, sizeof(stats_side));
    }
#endif
    // This line is only useful to remove a Clang static analyzer warning.
    params_side.err_ = VP8_ENC_OK;
    worker_interface->Launch(&worker_side);
  }
  // Execute the main thread.
  worker_interface->Execute(&worker_main);
  ok_main = worker_interface->Sync(&worker_main);
  worker_interface->End(&worker_main);
  if (num_crunch_configs_side != 0) {
    // Wait for the second thread.
    const int ok_side = worker_interface->Sync(&worker_side);
    worker_interface->End(&worker_side);
    if (!ok_main || !ok_side) {
      err = ok_main ? params_side.err_ : params_main.err_;
      goto Error;
    }
    // Keep whichever of the two bitstreams ended up smaller.
    if (VP8LBitWriterNumBytes(&bw_side) < VP8LBitWriterNumBytes(bw_main)) {
      VP8LBitWriterSwap(bw_main, &bw_side);
#if !defined(WEBP_DISABLE_STATS)
      if (picture->stats != NULL) {
        memcpy(picture->stats, &stats_side, sizeof(*picture->stats));
      }
#endif
    }
  } else {
    if (!ok_main) {
      err = params_main.err_;
      goto Error;
    }
  }

 Error:
  VP8LBitWriterWipeOut(&bw_side);
  VP8LEncoderDelete(enc_main);
  VP8LEncoderDelete(enc_side);
  return err;
}
#undef CRUNCH_CONFIGS_MAX
#undef CRUNCH_CONFIGS_LZ77_MAX

int VP8LEncodeImage(const WebPConfig* const config,
1814
const WebPPicture* const picture) {
1815
int width, height;
1816
int has_alpha;
1817
size_t coded_size;
1818
int percent = 0;
1819
int initial_size;
1820
WebPEncodingError err = VP8_ENC_OK;
1821
VP8LBitWriter bw;
1822
1823
if (picture == NULL) return 0;
1824
1825
if (config == NULL || picture->argb == NULL) {
1826
err = VP8_ENC_ERROR_NULL_PARAMETER;
1827
WebPEncodingSetError(picture, err);
1828
return 0;
1829
}
1830
1831
width = picture->width;
1832
height = picture->height;
1833
// Initialize BitWriter with size corresponding to 16 bpp to photo images and
1834
// 8 bpp for graphical images.
1835
initial_size = (config->image_hint == WEBP_HINT_GRAPH) ?
1836
width * height : width * height * 2;
1837
if (!VP8LBitWriterInit(&bw, initial_size)) {
1838
err = VP8_ENC_ERROR_OUT_OF_MEMORY;
1839
goto Error;
1840
}
1841
1842
if (!WebPReportProgress(picture, 1, &percent)) {
1843
UserAbort:
1844
err = VP8_ENC_ERROR_USER_ABORT;
1845
goto Error;
1846
}
1847
// Reset stats (for pure lossless coding)
1848
if (picture->stats != NULL) {
1849
WebPAuxStats* const stats = picture->stats;
1850
memset(stats, 0, sizeof(*stats));
1851
stats->PSNR[0] = 99.f;
1852
stats->PSNR[1] = 99.f;
1853
stats->PSNR[2] = 99.f;
1854
stats->PSNR[3] = 99.f;
1855
stats->PSNR[4] = 99.f;
1856
}
1857
1858
// Write image size.
1859
if (!WriteImageSize(picture, &bw)) {
1860
err = VP8_ENC_ERROR_OUT_OF_MEMORY;
1861
goto Error;
1862
}
1863
1864
has_alpha = WebPPictureHasTransparency(picture);
1865
// Write the non-trivial Alpha flag and lossless version.
1866
if (!WriteRealAlphaAndVersion(&bw, has_alpha)) {
1867
err = VP8_ENC_ERROR_OUT_OF_MEMORY;
1868
goto Error;
1869
}
1870
1871
if (!WebPReportProgress(picture, 5, &percent)) goto UserAbort;
1872
1873
// Encode main image stream.
1874
err = VP8LEncodeStream(config, picture, &bw, 1 /*use_cache*/);
1875
if (err != VP8_ENC_OK) goto Error;
1876
1877
if (!WebPReportProgress(picture, 90, &percent)) goto UserAbort;
1878
1879
// Finish the RIFF chunk.
1880
err = WriteImage(picture, &bw, &coded_size);
1881
if (err != VP8_ENC_OK) goto Error;
1882
1883
if (!WebPReportProgress(picture, 100, &percent)) goto UserAbort;
1884
1885
#if !defined(WEBP_DISABLE_STATS)
1886
// Save size.
1887
if (picture->stats != NULL) {
1888
picture->stats->coded_size += (int)coded_size;
1889
picture->stats->lossless_size = (int)coded_size;
1890
}
1891
#endif
1892
1893
if (picture->extra_info != NULL) {
1894
const int mb_w = (width + 15) >> 4;
1895
const int mb_h = (height + 15) >> 4;
1896
memset(picture->extra_info, 0, mb_w * mb_h * sizeof(*picture->extra_info));
1897
}
1898
1899
Error:
1900
if (bw.error_) err = VP8_ENC_ERROR_OUT_OF_MEMORY;
1901
VP8LBitWriterWipeOut(&bw);
1902
if (err != VP8_ENC_OK) {
1903
WebPEncodingSetError(picture, err);
1904
return 0;
1905
}
1906
return 1;
1907
}
1908
1909
//------------------------------------------------------------------------------