1 // Copyright 2012 Google Inc. All Rights Reserved. 2 // 3 // Use of this source code is governed by a BSD-style license 4 // that can be found in the COPYING file in the root of the source 5 // tree. An additional intellectual property rights grant can be found 6 // in the file PATENTS. All contributing project authors may 7 // be found in the AUTHORS file in the root of the source tree. 8 // ----------------------------------------------------------------------------- 9 // 10 // main entry for the lossless encoder. 11 // 12 // Author: Vikas Arora (vikaas.arora@gmail.com) 13 // 14 15 #include <assert.h> 16 #include <stdlib.h> 17 18 #include "src/enc/backward_references_enc.h" 19 #include "src/enc/histogram_enc.h" 20 #include "src/enc/vp8i_enc.h" 21 #include "src/enc/vp8li_enc.h" 22 #include "src/dsp/lossless.h" 23 #include "src/dsp/lossless_common.h" 24 #include "src/utils/bit_writer_utils.h" 25 #include "src/utils/huffman_encode_utils.h" 26 #include "src/utils/utils.h" 27 #include "src/webp/format_constants.h" 28 29 // Maximum number of histogram images (sub-blocks). 30 #define MAX_HUFF_IMAGE_SIZE 2600 31 32 // Palette reordering for smaller sum of deltas (and for smaller storage). 33 34 static int PaletteCompareColorsForQsort(const void* p1, const void* p2) { 35 const uint32_t a = WebPMemToUint32((uint8_t*)p1); 36 const uint32_t b = WebPMemToUint32((uint8_t*)p2); 37 assert(a != b); 38 return (a < b) ? -1 : 1; 39 } 40 41 static WEBP_INLINE uint32_t PaletteComponentDistance(uint32_t v) { 42 return (v <= 128) ? v : (256 - v); 43 } 44 45 // Computes a value that is related to the entropy created by the 46 // palette entry diff. 47 // 48 // Note that the last & 0xff is a no-operation in the next statement, but 49 // removed by most compilers and is here only for regularity of the code. 50 static WEBP_INLINE uint32_t PaletteColorDistance(uint32_t col1, uint32_t col2) { 51 const uint32_t diff = VP8LSubPixels(col1, col2); 52 const int kMoreWeightForRGBThanForAlpha = 9; 53 uint32_t score; 54 score = PaletteComponentDistance((diff >> 0) & 0xff); 55 score += PaletteComponentDistance((diff >> 8) & 0xff); 56 score += PaletteComponentDistance((diff >> 16) & 0xff); 57 score *= kMoreWeightForRGBThanForAlpha; 58 score += PaletteComponentDistance((diff >> 24) & 0xff); 59 return score; 60 } 61 62 static WEBP_INLINE void SwapColor(uint32_t* const col1, uint32_t* const col2) { 63 const uint32_t tmp = *col1; 64 *col1 = *col2; 65 *col2 = tmp; 66 } 67 68 static void GreedyMinimizeDeltas(uint32_t palette[], int num_colors) { 69 // Find greedily always the closest color of the predicted color to minimize 70 // deltas in the palette. This reduces storage needs since the 71 // palette is stored with delta encoding. 72 uint32_t predict = 0x00000000; 73 int i, k; 74 for (i = 0; i < num_colors; ++i) { 75 int best_ix = i; 76 uint32_t best_score = ~0U; 77 for (k = i; k < num_colors; ++k) { 78 const uint32_t cur_score = PaletteColorDistance(palette[k], predict); 79 if (best_score > cur_score) { 80 best_score = cur_score; 81 best_ix = k; 82 } 83 } 84 SwapColor(&palette[best_ix], &palette[i]); 85 predict = palette[i]; 86 } 87 } 88 89 // The palette has been sorted by alpha. This function checks if the other 90 // components of the palette have a monotonic development with regards to 91 // position in the palette. If all have monotonic development, there is 92 // no benefit to re-organize them greedily. A monotonic development 93 // would be spotted in green-only situations (like lossy alpha) or gray-scale 94 // images. 95 static int PaletteHasNonMonotonousDeltas(uint32_t palette[], int num_colors) { 96 uint32_t predict = 0x000000; 97 int i; 98 uint8_t sign_found = 0x00; 99 for (i = 0; i < num_colors; ++i) { 100 const uint32_t diff = VP8LSubPixels(palette[i], predict); 101 const uint8_t rd = (diff >> 16) & 0xff; 102 const uint8_t gd = (diff >> 8) & 0xff; 103 const uint8_t bd = (diff >> 0) & 0xff; 104 if (rd != 0x00) { 105 sign_found |= (rd < 0x80) ? 1 : 2; 106 } 107 if (gd != 0x00) { 108 sign_found |= (gd < 0x80) ? 8 : 16; 109 } 110 if (bd != 0x00) { 111 sign_found |= (bd < 0x80) ? 64 : 128; 112 } 113 predict = palette[i]; 114 } 115 return (sign_found & (sign_found << 1)) != 0; // two consequent signs. 116 } 117 118 // ----------------------------------------------------------------------------- 119 // Palette 120 121 // If number of colors in the image is less than or equal to MAX_PALETTE_SIZE, 122 // creates a palette and returns true, else returns false. 123 static int AnalyzeAndCreatePalette(const WebPPicture* const pic, 124 int low_effort, 125 uint32_t palette[MAX_PALETTE_SIZE], 126 int* const palette_size) { 127 const int num_colors = WebPGetColorPalette(pic, palette); 128 if (num_colors > MAX_PALETTE_SIZE) { 129 *palette_size = 0; 130 return 0; 131 } 132 *palette_size = num_colors; 133 qsort(palette, num_colors, sizeof(*palette), PaletteCompareColorsForQsort); 134 if (!low_effort && PaletteHasNonMonotonousDeltas(palette, num_colors)) { 135 GreedyMinimizeDeltas(palette, num_colors); 136 } 137 return 1; 138 } 139 140 // These five modes are evaluated and their respective entropy is computed. 141 typedef enum { 142 kDirect = 0, 143 kSpatial = 1, 144 kSubGreen = 2, 145 kSpatialSubGreen = 3, 146 kPalette = 4, 147 kNumEntropyIx = 5 148 } EntropyIx; 149 150 typedef enum { 151 kHistoAlpha = 0, 152 kHistoAlphaPred, 153 kHistoGreen, 154 kHistoGreenPred, 155 kHistoRed, 156 kHistoRedPred, 157 kHistoBlue, 158 kHistoBluePred, 159 kHistoRedSubGreen, 160 kHistoRedPredSubGreen, 161 kHistoBlueSubGreen, 162 kHistoBluePredSubGreen, 163 kHistoPalette, 164 kHistoTotal // Must be last. 165 } HistoIx; 166 167 static void AddSingleSubGreen(int p, uint32_t* const r, uint32_t* const b) { 168 const int green = p >> 8; // The upper bits are masked away later. 169 ++r[((p >> 16) - green) & 0xff]; 170 ++b[((p >> 0) - green) & 0xff]; 171 } 172 173 static void AddSingle(uint32_t p, 174 uint32_t* const a, uint32_t* const r, 175 uint32_t* const g, uint32_t* const b) { 176 ++a[(p >> 24) & 0xff]; 177 ++r[(p >> 16) & 0xff]; 178 ++g[(p >> 8) & 0xff]; 179 ++b[(p >> 0) & 0xff]; 180 } 181 182 static WEBP_INLINE uint32_t HashPix(uint32_t pix) { 183 // Note that masking with 0xffffffffu is for preventing an 184 // 'unsigned int overflow' warning. Doesn't impact the compiled code. 185 return ((((uint64_t)pix + (pix >> 19)) * 0x39c5fba7ull) & 0xffffffffu) >> 24; 186 } 187 188 static int AnalyzeEntropy(const uint32_t* argb, 189 int width, int height, int argb_stride, 190 int use_palette, 191 int palette_size, int transform_bits, 192 EntropyIx* const min_entropy_ix, 193 int* const red_and_blue_always_zero) { 194 // Allocate histogram set with cache_bits = 0. 195 uint32_t* histo; 196 197 if (use_palette && palette_size <= 16) { 198 // In the case of small palettes, we pack 2, 4 or 8 pixels together. In 199 // practice, small palettes are better than any other transform. 200 *min_entropy_ix = kPalette; 201 *red_and_blue_always_zero = 1; 202 return 1; 203 } 204 histo = (uint32_t*)WebPSafeCalloc(kHistoTotal, sizeof(*histo) * 256); 205 if (histo != NULL) { 206 int i, x, y; 207 const uint32_t* prev_row = NULL; 208 const uint32_t* curr_row = argb; 209 uint32_t pix_prev = argb[0]; // Skip the first pixel. 210 for (y = 0; y < height; ++y) { 211 for (x = 0; x < width; ++x) { 212 const uint32_t pix = curr_row[x]; 213 const uint32_t pix_diff = VP8LSubPixels(pix, pix_prev); 214 pix_prev = pix; 215 if ((pix_diff == 0) || (prev_row != NULL && pix == prev_row[x])) { 216 continue; 217 } 218 AddSingle(pix, 219 &histo[kHistoAlpha * 256], 220 &histo[kHistoRed * 256], 221 &histo[kHistoGreen * 256], 222 &histo[kHistoBlue * 256]); 223 AddSingle(pix_diff, 224 &histo[kHistoAlphaPred * 256], 225 &histo[kHistoRedPred * 256], 226 &histo[kHistoGreenPred * 256], 227 &histo[kHistoBluePred * 256]); 228 AddSingleSubGreen(pix, 229 &histo[kHistoRedSubGreen * 256], 230 &histo[kHistoBlueSubGreen * 256]); 231 AddSingleSubGreen(pix_diff, 232 &histo[kHistoRedPredSubGreen * 256], 233 &histo[kHistoBluePredSubGreen * 256]); 234 { 235 // Approximate the palette by the entropy of the multiplicative hash. 236 const uint32_t hash = HashPix(pix); 237 ++histo[kHistoPalette * 256 + hash]; 238 } 239 } 240 prev_row = curr_row; 241 curr_row += argb_stride; 242 } 243 { 244 double entropy_comp[kHistoTotal]; 245 double entropy[kNumEntropyIx]; 246 int k; 247 int last_mode_to_analyze = use_palette ? kPalette : kSpatialSubGreen; 248 int j; 249 // Let's add one zero to the predicted histograms. The zeros are removed 250 // too efficiently by the pix_diff == 0 comparison, at least one of the 251 // zeros is likely to exist. 252 ++histo[kHistoRedPredSubGreen * 256]; 253 ++histo[kHistoBluePredSubGreen * 256]; 254 ++histo[kHistoRedPred * 256]; 255 ++histo[kHistoGreenPred * 256]; 256 ++histo[kHistoBluePred * 256]; 257 ++histo[kHistoAlphaPred * 256]; 258 259 for (j = 0; j < kHistoTotal; ++j) { 260 entropy_comp[j] = VP8LBitsEntropy(&histo[j * 256], 256); 261 } 262 entropy[kDirect] = entropy_comp[kHistoAlpha] + 263 entropy_comp[kHistoRed] + 264 entropy_comp[kHistoGreen] + 265 entropy_comp[kHistoBlue]; 266 entropy[kSpatial] = entropy_comp[kHistoAlphaPred] + 267 entropy_comp[kHistoRedPred] + 268 entropy_comp[kHistoGreenPred] + 269 entropy_comp[kHistoBluePred]; 270 entropy[kSubGreen] = entropy_comp[kHistoAlpha] + 271 entropy_comp[kHistoRedSubGreen] + 272 entropy_comp[kHistoGreen] + 273 entropy_comp[kHistoBlueSubGreen]; 274 entropy[kSpatialSubGreen] = entropy_comp[kHistoAlphaPred] + 275 entropy_comp[kHistoRedPredSubGreen] + 276 entropy_comp[kHistoGreenPred] + 277 entropy_comp[kHistoBluePredSubGreen]; 278 entropy[kPalette] = entropy_comp[kHistoPalette]; 279 280 // When including transforms, there is an overhead in bits from 281 // storing them. This overhead is small but matters for small images. 282 // For spatial, there are 14 transformations. 283 entropy[kSpatial] += VP8LSubSampleSize(width, transform_bits) * 284 VP8LSubSampleSize(height, transform_bits) * 285 VP8LFastLog2(14); 286 // For color transforms: 24 as only 3 channels are considered in a 287 // ColorTransformElement. 288 entropy[kSpatialSubGreen] += VP8LSubSampleSize(width, transform_bits) * 289 VP8LSubSampleSize(height, transform_bits) * 290 VP8LFastLog2(24); 291 // For palettes, add the cost of storing the palette. 292 // We empirically estimate the cost of a compressed entry as 8 bits. 293 // The palette is differential-coded when compressed hence a much 294 // lower cost than sizeof(uint32_t)*8. 295 entropy[kPalette] += palette_size * 8; 296 297 *min_entropy_ix = kDirect; 298 for (k = kDirect + 1; k <= last_mode_to_analyze; ++k) { 299 if (entropy[*min_entropy_ix] > entropy[k]) { 300 *min_entropy_ix = (EntropyIx)k; 301 } 302 } 303 assert((int)*min_entropy_ix <= last_mode_to_analyze); 304 *red_and_blue_always_zero = 1; 305 // Let's check if the histogram of the chosen entropy mode has 306 // non-zero red and blue values. If all are zero, we can later skip 307 // the cross color optimization. 308 { 309 static const uint8_t kHistoPairs[5][2] = { 310 { kHistoRed, kHistoBlue }, 311 { kHistoRedPred, kHistoBluePred }, 312 { kHistoRedSubGreen, kHistoBlueSubGreen }, 313 { kHistoRedPredSubGreen, kHistoBluePredSubGreen }, 314 { kHistoRed, kHistoBlue } 315 }; 316 const uint32_t* const red_histo = 317 &histo[256 * kHistoPairs[*min_entropy_ix][0]]; 318 const uint32_t* const blue_histo = 319 &histo[256 * kHistoPairs[*min_entropy_ix][1]]; 320 for (i = 1; i < 256; ++i) { 321 if ((red_histo[i] | blue_histo[i]) != 0) { 322 *red_and_blue_always_zero = 0; 323 break; 324 } 325 } 326 } 327 } 328 WebPSafeFree(histo); 329 return 1; 330 } else { 331 return 0; 332 } 333 } 334 335 static int GetHistoBits(int method, int use_palette, int width, int height) { 336 // Make tile size a function of encoding method (Range: 0 to 6). 337 int histo_bits = (use_palette ? 9 : 7) - method; 338 while (1) { 339 const int huff_image_size = VP8LSubSampleSize(width, histo_bits) * 340 VP8LSubSampleSize(height, histo_bits); 341 if (huff_image_size <= MAX_HUFF_IMAGE_SIZE) break; 342 ++histo_bits; 343 } 344 return (histo_bits < MIN_HUFFMAN_BITS) ? MIN_HUFFMAN_BITS : 345 (histo_bits > MAX_HUFFMAN_BITS) ? MAX_HUFFMAN_BITS : histo_bits; 346 } 347 348 static int GetTransformBits(int method, int histo_bits) { 349 const int max_transform_bits = (method < 4) ? 6 : (method > 4) ? 4 : 5; 350 const int res = 351 (histo_bits > max_transform_bits) ? max_transform_bits : histo_bits; 352 assert(res <= MAX_TRANSFORM_BITS); 353 return res; 354 } 355 356 // Set of parameters to be used in each iteration of the cruncher. 357 #define CRUNCH_CONFIGS_LZ77_MAX 2 358 typedef struct { 359 int entropy_idx_; 360 int lz77s_types_to_try_[CRUNCH_CONFIGS_LZ77_MAX]; 361 int lz77s_types_to_try_size_; 362 } CrunchConfig; 363 364 #define CRUNCH_CONFIGS_MAX kNumEntropyIx 365 366 static int EncoderAnalyze(VP8LEncoder* const enc, 367 CrunchConfig crunch_configs[CRUNCH_CONFIGS_MAX], 368 int* const crunch_configs_size, 369 int* const red_and_blue_always_zero) { 370 const WebPPicture* const pic = enc->pic_; 371 const int width = pic->width; 372 const int height = pic->height; 373 const WebPConfig* const config = enc->config_; 374 const int method = config->method; 375 const int low_effort = (config->method == 0); 376 int i; 377 int use_palette; 378 int n_lz77s; 379 assert(pic != NULL && pic->argb != NULL); 380 381 use_palette = 382 AnalyzeAndCreatePalette(pic, low_effort, 383 enc->palette_, &enc->palette_size_); 384 385 // Empirical bit sizes. 386 enc->histo_bits_ = GetHistoBits(method, use_palette, 387 pic->width, pic->height); 388 enc->transform_bits_ = GetTransformBits(method, enc->histo_bits_); 389 390 if (low_effort) { 391 // AnalyzeEntropy is somewhat slow. 392 crunch_configs[0].entropy_idx_ = use_palette ? kPalette : kSpatialSubGreen; 393 n_lz77s = 1; 394 *crunch_configs_size = 1; 395 } else { 396 EntropyIx min_entropy_ix; 397 // Try out multiple LZ77 on images with few colors. 398 n_lz77s = (enc->palette_size_ > 0 && enc->palette_size_ <= 16) ? 2 : 1; 399 if (!AnalyzeEntropy(pic->argb, width, height, pic->argb_stride, use_palette, 400 enc->palette_size_, enc->transform_bits_, 401 &min_entropy_ix, red_and_blue_always_zero)) { 402 return 0; 403 } 404 if (method == 6 && config->quality == 100) { 405 // Go brute force on all transforms. 406 *crunch_configs_size = 0; 407 for (i = 0; i < kNumEntropyIx; ++i) { 408 if (i != kPalette || use_palette) { 409 assert(*crunch_configs_size < CRUNCH_CONFIGS_MAX); 410 crunch_configs[(*crunch_configs_size)++].entropy_idx_ = i; 411 } 412 } 413 } else { 414 // Only choose the guessed best transform. 415 *crunch_configs_size = 1; 416 crunch_configs[0].entropy_idx_ = min_entropy_ix; 417 } 418 } 419 // Fill in the different LZ77s. 420 assert(n_lz77s <= CRUNCH_CONFIGS_LZ77_MAX); 421 for (i = 0; i < *crunch_configs_size; ++i) { 422 int j; 423 for (j = 0; j < n_lz77s; ++j) { 424 crunch_configs[i].lz77s_types_to_try_[j] = 425 (j == 0) ? kLZ77Standard | kLZ77RLE : kLZ77Box; 426 } 427 crunch_configs[i].lz77s_types_to_try_size_ = n_lz77s; 428 } 429 return 1; 430 } 431 432 static int EncoderInit(VP8LEncoder* const enc) { 433 const WebPPicture* const pic = enc->pic_; 434 const int width = pic->width; 435 const int height = pic->height; 436 const int pix_cnt = width * height; 437 // we round the block size up, so we're guaranteed to have 438 // at most MAX_REFS_BLOCK_PER_IMAGE blocks used: 439 const int refs_block_size = (pix_cnt - 1) / MAX_REFS_BLOCK_PER_IMAGE + 1; 440 int i; 441 if (!VP8LHashChainInit(&enc->hash_chain_, pix_cnt)) return 0; 442 443 for (i = 0; i < 3; ++i) VP8LBackwardRefsInit(&enc->refs_[i], refs_block_size); 444 445 return 1; 446 } 447 448 // Returns false in case of memory error. 449 static int GetHuffBitLengthsAndCodes( 450 const VP8LHistogramSet* const histogram_image, 451 HuffmanTreeCode* const huffman_codes) { 452 int i, k; 453 int ok = 0; 454 uint64_t total_length_size = 0; 455 uint8_t* mem_buf = NULL; 456 const int histogram_image_size = histogram_image->size; 457 int max_num_symbols = 0; 458 uint8_t* buf_rle = NULL; 459 HuffmanTree* huff_tree = NULL; 460 461 // Iterate over all histograms and get the aggregate number of codes used. 462 for (i = 0; i < histogram_image_size; ++i) { 463 const VP8LHistogram* const histo = histogram_image->histograms[i]; 464 HuffmanTreeCode* const codes = &huffman_codes[5 * i]; 465 assert(histo != NULL); 466 for (k = 0; k < 5; ++k) { 467 const int num_symbols = 468 (k == 0) ? VP8LHistogramNumCodes(histo->palette_code_bits_) : 469 (k == 4) ? NUM_DISTANCE_CODES : 256; 470 codes[k].num_symbols = num_symbols; 471 total_length_size += num_symbols; 472 } 473 } 474 475 // Allocate and Set Huffman codes. 476 { 477 uint16_t* codes; 478 uint8_t* lengths; 479 mem_buf = (uint8_t*)WebPSafeCalloc(total_length_size, 480 sizeof(*lengths) + sizeof(*codes)); 481 if (mem_buf == NULL) goto End; 482 483 codes = (uint16_t*)mem_buf; 484 lengths = (uint8_t*)&codes[total_length_size]; 485 for (i = 0; i < 5 * histogram_image_size; ++i) { 486 const int bit_length = huffman_codes[i].num_symbols; 487 huffman_codes[i].codes = codes; 488 huffman_codes[i].code_lengths = lengths; 489 codes += bit_length; 490 lengths += bit_length; 491 if (max_num_symbols < bit_length) { 492 max_num_symbols = bit_length; 493 } 494 } 495 } 496 497 buf_rle = (uint8_t*)WebPSafeMalloc(1ULL, max_num_symbols); 498 huff_tree = (HuffmanTree*)WebPSafeMalloc(3ULL * max_num_symbols, 499 sizeof(*huff_tree)); 500 if (buf_rle == NULL || huff_tree == NULL) goto End; 501 502 // Create Huffman trees. 503 for (i = 0; i < histogram_image_size; ++i) { 504 HuffmanTreeCode* const codes = &huffman_codes[5 * i]; 505 VP8LHistogram* const histo = histogram_image->histograms[i]; 506 VP8LCreateHuffmanTree(histo->literal_, 15, buf_rle, huff_tree, codes + 0); 507 VP8LCreateHuffmanTree(histo->red_, 15, buf_rle, huff_tree, codes + 1); 508 VP8LCreateHuffmanTree(histo->blue_, 15, buf_rle, huff_tree, codes + 2); 509 VP8LCreateHuffmanTree(histo->alpha_, 15, buf_rle, huff_tree, codes + 3); 510 VP8LCreateHuffmanTree(histo->distance_, 15, buf_rle, huff_tree, codes + 4); 511 } 512 ok = 1; 513 End: 514 WebPSafeFree(huff_tree); 515 WebPSafeFree(buf_rle); 516 if (!ok) { 517 WebPSafeFree(mem_buf); 518 memset(huffman_codes, 0, 5 * histogram_image_size * sizeof(*huffman_codes)); 519 } 520 return ok; 521 } 522 523 static void StoreHuffmanTreeOfHuffmanTreeToBitMask( 524 VP8LBitWriter* const bw, const uint8_t* code_length_bitdepth) { 525 // RFC 1951 will calm you down if you are worried about this funny sequence. 526 // This sequence is tuned from that, but more weighted for lower symbol count, 527 // and more spiking histograms. 528 static const uint8_t kStorageOrder[CODE_LENGTH_CODES] = { 529 17, 18, 0, 1, 2, 3, 4, 5, 16, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 530 }; 531 int i; 532 // Throw away trailing zeros: 533 int codes_to_store = CODE_LENGTH_CODES; 534 for (; codes_to_store > 4; --codes_to_store) { 535 if (code_length_bitdepth[kStorageOrder[codes_to_store - 1]] != 0) { 536 break; 537 } 538 } 539 VP8LPutBits(bw, codes_to_store - 4, 4); 540 for (i = 0; i < codes_to_store; ++i) { 541 VP8LPutBits(bw, code_length_bitdepth[kStorageOrder[i]], 3); 542 } 543 } 544 545 static void ClearHuffmanTreeIfOnlyOneSymbol( 546 HuffmanTreeCode* const huffman_code) { 547 int k; 548 int count = 0; 549 for (k = 0; k < huffman_code->num_symbols; ++k) { 550 if (huffman_code->code_lengths[k] != 0) { 551 ++count; 552 if (count > 1) return; 553 } 554 } 555 for (k = 0; k < huffman_code->num_symbols; ++k) { 556 huffman_code->code_lengths[k] = 0; 557 huffman_code->codes[k] = 0; 558 } 559 } 560 561 static void StoreHuffmanTreeToBitMask( 562 VP8LBitWriter* const bw, 563 const HuffmanTreeToken* const tokens, const int num_tokens, 564 const HuffmanTreeCode* const huffman_code) { 565 int i; 566 for (i = 0; i < num_tokens; ++i) { 567 const int ix = tokens[i].code; 568 const int extra_bits = tokens[i].extra_bits; 569 VP8LPutBits(bw, huffman_code->codes[ix], huffman_code->code_lengths[ix]); 570 switch (ix) { 571 case 16: 572 VP8LPutBits(bw, extra_bits, 2); 573 break; 574 case 17: 575 VP8LPutBits(bw, extra_bits, 3); 576 break; 577 case 18: 578 VP8LPutBits(bw, extra_bits, 7); 579 break; 580 } 581 } 582 } 583 584 // 'huff_tree' and 'tokens' are pre-alloacted buffers. 585 static void StoreFullHuffmanCode(VP8LBitWriter* const bw, 586 HuffmanTree* const huff_tree, 587 HuffmanTreeToken* const tokens, 588 const HuffmanTreeCode* const tree) { 589 uint8_t code_length_bitdepth[CODE_LENGTH_CODES] = { 0 }; 590 uint16_t code_length_bitdepth_symbols[CODE_LENGTH_CODES] = { 0 }; 591 const int max_tokens = tree->num_symbols; 592 int num_tokens; 593 HuffmanTreeCode huffman_code; 594 huffman_code.num_symbols = CODE_LENGTH_CODES; 595 huffman_code.code_lengths = code_length_bitdepth; 596 huffman_code.codes = code_length_bitdepth_symbols; 597 598 VP8LPutBits(bw, 0, 1); 599 num_tokens = VP8LCreateCompressedHuffmanTree(tree, tokens, max_tokens); 600 { 601 uint32_t histogram[CODE_LENGTH_CODES] = { 0 }; 602 uint8_t buf_rle[CODE_LENGTH_CODES] = { 0 }; 603 int i; 604 for (i = 0; i < num_tokens; ++i) { 605 ++histogram[tokens[i].code]; 606 } 607 608 VP8LCreateHuffmanTree(histogram, 7, buf_rle, huff_tree, &huffman_code); 609 } 610 611 StoreHuffmanTreeOfHuffmanTreeToBitMask(bw, code_length_bitdepth); 612 ClearHuffmanTreeIfOnlyOneSymbol(&huffman_code); 613 { 614 int trailing_zero_bits = 0; 615 int trimmed_length = num_tokens; 616 int write_trimmed_length; 617 int length; 618 int i = num_tokens; 619 while (i-- > 0) { 620 const int ix = tokens[i].code; 621 if (ix == 0 || ix == 17 || ix == 18) { 622 --trimmed_length; // discount trailing zeros 623 trailing_zero_bits += code_length_bitdepth[ix]; 624 if (ix == 17) { 625 trailing_zero_bits += 3; 626 } else if (ix == 18) { 627 trailing_zero_bits += 7; 628 } 629 } else { 630 break; 631 } 632 } 633 write_trimmed_length = (trimmed_length > 1 && trailing_zero_bits > 12); 634 length = write_trimmed_length ? trimmed_length : num_tokens; 635 VP8LPutBits(bw, write_trimmed_length, 1); 636 if (write_trimmed_length) { 637 if (trimmed_length == 2) { 638 VP8LPutBits(bw, 0, 3 + 2); // nbitpairs=1, trimmed_length=2 639 } else { 640 const int nbits = BitsLog2Floor(trimmed_length - 2); 641 const int nbitpairs = nbits / 2 + 1; 642 assert(trimmed_length > 2); 643 assert(nbitpairs - 1 < 8); 644 VP8LPutBits(bw, nbitpairs - 1, 3); 645 VP8LPutBits(bw, trimmed_length - 2, nbitpairs * 2); 646 } 647 } 648 StoreHuffmanTreeToBitMask(bw, tokens, length, &huffman_code); 649 } 650 } 651 652 // 'huff_tree' and 'tokens' are pre-alloacted buffers. 653 static void StoreHuffmanCode(VP8LBitWriter* const bw, 654 HuffmanTree* const huff_tree, 655 HuffmanTreeToken* const tokens, 656 const HuffmanTreeCode* const huffman_code) { 657 int i; 658 int count = 0; 659 int symbols[2] = { 0, 0 }; 660 const int kMaxBits = 8; 661 const int kMaxSymbol = 1 << kMaxBits; 662 663 // Check whether it's a small tree. 664 for (i = 0; i < huffman_code->num_symbols && count < 3; ++i) { 665 if (huffman_code->code_lengths[i] != 0) { 666 if (count < 2) symbols[count] = i; 667 ++count; 668 } 669 } 670 671 if (count == 0) { // emit minimal tree for empty cases 672 // bits: small tree marker: 1, count-1: 0, large 8-bit code: 0, code: 0 673 VP8LPutBits(bw, 0x01, 4); 674 } else if (count <= 2 && symbols[0] < kMaxSymbol && symbols[1] < kMaxSymbol) { 675 VP8LPutBits(bw, 1, 1); // Small tree marker to encode 1 or 2 symbols. 676 VP8LPutBits(bw, count - 1, 1); 677 if (symbols[0] <= 1) { 678 VP8LPutBits(bw, 0, 1); // Code bit for small (1 bit) symbol value. 679 VP8LPutBits(bw, symbols[0], 1); 680 } else { 681 VP8LPutBits(bw, 1, 1); 682 VP8LPutBits(bw, symbols[0], 8); 683 } 684 if (count == 2) { 685 VP8LPutBits(bw, symbols[1], 8); 686 } 687 } else { 688 StoreFullHuffmanCode(bw, huff_tree, tokens, huffman_code); 689 } 690 } 691 692 static WEBP_INLINE void WriteHuffmanCode(VP8LBitWriter* const bw, 693 const HuffmanTreeCode* const code, 694 int code_index) { 695 const int depth = code->code_lengths[code_index]; 696 const int symbol = code->codes[code_index]; 697 VP8LPutBits(bw, symbol, depth); 698 } 699 700 static WEBP_INLINE void WriteHuffmanCodeWithExtraBits( 701 VP8LBitWriter* const bw, 702 const HuffmanTreeCode* const code, 703 int code_index, 704 int bits, 705 int n_bits) { 706 const int depth = code->code_lengths[code_index]; 707 const int symbol = code->codes[code_index]; 708 VP8LPutBits(bw, (bits << depth) | symbol, depth + n_bits); 709 } 710 711 static WebPEncodingError StoreImageToBitMask( 712 VP8LBitWriter* const bw, int width, int histo_bits, 713 const VP8LBackwardRefs* const refs, 714 const uint16_t* histogram_symbols, 715 const HuffmanTreeCode* const huffman_codes) { 716 const int histo_xsize = histo_bits ? VP8LSubSampleSize(width, histo_bits) : 1; 717 const int tile_mask = (histo_bits == 0) ? 0 : -(1 << histo_bits); 718 // x and y trace the position in the image. 719 int x = 0; 720 int y = 0; 721 int tile_x = x & tile_mask; 722 int tile_y = y & tile_mask; 723 int histogram_ix = histogram_symbols[0]; 724 const HuffmanTreeCode* codes = huffman_codes + 5 * histogram_ix; 725 VP8LRefsCursor c = VP8LRefsCursorInit(refs); 726 while (VP8LRefsCursorOk(&c)) { 727 const PixOrCopy* const v = c.cur_pos; 728 if ((tile_x != (x & tile_mask)) || (tile_y != (y & tile_mask))) { 729 tile_x = x & tile_mask; 730 tile_y = y & tile_mask; 731 histogram_ix = histogram_symbols[(y >> histo_bits) * histo_xsize + 732 (x >> histo_bits)]; 733 codes = huffman_codes + 5 * histogram_ix; 734 } 735 if (PixOrCopyIsLiteral(v)) { 736 static const uint8_t order[] = { 1, 2, 0, 3 }; 737 int k; 738 for (k = 0; k < 4; ++k) { 739 const int code = PixOrCopyLiteral(v, order[k]); 740 WriteHuffmanCode(bw, codes + k, code); 741 } 742 } else if (PixOrCopyIsCacheIdx(v)) { 743 const int code = PixOrCopyCacheIdx(v); 744 const int literal_ix = 256 + NUM_LENGTH_CODES + code; 745 WriteHuffmanCode(bw, codes, literal_ix); 746 } else { 747 int bits, n_bits; 748 int code; 749 750 const int distance = PixOrCopyDistance(v); 751 VP8LPrefixEncode(v->len, &code, &n_bits, &bits); 752 WriteHuffmanCodeWithExtraBits(bw, codes, 256 + code, bits, n_bits); 753 754 // Don't write the distance with the extra bits code since 755 // the distance can be up to 18 bits of extra bits, and the prefix 756 // 15 bits, totaling to 33, and our PutBits only supports up to 32 bits. 757 VP8LPrefixEncode(distance, &code, &n_bits, &bits); 758 WriteHuffmanCode(bw, codes + 4, code); 759 VP8LPutBits(bw, bits, n_bits); 760 } 761 x += PixOrCopyLength(v); 762 while (x >= width) { 763 x -= width; 764 ++y; 765 } 766 VP8LRefsCursorNext(&c); 767 } 768 return bw->error_ ? VP8_ENC_ERROR_OUT_OF_MEMORY : VP8_ENC_OK; 769 } 770 771 // Special case of EncodeImageInternal() for cache-bits=0, histo_bits=31 772 static WebPEncodingError EncodeImageNoHuffman(VP8LBitWriter* const bw, 773 const uint32_t* const argb, 774 VP8LHashChain* const hash_chain, 775 VP8LBackwardRefs* const refs_tmp1, 776 VP8LBackwardRefs* const refs_tmp2, 777 int width, int height, 778 int quality, int low_effort) { 779 int i; 780 int max_tokens = 0; 781 WebPEncodingError err = VP8_ENC_OK; 782 VP8LBackwardRefs* refs; 783 HuffmanTreeToken* tokens = NULL; 784 HuffmanTreeCode huffman_codes[5] = { { 0, NULL, NULL } }; 785 const uint16_t histogram_symbols[1] = { 0 }; // only one tree, one symbol 786 int cache_bits = 0; 787 VP8LHistogramSet* histogram_image = NULL; 788 HuffmanTree* const huff_tree = (HuffmanTree*)WebPSafeMalloc( 789 3ULL * CODE_LENGTH_CODES, sizeof(*huff_tree)); 790 if (huff_tree == NULL) { 791 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 792 goto Error; 793 } 794 795 // Calculate backward references from ARGB image. 796 if (!VP8LHashChainFill(hash_chain, quality, argb, width, height, 797 low_effort)) { 798 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 799 goto Error; 800 } 801 refs = VP8LGetBackwardReferences(width, height, argb, quality, 0, 802 kLZ77Standard | kLZ77RLE, &cache_bits, 803 hash_chain, refs_tmp1, refs_tmp2); 804 if (refs == NULL) { 805 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 806 goto Error; 807 } 808 histogram_image = VP8LAllocateHistogramSet(1, cache_bits); 809 if (histogram_image == NULL) { 810 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 811 goto Error; 812 } 813 VP8LHistogramSetClear(histogram_image); 814 815 // Build histogram image and symbols from backward references. 816 VP8LHistogramStoreRefs(refs, histogram_image->histograms[0]); 817 818 // Create Huffman bit lengths and codes for each histogram image. 819 assert(histogram_image->size == 1); 820 if (!GetHuffBitLengthsAndCodes(histogram_image, huffman_codes)) { 821 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 822 goto Error; 823 } 824 825 // No color cache, no Huffman image. 826 VP8LPutBits(bw, 0, 1); 827 828 // Find maximum number of symbols for the huffman tree-set. 829 for (i = 0; i < 5; ++i) { 830 HuffmanTreeCode* const codes = &huffman_codes[i]; 831 if (max_tokens < codes->num_symbols) { 832 max_tokens = codes->num_symbols; 833 } 834 } 835 836 tokens = (HuffmanTreeToken*)WebPSafeMalloc(max_tokens, sizeof(*tokens)); 837 if (tokens == NULL) { 838 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 839 goto Error; 840 } 841 842 // Store Huffman codes. 843 for (i = 0; i < 5; ++i) { 844 HuffmanTreeCode* const codes = &huffman_codes[i]; 845 StoreHuffmanCode(bw, huff_tree, tokens, codes); 846 ClearHuffmanTreeIfOnlyOneSymbol(codes); 847 } 848 849 // Store actual literals. 850 err = StoreImageToBitMask(bw, width, 0, refs, histogram_symbols, 851 huffman_codes); 852 853 Error: 854 WebPSafeFree(tokens); 855 WebPSafeFree(huff_tree); 856 VP8LFreeHistogramSet(histogram_image); 857 WebPSafeFree(huffman_codes[0].codes); 858 return err; 859 } 860 861 static WebPEncodingError EncodeImageInternal( 862 VP8LBitWriter* const bw, const uint32_t* const argb, 863 VP8LHashChain* const hash_chain, VP8LBackwardRefs refs_array[3], int width, 864 int height, int quality, int low_effort, int use_cache, 865 const CrunchConfig* const config, int* cache_bits, int histogram_bits, 866 size_t init_byte_position, int* const hdr_size, int* const data_size) { 867 WebPEncodingError err = VP8_ENC_OK; 868 const uint32_t histogram_image_xysize = 869 VP8LSubSampleSize(width, histogram_bits) * 870 VP8LSubSampleSize(height, histogram_bits); 871 VP8LHistogramSet* histogram_image = NULL; 872 VP8LHistogram* tmp_histo = NULL; 873 int histogram_image_size = 0; 874 size_t bit_array_size = 0; 875 HuffmanTree* const huff_tree = (HuffmanTree*)WebPSafeMalloc( 876 3ULL * CODE_LENGTH_CODES, sizeof(*huff_tree)); 877 HuffmanTreeToken* tokens = NULL; 878 HuffmanTreeCode* huffman_codes = NULL; 879 VP8LBackwardRefs* refs_best; 880 VP8LBackwardRefs* refs_tmp; 881 uint16_t* const histogram_symbols = 882 (uint16_t*)WebPSafeMalloc(histogram_image_xysize, 883 sizeof(*histogram_symbols)); 884 int lz77s_idx; 885 VP8LBitWriter bw_init = *bw, bw_best; 886 int hdr_size_tmp; 887 assert(histogram_bits >= MIN_HUFFMAN_BITS); 888 assert(histogram_bits <= MAX_HUFFMAN_BITS); 889 assert(hdr_size != NULL); 890 assert(data_size != NULL); 891 892 if (histogram_symbols == NULL) { 893 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 894 goto Error; 895 } 896 897 if (use_cache) { 898 // If the value is different from zero, it has been set during the 899 // palette analysis. 900 if (*cache_bits == 0) *cache_bits = MAX_COLOR_CACHE_BITS; 901 } else { 902 *cache_bits = 0; 903 } 904 // 'best_refs' is the reference to the best backward refs and points to one 905 // of refs_array[0] or refs_array[1]. 906 // Calculate backward references from ARGB image. 907 if (huff_tree == NULL || 908 !VP8LHashChainFill(hash_chain, quality, argb, width, height, 909 low_effort) || 910 !VP8LBitWriterInit(&bw_best, 0) || 911 (config->lz77s_types_to_try_size_ > 1 && 912 !VP8LBitWriterClone(bw, &bw_best))) { 913 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 914 goto Error; 915 } 916 for (lz77s_idx = 0; lz77s_idx < config->lz77s_types_to_try_size_; 917 ++lz77s_idx) { 918 refs_best = VP8LGetBackwardReferences( 919 width, height, argb, quality, low_effort, 920 config->lz77s_types_to_try_[lz77s_idx], cache_bits, hash_chain, 921 &refs_array[0], &refs_array[1]); 922 if (refs_best == NULL) { 923 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 924 goto Error; 925 } 926 // Keep the best references aside and use the other element from the first 927 // two as a temporary for later usage. 928 refs_tmp = &refs_array[refs_best == &refs_array[0] ? 1 : 0]; 929 930 histogram_image = 931 VP8LAllocateHistogramSet(histogram_image_xysize, *cache_bits); 932 tmp_histo = VP8LAllocateHistogram(*cache_bits); 933 if (histogram_image == NULL || tmp_histo == NULL) { 934 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 935 goto Error; 936 } 937 938 // Build histogram image and symbols from backward references. 939 if (!VP8LGetHistoImageSymbols(width, height, refs_best, quality, low_effort, 940 histogram_bits, *cache_bits, histogram_image, 941 tmp_histo, histogram_symbols)) { 942 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 943 goto Error; 944 } 945 // Create Huffman bit lengths and codes for each histogram image. 946 histogram_image_size = histogram_image->size; 947 bit_array_size = 5 * histogram_image_size; 948 huffman_codes = (HuffmanTreeCode*)WebPSafeCalloc(bit_array_size, 949 sizeof(*huffman_codes)); 950 // Note: some histogram_image entries may point to tmp_histos[], so the 951 // latter need to outlive the following call to GetHuffBitLengthsAndCodes(). 952 if (huffman_codes == NULL || 953 !GetHuffBitLengthsAndCodes(histogram_image, huffman_codes)) { 954 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 955 goto Error; 956 } 957 // Free combined histograms. 958 VP8LFreeHistogramSet(histogram_image); 959 histogram_image = NULL; 960 961 // Free scratch histograms. 962 VP8LFreeHistogram(tmp_histo); 963 tmp_histo = NULL; 964 965 // Color Cache parameters. 966 if (*cache_bits > 0) { 967 VP8LPutBits(bw, 1, 1); 968 VP8LPutBits(bw, *cache_bits, 4); 969 } else { 970 VP8LPutBits(bw, 0, 1); 971 } 972 973 // Huffman image + meta huffman. 974 { 975 const int write_histogram_image = (histogram_image_size > 1); 976 VP8LPutBits(bw, write_histogram_image, 1); 977 if (write_histogram_image) { 978 uint32_t* const histogram_argb = 979 (uint32_t*)WebPSafeMalloc(histogram_image_xysize, 980 sizeof(*histogram_argb)); 981 int max_index = 0; 982 uint32_t i; 983 if (histogram_argb == NULL) { 984 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 985 goto Error; 986 } 987 for (i = 0; i < histogram_image_xysize; ++i) { 988 const int symbol_index = histogram_symbols[i] & 0xffff; 989 histogram_argb[i] = (symbol_index << 8); 990 if (symbol_index >= max_index) { 991 max_index = symbol_index + 1; 992 } 993 } 994 histogram_image_size = max_index; 995 996 VP8LPutBits(bw, histogram_bits - 2, 3); 997 err = EncodeImageNoHuffman( 998 bw, histogram_argb, hash_chain, refs_tmp, &refs_array[2], 999 VP8LSubSampleSize(width, histogram_bits), 1000 VP8LSubSampleSize(height, histogram_bits), quality, low_effort); 1001 WebPSafeFree(histogram_argb); 1002 if (err != VP8_ENC_OK) goto Error; 1003 } 1004 } 1005 1006 // Store Huffman codes. 1007 { 1008 int i; 1009 int max_tokens = 0; 1010 // Find maximum number of symbols for the huffman tree-set. 1011 for (i = 0; i < 5 * histogram_image_size; ++i) { 1012 HuffmanTreeCode* const codes = &huffman_codes[i]; 1013 if (max_tokens < codes->num_symbols) { 1014 max_tokens = codes->num_symbols; 1015 } 1016 } 1017 tokens = (HuffmanTreeToken*)WebPSafeMalloc(max_tokens, sizeof(*tokens)); 1018 if (tokens == NULL) { 1019 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 1020 goto Error; 1021 } 1022 for (i = 0; i < 5 * histogram_image_size; ++i) { 1023 HuffmanTreeCode* const codes = &huffman_codes[i]; 1024 StoreHuffmanCode(bw, huff_tree, tokens, codes); 1025 ClearHuffmanTreeIfOnlyOneSymbol(codes); 1026 } 1027 } 1028 // Store actual literals. 1029 hdr_size_tmp = (int)(VP8LBitWriterNumBytes(bw) - init_byte_position); 1030 err = StoreImageToBitMask(bw, width, histogram_bits, refs_best, 1031 histogram_symbols, huffman_codes); 1032 // Keep track of the smallest image so far. 1033 if (lz77s_idx == 0 || 1034 VP8LBitWriterNumBytes(bw) < VP8LBitWriterNumBytes(&bw_best)) { 1035 *hdr_size = hdr_size_tmp; 1036 *data_size = 1037 (int)(VP8LBitWriterNumBytes(bw) - init_byte_position - *hdr_size); 1038 VP8LBitWriterSwap(bw, &bw_best); 1039 } 1040 // Reset the bit writer for the following iteration if any. 1041 if (config->lz77s_types_to_try_size_ > 1) VP8LBitWriterReset(&bw_init, bw); 1042 WebPSafeFree(tokens); 1043 tokens = NULL; 1044 if (huffman_codes != NULL) { 1045 WebPSafeFree(huffman_codes->codes); 1046 WebPSafeFree(huffman_codes); 1047 huffman_codes = NULL; 1048 } 1049 } 1050 VP8LBitWriterSwap(bw, &bw_best); 1051 1052 Error: 1053 WebPSafeFree(tokens); 1054 WebPSafeFree(huff_tree); 1055 VP8LFreeHistogramSet(histogram_image); 1056 VP8LFreeHistogram(tmp_histo); 1057 if (huffman_codes != NULL) { 1058 WebPSafeFree(huffman_codes->codes); 1059 WebPSafeFree(huffman_codes); 1060 } 1061 WebPSafeFree(histogram_symbols); 1062 VP8LBitWriterWipeOut(&bw_best); 1063 return err; 1064 } 1065 1066 // ----------------------------------------------------------------------------- 1067 // Transforms 1068 1069 static void ApplySubtractGreen(VP8LEncoder* const enc, int width, int height, 1070 VP8LBitWriter* const bw) { 1071 VP8LPutBits(bw, TRANSFORM_PRESENT, 1); 1072 VP8LPutBits(bw, SUBTRACT_GREEN, 2); 1073 VP8LSubtractGreenFromBlueAndRed(enc->argb_, width * height); 1074 } 1075 1076 static WebPEncodingError ApplyPredictFilter(const VP8LEncoder* const enc, 1077 int width, int height, 1078 int quality, int low_effort, 1079 int used_subtract_green, 1080 VP8LBitWriter* const bw) { 1081 const int pred_bits = enc->transform_bits_; 1082 const int transform_width = VP8LSubSampleSize(width, pred_bits); 1083 const int transform_height = VP8LSubSampleSize(height, pred_bits); 1084 // we disable near-lossless quantization if palette is used. 1085 const int near_lossless_strength = enc->use_palette_ ? 100 1086 : enc->config_->near_lossless; 1087 1088 VP8LResidualImage(width, height, pred_bits, low_effort, enc->argb_, 1089 enc->argb_scratch_, enc->transform_data_, 1090 near_lossless_strength, enc->config_->exact, 1091 used_subtract_green); 1092 VP8LPutBits(bw, TRANSFORM_PRESENT, 1); 1093 VP8LPutBits(bw, PREDICTOR_TRANSFORM, 2); 1094 assert(pred_bits >= 2); 1095 VP8LPutBits(bw, pred_bits - 2, 3); 1096 return EncodeImageNoHuffman( 1097 bw, enc->transform_data_, (VP8LHashChain*)&enc->hash_chain_, 1098 (VP8LBackwardRefs*)&enc->refs_[0], // cast const away 1099 (VP8LBackwardRefs*)&enc->refs_[1], transform_width, transform_height, 1100 quality, low_effort); 1101 } 1102 1103 static WebPEncodingError ApplyCrossColorFilter(const VP8LEncoder* const enc, 1104 int width, int height, 1105 int quality, int low_effort, 1106 VP8LBitWriter* const bw) { 1107 const int ccolor_transform_bits = enc->transform_bits_; 1108 const int transform_width = VP8LSubSampleSize(width, ccolor_transform_bits); 1109 const int transform_height = VP8LSubSampleSize(height, ccolor_transform_bits); 1110 1111 VP8LColorSpaceTransform(width, height, ccolor_transform_bits, quality, 1112 enc->argb_, enc->transform_data_); 1113 VP8LPutBits(bw, TRANSFORM_PRESENT, 1); 1114 VP8LPutBits(bw, CROSS_COLOR_TRANSFORM, 2); 1115 assert(ccolor_transform_bits >= 2); 1116 VP8LPutBits(bw, ccolor_transform_bits - 2, 3); 1117 return EncodeImageNoHuffman( 1118 bw, enc->transform_data_, (VP8LHashChain*)&enc->hash_chain_, 1119 (VP8LBackwardRefs*)&enc->refs_[0], // cast const away 1120 (VP8LBackwardRefs*)&enc->refs_[1], transform_width, transform_height, 1121 quality, low_effort); 1122 } 1123 1124 // ----------------------------------------------------------------------------- 1125 1126 static WebPEncodingError WriteRiffHeader(const WebPPicture* const pic, 1127 size_t riff_size, size_t vp8l_size) { 1128 uint8_t riff[RIFF_HEADER_SIZE + CHUNK_HEADER_SIZE + VP8L_SIGNATURE_SIZE] = { 1129 'R', 'I', 'F', 'F', 0, 0, 0, 0, 'W', 'E', 'B', 'P', 1130 'V', 'P', '8', 'L', 0, 0, 0, 0, VP8L_MAGIC_BYTE, 1131 }; 1132 PutLE32(riff + TAG_SIZE, (uint32_t)riff_size); 1133 PutLE32(riff + RIFF_HEADER_SIZE + TAG_SIZE, (uint32_t)vp8l_size); 1134 if (!pic->writer(riff, sizeof(riff), pic)) { 1135 return VP8_ENC_ERROR_BAD_WRITE; 1136 } 1137 return VP8_ENC_OK; 1138 } 1139 1140 static int WriteImageSize(const WebPPicture* const pic, 1141 VP8LBitWriter* const bw) { 1142 const int width = pic->width - 1; 1143 const int height = pic->height - 1; 1144 assert(width < WEBP_MAX_DIMENSION && height < WEBP_MAX_DIMENSION); 1145 1146 VP8LPutBits(bw, width, VP8L_IMAGE_SIZE_BITS); 1147 VP8LPutBits(bw, height, VP8L_IMAGE_SIZE_BITS); 1148 return !bw->error_; 1149 } 1150 1151 static int WriteRealAlphaAndVersion(VP8LBitWriter* const bw, int has_alpha) { 1152 VP8LPutBits(bw, has_alpha, 1); 1153 VP8LPutBits(bw, VP8L_VERSION, VP8L_VERSION_BITS); 1154 return !bw->error_; 1155 } 1156 1157 static WebPEncodingError WriteImage(const WebPPicture* const pic, 1158 VP8LBitWriter* const bw, 1159 size_t* const coded_size) { 1160 WebPEncodingError err = VP8_ENC_OK; 1161 const uint8_t* const webpll_data = VP8LBitWriterFinish(bw); 1162 const size_t webpll_size = VP8LBitWriterNumBytes(bw); 1163 const size_t vp8l_size = VP8L_SIGNATURE_SIZE + webpll_size; 1164 const size_t pad = vp8l_size & 1; 1165 const size_t riff_size = TAG_SIZE + CHUNK_HEADER_SIZE + vp8l_size + pad; 1166 1167 err = WriteRiffHeader(pic, riff_size, vp8l_size); 1168 if (err != VP8_ENC_OK) goto Error; 1169 1170 if (!pic->writer(webpll_data, webpll_size, pic)) { 1171 err = VP8_ENC_ERROR_BAD_WRITE; 1172 goto Error; 1173 } 1174 1175 if (pad) { 1176 const uint8_t pad_byte[1] = { 0 }; 1177 if (!pic->writer(pad_byte, 1, pic)) { 1178 err = VP8_ENC_ERROR_BAD_WRITE; 1179 goto Error; 1180 } 1181 } 1182 *coded_size = CHUNK_HEADER_SIZE + riff_size; 1183 return VP8_ENC_OK; 1184 1185 Error: 1186 return err; 1187 } 1188 1189 // ----------------------------------------------------------------------------- 1190 1191 static void ClearTransformBuffer(VP8LEncoder* const enc) { 1192 WebPSafeFree(enc->transform_mem_); 1193 enc->transform_mem_ = NULL; 1194 enc->transform_mem_size_ = 0; 1195 } 1196 1197 // Allocates the memory for argb (W x H) buffer, 2 rows of context for 1198 // prediction and transform data. 1199 // Flags influencing the memory allocated: 1200 // enc->transform_bits_ 1201 // enc->use_predict_, enc->use_cross_color_ 1202 static WebPEncodingError AllocateTransformBuffer(VP8LEncoder* const enc, 1203 int width, int height) { 1204 WebPEncodingError err = VP8_ENC_OK; 1205 const uint64_t image_size = width * height; 1206 // VP8LResidualImage needs room for 2 scanlines of uint32 pixels with an extra 1207 // pixel in each, plus 2 regular scanlines of bytes. 1208 // TODO(skal): Clean up by using arithmetic in bytes instead of words. 1209 const uint64_t argb_scratch_size = 1210 enc->use_predict_ 1211 ? (width + 1) * 2 + 1212 (width * 2 + sizeof(uint32_t) - 1) / sizeof(uint32_t) 1213 : 0; 1214 const uint64_t transform_data_size = 1215 (enc->use_predict_ || enc->use_cross_color_) 1216 ? VP8LSubSampleSize(width, enc->transform_bits_) * 1217 VP8LSubSampleSize(height, enc->transform_bits_) 1218 : 0; 1219 const uint64_t max_alignment_in_words = 1220 (WEBP_ALIGN_CST + sizeof(uint32_t) - 1) / sizeof(uint32_t); 1221 const uint64_t mem_size = 1222 image_size + max_alignment_in_words + 1223 argb_scratch_size + max_alignment_in_words + 1224 transform_data_size; 1225 uint32_t* mem = enc->transform_mem_; 1226 if (mem == NULL || mem_size > enc->transform_mem_size_) { 1227 ClearTransformBuffer(enc); 1228 mem = (uint32_t*)WebPSafeMalloc(mem_size, sizeof(*mem)); 1229 if (mem == NULL) { 1230 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 1231 goto Error; 1232 } 1233 enc->transform_mem_ = mem; 1234 enc->transform_mem_size_ = (size_t)mem_size; 1235 enc->argb_content_ = kEncoderNone; 1236 } 1237 enc->argb_ = mem; 1238 mem = (uint32_t*)WEBP_ALIGN(mem + image_size); 1239 enc->argb_scratch_ = mem; 1240 mem = (uint32_t*)WEBP_ALIGN(mem + argb_scratch_size); 1241 enc->transform_data_ = mem; 1242 1243 enc->current_width_ = width; 1244 Error: 1245 return err; 1246 } 1247 1248 static WebPEncodingError MakeInputImageCopy(VP8LEncoder* const enc) { 1249 WebPEncodingError err = VP8_ENC_OK; 1250 const WebPPicture* const picture = enc->pic_; 1251 const int width = picture->width; 1252 const int height = picture->height; 1253 1254 err = AllocateTransformBuffer(enc, width, height); 1255 if (err != VP8_ENC_OK) return err; 1256 if (enc->argb_content_ == kEncoderARGB) return VP8_ENC_OK; 1257 1258 { 1259 uint32_t* dst = enc->argb_; 1260 const uint32_t* src = picture->argb; 1261 int y; 1262 for (y = 0; y < height; ++y) { 1263 memcpy(dst, src, width * sizeof(*dst)); 1264 dst += width; 1265 src += picture->argb_stride; 1266 } 1267 } 1268 enc->argb_content_ = kEncoderARGB; 1269 assert(enc->current_width_ == width); 1270 return VP8_ENC_OK; 1271 } 1272 1273 // ----------------------------------------------------------------------------- 1274 1275 static WEBP_INLINE int SearchColorNoIdx(const uint32_t sorted[], uint32_t color, 1276 int hi) { 1277 int low = 0; 1278 if (sorted[low] == color) return low; // loop invariant: sorted[low] != color 1279 while (1) { 1280 const int mid = (low + hi) >> 1; 1281 if (sorted[mid] == color) { 1282 return mid; 1283 } else if (sorted[mid] < color) { 1284 low = mid; 1285 } else { 1286 hi = mid; 1287 } 1288 } 1289 } 1290 1291 #define APPLY_PALETTE_GREEDY_MAX 4 1292 1293 static WEBP_INLINE uint32_t SearchColorGreedy(const uint32_t palette[], 1294 int palette_size, 1295 uint32_t color) { 1296 (void)palette_size; 1297 assert(palette_size < APPLY_PALETTE_GREEDY_MAX); 1298 assert(3 == APPLY_PALETTE_GREEDY_MAX - 1); 1299 if (color == palette[0]) return 0; 1300 if (color == palette[1]) return 1; 1301 if (color == palette[2]) return 2; 1302 return 3; 1303 } 1304 1305 static WEBP_INLINE uint32_t ApplyPaletteHash0(uint32_t color) { 1306 // Focus on the green color. 1307 return (color >> 8) & 0xff; 1308 } 1309 1310 #define PALETTE_INV_SIZE_BITS 11 1311 #define PALETTE_INV_SIZE (1 << PALETTE_INV_SIZE_BITS) 1312 1313 static WEBP_INLINE uint32_t ApplyPaletteHash1(uint32_t color) { 1314 // Forget about alpha. 1315 return ((uint32_t)((color & 0x00ffffffu) * 4222244071ull)) >> 1316 (32 - PALETTE_INV_SIZE_BITS); 1317 } 1318 1319 static WEBP_INLINE uint32_t ApplyPaletteHash2(uint32_t color) { 1320 // Forget about alpha. 1321 return ((uint32_t)((color & 0x00ffffffu) * ((1ull << 31) - 1))) >> 1322 (32 - PALETTE_INV_SIZE_BITS); 1323 } 1324 1325 // Sort palette in increasing order and prepare an inverse mapping array. 1326 static void PrepareMapToPalette(const uint32_t palette[], int num_colors, 1327 uint32_t sorted[], uint32_t idx_map[]) { 1328 int i; 1329 memcpy(sorted, palette, num_colors * sizeof(*sorted)); 1330 qsort(sorted, num_colors, sizeof(*sorted), PaletteCompareColorsForQsort); 1331 for (i = 0; i < num_colors; ++i) { 1332 idx_map[SearchColorNoIdx(sorted, palette[i], num_colors)] = i; 1333 } 1334 } 1335 1336 // Use 1 pixel cache for ARGB pixels. 1337 #define APPLY_PALETTE_FOR(COLOR_INDEX) do { \ 1338 uint32_t prev_pix = palette[0]; \ 1339 uint32_t prev_idx = 0; \ 1340 for (y = 0; y < height; ++y) { \ 1341 for (x = 0; x < width; ++x) { \ 1342 const uint32_t pix = src[x]; \ 1343 if (pix != prev_pix) { \ 1344 prev_idx = COLOR_INDEX; \ 1345 prev_pix = pix; \ 1346 } \ 1347 tmp_row[x] = prev_idx; \ 1348 } \ 1349 VP8LBundleColorMap(tmp_row, width, xbits, dst); \ 1350 src += src_stride; \ 1351 dst += dst_stride; \ 1352 } \ 1353 } while (0) 1354 1355 // Remap argb values in src[] to packed palettes entries in dst[] 1356 // using 'row' as a temporary buffer of size 'width'. 1357 // We assume that all src[] values have a corresponding entry in the palette. 1358 // Note: src[] can be the same as dst[] 1359 static WebPEncodingError ApplyPalette(const uint32_t* src, uint32_t src_stride, 1360 uint32_t* dst, uint32_t dst_stride, 1361 const uint32_t* palette, int palette_size, 1362 int width, int height, int xbits) { 1363 // TODO(skal): this tmp buffer is not needed if VP8LBundleColorMap() can be 1364 // made to work in-place. 1365 uint8_t* const tmp_row = (uint8_t*)WebPSafeMalloc(width, sizeof(*tmp_row)); 1366 int x, y; 1367 1368 if (tmp_row == NULL) return VP8_ENC_ERROR_OUT_OF_MEMORY; 1369 1370 if (palette_size < APPLY_PALETTE_GREEDY_MAX) { 1371 APPLY_PALETTE_FOR(SearchColorGreedy(palette, palette_size, pix)); 1372 } else { 1373 int i, j; 1374 uint16_t buffer[PALETTE_INV_SIZE]; 1375 uint32_t (*const hash_functions[])(uint32_t) = { 1376 ApplyPaletteHash0, ApplyPaletteHash1, ApplyPaletteHash2 1377 }; 1378 1379 // Try to find a perfect hash function able to go from a color to an index 1380 // within 1 << PALETTE_INV_SIZE_BITS in order to build a hash map to go 1381 // from color to index in palette. 1382 for (i = 0; i < 3; ++i) { 1383 int use_LUT = 1; 1384 // Set each element in buffer to max uint16_t. 1385 memset(buffer, 0xff, sizeof(buffer)); 1386 for (j = 0; j < palette_size; ++j) { 1387 const uint32_t ind = hash_functions[i](palette[j]); 1388 if (buffer[ind] != 0xffffu) { 1389 use_LUT = 0; 1390 break; 1391 } else { 1392 buffer[ind] = j; 1393 } 1394 } 1395 if (use_LUT) break; 1396 } 1397 1398 if (i == 0) { 1399 APPLY_PALETTE_FOR(buffer[ApplyPaletteHash0(pix)]); 1400 } else if (i == 1) { 1401 APPLY_PALETTE_FOR(buffer[ApplyPaletteHash1(pix)]); 1402 } else if (i == 2) { 1403 APPLY_PALETTE_FOR(buffer[ApplyPaletteHash2(pix)]); 1404 } else { 1405 uint32_t idx_map[MAX_PALETTE_SIZE]; 1406 uint32_t palette_sorted[MAX_PALETTE_SIZE]; 1407 PrepareMapToPalette(palette, palette_size, palette_sorted, idx_map); 1408 APPLY_PALETTE_FOR( 1409 idx_map[SearchColorNoIdx(palette_sorted, pix, palette_size)]); 1410 } 1411 } 1412 WebPSafeFree(tmp_row); 1413 return VP8_ENC_OK; 1414 } 1415 #undef APPLY_PALETTE_FOR 1416 #undef PALETTE_INV_SIZE_BITS 1417 #undef PALETTE_INV_SIZE 1418 #undef APPLY_PALETTE_GREEDY_MAX 1419 1420 // Note: Expects "enc->palette_" to be set properly. 1421 static WebPEncodingError MapImageFromPalette(VP8LEncoder* const enc, 1422 int in_place) { 1423 WebPEncodingError err = VP8_ENC_OK; 1424 const WebPPicture* const pic = enc->pic_; 1425 const int width = pic->width; 1426 const int height = pic->height; 1427 const uint32_t* const palette = enc->palette_; 1428 const uint32_t* src = in_place ? enc->argb_ : pic->argb; 1429 const int src_stride = in_place ? enc->current_width_ : pic->argb_stride; 1430 const int palette_size = enc->palette_size_; 1431 int xbits; 1432 1433 // Replace each input pixel by corresponding palette index. 1434 // This is done line by line. 1435 if (palette_size <= 4) { 1436 xbits = (palette_size <= 2) ? 3 : 2; 1437 } else { 1438 xbits = (palette_size <= 16) ? 1 : 0; 1439 } 1440 1441 err = AllocateTransformBuffer(enc, VP8LSubSampleSize(width, xbits), height); 1442 if (err != VP8_ENC_OK) return err; 1443 1444 err = ApplyPalette(src, src_stride, 1445 enc->argb_, enc->current_width_, 1446 palette, palette_size, width, height, xbits); 1447 enc->argb_content_ = kEncoderPalette; 1448 return err; 1449 } 1450 1451 // Save palette_[] to bitstream. 1452 static WebPEncodingError EncodePalette(VP8LBitWriter* const bw, int low_effort, 1453 VP8LEncoder* const enc) { 1454 int i; 1455 uint32_t tmp_palette[MAX_PALETTE_SIZE]; 1456 const int palette_size = enc->palette_size_; 1457 const uint32_t* const palette = enc->palette_; 1458 VP8LPutBits(bw, TRANSFORM_PRESENT, 1); 1459 VP8LPutBits(bw, COLOR_INDEXING_TRANSFORM, 2); 1460 assert(palette_size >= 1 && palette_size <= MAX_PALETTE_SIZE); 1461 VP8LPutBits(bw, palette_size - 1, 8); 1462 for (i = palette_size - 1; i >= 1; --i) { 1463 tmp_palette[i] = VP8LSubPixels(palette[i], palette[i - 1]); 1464 } 1465 tmp_palette[0] = palette[0]; 1466 return EncodeImageNoHuffman(bw, tmp_palette, &enc->hash_chain_, 1467 &enc->refs_[0], &enc->refs_[1], palette_size, 1, 1468 20 /* quality */, low_effort); 1469 } 1470 1471 // ----------------------------------------------------------------------------- 1472 // VP8LEncoder 1473 1474 static VP8LEncoder* VP8LEncoderNew(const WebPConfig* const config, 1475 const WebPPicture* const picture) { 1476 VP8LEncoder* const enc = (VP8LEncoder*)WebPSafeCalloc(1ULL, sizeof(*enc)); 1477 if (enc == NULL) { 1478 WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY); 1479 return NULL; 1480 } 1481 enc->config_ = config; 1482 enc->pic_ = picture; 1483 enc->argb_content_ = kEncoderNone; 1484 1485 VP8LEncDspInit(); 1486 1487 return enc; 1488 } 1489 1490 static void VP8LEncoderDelete(VP8LEncoder* enc) { 1491 if (enc != NULL) { 1492 int i; 1493 VP8LHashChainClear(&enc->hash_chain_); 1494 for (i = 0; i < 3; ++i) VP8LBackwardRefsClear(&enc->refs_[i]); 1495 ClearTransformBuffer(enc); 1496 WebPSafeFree(enc); 1497 } 1498 } 1499 1500 // ----------------------------------------------------------------------------- 1501 // Main call 1502 1503 typedef struct { 1504 const WebPConfig* config_; 1505 const WebPPicture* picture_; 1506 VP8LBitWriter* bw_; 1507 VP8LEncoder* enc_; 1508 int use_cache_; 1509 CrunchConfig crunch_configs_[CRUNCH_CONFIGS_MAX]; 1510 int num_crunch_configs_; 1511 int red_and_blue_always_zero_; 1512 WebPEncodingError err_; 1513 WebPAuxStats* stats_; 1514 } StreamEncodeContext; 1515 1516 static int EncodeStreamHook(void* input, void* data2) { 1517 StreamEncodeContext* const params = (StreamEncodeContext*)input; 1518 const WebPConfig* const config = params->config_; 1519 const WebPPicture* const picture = params->picture_; 1520 VP8LBitWriter* const bw = params->bw_; 1521 VP8LEncoder* const enc = params->enc_; 1522 const int use_cache = params->use_cache_; 1523 const CrunchConfig* const crunch_configs = params->crunch_configs_; 1524 const int num_crunch_configs = params->num_crunch_configs_; 1525 const int red_and_blue_always_zero = params->red_and_blue_always_zero_; 1526 #if !defined(WEBP_DISABLE_STATS) 1527 WebPAuxStats* const stats = params->stats_; 1528 #endif 1529 WebPEncodingError err = VP8_ENC_OK; 1530 const int quality = (int)config->quality; 1531 const int low_effort = (config->method == 0); 1532 #if (WEBP_NEAR_LOSSLESS == 1) 1533 const int width = picture->width; 1534 #endif 1535 const int height = picture->height; 1536 const size_t byte_position = VP8LBitWriterNumBytes(bw); 1537 #if (WEBP_NEAR_LOSSLESS == 1) 1538 int use_near_lossless = 0; 1539 #endif 1540 int hdr_size = 0; 1541 int data_size = 0; 1542 int use_delta_palette = 0; 1543 int idx; 1544 size_t best_size = 0; 1545 VP8LBitWriter bw_init = *bw, bw_best; 1546 (void)data2; 1547 1548 if (!VP8LBitWriterInit(&bw_best, 0) || 1549 (num_crunch_configs > 1 && !VP8LBitWriterClone(bw, &bw_best))) { 1550 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 1551 goto Error; 1552 } 1553 1554 for (idx = 0; idx < num_crunch_configs; ++idx) { 1555 const int entropy_idx = crunch_configs[idx].entropy_idx_; 1556 enc->use_palette_ = (entropy_idx == kPalette); 1557 enc->use_subtract_green_ = 1558 (entropy_idx == kSubGreen) || (entropy_idx == kSpatialSubGreen); 1559 enc->use_predict_ = 1560 (entropy_idx == kSpatial) || (entropy_idx == kSpatialSubGreen); 1561 if (low_effort) { 1562 enc->use_cross_color_ = 0; 1563 } else { 1564 enc->use_cross_color_ = red_and_blue_always_zero ? 0 : enc->use_predict_; 1565 } 1566 // Reset any parameter in the encoder that is set in the previous iteration. 1567 enc->cache_bits_ = 0; 1568 VP8LBackwardRefsClear(&enc->refs_[0]); 1569 VP8LBackwardRefsClear(&enc->refs_[1]); 1570 1571 #if (WEBP_NEAR_LOSSLESS == 1) 1572 // Apply near-lossless preprocessing. 1573 use_near_lossless = (config->near_lossless < 100) && !enc->use_palette_ && 1574 !enc->use_predict_; 1575 if (use_near_lossless) { 1576 err = AllocateTransformBuffer(enc, width, height); 1577 if (err != VP8_ENC_OK) goto Error; 1578 if ((enc->argb_content_ != kEncoderNearLossless) && 1579 !VP8ApplyNearLossless(picture, config->near_lossless, enc->argb_)) { 1580 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 1581 goto Error; 1582 } 1583 enc->argb_content_ = kEncoderNearLossless; 1584 } else { 1585 enc->argb_content_ = kEncoderNone; 1586 } 1587 #else 1588 enc->argb_content_ = kEncoderNone; 1589 #endif 1590 1591 // Encode palette 1592 if (enc->use_palette_) { 1593 err = EncodePalette(bw, low_effort, enc); 1594 if (err != VP8_ENC_OK) goto Error; 1595 err = MapImageFromPalette(enc, use_delta_palette); 1596 if (err != VP8_ENC_OK) goto Error; 1597 // If using a color cache, do not have it bigger than the number of 1598 // colors. 1599 if (use_cache && enc->palette_size_ < (1 << MAX_COLOR_CACHE_BITS)) { 1600 enc->cache_bits_ = BitsLog2Floor(enc->palette_size_) + 1; 1601 } 1602 } 1603 if (!use_delta_palette) { 1604 // In case image is not packed. 1605 if (enc->argb_content_ != kEncoderNearLossless && 1606 enc->argb_content_ != kEncoderPalette) { 1607 err = MakeInputImageCopy(enc); 1608 if (err != VP8_ENC_OK) goto Error; 1609 } 1610 1611 // ----------------------------------------------------------------------- 1612 // Apply transforms and write transform data. 1613 1614 if (enc->use_subtract_green_) { 1615 ApplySubtractGreen(enc, enc->current_width_, height, bw); 1616 } 1617 1618 if (enc->use_predict_) { 1619 err = ApplyPredictFilter(enc, enc->current_width_, height, quality, 1620 low_effort, enc->use_subtract_green_, bw); 1621 if (err != VP8_ENC_OK) goto Error; 1622 } 1623 1624 if (enc->use_cross_color_) { 1625 err = ApplyCrossColorFilter(enc, enc->current_width_, height, quality, 1626 low_effort, bw); 1627 if (err != VP8_ENC_OK) goto Error; 1628 } 1629 } 1630 1631 VP8LPutBits(bw, !TRANSFORM_PRESENT, 1); // No more transforms. 1632 1633 // ------------------------------------------------------------------------- 1634 // Encode and write the transformed image. 1635 err = EncodeImageInternal(bw, enc->argb_, &enc->hash_chain_, enc->refs_, 1636 enc->current_width_, height, quality, low_effort, 1637 use_cache, &crunch_configs[idx], 1638 &enc->cache_bits_, enc->histo_bits_, 1639 byte_position, &hdr_size, &data_size); 1640 if (err != VP8_ENC_OK) goto Error; 1641 1642 // If we are better than what we already have. 1643 if (idx == 0 || VP8LBitWriterNumBytes(bw) < best_size) { 1644 best_size = VP8LBitWriterNumBytes(bw); 1645 // Store the BitWriter. 1646 VP8LBitWriterSwap(bw, &bw_best); 1647 #if !defined(WEBP_DISABLE_STATS) 1648 // Update the stats. 1649 if (stats != NULL) { 1650 stats->lossless_features = 0; 1651 if (enc->use_predict_) stats->lossless_features |= 1; 1652 if (enc->use_cross_color_) stats->lossless_features |= 2; 1653 if (enc->use_subtract_green_) stats->lossless_features |= 4; 1654 if (enc->use_palette_) stats->lossless_features |= 8; 1655 stats->histogram_bits = enc->histo_bits_; 1656 stats->transform_bits = enc->transform_bits_; 1657 stats->cache_bits = enc->cache_bits_; 1658 stats->palette_size = enc->palette_size_; 1659 stats->lossless_size = (int)(best_size - byte_position); 1660 stats->lossless_hdr_size = hdr_size; 1661 stats->lossless_data_size = data_size; 1662 } 1663 #endif 1664 } 1665 // Reset the bit writer for the following iteration if any. 1666 if (num_crunch_configs > 1) VP8LBitWriterReset(&bw_init, bw); 1667 } 1668 VP8LBitWriterSwap(&bw_best, bw); 1669 1670 Error: 1671 VP8LBitWriterWipeOut(&bw_best); 1672 params->err_ = err; 1673 // The hook should return false in case of error. 1674 return (err == VP8_ENC_OK); 1675 } 1676 1677 WebPEncodingError VP8LEncodeStream(const WebPConfig* const config, 1678 const WebPPicture* const picture, 1679 VP8LBitWriter* const bw_main, 1680 int use_cache) { 1681 WebPEncodingError err = VP8_ENC_OK; 1682 VP8LEncoder* const enc_main = VP8LEncoderNew(config, picture); 1683 VP8LEncoder* enc_side = NULL; 1684 CrunchConfig crunch_configs[CRUNCH_CONFIGS_MAX]; 1685 int num_crunch_configs_main, num_crunch_configs_side = 0; 1686 int idx; 1687 int red_and_blue_always_zero = 0; 1688 WebPWorker worker_main, worker_side; 1689 StreamEncodeContext params_main, params_side; 1690 // The main thread uses picture->stats, the side thread uses stats_side. 1691 WebPAuxStats stats_side; 1692 VP8LBitWriter bw_side; 1693 const WebPWorkerInterface* const worker_interface = WebPGetWorkerInterface(); 1694 int ok_main; 1695 1696 // Analyze image (entropy, num_palettes etc) 1697 if (enc_main == NULL || 1698 !EncoderAnalyze(enc_main, crunch_configs, &num_crunch_configs_main, 1699 &red_and_blue_always_zero) || 1700 !EncoderInit(enc_main) || !VP8LBitWriterInit(&bw_side, 0)) { 1701 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 1702 goto Error; 1703 } 1704 1705 // Split the configs between the main and side threads (if any). 1706 if (config->thread_level > 0) { 1707 num_crunch_configs_side = num_crunch_configs_main / 2; 1708 for (idx = 0; idx < num_crunch_configs_side; ++idx) { 1709 params_side.crunch_configs_[idx] = 1710 crunch_configs[num_crunch_configs_main - num_crunch_configs_side + 1711 idx]; 1712 } 1713 params_side.num_crunch_configs_ = num_crunch_configs_side; 1714 } 1715 num_crunch_configs_main -= num_crunch_configs_side; 1716 for (idx = 0; idx < num_crunch_configs_main; ++idx) { 1717 params_main.crunch_configs_[idx] = crunch_configs[idx]; 1718 } 1719 params_main.num_crunch_configs_ = num_crunch_configs_main; 1720 1721 // Fill in the parameters for the thread workers. 1722 { 1723 const int params_size = (num_crunch_configs_side > 0) ? 2 : 1; 1724 for (idx = 0; idx < params_size; ++idx) { 1725 // Create the parameters for each worker. 1726 WebPWorker* const worker = (idx == 0) ? &worker_main : &worker_side; 1727 StreamEncodeContext* const param = 1728 (idx == 0) ? ¶ms_main : ¶ms_side; 1729 param->config_ = config; 1730 param->picture_ = picture; 1731 param->use_cache_ = use_cache; 1732 param->red_and_blue_always_zero_ = red_and_blue_always_zero; 1733 if (idx == 0) { 1734 param->stats_ = picture->stats; 1735 param->bw_ = bw_main; 1736 param->enc_ = enc_main; 1737 } else { 1738 param->stats_ = (picture->stats == NULL) ? NULL : &stats_side; 1739 // Create a side bit writer. 1740 if (!VP8LBitWriterClone(bw_main, &bw_side)) { 1741 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 1742 goto Error; 1743 } 1744 param->bw_ = &bw_side; 1745 // Create a side encoder. 1746 enc_side = VP8LEncoderNew(config, picture); 1747 if (enc_side == NULL || !EncoderInit(enc_side)) { 1748 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 1749 goto Error; 1750 } 1751 // Copy the values that were computed for the main encoder. 1752 enc_side->histo_bits_ = enc_main->histo_bits_; 1753 enc_side->transform_bits_ = enc_main->transform_bits_; 1754 enc_side->palette_size_ = enc_main->palette_size_; 1755 memcpy(enc_side->palette_, enc_main->palette_, 1756 sizeof(enc_main->palette_)); 1757 param->enc_ = enc_side; 1758 } 1759 // Create the workers. 1760 worker_interface->Init(worker); 1761 worker->data1 = param; 1762 worker->data2 = NULL; 1763 worker->hook = EncodeStreamHook; 1764 } 1765 } 1766 1767 // Start the second thread if needed. 1768 if (num_crunch_configs_side != 0) { 1769 if (!worker_interface->Reset(&worker_side)) { 1770 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 1771 goto Error; 1772 } 1773 #if !defined(WEBP_DISABLE_STATS) 1774 // This line is here and not in the param initialization above to remove a 1775 // Clang static analyzer warning. 1776 if (picture->stats != NULL) { 1777 memcpy(&stats_side, picture->stats, sizeof(stats_side)); 1778 } 1779 #endif 1780 // This line is only useful to remove a Clang static analyzer warning. 1781 params_side.err_ = VP8_ENC_OK; 1782 worker_interface->Launch(&worker_side); 1783 } 1784 // Execute the main thread. 1785 worker_interface->Execute(&worker_main); 1786 ok_main = worker_interface->Sync(&worker_main); 1787 worker_interface->End(&worker_main); 1788 if (num_crunch_configs_side != 0) { 1789 // Wait for the second thread. 1790 const int ok_side = worker_interface->Sync(&worker_side); 1791 worker_interface->End(&worker_side); 1792 if (!ok_main || !ok_side) { 1793 err = ok_main ? params_side.err_ : params_main.err_; 1794 goto Error; 1795 } 1796 if (VP8LBitWriterNumBytes(&bw_side) < VP8LBitWriterNumBytes(bw_main)) { 1797 VP8LBitWriterSwap(bw_main, &bw_side); 1798 #if !defined(WEBP_DISABLE_STATS) 1799 if (picture->stats != NULL) { 1800 memcpy(picture->stats, &stats_side, sizeof(*picture->stats)); 1801 } 1802 #endif 1803 } 1804 } else { 1805 if (!ok_main) { 1806 err = params_main.err_; 1807 goto Error; 1808 } 1809 } 1810 1811 Error: 1812 VP8LBitWriterWipeOut(&bw_side); 1813 VP8LEncoderDelete(enc_main); 1814 VP8LEncoderDelete(enc_side); 1815 return err; 1816 } 1817 1818 #undef CRUNCH_CONFIGS_MAX 1819 #undef CRUNCH_CONFIGS_LZ77_MAX 1820 1821 int VP8LEncodeImage(const WebPConfig* const config, 1822 const WebPPicture* const picture) { 1823 int width, height; 1824 int has_alpha; 1825 size_t coded_size; 1826 int percent = 0; 1827 int initial_size; 1828 WebPEncodingError err = VP8_ENC_OK; 1829 VP8LBitWriter bw; 1830 1831 if (picture == NULL) return 0; 1832 1833 if (config == NULL || picture->argb == NULL) { 1834 err = VP8_ENC_ERROR_NULL_PARAMETER; 1835 WebPEncodingSetError(picture, err); 1836 return 0; 1837 } 1838 1839 width = picture->width; 1840 height = picture->height; 1841 // Initialize BitWriter with size corresponding to 16 bpp to photo images and 1842 // 8 bpp for graphical images. 1843 initial_size = (config->image_hint == WEBP_HINT_GRAPH) ? 1844 width * height : width * height * 2; 1845 if (!VP8LBitWriterInit(&bw, initial_size)) { 1846 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 1847 goto Error; 1848 } 1849 1850 if (!WebPReportProgress(picture, 1, &percent)) { 1851 UserAbort: 1852 err = VP8_ENC_ERROR_USER_ABORT; 1853 goto Error; 1854 } 1855 // Reset stats (for pure lossless coding) 1856 if (picture->stats != NULL) { 1857 WebPAuxStats* const stats = picture->stats; 1858 memset(stats, 0, sizeof(*stats)); 1859 stats->PSNR[0] = 99.f; 1860 stats->PSNR[1] = 99.f; 1861 stats->PSNR[2] = 99.f; 1862 stats->PSNR[3] = 99.f; 1863 stats->PSNR[4] = 99.f; 1864 } 1865 1866 // Write image size. 1867 if (!WriteImageSize(picture, &bw)) { 1868 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 1869 goto Error; 1870 } 1871 1872 has_alpha = WebPPictureHasTransparency(picture); 1873 // Write the non-trivial Alpha flag and lossless version. 1874 if (!WriteRealAlphaAndVersion(&bw, has_alpha)) { 1875 err = VP8_ENC_ERROR_OUT_OF_MEMORY; 1876 goto Error; 1877 } 1878 1879 if (!WebPReportProgress(picture, 5, &percent)) goto UserAbort; 1880 1881 // Encode main image stream. 1882 err = VP8LEncodeStream(config, picture, &bw, 1 /*use_cache*/); 1883 if (err != VP8_ENC_OK) goto Error; 1884 1885 if (!WebPReportProgress(picture, 90, &percent)) goto UserAbort; 1886 1887 // Finish the RIFF chunk. 1888 err = WriteImage(picture, &bw, &coded_size); 1889 if (err != VP8_ENC_OK) goto Error; 1890 1891 if (!WebPReportProgress(picture, 100, &percent)) goto UserAbort; 1892 1893 #if !defined(WEBP_DISABLE_STATS) 1894 // Save size. 1895 if (picture->stats != NULL) { 1896 picture->stats->coded_size += (int)coded_size; 1897 picture->stats->lossless_size = (int)coded_size; 1898 } 1899 #endif 1900 1901 if (picture->extra_info != NULL) { 1902 const int mb_w = (width + 15) >> 4; 1903 const int mb_h = (height + 15) >> 4; 1904 memset(picture->extra_info, 0, mb_w * mb_h * sizeof(*picture->extra_info)); 1905 } 1906 1907 Error: 1908 if (bw.error_) err = VP8_ENC_ERROR_OUT_OF_MEMORY; 1909 VP8LBitWriterWipeOut(&bw); 1910 if (err != VP8_ENC_OK) { 1911 WebPEncodingSetError(picture, err); 1912 return 0; 1913 } 1914 return 1; 1915 } 1916 1917 //------------------------------------------------------------------------------ 1918