Mirror of https://github.com/webmproject/libwebp.git (synced 2024-12-27 06:08:21 +01:00)
Add backward_ref, histogram & huffman encode modules from lossless.
Change-Id: Iac056d27972956782defa182caa3ea400cdb77f8
This commit is contained in:
parent
fdccaaddcf
commit
bc7037465d
787  src/enc/backward_references.c  Normal file
@@ -0,0 +1,787 @@
// Copyright 2012 Google Inc. All Rights Reserved.
//
// This code is licensed under the same terms as WebM:
// Software License Agreement: http://www.webmproject.org/license/software/
// Additional IP Rights Grant: http://www.webmproject.org/license/additional/
// -----------------------------------------------------------------------------
//
// Author: Jyrki Alakuijala (jyrki@google.com)
//

#include <assert.h>
#include <math.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#include "./backward_references.h"
#include "./histogram.h"
#include "../utils/color_cache.h"

#define VALUES_IN_BYTE 256

static const uint8_t plane_to_code_lut[128] = {
  96, 73, 55, 39, 23, 13, 5, 1, 255, 255, 255, 255, 255, 255, 255, 255,
  101, 78, 58, 42, 26, 16, 8, 2, 0, 3, 9, 17, 27, 43, 59, 79,
  102, 86, 62, 46, 32, 20, 10, 6, 4, 7, 11, 21, 33, 47, 63, 87,
  105, 90, 70, 52, 37, 28, 18, 14, 12, 15, 19, 29, 38, 53, 71, 91,
  110, 99, 82, 66, 48, 35, 30, 24, 22, 25, 31, 36, 49, 67, 83, 100,
  115, 108, 94, 76, 64, 50, 44, 40, 34, 41, 45, 51, 65, 77, 95, 109,
  118, 113, 103, 92, 80, 68, 60, 56, 54, 57, 61, 69, 81, 93, 104, 114,
  119, 116, 111, 106, 97, 88, 84, 74, 72, 75, 85, 89, 98, 107, 112, 117,
};

static const int kMinLength = 2;

int DistanceToPlaneCode(int xsize, int dist) {
  int yoffset = dist / xsize;
  int xoffset = dist - yoffset * xsize;
  if (xoffset <= 8 && yoffset < 8) {
    return plane_to_code_lut[yoffset * 16 + 8 - xoffset] + 1;
  } else if (xoffset > xsize - 8 && yoffset < 7) {
    return plane_to_code_lut[(yoffset + 1) * 16 + 8 + (xsize - xoffset)] + 1;
  }
  return dist + 120;
}
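// Example: with xsize == 100, dist == 101 refers to the pixel one row up
// and one to the left (yoffset == 1, xoffset == 1), giving
// plane_to_code_lut[1 * 16 + 8 - 1] + 1 == 3; distances that fall outside
// the short-distance neighborhood simply map to dist + 120.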

static WEBP_INLINE int FindMatchLength(const uint32_t* array1,
                                       const uint32_t* array2,
                                       const int max_limit) {
  int matched = 0;
  while (matched < max_limit && array1[matched] == array2[matched]) {
    ++matched;
  }
  return matched;
}

#define HASH_BITS 18
#define HASH_SIZE (1 << HASH_BITS)
static const uint64_t kHashMultiplier = 0xc6a4a7935bd1e995ULL;
// A window with 1M pixels (4 megabytes) - 120 special codes for
// short distances.
static const int kWindowSize = (1 << 20) - 120;

static WEBP_INLINE uint64_t GetHash64(uint64_t num) {
  num *= kHashMultiplier;
  num >>= 64 - HASH_BITS;
  return num;
}

static WEBP_INLINE uint64_t GetPixPair(const uint32_t* argb) {
  return ((uint64_t)(argb[1]) << 32) | argb[0];
}

typedef struct {
  // Stores the most recently added position with the given hash value.
  int32_t hash_to_first_index_[HASH_SIZE];
  // chain_[pos] stores the previous position with the same hash value
  // for every pixel in the image.
  int32_t* chain_;
} VP8LHashChain;

static int VP8LHashChain_Init(VP8LHashChain* p, int size) {
  int i;
  p->chain_ = (int*)malloc(size * sizeof(*p->chain_));
  if (!p->chain_) {
    return 0;
  }
  for (i = 0; i < size; ++i) {
    p->chain_[i] = -1;
  }
  for (i = 0; i < HASH_SIZE; ++i) {
    p->hash_to_first_index_[i] = -1;
  }
  return 1;
}

static void VP8LHashChain_Delete(VP8LHashChain* p) {
  if (p != NULL) {
    free(p->chain_);
  }
}

static void VP8LHashChain_Insert(VP8LHashChain* p,
                                 const uint32_t* argb, int32_t ix) {
  // Insertion of two pixels at a time.
  const uint64_t key = GetPixPair(argb);
  const uint64_t hash_code = GetHash64(key);
  p->chain_[ix] = p->hash_to_first_index_[hash_code];
  p->hash_to_first_index_[hash_code] = ix;
}

static int VP8LHashChain_FindCopy(VP8LHashChain* p,
                                  int quality,
                                  int index, int xsize,
                                  const uint32_t* argb,
                                  int maxlen, int* offset_out,
                                  int* len_out) {
  const uint64_t next_two_pixels = GetPixPair(&argb[index]);
  const uint64_t hash_code = GetHash64(next_two_pixels);
  int prev_length = 0;
  int64_t best_val = 0;
  int give_up = quality * 3 / 4 + 25;
  const int min_pos = (index > kWindowSize) ? index - kWindowSize : 0;
  int32_t pos;
  int64_t length;
  int64_t val;
  int x;
  int y;
  int len = 0;
  int offset = 0;
  for (pos = p->hash_to_first_index_[hash_code];
       pos >= min_pos;
       pos = p->chain_[pos]) {
    if (give_up < 0) {
      if (give_up < -quality * 8 ||
          best_val >= 0xff0000) {
        break;
      }
    }
    --give_up;
    if (len != 0 && argb[pos + len - 1] != argb[index + len - 1]) {
      continue;
    }
    length = FindMatchLength(argb + pos, argb + index, maxlen);
    if (length < prev_length) {
      continue;
    }
    val = 65536 * length;
    // Favoring 2d locality here gives savings for certain images.
    if (index - pos < 9 * xsize) {
      y = (index - pos) / xsize;
      x = (index - pos) % xsize;
      if (x > xsize / 2) {
        x = xsize - x;
      }
      if (x <= 7 && x >= -8) {
        val -= y * y + x * x;
      } else {
        val -= 9 * 9 + 9 * 9;
      }
    } else {
      val -= 9 * 9 + 9 * 9;
    }
    if (best_val < val) {
      prev_length = length;
      best_val = val;
      len = length;
      offset = index - pos;
      if (length >= kMaxLength) {
        break;
      }
      if ((offset == 1 || offset == xsize) && len >= 128) {
        break;
      }
    }
  }
  *offset_out = offset;
  *len_out = len;
  return len >= kMinLength;
}

static WEBP_INLINE void PushBackCopy(int length,
                                     PixOrCopy* stream,
                                     int* stream_size) {
  while (length >= kMaxLength) {
    stream[*stream_size] = PixOrCopyCreateCopy(1, kMaxLength);
    ++(*stream_size);
    length -= kMaxLength;
  }
  if (length > 0) {
    stream[*stream_size] = PixOrCopyCreateCopy(1, length);
    ++(*stream_size);
  }
}

void BackwardReferencesRle(int xsize, int ysize, const uint32_t* argb,
                           PixOrCopy* stream, int* stream_size) {
  const int pix_count = xsize * ysize;
  int streak = 0;
  int i;
  *stream_size = 0;
  for (i = 0; i < pix_count; ++i) {
    if (i >= 1 && argb[i] == argb[i - 1]) {
      ++streak;
    } else {
      PushBackCopy(streak, stream, stream_size);
      streak = 0;
      stream[*stream_size] = PixOrCopyCreateLiteral(argb[i]);
      ++(*stream_size);
    }
  }
  PushBackCopy(streak, stream, stream_size);
}

// Returns 1 when successful.
int BackwardReferencesHashChain(int xsize, int ysize, int use_palette,
                                const uint32_t* argb, int palette_bits,
                                int quality,
                                PixOrCopy* stream, int* stream_size) {
  const int pix_count = xsize * ysize;
  int i;
  int ok = 0;
  VP8LHashChain* hash_chain = (VP8LHashChain*)malloc(sizeof(*hash_chain));
  VP8LColorCache hashers;
  if (!hash_chain ||
      !VP8LColorCacheInit(&hashers, palette_bits) ||
      !VP8LHashChain_Init(hash_chain, pix_count)) {
    goto Error;
  }
  *stream_size = 0;
  for (i = 0; i < pix_count; ) {
    // Alternative#1: Code the pixels starting at 'i' using backward reference.
    int offset = 0;
    int len = 0;
    if (i < pix_count - 1) {  // FindCopy(i,..) reads pixels at [i] and [i + 1].
      int maxlen = pix_count - i;
      if (maxlen > kMaxLength) {
        maxlen = kMaxLength;
      }
      VP8LHashChain_FindCopy(hash_chain, quality,
                             i, xsize, argb, maxlen, &offset, &len);
    }
    if (len >= kMinLength) {
      // Alternative#2: Insert the pixel at 'i' as literal, and code the
      // pixels starting at 'i + 1' using backward reference.
      int offset2 = 0;
      int len2 = 0;
      int k;
      VP8LHashChain_Insert(hash_chain, &argb[i], i);
      if (i < pix_count - 2) {  // FindCopy(i+1,..) reads [i + 1] and [i + 2].
        int maxlen = pix_count - (i + 1);
        if (maxlen > kMaxLength) {
          maxlen = kMaxLength;
        }
        VP8LHashChain_FindCopy(hash_chain, quality,
                               i + 1, xsize, argb, maxlen, &offset2, &len2);
        if (len2 > len + 1) {
          // Alternative#2 is a better match. So push pixel at 'i' as literal.
          if (use_palette && VP8LColorCacheContains(&hashers, argb[i])) {
            const int ix = VP8LColorCacheGetIndex(&hashers, argb[i]);
            stream[*stream_size] = PixOrCopyCreatePaletteIx(ix);
          } else {
            stream[*stream_size] = PixOrCopyCreateLiteral(argb[i]);
          }
          ++(*stream_size);
          VP8LColorCacheInsert(&hashers, argb[i]);
          i++;  // Backward reference to be done for next pixel.
          len = len2;
          offset = offset2;
        }
      }
      if (len >= kMaxLength) {
        len = kMaxLength - 1;
      }
      stream[*stream_size] = PixOrCopyCreateCopy(offset, len);
      ++(*stream_size);
      for (k = 0; k < len; ++k) {
        VP8LColorCacheInsert(&hashers, argb[i + k]);
        if (k != 0 && i + k + 1 < pix_count) {
          // Add to the hash_chain (but cannot add the last pixel).
          VP8LHashChain_Insert(hash_chain, &argb[i + k], i + k);
        }
      }
      i += len;
    } else {
      if (use_palette && VP8LColorCacheContains(&hashers, argb[i])) {
        // push pixel as a palette pixel
        int ix = VP8LColorCacheGetIndex(&hashers, argb[i]);
        stream[*stream_size] = PixOrCopyCreatePaletteIx(ix);
      } else {
        stream[*stream_size] = PixOrCopyCreateLiteral(argb[i]);
      }
      ++(*stream_size);
      VP8LColorCacheInsert(&hashers, argb[i]);
      if (i + 1 < pix_count) {
        VP8LHashChain_Insert(hash_chain, &argb[i], i);
      }
      ++i;
    }
  }
  ok = 1;
 Error:
  VP8LHashChain_Delete(hash_chain);
  free(hash_chain);
  VP8LColorCacheDelete(&hashers);
  return ok;
}

typedef struct {
  double alpha_[VALUES_IN_BYTE];
  double red_[VALUES_IN_BYTE];
  double literal_[PIX_OR_COPY_CODES_MAX];
  double blue_[VALUES_IN_BYTE];
  double distance_[DISTANCE_CODES_MAX];
  int palette_bits_;
} CostModel;

static int CostModel_Build(CostModel* p, int xsize, int ysize,
                           int recursion_level, int use_palette,
                           const uint32_t* argb, int palette_bits) {
  int ok = 0;
  int stream_size;
  Histogram histo;
  int i;
  PixOrCopy* stream = (PixOrCopy*)malloc(xsize * ysize * sizeof(*stream));
  if (stream == NULL) {
    goto Error;
  }
  p->palette_bits_ = palette_bits;
  if (recursion_level > 0) {
    if (!BackwardReferencesTraceBackwards(xsize, ysize, recursion_level - 1,
                                          use_palette, argb,
                                          palette_bits,
                                          &stream[0], &stream_size)) {
      goto Error;
    }
  } else {
    const int quality = 100;
    if (!BackwardReferencesHashChain(xsize, ysize, use_palette, argb,
                                     palette_bits, quality,
                                     &stream[0], &stream_size)) {
      goto Error;
    }
  }
  HistogramInit(&histo, palette_bits);
  for (i = 0; i < stream_size; ++i) {
    HistogramAddSinglePixOrCopy(&histo, stream[i]);
  }
  ConvertPopulationCountTableToBitEstimates(
      HistogramNumPixOrCopyCodes(&histo),
      &histo.literal_[0], &p->literal_[0]);
  ConvertPopulationCountTableToBitEstimates(
      VALUES_IN_BYTE, &histo.red_[0], &p->red_[0]);
  ConvertPopulationCountTableToBitEstimates(
      VALUES_IN_BYTE, &histo.blue_[0], &p->blue_[0]);
  ConvertPopulationCountTableToBitEstimates(
      VALUES_IN_BYTE, &histo.alpha_[0], &p->alpha_[0]);
  ConvertPopulationCountTableToBitEstimates(
      DISTANCE_CODES_MAX, &histo.distance_[0], &p->distance_[0]);
  ok = 1;
 Error:
  free(stream);
  return ok;
}

static WEBP_INLINE double CostModel_LiteralCost(const CostModel* p,
                                                uint32_t v) {
  return p->alpha_[v >> 24] +
         p->red_[(v >> 16) & 0xff] +
         p->literal_[(v >> 8) & 0xff] +
         p->blue_[v & 0xff];
}

static WEBP_INLINE double CostModel_PaletteCost(const CostModel* p,
                                                uint32_t ix) {
  int literal_ix = VALUES_IN_BYTE + kLengthCodes + ix;
  return p->literal_[literal_ix];
}

static WEBP_INLINE double CostModel_LengthCost(const CostModel* p,
                                               uint32_t len) {
  int code, extra_bits_count, extra_bits_value;
  PrefixEncode(len, &code, &extra_bits_count, &extra_bits_value);
  return p->literal_[VALUES_IN_BYTE + code] + extra_bits_count;
}

static WEBP_INLINE double CostModel_DistanceCost(const CostModel* p,
                                                 uint32_t distance) {
  int code, extra_bits_count, extra_bits_value;
  PrefixEncode(distance, &code, &extra_bits_count, &extra_bits_value);
  return p->distance_[code] + extra_bits_count;
}

static int BackwardReferencesHashChainDistanceOnly(
    int xsize, int ysize,
    int recursive_cost_model,
    int use_palette,
    const uint32_t* argb,
    int palette_bits,
    uint32_t* dist_array) {
  const int quality = 100;
  const int pix_count = xsize * ysize;
  double* cost = (double*)malloc(pix_count * sizeof(*cost));
  int i;
  CostModel* cost_model = (CostModel*)malloc(sizeof(*cost_model));

  VP8LColorCache hashers;
  VP8LHashChain* hash_chain = (VP8LHashChain*)malloc(sizeof(*hash_chain));
  int ok = 0;
  if (cost == NULL ||
      cost_model == NULL ||
      hash_chain == NULL ||
      !VP8LColorCacheInit(&hashers, palette_bits)) {
    goto Error;
  }
  VP8LHashChain_Init(hash_chain, pix_count);
  CostModel_Build(cost_model, xsize, ysize, recursive_cost_model,
                  use_palette, argb, palette_bits);
  for (i = 0; i < pix_count; ++i) {
    cost[i] = 1e100;
  }
  // We loop one pixel at a time, but store all currently best points to
  // non-processed locations from this point.
  dist_array[0] = 0;
  for (i = 0; i < pix_count; ++i) {
    double prev_cost = 0.0;
    int shortmax;
    if (i > 0) {
      prev_cost = cost[i - 1];
    }
    for (shortmax = 0; shortmax < 2; ++shortmax) {
      int offset = 0;
      int len = 0;
      if (i < pix_count - 1) {  // FindCopy reads pixels at [i] and [i + 1].
        int maxlen = shortmax ? 2 : kMaxLength;
        if (maxlen > pix_count - i) {
          maxlen = pix_count - i;
        }
        VP8LHashChain_FindCopy(hash_chain, quality, i, xsize, argb, maxlen,
                               &offset, &len);
      }
      if (len >= kMinLength) {
        const int code = DistanceToPlaneCode(xsize, offset);
        const double distance_cost =
            prev_cost + CostModel_DistanceCost(cost_model, code);
        int k;
        for (k = 1; k < len; ++k) {
          const double cost_val =
              distance_cost + CostModel_LengthCost(cost_model, k);
          if (cost[i + k] > cost_val) {
            cost[i + k] = cost_val;
            dist_array[i + k] = k + 1;
          }
        }
        // This if is for speedup only. It roughly doubles the speed, and
        // makes compression worse by .1 %.
        if (len >= 128 && code < 2) {
          // Long copy for short distances, let's skip the middle
          // lookups for better copies.
          // 1) insert the hashes.
          for (k = 0; k < len; ++k) {
            VP8LColorCacheInsert(&hashers, argb[i + k]);
            if (i + k + 1 < pix_count) {
              // Add to the hash_chain (but cannot add the last pixel).
              VP8LHashChain_Insert(hash_chain, &argb[i + k], i + k);
            }
          }
          // 2) jump.
          i += len - 1;  // for loop does ++i, thus -1 here.
          goto next_symbol;
        }
      }
    }
    if (i < pix_count - 1) {
      VP8LHashChain_Insert(hash_chain, &argb[i], i);
    }
    {
      // inserting a literal pixel
      double cost_val = prev_cost;
      double mul0 = 1.0;
      double mul1 = 1.0;
      if (recursive_cost_model == 0) {
        mul0 = 0.68;
        mul1 = 0.82;
      }
      if (use_palette && VP8LColorCacheContains(&hashers, argb[i])) {
        int ix = VP8LColorCacheGetIndex(&hashers, argb[i]);
        cost_val += CostModel_PaletteCost(cost_model, ix) * mul0;
      } else {
        cost_val += CostModel_LiteralCost(cost_model, argb[i]) * mul1;
      }
      if (cost[i] > cost_val) {
        cost[i] = cost_val;
        dist_array[i] = 1;  // only one is inserted.
      }
      VP8LColorCacheInsert(&hashers, argb[i]);
    }
 next_symbol: ;
  }
  // Last pixel still to do, it can only be a single step if not reached
  // through cheaper means already.
  ok = 1;
 Error:
  if (hash_chain) VP8LHashChain_Delete(hash_chain);
  free(hash_chain);
  free(cost_model);
  free(cost);
  VP8LColorCacheDelete(&hashers);
  return ok;
}

static void TraceBackwards(const uint32_t* dist_array, int dist_array_size,
                           uint32_t** chosen_path, int* chosen_path_size) {
  int i;
  // Count how many.
  int count = 0;
  for (i = dist_array_size - 1; i >= 0; ) {
    int k = dist_array[i];
    assert(k >= 1);
    ++count;
    i -= k;
  }
  // Allocate.
  *chosen_path_size = count;
  *chosen_path = (uint32_t*)malloc(count * sizeof(*chosen_path));
  // Write in reverse order.
  for (i = dist_array_size - 1; i >= 0; ) {
    int k = dist_array[i];
    assert(k >= 1);
    (*chosen_path)[--count] = k;
    i -= k;
  }
}

static int BackwardReferencesHashChainFollowChosenPath(
    int xsize,
    int ysize,
    int use_palette,
    const uint32_t* argb,
    int palette_bits,
    uint32_t* chosen_path,
    int chosen_path_size,
    PixOrCopy* stream,
    int* stream_size) {
  const int quality = 100;
  const int pix_count = xsize * ysize;
  int i = 0;
  int k;
  int ix;
  int ok = 0;
  VP8LColorCache hashers;
  VP8LHashChain* hash_chain = (VP8LHashChain*)malloc(sizeof(*hash_chain));
  VP8LHashChain_Init(hash_chain, pix_count);
  if (hash_chain == NULL ||
      !VP8LColorCacheInit(&hashers, palette_bits)) {
    goto Error;
  }
  *stream_size = 0;
  for (ix = 0; ix < chosen_path_size; ++ix) {
    int offset = 0;
    int len = 0;
    int maxlen = chosen_path[ix];
    if (maxlen != 1) {
      VP8LHashChain_FindCopy(hash_chain, quality,
                             i, xsize, argb, maxlen, &offset, &len);
      assert(len == maxlen);
      stream[*stream_size] = PixOrCopyCreateCopy(offset, len);
      ++(*stream_size);
      for (k = 0; k < len; ++k) {
        VP8LColorCacheInsert(&hashers, argb[i + k]);
        if (i + k + 1 < pix_count) {
          // Add to the hash_chain (but cannot add the last pixel).
          VP8LHashChain_Insert(hash_chain, &argb[i + k], i + k);
        }
      }
      i += len;
    } else {
      if (use_palette && VP8LColorCacheContains(&hashers, argb[i])) {
        // push pixel as a palette pixel
        int ix = VP8LColorCacheGetIndex(&hashers, argb[i]);
        stream[*stream_size] = PixOrCopyCreatePaletteIx(ix);
      } else {
        stream[*stream_size] = PixOrCopyCreateLiteral(argb[i]);
      }
      ++(*stream_size);
      VP8LColorCacheInsert(&hashers, argb[i]);
      if (i + 1 < pix_count) {
        VP8LHashChain_Insert(hash_chain, &argb[i], i);
      }
      ++i;
    }
  }
  ok = 1;
 Error:
  VP8LHashChain_Delete(hash_chain);
  if (hash_chain) {
    free(hash_chain);
  }
  VP8LColorCacheDelete(&hashers);
  return ok;
}

// Returns 1 on success.
int BackwardReferencesTraceBackwards(int xsize, int ysize,
                                     int recursive_cost_model,
                                     int use_palette,
                                     const uint32_t* argb,
                                     int palette_bits,
                                     PixOrCopy* stream,
                                     int* stream_size) {
  int ok = 0;
  const int dist_array_size = xsize * ysize;
  uint32_t* chosen_path = NULL;
  int chosen_path_size = 0;
  uint32_t* const dist_array = (uint32_t*)
      malloc(dist_array_size * sizeof(*dist_array));
  if (dist_array == NULL) {
    goto Error;
  }
  *stream_size = 0;
  if (!BackwardReferencesHashChainDistanceOnly(
          xsize, ysize, recursive_cost_model, use_palette, argb, palette_bits,
          dist_array)) {
    free(dist_array);
    goto Error;
  }
  TraceBackwards(dist_array, dist_array_size, &chosen_path, &chosen_path_size);
  free(dist_array);
  if (!BackwardReferencesHashChainFollowChosenPath(
          xsize, ysize, use_palette, argb, palette_bits,
          chosen_path, chosen_path_size,
          stream, stream_size)) {
    goto Error;
  }
  ok = 1;
 Error:
  free(chosen_path);
  return ok;
}

void BackwardReferences2DLocality(int xsize, int data_size, PixOrCopy* data) {
  int i;
  for (i = 0; i < data_size; ++i) {
    if (PixOrCopyIsCopy(&data[i])) {
      int dist = data[i].argb_or_offset;
      int transformed_dist = DistanceToPlaneCode(xsize, dist);
      data[i].argb_or_offset = transformed_dist;
    }
  }
}

int VerifyBackwardReferences(const uint32_t* argb, int xsize, int ysize,
                             int palette_bits,
                             const PixOrCopy* lit,
                             int lit_size) {
  int num_pixels = 0;
  int i;
  VP8LColorCache hashers;
  VP8LColorCacheInit(&hashers, palette_bits);
  for (i = 0; i < lit_size; ++i) {
    if (PixOrCopyIsLiteral(&lit[i])) {
      if (argb[num_pixels] != PixOrCopyArgb(&lit[i])) {
        printf("i %d, pixel %d, original: 0x%08x, literal: 0x%08x\n",
               i, num_pixels, argb[num_pixels], PixOrCopyArgb(&lit[i]));
        VP8LColorCacheDelete(&hashers);
        return 0;
      }
      VP8LColorCacheInsert(&hashers, argb[num_pixels]);
      ++num_pixels;
    } else if (PixOrCopyIsPaletteIx(&lit[i])) {
      uint32_t palette_entry =
          VP8LColorCacheLookup(&hashers, PixOrCopyPaletteIx(&lit[i]));
      if (argb[num_pixels] != palette_entry) {
        printf("i %d, pixel %d, original: 0x%08x, palette_ix: %d, "
               "palette_entry: 0x%08x\n",
               i, num_pixels, argb[num_pixels], PixOrCopyPaletteIx(&lit[i]),
               palette_entry);
        VP8LColorCacheDelete(&hashers);
        return 0;
      }
      VP8LColorCacheInsert(&hashers, argb[num_pixels]);
      ++num_pixels;
    } else if (PixOrCopyIsCopy(&lit[i])) {
      int k;
      if (PixOrCopyDistance(&lit[i]) == 0) {
        printf("Bw reference with zero distance.\n");
        VP8LColorCacheDelete(&hashers);
        return 0;
      }
      for (k = 0; k < lit[i].len; ++k) {
        if (argb[num_pixels] !=
            argb[num_pixels - PixOrCopyDistance(&lit[i])]) {
          printf("i %d, pixel %d, original: 0x%08x, copied: 0x%08x, dist: %d\n",
                 i, num_pixels, argb[num_pixels],
                 argb[num_pixels - PixOrCopyDistance(&lit[i])],
                 PixOrCopyDistance(&lit[i]));
          VP8LColorCacheDelete(&hashers);
          return 0;
        }
        VP8LColorCacheInsert(&hashers, argb[num_pixels]);
        ++num_pixels;
      }
    }
  }
  {
    const int pix_count = xsize * ysize;
    if (num_pixels != pix_count) {
      printf("verify failure: %d != %d\n", num_pixels, pix_count);
      VP8LColorCacheDelete(&hashers);
      return 0;
    }
  }
  VP8LColorCacheDelete(&hashers);
  return 1;
}

// Returns 1 on success.
static int ComputePaletteHistogram(const uint32_t* argb, int xsize, int ysize,
                                   PixOrCopy* stream, int stream_size,
                                   int palette_bits, Histogram* histo) {
  int pixel_index = 0;
  int i;
  uint32_t k;
  VP8LColorCache hashers;
  if (!VP8LColorCacheInit(&hashers, palette_bits)) {
    return 0;
  }
  for (i = 0; i < stream_size; ++i) {
    const PixOrCopy v = stream[i];
    if (PixOrCopyIsLiteral(&v)) {
      if (palette_bits != 0 &&
          VP8LColorCacheContains(&hashers, argb[pixel_index])) {
        // push pixel as a palette pixel
        const int ix = VP8LColorCacheGetIndex(&hashers, argb[pixel_index]);
        HistogramAddSinglePixOrCopy(histo, PixOrCopyCreatePaletteIx(ix));
      } else {
        HistogramAddSinglePixOrCopy(histo, v);
      }
    } else {
      HistogramAddSinglePixOrCopy(histo, v);
    }
    for (k = 0; k < PixOrCopyLength(&v); ++k) {
      VP8LColorCacheInsert(&hashers, argb[pixel_index]);
      ++pixel_index;
    }
  }
  assert(pixel_index == xsize * ysize);
  (void)xsize;  // xsize is not used in non-debug compilations otherwise.
  (void)ysize;  // ysize is not used in non-debug compilations otherwise.
  VP8LColorCacheDelete(&hashers);
  return 1;
}

// Returns how many bits are to be used for a palette.
int CalculateEstimateForPaletteSize(const uint32_t* argb,
                                    int xsize, int ysize,
                                    int* best_palette_bits) {
  int ok = 0;
  int palette_bits;
  double lowest_entropy = 1e99;
  PixOrCopy* stream = (PixOrCopy*)malloc(xsize * ysize * sizeof(*stream));
  int stream_size;
  static const double kSmallPenaltyForLargePalette = 4.0;
  static const int quality = 30;
  if (stream == NULL ||
      !BackwardReferencesHashChain(xsize, ysize,
                                   0, argb, 0, quality, stream, &stream_size)) {
    goto Error;
  }
  for (palette_bits = 0; palette_bits < 12; ++palette_bits) {
    double cur_entropy;
    Histogram histo;
    HistogramInit(&histo, palette_bits);
    ComputePaletteHistogram(argb, xsize, ysize, &stream[0], stream_size,
                            palette_bits, &histo);
    cur_entropy = HistogramEstimateBits(&histo) +
        kSmallPenaltyForLargePalette * palette_bits;
    if (palette_bits == 0 || cur_entropy < lowest_entropy) {
      *best_palette_bits = palette_bits;
      lowest_entropy = cur_entropy;
    }
  }
  ok = 1;
 Error:
  free(stream);
  return ok;
}
234  src/enc/backward_references.h  Normal file
@@ -0,0 +1,234 @@
// Copyright 2012 Google Inc. All Rights Reserved.
//
// This code is licensed under the same terms as WebM:
// Software License Agreement: http://www.webmproject.org/license/software/
// Additional IP Rights Grant: http://www.webmproject.org/license/additional/
// -----------------------------------------------------------------------------
//
// Author: Jyrki Alakuijala (jyrki@google.com)
//

#ifndef WEBP_ENC_BACKWARD_REFERENCES_H_
#define WEBP_ENC_BACKWARD_REFERENCES_H_

#include <assert.h>
#include <stdint.h>

#include "../webp/types.h"

#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif

// Backward reference distance prefix codes
#define DISTANCE_CODES_MAX 40

// Compression constants
#define CODE_LENGTH_CODES 19
static const int kLengthCodes = 24;
static const int kPaletteCodeBitsMax = 11;
#define PIX_OR_COPY_CODES_MAX (256 + 24 + (1 << 11))
static const int kMaxLength = 4096;

// use GNU builtins where available.
#if defined(__GNUC__) && \
    ((__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || __GNUC__ >= 4)
static WEBP_INLINE int BitsLog2Floor(uint32_t n) {
  return n == 0 ? -1 : 31 ^ __builtin_clz(n);
}
#else
static WEBP_INLINE int BitsLog2Floor(uint32_t n) {
  int log;
  uint32_t value;
  int i;
  if (n == 0)
    return -1;
  log = 0;
  value = n;
  for (i = 4; i >= 0; --i) {
    int shift = (1 << i);
    uint32_t x = value >> shift;
    if (x != 0) {
      value = x;
      log += shift;
    }
  }
  return log;
}
#endif

static WEBP_INLINE int BitsLog2Ceiling(uint32_t n) {
  int floor = BitsLog2Floor(n);
  if (n == (n & ~(n - 1)))  // zero or a power of two.
    return floor;
  else
    return floor + 1;
}
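// Example: BitsLog2Floor(4) == 2 and BitsLog2Ceiling(4) == 2 (power of two),
// while BitsLog2Ceiling(5) == 3; BitsLog2Floor(0) returns -1.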

// Splitting of distance and length codes into prefixes and
// extra bits. The prefixes are encoded with an entropy code
// while the extra bits are stored just as normal bits.
static WEBP_INLINE void PrefixEncode(
    int distance,
    int *code,
    int *extra_bits_count,
    int *extra_bits_value) {
  // Collect the two most significant bits where the highest bit is 1.
  const int highest_bit = BitsLog2Floor(--distance);
  // & 0x3f is to make behavior well defined when highest_bit
  // does not exist or is the least significant bit.
  const int second_highest_bit =
      (distance >> ((highest_bit - 1) & 0x3f)) & 1;
  *extra_bits_count = (highest_bit > 0) ? highest_bit - 1 : 0;
  *extra_bits_value = distance & ((1 << *extra_bits_count) - 1);
  *code = (highest_bit > 0) ? 2 * highest_bit + second_highest_bit :
      (highest_bit == 0) ? 1 : 0;
}
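// Example: PrefixEncode(5, ...) first decrements the distance to 4, finds
// highest_bit == 2 and second_highest_bit == 0, and so produces *code == 4,
// *extra_bits_count == 1 and *extra_bits_value == 0.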

enum Mode {
  kLiteral,
  kPaletteIx,
  kCopy,
  kNone,
};

typedef struct {
  // mode as uint8_t to make the memory layout to be exactly 8 bytes.
  uint8_t mode;
  uint16_t len;
  uint32_t argb_or_offset;
} PixOrCopy;

static WEBP_INLINE PixOrCopy PixOrCopyCreateCopy(uint32_t offset,
                                                 uint16_t len) {
  PixOrCopy retval;
  retval.mode = kCopy;
  retval.argb_or_offset = offset;
  retval.len = len;
  return retval;
}

static WEBP_INLINE PixOrCopy PixOrCopyCreatePaletteIx(int ix) {
  PixOrCopy retval;
  assert(ix >= 0);
  assert(ix < (1 << kPaletteCodeBitsMax));
  retval.mode = kPaletteIx;
  retval.argb_or_offset = ix;
  retval.len = 1;
  return retval;
}

static WEBP_INLINE PixOrCopy PixOrCopyCreateLiteral(uint32_t argb) {
  PixOrCopy retval;
  retval.mode = kLiteral;
  retval.argb_or_offset = argb;
  retval.len = 1;
  return retval;
}

static WEBP_INLINE int PixOrCopyIsLiteral(const PixOrCopy *p) {
  return p->mode == kLiteral;
}

static WEBP_INLINE int PixOrCopyIsPaletteIx(const PixOrCopy *p) {
  return p->mode == kPaletteIx;
}

static WEBP_INLINE int PixOrCopyIsCopy(const PixOrCopy *p) {
  return p->mode == kCopy;
}

static WEBP_INLINE uint32_t PixOrCopyLiteral(const PixOrCopy *p,
                                             int component) {
  assert(p->mode == kLiteral);
  return (p->argb_or_offset >> (component * 8)) & 0xff;
}

static WEBP_INLINE uint32_t PixOrCopyLength(const PixOrCopy *p) {
  return p->len;
}

static WEBP_INLINE uint32_t PixOrCopyArgb(const PixOrCopy *p) {
  assert(p->mode == kLiteral);
  return p->argb_or_offset;
}

static WEBP_INLINE uint32_t PixOrCopyPaletteIx(const PixOrCopy *p) {
  assert(p->mode == kPaletteIx);
  assert(p->argb_or_offset < (1 << kPaletteCodeBitsMax));
  return p->argb_or_offset;
}

static WEBP_INLINE uint32_t PixOrCopyDistance(const PixOrCopy *p) {
  assert(p->mode == kCopy);
  return p->argb_or_offset;
}

static WEBP_INLINE void PixOrCopyLengthCodeAndBits(
    const PixOrCopy *p, int *code, int *n_bits, int *bits) {
  assert(p->len >= 1 && p->len <= kMaxLength);
  PrefixEncode(p->len, code, n_bits, bits);
}


// Ridiculously simple backward references for images where it is unlikely
// that there are large backward references (photos).
void BackwardReferencesRle(
    int xsize,
    int ysize,
    const uint32_t *argb,
    PixOrCopy *stream,
    int *stream_size);

// This is a simple fast function for obtaining backward references
// based on simple heuristics. Returns 1 on success.
int BackwardReferencesHashChain(
    int xsize,
    int ysize,
    int use_palette,
    const uint32_t *argb,
    int palette_bits,
    int quality,
    PixOrCopy *stream,
    int *stream_size);

// This method looks for a shortest path through the backward reference
// network based on a cost model generated by a first round of compression.
// Returns 1 on success.
int BackwardReferencesTraceBackwards(
    int xsize,
    int ysize,
    int recursive_cost_model,
    int use_palette,
    const uint32_t *argb,
    int palette_bits,
    PixOrCopy *stream,
    int *stream_size);

// Convert backward references that are of linear distance along
// the image scan lines to have a 2d locality indexing where
// smaller values are used for backward references that are close by.
void BackwardReferences2DLocality(int xsize, int data_size,
                                  PixOrCopy *data);

// Internals of locality transform exposed for testing use.
int DistanceToPlaneCode(int xsize, int distance);

// Returns true if the given backward references actually produce
// the image given in tuple (argb, xsize, ysize).
int VerifyBackwardReferences(const uint32_t* argb,
                             int xsize, int ysize,
                             int palette_bits,
                             const PixOrCopy *lit,
                             int lit_size);

// Produce an estimate for a good emerging palette size for the image.
int CalculateEstimateForPaletteSize(const uint32_t *argb,
                                    int xsize, int ysize,
                                    int *best_palette_bits);

#if defined(__cplusplus) || defined(c_plusplus)
}
#endif

#endif  // WEBP_ENC_BACKWARD_REFERENCES_H_
515  src/enc/histogram.c  Normal file
@@ -0,0 +1,515 @@
// Copyright 2012 Google Inc. All Rights Reserved.
//
// This code is licensed under the same terms as WebM:
// Software License Agreement: http://www.webmproject.org/license/software/
// Additional IP Rights Grant: http://www.webmproject.org/license/additional/
// -----------------------------------------------------------------------------
//
// Author: Jyrki Alakuijala (jyrki@google.com)
//


#include <math.h>
#include <stdio.h>

#include "./backward_references.h"
#include "./histogram.h"

// A lookup table for small values of log(int) to be used in entropy
// computation.
//
// ", ".join(["%.16ff" % x for x in [0.0]+[log(x) for x in range(1, 256)]])
static const float kLogTable[] = {
  0.0000000000000000f, 0.0000000000000000f, 0.6931471805599453f,
  1.0986122886681098f, 1.3862943611198906f, 1.6094379124341003f,
  1.7917594692280550f, 1.9459101490553132f, 2.0794415416798357f,
  2.1972245773362196f, 2.3025850929940459f, 2.3978952727983707f,
  2.4849066497880004f, 2.5649493574615367f, 2.6390573296152584f,
  2.7080502011022101f, 2.7725887222397811f, 2.8332133440562162f,
  2.8903717578961645f, 2.9444389791664403f, 2.9957322735539909f,
  3.0445224377234230f, 3.0910424533583161f, 3.1354942159291497f,
  3.1780538303479458f, 3.2188758248682006f, 3.2580965380214821f,
  3.2958368660043291f, 3.3322045101752038f, 3.3672958299864741f,
  3.4011973816621555f, 3.4339872044851463f, 3.4657359027997265f,
  3.4965075614664802f, 3.5263605246161616f, 3.5553480614894135f,
  3.5835189384561099f, 3.6109179126442243f, 3.6375861597263857f,
  3.6635616461296463f, 3.6888794541139363f, 3.7135720667043080f,
  3.7376696182833684f, 3.7612001156935624f, 3.7841896339182610f,
  3.8066624897703196f, 3.8286413964890951f, 3.8501476017100584f,
  3.8712010109078911f, 3.8918202981106265f, 3.9120230054281460f,
  3.9318256327243257f, 3.9512437185814275f, 3.9702919135521220f,
  3.9889840465642745f, 4.0073331852324712f, 4.0253516907351496f,
  4.0430512678345503f, 4.0604430105464191f, 4.0775374439057197f,
  4.0943445622221004f, 4.1108738641733114f, 4.1271343850450917f,
  4.1431347263915326f, 4.1588830833596715f, 4.1743872698956368f,
  4.1896547420264252f, 4.2046926193909657f, 4.2195077051761070f,
  4.2341065045972597f, 4.2484952420493594f, 4.2626798770413155f,
  4.2766661190160553f, 4.2904594411483910f, 4.3040650932041702f,
  4.3174881135363101f, 4.3307333402863311f, 4.3438054218536841f,
  4.3567088266895917f, 4.3694478524670215f, 4.3820266346738812f,
  4.3944491546724391f, 4.4067192472642533f, 4.4188406077965983f,
  4.4308167988433134f, 4.4426512564903167f, 4.4543472962535073f,
  4.4659081186545837f, 4.4773368144782069f, 4.4886363697321396f,
  4.4998096703302650f, 4.5108595065168497f, 4.5217885770490405f,
  4.5325994931532563f, 4.5432947822700038f, 4.5538768916005408f,
  4.5643481914678361f, 4.5747109785033828f, 4.5849674786705723f,
  4.5951198501345898f, 4.6051701859880918f, 4.6151205168412597f,
  4.6249728132842707f, 4.6347289882296359f, 4.6443908991413725f,
  4.6539603501575231f, 4.6634390941120669f, 4.6728288344619058f,
  4.6821312271242199f, 4.6913478822291435f, 4.7004803657924166f,
  4.7095302013123339f, 4.7184988712950942f, 4.7273878187123408f,
  4.7361984483944957f, 4.7449321283632502f, 4.7535901911063645f,
  4.7621739347977563f, 4.7706846244656651f, 4.7791234931115296f,
  4.7874917427820458f, 4.7957905455967413f, 4.8040210447332568f,
  4.8121843553724171f, 4.8202815656050371f, 4.8283137373023015f,
  4.8362819069514780f, 4.8441870864585912f, 4.8520302639196169f,
  4.8598124043616719f, 4.8675344504555822f, 4.8751973232011512f,
  4.8828019225863706f, 4.8903491282217537f, 4.8978397999509111f,
  4.9052747784384296f, 4.9126548857360524f, 4.9199809258281251f,
  4.9272536851572051f, 4.9344739331306915f, 4.9416424226093039f,
  4.9487598903781684f, 4.9558270576012609f, 4.9628446302599070f,
  4.9698132995760007f, 4.9767337424205742f, 4.9836066217083363f,
  4.9904325867787360f, 4.9972122737641147f, 5.0039463059454592f,
  5.0106352940962555f, 5.0172798368149243f, 5.0238805208462765f,
  5.0304379213924353f, 5.0369526024136295f, 5.0434251169192468f,
  5.0498560072495371f, 5.0562458053483077f, 5.0625950330269669f,
  5.0689042022202315f, 5.0751738152338266f, 5.0814043649844631f,
  5.0875963352323836f, 5.0937502008067623f, 5.0998664278241987f,
  5.1059454739005803f, 5.1119877883565437f, 5.1179938124167554f,
  5.1239639794032588f, 5.1298987149230735f, 5.1357984370502621f,
  5.1416635565026603f, 5.1474944768134527f, 5.1532915944977793f,
  5.1590552992145291f, 5.1647859739235145f, 5.1704839950381514f,
  5.1761497325738288f, 5.1817835502920850f, 5.1873858058407549f,
  5.1929568508902104f, 5.1984970312658261f, 5.2040066870767951f,
  5.2094861528414214f, 5.2149357576089859f, 5.2203558250783244f,
  5.2257466737132017f, 5.2311086168545868f, 5.2364419628299492f,
  5.2417470150596426f, 5.2470240721604862f, 5.2522734280466299f,
  5.2574953720277815f, 5.2626901889048856f, 5.2678581590633282f,
  5.2729995585637468f, 5.2781146592305168f, 5.2832037287379885f,
  5.2882670306945352f, 5.2933048247244923f, 5.2983173665480363f,
  5.3033049080590757f, 5.3082676974012051f, 5.3132059790417872f,
  5.3181199938442161f, 5.3230099791384085f, 5.3278761687895813f,
  5.3327187932653688f, 5.3375380797013179f, 5.3423342519648109f,
  5.3471075307174685f, 5.3518581334760666f, 5.3565862746720123f,
  5.3612921657094255f, 5.3659760150218512f, 5.3706380281276624f,
  5.3752784076841653f, 5.3798973535404597f, 5.3844950627890888f,
  5.3890717298165010f, 5.3936275463523620f, 5.3981627015177525f,
  5.4026773818722793f, 5.4071717714601188f, 5.4116460518550396f,
  5.4161004022044201f, 5.4205349992722862f, 5.4249500174814029f,
  5.4293456289544411f, 5.4337220035542400f, 5.4380793089231956f,
  5.4424177105217932f, 5.4467373716663099f, 5.4510384535657002f,
  5.4553211153577017f, 5.4595855141441589f, 5.4638318050256105f,
  5.4680601411351315f, 5.4722706736714750f, 5.4764635519315110f,
  5.4806389233419912f, 5.4847969334906548f, 5.4889377261566867f,
  5.4930614433405482f, 5.4971682252932021f, 5.5012582105447274f,
  5.5053315359323625f, 5.5093883366279774f, 5.5134287461649825f,
  5.5174528964647074f, 5.5214609178622460f, 5.5254529391317835f,
  5.5294290875114234f, 5.5333894887275203f, 5.5373342670185366f,
  5.5412635451584258f,
};

// Faster logarithm for small integers, with the property of log(0) == 0.
static WEBP_INLINE double FastLog(int v) {
  if (v < (int)(sizeof(kLogTable) / sizeof(kLogTable[0]))) {
    return kLogTable[v];
  }
  return log(v);
}

void ConvertPopulationCountTableToBitEstimates(
    int num_symbols,
    const int* const population_counts,
    double* const output) {
  int sum = 0;
  int nonzeros = 0;
  int i;
  for (i = 0; i < num_symbols; ++i) {
    sum += population_counts[i];
    if (population_counts[i] > 0) {
      ++nonzeros;
    }
  }
  if (nonzeros <= 1) {
    memset(output, 0, num_symbols * sizeof(*output));
    return;
  }
  {
    const double log2sum = log2(sum);
    for (i = 0; i < num_symbols; ++i) {
      if (population_counts[i] == 0) {
        output[i] = log2sum;
      } else {
        output[i] = log2sum - log2(population_counts[i]);
      }
    }
  }
}
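// Example: for population counts {3, 1, 0} the sum is 4, so the estimates
// become {log2(4) - log2(3), log2(4) - log2(1), log2(4)}, i.e. roughly
// {0.42, 2.0, 2.0} bits per symbol.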

void HistogramAddSinglePixOrCopy(Histogram* const p, const PixOrCopy v) {
  if (PixOrCopyIsLiteral(&v)) {
    ++p->alpha_[PixOrCopyLiteral(&v, 3)];
    ++p->red_[PixOrCopyLiteral(&v, 2)];
    ++p->literal_[PixOrCopyLiteral(&v, 1)];
    ++p->blue_[PixOrCopyLiteral(&v, 0)];
  } else if (PixOrCopyIsPaletteIx(&v)) {
    int literal_ix = 256 + kLengthCodes + PixOrCopyPaletteIx(&v);
    ++p->literal_[literal_ix];
  } else {
    int code, extra_bits_count, extra_bits_value;
    PrefixEncode(PixOrCopyLength(&v),
                 &code, &extra_bits_count, &extra_bits_value);
    ++p->literal_[256 + code];
    PrefixEncode(PixOrCopyDistance(&v),
                 &code, &extra_bits_count, &extra_bits_value);
    ++p->distance_[code];
  }
}

void HistogramBuild(Histogram* const p,
                    const PixOrCopy* const literal_and_length,
                    int n_literal_and_length) {
  int i;
  HistogramClear(p);
  for (i = 0; i < n_literal_and_length; ++i) {
    HistogramAddSinglePixOrCopy(p, literal_and_length[i]);
  }
}

double ShannonEntropy(const int* const array, int n) {
  int i;
  double retval = 0;
  int sum = 0;
  for (i = 0; i < n; ++i) {
    if (array[i] != 0) {
      sum += array[i];
      retval += array[i] * FastLog(array[i]);
    }
  }
  retval -= sum * FastLog(sum);
  retval *= -1.4426950408889634;  // 1.0 / -FastLog(2);
  return retval;
}
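// Example: ShannonEntropy of the counts {2, 2} is 4.0 bits in total, i.e.
// four samples spread evenly over two symbols cost one bit each.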

static double BitsEntropy(const int* const array, int n) {
  double retval = 0;
  int sum = 0;
  int nonzeros = 0;
  int max_val = 0;
  int i;
  double mix;
  for (i = 0; i < n; ++i) {
    if (array[i] != 0) {
      sum += array[i];
      ++nonzeros;
      retval += array[i] * FastLog(array[i]);
      if (max_val < array[i]) {
        max_val = array[i];
      }
    }
  }
  retval -= sum * FastLog(sum);
  retval *= -1.4426950408889634;  // 1.0 / -FastLog(2);
  mix = 0.627;
  if (nonzeros < 5) {
    if (nonzeros <= 1) {
      return 0;
    }
    // Two symbols, they will be 0 and 1 in a Huffman code.
    // Let's mix in a bit of entropy to favor good clustering when
    // distributions of these are combined.
    if (nonzeros == 2) {
      return 0.99 * sum + 0.01 * retval;
    }
    // No matter what the entropy says, we cannot be better than min_limit
    // with Huffman coding. I am mixing a bit of entropy into the
    // min_limit since it produces much better (~0.5 %) compression results
    // perhaps because of better entropy clustering.
    if (nonzeros == 3) {
      mix = 0.95;
    } else {
      mix = 0.7;  // nonzeros == 4.
    }
  }
  {
    double min_limit = 2 * sum - max_val;
    min_limit = mix * min_limit + (1.0 - mix) * retval;
    if (retval < min_limit) {
      return min_limit;
    }
  }
  return retval;
}

double HistogramEstimateBitsBulk(const Histogram* const p) {
  double retval = BitsEntropy(&p->literal_[0], HistogramNumPixOrCopyCodes(p)) +
      BitsEntropy(&p->red_[0], 256) +
      BitsEntropy(&p->blue_[0], 256) +
      BitsEntropy(&p->alpha_[0], 256) +
      BitsEntropy(&p->distance_[0], DISTANCE_CODES_MAX);
  // Compute the extra bits cost.
  size_t i;
  for (i = 2; i < kLengthCodes - 2; ++i) {
    retval +=
        (i >> 1) * p->literal_[256 + i + 2];
  }
  for (i = 2; i < DISTANCE_CODES_MAX - 2; ++i) {
    retval += (i >> 1) * p->distance_[i + 2];
  }
  return retval;
}

double HistogramEstimateBits(const Histogram* const p) {
  return HistogramEstimateBitsHeader(p) + HistogramEstimateBitsBulk(p);
}

// Returns the cost to encode the rle-encoded entropy code.
// The constants in this function are experimental.
static double HuffmanCost(const int* const population, int length) {
  // Small bias because Huffman code length is typically not stored in
  // full length.
  static const int kHuffmanCodeOfHuffmanCodeSize = CODE_LENGTH_CODES * 3;
  static const double kSmallBias = 9.1;
  double retval = kHuffmanCodeOfHuffmanCodeSize - kSmallBias;
  int streak = 0;
  int i = 0;
  for (; i < length - 1; ++i) {
    ++streak;
    if (population[i] == population[i + 1]) {
      continue;
    }
 last_streak_hack:
    // population[i] points now to the symbol in the streak of same values.
    if (streak > 3) {
      if (population[i] == 0) {
        retval += 1.5625 + 0.234375 * streak;
      } else {
        retval += 2.578125 + 0.703125 * streak;
      }
    } else {
      if (population[i] == 0) {
        retval += 1.796875 * streak;
      } else {
        retval += 3.28125 * streak;
      }
    }
    streak = 0;
  }
  if (i == length - 1) {
    ++streak;
    goto last_streak_hack;
  }
  return retval;
}

double HistogramEstimateBitsHeader(const Histogram* const p) {
  return HuffmanCost(&p->alpha_[0], 256) +
      HuffmanCost(&p->red_[0], 256) +
      HuffmanCost(&p->literal_[0], HistogramNumPixOrCopyCodes(p)) +
      HuffmanCost(&p->blue_[0], 256) +
      HuffmanCost(&p->distance_[0], DISTANCE_CODES_MAX);
}

int BuildHistogramImage(int xsize, int ysize,
                        int histobits,
                        int palettebits,
                        const PixOrCopy* backward_refs,
                        int backward_refs_size,
                        Histogram*** image_arg,
                        int* image_size) {
  int histo_xsize = histobits ? (xsize + (1 << histobits) - 1) >> histobits : 1;
  int histo_ysize = histobits ? (ysize + (1 << histobits) - 1) >> histobits : 1;
  int i;
  int x = 0;
  int y = 0;
  Histogram** image;
  *image_arg = NULL;
  *image_size = histo_xsize * histo_ysize;
  image = (Histogram**)calloc(*image_size, sizeof(*image));
  if (image == NULL) {
    return 0;
  }
  for (i = 0; i < *image_size; ++i) {
    image[i] = (Histogram*)malloc(sizeof(*image[i]));
    if (!image[i]) {
      int k;
      for (k = 0; k < *image_size; ++k) {
        free(image[k]);
      }
      free(image);
      return 0;
    }
    HistogramInit(image[i], palettebits);
  }
  // x and y trace the position in the image.
  for (i = 0; i < backward_refs_size; ++i) {
    const PixOrCopy v = backward_refs[i];
    const int ix =
        histobits ? (y >> histobits) * histo_xsize + (x >> histobits) : 0;
    HistogramAddSinglePixOrCopy(image[ix], v);
    x += PixOrCopyLength(&v);
    while (x >= xsize) {
      x -= xsize;
      ++y;
    }
  }
  *image_arg = image;
  return 1;
}

int CombineHistogramImage(Histogram** in,
                          int in_size,
                          int quality,
                          Histogram*** out_arg,
                          int* out_size) {
  int ok = 0;
  int i;
  unsigned int seed = 0;
  int tries_with_no_success = 0;
  int inner_iters = 10 + quality / 2;
  int iter;
  double* bit_costs = (double*)malloc(in_size * sizeof(*bit_costs));
  Histogram** out = (Histogram**)calloc(in_size, sizeof(*out));
  *out_arg = out;
  *out_size = in_size;
  if (bit_costs == NULL || out == NULL) {
    goto Error;
  }
  // Copy
  for (i = 0; i < in_size; ++i) {
    Histogram* new_histo = (Histogram*)malloc(sizeof(*new_histo));
    if (new_histo == NULL) {
      goto Error;
    }
    *new_histo = *(in[i]);
    out[i] = new_histo;
    bit_costs[i] = HistogramEstimateBits(out[i]);
  }
  // Collapse similar histograms.
  for (iter = 0; iter < in_size * 3 && *out_size >= 2; ++iter) {
    double best_val = 0;
    int best_ix0 = 0;
    int best_ix1 = 0;
    // Try a few times.
    int k;
    for (k = 0; k < inner_iters; ++k) {
      // Choose two, build a combo out of them.
      double cost_val;
      Histogram* combo;
      int ix0 = rand_r(&seed) % *out_size;
      int ix1;
      int diff = ((k & 7) + 1) % (*out_size - 1);
      if (diff >= 3) {
        diff = rand_r(&seed) % (*out_size - 1);
      }
      ix1 = (ix0 + diff + 1) % *out_size;
      if (ix0 == ix1) {
        continue;
      }
      combo = (Histogram*)malloc(sizeof(*combo));
      if (combo == NULL) {
        goto Error;
      }
      *combo = *out[ix0];
      HistogramAdd(combo, out[ix1]);
      cost_val = HistogramEstimateBits(combo) - bit_costs[ix0] - bit_costs[ix1];
      if (best_val > cost_val) {
        best_val = cost_val;
        best_ix0 = ix0;
        best_ix1 = ix1;
      }
      free(combo);
    }
    if (best_val < 0.0) {
      HistogramAdd(out[best_ix0], out[best_ix1]);
      bit_costs[best_ix0] =
          best_val + bit_costs[best_ix0] + bit_costs[best_ix1];
      // Erase (*out)[best_ix1]
      free(out[best_ix1]);
      memmove(&out[best_ix1], &out[best_ix1 + 1],
              (*out_size - best_ix1 - 1) * sizeof(out[0]));
      memmove(&bit_costs[best_ix1], &bit_costs[best_ix1 + 1],
              (*out_size - best_ix1 - 1) * sizeof(bit_costs[0]));
      --(*out_size);
      tries_with_no_success = 0;
    }
    if (++tries_with_no_success >= 50) {
      break;
    }
  }
  ok = 1;
 Error:
  free(bit_costs);
  if (!ok) {
    if (out) {
      int i;
      for (i = 0; i < *out_size; ++i) {
        free(out[i]);
      }
      free(out);
    }
  }
  return ok;
}

// What is the bit cost of moving square_histogram from
// cur_symbol to candidate_symbol.
static double HistogramDistance(const Histogram* const square_histogram,
                                int cur_symbol,
                                int candidate_symbol,
                                Histogram** candidate_histograms) {
  double new_bit_cost;
  double previous_bit_cost;
  Histogram modified;
  if (cur_symbol == candidate_symbol) {
    return 0;  // Going nowhere. No savings.
  }
  previous_bit_cost =
      HistogramEstimateBits(candidate_histograms[candidate_symbol]);
  if (cur_symbol != -1) {
    previous_bit_cost +=
        HistogramEstimateBits(candidate_histograms[cur_symbol]);
  }

  // Compute the bit cost of the histogram where the data moves to.
  modified = *candidate_histograms[candidate_symbol];
  HistogramAdd(&modified, square_histogram);
  new_bit_cost = HistogramEstimateBits(&modified);

  // Compute the bit cost of the histogram where the data moves away.
  if (cur_symbol != -1) {
    modified = *candidate_histograms[cur_symbol];
    HistogramRemove(&modified, square_histogram);
    new_bit_cost += HistogramEstimateBits(&modified);
  }
  return new_bit_cost - previous_bit_cost;
}

void RefineHistogramImage(Histogram** raw,
                          int raw_size,
                          uint32_t* symbols,
                          int out_size,
                          Histogram** out) {
  int i;
  // Find the best 'out' histogram for each of the raw histograms
  for (i = 0; i < raw_size; ++i) {
    int best_out = 0;
    double best_bits = HistogramDistance(raw[i], symbols[i], 0, out);
    int k;
    for (k = 1; k < out_size; ++k) {
      double cur_bits = HistogramDistance(raw[i], symbols[i], k, out);
      if (cur_bits < best_bits) {
        best_bits = cur_bits;
        best_out = k;
      }
    }
    symbols[i] = best_out;
  }

  // Recompute each out based on raw and symbols.
  for (i = 0; i < out_size; ++i) {
    HistogramClear(out[i]);
  }
  for (i = 0; i < raw_size; ++i) {
    HistogramAdd(out[symbols[i]], raw[i]);
  }
}
152  src/enc/histogram.h  Normal file
@@ -0,0 +1,152 @@
|
||||
// Copyright 2012 Google Inc. All Rights Reserved.
//
// This code is licensed under the same terms as WebM:
// Software License Agreement: http://www.webmproject.org/license/software/
// Additional IP Rights Grant: http://www.webmproject.org/license/additional/
// -----------------------------------------------------------------------------
//
// Author: Jyrki Alakuijala (jyrki@google.com)
//
// Models the histograms of literal and distance codes.

#ifndef WEBP_ENC_HISTOGRAM_H_
#define WEBP_ENC_HISTOGRAM_H_

#include <assert.h>
#include <stddef.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>

#include "./backward_references.h"
#include "../webp/types.h"

#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif

// A simple container for histograms of data.
typedef struct {
  // literal_ contains green literal, palette-code and
  // copy-length-prefix histogram
  int literal_[PIX_OR_COPY_CODES_MAX];
  int red_[256];
  int blue_[256];
  int alpha_[256];
  // Backward reference prefix-code histogram.
  int distance_[DISTANCE_CODES_MAX];
  int palette_code_bits_;
} Histogram;

static WEBP_INLINE void HistogramClear(Histogram* const p) {
  memset(&p->literal_[0], 0, sizeof(p->literal_));
  memset(&p->red_[0], 0, sizeof(p->red_));
  memset(&p->blue_[0], 0, sizeof(p->blue_));
  memset(&p->alpha_[0], 0, sizeof(p->alpha_));
  memset(&p->distance_[0], 0, sizeof(p->distance_));
}

static WEBP_INLINE void HistogramInit(Histogram* const p,
                                      int palette_code_bits) {
  p->palette_code_bits_ = palette_code_bits;
  HistogramClear(p);
}

// Create the histogram.
//
// The input data is the PixOrCopy data, which models the
// literals, stop codes and backward references (both distances and lengths)
void HistogramBuild(Histogram* const p,
                    const PixOrCopy* const literal_and_length,
                    int n_literal_and_length);

void HistogramAddSinglePixOrCopy(Histogram* const p, const PixOrCopy v);

// Estimate how many bits the combined entropy of literals and distance
// approximately maps to.
double HistogramEstimateBits(const Histogram* const p);

// This function estimates the Huffman dictionary + other block overhead
// size for creating a new deflate block.
double HistogramEstimateBitsHeader(const Histogram* const p);

// This function estimates the cost in bits excluding the bits needed to
// represent the entropy code itself.
double HistogramEstimateBitsBulk(const Histogram* const p);

static WEBP_INLINE void HistogramAdd(Histogram* const p,
                                     const Histogram* const a) {
  int i;
  for (i = 0; i < PIX_OR_COPY_CODES_MAX; ++i) {
    p->literal_[i] += a->literal_[i];
  }
  for (i = 0; i < DISTANCE_CODES_MAX; ++i) {
    p->distance_[i] += a->distance_[i];
  }
  for (i = 0; i < 256; ++i) {
    p->red_[i] += a->red_[i];
    p->blue_[i] += a->blue_[i];
    p->alpha_[i] += a->alpha_[i];
  }
}

static WEBP_INLINE void HistogramRemove(Histogram* const p,
                                        const Histogram* const a) {
  int i;
  for (i = 0; i < PIX_OR_COPY_CODES_MAX; ++i) {
    p->literal_[i] -= a->literal_[i];
    assert(p->literal_[i] >= 0);
  }
  for (i = 0; i < DISTANCE_CODES_MAX; ++i) {
    p->distance_[i] -= a->distance_[i];
    assert(p->distance_[i] >= 0);
  }
  for (i = 0; i < 256; ++i) {
    p->red_[i] -= a->red_[i];
    p->blue_[i] -= a->blue_[i];
    p->alpha_[i] -= a->alpha_[i];
    assert(p->red_[i] >= 0);
    assert(p->blue_[i] >= 0);
    assert(p->alpha_[i] >= 0);
  }
}

static WEBP_INLINE int HistogramNumPixOrCopyCodes(const Histogram* const p) {
  return 256 + kLengthCodes + (1 << p->palette_code_bits_);
}
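
// Note on HistogramNumPixOrCopyCodes() above: assuming kLengthCodes is 24 (the
// number of length prefix codes used by the backward-reference module), a
// histogram with palette_code_bits_ == 10 has 256 + 24 + 1024 = 1304 literal
// codes.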

void ConvertPopulationCountTableToBitEstimates(
    int n, const int* const population_counts, double* const output);

double ShannonEntropy(const int* const array, int n);

// Build a 2-d image of histograms, at a resolution reduced by a factor of
// (1 << histobits) relative to the original image.
int BuildHistogramImage(int xsize, int ysize,
                        int histobits,
                        int palette_bits,
                        const PixOrCopy* backward_refs,
                        int backward_refs_size,
                        Histogram*** image,
                        int* histogram_size);

// Combines several histograms into fewer histograms.
int CombineHistogramImage(Histogram** in,
                          int in_size,
                          int quality,
                          Histogram*** out,
                          int* out_size);

// Moves histograms from one cluster to another if smaller entropy can
// be achieved by doing that.
void RefineHistogramImage(Histogram** raw,
                          int raw_size,
                          uint32_t* symbols,
                          int out_size,
                          Histogram** out);

#if defined(__cplusplus) || defined(c_plusplus)
}
#endif

#endif  // WEBP_ENC_HISTOGRAM_H_
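
A minimal usage sketch of the histogram API declared above (a sketch only, assuming these declarations are in scope; the backward-reference slice and the palette size are placeholders supplied by the caller):

static double EstimateSliceBits(const PixOrCopy* refs, int num_refs,
                                int palette_bits) {
  Histogram histo;
  HistogramInit(&histo, palette_bits);     // zero all counts, set palette bits
  HistogramBuild(&histo, refs, num_refs);  // accumulate symbol counts
  return HistogramEstimateBits(&histo);    // approximate entropy-coded size
}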
313 src/utils/huffman_encode.c Normal file
@@ -0,0 +1,313 @@
// Copyright 2011 Google Inc. All Rights Reserved.
//
// This code is licensed under the same terms as WebM:
// Software License Agreement: http://www.webmproject.org/license/software/
// Additional IP Rights Grant: http://www.webmproject.org/license/additional/
// -----------------------------------------------------------------------------
//
// Author: jyrki@google.com (Jyrki Alakuijala)
//
// Flate-like entropy encoding (Huffman) for WebP lossless.

#include "./huffman_encode.h"

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

typedef struct {
  int total_count_;
  int value_;
  int pool_index_left_;
  int pool_index_right_;
} HuffmanTree;

// Sort the root nodes, most popular first.
static int CompHuffmanTree(const void* vp0, const void* vp1) {
  const HuffmanTree* v0 = (const HuffmanTree*)vp0;
  const HuffmanTree* v1 = (const HuffmanTree*)vp1;
  if (v0->total_count_ > v1->total_count_) {
    return -1;
  } else if (v0->total_count_ < v1->total_count_) {
    return 1;
  } else {
    if (v0->value_ < v1->value_) {
      return -1;
    }
    if (v0->value_ > v1->value_) {
      return 1;
    }
    return 0;
  }
}

static void SetDepth(const HuffmanTree* p,
                     HuffmanTree* pool,
                     uint8_t* depth,
                     const int level) {
  if (p->pool_index_left_ >= 0) {
    SetDepth(&pool[p->pool_index_left_], pool, depth, level + 1);
    SetDepth(&pool[p->pool_index_right_], pool, depth, level + 1);
  } else {
    depth[p->value_] = level;
  }
}

// This function will create a Huffman tree.
//
// The catch here is that the tree cannot be arbitrarily deep.
// Deflate specifies a maximum depth of 15 bits for "code trees"
// and 7 bits for "code length code trees."
//
// count_limit is used as an artificial floor for the population counts;
// it is doubled until the resulting tree meets the maximum-depth
// requirement.
//
// This algorithm does not perform well for very long data blocks,
// especially when population counts exceed 2**tree_limit, but we do not
// plan to use it with extremely long blocks.
//
// See http://en.wikipedia.org/wiki/Huffman_coding
int CreateHuffmanTree(const int* const histogram, int histogram_size,
                      int tree_depth_limit,
                      uint8_t* const bit_depths) {
  HuffmanTree* tree;
  HuffmanTree* tree_pool;
  int tree_pool_size;
  // For block sizes with less than 64k symbols we never need to do a
  // second iteration of this loop.
  // If we actually start running inside this loop a lot, we would perhaps
  // be better off with the Katajainen algorithm.
  int count_limit;
  for (count_limit = 1; ; count_limit *= 2) {
    int tree_size = 0;
    int i;
    for (i = 0; i < histogram_size; ++i) {
      if (histogram[i]) {
        ++tree_size;
      }
    }
    // 3 * tree_size is enough to cover all the nodes representing a
    // population and all the inserted nodes combining two existing nodes.
    // The tree pool needs 2 * (tree_size - 1) entities, and the
    // tree needs exactly tree_size entities.
    tree = (HuffmanTree*)malloc(3 * tree_size * sizeof(*tree));
    if (tree == NULL) {
      return 0;
    }
    {
      int j = 0;
      int i;
      for (i = 0; i < histogram_size; ++i) {
        if (histogram[i]) {
          const int count =
              (histogram[i] < count_limit) ? count_limit : histogram[i];
          tree[j].total_count_ = count;
          tree[j].value_ = i;
          tree[j].pool_index_left_ = -1;
          tree[j].pool_index_right_ = -1;
          ++j;
        }
      }
    }
    qsort((void*)tree, tree_size, sizeof(*tree), CompHuffmanTree);
    tree_pool = tree + tree_size;
    tree_pool_size = 0;
    if (tree_size >= 2) {
      while (tree_size >= 2) {  // Finish when we have only one root.
        int count;
        tree_pool[tree_pool_size] = tree[tree_size - 1];
        ++tree_pool_size;
        tree_pool[tree_pool_size] = tree[tree_size - 2];
        ++tree_pool_size;
        count =
            tree_pool[tree_pool_size - 1].total_count_ +
            tree_pool[tree_pool_size - 2].total_count_;
        tree_size -= 2;
        {
          int k = 0;
          // Search for the insertion point.
          for (k = 0; k < tree_size; ++k) {
            if (tree[k].total_count_ <= count) {
              break;
            }
          }
          memmove(tree + (k + 1), tree + k, (tree_size - k) * sizeof(*tree));
          tree[k].total_count_ = count;
          tree[k].value_ = -1;

          tree[k].pool_index_left_ = tree_pool_size - 1;
          tree[k].pool_index_right_ = tree_pool_size - 2;
          tree_size = tree_size + 1;
        }
      }
      SetDepth(&tree[0], tree_pool, bit_depths, 0);
    } else {
      if (tree_size == 1) {
        // Only one element.
        bit_depths[tree[0].value_] = 1;
      }
    }
    free(tree);
    // We need to pack the Huffman tree in tree_depth_limit bits.
    // If this was not successful, add fake entities to the lowest values
    // and retry.
    {
      int max_depth = bit_depths[0];
      int j;
      for (j = 1; j < histogram_size; ++j) {
        if (max_depth < bit_depths[j]) {
          max_depth = bit_depths[j];
        }
      }
      if (max_depth <= tree_depth_limit) {
        break;
      }
    }
  }
  return 1;
}
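
A small sketch of how CreateHuffmanTree above is called (the 8-symbol histogram is hypothetical; the 15-bit limit is the deflate "code trees" limit quoted in the comment):

static void ExampleCreateTree(void) {
  static const int histogram[8] = { 20, 0, 3, 1, 1, 0, 0, 10 };
  uint8_t depths[8] = { 0 };
  if (CreateHuffmanTree(histogram, 8, 15, depths)) {
    // Symbols with a zero count keep depth 0 (they get no code);
    // the most frequent symbols, 0 and 7, receive the shortest codes.
  }
}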

static void WriteHuffmanTreeRepetitions(
    const int value,
    const int prev_value,
    int repetitions,
    int* num_symbols,
    uint8_t* tree,
    uint8_t* extra_bits_data) {
  if (value != prev_value) {
    tree[*num_symbols] = value;
    extra_bits_data[*num_symbols] = 0;
    ++(*num_symbols);
    --repetitions;
  }
  while (repetitions >= 1) {
    if (repetitions < 3) {
      int i;
      for (i = 0; i < repetitions; ++i) {
        tree[*num_symbols] = value;
        extra_bits_data[*num_symbols] = 0;
        ++(*num_symbols);
      }
      return;
    } else if (repetitions < 7) {
      // 3 to 6 left
      tree[*num_symbols] = 16;
      extra_bits_data[*num_symbols] = repetitions - 3;
      ++(*num_symbols);
      return;
    } else {
      tree[*num_symbols] = 16;
      extra_bits_data[*num_symbols] = 3;
      ++(*num_symbols);
      repetitions -= 6;
    }
  }
}

static void WriteHuffmanTreeRepetitionsZeros(
    const int value,
    int repetitions,
    int* num_symbols,
    uint8_t* tree,
    uint8_t* extra_bits_data) {
  while (repetitions >= 1) {
    if (repetitions < 3) {
      int i;
      for (i = 0; i < repetitions; ++i) {
        tree[*num_symbols] = value;
        extra_bits_data[*num_symbols] = 0;
        ++(*num_symbols);
      }
      return;
    } else if (repetitions < 11) {
      tree[*num_symbols] = 17;
      extra_bits_data[*num_symbols] = repetitions - 3;
      ++(*num_symbols);
      return;
    } else if (repetitions < 139) {
      tree[*num_symbols] = 18;
      extra_bits_data[*num_symbols] = repetitions - 11;
      ++(*num_symbols);
      return;
    } else {
      tree[*num_symbols] = 18;
      extra_bits_data[*num_symbols] = 0x7f;  // 138 repeated 0s
      ++(*num_symbols);
      repetitions -= 138;
    }
  }
}

void CreateCompressedHuffmanTree(const uint8_t* depth,
                                 int depth_size,
                                 int* num_symbols,
                                 uint8_t* tree,
                                 uint8_t* extra_bits_data) {
  int prev_value = 8;  // 8 is the initial value for rle.
  int i;
  for (i = 0; i < depth_size;) {
    const int value = depth[i];
    int reps = 1;
    int k;
    for (k = i + 1; k < depth_size && depth[k] == value; ++k) {
      ++reps;
    }
    if (value == 0) {
      WriteHuffmanTreeRepetitionsZeros(value, reps,
                                       num_symbols,
                                       tree, extra_bits_data);
    } else {
      WriteHuffmanTreeRepetitions(value, prev_value, reps,
                                  num_symbols,
                                  tree, extra_bits_data);
      prev_value = value;
    }
    i += reps;
  }
}
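
A worked sketch of the run-length scheme implemented above, on the made-up depth array {4, 4, 4, 4, 4, 0, 0, 0, 2}: the first 4 is emitted literally, code 16 with extra bits 1 repeats it 4 more times, code 17 with extra bits 0 covers the three zeros, and the final 2 is emitted literally.

static void ExampleCompressedTree(void) {
  static const uint8_t depths[9] = { 4, 4, 4, 4, 4, 0, 0, 0, 2 };
  uint8_t tree[9];        // at most one output symbol per input depth
  uint8_t extra_bits[9];
  int num_symbols = 0;
  CreateCompressedHuffmanTree(depths, 9, &num_symbols, tree, extra_bits);
  // num_symbols == 4, tree == {4, 16, 17, 2}, extra_bits == {0, 1, 0, 0}.
}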

static uint32_t ReverseBits(int num_bits, uint32_t bits) {
  uint32_t retval = 0;
  int i;
  for (i = 0; i < num_bits; ++i) {
    retval <<= 1;
    retval |= bits & 1;
    bits >>= 1;
  }
  return retval;
}

void ConvertBitDepthsToSymbols(const uint8_t* depth, int len,
                               uint16_t* bits) {
  // This function is based on RFC 1951.
  //
  // In deflate, all bit depths are [1..15]
  // 0 bit depth means that the symbol does not exist.

  // 0..15 are values for bits
#define MAX_BITS 16
  uint32_t next_code[MAX_BITS];
  uint32_t bl_count[MAX_BITS] = { 0 };
  int i;
  {
    for (i = 0; i < len; ++i) {
      ++bl_count[depth[i]];
    }
    bl_count[0] = 0;
  }
  next_code[0] = 0;
  {
    int code = 0;
    int bits;
    for (bits = 1; bits < MAX_BITS; ++bits) {
      code = (code + bl_count[bits - 1]) << 1;
      next_code[bits] = code;
    }
  }
  for (i = 0; i < len; ++i) {
    if (depth[i]) {
      bits[i] = ReverseBits(depth[i], next_code[depth[i]]++);
    }
  }
}
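
A quick check of the canonical-code construction above, on the hypothetical depth array {2, 1, 3, 3}: per RFC 1951 the codes assigned in symbol order are 10, 0, 110 and 111, and the function stores each code bit-reversed.

static void ExampleCanonicalCodes(void) {
  static const uint8_t depths[4] = { 2, 1, 3, 3 };
  uint16_t codes[4] = { 0 };
  ConvertBitDepthsToSymbols(depths, 4, codes);
  // codes == { 1, 0, 3, 7 }: "10", "0", "110", "111", each bit-reversed.
}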
52 src/utils/huffman_encode.h Normal file
@@ -0,0 +1,52 @@
// Copyright 2011 Google Inc. All Rights Reserved.
//
// This code is licensed under the same terms as WebM:
// Software License Agreement: http://www.webmproject.org/license/software/
// Additional IP Rights Grant: http://www.webmproject.org/license/additional/
// -----------------------------------------------------------------------------
//
// Author: jyrki@google.com (Jyrki Alakuijala)
//
// Flate-like entropy encoding (Huffman) for WebP lossless.

#ifndef WEBP_UTILS_ENTROPY_ENCODE_H_
#define WEBP_UTILS_ENTROPY_ENCODE_H_

#include <stdint.h>

#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif

// This function will create a Huffman tree.
//
// The (data, length) pair contains the population counts.
// The tree_limit is the maximum bit depth of the Huffman codes.
//
// On return, depth contains the resulting code lengths, i.e., how many
// bits are used for each symbol.
//
// See http://en.wikipedia.org/wiki/Huffman_coding
//
// Returns 0 when an error has occurred.
int CreateHuffmanTree(const int* data,
                      const int length,
                      const int tree_limit,
                      uint8_t* depth);

// Write a Huffman tree from bit depths into the deflate representation
// of a Huffman tree. In deflate, the generated Huffman tree is to be
// compressed once more using a Huffman tree.
void CreateCompressedHuffmanTree(const uint8_t* depth, int len,
                                 int* num_symbols,
                                 uint8_t* tree,
                                 uint8_t* extra_bits_data);

// Get the actual bit values for a tree of bit depths.
void ConvertBitDepthsToSymbols(const uint8_t* depth, int len, uint16_t* bits);

#if defined(__cplusplus) || defined(c_plusplus)
}
#endif

#endif  // WEBP_UTILS_ENTROPY_ENCODE_H_
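
A minimal end-to-end sketch tying the three declarations above together (a sketch only: the population counts and the output arrays, each with at least 'length' entries, are assumed to be provided by the caller):

static int ExampleBuildCodes(const int* counts, int length,
                             uint8_t* depths, uint16_t* code_words,
                             uint8_t* rle, uint8_t* rle_extra_bits,
                             int* rle_size) {
  int i;
  for (i = 0; i < length; ++i) depths[i] = 0;   // unused symbols keep depth 0
  if (!CreateHuffmanTree(counts, length, 15, depths)) {
    return 0;  // error (e.g. out of memory)
  }
  *rle_size = 0;
  CreateCompressedHuffmanTree(depths, length, rle_size, rle, rle_extra_bits);
  ConvertBitDepthsToSymbols(depths, length, code_words);
  return 1;
}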