Merge "Reduce memory footprint for encoding WebP lossless."

This commit is contained in:
pascal massimino 2014-04-26 01:28:43 -07:00 committed by Gerrit Code Review
commit 35d7d095dd
8 changed files with 472 additions and 328 deletions

View File

@ -21,8 +21,6 @@
#define VALUES_IN_BYTE 256
#define HASH_BITS 18
#define HASH_SIZE (1 << HASH_BITS)
#define HASH_MULTIPLIER (0xc6a4a7935bd1e995ULL)
#define MAX_ENTROPY (1e30f)
@ -34,14 +32,6 @@
#define MIN_LENGTH 2
#define MAX_LENGTH 4096
// Hash-chain used to find backward-reference matches during LZ77 search.
typedef struct {
// Stores the most recently added position with the given hash value.
int32_t hash_to_first_index_[HASH_SIZE];
// chain_[pos] stores the previous position with the same hash value
// for every pixel in the image.
int32_t* chain_;
} HashChain;
// -----------------------------------------------------------------------------
static const uint8_t plane_to_code_lut[128] = {
@ -79,29 +69,44 @@ static WEBP_INLINE int FindMatchLength(const uint32_t* const array1,
// -----------------------------------------------------------------------------
// VP8LBackwardRefs
// Put 'refs' into its default empty state (no storage, zero counts).
// A NULL pointer is tolerated and treated as a no-op.
void VP8LInitBackwardRefs(VP8LBackwardRefs* const refs) {
  if (refs == NULL) return;
  refs->refs = NULL;
  refs->size = 0;
  refs->max_size = 0;
}
// Release the reference storage held by 'refs' and reset the object to its
// initial state. A NULL pointer is tolerated.
void VP8LClearBackwardRefs(VP8LBackwardRefs* const refs) {
  if (refs == NULL) return;
  WebPSafeFree(refs->refs);
  VP8LInitBackwardRefs(refs);
}
int VP8LBackwardRefsAlloc(VP8LBackwardRefs* const refs, int max_size) {
// Drop all stored references; the allocated capacity (max_size and the
// refs buffer) is kept for reuse. 'refs' must not be NULL.
static void ClearBackwardRefs(VP8LBackwardRefs* const refs) {
assert(refs != NULL);
refs->size = 0;
}
// Destroy a heap-allocated VP8LBackwardRefs: first its reference storage,
// then the object itself. A NULL pointer is tolerated.
void VP8LBackwardRefsDelete(VP8LBackwardRefs* const refs) {
  if (refs == NULL) return;
  WebPSafeFree(refs->refs);
  WebPSafeFree(refs);
}
VP8LBackwardRefs* VP8LBackwardRefsNew(int max_size) {
VP8LBackwardRefs* const refs =
(VP8LBackwardRefs*)WebPSafeMalloc(1ULL, sizeof(*refs));
if (refs == NULL) {
return NULL;
}
ClearBackwardRefs(refs);
refs->max_size = 0;
refs->refs = (PixOrCopy*)WebPSafeMalloc((uint64_t)max_size,
sizeof(*refs->refs));
if (refs->refs == NULL) return 0;
sizeof(*refs->refs));
if (refs->refs == NULL) {
WebPSafeFree(refs);
return NULL;
}
refs->max_size = max_size;
return refs;
}
// Duplicate the contents of 'src' into 'dst'. The two objects must have
// been allocated with the same capacity: returns 0 on a max_size mismatch
// (nothing is copied), 1 on success.
int VP8LBackwardRefsCopy(const VP8LBackwardRefs* const src,
                         VP8LBackwardRefs* const dst) {
  assert(src != NULL && dst != NULL);
  if (src->max_size != dst->max_size) return 0;
  dst->size = src->size;
  memcpy(dst->refs, src->refs, src->size * sizeof(*src->refs));
  return 1;
}
@ -114,9 +119,19 @@ static WEBP_INLINE uint64_t GetPixPairHash64(const uint32_t* const argb) {
return key;
}
static HashChain* HashChainNew(int size) {
static void HashChainInit(VP8LHashChain* const p) {
int i;
HashChain* const p = (HashChain*)WebPSafeMalloc(1ULL, sizeof(*p));
assert(p != NULL);
for (i = 0; i < p->size_; ++i) {
p->chain_[i] = -1;
}
for (i = 0; i < HASH_SIZE; ++i) {
p->hash_to_first_index_[i] = -1;
}
}
VP8LHashChain* VP8LHashChainNew(int size) {
VP8LHashChain* const p = (VP8LHashChain*)WebPSafeMalloc(1ULL, sizeof(*p));
if (p == NULL) {
return NULL;
}
@ -125,16 +140,12 @@ static HashChain* HashChainNew(int size) {
WebPSafeFree(p);
return NULL;
}
for (i = 0; i < size; ++i) {
p->chain_[i] = -1;
}
for (i = 0; i < HASH_SIZE; ++i) {
p->hash_to_first_index_[i] = -1;
}
p->size_ = size;
HashChainInit(p);
return p;
}
static void HashChainDelete(HashChain* const p) {
void VP8LHashChainDelete(VP8LHashChain* const p) {
if (p != NULL) {
WebPSafeFree(p->chain_);
WebPSafeFree(p);
@ -142,7 +153,7 @@ static void HashChainDelete(HashChain* const p) {
}
// Insertion of two pixels at a time.
static void HashChainInsert(HashChain* const p,
static void HashChainInsert(VP8LHashChain* const p,
const uint32_t* const argb, int pos) {
const uint64_t hash_code = GetPixPairHash64(argb);
p->chain_[pos] = p->hash_to_first_index_[hash_code];
@ -167,7 +178,7 @@ static void GetParamsForHashChainFindCopy(int quality, int xsize,
*iter_limit = (cache_bits > 0) ? iter_neg : iter_neg / 2;
}
static int HashChainFindCopy(const HashChain* const p,
static int HashChainFindCopy(const VP8LHashChain* const p,
int base_position, int xsize_signed,
const uint32_t* const argb, int max_len,
int window_size, int iter_pos, int iter_limit,
@ -260,7 +271,7 @@ static void BackwardReferencesRle(int xsize, int ysize,
const int pix_count = xsize * ysize;
int match_len = 0;
int i;
refs->size = 0;
ClearBackwardRefs(refs);
PushBackCopy(refs, match_len); // i=0 case
refs->refs[refs->size++] = PixOrCopyCreateLiteral(argb[0]);
for (i = 1; i < pix_count; ++i) {
@ -278,28 +289,27 @@ static void BackwardReferencesRle(int xsize, int ysize,
static int BackwardReferencesHashChain(int xsize, int ysize,
const uint32_t* const argb,
int cache_bits, int quality,
VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const refs) {
int i;
int ok = 0;
int cc_init = 0;
const int use_color_cache = (cache_bits > 0);
const int pix_count = xsize * ysize;
HashChain* const hash_chain = HashChainNew(pix_count);
VP8LColorCache hashers;
int window_size = WINDOW_SIZE;
int iter_pos = 1;
int iter_limit = -1;
if (hash_chain == NULL) return 0;
if (use_color_cache) {
cc_init = VP8LColorCacheInit(&hashers, cache_bits);
if (!cc_init) goto Error;
}
refs->size = 0;
ClearBackwardRefs(refs);
GetParamsForHashChainFindCopy(quality, xsize, cache_bits,
&window_size, &iter_pos, &iter_limit);
HashChainInit(hash_chain);
for (i = 0; i < pix_count; ) {
// Alternative#1: Code the pixels starting at 'i' using backward reference.
int offset = 0;
@ -375,7 +385,6 @@ static int BackwardReferencesHashChain(int xsize, int ysize,
ok = 1;
Error:
if (cc_init) VP8LColorCacheClear(&hashers);
HashChainDelete(hash_chain);
return ok;
}
@ -392,6 +401,7 @@ typedef struct {
static int BackwardReferencesTraceBackwards(
int xsize, int ysize, int recursive_cost_model,
const uint32_t* const argb, int quality, int cache_bits,
VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const refs);
static void ConvertPopulationCountTableToBitEstimates(
@ -417,40 +427,45 @@ static void ConvertPopulationCountTableToBitEstimates(
static int CostModelBuild(CostModel* const m, int xsize, int ysize,
int recursion_level, const uint32_t* const argb,
int quality, int cache_bits) {
int quality, int cache_bits,
VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const refs) {
int ok = 0;
VP8LHistogram histo;
VP8LBackwardRefs refs;
if (!VP8LBackwardRefsAlloc(&refs, xsize * ysize)) goto Error;
VP8LHistogram* histo = NULL;
ClearBackwardRefs(refs);
if (recursion_level > 0) {
if (!BackwardReferencesTraceBackwards(xsize, ysize, recursion_level - 1,
argb, quality, cache_bits, &refs)) {
argb, quality, cache_bits, hash_chain,
refs)) {
goto Error;
}
} else {
if (!BackwardReferencesHashChain(xsize, ysize, argb, cache_bits, quality,
&refs)) {
hash_chain, refs)) {
goto Error;
}
}
VP8LHistogramCreate(&histo, &refs, cache_bits);
histo = VP8LAllocateHistogram(cache_bits);
if (histo == NULL) goto Error;
VP8LHistogramCreate(histo, refs, cache_bits);
ConvertPopulationCountTableToBitEstimates(
VP8LHistogramNumCodes(histo.palette_code_bits_),
histo.literal_, m->literal_);
VP8LHistogramNumCodes(histo->palette_code_bits_),
histo->literal_, m->literal_);
ConvertPopulationCountTableToBitEstimates(
VALUES_IN_BYTE, histo.red_, m->red_);
VALUES_IN_BYTE, histo->red_, m->red_);
ConvertPopulationCountTableToBitEstimates(
VALUES_IN_BYTE, histo.blue_, m->blue_);
VALUES_IN_BYTE, histo->blue_, m->blue_);
ConvertPopulationCountTableToBitEstimates(
VALUES_IN_BYTE, histo.alpha_, m->alpha_);
VALUES_IN_BYTE, histo->alpha_, m->alpha_);
ConvertPopulationCountTableToBitEstimates(
NUM_DISTANCE_CODES, histo.distance_, m->distance_);
NUM_DISTANCE_CODES, histo->distance_, m->distance_);
ok = 1;
Error:
VP8LClearBackwardRefs(&refs);
VP8LFreeHistogram(histo);
return ok;
}
@ -482,7 +497,8 @@ static WEBP_INLINE double GetDistanceCost(const CostModel* const m,
static int BackwardReferencesHashChainDistanceOnly(
int xsize, int ysize, int recursive_cost_model, const uint32_t* const argb,
int quality, int cache_bits, uint32_t* const dist_array) {
int quality, int cache_bits, VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const refs, uint32_t* const dist_array) {
int i;
int ok = 0;
int cc_init = 0;
@ -491,7 +507,6 @@ static int BackwardReferencesHashChainDistanceOnly(
float* const cost =
(float*)WebPSafeMalloc((uint64_t)pix_count, sizeof(*cost));
CostModel* cost_model = (CostModel*)WebPSafeMalloc(1ULL, sizeof(*cost_model));
HashChain* hash_chain = HashChainNew(pix_count);
VP8LColorCache hashers;
const double mul0 = (recursive_cost_model != 0) ? 1.0 : 0.68;
const double mul1 = (recursive_cost_model != 0) ? 1.0 : 0.82;
@ -500,7 +515,7 @@ static int BackwardReferencesHashChainDistanceOnly(
int iter_pos = 1;
int iter_limit = -1;
if (cost == NULL || cost_model == NULL || hash_chain == NULL) goto Error;
if (cost == NULL || cost_model == NULL) goto Error;
if (use_color_cache) {
cc_init = VP8LColorCacheInit(&hashers, cache_bits);
@ -508,7 +523,7 @@ static int BackwardReferencesHashChainDistanceOnly(
}
if (!CostModelBuild(cost_model, xsize, ysize, recursive_cost_model, argb,
quality, cache_bits)) {
quality, cache_bits, hash_chain, refs)) {
goto Error;
}
@ -519,6 +534,7 @@ static int BackwardReferencesHashChainDistanceOnly(
dist_array[0] = 0;
GetParamsForHashChainFindCopy(quality, xsize, cache_bits,
&window_size, &iter_pos, &iter_limit);
HashChainInit(hash_chain);
for (i = 0; i < pix_count; ++i) {
double prev_cost = 0.0;
int shortmax;
@ -596,7 +612,6 @@ static int BackwardReferencesHashChainDistanceOnly(
ok = 1;
Error:
if (cc_init) VP8LColorCacheClear(&hashers);
HashChainDelete(hash_chain);
WebPSafeFree(cost_model);
WebPSafeFree(cost);
return ok;
@ -625,6 +640,7 @@ static int BackwardReferencesHashChainFollowChosenPath(
int xsize, int ysize, const uint32_t* const argb,
int quality, int cache_bits,
const uint32_t* const chosen_path, int chosen_path_size,
VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const refs) {
const int pix_count = xsize * ysize;
const int use_color_cache = (cache_bits > 0);
@ -637,19 +653,17 @@ static int BackwardReferencesHashChainFollowChosenPath(
int window_size = WINDOW_SIZE;
int iter_pos = 1;
int iter_limit = -1;
HashChain* hash_chain = HashChainNew(pix_count);
VP8LColorCache hashers;
if (hash_chain == NULL) goto Error;
if (use_color_cache) {
cc_init = VP8LColorCacheInit(&hashers, cache_bits);
if (!cc_init) goto Error;
}
refs->size = 0;
ClearBackwardRefs(refs);
GetParamsForHashChainFindCopy(quality, xsize, cache_bits,
&window_size, &iter_pos, &iter_limit);
HashChainInit(hash_chain);
for (ix = 0; ix < chosen_path_size; ++ix, ++size) {
int offset = 0;
int len = 0;
@ -692,7 +706,6 @@ static int BackwardReferencesHashChainFollowChosenPath(
ok = 1;
Error:
if (cc_init) VP8LColorCacheClear(&hashers);
HashChainDelete(hash_chain);
return ok;
}
@ -701,6 +714,7 @@ static int BackwardReferencesTraceBackwards(int xsize, int ysize,
int recursive_cost_model,
const uint32_t* const argb,
int quality, int cache_bits,
VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const refs) {
int ok = 0;
const int dist_array_size = xsize * ysize;
@ -712,14 +726,14 @@ static int BackwardReferencesTraceBackwards(int xsize, int ysize,
if (dist_array == NULL) goto Error;
if (!BackwardReferencesHashChainDistanceOnly(
xsize, ysize, recursive_cost_model, argb, quality, cache_bits,
dist_array)) {
xsize, ysize, recursive_cost_model, argb, quality, cache_bits, hash_chain,
refs, dist_array)) {
goto Error;
}
TraceBackwards(dist_array, dist_array_size, &chosen_path, &chosen_path_size);
if (!BackwardReferencesHashChainFollowChosenPath(
xsize, ysize, argb, quality, cache_bits, chosen_path, chosen_path_size,
refs)) {
hash_chain, refs)) {
goto Error;
}
ok = 1;
@ -740,82 +754,64 @@ static void BackwardReferences2DLocality(int xsize,
}
}
int VP8LGetBackwardReferences(int width, int height,
const uint32_t* const argb,
int quality, int cache_bits, int use_2d_locality,
VP8LBackwardRefs* const best) {
int ok = 0;
VP8LBackwardRefs* VP8LGetBackwardReferences(
int width, int height, const uint32_t* const argb, int quality,
int cache_bits, int use_2d_locality, VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const refs_array[2]) {
int lz77_is_useful;
VP8LBackwardRefs refs_rle, refs_lz77;
const int num_pix = width * height;
VP8LBackwardRefs* best = NULL;
VP8LBackwardRefs* const refs_lz77 = refs_array[0];
VP8LBackwardRefs* const refs_rle = refs_array[1];
VP8LBackwardRefsAlloc(&refs_rle, num_pix);
VP8LBackwardRefsAlloc(&refs_lz77, num_pix);
VP8LInitBackwardRefs(best);
if (refs_rle.refs == NULL || refs_lz77.refs == NULL) {
Error1:
VP8LClearBackwardRefs(&refs_rle);
VP8LClearBackwardRefs(&refs_lz77);
goto End;
}
ClearBackwardRefs(refs_lz77);
if (!BackwardReferencesHashChain(width, height, argb, cache_bits, quality,
&refs_lz77)) {
hash_chain, refs_lz77)) {
goto End;
}
// Backward Reference using RLE only.
BackwardReferencesRle(width, height, argb, &refs_rle);
ClearBackwardRefs(refs_rle);
BackwardReferencesRle(width, height, argb, refs_rle);
{
double bit_cost_lz77, bit_cost_rle;
VP8LHistogram* const histo =
(VP8LHistogram*)WebPSafeMalloc(1ULL, sizeof(*histo));
if (histo == NULL) goto Error1;
// Evaluate lz77 coding
VP8LHistogramCreate(histo, &refs_lz77, cache_bits);
VP8LHistogram* const histo = VP8LAllocateHistogram(cache_bits);
if (histo == NULL) goto End;
// Evaluate LZ77 coding.
VP8LHistogramCreate(histo, refs_lz77, cache_bits);
bit_cost_lz77 = VP8LHistogramEstimateBits(histo);
// Evaluate RLE coding
VP8LHistogramCreate(histo, &refs_rle, cache_bits);
// Evaluate RLE coding.
VP8LHistogramCreate(histo, refs_rle, cache_bits);
bit_cost_rle = VP8LHistogramEstimateBits(histo);
// Decide if LZ77 is useful.
lz77_is_useful = (bit_cost_lz77 < bit_cost_rle);
WebPSafeFree(histo);
VP8LFreeHistogram(histo);
}
// Choose appropriate backward reference.
if (lz77_is_useful) {
// TraceBackwards is costly. Don't execute it at lower quality.
const int try_lz77_trace_backwards = (quality >= 25);
*best = refs_lz77; // default guess: lz77 is better
VP8LClearBackwardRefs(&refs_rle);
best = refs_lz77; // default guess: lz77 is better
if (try_lz77_trace_backwards) {
// Set recursion level for large images using a color cache.
const int recursion_level =
(num_pix < 320 * 200) && (cache_bits > 0) ? 1 : 0;
VP8LBackwardRefs refs_trace;
if (!VP8LBackwardRefsAlloc(&refs_trace, num_pix)) {
goto End;
}
VP8LBackwardRefs* const refs_trace = refs_array[1];
ClearBackwardRefs(refs_trace);
if (BackwardReferencesTraceBackwards(width, height, recursion_level, argb,
quality, cache_bits, &refs_trace)) {
VP8LClearBackwardRefs(&refs_lz77);
*best = refs_trace;
quality, cache_bits, hash_chain,
refs_trace)) {
best = refs_trace;
}
}
} else {
VP8LClearBackwardRefs(&refs_lz77);
*best = refs_rle;
best = refs_rle;
}
if (use_2d_locality) BackwardReferences2DLocality(width, best);
ok = 1;
End:
if (!ok) {
VP8LClearBackwardRefs(best);
}
return ok;
return best;
}
// Returns entropy for the given cache bits.
@ -828,17 +824,17 @@ static double ComputeCacheEntropy(const uint32_t* const argb,
uint32_t k;
const int use_color_cache = (cache_bits > 0);
int cc_init = 0;
double entropy;
double entropy = MAX_ENTROPY;
const double kSmallPenaltyForLargeCache = 4.0;
VP8LColorCache hashers;
VP8LHistogram histo;
VP8LHistogram* histo = VP8LAllocateHistogram(cache_bits);
if (histo == NULL) goto Error;
if (use_color_cache) {
cc_init = VP8LColorCacheInit(&hashers, cache_bits);
if (!cc_init) return MAX_ENTROPY;
if (!cc_init) goto Error;
}
VP8LHistogramInit(&histo, cache_bits);
for (i = 0; i < refs->size; ++i) {
const PixOrCopy* const v = &refs->refs[i];
if (PixOrCopyIsLiteral(v)) {
@ -847,12 +843,12 @@ static double ComputeCacheEntropy(const uint32_t* const argb,
// push pixel as a cache index
const int ix = VP8LColorCacheGetIndex(&hashers, argb[pixel_index]);
const PixOrCopy token = PixOrCopyCreateCacheIdx(ix);
VP8LHistogramAddSinglePixOrCopy(&histo, &token);
VP8LHistogramAddSinglePixOrCopy(histo, &token);
} else {
VP8LHistogramAddSinglePixOrCopy(&histo, v);
VP8LHistogramAddSinglePixOrCopy(histo, v);
}
} else {
VP8LHistogramAddSinglePixOrCopy(&histo, v);
VP8LHistogramAddSinglePixOrCopy(histo, v);
}
if (use_color_cache) {
for (k = 0; k < PixOrCopyLength(v); ++k) {
@ -864,15 +860,19 @@ static double ComputeCacheEntropy(const uint32_t* const argb,
assert(pixel_index == xsize * ysize);
(void)xsize; // xsize is not used in non-debug compilations otherwise.
(void)ysize; // ysize is not used in non-debug compilations otherwise.
entropy = VP8LHistogramEstimateBits(&histo) +
entropy = VP8LHistogramEstimateBits(histo) +
kSmallPenaltyForLargeCache * cache_bits;
Error:
if (cc_init) VP8LColorCacheClear(&hashers);
VP8LFreeHistogram(histo);
return entropy;
}
// Returns how many bits are to be used for a color cache.
int VP8LCalculateEstimateForCacheSize(const uint32_t* const argb,
int xsize, int ysize, int quality,
VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const refs,
int* const best_cache_bits) {
int ok = 0;
int eval_low = 1;
@ -881,22 +881,22 @@ int VP8LCalculateEstimateForCacheSize(const uint32_t* const argb,
double entropy_high = MAX_ENTROPY;
int cache_bits_low = 0;
int cache_bits_high = MAX_COLOR_CACHE_BITS;
VP8LBackwardRefs refs;
if (!VP8LBackwardRefsAlloc(&refs, xsize * ysize) ||
!BackwardReferencesHashChain(xsize, ysize, argb, 0, quality, &refs)) {
ClearBackwardRefs(refs);
if (!BackwardReferencesHashChain(xsize, ysize, argb, 0, quality, hash_chain,
refs)) {
goto Error;
}
// Do a binary search to find the optimal entropy for cache_bits.
while (cache_bits_high - cache_bits_low > 1) {
if (eval_low) {
entropy_low =
ComputeCacheEntropy(argb, xsize, ysize, &refs, cache_bits_low);
ComputeCacheEntropy(argb, xsize, ysize, refs, cache_bits_low);
eval_low = 0;
}
if (eval_high) {
entropy_high =
ComputeCacheEntropy(argb, xsize, ysize, &refs, cache_bits_high);
ComputeCacheEntropy(argb, xsize, ysize, refs, cache_bits_high);
eval_high = 0;
}
if (entropy_high < entropy_low) {
@ -911,6 +911,5 @@ int VP8LCalculateEstimateForCacheSize(const uint32_t* const argb,
}
ok = 1;
Error:
VP8LClearBackwardRefs(&refs);
return ok;
}

View File

@ -115,34 +115,59 @@ static WEBP_INLINE uint32_t PixOrCopyDistance(const PixOrCopy* const p) {
// -----------------------------------------------------------------------------
// VP8LBackwardRefs
typedef struct {
#define HASH_BITS 18
#define HASH_SIZE (1 << HASH_BITS)
typedef struct VP8LHashChain VP8LHashChain;
struct VP8LHashChain {
// Stores the most recently added position with the given hash value.
int32_t hash_to_first_index_[HASH_SIZE];
// chain_[pos] stores the previous position with the same hash value
// for every pixel in the image.
int32_t* chain_;
// This is the maximum size of the hash_chain that can be constructed.
// Typically this is the pixel count (width x height) for a given image.
int size_;
};
VP8LHashChain* VP8LHashChainNew(int size);
void VP8LHashChainDelete(VP8LHashChain* const p);
typedef struct VP8LBackwardRefs VP8LBackwardRefs;
struct VP8LBackwardRefs {
PixOrCopy* refs;
int size; // currently used
int max_size; // maximum capacity
} VP8LBackwardRefs;
};
// Initialize the object. Must be called first. 'refs' can be NULL.
void VP8LInitBackwardRefs(VP8LBackwardRefs* const refs);
// Release backward references. 'refs' can be NULL.
void VP8LBackwardRefsDelete(VP8LBackwardRefs* const refs);
// Release memory and re-initialize the object. 'refs' can be NULL.
void VP8LClearBackwardRefs(VP8LBackwardRefs* const refs);
// Allocate 'max_size' references. Returns NULL in case of memory error.
VP8LBackwardRefs* VP8LBackwardRefsNew(int max_size);
// Allocate 'max_size' references. Returns false in case of memory error.
int VP8LBackwardRefsAlloc(VP8LBackwardRefs* const refs, int max_size);
// Copies the 'src' backward refs to the 'dst'. Returns 0 if there's mismatch
// in the capacity (max_size) of 'src' and 'dst' refs.
int VP8LBackwardRefsCopy(const VP8LBackwardRefs* const src,
VP8LBackwardRefs* const dst);
// -----------------------------------------------------------------------------
// Main entry points
// Evaluates best possible backward references for specified quality.
// Further optimize for 2D locality if use_2d_locality flag is set.
int VP8LGetBackwardReferences(int width, int height,
const uint32_t* const argb,
int quality, int cache_bits, int use_2d_locality,
VP8LBackwardRefs* const best);
// The return value is the pointer to the best of the two backward refs viz,
// refs[0] or refs[1].
VP8LBackwardRefs* VP8LGetBackwardReferences(
int width, int height, const uint32_t* const argb, int quality,
int cache_bits, int use_2d_locality, VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const refs[2]);
// Produce an estimate for a good color cache size for the image.
int VP8LCalculateEstimateForCacheSize(const uint32_t* const argb,
int xsize, int ysize, int quality,
VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const ref,
int* const best_cache_bits);
#ifdef __cplusplus

View File

@ -29,12 +29,35 @@
#define BIN_SIZE (NUM_PARTITIONS * NUM_PARTITIONS * NUM_PARTITIONS)
static void HistogramClear(VP8LHistogram* const p) {
memset(p->literal_, 0, sizeof(p->literal_));
memset(p->red_, 0, sizeof(p->red_));
memset(p->blue_, 0, sizeof(p->blue_));
memset(p->alpha_, 0, sizeof(p->alpha_));
memset(p->distance_, 0, sizeof(p->distance_));
p->bit_cost_ = 0;
int* const literal = p->literal_;
const int cache_bits = p->palette_code_bits_;
const uint64_t histo_size = VP8LGetHistogramSize(cache_bits);
memset(p, 0, histo_size);
p->palette_code_bits_ = cache_bits;
p->literal_ = literal;
}
// Copy the full histogram payload of 'src' into 'dst' while keeping dst's
// own trailing literal_ buffer pointer intact. Both histograms must have
// the same palette_code_bits_ (asserted).
static void HistogramCopy(const VP8LHistogram* const src,
                          VP8LHistogram* const dst) {
  int* const saved_literal = dst->literal_;
  const int cache_bits = dst->palette_code_bits_;
  assert(src->palette_code_bits_ == cache_bits);
  // The allocation layout places literal_[] right after the struct, so a
  // single raw copy of VP8LGetHistogramSize() bytes moves everything.
  memcpy(dst, src, (uint64_t)VP8LGetHistogramSize(cache_bits));
  dst->literal_ = saved_literal;
}
// Total number of bytes needed for one histogram with the given cache bits:
// the struct itself plus the trailing literal_[] array allocated with it.
// NOTE(review): the uint64_t expression is narrowed to int on return —
// harmless for realistic cache_bits values, but worth confirming.
int VP8LGetHistogramSize(int cache_bits) {
const uint64_t literal_size = VP8LHistogramNumCodes(cache_bits);
return sizeof(VP8LHistogram) + sizeof(int) * literal_size;
}
// Release a histogram created by VP8LAllocateHistogram(); the struct and
// its literal_[] buffer live in one heap block, so a single free suffices.
void VP8LFreeHistogram(VP8LHistogram* const histo) {
WebPSafeFree(histo);
}
// Release a set created by VP8LAllocateHistogramSet(); the set header, the
// pointer table and all histograms share one heap block.
void VP8LFreeHistogramSet(VP8LHistogramSet* const histo) {
WebPSafeFree(histo);
}
void VP8LHistogramStoreRefs(const VP8LBackwardRefs* const refs,
@ -60,13 +83,24 @@ void VP8LHistogramInit(VP8LHistogram* const p, int palette_code_bits) {
HistogramClear(p);
}
// Allocate a histogram together with its trailing literal_[] buffer as a
// single heap block, and initialize it for 'cache_bits'. Returns NULL on
// allocation failure.
VP8LHistogram* VP8LAllocateHistogram(int cache_bits) {
  const uint64_t total_size = VP8LGetHistogramSize(cache_bits);
  uint8_t* const memory = (uint8_t*)WebPSafeMalloc(total_size, sizeof(*memory));
  VP8LHistogram* histo;
  if (memory == NULL) return NULL;
  histo = (VP8LHistogram*)memory;
  // literal_ points just past the struct; it won't necessarily be aligned.
  histo->literal_ = (int*)(memory + sizeof(VP8LHistogram));
  VP8LHistogramInit(histo, cache_bits);
  return histo;
}
VP8LHistogramSet* VP8LAllocateHistogramSet(int size, int cache_bits) {
int i;
VP8LHistogramSet* set;
VP8LHistogram* bulk;
const uint64_t total_size = sizeof(*set)
+ (uint64_t)size * sizeof(*set->histograms)
+ (uint64_t)size * sizeof(**set->histograms);
+ (uint64_t)size * VP8LGetHistogramSize(cache_bits);
uint8_t* memory = (uint8_t*)WebPSafeMalloc(total_size, sizeof(*memory));
if (memory == NULL) return NULL;
@ -74,12 +108,15 @@ VP8LHistogramSet* VP8LAllocateHistogramSet(int size, int cache_bits) {
memory += sizeof(*set);
set->histograms = (VP8LHistogram**)memory;
memory += size * sizeof(*set->histograms);
bulk = (VP8LHistogram*)memory;
set->max_size = size;
set->size = size;
for (i = 0; i < size; ++i) {
set->histograms[i] = bulk + i;
set->histograms[i] = (VP8LHistogram*)memory;
// literal_ won't necessarily be aligned.
set->histograms[i]->literal_ = (int*)(memory + sizeof(VP8LHistogram));
VP8LHistogramInit(set->histograms[i], cache_bits);
// There's no padding/alignment between successive histograms.
memory += VP8LGetHistogramSize(cache_bits);
}
return set;
}
@ -94,12 +131,13 @@ void VP8LHistogramAddSinglePixOrCopy(VP8LHistogram* const histo,
++histo->literal_[PixOrCopyLiteral(v, 1)];
++histo->blue_[PixOrCopyLiteral(v, 0)];
} else if (PixOrCopyIsCacheIdx(v)) {
int literal_ix = 256 + NUM_LENGTH_CODES + PixOrCopyCacheIdx(v);
int literal_ix =
NUM_LITERAL_CODES + NUM_LENGTH_CODES + PixOrCopyCacheIdx(v);
++histo->literal_[literal_ix];
} else {
int code, extra_bits;
VP8LPrefixEncodeBits(PixOrCopyLength(v), &code, &extra_bits);
++histo->literal_[256 + code];
++histo->literal_[NUM_LITERAL_CODES + code];
VP8LPrefixEncodeBits(PixOrCopyDistance(v), &code, &extra_bits);
++histo->distance_[code];
}
@ -224,22 +262,22 @@ static double GetCombinedEntropy(const int* const X, const int* const Y,
double VP8LHistogramEstimateBits(const VP8LHistogram* const p) {
return
PopulationCost(p->literal_, VP8LHistogramNumCodes(p->palette_code_bits_))
+ PopulationCost(p->red_, 256)
+ PopulationCost(p->blue_, 256)
+ PopulationCost(p->alpha_, 256)
+ PopulationCost(p->red_, NUM_LITERAL_CODES)
+ PopulationCost(p->blue_, NUM_LITERAL_CODES)
+ PopulationCost(p->alpha_, NUM_LITERAL_CODES)
+ PopulationCost(p->distance_, NUM_DISTANCE_CODES)
+ VP8LExtraCost(p->literal_ + 256, NUM_LENGTH_CODES)
+ VP8LExtraCost(p->literal_ + NUM_LITERAL_CODES, NUM_LENGTH_CODES)
+ VP8LExtraCost(p->distance_, NUM_DISTANCE_CODES);
}
double VP8LHistogramEstimateBitsBulk(const VP8LHistogram* const p) {
return
BitsEntropy(p->literal_, VP8LHistogramNumCodes(p->palette_code_bits_))
+ BitsEntropy(p->red_, 256)
+ BitsEntropy(p->blue_, 256)
+ BitsEntropy(p->alpha_, 256)
+ BitsEntropy(p->red_, NUM_LITERAL_CODES)
+ BitsEntropy(p->blue_, NUM_LITERAL_CODES)
+ BitsEntropy(p->alpha_, NUM_LITERAL_CODES)
+ BitsEntropy(p->distance_, NUM_DISTANCE_CODES)
+ VP8LExtraCost(p->literal_ + 256, NUM_LENGTH_CODES)
+ VP8LExtraCost(p->literal_ + NUM_LITERAL_CODES, NUM_LENGTH_CODES)
+ VP8LExtraCost(p->distance_, NUM_DISTANCE_CODES);
}
@ -250,13 +288,15 @@ double VP8LHistogramEstimateBitsBulk(const VP8LHistogram* const p) {
static void HistogramAdd(const VP8LHistogram* const in,
VP8LHistogram* const out) {
int i;
for (i = 0; i < PIX_OR_COPY_CODES_MAX; ++i) {
int literal_size = VP8LHistogramNumCodes(out->palette_code_bits_);
assert(in->palette_code_bits_ == out->palette_code_bits_);
for (i = 0; i < literal_size; ++i) {
out->literal_[i] += in->literal_[i];
}
for (i = 0; i < NUM_DISTANCE_CODES; ++i) {
out->distance_[i] += in->distance_[i];
}
for (i = 0; i < 256; ++i) {
for (i = 0; i < NUM_LITERAL_CODES; ++i) {
out->red_[i] += in->red_[i];
out->blue_[i] += in->blue_[i];
out->alpha_[i] += in->alpha_[i];
@ -267,22 +307,22 @@ static int GetCombinedHistogramEntropy(const VP8LHistogram* const a,
const VP8LHistogram* const b,
double cost_threshold,
double* cost) {
const int palette_code_bits =
(a->palette_code_bits_ > b->palette_code_bits_) ? a->palette_code_bits_ :
b->palette_code_bits_;
const int palette_code_bits = a->palette_code_bits_;
assert(a->palette_code_bits_ == b->palette_code_bits_);
*cost += GetCombinedEntropy(a->literal_, b->literal_,
VP8LHistogramNumCodes(palette_code_bits));
*cost += VP8LExtraCostCombined(a->literal_ + 256, b->literal_ + 256,
*cost += VP8LExtraCostCombined(a->literal_ + NUM_LITERAL_CODES,
b->literal_ + NUM_LITERAL_CODES,
NUM_LENGTH_CODES);
if (*cost > cost_threshold) return 0;
*cost += GetCombinedEntropy(a->red_, b->red_, 256);
*cost += GetCombinedEntropy(a->red_, b->red_, NUM_LITERAL_CODES);
if (*cost > cost_threshold) return 0;
*cost += GetCombinedEntropy(a->blue_, b->blue_, 256);
*cost += GetCombinedEntropy(a->blue_, b->blue_, NUM_LITERAL_CODES);
if (*cost > cost_threshold) return 0;
*cost += GetCombinedEntropy(a->alpha_, b->alpha_, 256);
*cost += GetCombinedEntropy(a->alpha_, b->alpha_, NUM_LITERAL_CODES);
if (*cost > cost_threshold) return 0;
*cost += GetCombinedEntropy(a->distance_, b->distance_, NUM_DISTANCE_CODES);
@ -306,22 +346,24 @@ static double HistogramAddEval(const VP8LHistogram* const a,
double cost = 0;
const double sum_cost = a->bit_cost_ + b->bit_cost_;
int i;
assert(a->palette_code_bits_ == b->palette_code_bits_);
cost_threshold += sum_cost;
if (GetCombinedHistogramEntropy(a, b, cost_threshold, &cost)) {
for (i = 0; i < PIX_OR_COPY_CODES_MAX; ++i) {
int literal_size;
out->palette_code_bits_ = a->palette_code_bits_;
literal_size = VP8LHistogramNumCodes(out->palette_code_bits_);
for (i = 0; i < literal_size; ++i) {
out->literal_[i] = a->literal_[i] + b->literal_[i];
}
for (i = 0; i < NUM_DISTANCE_CODES; ++i) {
out->distance_[i] = a->distance_[i] + b->distance_[i];
}
for (i = 0; i < 256; ++i) {
for (i = 0; i < NUM_LITERAL_CODES; ++i) {
out->red_[i] = a->red_[i] + b->red_[i];
out->blue_[i] = a->blue_[i] + b->blue_[i];
out->alpha_[i] = a->alpha_[i] + b->alpha_[i];
}
out->palette_code_bits_ = (a->palette_code_bits_ > b->palette_code_bits_) ?
a->palette_code_bits_ : b->palette_code_bits_;
out->bit_cost_ = cost;
}
@ -374,15 +416,16 @@ static void UpdateDominantCostRange(
}
static void UpdateHistogramCost(VP8LHistogram* const h) {
const double alpha_cost = PopulationCost(h->alpha_, 256);
const double alpha_cost = PopulationCost(h->alpha_, NUM_LITERAL_CODES);
const double distance_cost =
PopulationCost(h->distance_, NUM_DISTANCE_CODES) +
VP8LExtraCost(h->distance_, NUM_DISTANCE_CODES);
const int num_codes = VP8LHistogramNumCodes(h->palette_code_bits_);
h->literal_cost_ = PopulationCost(h->literal_, num_codes) +
VP8LExtraCost(h->literal_ + 256, NUM_LENGTH_CODES);
h->red_cost_ = PopulationCost(h->red_, 256);
h->blue_cost_ = PopulationCost(h->blue_, 256);
VP8LExtraCost(h->literal_ + NUM_LITERAL_CODES,
NUM_LENGTH_CODES);
h->red_cost_ = PopulationCost(h->red_, NUM_LITERAL_CODES);
h->blue_cost_ = PopulationCost(h->blue_, NUM_LITERAL_CODES);
h->bit_cost_ = h->literal_cost_ + h->red_cost_ + h->blue_cost_ +
alpha_cost + distance_cost;
}
@ -439,7 +482,7 @@ static void HistogramAnalyze(
VP8LHistogram* const histo = histograms[i];
histo->bit_cost_ = VP8LHistogramEstimateBits(histo);
// Copy histograms from init_histo[] to histo_image[].
*histo_image->histograms[i] = *histo;
HistogramCopy(histo, histo_image->histograms[i]);
}
}
@ -460,7 +503,7 @@ static void HistogramAnalyzeBin(
VP8LHistogram* const histo = histograms[i];
UpdateHistogramCost(histo);
// Copy histograms from init_histo[] to histo_image[].
*histo_image->histograms[i] = *histo;
HistogramCopy(histo, histo_image->histograms[i]);
UpdateDominantCostRange(histo, &cost_range);
}
@ -503,7 +546,8 @@ static void HistogramCompactBins(VP8LHistogramSet* const histo_image) {
if (start < end) {
assert(histo_image->histograms[start] != NULL);
assert(histo_image->histograms[end] != NULL);
*histo_image->histograms[start] = *histo_image->histograms[end];
HistogramCopy(histo_image->histograms[end],
histo_image->histograms[start]);
histo_image->histograms[end] = NULL;
--end;
}
@ -533,7 +577,7 @@ static void HistogramCombineBin(VP8LHistogramSet* const histo_image,
histo_image->histograms[idx2],
cur_combo, bit_cost_thresh);
if (curr_cost_diff < bit_cost_thresh) {
*histo_image->histograms[idx1] = *cur_combo;
HistogramCopy(cur_combo, histo_image->histograms[idx1]);
histo_image->histograms[idx2]->bit_cost_ = 0.;
}
}
@ -551,7 +595,7 @@ static uint32_t MyRand(uint32_t *seed) {
}
static void HistogramCombine(VP8LHistogramSet* const histo_image,
VP8LHistogram* const histos, int quality) {
VP8LHistogramSet* const histos, int quality) {
int iter;
uint32_t seed = 0;
int tries_with_no_success = 0;
@ -561,8 +605,8 @@ static void HistogramCombine(VP8LHistogramSet* const histo_image,
const int num_pairs = histo_image_size / 2;
const int num_tries_no_success = outer_iters / 2;
const int min_cluster_size = 2;
VP8LHistogram* cur_combo = histos + 0; // trial merged histogram
VP8LHistogram* best_combo = histos + 1; // best merged histogram so far
VP8LHistogram* cur_combo = histos->histograms[0]; // trial histogram
VP8LHistogram* best_combo = histos->histograms[1]; // best histogram so far
// Collapse similar histograms in 'histo_image'.
for (iter = 0;
@ -603,12 +647,12 @@ static void HistogramCombine(VP8LHistogramSet* const histo_image,
}
if (best_idx1 >= 0) {
*histo_image->histograms[best_idx1] = *best_combo;
HistogramCopy(best_combo, histo_image->histograms[best_idx1]);
// swap best_idx2 slot with last one (which is now unused)
--histo_image_size;
if (best_idx2 != histo_image_size) {
histo_image->histograms[best_idx2] =
histo_image->histograms[histo_image_size];
HistogramCopy(histo_image->histograms[histo_image_size],
histo_image->histograms[best_idx2]);
histo_image->histograms[histo_image_size] = NULL;
}
tries_with_no_success = 0;
@ -683,8 +727,7 @@ int VP8LGetHistoImageSymbols(int xsize, int ysize,
// bin_map[n][num_histo + 1] ... bin_map[n][bin_depth - 1] = un-used indices.
const int bin_depth = histo_image_raw_size + 1;
int16_t* bin_map = NULL;
VP8LHistogram* const histos =
(VP8LHistogram*)WebPSafeMalloc(2ULL, sizeof(*histos));
VP8LHistogramSet* const histos = VP8LAllocateHistogramSet(2, cache_bits);
VP8LHistogramSet* const init_histo =
VP8LAllocateHistogramSet(histo_image_raw_size, cache_bits);
@ -709,8 +752,8 @@ int VP8LGetHistoImageSymbols(int xsize, int ysize,
const double combine_cost_factor =
GetCombineCostFactor(histo_image_raw_size, quality);
HistogramAnalyzeBin(init_histo, histo_image, bin_map);
HistogramCombineBin(histo_image, histos, bin_depth, combine_cost_factor,
bin_map);
HistogramCombineBin(histo_image, histos->histograms[0],
bin_depth, combine_cost_factor, bin_map);
} else {
HistogramAnalyze(init_histo, histo_image);
}
@ -725,7 +768,7 @@ int VP8LGetHistoImageSymbols(int xsize, int ysize,
Error:
WebPSafeFree(bin_map);
WebPSafeFree(init_histo);
WebPSafeFree(histos);
VP8LFreeHistogramSet(init_histo);
VP8LFreeHistogramSet(histos);
return ok;
}

View File

@ -32,7 +32,7 @@ extern "C" {
typedef struct {
// literal_ contains green literal, palette-code and
// copy-length-prefix histogram
int literal_[PIX_OR_COPY_CODES_MAX];
int* literal_; // Pointer to the allocated buffer for literal.
int red_[256];
int blue_[256];
int alpha_[256];
@ -62,6 +62,9 @@ void VP8LHistogramCreate(VP8LHistogram* const p,
const VP8LBackwardRefs* const refs,
int palette_code_bits);
// Return the size of the histogram for a given palette_code_bits.
int VP8LGetHistogramSize(int palette_code_bits);
// Set the palette_code_bits and reset the stats.
void VP8LHistogramInit(VP8LHistogram* const p, int palette_code_bits);
@ -69,10 +72,21 @@ void VP8LHistogramInit(VP8LHistogram* const p, int palette_code_bits);
void VP8LHistogramStoreRefs(const VP8LBackwardRefs* const refs,
VP8LHistogram* const histo);
// Free the memory allocated for the histogram.
void VP8LFreeHistogram(VP8LHistogram* const histo);
// Free the memory allocated for the histogram set.
void VP8LFreeHistogramSet(VP8LHistogramSet* const histo);
// Allocate an array of pointer to histograms, allocated and initialized
// using 'cache_bits'. Return NULL in case of memory error.
VP8LHistogramSet* VP8LAllocateHistogramSet(int size, int cache_bits);
// Allocate and initialize histogram object with specified 'cache_bits'.
// Returns NULL in case of memory error.
// Special case of VP8LAllocateHistogramSet, with size equals 1.
VP8LHistogram* VP8LAllocateHistogram(int cache_bits);
// Accumulate a token 'v' into a histogram.
void VP8LHistogramAddSinglePixOrCopy(VP8LHistogram* const histo,
const PixOrCopy* const v);
@ -86,7 +100,7 @@ double VP8LHistogramEstimateBits(const VP8LHistogram* const p);
double VP8LHistogramEstimateBitsBulk(const VP8LHistogram* const p);
static WEBP_INLINE int VP8LHistogramNumCodes(int palette_code_bits) {
return 256 + NUM_LENGTH_CODES +
return NUM_LITERAL_CODES + NUM_LENGTH_CODES +
((palette_code_bits > 0) ? (1 << palette_code_bits) : 0);
}

View File

@ -106,14 +106,9 @@ static int AnalyzeEntropy(const uint32_t* argb,
const uint32_t* last_line = NULL;
uint32_t last_pix = argb[0]; // so we're sure that pix_diff == 0
VP8LHistogram* nonpredicted = NULL;
VP8LHistogram* predicted =
(VP8LHistogram*)WebPSafeMalloc(2ULL, sizeof(*predicted));
if (predicted == NULL) return 0;
nonpredicted = predicted + 1;
VP8LHistogramSet* const histo_set = VP8LAllocateHistogramSet(2, 0);
if (histo_set == NULL) return 0;
VP8LHistogramInit(predicted, 0);
VP8LHistogramInit(nonpredicted, 0);
for (y = 0; y < height; ++y) {
for (x = 0; x < width; ++x) {
const uint32_t pix = argb[x];
@ -126,21 +121,25 @@ static int AnalyzeEntropy(const uint32_t* argb,
{
const PixOrCopy pix_token = PixOrCopyCreateLiteral(pix);
const PixOrCopy pix_diff_token = PixOrCopyCreateLiteral(pix_diff);
VP8LHistogramAddSinglePixOrCopy(nonpredicted, &pix_token);
VP8LHistogramAddSinglePixOrCopy(predicted, &pix_diff_token);
VP8LHistogramAddSinglePixOrCopy(histo_set->histograms[0], &pix_token);
VP8LHistogramAddSinglePixOrCopy(histo_set->histograms[1],
&pix_diff_token);
}
}
last_line = argb;
argb += argb_stride;
}
*nonpredicted_bits = VP8LHistogramEstimateBitsBulk(nonpredicted);
*predicted_bits = VP8LHistogramEstimateBitsBulk(predicted);
WebPSafeFree(predicted);
*nonpredicted_bits = VP8LHistogramEstimateBitsBulk(histo_set->histograms[0]);
*predicted_bits = VP8LHistogramEstimateBitsBulk(histo_set->histograms[1]);
VP8LFreeHistogramSet(histo_set);
return 1;
}
static int VP8LEncAnalyze(VP8LEncoder* const enc, WebPImageHint image_hint) {
static int AnalyzeAndInit(VP8LEncoder* const enc, WebPImageHint image_hint) {
const WebPPicture* const pic = enc->pic_;
const int width = pic->width;
const int height = pic->height;
const int pix_cnt = width * height;
assert(pic != NULL && pic->argb != NULL);
enc->use_palette_ =
@ -158,7 +157,7 @@ static int VP8LEncAnalyze(VP8LEncoder* const enc, WebPImageHint image_hint) {
enc->use_cross_color_ = 1;
} else {
double non_pred_entropy, pred_entropy;
if (!AnalyzeEntropy(pic->argb, pic->width, pic->height, pic->argb_stride,
if (!AnalyzeEntropy(pic->argb, width, height, pic->argb_stride,
&non_pred_entropy, &pred_entropy)) {
return 0;
}
@ -168,6 +167,14 @@ static int VP8LEncAnalyze(VP8LEncoder* const enc, WebPImageHint image_hint) {
}
}
}
enc->hash_chain_ = VP8LHashChainNew(pix_cnt);
if (enc->hash_chain_ == NULL) return 0;
enc->refs_[0] = VP8LBackwardRefsNew(pix_cnt);
enc->refs_[1] = VP8LBackwardRefsNew(pix_cnt);
if (enc->refs_[0] == NULL || enc->refs_[1] == NULL) {
return 0;
}
return 1;
}
@ -176,10 +183,13 @@ static int GetHuffBitLengthsAndCodes(
const VP8LHistogramSet* const histogram_image,
HuffmanTreeCode* const huffman_codes) {
int i, k;
int ok = 1;
int ok = 0;
uint64_t total_length_size = 0;
uint8_t* mem_buf = NULL;
const int histogram_image_size = histogram_image->size;
int max_num_symbols = 0;
uint8_t* buf_rle = NULL;
HuffmanTree* huff_tree = NULL;
// Iterate over all histograms and get the aggregate number of codes used.
for (i = 0; i < histogram_image_size; ++i) {
@ -200,10 +210,8 @@ static int GetHuffBitLengthsAndCodes(
uint8_t* lengths;
mem_buf = (uint8_t*)WebPSafeCalloc(total_length_size,
sizeof(*lengths) + sizeof(*codes));
if (mem_buf == NULL) {
ok = 0;
goto End;
}
if (mem_buf == NULL) goto End;
codes = (uint16_t*)mem_buf;
lengths = (uint8_t*)&codes[total_length_size];
for (i = 0; i < 5 * histogram_image_size; ++i) {
@ -212,24 +220,33 @@ static int GetHuffBitLengthsAndCodes(
huffman_codes[i].code_lengths = lengths;
codes += bit_length;
lengths += bit_length;
if (max_num_symbols < bit_length) {
max_num_symbols = bit_length;
}
}
}
buf_rle = (uint8_t*)WebPSafeMalloc(1ULL, max_num_symbols);
huff_tree = (HuffmanTree*)WebPSafeMalloc(3ULL * max_num_symbols,
sizeof(*huff_tree));
if (buf_rle == NULL || huff_tree == NULL) goto End;
// Create Huffman trees.
for (i = 0; ok && (i < histogram_image_size); ++i) {
for (i = 0; i < histogram_image_size; ++i) {
HuffmanTreeCode* const codes = &huffman_codes[5 * i];
VP8LHistogram* const histo = histogram_image->histograms[i];
ok = ok && VP8LCreateHuffmanTree(histo->literal_, 15, codes + 0);
ok = ok && VP8LCreateHuffmanTree(histo->red_, 15, codes + 1);
ok = ok && VP8LCreateHuffmanTree(histo->blue_, 15, codes + 2);
ok = ok && VP8LCreateHuffmanTree(histo->alpha_, 15, codes + 3);
ok = ok && VP8LCreateHuffmanTree(histo->distance_, 15, codes + 4);
VP8LCreateHuffmanTree(histo->literal_, 15, buf_rle, huff_tree, codes + 0);
VP8LCreateHuffmanTree(histo->red_, 15, buf_rle, huff_tree, codes + 1);
VP8LCreateHuffmanTree(histo->blue_, 15, buf_rle, huff_tree, codes + 2);
VP8LCreateHuffmanTree(histo->alpha_, 15, buf_rle, huff_tree, codes + 3);
VP8LCreateHuffmanTree(histo->distance_, 15, buf_rle, huff_tree, codes + 4);
}
ok = 1;
End:
WebPSafeFree(huff_tree);
WebPSafeFree(buf_rle);
if (!ok) {
WebPSafeFree(mem_buf);
// If one VP8LCreateHuffmanTree() above fails, we need to clean up behind.
memset(huffman_codes, 0, 5 * histogram_image_size * sizeof(*huffman_codes));
}
return ok;
@ -296,18 +313,16 @@ static void StoreHuffmanTreeToBitMask(
}
}
static int StoreFullHuffmanCode(VP8LBitWriter* const bw,
const HuffmanTreeCode* const tree) {
int ok = 0;
// 'huff_tree' and 'tokens' are pre-alloacted buffers.
static void StoreFullHuffmanCode(VP8LBitWriter* const bw,
HuffmanTree* const huff_tree,
HuffmanTreeToken* const tokens,
const HuffmanTreeCode* const tree) {
uint8_t code_length_bitdepth[CODE_LENGTH_CODES] = { 0 };
uint16_t code_length_bitdepth_symbols[CODE_LENGTH_CODES] = { 0 };
const int max_tokens = tree->num_symbols;
int num_tokens;
HuffmanTreeCode huffman_code;
HuffmanTreeToken* const tokens =
(HuffmanTreeToken*)WebPSafeMalloc((uint64_t)max_tokens, sizeof(*tokens));
if (tokens == NULL) return 0;
huffman_code.num_symbols = CODE_LENGTH_CODES;
huffman_code.code_lengths = code_length_bitdepth;
huffman_code.codes = code_length_bitdepth_symbols;
@ -316,14 +331,13 @@ static int StoreFullHuffmanCode(VP8LBitWriter* const bw,
num_tokens = VP8LCreateCompressedHuffmanTree(tree, tokens, max_tokens);
{
int histogram[CODE_LENGTH_CODES] = { 0 };
uint8_t buf_rle[CODE_LENGTH_CODES] = { 0 };
int i;
for (i = 0; i < num_tokens; ++i) {
++histogram[tokens[i].code];
}
if (!VP8LCreateHuffmanTree(histogram, 7, &huffman_code)) {
goto End;
}
VP8LCreateHuffmanTree(histogram, 7, buf_rle, huff_tree, &huffman_code);
}
StoreHuffmanTreeOfHuffmanTreeToBitMask(bw, code_length_bitdepth);
@ -360,14 +374,13 @@ static int StoreFullHuffmanCode(VP8LBitWriter* const bw,
}
StoreHuffmanTreeToBitMask(bw, tokens, length, &huffman_code);
}
ok = 1;
End:
WebPSafeFree(tokens);
return ok;
}
static int StoreHuffmanCode(VP8LBitWriter* const bw,
const HuffmanTreeCode* const huffman_code) {
// 'huff_tree' and 'tokens' are pre-alloacted buffers.
static void StoreHuffmanCode(VP8LBitWriter* const bw,
HuffmanTree* const huff_tree,
HuffmanTreeToken* const tokens,
const HuffmanTreeCode* const huffman_code) {
int i;
int count = 0;
int symbols[2] = { 0, 0 };
@ -385,7 +398,6 @@ static int StoreHuffmanCode(VP8LBitWriter* const bw,
if (count == 0) { // emit minimal tree for empty cases
// bits: small tree marker: 1, count-1: 0, large 8-bit code: 0, code: 0
VP8LWriteBits(bw, 4, 0x01);
return 1;
} else if (count <= 2 && symbols[0] < kMaxSymbol && symbols[1] < kMaxSymbol) {
VP8LWriteBits(bw, 1, 1); // Small tree marker to encode 1 or 2 symbols.
VP8LWriteBits(bw, 1, count - 1);
@ -399,9 +411,8 @@ static int StoreHuffmanCode(VP8LBitWriter* const bw,
if (count == 2) {
VP8LWriteBits(bw, 8, symbols[1]);
}
return 1;
} else {
return StoreFullHuffmanCode(bw, huffman_code);
StoreFullHuffmanCode(bw, huff_tree, tokens, huffman_code);
}
}
@ -464,21 +475,29 @@ static void StoreImageToBitMask(
// Special case of EncodeImageInternal() for cache-bits=0, histo_bits=31
static int EncodeImageNoHuffman(VP8LBitWriter* const bw,
const uint32_t* const argb,
VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const refs_array[2],
int width, int height, int quality) {
int i;
int ok = 0;
VP8LBackwardRefs refs;
int max_tokens = 0;
VP8LBackwardRefs* refs;
HuffmanTreeToken* tokens = NULL;
HuffmanTreeCode huffman_codes[5] = { { 0, NULL, NULL } };
const uint16_t histogram_symbols[1] = { 0 }; // only one tree, one symbol
VP8LHistogramSet* const histogram_image = VP8LAllocateHistogramSet(1, 0);
if (histogram_image == NULL) return 0;
HuffmanTree* const huff_tree = (HuffmanTree*)WebPSafeMalloc(
3ULL * CODE_LENGTH_CODES, sizeof(*huff_tree));
if (histogram_image == NULL || huff_tree == NULL) goto Error;
// Calculate backward references from ARGB image.
if (!VP8LGetBackwardReferences(width, height, argb, quality, 0, 1, &refs)) {
refs = VP8LGetBackwardReferences(width, height, argb, quality, 0, 1,
hash_chain, refs_array);
if (refs == NULL) {
goto Error;
}
// Build histogram image and symbols from backward references.
VP8LHistogramStoreRefs(&refs, histogram_image->histograms[0]);
VP8LHistogramStoreRefs(refs, histogram_image->histograms[0]);
// Create Huffman bit lengths and codes for each histogram image.
assert(histogram_image->size == 1);
@ -489,28 +508,41 @@ static int EncodeImageNoHuffman(VP8LBitWriter* const bw,
// No color cache, no Huffman image.
VP8LWriteBits(bw, 1, 0);
// Find maximum number of symbols for the huffman tree-set.
for (i = 0; i < 5; ++i) {
HuffmanTreeCode* const codes = &huffman_codes[i];
if (max_tokens < codes->num_symbols) {
max_tokens = codes->num_symbols;
}
}
tokens = (HuffmanTreeToken*)WebPSafeMalloc((uint64_t)max_tokens,
sizeof(*tokens));
if (tokens == NULL) goto Error;
// Store Huffman codes.
for (i = 0; i < 5; ++i) {
HuffmanTreeCode* const codes = &huffman_codes[i];
if (!StoreHuffmanCode(bw, codes)) {
goto Error;
}
StoreHuffmanCode(bw, huff_tree, tokens, codes);
ClearHuffmanTreeIfOnlyOneSymbol(codes);
}
// Store actual literals.
StoreImageToBitMask(bw, width, 0, &refs, histogram_symbols, huffman_codes);
StoreImageToBitMask(bw, width, 0, refs, histogram_symbols, huffman_codes);
ok = 1;
Error:
WebPSafeFree(histogram_image);
VP8LClearBackwardRefs(&refs);
WebPSafeFree(tokens);
WebPSafeFree(huff_tree);
VP8LFreeHistogramSet(histogram_image);
WebPSafeFree(huffman_codes[0].codes);
return ok;
}
static int EncodeImageInternal(VP8LBitWriter* const bw,
const uint32_t* const argb,
VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const refs_array[2],
int width, int height, int quality,
int cache_bits, int histogram_bits) {
int ok = 0;
@ -520,11 +552,14 @@ static int EncodeImageInternal(VP8LBitWriter* const bw,
VP8LSubSampleSize(width, histogram_bits) *
VP8LSubSampleSize(height, histogram_bits);
VP8LHistogramSet* histogram_image =
VP8LAllocateHistogramSet(histogram_image_xysize, 0);
VP8LAllocateHistogramSet(histogram_image_xysize, cache_bits);
int histogram_image_size = 0;
size_t bit_array_size = 0;
HuffmanTree* huff_tree = NULL;
HuffmanTreeToken* tokens = NULL;
HuffmanTreeCode* huffman_codes = NULL;
VP8LBackwardRefs refs;
VP8LBackwardRefs* refs = NULL;
VP8LBackwardRefs* best_refs;
uint16_t* const histogram_symbols =
(uint16_t*)WebPSafeMalloc((uint64_t)histogram_image_xysize,
sizeof(*histogram_symbols));
@ -532,18 +567,27 @@ static int EncodeImageInternal(VP8LBitWriter* const bw,
assert(histogram_bits <= MAX_HUFFMAN_BITS);
if (histogram_image == NULL || histogram_symbols == NULL) {
WebPSafeFree(histogram_image);
VP8LFreeHistogramSet(histogram_image);
WebPSafeFree(histogram_symbols);
return 0;
}
refs = VP8LBackwardRefsNew(refs_array[0]->max_size);
if (refs == NULL) {
goto Error;
}
// 'best_refs' is the reference to the best backward refs and points to one
// of refs_array[0] or refs_array[1].
// Calculate backward references from ARGB image.
if (!VP8LGetBackwardReferences(width, height, argb, quality, cache_bits,
use_2d_locality, &refs)) {
best_refs = VP8LGetBackwardReferences(width, height, argb, quality,
cache_bits, use_2d_locality,
hash_chain, refs_array);
if (best_refs == NULL || !VP8LBackwardRefsCopy(best_refs, refs)) {
goto Error;
}
// Build histogram image and symbols from backward references.
if (!VP8LGetHistoImageSymbols(width, height, &refs,
if (!VP8LGetHistoImageSymbols(width, height, refs,
quality, histogram_bits, cache_bits,
histogram_image,
histogram_symbols)) {
@ -559,7 +603,7 @@ static int EncodeImageInternal(VP8LBitWriter* const bw,
goto Error;
}
// Free combined histograms.
WebPSafeFree(histogram_image);
VP8LFreeHistogramSet(histogram_image);
histogram_image = NULL;
// Color Cache parameters.
@ -589,7 +633,7 @@ static int EncodeImageInternal(VP8LBitWriter* const bw,
histogram_image_size = max_index;
VP8LWriteBits(bw, 3, histogram_bits - 2);
ok = EncodeImageNoHuffman(bw, histogram_argb,
ok = EncodeImageNoHuffman(bw, histogram_argb, hash_chain, refs_array,
VP8LSubSampleSize(width, histogram_bits),
VP8LSubSampleSize(height, histogram_bits),
quality);
@ -601,22 +645,37 @@ static int EncodeImageInternal(VP8LBitWriter* const bw,
// Store Huffman codes.
{
int i;
int max_tokens = 0;
huff_tree = (HuffmanTree*)WebPSafeMalloc(3ULL * CODE_LENGTH_CODES,
sizeof(*huff_tree));
if (huff_tree == NULL) goto Error;
// Find maximum number of symbols for the huffman tree-set.
for (i = 0; i < 5 * histogram_image_size; ++i) {
HuffmanTreeCode* const codes = &huffman_codes[i];
if (!StoreHuffmanCode(bw, codes)) goto Error;
if (max_tokens < codes->num_symbols) {
max_tokens = codes->num_symbols;
}
}
tokens = (HuffmanTreeToken*)WebPSafeMalloc((uint64_t)max_tokens,
sizeof(*tokens));
if (tokens == NULL) goto Error;
for (i = 0; i < 5 * histogram_image_size; ++i) {
HuffmanTreeCode* const codes = &huffman_codes[i];
StoreHuffmanCode(bw, huff_tree, tokens, codes);
ClearHuffmanTreeIfOnlyOneSymbol(codes);
}
}
// Store actual literals.
StoreImageToBitMask(bw, width, histogram_bits, &refs,
StoreImageToBitMask(bw, width, histogram_bits, refs,
histogram_symbols, huffman_codes);
ok = 1;
Error:
WebPSafeFree(histogram_image);
VP8LClearBackwardRefs(&refs);
WebPSafeFree(tokens);
WebPSafeFree(huff_tree);
VP8LFreeHistogramSet(histogram_image);
VP8LBackwardRefsDelete(refs);
if (huffman_codes != NULL) {
WebPSafeFree(huffman_codes->codes);
WebPSafeFree(huffman_codes);
@ -637,11 +696,9 @@ static int EvalAndApplySubtractGreen(VP8LEncoder* const enc,
int i;
const uint32_t* const argb = enc->argb_;
double bit_cost_before, bit_cost_after;
VP8LHistogram* const histo =
(VP8LHistogram*)WebPSafeMalloc(1ULL, sizeof(*histo));
// Allocate histogram with cache_bits = 1.
VP8LHistogram* const histo = VP8LAllocateHistogram(1);
if (histo == NULL) return 0;
VP8LHistogramInit(histo, 1);
for (i = 0; i < width * height; ++i) {
const uint32_t c = argb[i];
++histo->red_[(c >> 16) & 0xff];
@ -657,7 +714,7 @@ static int EvalAndApplySubtractGreen(VP8LEncoder* const enc,
++histo->blue_[((c >> 0) - green) & 0xff];
}
bit_cost_after = VP8LHistogramEstimateBits(histo);
WebPSafeFree(histo);
VP8LFreeHistogram(histo);
// Check if subtracting green yields low entropy.
enc->use_subtract_green_ = (bit_cost_after < bit_cost_before);
@ -683,8 +740,9 @@ static int ApplyPredictFilter(const VP8LEncoder* const enc,
VP8LWriteBits(bw, 2, PREDICTOR_TRANSFORM);
assert(pred_bits >= 2);
VP8LWriteBits(bw, 3, pred_bits - 2);
if (!EncodeImageNoHuffman(bw, enc->transform_data_,
transform_width, transform_height, quality)) {
if (!EncodeImageNoHuffman(bw, enc->transform_data_, enc->hash_chain_,
enc->refs_, transform_width, transform_height,
quality)) {
return 0;
}
return 1;
@ -703,8 +761,9 @@ static int ApplyCrossColorFilter(const VP8LEncoder* const enc,
VP8LWriteBits(bw, 2, CROSS_COLOR_TRANSFORM);
assert(ccolor_transform_bits >= 2);
VP8LWriteBits(bw, 3, ccolor_transform_bits - 2);
if (!EncodeImageNoHuffman(bw, enc->transform_data_,
transform_width, transform_height, quality)) {
if (!EncodeImageNoHuffman(bw, enc->transform_data_, enc->hash_chain_,
enc->refs_, transform_width, transform_height,
quality)) {
return 0;
}
return 1;
@ -902,7 +961,8 @@ static WebPEncodingError EncodePalette(VP8LBitWriter* const bw,
for (i = palette_size - 1; i >= 1; --i) {
palette[i] = VP8LSubPixels(palette[i], palette[i - 1]);
}
if (!EncodeImageNoHuffman(bw, palette, palette_size, 1, quality)) {
if (!EncodeImageNoHuffman(bw, palette, enc->hash_chain_, enc->refs_,
palette_size, 1, quality)) {
err = VP8_ENC_ERROR_INVALID_CONFIGURATION;
goto Error;
}
@ -915,7 +975,7 @@ static WebPEncodingError EncodePalette(VP8LBitWriter* const bw,
// -----------------------------------------------------------------------------
static int GetHistoBits(int method, int use_palette, int width, int height) {
const uint64_t hist_size = sizeof(VP8LHistogram);
const uint64_t hist_size = VP8LGetHistogramSize(MAX_COLOR_CACHE_BITS);
// Make tile size a function of encoding method (Range: 0 to 6).
int histo_bits = (use_palette ? 9 : 7) - method;
while (1) {
@ -969,6 +1029,9 @@ static VP8LEncoder* VP8LEncoderNew(const WebPConfig* const config,
static void VP8LEncoderDelete(VP8LEncoder* enc) {
if (enc != NULL) {
VP8LHashChainDelete(enc->hash_chain_);
VP8LBackwardRefsDelete(enc->refs_[0]);
VP8LBackwardRefsDelete(enc->refs_[1]);
WebPSafeFree(enc->argb_);
WebPSafeFree(enc);
}
@ -995,7 +1058,7 @@ WebPEncodingError VP8LEncodeStream(const WebPConfig* const config,
// ---------------------------------------------------------------------------
// Analyze image (entropy, num_palettes etc)
if (!VP8LEncAnalyze(enc, config->image_hint)) {
if (!AnalyzeAndInit(enc, config->image_hint)) {
err = VP8_ENC_ERROR_OUT_OF_MEMORY;
goto Error;
}
@ -1051,8 +1114,8 @@ WebPEncodingError VP8LEncodeStream(const WebPConfig* const config,
if (enc->cache_bits_ > 0) {
if (!VP8LCalculateEstimateForCacheSize(enc->argb_, enc->current_width_,
height, quality,
&enc->cache_bits_)) {
height, quality, enc->hash_chain_,
enc->refs_[0], &enc->cache_bits_)) {
err = VP8_ENC_ERROR_INVALID_CONFIGURATION;
goto Error;
}
@ -1061,8 +1124,9 @@ WebPEncodingError VP8LEncodeStream(const WebPConfig* const config,
// ---------------------------------------------------------------------------
// Encode and write the transformed image.
if (!EncodeImageInternal(bw, enc->argb_, enc->current_width_, height,
quality, enc->cache_bits_, enc->histo_bits_)) {
if (!EncodeImageInternal(bw, enc->argb_, enc->hash_chain_, enc->refs_,
enc->current_width_, height, quality,
enc->cache_bits_, enc->histo_bits_)) {
err = VP8_ENC_ERROR_OUT_OF_MEMORY;
goto Error;
}
@ -1092,6 +1156,7 @@ int VP8LEncodeImage(const WebPConfig* const config,
int has_alpha;
size_t coded_size;
int percent = 0;
int initial_size;
WebPEncodingError err = VP8_ENC_OK;
VP8LBitWriter bw;
@ -1105,8 +1170,11 @@ int VP8LEncodeImage(const WebPConfig* const config,
width = picture->width;
height = picture->height;
// Initialize BitWriter with size corresponding to 8bpp.
if (!VP8LBitWriterInit(&bw, width * height)) {
// Initialize BitWriter with size corresponding to 16 bpp to photo images and
// 8 bpp for graphical images.
initial_size = (config->image_hint == WEBP_HINT_GRAPH) ?
width * height : width * height * 2;
if (!VP8LBitWriterInit(&bw, initial_size)) {
err = VP8_ENC_ERROR_OUT_OF_MEMORY;
goto Error;
}

View File

@ -23,6 +23,8 @@
extern "C" {
#endif
struct VP8LHashChain; // Defined in backward_references.h
typedef struct {
const WebPConfig* config_; // user configuration and parameters
const WebPPicture* pic_; // input picture.
@ -33,6 +35,11 @@ typedef struct {
uint32_t* transform_data_; // Scratch memory for transform data.
int current_width_; // Corresponds to packed image width.
struct VP8LHashChain* hash_chain_; // HashChain data for constructing
// backward references.
struct VP8LBackwardRefs* refs_[2]; // Backward Refs array corresponding to
// LZ77 & RLE coding.
// Encoding parameters derived from quality parameter.
int histo_bits_;
int transform_bits_;

View File

@ -28,13 +28,13 @@ static int ValuesShouldBeCollapsedToStrideAverage(int a, int b) {
// Change the population counts in a way that the consequent
// Huffman tree compression, especially its RLE-part, give smaller output.
static int OptimizeHuffmanForRle(int length, int* const counts) {
uint8_t* good_for_rle;
static void OptimizeHuffmanForRle(int length, uint8_t* const good_for_rle,
int* const counts) {
// 1) Let's make the Huffman code more compatible with rle encoding.
int i;
for (; length >= 0; --length) {
if (length == 0) {
return 1; // All zeros.
return; // All zeros.
}
if (counts[length - 1] != 0) {
// Now counts[0..length - 1] does not have trailing zeros.
@ -43,10 +43,6 @@ static int OptimizeHuffmanForRle(int length, int* const counts) {
}
// 2) Let's mark all population counts that already can be encoded
// with an rle code.
good_for_rle = (uint8_t*)WebPSafeCalloc(1ULL, length);
if (good_for_rle == NULL) {
return 0;
}
{
// Let's not spoil any of the existing good rle codes.
// Mark any seq of 0's that is longer as 5 as a good_for_rle.
@ -119,17 +115,8 @@ static int OptimizeHuffmanForRle(int length, int* const counts) {
}
}
}
WebPSafeFree(good_for_rle);
return 1;
}
typedef struct {
int total_count_;
int value_;
int pool_index_left_;
int pool_index_right_;
} HuffmanTree;
// A comparer function for two Huffman trees: sorts first by 'total count'
// (more comes first), and then by 'value' (more comes first).
static int CompareHuffmanTrees(const void* ptr1, const void* ptr2) {
@ -175,12 +162,11 @@ static void SetBitDepths(const HuffmanTree* const tree,
// we are not planning to use this with extremely long blocks.
//
// See http://en.wikipedia.org/wiki/Huffman_coding
static int GenerateOptimalTree(const int* const histogram, int histogram_size,
int tree_depth_limit,
uint8_t* const bit_depths) {
static void GenerateOptimalTree(const int* const histogram, int histogram_size,
HuffmanTree* tree, int tree_depth_limit,
uint8_t* const bit_depths) {
int count_min;
HuffmanTree* tree_pool;
HuffmanTree* tree;
int tree_size_orig = 0;
int i;
@ -191,15 +177,9 @@ static int GenerateOptimalTree(const int* const histogram, int histogram_size,
}
if (tree_size_orig == 0) { // pretty optimal already!
return 1;
return;
}
// 3 * tree_size is enough to cover all the nodes representing a
// population and all the inserted nodes combining two existing nodes.
// The tree pool needs 2 * (tree_size_orig - 1) entities, and the
// tree needs exactly tree_size_orig entities.
tree = (HuffmanTree*)WebPSafeMalloc(3ULL * tree_size_orig, sizeof(*tree));
if (tree == NULL) return 0;
tree_pool = tree + tree_size_orig;
// For block sizes with less than 64k symbols we never need to do a
@ -272,8 +252,6 @@ static int GenerateOptimalTree(const int* const histogram, int histogram_size,
}
}
}
WebPSafeFree(tree);
return 1;
}
// -----------------------------------------------------------------------------
@ -424,17 +402,15 @@ static void ConvertBitDepthsToSymbols(HuffmanTreeCode* const tree) {
// -----------------------------------------------------------------------------
// Main entry point
int VP8LCreateHuffmanTree(int* const histogram, int tree_depth_limit,
HuffmanTreeCode* const tree) {
const int num_symbols = tree->num_symbols;
if (!OptimizeHuffmanForRle(num_symbols, histogram)) {
return 0;
}
if (!GenerateOptimalTree(histogram, num_symbols,
tree_depth_limit, tree->code_lengths)) {
return 0;
}
void VP8LCreateHuffmanTree(int* const histogram, int tree_depth_limit,
uint8_t* const buf_rle,
HuffmanTree* const huff_tree,
HuffmanTreeCode* const huff_code) {
const int num_symbols = huff_code->num_symbols;
memset(buf_rle, 0, num_symbols * sizeof(*buf_rle));
OptimizeHuffmanForRle(num_symbols, buf_rle, histogram);
GenerateOptimalTree(histogram, num_symbols, huff_tree, tree_depth_limit,
huff_code->code_lengths);
// Create the actual bit codes for the bit lengths.
ConvertBitDepthsToSymbols(tree);
return 1;
ConvertBitDepthsToSymbols(huff_code);
}

View File

@ -33,14 +33,26 @@ typedef struct {
uint16_t* codes; // Symbol Codes.
} HuffmanTreeCode;
// Struct to represent the Huffman tree.
// TODO(vikasa): Add comment for the fields of the Struct.
typedef struct {
int total_count_;
int value_;
int pool_index_left_; // Index for the left sub-tree.
int pool_index_right_; // Index for the right sub-tree.
} HuffmanTree;
// Turn the Huffman tree into a token sequence.
// Returns the number of tokens used.
int VP8LCreateCompressedHuffmanTree(const HuffmanTreeCode* const tree,
HuffmanTreeToken* tokens, int max_tokens);
// Create an optimized tree, and tokenize it.
int VP8LCreateHuffmanTree(int* const histogram, int tree_depth_limit,
HuffmanTreeCode* const tree);
// 'buf_rle' and 'huff_tree' are pre-allocated and the 'tree' is the constructed
// huffman code tree.
void VP8LCreateHuffmanTree(int* const histogram, int tree_depth_limit,
uint8_t* const buf_rle, HuffmanTree* const huff_tree,
HuffmanTreeCode* const tree);
#ifdef __cplusplus
}