use block-based allocation for backward refs storage, and free-lists

Non-photographic sources produce far fewer literal references, and their
buffer is usually much smaller than the picture size when the content
compresses well. Hence, use block-based allocation (and recycling) to
avoid pre-allocating a buffer of maximal size.

This can reduce memory consumption by up to 50% for non-photographic
content. Encoding speed is also slightly better (1-2%).

Change-Id: Icbc229e1e5a08976348e600c8906beaa26954a11
skal 2014-05-05 11:11:55 -07:00
parent 1ba61b09f9
commit ca3d746e39
5 changed files with 225 additions and 124 deletions
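For readers skimming the diffs below, here is a minimal, standalone sketch of the allocation scheme the commit message describes. It is illustration only, not the libwebp sources: BlockList, BLOCK_CAPACITY and every function name in it are invented for this example, and it stores plain ints where the encoder stores PixOrCopy entries.

/*
 * Sketch of block-based storage with a free-list: items are appended to
 * fixed-size blocks chained in a list; clearing the container moves the
 * whole chain onto a free-list, so the next pass reuses the same memory
 * instead of pre-allocating a worst-case buffer of one entry per pixel.
 */
#include <stdlib.h>
#include <string.h>

#define BLOCK_CAPACITY 256   /* plays the role of block_size_ / MIN_BLOCK_SIZE */

typedef struct Block {
  struct Block* next;
  int size;                       /* number of used entries in items[] */
  int items[BLOCK_CAPACITY];
} Block;

typedef struct {
  Block* head;          /* chain of blocks currently holding data */
  Block* last;          /* last block of the chain (append target) */
  Block* free_blocks;   /* free-list of recycled blocks */
  int error;            /* sticky flag, set on allocation failure */
} BlockList;

static void BlockListInit(BlockList* const list) {
  memset(list, 0, sizeof(*list));
}

/* Recycle every used block onto the free-list; nothing is free()'d here. */
static void BlockListClear(BlockList* const list) {
  while (list->head != NULL) {
    Block* const next = list->head->next;
    list->head->next = list->free_blocks;
    list->free_blocks = list->head;
    list->head = next;
  }
  list->last = NULL;
}

/* Take a block from the free-list if possible, otherwise allocate one. */
static Block* BlockListNewBlock(BlockList* const list) {
  Block* b = list->free_blocks;
  if (b != NULL) {
    list->free_blocks = b->next;
  } else {
    b = (Block*)malloc(sizeof(*b));
    if (b == NULL) {
      list->error = 1;
      return NULL;
    }
  }
  b->next = NULL;
  b->size = 0;
  if (list->last != NULL) list->last->next = b; else list->head = b;
  list->last = b;
  return b;
}

static void BlockListAdd(BlockList* const list, int value) {
  Block* b = list->last;
  if (b == NULL || b->size == BLOCK_CAPACITY) {
    b = BlockListNewBlock(list);
    if (b == NULL) return;        /* list->error is set */
  }
  b->items[b->size++] = value;
}

/* Release all memory, including the recycled blocks. */
static void BlockListDestroy(BlockList* const list) {
  BlockListClear(list);
  while (list->free_blocks != NULL) {
    Block* const next = list->free_blocks->next;
    free(list->free_blocks);
    list->free_blocks = next;
  }
}

int main(void) {
  BlockList list;
  int pass;
  BlockListInit(&list);
  for (pass = 0; pass < 2; ++pass) {    /* second pass reuses the blocks */
    int i;
    for (i = 0; i < 1000; ++i) BlockListAdd(&list, i);
    BlockListClear(&list);
  }
  BlockListDestroy(&list);
  return list.error;
}

The point of the Clear/free-list split is that a later pass over the image reuses the blocks of the earlier one, and the peak allocation tracks how many references were actually emitted rather than the worst case of one entry per pixel.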

src/enc/backward_references.c

@ -23,6 +23,8 @@
#define HASH_MULTIPLIER (0xc6a4a7935bd1e995ULL)
#define MIN_BLOCK_SIZE 256 // minimum block size for backward references
#define MAX_ENTROPY (1e30f)
// 1M window (4M bytes) minus 120 special codes for short distances.
@ -69,43 +71,106 @@ static WEBP_INLINE int FindMatchLength(const uint32_t* const array1,
// -----------------------------------------------------------------------------
// VP8LBackwardRefs
struct PixOrCopyBlock {
PixOrCopyBlock* next_; // next block (or NULL)
PixOrCopy* start_; // data start
int size_; // currently used size
};
static void ClearBackwardRefs(VP8LBackwardRefs* const refs) {
assert(refs != NULL);
- refs->size = 0;
if (refs->tail_ != NULL) {
*refs->tail_ = refs->free_blocks_; // recycle all blocks at once
}
refs->free_blocks_ = refs->refs_;
refs->tail_ = &refs->refs_;
refs->last_block_ = NULL;
refs->refs_ = NULL;
}
- void VP8LBackwardRefsDelete(VP8LBackwardRefs* const refs) {
- if (refs != NULL) {
- WebPSafeFree(refs->refs);
- WebPSafeFree(refs);
- }
- }
- VP8LBackwardRefs* VP8LBackwardRefsNew(int max_size) {
- VP8LBackwardRefs* const refs =
- (VP8LBackwardRefs*)WebPSafeMalloc(1ULL, sizeof(*refs));
- if (refs == NULL) {
- return NULL;
- }
void VP8LBackwardRefsClear(VP8LBackwardRefs* const refs) {
assert(refs != NULL);
ClearBackwardRefs(refs);
- refs->max_size = 0;
- refs->refs = (PixOrCopy*)WebPSafeMalloc(max_size, sizeof(*refs->refs));
- if (refs->refs == NULL) {
- WebPSafeFree(refs);
- return NULL;
while (refs->free_blocks_ != NULL) {
PixOrCopyBlock* const next = refs->free_blocks_->next_;
WebPSafeFree(refs->free_blocks_);
refs->free_blocks_ = next;
}
- refs->max_size = max_size;
- return refs;
}
void VP8LBackwardRefsInit(VP8LBackwardRefs* const refs, int block_size) {
assert(refs != NULL);
memset(refs, 0, sizeof(*refs));
refs->tail_ = &refs->refs_;
refs->block_size_ =
(block_size < MIN_BLOCK_SIZE) ? MIN_BLOCK_SIZE : block_size;
}
VP8LRefsCursor VP8LRefsCursorInit(const VP8LBackwardRefs* const refs) {
VP8LRefsCursor c;
c.cur_block_ = refs->refs_;
if (refs->refs_ != NULL) {
c.cur_pos = c.cur_block_->start_;
c.last_pos_ = c.cur_pos + c.cur_block_->size_;
} else {
c.cur_pos = NULL;
c.last_pos_ = NULL;
}
return c;
}
void VP8LRefsCursorNextBlock(VP8LRefsCursor* const c) {
PixOrCopyBlock* const b = c->cur_block_->next_;
c->cur_pos = (b == NULL) ? NULL : b->start_;
c->last_pos_ = (b == NULL) ? NULL : b->start_ + b->size_;
c->cur_block_ = b;
}
// Create a new block, either from the free list or allocated
static PixOrCopyBlock* BackwardRefsNewBlock(VP8LBackwardRefs* const refs) {
PixOrCopyBlock* b = refs->free_blocks_;
if (b == NULL) { // allocate new memory chunk
const size_t total_size =
sizeof(*b) + refs->block_size_ * sizeof(*b->start_);
b = (PixOrCopyBlock*)WebPSafeMalloc(1ULL, total_size);
if (b == NULL) {
refs->error_ |= 1;
return NULL;
}
b->start_ = (PixOrCopy*)((uint8_t*)b + sizeof(*b)); // not always aligned
} else { // recycle from free-list
refs->free_blocks_ = b->next_;
}
*refs->tail_ = b;
refs->tail_ = &b->next_;
refs->last_block_ = b;
b->next_ = NULL;
b->size_ = 0;
return b;
}
static WEBP_INLINE void BackwardRefsCursorAdd(VP8LBackwardRefs* const refs,
const PixOrCopy v) {
PixOrCopyBlock* b = refs->last_block_;
if (b == NULL || b->size_ == refs->block_size_) {
b = BackwardRefsNewBlock(refs);
if (b == NULL) return; // refs->error_ is set
}
b->start_[b->size_++] = v;
}
int VP8LBackwardRefsCopy(const VP8LBackwardRefs* const src,
VP8LBackwardRefs* const dst) {
assert(src != NULL && dst != NULL);
- if (dst->max_size != src->max_size) {
- return 0;
const PixOrCopyBlock* b = src->refs_;
ClearBackwardRefs(dst);
assert(src->block_size_ == dst->block_size_);
while (b != NULL) {
PixOrCopyBlock* const new_b = BackwardRefsNewBlock(dst);
if (new_b == NULL) return 0; // dst->error_ is set
memcpy(new_b->start_, b->start_, b->size_ * sizeof(*b->start_));
new_b->size_ = b->size_;
b = b->next_;
}
- dst->size = src->size;
- memcpy(dst->refs, src->refs, src->size * sizeof(*src->refs));
return 1;
}
@ -252,36 +317,35 @@ static int HashChainFindCopy(const VP8LHashChain* const p,
}
static WEBP_INLINE void PushBackCopy(VP8LBackwardRefs* const refs, int length) {
- int size = refs->size;
while (length >= MAX_LENGTH) {
- refs->refs[size++] = PixOrCopyCreateCopy(1, MAX_LENGTH);
BackwardRefsCursorAdd(refs, PixOrCopyCreateCopy(1, MAX_LENGTH));
length -= MAX_LENGTH;
}
if (length > 0) {
- refs->refs[size++] = PixOrCopyCreateCopy(1, length);
BackwardRefsCursorAdd(refs, PixOrCopyCreateCopy(1, length));
}
- refs->size = size;
}
- static void BackwardReferencesRle(int xsize, int ysize,
- const uint32_t* const argb,
- VP8LBackwardRefs* const refs) {
static int BackwardReferencesRle(int xsize, int ysize,
const uint32_t* const argb,
VP8LBackwardRefs* const refs) {
const int pix_count = xsize * ysize;
int match_len = 0;
int i;
ClearBackwardRefs(refs);
PushBackCopy(refs, match_len); // i=0 case
- refs->refs[refs->size++] = PixOrCopyCreateLiteral(argb[0]);
BackwardRefsCursorAdd(refs, PixOrCopyCreateLiteral(argb[0]));
for (i = 1; i < pix_count; ++i) {
if (argb[i] == argb[i - 1]) {
++match_len;
} else {
PushBackCopy(refs, match_len);
match_len = 0;
- refs->refs[refs->size++] = PixOrCopyCreateLiteral(argb[i]);
BackwardRefsCursorAdd(refs, PixOrCopyCreateLiteral(argb[i]));
}
}
PushBackCopy(refs, match_len);
return !refs->error_;
}
static int BackwardReferencesHashChain(int xsize, int ysize,
@ -333,14 +397,15 @@ static int BackwardReferencesHashChain(int xsize, int ysize,
if (len2 > len + 1) {
const uint32_t pixel = argb[i];
// Alternative#2 is a better match. So push pixel at 'i' as literal.
PixOrCopy v;
if (use_color_cache && VP8LColorCacheContains(&hashers, pixel)) {
const int ix = VP8LColorCacheGetIndex(&hashers, pixel);
- refs->refs[refs->size] = PixOrCopyCreateCacheIdx(ix);
v = PixOrCopyCreateCacheIdx(ix);
} else {
if (use_color_cache) VP8LColorCacheInsert(&hashers, pixel);
- refs->refs[refs->size] = PixOrCopyCreateLiteral(pixel);
v = PixOrCopyCreateLiteral(pixel);
}
- ++refs->size;
BackwardRefsCursorAdd(refs, v);
i++; // Backward reference to be done for next pixel.
len = len2;
offset = offset2;
@ -349,7 +414,7 @@ static int BackwardReferencesHashChain(int xsize, int ysize,
if (len >= MAX_LENGTH) {
len = MAX_LENGTH - 1;
}
- refs->refs[refs->size++] = PixOrCopyCreateCopy(offset, len);
BackwardRefsCursorAdd(refs, PixOrCopyCreateCopy(offset, len));
if (use_color_cache) {
for (k = 0; k < len; ++k) {
VP8LColorCacheInsert(&hashers, argb[i + k]);
@ -365,22 +430,23 @@ static int BackwardReferencesHashChain(int xsize, int ysize,
i += len;
} else {
const uint32_t pixel = argb[i];
PixOrCopy v;
if (use_color_cache && VP8LColorCacheContains(&hashers, pixel)) {
// push pixel as a PixOrCopyCreateCacheIdx pixel
const int ix = VP8LColorCacheGetIndex(&hashers, pixel);
- refs->refs[refs->size] = PixOrCopyCreateCacheIdx(ix);
v = PixOrCopyCreateCacheIdx(ix);
} else {
if (use_color_cache) VP8LColorCacheInsert(&hashers, pixel);
- refs->refs[refs->size] = PixOrCopyCreateLiteral(pixel);
v = PixOrCopyCreateLiteral(pixel);
}
- ++refs->size;
BackwardRefsCursorAdd(refs, v);
if (i + 1 < pix_count) {
HashChainInsert(hash_chain, &argb[i], i);
}
++i;
}
}
- ok = 1;
ok = !refs->error_;
Error:
if (cc_init) VP8LColorCacheClear(&hashers);
return ok;
@ -607,7 +673,7 @@ static int BackwardReferencesHashChainDistanceOnly(
}
// Last pixel still to do, it can only be a single step if not reached
// through cheaper means already.
- ok = 1;
ok = !refs->error_;
Error:
if (cc_init) VP8LColorCacheClear(&hashers);
WebPSafeFree(cost_model);
@ -671,7 +737,7 @@ static int BackwardReferencesHashChainFollowChosenPath(
window_size, iter_pos, iter_limit,
&offset, &len);
assert(len == max_len);
- refs->refs[size] = PixOrCopyCreateCopy(offset, len);
BackwardRefsCursorAdd(refs, PixOrCopyCreateCopy(offset, len));
if (use_color_cache) {
for (k = 0; k < len; ++k) {
VP8LColorCacheInsert(&hashers, argb[i + k]);
@ -685,23 +751,23 @@ static int BackwardReferencesHashChainFollowChosenPath(
}
i += len;
} else {
PixOrCopy v;
if (use_color_cache && VP8LColorCacheContains(&hashers, argb[i])) {
// push pixel as a color cache index
const int idx = VP8LColorCacheGetIndex(&hashers, argb[i]);
- refs->refs[size] = PixOrCopyCreateCacheIdx(idx);
v = PixOrCopyCreateCacheIdx(idx);
} else {
if (use_color_cache) VP8LColorCacheInsert(&hashers, argb[i]);
- refs->refs[size] = PixOrCopyCreateLiteral(argb[i]);
v = PixOrCopyCreateLiteral(argb[i]);
}
BackwardRefsCursorAdd(refs, v);
if (i + 1 < pix_count) {
HashChainInsert(hash_chain, &argb[i], i);
}
++i;
}
}
- assert(size <= refs->max_size);
- refs->size = size;
- ok = 1;
ok = !refs->error_;
Error:
if (cc_init) VP8LColorCacheClear(&hashers);
return ok;
@ -741,39 +807,40 @@ static int BackwardReferencesTraceBackwards(int xsize, int ysize,
}
static void BackwardReferences2DLocality(int xsize,
- VP8LBackwardRefs* const refs) {
- int i;
- for (i = 0; i < refs->size; ++i) {
- if (PixOrCopyIsCopy(&refs->refs[i])) {
- const int dist = refs->refs[i].argb_or_distance;
const VP8LBackwardRefs* const refs) {
VP8LRefsCursor c = VP8LRefsCursorInit(refs);
while (VP8LRefsCursorOk(&c)) {
if (PixOrCopyIsCopy(c.cur_pos)) {
const int dist = c.cur_pos->argb_or_distance;
const int transformed_dist = DistanceToPlaneCode(xsize, dist);
- refs->refs[i].argb_or_distance = transformed_dist;
c.cur_pos->argb_or_distance = transformed_dist;
}
VP8LRefsCursorNext(&c);
}
}
VP8LBackwardRefs* VP8LGetBackwardReferences(
int width, int height, const uint32_t* const argb, int quality,
int cache_bits, int use_2d_locality, VP8LHashChain* const hash_chain,
- VP8LBackwardRefs* const refs_array[2]) {
VP8LBackwardRefs refs_array[2]) {
int lz77_is_useful;
const int num_pix = width * height;
VP8LBackwardRefs* best = NULL;
- VP8LBackwardRefs* const refs_lz77 = refs_array[0];
- VP8LBackwardRefs* const refs_rle = refs_array[1];
VP8LBackwardRefs* const refs_lz77 = &refs_array[0];
VP8LBackwardRefs* const refs_rle = &refs_array[1];
- ClearBackwardRefs(refs_lz77);
if (!BackwardReferencesHashChain(width, height, argb, cache_bits, quality,
hash_chain, refs_lz77)) {
- goto End;
return NULL;
}
if (!BackwardReferencesRle(width, height, argb, refs_rle)) {
return NULL;
}
- ClearBackwardRefs(refs_rle);
- BackwardReferencesRle(width, height, argb, refs_rle);
{
double bit_cost_lz77, bit_cost_rle;
VP8LHistogram* const histo = VP8LAllocateHistogram(cache_bits);
- if (histo == NULL) goto End;
if (histo == NULL) return NULL;
// Evaluate LZ77 coding.
VP8LHistogramCreate(histo, refs_lz77, cache_bits);
bit_cost_lz77 = VP8LHistogramEstimateBits(histo);
@ -794,7 +861,7 @@ VP8LBackwardRefs* VP8LGetBackwardReferences(
// Set recursion level for large images using a color cache.
const int recursion_level =
(num_pix < 320 * 200) && (cache_bits > 0) ? 1 : 0;
- VP8LBackwardRefs* const refs_trace = refs_array[1];
VP8LBackwardRefs* const refs_trace = &refs_array[1];
ClearBackwardRefs(refs_trace);
if (BackwardReferencesTraceBackwards(width, height, recursion_level, argb,
quality, cache_bits, hash_chain,
@ -808,7 +875,6 @@ VP8LBackwardRefs* VP8LGetBackwardReferences(
if (use_2d_locality) BackwardReferences2DLocality(width, best);
- End:
return best;
}
@ -818,13 +884,13 @@ static double ComputeCacheEntropy(const uint32_t* const argb,
const VP8LBackwardRefs* const refs,
int cache_bits) {
int pixel_index = 0;
- int i;
uint32_t k;
const int use_color_cache = (cache_bits > 0);
int cc_init = 0;
double entropy = MAX_ENTROPY;
const double kSmallPenaltyForLargeCache = 4.0;
VP8LColorCache hashers;
VP8LRefsCursor c = VP8LRefsCursorInit(refs);
VP8LHistogram* histo = VP8LAllocateHistogram(cache_bits);
if (histo == NULL) goto Error;
@ -833,8 +899,8 @@ static double ComputeCacheEntropy(const uint32_t* const argb,
if (!cc_init) goto Error;
}
- for (i = 0; i < refs->size; ++i) {
- const PixOrCopy* const v = &refs->refs[i];
while (VP8LRefsCursorOk(&c)) {
const PixOrCopy* const v = c.cur_pos;
if (PixOrCopyIsLiteral(v)) {
if (use_color_cache &&
VP8LColorCacheContains(&hashers, argb[pixel_index])) {
@ -854,6 +920,7 @@ static double ComputeCacheEntropy(const uint32_t* const argb,
}
}
pixel_index += PixOrCopyLength(v);
VP8LRefsCursorNext(&c);
}
assert(pixel_index == xsize * ysize);
(void)xsize; // xsize is not used in non-debug compilations otherwise.
@ -872,7 +939,6 @@ int VP8LCalculateEstimateForCacheSize(const uint32_t* const argb,
VP8LHashChain* const hash_chain,
VP8LBackwardRefs* const refs,
int* const best_cache_bits) {
- int ok = 0;
int eval_low = 1;
int eval_high = 1;
double entropy_low = MAX_ENTROPY;
@ -880,10 +946,9 @@ int VP8LCalculateEstimateForCacheSize(const uint32_t* const argb,
int cache_bits_low = 0;
int cache_bits_high = MAX_COLOR_CACHE_BITS;
- ClearBackwardRefs(refs);
if (!BackwardReferencesHashChain(xsize, ysize, argb, 0, quality, hash_chain,
refs)) {
- goto Error;
return 0;
}
// Do a binary search to find the optimal entropy for cache_bits.
while (cache_bits_high - cache_bits_low > 1) {
@ -907,7 +972,5 @@ int VP8LCalculateEstimateForCacheSize(const uint32_t* const argb,
eval_high = 1;
}
}
- ok = 1;
- Error:
- return ok;
return 1;
}

src/enc/backward_references.h

@ -113,7 +113,7 @@ static WEBP_INLINE uint32_t PixOrCopyDistance(const PixOrCopy* const p) {
}
// -----------------------------------------------------------------------------
- // VP8LBackwardRefs
// VP8LHashChain
#define HASH_BITS 18
#define HASH_SIZE (1 << HASH_BITS)
@ -134,24 +134,58 @@ struct VP8LHashChain {
int VP8LHashChainInit(VP8LHashChain* const p, int size);
void VP8LHashChainClear(VP8LHashChain* const p); // release memory
// -----------------------------------------------------------------------------
// VP8LBackwardRefs (block-based backward-references storage)
// maximum number of reference blocks the image will be segmented into
#define MAX_REFS_BLOCK_PER_IMAGE 16
typedef struct PixOrCopyBlock PixOrCopyBlock; // forward declaration
typedef struct VP8LBackwardRefs VP8LBackwardRefs;
// Container for blocks chain
struct VP8LBackwardRefs {
- PixOrCopy* refs;
- int size; // currently used
- int max_size; // maximum capacity
int block_size_; // common block-size
int error_; // set to true if some memory error occurred
PixOrCopyBlock* refs_; // list of currently used blocks
PixOrCopyBlock** tail_; // for list recycling
PixOrCopyBlock* free_blocks_; // free-list
PixOrCopyBlock* last_block_; // used for adding new refs (internal)
};
- // Release backward references. 'refs' can be NULL.
- void VP8LBackwardRefsDelete(VP8LBackwardRefs* const refs);
- // Allocate 'max_size' references. Returns NULL in case of memory error.
- VP8LBackwardRefs* VP8LBackwardRefsNew(int max_size);
- // Copies the 'src' backward refs to the 'dst'. Returns 0 if there's mismatch
- // in the capacity (max_size) of 'src' and 'dst' refs.
// Initialize the object. 'block_size' is the common block size to store
// references (typically, width * height / MAX_REFS_BLOCK_PER_IMAGE).
void VP8LBackwardRefsInit(VP8LBackwardRefs* const refs, int block_size);
// Release memory for backward references.
void VP8LBackwardRefsClear(VP8LBackwardRefs* const refs);
// Copies the 'src' backward refs to the 'dst'. Returns 0 in case of error.
int VP8LBackwardRefsCopy(const VP8LBackwardRefs* const src,
VP8LBackwardRefs* const dst);
// Cursor for iterating on references content
typedef struct {
// public:
PixOrCopy* cur_pos; // current position
// private:
PixOrCopyBlock* cur_block_; // current block in the refs list
const PixOrCopy* last_pos_; // sentinel for switching to next block
} VP8LRefsCursor;
// Returns a cursor positioned at the beginning of the references list.
VP8LRefsCursor VP8LRefsCursorInit(const VP8LBackwardRefs* const refs);
// Returns true if cursor is pointing at a valid position.
static WEBP_INLINE int VP8LRefsCursorOk(const VP8LRefsCursor* const c) {
return (c->cur_pos != NULL);
}
// Move to next block of references. Internal, not to be called directly.
void VP8LRefsCursorNextBlock(VP8LRefsCursor* const c);
// Move to next position, or NULL. Should not be called if !VP8LRefsCursorOk().
static WEBP_INLINE void VP8LRefsCursorNext(VP8LRefsCursor* const c) {
assert(c != NULL);
assert(VP8LRefsCursorOk(c));
if (++c->cur_pos == c->last_pos_) VP8LRefsCursorNextBlock(c);
}
// -----------------------------------------------------------------------------
// Main entry points
@ -162,7 +196,7 @@ int VP8LBackwardRefsCopy(const VP8LBackwardRefs* const src,
VP8LBackwardRefs* VP8LGetBackwardReferences(
int width, int height, const uint32_t* const argb, int quality,
int cache_bits, int use_2d_locality, VP8LHashChain* const hash_chain,
- VP8LBackwardRefs* const refs[2]);
VP8LBackwardRefs refs[2]);
// Produce an estimate for a good color cache size for the image.
int VP8LCalculateEstimateForCacheSize(const uint32_t* const argb,

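A short usage sketch of the cursor API declared above. The helper below is hypothetical caller code (CountSymbols is not part of the library) and assumes the declarations from this header plus the PixOrCopy predicates it already references:

// Hypothetical helper: count literal vs. copy symbols in a filled refs list.
static void CountSymbols(const VP8LBackwardRefs* const refs,
                         int* const num_literals, int* const num_copies) {
  VP8LRefsCursor c = VP8LRefsCursorInit(refs);
  *num_literals = 0;
  *num_copies = 0;
  while (VP8LRefsCursorOk(&c)) {
    const PixOrCopy* const v = c.cur_pos;
    if (PixOrCopyIsLiteral(v)) {
      ++*num_literals;
    } else if (PixOrCopyIsCopy(v)) {
      ++*num_copies;          // cache-index entries fall in neither bucket
    }
    VP8LRefsCursorNext(&c);   // steps across block boundaries transparently
  }
}

The same init/ok/next pattern is what the loops in histogram.c and vp8l.c below switch to in place of indexed access into a flat array.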
src/enc/histogram.c

@ -64,9 +64,10 @@ void VP8LFreeHistogramSet(VP8LHistogramSet* const histo) {
void VP8LHistogramStoreRefs(const VP8LBackwardRefs* const refs,
VP8LHistogram* const histo) {
- int i;
- for (i = 0; i < refs->size; ++i) {
- VP8LHistogramAddSinglePixOrCopy(histo, &refs->refs[i]);
VP8LRefsCursor c = VP8LRefsCursorInit(refs);
while (VP8LRefsCursorOk(&c)) {
VP8LHistogramAddSinglePixOrCopy(histo, c.cur_pos);
VP8LRefsCursorNext(&c);
}
}
@ -423,14 +424,14 @@ static int GetHistoBinIndex(
static void HistogramBuild(
int xsize, int histo_bits, const VP8LBackwardRefs* const backward_refs,
VP8LHistogramSet* const init_histo) {
- int i;
int x = 0, y = 0;
const int histo_xsize = VP8LSubSampleSize(xsize, histo_bits);
VP8LHistogram** const histograms = init_histo->histograms;
VP8LRefsCursor c = VP8LRefsCursorInit(backward_refs);
assert(histo_bits > 0);
// Construct the Histo from a given backward references.
- for (i = 0; i < backward_refs->size; ++i) {
- const PixOrCopy* const v = &backward_refs->refs[i];
while (VP8LRefsCursorOk(&c)) {
const PixOrCopy* const v = c.cur_pos;
const int ix = (y >> histo_bits) * histo_xsize + (x >> histo_bits);
VP8LHistogramAddSinglePixOrCopy(histograms[ix], v);
x += PixOrCopyLength(v);
@ -438,6 +439,7 @@ static void HistogramBuild(
x -= xsize;
++y;
}
VP8LRefsCursorNext(&c);
}
}

src/enc/vp8l.c

@ -140,6 +140,9 @@ static int AnalyzeAndInit(VP8LEncoder* const enc, WebPImageHint image_hint) {
const int width = pic->width;
const int height = pic->height;
const int pix_cnt = width * height;
// we round the block size up, so we're guaranteed to have
// at max MAX_REFS_BLOCK_PER_IMAGE blocks used:
int refs_block_size = (pix_cnt - 1) / MAX_REFS_BLOCK_PER_IMAGE + 1;
assert(pic != NULL && pic->argb != NULL);
enc->use_palette_ =
@ -169,11 +172,11 @@ static int AnalyzeAndInit(VP8LEncoder* const enc, WebPImageHint image_hint) {
}
if (!VP8LHashChainInit(&enc->hash_chain_, pix_cnt)) return 0;
- enc->refs_[0] = VP8LBackwardRefsNew(pix_cnt);
- enc->refs_[1] = VP8LBackwardRefsNew(pix_cnt);
- if (enc->refs_[0] == NULL || enc->refs_[1] == NULL) {
- return 0;
- }
// palette-friendly input typically uses less literals
// -> reduce block size a bit
if (enc->use_palette_) refs_block_size /= 2;
VP8LBackwardRefsInit(&enc->refs_[0], refs_block_size);
VP8LBackwardRefsInit(&enc->refs_[1], refs_block_size);
return 1;
}
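As a hypothetical worked example of the rounding above: for a 1000x800 input, pix_cnt is 800000 and refs_block_size = (800000 - 1) / 16 + 1 = 50000, so MAX_REFS_BLOCK_PER_IMAGE (16) full blocks cover the worst case of one PixOrCopy per pixel, while a well-compressing image only allocates the few blocks it actually fills (and palette-friendly input halves the block size further).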
@ -425,16 +428,16 @@ static void WriteHuffmanCode(VP8LBitWriter* const bw,
static void StoreImageToBitMask(
VP8LBitWriter* const bw, int width, int histo_bits,
- const VP8LBackwardRefs* const refs,
VP8LBackwardRefs* const refs,
const uint16_t* histogram_symbols,
const HuffmanTreeCode* const huffman_codes) {
// x and y trace the position in the image.
int x = 0;
int y = 0;
const int histo_xsize = histo_bits ? VP8LSubSampleSize(width, histo_bits) : 1;
- int i;
- for (i = 0; i < refs->size; ++i) {
- const PixOrCopy* const v = &refs->refs[i];
VP8LRefsCursor c = VP8LRefsCursorInit(refs);
while (VP8LRefsCursorOk(&c)) {
const PixOrCopy* const v = c.cur_pos;
const int histogram_ix = histogram_symbols[histo_bits ?
(y >> histo_bits) * histo_xsize +
(x >> histo_bits) : 0];
@ -468,6 +471,7 @@ static void StoreImageToBitMask(
x -= width;
++y;
}
VP8LRefsCursorNext(&c);
}
}
@ -475,7 +479,7 @@ static void StoreImageToBitMask(
static int EncodeImageNoHuffman(VP8LBitWriter* const bw,
const uint32_t* const argb,
VP8LHashChain* const hash_chain,
- VP8LBackwardRefs* const refs_array[2],
VP8LBackwardRefs refs_array[2],
int width, int height, int quality) {
int i;
int ok = 0;
@ -540,7 +544,7 @@ static int EncodeImageNoHuffman(VP8LBitWriter* const bw,
static int EncodeImageInternal(VP8LBitWriter* const bw,
const uint32_t* const argb,
VP8LHashChain* const hash_chain,
- VP8LBackwardRefs* const refs_array[2],
VP8LBackwardRefs refs_array[2],
int width, int height, int quality,
int cache_bits, int histogram_bits) {
int ok = 0;
@ -556,7 +560,7 @@ static int EncodeImageInternal(VP8LBitWriter* const bw,
HuffmanTree* huff_tree = NULL;
HuffmanTreeToken* tokens = NULL;
HuffmanTreeCode* huffman_codes = NULL;
- VP8LBackwardRefs* refs = NULL;
VP8LBackwardRefs refs;
VP8LBackwardRefs* best_refs;
uint16_t* const histogram_symbols =
(uint16_t*)WebPSafeMalloc(histogram_image_xysize,
@ -564,28 +568,24 @@ static int EncodeImageInternal(VP8LBitWriter* const bw,
assert(histogram_bits >= MIN_HUFFMAN_BITS);
assert(histogram_bits <= MAX_HUFFMAN_BITS);
VP8LBackwardRefsInit(&refs, refs_array[0].block_size_);
if (histogram_image == NULL || histogram_symbols == NULL) {
VP8LFreeHistogramSet(histogram_image);
WebPSafeFree(histogram_symbols);
return 0;
}
- refs = VP8LBackwardRefsNew(refs_array[0]->max_size);
- if (refs == NULL) {
- goto Error;
- }
// 'best_refs' is the reference to the best backward refs and points to one
// of refs_array[0] or refs_array[1].
// Calculate backward references from ARGB image.
best_refs = VP8LGetBackwardReferences(width, height, argb, quality,
cache_bits, use_2d_locality,
hash_chain, refs_array);
- if (best_refs == NULL || !VP8LBackwardRefsCopy(best_refs, refs)) {
if (best_refs == NULL || !VP8LBackwardRefsCopy(best_refs, &refs)) {
goto Error;
}
// Build histogram image and symbols from backward references.
- if (!VP8LGetHistoImageSymbols(width, height, refs,
if (!VP8LGetHistoImageSymbols(width, height, &refs,
quality, histogram_bits, cache_bits,
histogram_image,
histogram_symbols)) {
@ -665,7 +665,7 @@ static int EncodeImageInternal(VP8LBitWriter* const bw,
}
// Store actual literals.
- StoreImageToBitMask(bw, width, histogram_bits, refs,
StoreImageToBitMask(bw, width, histogram_bits, &refs,
histogram_symbols, huffman_codes);
ok = 1;
@ -673,7 +673,7 @@ static int EncodeImageInternal(VP8LBitWriter* const bw,
WebPSafeFree(tokens);
WebPSafeFree(huff_tree);
VP8LFreeHistogramSet(histogram_image);
- VP8LBackwardRefsDelete(refs);
VP8LBackwardRefsClear(&refs);
if (huffman_codes != NULL) {
WebPSafeFree(huffman_codes->codes);
WebPSafeFree(huffman_codes);
@ -740,7 +740,8 @@ static int ApplyPredictFilter(const VP8LEncoder* const enc,
VP8LWriteBits(bw, 3, pred_bits - 2);
if (!EncodeImageNoHuffman(bw, enc->transform_data_,
(VP8LHashChain*)&enc->hash_chain_,
- enc->refs_, transform_width, transform_height,
(VP8LBackwardRefs*)enc->refs_, // cast const away
transform_width, transform_height,
quality)) {
return 0;
}
@ -762,7 +763,8 @@ static int ApplyCrossColorFilter(const VP8LEncoder* const enc,
VP8LWriteBits(bw, 3, ccolor_transform_bits - 2);
if (!EncodeImageNoHuffman(bw, enc->transform_data_,
(VP8LHashChain*)&enc->hash_chain_,
- enc->refs_, transform_width, transform_height,
(VP8LBackwardRefs*)enc->refs_, // cast const away
transform_width, transform_height,
quality)) {
return 0;
}
@ -1029,8 +1031,8 @@ static VP8LEncoder* VP8LEncoderNew(const WebPConfig* const config,
static void VP8LEncoderDelete(VP8LEncoder* enc) {
if (enc != NULL) {
VP8LHashChainClear(&enc->hash_chain_);
- VP8LBackwardRefsDelete(enc->refs_[0]);
- VP8LBackwardRefsDelete(enc->refs_[1]);
VP8LBackwardRefsClear(&enc->refs_[0]);
VP8LBackwardRefsClear(&enc->refs_[1]);
WebPSafeFree(enc->argb_);
WebPSafeFree(enc);
}
@ -1114,7 +1116,7 @@ WebPEncodingError VP8LEncodeStream(const WebPConfig* const config,
if (enc->cache_bits_ > 0) {
if (!VP8LCalculateEstimateForCacheSize(enc->argb_, enc->current_width_,
height, quality, &enc->hash_chain_,
- enc->refs_[0], &enc->cache_bits_)) {
&enc->refs_[0], &enc->cache_bits_)) {
err = VP8_ENC_ERROR_INVALID_CONFIGURATION;
goto Error;
}

src/enc/vp8li.h

@ -48,10 +48,10 @@ typedef struct {
uint32_t palette_[MAX_PALETTE_SIZE];
// Some 'scratch' (potentially large) objects.
- struct VP8LBackwardRefs* refs_[2]; // Backward Refs array corresponding to
- // LZ77 & RLE coding.
- VP8LHashChain hash_chain_; // HashChain data for constructing
- // backward references.
struct VP8LBackwardRefs refs_[2]; // Backward Refs array corresponding to
// LZ77 & RLE coding.
VP8LHashChain hash_chain_; // HashChain data for constructing
// backward references.
} VP8LEncoder;
//------------------------------------------------------------------------------
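Taken together, the encoder-side lifecycle moves from heap-allocated refs (VP8LBackwardRefsNew / VP8LBackwardRefsDelete) to structs embedded in VP8LEncoder that are set up with VP8LBackwardRefsInit and torn down with VP8LBackwardRefsClear. A condensed sketch of the new flow, simplified from AnalyzeAndInit and VP8LEncoderDelete above (the two helper names are invented; error handling and the palette adjustment are omitted):

// Simplified from the diff above: refs_[] now live inside VP8LEncoder by value.
static void EncoderSetupRefs(VP8LEncoder* const enc, int pix_cnt) {
  // Round up so that at most MAX_REFS_BLOCK_PER_IMAGE blocks are ever needed.
  const int refs_block_size = (pix_cnt - 1) / MAX_REFS_BLOCK_PER_IMAGE + 1;
  VP8LBackwardRefsInit(&enc->refs_[0], refs_block_size);  // cannot fail;
  VP8LBackwardRefsInit(&enc->refs_[1], refs_block_size);  // blocks are lazy
}

static void EncoderReleaseRefs(VP8LEncoder* const enc) {
  VP8LBackwardRefsClear(&enc->refs_[0]);   // frees used and recycled blocks
  VP8LBackwardRefsClear(&enc->refs_[1]);
}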