Simplify backward refs calculation for low-effort.

Simplify and speed up the backward-references computation for low-effort settings by
evaluating LZ77 references only. This speeds up compression by 10-25% in the lower
quality range (q <= 25), with a slight (0.2%) drop in compression density.

Change-Id: Ibd6f03b1a062d8ab9191786c2a425e9132e4779f
Author:  Vikas Arora
Date:    2015-01-27 09:35:32 -08:00
Commit:  4d6d7285b0 (parent: ec0d1be577)

2 changed files with 33 additions and 7 deletions
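
At a glance, the patch splits the backward-reference entry point into a low-effort path and the existing full-evaluation path. The sketch below paraphrases the resulting control flow from the diff that follows; the comments are editorial and not part of the patch.

/* Control-flow sketch of the new dispatch (paraphrasing the diff below). */
VP8LBackwardRefs* VP8LGetBackwardReferences(
    int width, int height, const uint32_t* const argb, int quality,
    int low_effort, int* const cache_bits, VP8LHashChain* const hash_chain,
    VP8LBackwardRefs refs_array[2]) {
  if (low_effort) {
    // Low-effort path: a single LZ77 pass with the color cache disabled
    // (*cache_bits is forced to 0), followed only by the 2D-locality fixup.
    return GetBackwardReferencesLowEffort(width, height, argb, quality,
                                          cache_bits, hash_chain, refs_array);
  }
  // Regular path: build RLE and LZ77 candidates, optionally refine with the
  // costly TraceBackwards pass (quality >= 25), and keep the cheaper one.
  return GetBackwardReferences(width, height, argb, quality, cache_bits,
                               hash_chain, refs_array);
}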


@@ -965,9 +965,23 @@ static int BackwardRefsWithLocalCache(const uint32_t* const argb,
   return 1;
 }
 
-VP8LBackwardRefs* VP8LGetBackwardReferences(
+static VP8LBackwardRefs* GetBackwardReferencesLowEffort(
     int width, int height, const uint32_t* const argb, int quality,
-    int low_effort, int* cache_bits, VP8LHashChain* const hash_chain,
+    int* const cache_bits, VP8LHashChain* const hash_chain,
+    VP8LBackwardRefs refs_array[2]) {
+  VP8LBackwardRefs* refs_lz77 = &refs_array[0];
+  *cache_bits = 0;
+  if (!BackwardReferencesLz77(width, height, argb, 0, quality,
+                              1 /* Low effort. */, hash_chain, refs_lz77)) {
+    return NULL;
+  }
+  BackwardReferences2DLocality(width, refs_lz77);
+  return refs_lz77;
+}
+
+static VP8LBackwardRefs* GetBackwardReferences(
+    int width, int height, const uint32_t* const argb, int quality,
+    int* const cache_bits, VP8LHashChain* const hash_chain,
     VP8LBackwardRefs refs_array[2]) {
   int lz77_is_useful;
   int lz77_computed;
@@ -991,7 +1005,7 @@ VP8LBackwardRefs* VP8LGetBackwardReferences(
     }
   } else {
     if (!BackwardReferencesLz77(width, height, argb, *cache_bits, quality,
-                                low_effort, hash_chain, refs_lz77)) {
+                                0 /* Low effort. */, hash_chain, refs_lz77)) {
       goto Error;
     }
   }
@@ -1016,9 +1030,8 @@ VP8LBackwardRefs* VP8LGetBackwardReferences(
 
   // Choose appropriate backward reference.
   if (lz77_is_useful) {
-    // TraceBackwards is costly. Don't execute it at lower quality or low effort
-    // compression setting.
-    const int try_lz77_trace_backwards = (quality >= 25) && !low_effort;
+    // TraceBackwards is costly. Don't execute it at lower quality.
+    const int try_lz77_trace_backwards = (quality >= 25);
     best = refs_lz77;       // default guess: lz77 is better
     if (try_lz77_trace_backwards) {
       VP8LBackwardRefs* const refs_trace = refs_rle;
@@ -1048,3 +1061,16 @@ VP8LBackwardRefs* VP8LGetBackwardReferences(
   VP8LFreeHistogram(histo);
   return best;
 }
+
+VP8LBackwardRefs* VP8LGetBackwardReferences(
+    int width, int height, const uint32_t* const argb, int quality,
+    int low_effort, int* const cache_bits, VP8LHashChain* const hash_chain,
+    VP8LBackwardRefs refs_array[2]) {
+  if (low_effort) {
+    return GetBackwardReferencesLowEffort(width, height, argb, quality,
+                                          cache_bits, hash_chain, refs_array);
+  } else {
+    return GetBackwardReferences(width, height, argb, quality, cache_bits,
+                                 hash_chain, refs_array);
+  }
+}


@@ -192,7 +192,7 @@ static WEBP_INLINE void VP8LRefsCursorNext(VP8LRefsCursor* const c) {
 // refs[0] or refs[1].
 VP8LBackwardRefs* VP8LGetBackwardReferences(
     int width, int height, const uint32_t* const argb, int quality,
-    int low_effort, int* cache_bits, VP8LHashChain* const hash_chain,
+    int low_effort, int* const cache_bits, VP8LHashChain* const hash_chain,
     VP8LBackwardRefs refs[2]);
 
 #ifdef __cplusplus
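
For orientation, a hypothetical caller of the unchanged public entry point might look roughly like the sketch below. Only VP8LGetBackwardReferences comes from this diff; VP8LHashChainInit, VP8LBackwardRefsInit, and their Clear counterparts are assumed to exist in backward_references.h and their exact signatures may differ, and the block size passed to the init calls is chosen arbitrarily for the sketch.

/* Hypothetical caller sketch; helper names and sizes are assumptions. */
static int ComputeRefsSketch(const uint32_t* const argb, int width, int height,
                             int quality, int low_effort) {
  VP8LHashChain hash_chain;
  VP8LBackwardRefs refs[2];
  int cache_bits = 0;   /* in/out color-cache hint; low-effort forces it to 0 */
  VP8LBackwardRefs* best = NULL;
  int ok = 0;

  if (!VP8LHashChainInit(&hash_chain, width * height)) return 0;
  VP8LBackwardRefsInit(&refs[0], width);   /* block size: arbitrary here */
  VP8LBackwardRefsInit(&refs[1], width);

  best = VP8LGetBackwardReferences(width, height, argb, quality, low_effort,
                                   &cache_bits, &hash_chain, refs);
  ok = (best != NULL);  /* NULL signals failure in both paths */

  /* ... consume 'best' here before releasing the storage ... */
  VP8LBackwardRefsClear(&refs[0]);
  VP8LBackwardRefsClear(&refs[1]);
  VP8LHashChainClear(&hash_chain);
  return ok;
}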