Enable entropy based merge histo for (q<100)

Enable bin-partition entropy based heuristic for merging histograms
for higher (q >= 90) qualities as well. Keep the old behavior at the
maximum quality level (q==100).

This speeds up compression between Q=90-99 (method=4) by a factor of
5-7X, with a loss of 0.5-0.8% in compression density.

Change-Id: I011182cb8ae5403c565a150362bc302630b3f330
This commit is contained in:
Vikas Arora 2014-10-29 14:52:43 -07:00 committed by Gerrit Code Review
parent 572022a350
commit abf04205b3

View File

@ -949,10 +949,12 @@ static void HistogramRemap(const VP8LHistogramSet* const orig_histo,
// Returns the cost-combination threshold factor used when deciding whether
// two histograms should be merged. Larger histogram sets and lower qualities
// get a smaller factor (more aggressive merging, faster compression).
// At quality >= 90 the reductions are skipped entirely to preserve
// compression density at the high-quality settings.
// NOTE: the collapsed diff in the source showed both the pre- and post-change
// conditionals; only the guarded (post-commit) form is kept here, otherwise
// each factor reduction would be applied twice.
static double GetCombineCostFactor(int histo_size, int quality) {
  double combine_cost_factor = 0.16;
  if (quality < 90) {
    if (histo_size > 256) combine_cost_factor /= 2.;
    if (histo_size > 512) combine_cost_factor /= 2.;
    if (histo_size > 1024) combine_cost_factor /= 2.;
    if (quality <= 50) combine_cost_factor /= 2.;
  }
  return combine_cost_factor;
}
@ -983,9 +985,8 @@ int VP8LGetHistoImageSymbols(int xsize, int ysize,
// Don't attempt linear bin-partition heuristic for:
// histograms of small sizes, as bin_map will be very sparse and;
// Higher qualities (> 90), to preserve the compression gains at those
// quality settings.
if (orig_histo->size > 2 * BIN_SIZE && quality < 90) {
// Maximum quality (q==100), to preserve the compression gains at that level.
if (orig_histo->size > 2 * BIN_SIZE && quality < 100) {
const int bin_map_size = bin_depth * BIN_SIZE;
bin_map = (int16_t*)WebPSafeCalloc(bin_map_size, sizeof(*bin_map));
if (bin_map == NULL) goto Error;