Mirror of https://github.com/webmproject/libwebp.git (synced 2024-11-20 12:28:26 +01:00)
add a fallback to ALPHA_NO_COMPRESSION
If ALPHA_LOSSLESS_COMPRESSION produces a file that is too big (very rare!), we automatically fall back to no-compression.

Change-Id: I5f3f509c635ce43a5e7c23f5d0f0c8329a5f24b7
parent a6140194ff
commit 187d379db6
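The new EncodeAlphaInternal logic first runs the lossless coder into a temporary VP8LBitWriter, then compares the compressed size against the raw alpha plane; only if the compressed output is strictly larger does it revert to ALPHA_NO_COMPRESSION. A minimal, self-contained sketch of that decision follows; CompressFunc and EncodeAlphaWithFallback are hypothetical stand-ins for illustration, not libwebp APIs.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Hypothetical compressor hook (not a libwebp entry point): writes at most
 * 'cap' bytes of compressed output into 'dst' and returns the compressed
 * size, or 0 if the data cannot be represented within 'cap' bytes. */
typedef size_t (*CompressFunc)(const uint8_t* src, size_t size,
                               uint8_t* dst, size_t cap);

/* Sketch of the fallback rule: try lossless first, and if the result would
 * be larger than the raw alpha plane, store the plane uncompressed instead.
 * 'dst' must hold at least 1 + data_size bytes; the leading byte records the
 * method (0 = none, 1 = lossless), loosely mirroring the alpha chunk header.
 * Returns the number of bytes written. */
static size_t EncodeAlphaWithFallback(const uint8_t* data, size_t data_size,
                                      uint8_t* dst, CompressFunc compress) {
  /* Cap the compressed output at data_size: anything bigger than the raw
   * plane shows up as a failure and is routed to the fallback below. */
  const size_t packed =
      (compress != NULL) ? compress(data, data_size, dst + 1, data_size) : 0;
  if (packed > 0) {
    dst[0] = 1;  /* keep the lossless result: no larger than the raw plane */
    return 1 + packed;
  }
  dst[0] = 0;    /* the rare case: fall back to no compression */
  memcpy(dst + 1, data, data_size);
  return 1 + data_size;
}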
@@ -47,12 +47,11 @@
 static int EncodeLossless(const uint8_t* const data, int width, int height,
                           int effort_level,  // in [0..6] range
-                          VP8BitWriter* const bw,
+                          VP8LBitWriter* const bw,
                           WebPAuxStats* const stats) {
   int ok = 0;
   WebPConfig config;
   WebPPicture picture;
-  VP8LBitWriter tmp_bw;
 
   WebPPictureInit(&picture);
   picture.width = width;
@@ -84,16 +83,15 @@ static int EncodeLossless(const uint8_t* const data, int width, int height,
   config.quality = 8.f * effort_level;
   assert(config.quality >= 0 && config.quality <= 100.f);
 
-  ok = VP8LBitWriterInit(&tmp_bw, (width * height) >> 3);
-  ok = ok && (VP8LEncodeStream(&config, &picture, &tmp_bw) == VP8_ENC_OK);
+  ok = (VP8LEncodeStream(&config, &picture, bw) == VP8_ENC_OK);
   WebPPictureFree(&picture);
-  if (ok) {
-    const uint8_t* const buffer = VP8LBitWriterFinish(&tmp_bw);
-    const size_t buffer_size = VP8LBitWriterNumBytes(&tmp_bw);
-    VP8BitWriterAppend(bw, buffer, buffer_size);
-  }
-  VP8LBitWriterDestroy(&tmp_bw);
-  return ok && !bw->error_;
+  ok = ok && !bw->error_;
+  if (!ok) {
+    VP8LBitWriterDestroy(bw);
+    return 0;
+  }
+  return 1;
 }
 
 // -----------------------------------------------------------------------------
@@ -115,8 +113,10 @@ static int EncodeAlphaInternal(const uint8_t* const data, int width, int height,
   const uint8_t* alpha_src;
   WebPFilterFunc filter_func;
   uint8_t header;
-  size_t expected_size;
   const size_t data_size = width * height;
+  const uint8_t* output = NULL;
+  size_t output_size = 0;
+  VP8LBitWriter tmp_bw;
 
   assert((uint64_t)data_size == (uint64_t)width * height);  // as per spec
   assert(filter >= 0 && filter < WEBP_FILTER_LAST);
@@ -125,15 +125,6 @@ static int EncodeAlphaInternal(const uint8_t* const data, int width, int height,
   assert(sizeof(header) == ALPHA_HEADER_LEN);
   // TODO(skal): have a common function and #define's to validate alpha params.
 
-  expected_size =
-      (method == ALPHA_NO_COMPRESSION) ? (ALPHA_HEADER_LEN + data_size)
-                                       : (data_size >> 5);
-  header = method | (filter << 2);
-  if (reduce_levels) header |= ALPHA_PREPROCESSED_LEVELS << 4;
-
-  VP8BitWriterInit(&result->bw, expected_size);
-  VP8BitWriterAppend(&result->bw, &header, ALPHA_HEADER_LEN);
-
   filter_func = WebPFilters[filter];
   if (filter_func != NULL) {
     filter_func(data, width, height, width, tmp_alpha);
@@ -142,14 +133,42 @@ static int EncodeAlphaInternal(const uint8_t* const data, int width, int height,
     alpha_src = data;
   }
 
-  if (method == ALPHA_NO_COMPRESSION) {
-    ok = VP8BitWriterAppend(&result->bw, alpha_src, width * height);
-    ok = ok && !result->bw.error_;
-  } else {
-    ok = EncodeLossless(alpha_src, width, height, effort_level,
-                        &result->bw, &result->stats);
-    VP8BitWriterFinish(&result->bw);
-  }
+  if (method != ALPHA_NO_COMPRESSION) {
+    ok = VP8LBitWriterInit(&tmp_bw, data_size >> 3);
+    ok = ok && EncodeLossless(alpha_src, width, height, effort_level,
+                              &tmp_bw, &result->stats);
+    if (ok) {
+      output = VP8LBitWriterFinish(&tmp_bw);
+      output_size = VP8LBitWriterNumBytes(&tmp_bw);
+      if (output_size > data_size) {
+        // compressed size is larger than source! Revert to uncompressed mode.
+        method = ALPHA_NO_COMPRESSION;
+        VP8LBitWriterDestroy(&tmp_bw);
+      }
+    } else {
+      VP8LBitWriterDestroy(&tmp_bw);
+      return 0;
+    }
+  }
+
+  if (method == ALPHA_NO_COMPRESSION) {
+    output = alpha_src;
+    output_size = data_size;
+    ok = 1;
+  }
+
+  // Emit final result.
+  header = method | (filter << 2);
+  if (reduce_levels) header |= ALPHA_PREPROCESSED_LEVELS << 4;
+
+  VP8BitWriterInit(&result->bw, ALPHA_HEADER_LEN + output_size);
+  ok = ok && VP8BitWriterAppend(&result->bw, &header, ALPHA_HEADER_LEN);
+  ok = ok && VP8BitWriterAppend(&result->bw, output, output_size);
+
+  if (method != ALPHA_NO_COMPRESSION) {
+    VP8LBitWriterDestroy(&tmp_bw);
+  }
+  ok = ok && !result->bw.error_;
   result->score = VP8BitWriterSize(&result->bw);
   return ok;
 }