Merge "Better handling of bogus Huffman codes."

Vincent Rabaud, 2018-07-03 08:33:35 +00:00, committed by Gerrit Code Review
commit 2281bbf6f7

@@ -359,17 +359,22 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
                             int color_cache_bits, int allow_recursion) {
   int i, j;
   VP8LBitReader* const br = &dec->br_;
-  VP8LBitReader br_tmp;
   VP8LMetadata* const hdr = &dec->hdr_;
   uint32_t* huffman_image = NULL;
   HTreeGroup* htree_groups = NULL;
+  // When reading htrees, some might be unused, as the format allows it.
+  // We will still read them but put them in this htree_group_bogus.
+  HTreeGroup htree_group_bogus;
   HuffmanCode* huffman_tables = NULL;
+  HuffmanCode* huffman_tables_bogus = NULL;
   HuffmanCode* next = NULL;
   int num_htree_groups = 1;
-  int num_htree_groups_limit = 1;
+  int num_htree_groups_max = 1;
   int max_alphabet_size = 0;
   int* code_lengths = NULL;
   const int table_size = kTableSize[color_cache_bits];
+  int* mapping = NULL;
+  int ok = 0;
 
   if (allow_recursion && VP8LReadBits(br, 1)) {
     // use meta Huffman codes.
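The new htree_group_bogus and huffman_tables_bogus declarations above exist because the VP8L format may define more Huffman groups than the Huffman image actually references; those groups still have to be decoded so the bit reader stays in sync, they just do not need to be kept. A minimal standalone sketch of that read-but-discard idea (toy types and names, not libwebp APIs):

#include <stdio.h>

typedef struct { const unsigned char* buf; int pos; } Reader;

/* Stand-in for real entropy decoding: consume one byte per entry. */
static int ReadEntry(Reader* const r, int* const out) {
  *out = r->buf[r->pos++];
  return 1;
}

int main(void) {
  const unsigned char stream[] = { 10, 20, 30, 40 };
  const int used[] = { 1, 0, 1, 0 };  /* which entries are actually referenced */
  Reader r = { stream, 0 };
  int kept[4], scratch, num_kept = 0, i;
  for (i = 0; i < 4; ++i) {
    /* Unused entries are decoded into 'scratch' so the reader still advances
     * past them; only used entries are stored. */
    int* const dst = used[i] ? &kept[num_kept] : &scratch;
    if (!ReadEntry(&r, dst)) return 1;
    if (used[i]) ++num_kept;
  }
  printf("kept %d of 4 entries: %d %d\n", num_kept, kept[0], kept[1]);
  return 0;
}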
@@ -386,21 +391,41 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
       // The huffman data is stored in red and green bytes.
       const int group = (huffman_image[i] >> 8) & 0xffff;
       huffman_image[i] = group;
-      if (group >= num_htree_groups) {
-        num_htree_groups = group + 1;
+      if (group >= num_htree_groups_max) {
+        num_htree_groups_max = group + 1;
       }
     }
-    // Check the validity of num_htree_groups. If it seems too big, use a
+    // Check the validity of num_htree_groups_max. If it seems too big, use a
     // smaller value for later. This will prevent big memory allocations to end
     // up with a bad bitstream anyway.
-    // The value of 1000 is totally arbitrary. We know that num_htree_groups
+    // The value of 1000 is totally arbitrary. We know that num_htree_groups_max
    // is smaller than (1 << 16) and should be smaller than the number of pixels
     // (though the format allows it to be bigger).
-    if (num_htree_groups > 1000 || num_htree_groups > xsize * ysize) {
-      num_htree_groups_limit = (xsize * ysize > 1000) ? 1000 : xsize * ysize;
-      br_tmp = dec->br_;
+    if (num_htree_groups_max > 1000 || num_htree_groups_max > xsize * ysize) {
+      // Create a mapping from the used indices to the minimal set of used
+      // values [0, num_htree_groups)
+      mapping = (int*)WebPSafeMalloc(num_htree_groups_max, sizeof(*mapping));
+      if (mapping == NULL) {
+        dec->status_ = VP8_STATUS_OUT_OF_MEMORY;
+        goto Error;
+      }
+      // -1 means a value is unmapped, and therefore unused in the Huffman
+      // image.
+      memset(mapping, 0xff, num_htree_groups_max * sizeof(*mapping));
+      for (num_htree_groups = 0, i = 0; i < huffman_pixs; ++i) {
+        // Get the current mapping for the group and remap the Huffman image.
+        int* const mapped_group = &mapping[huffman_image[i]];
+        if (*mapped_group == -1) *mapped_group = num_htree_groups++;
+        huffman_image[i] = *mapped_group;
+      }
+      huffman_tables_bogus = (HuffmanCode*)WebPSafeMalloc(
+          table_size, sizeof(*huffman_tables_bogus));
+      if (huffman_tables_bogus == NULL) {
+        dec->status_ = VP8_STATUS_OUT_OF_MEMORY;
+        goto Error;
+      }
     } else {
-      num_htree_groups_limit = num_htree_groups;
+      num_htree_groups = num_htree_groups_max;
     }
   }
 
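For suspiciously large group counts, the hunk above builds `mapping` so that only the group ids actually present in the Huffman image get real slots; every other id later falls through to the bogus group. Roughly, the remapping loop does the following (standalone sketch with made-up names, not libwebp code). Note that memset with 0xff yields -1 only because int is all-ones in two's complement:

#include <stdio.h>
#include <string.h>

#define MAX_GROUPS 16

int main(void) {
  int image[] = { 3, 3, 7, 1, 3, 7 };  /* sparse group ids, as decoded */
  const int num_pixels = sizeof(image) / sizeof(image[0]);
  int mapping[MAX_GROUPS];
  int num_used = 0;
  int i;
  memset(mapping, 0xff, sizeof(mapping));     /* all bytes 0xff -> -1 == unused */
  for (i = 0; i < num_pixels; ++i) {
    int* const mapped = &mapping[image[i]];
    if (*mapped == -1) *mapped = num_used++;  /* first occurrence gets next id */
    image[i] = *mapped;                       /* rewrite the image in place */
  }
  /* image is now {0, 0, 1, 2, 0, 1} and num_used == 3, so three groups get
   * allocated instead of eight (max id + 1). */
  for (i = 0; i < num_pixels; ++i) printf("%d ", image[i]);
  printf("/ num_used = %d\n", num_used);
  return 0;
}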
@@ -419,99 +444,91 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
   code_lengths = (int*)WebPSafeCalloc((uint64_t)max_alphabet_size,
                                       sizeof(*code_lengths));
-  // If num_htree_groups_tmp == num_htree_groups, the following loop is executed
-  // once.
-  // If num_htree_groups_tmp != num_htree_groups, we execute the loop the first
-  // time with little memory allocation in the hope that there is a bitstream
-  // error. If after num_htree_groups_tmp iterations, no error appears,
-  // num_htree_groups is probably the right value so try it out.
-  do {
-    huffman_tables = (HuffmanCode*)WebPSafeMalloc(
-        num_htree_groups_limit * table_size, sizeof(*huffman_tables));
-    htree_groups = VP8LHtreeGroupsNew(num_htree_groups_limit);
-    if (htree_groups == NULL || code_lengths == NULL ||
-        huffman_tables == NULL) {
-      dec->status_ = VP8_STATUS_OUT_OF_MEMORY;
-      goto Error;
-    }
-
-    next = huffman_tables;
-    for (i = 0; i < num_htree_groups_limit; ++i) {
-      HTreeGroup* const htree_group = &htree_groups[i];
-      HuffmanCode** const htrees = htree_group->htrees;
-      int size;
-      int total_size = 0;
-      int is_trivial_literal = 1;
-      int max_bits = 0;
-      for (j = 0; j < HUFFMAN_CODES_PER_META_CODE; ++j) {
-        int alphabet_size = kAlphabetSize[j];
-        htrees[j] = next;
-        if (j == 0 && color_cache_bits > 0) {
-          alphabet_size += 1 << color_cache_bits;
-        }
-        size = ReadHuffmanCode(alphabet_size, dec, code_lengths, next);
-        if (size == 0) {
-          goto Error;
-        }
-        if (is_trivial_literal && kLiteralMap[j] == 1) {
-          is_trivial_literal = (next->bits == 0);
-        }
-        total_size += next->bits;
-        next += size;
-        if (j <= ALPHA) {
-          int local_max_bits = code_lengths[0];
-          int k;
-          for (k = 1; k < alphabet_size; ++k) {
-            if (code_lengths[k] > local_max_bits) {
-              local_max_bits = code_lengths[k];
-            }
-          }
-          max_bits += local_max_bits;
-        }
-      }
-      htree_group->is_trivial_literal = is_trivial_literal;
-      htree_group->is_trivial_code = 0;
-      if (is_trivial_literal) {
-        const int red = htrees[RED][0].value;
-        const int blue = htrees[BLUE][0].value;
-        const int alpha = htrees[ALPHA][0].value;
-        htree_group->literal_arb = ((uint32_t)alpha << 24) | (red << 16) | blue;
-        if (total_size == 0 && htrees[GREEN][0].value < NUM_LITERAL_CODES) {
-          htree_group->is_trivial_code = 1;
-          htree_group->literal_arb |= htrees[GREEN][0].value << 8;
-        }
-      }
-      htree_group->use_packed_table =
-          !htree_group->is_trivial_code && (max_bits < HUFFMAN_PACKED_BITS);
-      if (htree_group->use_packed_table) BuildPackedTable(htree_group);
-    }
-    // If we have survived up to here, num_htree_groups might actually be
-    // that big so restart with a proper allocation.
-    if (num_htree_groups != num_htree_groups_limit) {
-      num_htree_groups_limit = num_htree_groups;
-      WebPSafeFree(huffman_tables);
-      VP8LHtreeGroupsFree(htree_groups);
-      huffman_tables = NULL;
-      htree_groups = NULL;
-      dec->br_ = br_tmp;
-    }
-  } while (i != num_htree_groups);
-  WebPSafeFree(code_lengths);
+  huffman_tables = (HuffmanCode*)WebPSafeMalloc(num_htree_groups * table_size,
+                                                sizeof(*huffman_tables));
+  htree_groups = VP8LHtreeGroupsNew(num_htree_groups);
+
+  if (htree_groups == NULL || code_lengths == NULL || huffman_tables == NULL) {
+    dec->status_ = VP8_STATUS_OUT_OF_MEMORY;
+    goto Error;
+  }
+
+  next = huffman_tables;
+  for (i = 0; i < num_htree_groups_max; ++i) {
+    // If the index "i" is unused in the Huffman image, read the coefficients
+    // but store them to a bogus htree_group.
+    const int is_bogus = (mapping != NULL && mapping[i] == -1);
+    HTreeGroup* const htree_group =
+        is_bogus ? &htree_group_bogus :
+        &htree_groups[(mapping == NULL) ? i : mapping[i]];
+    HuffmanCode** const htrees = htree_group->htrees;
+    HuffmanCode* huffman_tables_i = is_bogus ? huffman_tables_bogus : next;
+    int size;
+    int total_size = 0;
+    int is_trivial_literal = 1;
+    int max_bits = 0;
+    for (j = 0; j < HUFFMAN_CODES_PER_META_CODE; ++j) {
+      int alphabet_size = kAlphabetSize[j];
+      htrees[j] = huffman_tables_i;
+      if (j == 0 && color_cache_bits > 0) {
+        alphabet_size += 1 << color_cache_bits;
+      }
+      size =
+          ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_tables_i);
+      if (size == 0) {
+        goto Error;
+      }
+      if (is_trivial_literal && kLiteralMap[j] == 1) {
+        is_trivial_literal = (huffman_tables_i->bits == 0);
+      }
+      total_size += huffman_tables_i->bits;
+      huffman_tables_i += size;
+      if (j <= ALPHA) {
+        int local_max_bits = code_lengths[0];
+        int k;
+        for (k = 1; k < alphabet_size; ++k) {
+          if (code_lengths[k] > local_max_bits) {
+            local_max_bits = code_lengths[k];
+          }
+        }
+        max_bits += local_max_bits;
+      }
+    }
+    if (!is_bogus) next = huffman_tables_i;
+    htree_group->is_trivial_literal = is_trivial_literal;
+    htree_group->is_trivial_code = 0;
+    if (is_trivial_literal) {
+      const int red = htrees[RED][0].value;
+      const int blue = htrees[BLUE][0].value;
+      const int alpha = htrees[ALPHA][0].value;
+      htree_group->literal_arb = ((uint32_t)alpha << 24) | (red << 16) | blue;
+      if (total_size == 0 && htrees[GREEN][0].value < NUM_LITERAL_CODES) {
+        htree_group->is_trivial_code = 1;
+        htree_group->literal_arb |= htrees[GREEN][0].value << 8;
+      }
+    }
+    htree_group->use_packed_table =
+        !htree_group->is_trivial_code && (max_bits < HUFFMAN_PACKED_BITS);
+    if (htree_group->use_packed_table) BuildPackedTable(htree_group);
+  }
+  ok = 1;
 
-  // All OK. Finalize pointers and return.
+  // All OK. Finalize pointers.
   hdr->huffman_image_ = huffman_image;
   hdr->num_htree_groups_ = num_htree_groups;
   hdr->htree_groups_ = htree_groups;
   hdr->huffman_tables_ = huffman_tables;
-  return 1;
 
  Error:
   WebPSafeFree(code_lengths);
-  WebPSafeFree(huffman_image);
-  WebPSafeFree(huffman_tables);
-  VP8LHtreeGroupsFree(htree_groups);
-  return 0;
+  WebPSafeFree(huffman_tables_bogus);
+  WebPSafeFree(mapping);
+  if (!ok) {
+    WebPSafeFree(huffman_image);
+    WebPSafeFree(huffman_tables);
+    VP8LHtreeGroupsFree(htree_groups);
+  }
+  return ok;
 }
 
 //------------------------------------------------------------------------------
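One further aspect of the rewritten function worth spelling out is its exit path: success now also falls through the Error: label, and the ok flag decides what gets freed. Scratch allocations (code_lengths, huffman_tables_bogus, mapping) are released on every path, while the outputs stored in hdr are released only if ok was never set. A condensed sketch of that shape, with illustrative names rather than the actual libwebp helpers:

#include <stdlib.h>

typedef struct { int* table; } Output;

/* Returns 1 on success and hands 'table' to the caller; 0 on failure. */
static int BuildTable(Output* const out, int n) {
  int ok = 0;
  int* scratch = (int*)malloc(n * sizeof(*scratch));  /* temporary only */
  int* table = (int*)malloc(n * sizeof(*table));      /* handed to the caller */
  if (scratch == NULL || table == NULL) goto Error;
  /* ... fill 'table' using 'scratch', jumping to Error on any failure ... */
  out->table = table;
  ok = 1;
 Error:
  free(scratch);         /* always freed: it never escapes this function */
  if (!ok) free(table);  /* freed only if ownership was not handed over */
  return ok;
}

int main(void) {
  Output o = { NULL };
  const int ok = BuildTable(&o, 8);
  free(o.table);
  return ok ? 0 : 1;
}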