diff --git a/src/dec/vp8l_dec.c b/src/dec/vp8l_dec.c
index 56246dea..85db9f83 100644
--- a/src/dec/vp8l_dec.c
+++ b/src/dec/vp8l_dec.c
@@ -253,11 +253,11 @@ static int ReadHuffmanCodeLengths(
   int symbol;
   int max_symbol;
   int prev_code_len = DEFAULT_CODE_LENGTH;
-  HuffmanCode table[1 << LENGTHS_TABLE_BITS];
+  HuffmanTables tables;
 
-  if (!VP8LBuildHuffmanTable(table, LENGTHS_TABLE_BITS,
-                             code_length_code_lengths,
-                             NUM_CODE_LENGTH_CODES)) {
+  if (!VP8LHuffmanTablesAllocate(1 << LENGTHS_TABLE_BITS, &tables) ||
+      !VP8LBuildHuffmanTable(&tables, LENGTHS_TABLE_BITS,
+                             code_length_code_lengths, NUM_CODE_LENGTH_CODES)) {
     goto End;
   }
 
@@ -277,7 +277,7 @@ static int ReadHuffmanCodeLengths(
     int code_len;
     if (max_symbol-- == 0) break;
     VP8LFillBitWindow(br);
-    p = &table[VP8LPrefetchBits(br) & LENGTHS_TABLE_MASK];
+    p = &tables.curr_segment->start[VP8LPrefetchBits(br) & LENGTHS_TABLE_MASK];
     VP8LSetBitPos(br, br->bit_pos_ + p->bits);
     code_len = p->value;
     if (code_len < kCodeLengthLiterals) {
@@ -300,6 +300,7 @@ static int ReadHuffmanCodeLengths(
   ok = 1;
 
  End:
+  VP8LHuffmanTablesDeallocate(&tables);
   if (!ok) dec->status_ = VP8_STATUS_BITSTREAM_ERROR;
   return ok;
 }
@@ -307,7 +308,8 @@ static int ReadHuffmanCodeLengths(
 // 'code_lengths' is pre-allocated temporary buffer, used for creating Huffman
 // tree.
 static int ReadHuffmanCode(int alphabet_size, VP8LDecoder* const dec,
-                           int* const code_lengths, HuffmanCode* const table) {
+                           int* const code_lengths,
+                           HuffmanTables* const table) {
   int ok = 0;
   int size = 0;
   VP8LBitReader* const br = &dec->br_;
@@ -362,8 +364,7 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
   VP8LMetadata* const hdr = &dec->hdr_;
   uint32_t* huffman_image = NULL;
   HTreeGroup* htree_groups = NULL;
-  HuffmanCode* huffman_tables = NULL;
-  HuffmanCode* huffman_table = NULL;
+  HuffmanTables* huffman_tables = &hdr->huffman_tables_;
   int num_htree_groups = 1;
   int num_htree_groups_max = 1;
   int max_alphabet_size = 0;
@@ -372,6 +373,10 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
   int* mapping = NULL;
   int ok = 0;
 
+  // Check the table has been 0 initialized (through InitMetadata).
+  assert(huffman_tables->root.start == NULL);
+  assert(huffman_tables->curr_segment == NULL);
+
   if (allow_recursion && VP8LReadBits(br, 1)) {
     // use meta Huffman codes.
     const int huffman_precision = VP8LReadBits(br, 3) + 2;
@@ -432,18 +437,17 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
     }
   }
 
-  huffman_tables = (HuffmanCode*)WebPSafeMalloc(num_htree_groups * table_size,
-                                                sizeof(*huffman_tables));
   htree_groups = VP8LHtreeGroupsNew(num_htree_groups);
   code_lengths = (int*)WebPSafeCalloc((uint64_t)max_alphabet_size,
                                       sizeof(*code_lengths));
 
-  if (htree_groups == NULL || code_lengths == NULL || huffman_tables == NULL) {
+  if (htree_groups == NULL || code_lengths == NULL ||
+      !VP8LHuffmanTablesAllocate(num_htree_groups * table_size,
+                                 huffman_tables)) {
     dec->status_ = VP8_STATUS_OUT_OF_MEMORY;
     goto Error;
   }
 
-  huffman_table = huffman_tables;
   for (i = 0; i < num_htree_groups_max; ++i) {
     // If the index "i" is unused in the Huffman image, just make sure the
     // coefficients are valid but do not store them.
@@ -468,19 +472,20 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
       int max_bits = 0;
       for (j = 0; j < HUFFMAN_CODES_PER_META_CODE; ++j) {
         int alphabet_size = kAlphabetSize[j];
-        htrees[j] = huffman_table;
         if (j == 0 && color_cache_bits > 0) {
           alphabet_size += (1 << color_cache_bits);
         }
-        size = ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_table);
+        size =
+            ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_tables);
+        htrees[j] = huffman_tables->curr_segment->curr_table;
         if (size == 0) {
           goto Error;
         }
         if (is_trivial_literal && kLiteralMap[j] == 1) {
-          is_trivial_literal = (huffman_table->bits == 0);
+          is_trivial_literal = (htrees[j]->bits == 0);
         }
-        total_size += huffman_table->bits;
-        huffman_table += size;
+        total_size += htrees[j]->bits;
+        huffman_tables->curr_segment->curr_table += size;
         if (j <= ALPHA) {
           int local_max_bits = code_lengths[0];
           int k;
@@ -515,14 +520,13 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
   hdr->huffman_image_ = huffman_image;
   hdr->num_htree_groups_ = num_htree_groups;
   hdr->htree_groups_ = htree_groups;
-  hdr->huffman_tables_ = huffman_tables;
 
  Error:
   WebPSafeFree(code_lengths);
   WebPSafeFree(mapping);
   if (!ok) {
     WebPSafeFree(huffman_image);
-    WebPSafeFree(huffman_tables);
+    VP8LHuffmanTablesDeallocate(huffman_tables);
     VP8LHtreeGroupsFree(htree_groups);
   }
   return ok;
@@ -1354,7 +1358,7 @@ static void ClearMetadata(VP8LMetadata* const hdr) {
   assert(hdr != NULL);
 
   WebPSafeFree(hdr->huffman_image_);
-  WebPSafeFree(hdr->huffman_tables_);
+  VP8LHuffmanTablesDeallocate(&hdr->huffman_tables_);
   VP8LHtreeGroupsFree(hdr->htree_groups_);
   VP8LColorCacheClear(&hdr->color_cache_);
   VP8LColorCacheClear(&hdr->saved_color_cache_);
@@ -1670,7 +1674,7 @@ int VP8LDecodeImage(VP8LDecoder* const dec) {
   // Sanity checks.
   if (dec == NULL) return 0;
 
-  assert(dec->hdr_.huffman_tables_ != NULL);
+  assert(dec->hdr_.huffman_tables_.root.start != NULL);
   assert(dec->hdr_.htree_groups_ != NULL);
   assert(dec->hdr_.num_htree_groups_ > 0);
 
diff --git a/src/dec/vp8li_dec.h b/src/dec/vp8li_dec.h
index 8e500cf9..cb4d8362 100644
--- a/src/dec/vp8li_dec.h
+++ b/src/dec/vp8li_dec.h
@@ -51,7 +51,7 @@ typedef struct {
   uint32_t *huffman_image_;
   int num_htree_groups_;
   HTreeGroup *htree_groups_;
-  HuffmanCode *huffman_tables_;
+  HuffmanTables huffman_tables_;
 } VP8LMetadata;
 
 typedef struct VP8LDecoder VP8LDecoder;
diff --git a/src/utils/huffman_utils.c b/src/utils/huffman_utils.c
index 7a69963c..9efd6283 100644
--- a/src/utils/huffman_utils.c
+++ b/src/utils/huffman_utils.c
@@ -91,7 +91,8 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
 
   assert(code_lengths_size != 0);
   assert(code_lengths != NULL);
-  assert(root_table != NULL);
+  assert((root_table != NULL && sorted != NULL) ||
+         (root_table == NULL && sorted == NULL));
   assert(root_bits > 0);
 
   // Build histogram of code lengths.
@@ -120,16 +121,22 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
   for (symbol = 0; symbol < code_lengths_size; ++symbol) {
     const int symbol_code_length = code_lengths[symbol];
     if (code_lengths[symbol] > 0) {
-      sorted[offset[symbol_code_length]++] = symbol;
+      if (sorted != NULL) {
+        sorted[offset[symbol_code_length]++] = symbol;
+      } else {
+        offset[symbol_code_length]++;
+      }
     }
   }
 
   // Special case code with only one value.
   if (offset[MAX_ALLOWED_CODE_LENGTH] == 1) {
-    HuffmanCode code;
-    code.bits = 0;
-    code.value = (uint16_t)sorted[0];
-    ReplicateValue(table, 1, total_size, code);
+    if (sorted != NULL) {
+      HuffmanCode code;
+      code.bits = 0;
+      code.value = (uint16_t)sorted[0];
+      ReplicateValue(table, 1, total_size, code);
+    }
     return total_size;
   }
 
@@ -151,6 +158,7 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
     if (num_open < 0) {
       return 0;
     }
+    if (root_table == NULL) continue;
     for (; count[len] > 0; --count[len]) {
       HuffmanCode code;
       code.bits = (uint8_t)len;
@@ -172,17 +180,21 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
     for (; count[len] > 0; --count[len]) {
       HuffmanCode code;
       if ((key & mask) != low) {
-        table += table_size;
+        if (root_table != NULL) table += table_size;
         table_bits = NextTableBitSize(count, len, root_bits);
         table_size = 1 << table_bits;
         total_size += table_size;
         low = key & mask;
-        root_table[low].bits = (uint8_t)(table_bits + root_bits);
-        root_table[low].value = (uint16_t)((table - root_table) - low);
+        if (root_table != NULL) {
+          root_table[low].bits = (uint8_t)(table_bits + root_bits);
+          root_table[low].value = (uint16_t)((table - root_table) - low);
+        }
+      }
+      if (root_table != NULL) {
+        code.bits = (uint8_t)(len - root_bits);
+        code.value = (uint16_t)sorted[symbol++];
+        ReplicateValue(&table[key >> root_bits], step, table_size, code);
       }
-      code.bits = (uint8_t)(len - root_bits);
-      code.value = (uint16_t)sorted[symbol++];
-      ReplicateValue(&table[key >> root_bits], step, table_size, code);
       key = GetNextKey(key, len);
     }
   }
@@ -202,22 +214,83 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
     ((1 << MAX_CACHE_BITS) + NUM_LITERAL_CODES + NUM_LENGTH_CODES)
 // Cut-off value for switching between heap and stack allocation.
 #define SORTED_SIZE_CUTOFF 512
-int VP8LBuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
+int VP8LBuildHuffmanTable(HuffmanTables* const root_table, int root_bits,
                           const int code_lengths[], int code_lengths_size) {
-  int total_size;
+  const int total_size =
+      BuildHuffmanTable(NULL, root_bits, code_lengths, code_lengths_size, NULL);
   assert(code_lengths_size <= MAX_CODE_LENGTHS_SIZE);
+  if (total_size == 0 || root_table == NULL) return total_size;
+
+  if (root_table->curr_segment->curr_table + total_size >=
+      root_table->curr_segment->start + root_table->curr_segment->size) {
+    // If 'root_table' does not have enough memory, allocate a new segment.
+    // The available part of root_table->curr_segment is left unused because we
+    // need a contiguous buffer.
+    const int segment_size = root_table->curr_segment->size;
+    struct HuffmanTablesSegment* next =
+        (HuffmanTablesSegment*)WebPSafeMalloc(1, sizeof(*next));
+    if (next == NULL) return 0;
+    // Fill the new segment.
+    // We need at least 'total_size' but if that value is small, it is better to
+    // allocate a big chunk to prevent more allocations later. 'segment_size' is
+    // therefore chosen (any other arbitrary value could be chosen).
+    next->size = total_size > segment_size ? total_size : segment_size;
+    next->start =
+        (HuffmanCode*)WebPSafeMalloc(next->size, sizeof(*next->start));
+    if (next->start == NULL) {
+      WebPSafeFree(next);
+      return 0;
+    }
+    next->curr_table = next->start;
+    next->next = NULL;
+    // Point to the new segment.
+    root_table->curr_segment->next = next;
+    root_table->curr_segment = next;
+  }
   if (code_lengths_size <= SORTED_SIZE_CUTOFF) {
     // use local stack-allocated array.
     uint16_t sorted[SORTED_SIZE_CUTOFF];
-    total_size = BuildHuffmanTable(root_table, root_bits,
-                                   code_lengths, code_lengths_size, sorted);
-  } else {  // rare case. Use heap allocation.
+    BuildHuffmanTable(root_table->curr_segment->curr_table, root_bits,
+                      code_lengths, code_lengths_size, sorted);
+  } else {   // rare case. Use heap allocation.
     uint16_t* const sorted =
         (uint16_t*)WebPSafeMalloc(code_lengths_size, sizeof(*sorted));
     if (sorted == NULL) return 0;
-    total_size = BuildHuffmanTable(root_table, root_bits,
-                                   code_lengths, code_lengths_size, sorted);
+    BuildHuffmanTable(root_table->curr_segment->curr_table, root_bits,
+                      code_lengths, code_lengths_size, sorted);
     WebPSafeFree(sorted);
   }
   return total_size;
 }
+
+int VP8LHuffmanTablesAllocate(int size, HuffmanTables* huffman_tables) {
+  // Have 'segment' point to the first segment for now, 'root'.
+  HuffmanTablesSegment* const root = &huffman_tables->root;
+  huffman_tables->curr_segment = root;
+  // Allocate root.
+  root->start = (HuffmanCode*)WebPSafeMalloc(size, sizeof(*root->start));
+  if (root->start == NULL) return 0;
+  root->curr_table = root->start;
+  root->next = NULL;
+  root->size = size;
+  return 1;
+}
+
+void VP8LHuffmanTablesDeallocate(HuffmanTables* const huffman_tables) {
+  HuffmanTablesSegment *current, *next;
+  if (huffman_tables == NULL) return;
+  // Free the root node.
+  current = &huffman_tables->root;
+  next = current->next;
+  WebPSafeFree(current->start);
+  current->start = NULL;
+  current->next = NULL;
+  current = next;
+  // Free the following nodes.
+  while (current != NULL) {
+    next = current->next;
+    WebPSafeFree(current->start);
+    WebPSafeFree(current);
+    current = next;
+  }
+}
diff --git a/src/utils/huffman_utils.h b/src/utils/huffman_utils.h
index ff7ef17f..98415c53 100644
--- a/src/utils/huffman_utils.h
+++ b/src/utils/huffman_utils.h
@@ -43,6 +43,29 @@ typedef struct {
                     // or non-literal symbol otherwise
 } HuffmanCode32;
 
+// Contiguous memory segment of HuffmanCodes.
+typedef struct HuffmanTablesSegment {
+  HuffmanCode* start;
+  // Pointer to where we are writing into the segment. Starts at 'start' and
+  // cannot go beyond 'start' + 'size'.
+  HuffmanCode* curr_table;
+  // Pointer to the next segment in the chain.
+  struct HuffmanTablesSegment* next;
+  int size;
+} HuffmanTablesSegment;
+
+// Chained memory segments of HuffmanCodes.
+typedef struct HuffmanTables {
+  HuffmanTablesSegment root;
+  // Currently processed segment. At first, this is 'root'.
+  HuffmanTablesSegment* curr_segment;
+} HuffmanTables;
+
+// Allocates a HuffmanTables with 'size' contiguous HuffmanCodes. Returns 0 on
+// memory allocation error, 1 otherwise.
+int VP8LHuffmanTablesAllocate(int size, HuffmanTables* huffman_tables);
+void VP8LHuffmanTablesDeallocate(HuffmanTables* const huffman_tables);
+
 #define HUFFMAN_PACKED_BITS 6
 #define HUFFMAN_PACKED_TABLE_SIZE (1u << HUFFMAN_PACKED_BITS)
 
@@ -78,7 +101,7 @@ void VP8LHtreeGroupsFree(HTreeGroup* const htree_groups);
 // the huffman table.
 // Returns built table size or 0 in case of error (invalid tree or
 // memory error).
-int VP8LBuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
+int VP8LBuildHuffmanTable(HuffmanTables* const root_table, int root_bits,
                           const int code_lengths[], int code_lengths_size);
 
 #ifdef __cplusplus
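The patch replaces the single flat HuffmanCode buffer with a HuffmanTables chain that can grow by linking extra segments: VP8LBuildHuffmanTable now sizes the table in a dry-run pass and, if the result does not fit in the current segment, chains a new one instead of writing past the end of the caller's allocation. The sketch below is not part of the patch; it only illustrates the calling pattern of the new internal API, mirroring ReadHuffmanCodeLengths() in the first hunk. BuildExampleTable, EXAMPLE_ROOT_BITS, the sample code lengths, and the printf output are hypothetical, and the include path assumes the libwebp source-tree layout.

#include <stdio.h>

#include "src/utils/huffman_utils.h"  // patched header declaring HuffmanTables

#define EXAMPLE_ROOT_BITS 7  // hypothetical root-table depth for this sketch

// Builds a lookup table for 'code_lengths' and releases it again.
// Returns 1 on success, 0 on error (invalid code or out of memory).
static int BuildExampleTable(const int code_lengths[], int code_lengths_size) {
  HuffmanTables tables;
  int total_size;
  // 1) Reserve an initial contiguous segment of HuffmanCodes.
  if (!VP8LHuffmanTablesAllocate(1 << EXAMPLE_ROOT_BITS, &tables)) {
    return 0;  // allocation failed; nothing to release
  }
  // 2) Build the table. The dry-run pass inside the patched
  //    VP8LBuildHuffmanTable computes the exact size first; if it does not
  //    fit in the initial segment, a new, larger segment is chained instead
  //    of writing past the end of the first one.
  total_size = VP8LBuildHuffmanTable(&tables, EXAMPLE_ROOT_BITS,
                                     code_lengths, code_lengths_size);
  if (total_size == 0) {  // invalid code lengths or out of memory
    VP8LHuffmanTablesDeallocate(&tables);
    return 0;
  }
  // 3) Decoding would index tables.curr_segment->start here, exactly as
  //    ReadHuffmanCodeLengths() does in the first hunk of the patch.
  printf("built a table of %d entries\n", total_size);
  // 4) Release the whole chain of segments in one call.
  VP8LHuffmanTablesDeallocate(&tables);
  return 1;
}

int main(void) {
  // A complete 3-symbol code: lengths {1, 2, 2} -> codes 0, 10, 11.
  const int lengths[] = { 1, 2, 2 };
  return BuildExampleTable(lengths, 3) ? 0 : 1;
}

Note that each table still occupies one contiguous block: per the comment in VP8LBuildHuffmanTable, the tail of the current segment is left unused rather than splitting a table across segments, so the decoder's pointer arithmetic over a table is unchanged.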