Compare commits


8 Commits
main ... 1.1.0

Author SHA1 Message Date
50f60add5c Fix OOB write in BuildHuffmanTable.
First, BuildHuffmanTable is called to check if the data is valid.
If it is and the table is not big enough, more memory is allocated.

This will make sure that valid (but unoptimized because of unbalanced
codes) streams are still decodable.

Bug: chromium:1479274
Change-Id: I31c36dbf3aa78d35ecf38706b50464fd3d375741
(cherry picked from commit 902bc91903)
(cherry picked from commit 2af26267cd)
2023-09-07 15:40:16 -07:00
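
The heart of this change is a two-pass use of BuildHuffmanTable(): a first call with a NULL destination only validates the code lengths and reports how many HuffmanCode entries the lookup table needs, and only then is the destination guaranteed to be large enough before anything is written. Below is a minimal, self-contained C sketch of that "dry run, then allocate" shape; measure_or_write() is a toy stand-in of my own, not the library function.

#include <stdio.h>
#include <stdlib.h>

/* Toy stand-in for BuildHuffmanTable(): with dst == NULL it only counts the
 * entries that would be written; with dst != NULL it writes them. The real
 * function computes the size of a multi-level lookup table, but the
 * two-pass shape is the same. */
static int measure_or_write(int* dst, const int lengths[], int n) {
  int i, count = 0;
  for (i = 0; i < n; ++i) {
    if (lengths[i] > 0) {
      if (dst != NULL) dst[count] = lengths[i];
      ++count;
    }
  }
  return count;
}

int main(void) {
  const int lengths[] = {3, 0, 2, 5, 0, 1};
  const int n = (int)(sizeof(lengths) / sizeof(lengths[0]));
  const int total = measure_or_write(NULL, lengths, n);     /* pass 1: size only */
  int* const table = (int*)malloc((size_t)total * sizeof(*table));
  if (table == NULL) return 1;
  measure_or_write(table, lengths, n);                      /* pass 2: cannot overflow */
  printf("wrote %d entries\n", total);
  free(table);
  return 0;
}
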
5df85e9c0b EncodeAlphaInternal: clear result->bw on error
This avoids a double free should the function fail prior to
VP8BitWriterInit() and a previous trial result's buffer carried over.
Previously in ApplyFiltersAndEncode() trial.bw (with a previous
iteration's buffer) would be freed, followed by a free of best.bw, which
pointed to the same buffer.

Since:
187d379d add a fallback to ALPHA_NO_COMPRESSION

In addition, check the return value of VP8BitWriterInit() in this
function.

Bug: webp:603
Change-Id: Ic258381ee26c8c16bc211d157c8153831c8c6910
(cherry picked from commit a486d800b6)
2023-02-28 00:26:34 +00:00
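
A stripped-down illustration of the failure mode and of the one-line memset() fix, using toy types of my own rather than the libwebp structures: two trial results can temporarily share one heap buffer, so the error path has to drop the stale pointer before the usual cleanup runs.

#include <stdlib.h>
#include <string.h>

/* Toy illustration of the bug: 'trial' and 'best' can end up pointing at the
 * same heap buffer, so the error path must drop the pointer it no longer
 * owns before both objects are cleaned up. */
typedef struct { unsigned char* buf; size_t size; } Writer;

static void writer_free(Writer* const w) {
  free(w->buf);
  w->buf = NULL;
  w->size = 0;
}

/* A trial that fails before it (re)initializes its own buffer. */
static int try_encode(Writer* const result) {
  /* ... some step fails here, before the writer is re-initialized ... */
  memset(result, 0, sizeof(*result));  /* the fix: forget the stale pointer */
  return 0;
}

int main(void) {
  Writer best = { (unsigned char*)malloc(16), 16 };
  Writer trial = best;   /* simplified stand-in for a buffer carried over
                            between iterations */
  if (!try_encode(&trial)) {
    writer_free(&trial); /* now a no-op instead of freeing best.buf */
  }
  writer_free(&best);    /* the single, valid free */
  return 0;
}

Without the memset() in try_encode(), the two writer_free() calls would hand the same pointer to free() twice, which is exactly the double free described above.
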
89e226a3c7 GetBackwardReferences: fail on alloc error
Previously, failures in the call to
VP8LBackwardReferencesTraceBackwards() would be ignored; though this
wouldn't result in a crash, it would produce non-deterministic output.

Change-Id: Id9890a60883c3270ec75e968506d46eea32b76d4
(cherry picked from commit e3cfafaf71)
(cherry picked from commit 20ef03ee35)
2022-04-06 21:55:27 -07:00
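
The shape of the control-flow change (the real hunk is in the GetBackwardReferences() diff further down), reduced to a tiny self-contained example with invented names: instead of acting only when the fallible call succeeds and silently continuing otherwise, the caller now fails the whole operation, so later stages never consume a partially computed result.

#include <stdio.h>

static int fallible_step(int fail) { return !fail; }  /* 0 models an alloc error */

static int process(int fail) {
  int ok = 0;
  /* Before: if (fallible_step(...)) { use result }  -- failure silently skipped.
   * After: bail out, so nothing downstream depends on the partial state. */
  if (!fallible_step(fail)) goto Error;
  ok = 1;
 Error:
  return ok;
}

int main(void) {
  printf("%d %d\n", process(0), process(1));  /* prints: 1 0 */
  return 0;
}
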
4d0964cd0c BackwardReferencesHashChainDistanceOnly: fix segfault on OOM
change CostManager to calloc to avoid frees on undefined pointer
values in CostManagerClear() should the cost_model allocation succeed,
but the cost_manager allocation fail

since:
v0.5.0-93-g3e023c17 Speed-up BackwardReferencesHashChainDistanceOnly.

Tested:
for i in `seq 1 639`; do
  export MALLOC_FAIL_AT=$i
  ./examples/cwebp -m 6 -q 100 -lossless jpeg_file
done

Bug: webp:565
Change-Id: I376d81e6f41eb73529053e9e30c142b4b4f6b45b
(cherry picked from commit a828a59b49)
(cherry picked from commit dd80bb4343)
2022-04-06 21:50:21 -07:00
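
A compact illustration of why the switch from WebPSafeMalloc() to WebPSafeCalloc() matters, with a toy struct of my own instead of the real CostManager: the Clear() routine unconditionally frees pointer members, so those members must start out NULL in case a later allocation fails before they are assigned.

#include <stdlib.h>

/* Toy version of the fix: a generic Clear() routine unconditionally frees
 * pointer members, so the struct must come from calloc() (members start as
 * NULL) rather than malloc() (members are indeterminate). */
typedef struct {
  int* costs;
  int* intervals;
} Manager;

static void ManagerClear(Manager* const m) {
  free(m->costs);       /* safe only if 'costs' is NULL or a real allocation */
  free(m->intervals);
  m->costs = NULL;
  m->intervals = NULL;
}

int main(void) {
  Manager* const m = (Manager*)calloc(1, sizeof(*m));  /* the fix: calloc, not malloc */
  if (m == NULL) return 1;
  m->costs = (int*)malloc(100 * sizeof(*m->costs));
  if (m->costs == NULL) {  /* an OOM here used to free an uninitialized 'intervals' */
    ManagerClear(m);
    free(m);
    return 1;
  }
  ManagerClear(m);
  free(m);
  return 0;
}
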
5d805f7205 VP8LEncodeStream: fix segfault on OOM
initialize bw_side before calling EncoderAnalyze() & EncoderInit() which
may fail; previously this would cause a free of an invalid pointer in
VP8LBitWriterWipeOut().

since at least:
v0.6.0-120-gf8c2ac15 Multi-thread the lossless cruncher.

Tested:
for i in `seq 1 639`; do
  export MALLOC_FAIL_AT=$i
  ./examples/cwebp -m 6 -q 100 -lossless jpeg_file
done

Bug: webp:565
Change-Id: I1c95883834b6e4b13aee890568ce3bad0f4266f0
(cherry picked from commit fe153fae98)
(cherry picked from commit ddd65f0d19)
2022-04-06 21:50:21 -07:00
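
A minimal sketch of the ordering fix, with stand-in types rather than the real VP8LBitWriter API: initialize the side bit writer before any step that can fail, so the shared error path always wipes a well-defined object. Before the fix, a failure in EncoderAnalyze()/EncoderInit() reached VP8LBitWriterWipeOut() with bw_side still uninitialized.

#include <stdlib.h>
#include <string.h>

/* Stand-in types; only the ordering is the point. */
typedef struct { unsigned char* buf; size_t size; } BitWriter;

static int BitWriterInitToy(BitWriter* const bw, size_t size) {
  bw->buf = (unsigned char*)calloc(size > 0 ? size : 1, 1);
  bw->size = size;
  return bw->buf != NULL;
}

static void BitWriterWipeToy(BitWriter* const bw) {
  free(bw->buf);
  memset(bw, 0, sizeof(*bw));
}

static int AnalyzeToy(void) { return 0; }  /* pretend analysis fails (e.g. OOM) */

int main(void) {
  BitWriter bw_side;
  int ok = 0;
  /* Initialize first; only afterwards run the steps that may fail. */
  if (!BitWriterInitToy(&bw_side, 0)) return 1;
  if (!AnalyzeToy()) goto Error;
  ok = 1;
 Error:
  BitWriterWipeToy(&bw_side);  /* always operates on a defined object */
  return ok ? 0 : 1;
}
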
b14eba6497 alpha_processing_neon.c: fix 0x01... typo
one instance was overlong, leading to an int64->uint32 conversion warning

Change-Id: I56d5ab75d89960c79293f62cd489d7ab519bbc34
(cherry picked from commit 03d1219055)
2022-03-08 19:37:04 +00:00
9183ff2ef9 alpha_processing_neon.c: fix Dispatch/ExtractAlpha_NEON
the trailing width % 8 bytes would clear the upper bytes of
alpha_mask as they're done one at a time

since:
49d0280d NEON: implement several alpha-processing functions

Change-Id: Iff76c0af3094597285a6aa6ed032b345f9856aae
(cherry picked from commit 924e7ca654)
2022-03-03 18:06:24 +00:00
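
The fix is easiest to see in scalar form. In the sketch below (plain C, a 4-byte "vector" loop standing in for the 8-byte NEON one), the tail loop ANDs single alpha bytes into a 32-bit accumulator, so that accumulator must start at 0xff and be splatted across all four byte lanes with * 0x01010101 before being combined with the 32-bit lane mask from the vector loop. With the old 0xffffffff seed, the first tail byte cleared the upper 24 bits and fully opaque images were misreported whenever the width was not a multiple of the vector size.

#include <stdint.h>
#include <stdio.h>

/* Scalar model of Dispatch/ExtractAlpha_NEON's opacity check: a 32-bit mask
 * accumulated 4 bytes at a time by the "vector" loop, plus a byte-wide mask
 * accumulated one alpha value at a time by the tail loop. */
static int all_opaque(const uint8_t* alpha, int width) {
  uint32_t lane_mask = 0xffffffffu;   /* stand-in for the NEON mask8 result */
  uint32_t alpha_mask = 0xffu;        /* byte-wide accumulator for the tail */
  const int vec_end = (width / 4) * 4;
  int i;
  for (i = 0; i < vec_end; i += 4) {  /* "vector" part: 4 bytes at a time */
    const uint32_t v = (uint32_t)alpha[i] | ((uint32_t)alpha[i + 1] << 8) |
                       ((uint32_t)alpha[i + 2] << 16) |
                       ((uint32_t)alpha[i + 3] << 24);
    lane_mask &= v;
  }
  for (; i < width; ++i) alpha_mask &= alpha[i];  /* scalar tail, one byte */
  alpha_mask *= 0x01010101u;   /* splat the byte into all four lanes */
  return ((alpha_mask & lane_mask) == 0xffffffffu);
}

int main(void) {
  const uint8_t opaque[5] = {0xff, 0xff, 0xff, 0xff, 0xff};
  const uint8_t holey[5]  = {0xff, 0xff, 0xff, 0xff, 0x7f};
  printf("%d %d\n", all_opaque(opaque, 5), all_opaque(holey, 5));  /* 1 0 */
  return 0;
}
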
68d52453cd Fix lossless encoding for MIPS.
Bug: webp:558
Change-Id: I3d3ddb64ed26a8d8ff5664664c5f20f6eadfeb4f
(cherry picked from commit e4cbcdd2b5)
2022-03-02 02:56:13 +00:00
10 changed files with 156 additions and 60 deletions

View File

@@ -253,11 +253,11 @@ static int ReadHuffmanCodeLengths(
int symbol;
int max_symbol;
int prev_code_len = DEFAULT_CODE_LENGTH;
HuffmanCode table[1 << LENGTHS_TABLE_BITS];
HuffmanTables tables;
if (!VP8LBuildHuffmanTable(table, LENGTHS_TABLE_BITS,
code_length_code_lengths,
NUM_CODE_LENGTH_CODES)) {
if (!VP8LHuffmanTablesAllocate(1 << LENGTHS_TABLE_BITS, &tables) ||
!VP8LBuildHuffmanTable(&tables, LENGTHS_TABLE_BITS,
code_length_code_lengths, NUM_CODE_LENGTH_CODES)) {
goto End;
}
@@ -277,7 +277,7 @@ static int ReadHuffmanCodeLengths(
int code_len;
if (max_symbol-- == 0) break;
VP8LFillBitWindow(br);
p = &table[VP8LPrefetchBits(br) & LENGTHS_TABLE_MASK];
p = &tables.curr_segment->start[VP8LPrefetchBits(br) & LENGTHS_TABLE_MASK];
VP8LSetBitPos(br, br->bit_pos_ + p->bits);
code_len = p->value;
if (code_len < kCodeLengthLiterals) {
@@ -300,6 +300,7 @@ static int ReadHuffmanCodeLengths(
ok = 1;
End:
VP8LHuffmanTablesDeallocate(&tables);
if (!ok) dec->status_ = VP8_STATUS_BITSTREAM_ERROR;
return ok;
}
@@ -307,7 +308,8 @@ static int ReadHuffmanCodeLengths(
// 'code_lengths' is pre-allocated temporary buffer, used for creating Huffman
// tree.
static int ReadHuffmanCode(int alphabet_size, VP8LDecoder* const dec,
int* const code_lengths, HuffmanCode* const table) {
int* const code_lengths,
HuffmanTables* const table) {
int ok = 0;
int size = 0;
VP8LBitReader* const br = &dec->br_;
@@ -362,8 +364,7 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
VP8LMetadata* const hdr = &dec->hdr_;
uint32_t* huffman_image = NULL;
HTreeGroup* htree_groups = NULL;
HuffmanCode* huffman_tables = NULL;
HuffmanCode* huffman_table = NULL;
HuffmanTables* huffman_tables = &hdr->huffman_tables_;
int num_htree_groups = 1;
int num_htree_groups_max = 1;
int max_alphabet_size = 0;
@@ -372,6 +373,10 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
int* mapping = NULL;
int ok = 0;
// Check the table has been 0 initialized (through InitMetadata).
assert(huffman_tables->root.start == NULL);
assert(huffman_tables->curr_segment == NULL);
if (allow_recursion && VP8LReadBits(br, 1)) {
// use meta Huffman codes.
const int huffman_precision = VP8LReadBits(br, 3) + 2;
@@ -434,16 +439,15 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
code_lengths = (int*)WebPSafeCalloc((uint64_t)max_alphabet_size,
sizeof(*code_lengths));
huffman_tables = (HuffmanCode*)WebPSafeMalloc(num_htree_groups * table_size,
sizeof(*huffman_tables));
htree_groups = VP8LHtreeGroupsNew(num_htree_groups);
if (htree_groups == NULL || code_lengths == NULL || huffman_tables == NULL) {
if (htree_groups == NULL || code_lengths == NULL ||
!VP8LHuffmanTablesAllocate(num_htree_groups * table_size,
huffman_tables)) {
dec->status_ = VP8_STATUS_OUT_OF_MEMORY;
goto Error;
}
huffman_table = huffman_tables;
for (i = 0; i < num_htree_groups_max; ++i) {
// If the index "i" is unused in the Huffman image, just make sure the
// coefficients are valid but do not store them.
@@ -468,19 +472,20 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
int max_bits = 0;
for (j = 0; j < HUFFMAN_CODES_PER_META_CODE; ++j) {
int alphabet_size = kAlphabetSize[j];
htrees[j] = huffman_table;
if (j == 0 && color_cache_bits > 0) {
alphabet_size += (1 << color_cache_bits);
}
size = ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_table);
size =
ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_tables);
htrees[j] = huffman_tables->curr_segment->curr_table;
if (size == 0) {
goto Error;
}
if (is_trivial_literal && kLiteralMap[j] == 1) {
is_trivial_literal = (huffman_table->bits == 0);
is_trivial_literal = (htrees[j]->bits == 0);
}
total_size += huffman_table->bits;
huffman_table += size;
total_size += htrees[j]->bits;
huffman_tables->curr_segment->curr_table += size;
if (j <= ALPHA) {
int local_max_bits = code_lengths[0];
int k;
@@ -515,14 +520,13 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
hdr->huffman_image_ = huffman_image;
hdr->num_htree_groups_ = num_htree_groups;
hdr->htree_groups_ = htree_groups;
hdr->huffman_tables_ = huffman_tables;
Error:
WebPSafeFree(code_lengths);
WebPSafeFree(mapping);
if (!ok) {
WebPSafeFree(huffman_image);
WebPSafeFree(huffman_tables);
VP8LHuffmanTablesDeallocate(huffman_tables);
VP8LHtreeGroupsFree(htree_groups);
}
return ok;
@@ -1354,7 +1358,7 @@ static void ClearMetadata(VP8LMetadata* const hdr) {
assert(hdr != NULL);
WebPSafeFree(hdr->huffman_image_);
WebPSafeFree(hdr->huffman_tables_);
VP8LHuffmanTablesDeallocate(&hdr->huffman_tables_);
VP8LHtreeGroupsFree(hdr->htree_groups_);
VP8LColorCacheClear(&hdr->color_cache_);
VP8LColorCacheClear(&hdr->saved_color_cache_);
@@ -1670,7 +1674,7 @@ int VP8LDecodeImage(VP8LDecoder* const dec) {
// Sanity checks.
if (dec == NULL) return 0;
assert(dec->hdr_.huffman_tables_ != NULL);
assert(dec->hdr_.huffman_tables_.root.start != NULL);
assert(dec->hdr_.htree_groups_ != NULL);
assert(dec->hdr_.num_htree_groups_ > 0);

View File

@@ -51,7 +51,7 @@ typedef struct {
uint32_t* huffman_image_;
int num_htree_groups_;
HTreeGroup* htree_groups_;
HuffmanCode* huffman_tables_;
HuffmanTables huffman_tables_;
} VP8LMetadata;
typedef struct VP8LDecoder VP8LDecoder;

View File

@@ -83,7 +83,7 @@ static void ApplyAlphaMultiply_NEON(uint8_t* rgba, int alpha_first,
static int DispatchAlpha_NEON(const uint8_t* alpha, int alpha_stride,
int width, int height,
uint8_t* dst, int dst_stride) {
uint32_t alpha_mask = 0xffffffffu;
uint32_t alpha_mask = 0xffu;
uint8x8_t mask8 = vdup_n_u8(0xff);
uint32_t tmp[2];
int i, j;
@@ -107,6 +107,7 @@ static int DispatchAlpha_NEON(const uint8_t* alpha, int alpha_stride,
dst += dst_stride;
}
vst1_u8((uint8_t*)tmp, mask8);
alpha_mask *= 0x01010101;
alpha_mask &= tmp[0];
alpha_mask &= tmp[1];
return (alpha_mask != 0xffffffffu);
@@ -134,7 +135,7 @@ static void DispatchAlphaToGreen_NEON(const uint8_t* alpha, int alpha_stride,
static int ExtractAlpha_NEON(const uint8_t* argb, int argb_stride,
int width, int height,
uint8_t* alpha, int alpha_stride) {
uint32_t alpha_mask = 0xffffffffu;
uint32_t alpha_mask = 0xffu;
uint8x8_t mask8 = vdup_n_u8(0xff);
uint32_t tmp[2];
int i, j;
@@ -156,6 +157,7 @@ static int ExtractAlpha_NEON(const uint8_t* argb, int argb_stride,
alpha += alpha_stride;
}
vst1_u8((uint8_t*)tmp, mask8);
alpha_mask *= 0x01010101;
alpha_mask &= tmp[0];
alpha_mask &= tmp[1];
return (alpha_mask == 0xffffffffu);

View File

@@ -347,24 +347,24 @@ static void GetCombinedEntropyUnrefined_MIPS32(const uint32_t X[],
static void AddVector_MIPS32(const uint32_t* pa, const uint32_t* pb,
uint32_t* pout, int size) {
uint32_t temp0, temp1, temp2, temp3, temp4, temp5, temp6, temp7;
const uint32_t end = ((size) / 4) * 4;
const int end = ((size) / 4) * 4;
const uint32_t* const LoopEnd = pa + end;
int i;
ASM_START
ADD_TO_OUT(0, 4, 8, 12, 1, pa, pb, pout)
ASM_END_0
for (i = end; i < size; ++i) pout[i] = pa[i] + pb[i];
for (i = 0; i < size - end; ++i) pout[i] = pa[i] + pb[i];
}
static void AddVectorEq_MIPS32(const uint32_t* pa, uint32_t* pout, int size) {
uint32_t temp0, temp1, temp2, temp3, temp4, temp5, temp6, temp7;
const uint32_t end = ((size) / 4) * 4;
const int end = ((size) / 4) * 4;
const uint32_t* const LoopEnd = pa + end;
int i;
ASM_START
ADD_TO_OUT(0, 4, 8, 12, 0, pa, pout, pout)
ASM_END_1
for (i = end; i < size; ++i) pout[i] += pa[i];
for (i = 0; i < size - end; ++i) pout[i] += pa[i];
}
#undef ASM_END_1
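
What changed in the two remainder loops above: judging from the surrounding code, the assembly block walks pa (and pout) forward until pa reaches LoopEnd = pa + end, so once it finishes the pointers already point at the first unprocessed element. Indexing the scalar tail from i = end therefore skipped another 'end' elements and touched memory past the intended range; the fix indexes the tail from 0 relative to the advanced pointers (and makes 'end' an int so the loop bound arithmetic stays signed). A small stand-alone sketch of the same shape, with the fast path simulated in plain C:

#include <stdio.h>

/* The fast path advances its input/output pointers as it goes, so the scalar
 * tail must index from 0 relative to the advanced pointers, not from 'end'
 * relative to the original ones. */
static void add_vector(const int* pa, const int* pb, int* pout, int size) {
  const int end = (size / 4) * 4;
  int i;
  /* "Fast" path: stands in for the asm block, which leaves the pointers at
   * the first unprocessed element. */
  for (i = 0; i < end; ++i) *pout++ = *pa++ + *pb++;
  /* Tail: 0 .. size - end relative to the advanced pointers. */
  for (i = 0; i < size - end; ++i) pout[i] = pa[i] + pb[i];
}

int main(void) {
  const int a[6] = {1, 2, 3, 4, 5, 6};
  const int b[6] = {10, 20, 30, 40, 50, 60};
  int out[6];
  add_vector(a, b, out, 6);
  printf("%d %d\n", out[0], out[5]);  /* 11 66 */
  return 0;
}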

View File

@@ -13,6 +13,7 @@
#include <assert.h>
#include <stdlib.h>
#include <string.h>
#include "src/enc/vp8i_enc.h"
#include "src/dsp/dsp.h"
@@ -148,6 +149,7 @@ static int EncodeAlphaInternal(const uint8_t* const data, int width, int height,
}
} else {
VP8LBitWriterWipeOut(&tmp_bw);
memset(&result->bw, 0, sizeof(result->bw));
return 0;
}
}
@@ -162,7 +164,7 @@ static int EncodeAlphaInternal(const uint8_t* const data, int width, int height,
header = method | (filter << 2);
if (reduce_levels) header |= ALPHA_PREPROCESSED_LEVELS << 4;
VP8BitWriterInit(&result->bw, ALPHA_HEADER_LEN + output_size);
if (!VP8BitWriterInit(&result->bw, ALPHA_HEADER_LEN + output_size)) ok = 0;
ok = ok && VP8BitWriterAppend(&result->bw, &header, ALPHA_HEADER_LEN);
ok = ok && VP8BitWriterAppend(&result->bw, output, output_size);

View File

@@ -577,7 +577,7 @@ static int BackwardReferencesHashChainDistanceOnly(
(CostModel*)WebPSafeCalloc(1ULL, cost_model_size);
VP8LColorCache hashers;
CostManager* cost_manager =
(CostManager*)WebPSafeMalloc(1ULL, sizeof(*cost_manager));
(CostManager*)WebPSafeCalloc(1ULL, sizeof(*cost_manager));
int offset_prev = -1, len_prev = -1;
double offset_cost = -1;
int first_offset_is_constant = -1; // initialized with 'impossible' value

View File

@@ -912,14 +912,15 @@ static VP8LBackwardRefs* GetBackwardReferences(
quality >= 25) {
const VP8LHashChain* const hash_chain_tmp =
(lz77_type_best == kLZ77Standard) ? hash_chain : &hash_chain_box;
if (VP8LBackwardReferencesTraceBackwards(width, height, argb, *cache_bits,
hash_chain_tmp, best, worst)) {
double bit_cost_trace;
if (!VP8LBackwardReferencesTraceBackwards(width, height, argb, *cache_bits,
hash_chain_tmp, best, worst)) {
goto Error;
}
VP8LHistogramCreate(histo, worst, *cache_bits);
bit_cost_trace = VP8LHistogramEstimateBits(histo);
if (bit_cost_trace < bit_cost_best) best = worst;
}
}
BackwardReferences2DLocality(width, best);

View File

@@ -1693,11 +1693,16 @@ WebPEncodingError VP8LEncodeStream(const WebPConfig* const config,
const WebPWorkerInterface* const worker_interface = WebPGetWorkerInterface();
int ok_main;
if (enc_main == NULL || !VP8LBitWriterInit(&bw_side, 0)) {
WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
VP8LEncoderDelete(enc_main);
return 0;
}
// Analyze image (entropy, num_palettes etc)
if (enc_main == NULL ||
!EncoderAnalyze(enc_main, crunch_configs, &num_crunch_configs_main,
if (!EncoderAnalyze(enc_main, crunch_configs, &num_crunch_configs_main,
&red_and_blue_always_zero) ||
!EncoderInit(enc_main) || !VP8LBitWriterInit(&bw_side, 0)) {
!EncoderInit(enc_main)) {
err = VP8_ENC_ERROR_OUT_OF_MEMORY;
goto Error;
}

View File

@@ -177,21 +177,24 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
if (num_open < 0) {
return 0;
}
if (root_table == NULL) continue;
for (; count[len] > 0; --count[len]) {
HuffmanCode code;
if ((key & mask) != low) {
table += table_size;
if (root_table != NULL) table += table_size;
table_bits = NextTableBitSize(count, len, root_bits);
table_size = 1 << table_bits;
total_size += table_size;
low = key & mask;
if (root_table != NULL) {
root_table[low].bits = (uint8_t)(table_bits + root_bits);
root_table[low].value = (uint16_t)((table - root_table) - low);
}
}
if (root_table != NULL) {
code.bits = (uint8_t)(len - root_bits);
code.value = (uint16_t)sorted[symbol++];
ReplicateValue(&table[key >> root_bits], step, table_size, code);
}
key = GetNextKey(key, len);
}
}
@@ -211,25 +214,83 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
((1 << MAX_CACHE_BITS) + NUM_LITERAL_CODES + NUM_LENGTH_CODES)
// Cut-off value for switching between heap and stack allocation.
#define SORTED_SIZE_CUTOFF 512
int VP8LBuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
int VP8LBuildHuffmanTable(HuffmanTables* const root_table, int root_bits,
const int code_lengths[], int code_lengths_size) {
int total_size;
const int total_size =
BuildHuffmanTable(NULL, root_bits, code_lengths, code_lengths_size, NULL);
assert(code_lengths_size <= MAX_CODE_LENGTHS_SIZE);
if (root_table == NULL) {
total_size = BuildHuffmanTable(NULL, root_bits,
code_lengths, code_lengths_size, NULL);
} else if (code_lengths_size <= SORTED_SIZE_CUTOFF) {
if (total_size == 0 || root_table == NULL) return total_size;
if (root_table->curr_segment->curr_table + total_size >=
root_table->curr_segment->start + root_table->curr_segment->size) {
// If 'root_table' does not have enough memory, allocate a new segment.
// The available part of root_table->curr_segment is left unused because we
// need a contiguous buffer.
const int segment_size = root_table->curr_segment->size;
struct HuffmanTablesSegment* next =
(HuffmanTablesSegment*)WebPSafeMalloc(1, sizeof(*next));
if (next == NULL) return 0;
// Fill the new segment.
// We need at least 'total_size' but if that value is small, it is better to
// allocate a big chunk to prevent more allocations later. 'segment_size' is
// therefore chosen (any other arbitrary value could be chosen).
next->size = total_size > segment_size ? total_size : segment_size;
next->start =
(HuffmanCode*)WebPSafeMalloc(next->size, sizeof(*next->start));
if (next->start == NULL) {
WebPSafeFree(next);
return 0;
}
next->curr_table = next->start;
next->next = NULL;
// Point to the new segment.
root_table->curr_segment->next = next;
root_table->curr_segment = next;
}
if (code_lengths_size <= SORTED_SIZE_CUTOFF) {
// use local stack-allocated array.
uint16_t sorted[SORTED_SIZE_CUTOFF];
total_size = BuildHuffmanTable(root_table, root_bits,
BuildHuffmanTable(root_table->curr_segment->curr_table, root_bits,
code_lengths, code_lengths_size, sorted);
} else { // rare case. Use heap allocation.
uint16_t* const sorted =
(uint16_t*)WebPSafeMalloc(code_lengths_size, sizeof(*sorted));
if (sorted == NULL) return 0;
total_size = BuildHuffmanTable(root_table, root_bits,
BuildHuffmanTable(root_table->curr_segment->curr_table, root_bits,
code_lengths, code_lengths_size, sorted);
WebPSafeFree(sorted);
}
return total_size;
}
int VP8LHuffmanTablesAllocate(int size, HuffmanTables* huffman_tables) {
// Have 'segment' point to the first segment for now, 'root'.
HuffmanTablesSegment* const root = &huffman_tables->root;
huffman_tables->curr_segment = root;
// Allocate root.
root->start = (HuffmanCode*)WebPSafeMalloc(size, sizeof(*root->start));
if (root->start == NULL) return 0;
root->curr_table = root->start;
root->next = NULL;
root->size = size;
return 1;
}
void VP8LHuffmanTablesDeallocate(HuffmanTables* const huffman_tables) {
HuffmanTablesSegment *current, *next;
if (huffman_tables == NULL) return;
// Free the root node.
current = &huffman_tables->root;
next = current->next;
WebPSafeFree(current->start);
current->start = NULL;
current->next = NULL;
current = next;
// Free the following nodes.
while (current != NULL) {
next = current->next;
WebPSafeFree(current->start);
WebPSafeFree(current);
current = next;
}
}
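
The functions above implement a chain of contiguous segments: each Huffman table must occupy contiguous HuffmanCode entries, so when the current segment cannot hold the next table its remaining space is abandoned and a fresh segment, at least as large as the request, is linked in and becomes curr_segment. The stand-alone sketch below reproduces that allocation discipline with generic names of my own (ints instead of HuffmanCode, an integer remaining-capacity test instead of the pointer comparison used above); it illustrates the data structure, it is not the library code.

#include <stdio.h>
#include <stdlib.h>

/* Toy version of the chained-segment idea behind HuffmanTables. Each
 * reservation must be contiguous, so when the current segment cannot hold
 * the requested count, the leftover space is abandoned and a new segment
 * (at least as large as the request) is chained on. */
typedef struct Segment {
  int* start;              /* backing storage */
  int* curr;               /* next free slot, in [start, start + size] */
  int size;
  struct Segment* next;
} Segment;

typedef struct {
  Segment root;            /* first segment, embedded */
  Segment* curr_segment;   /* segment currently being filled */
} Arena;

static int ArenaInit(Arena* const a, int size) {
  a->curr_segment = &a->root;
  a->root.curr = NULL;
  a->root.size = size;
  a->root.next = NULL;
  a->root.start = (int*)calloc((size_t)size, sizeof(*a->root.start));
  if (a->root.start == NULL) return 0;
  a->root.curr = a->root.start;
  return 1;
}

/* Returns a contiguous block of 'count' ints, growing the chain if needed. */
static int* ArenaReserve(Arena* const a, int count) {
  Segment* seg = a->curr_segment;
  const int remaining = seg->size - (int)(seg->curr - seg->start);
  if (count > remaining) {
    const int new_size = (count > seg->size) ? count : seg->size;
    Segment* const next = (Segment*)calloc(1, sizeof(*next));
    if (next == NULL) return NULL;
    next->start = (int*)calloc((size_t)new_size, sizeof(*next->start));
    if (next->start == NULL) {
      free(next);
      return NULL;
    }
    next->curr = next->start;
    next->size = new_size;
    seg->next = next;        /* the old segment's tail is left unused */
    a->curr_segment = next;
    seg = next;
  }
  seg->curr += count;
  return seg->curr - count;
}

static void ArenaFree(Arena* const a) {
  Segment* s = a->root.next;
  free(a->root.start);
  while (s != NULL) {
    Segment* const next = s->next;
    free(s->start);
    free(s);
    s = next;
  }
}

int main(void) {
  Arena a;
  if (!ArenaInit(&a, 8)) return 1;
  (void)ArenaReserve(&a, 5);   /* fits in the root segment */
  (void)ArenaReserve(&a, 6);   /* too big for the 3 slots left -> new segment */
  printf("%d\n", a.curr_segment != &a.root);  /* prints 1 */
  ArenaFree(&a);
  return 0;
}

Abandoning the tail of the old segment wastes a little memory, but it keeps every reservation contiguous, which is what lets each decoded Huffman table be addressed with plain pointer arithmetic.
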

View File

@@ -43,6 +43,29 @@ typedef struct {
// or non-literal symbol otherwise
} HuffmanCode32;
// Contiguous memory segment of HuffmanCodes.
typedef struct HuffmanTablesSegment {
HuffmanCode* start;
// Pointer to where we are writing into the segment. Starts at 'start' and
// cannot go beyond 'start' + 'size'.
HuffmanCode* curr_table;
// Pointer to the next segment in the chain.
struct HuffmanTablesSegment* next;
int size;
} HuffmanTablesSegment;
// Chained memory segments of HuffmanCodes.
typedef struct HuffmanTables {
HuffmanTablesSegment root;
// Currently processed segment. At first, this is 'root'.
HuffmanTablesSegment* curr_segment;
} HuffmanTables;
// Allocates a HuffmanTables with 'size' contiguous HuffmanCodes. Returns 0 on
// memory allocation error, 1 otherwise.
int VP8LHuffmanTablesAllocate(int size, HuffmanTables* huffman_tables);
void VP8LHuffmanTablesDeallocate(HuffmanTables* const huffman_tables);
#define HUFFMAN_PACKED_BITS 6
#define HUFFMAN_PACKED_TABLE_SIZE (1u << HUFFMAN_PACKED_BITS)
@@ -78,9 +101,7 @@ void VP8LHtreeGroupsFree(HTreeGroup* const htree_groups);
// the huffman table.
// Returns built table size or 0 in case of error (invalid tree or
// memory error).
// If root_table is NULL, it returns 0 if a lookup cannot be built, something
// > 0 otherwise (but not the table size).
int VP8LBuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
int VP8LBuildHuffmanTable(HuffmanTables* const root_table, int root_bits,
const int code_lengths[], int code_lengths_size);
#ifdef __cplusplus