vp8io: use size_t for buffer size

changes the decoder ABI

Change-Id: I86fe384739b985bf63bcd9badbbbf4411e1eecae
Author: James Zern
Date:   2012-04-12 13:43:37 -07:00
Parent: 90ead710dc
Commit: 1c92bd37f3
2 changed files with 4 additions and 2 deletions
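Caller-visible effect, sketched below: client code that drives the low-level VP8 decoder should now pass its buffer length as a size_t and can guard against older headers at compile time. This is a minimal illustration, not part of the commit; the "webp/decode_vp8.h" include path and the SetInputBuffer() helper are assumptions, while VP8Io, data, data_size and WEBP_DECODER_ABI_VERSION come from the headers changed here.

// Caller-side sketch (assumed include path; helper name is hypothetical).
#include <stddef.h>   // size_t
#include <stdint.h>   // uint8_t

#include "webp/decode_vp8.h"

#if WEBP_DECODER_ABI_VERSION < 0x0003
#error "this code expects the size_t VP8Io::data_size (decoder ABI >= 0x0003)"
#endif

// Point the decoder's I/O descriptor at a complete input buffer.
static void SetInputBuffer(VP8Io* const io, const uint8_t* data, size_t size) {
  io->data = data;       // input bitstream
  io->data_size = size;  // size_t as of this change: no narrowing to uint32_t
}

Source compatibility is mostly preserved (a uint32_t length converts implicitly), but the field width changes on 64-bit targets, which is why WEBP_DECODER_ABI_VERSION is bumped to 0x0003 and prebuilt clients need a rebuild.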

@@ -18,7 +18,7 @@
 extern "C" {
 #endif
-#define WEBP_DECODER_ABI_VERSION 0x0002
+#define WEBP_DECODER_ABI_VERSION 0x0003
 // Return the decoder's version number, packed in hexadecimal using 8bits for
 // each of major/minor/revision. E.g: v2.5.7 is 0x020507.

@@ -12,6 +12,8 @@
 #ifndef WEBP_WEBP_DECODE_VP8_H_
 #define WEBP_WEBP_DECODE_VP8_H_
+#include <stddef.h>  // for size_t
 #include "./decode.h"
 #if defined(__cplusplus) || defined(c_plusplus)
@@ -82,7 +84,7 @@ struct VP8Io {
   int fancy_upsampling;
   // Input buffer.
-  uint32_t data_size;
+  size_t data_size;
   const uint8_t* data;
   // If true, in-loop filtering will not be performed even if present in the