Mirror of https://github.com/webmproject/libwebp.git
add automatic YUVA/ARGB conversion during WebPEncode()
Adds new methods WebPPictureARGBToYUVA() and WebPPictureYUVAToARGB().
Depending on the value of picture->use_argb_input, the main call WebPEncode()
will convert appropriately. Note that both conversions are lossy, so it's
recommended to:
  * use YUVA input for lossy compression (picture->use_argb_input=0)
  * use ARGB input for lossless compression (picture->use_argb_input=1)

Change-Id: I8269d607723ee8a1136b9f4999f7ff4e657bbb04
commit fcc69923b9
parent 802e012a18
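A hedged usage sketch of the behaviour described in the commit message (the helper name, the memory-writer setup and the error handling are illustrative assumptions, not part of this change):

#include "webp/encode.h"

// Sketch only: EncodeSketch and its writer handling are illustrative.
// 'writer' is assumed to be already initialized by the caller
// (e.g. with WebPMemoryWriterInit()).
static int EncodeSketch(const uint8_t* rgba, int rgba_stride,
                        int width, int height, int lossless,
                        WebPMemoryWriter* const writer) {
  WebPConfig config;
  WebPPicture picture;
  int ok;
  if (!WebPConfigInit(&config) || !WebPPictureInit(&picture)) return 0;
  config.lossless = lossless;
  picture.width = width;
  picture.height = height;
  // Pairing recommended by the commit message: ARGB samples for lossless,
  // YUVA samples for lossy.
  picture.use_argb_input = lossless ? 1 : 0;
  if (!WebPPictureImportRGBA(&picture, rgba, rgba_stride)) return 0;
  picture.writer = WebPMemoryWrite;
  picture.custom_ptr = writer;
  // Per the commit message, WebPEncode() now converts between YUVA and ARGB
  // on the fly when the stored representation does not match the codec.
  ok = WebPEncode(&config, &picture);
  WebPPictureFree(&picture);
  return ok;
}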
@@ -145,13 +145,13 @@ void VP8DspInit(void);
 
 #define FANCY_UPSAMPLING   // undefined to remove fancy upsampling support
 
-#ifdef FANCY_UPSAMPLING
 typedef void (*WebPUpsampleLinePairFunc)(
     const uint8_t* top_y, const uint8_t* bottom_y,
     const uint8_t* top_u, const uint8_t* top_v,
     const uint8_t* cur_u, const uint8_t* cur_v,
     uint8_t* top_dst, uint8_t* bottom_dst, int len);
 
+#ifdef FANCY_UPSAMPLING
 
 // Fancy upsampling functions to convert YUV to RGB(A) modes
 extern WebPUpsampleLinePairFunc WebPUpsamplers[/* MODE_LAST */];
@@ -169,6 +169,11 @@ typedef void (*WebPSampleLinePairFunc)(
 
 extern const WebPSampleLinePairFunc WebPSamplers[/* MODE_LAST */];
 
+// General function for converting two lines of ARGB or RGBA.
+// 'alpha_is_last' should be true if 0xff000000 is stored in memory as
+// as 0x00, 0x00, 0x00, 0xff (little endian).
+WebPUpsampleLinePairFunc WebPGetLinePairConverter(int alpha_is_last);
+
 // YUV444->RGB converters
 typedef void (*WebPYUV444Converter)(const uint8_t* y,
                                     const uint8_t* u, const uint8_t* v,
@@ -156,6 +156,48 @@ const WebPSampleLinePairFunc WebPSamplers[MODE_LAST] = {
   SampleRgba4444LinePair     // MODE_rgbA_4444
 };
 
+//------------------------------------------------------------------------------
+
+#if !defined(FANCY_UPSAMPLING)
+#define DUAL_SAMPLE_FUNC(FUNC_NAME, FUNC) \
+static void FUNC_NAME(const uint8_t* top_y, const uint8_t* bot_y, \
+                      const uint8_t* top_u, const uint8_t* top_v, \
+                      const uint8_t* bot_u, const uint8_t* bot_v, \
+                      uint8_t* top_dst, uint8_t* bot_dst, int len) { \
+  const int half_len = len >> 1; \
+  int x; \
+  if (top_dst != NULL) { \
+    for (x = 0; x < half_len; ++x) { \
+      FUNC(top_y[2 * x + 0], top_u[x], top_v[x], top_dst + 8 * x + 0); \
+      FUNC(top_y[2 * x + 1], top_u[x], top_v[x], top_dst + 8 * x + 4); \
+    } \
+    if (len & 1) FUNC(top_y[2 * x + 0], top_u[x], top_v[x], top_dst + 8 * x); \
+  } \
+  if (bot_dst != NULL) { \
+    for (x = 0; x < half_len; ++x) { \
+      FUNC(bot_y[2 * x + 0], bot_u[x], bot_v[x], bot_dst + 8 * x + 0); \
+      FUNC(bot_y[2 * x + 1], bot_u[x], bot_v[x], bot_dst + 8 * x + 4); \
+    } \
+    if (len & 1) FUNC(bot_y[2 * x + 0], bot_u[x], bot_v[x], bot_dst + 8 * x); \
+  } \
+}
+
+DUAL_SAMPLE_FUNC(DualLineSamplerBGRA, VP8YuvToBgra)
+DUAL_SAMPLE_FUNC(DualLineSamplerARGB, VP8YuvToArgb)
+#undef DUAL_SAMPLE_FUNC
+
+#endif  // !FANCY_UPSAMPLING
+
+WebPUpsampleLinePairFunc WebPGetLinePairConverter(int alpha_is_last) {
+  WebPInitUpsamplers();
+  VP8YUVInit();
+#ifdef FANCY_UPSAMPLING
+  return WebPUpsamplers[alpha_is_last ? MODE_BGRA : MODE_ARGB];
+#else
+  return (alpha_is_last ? DualLineSamplerBGRA : DualLineSamplerARGB);
+#endif
+}
+
 //------------------------------------------------------------------------------
 // YUV444 converter
 
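With FANCY_UPSAMPLING disabled, the converters above are generated by the DUAL_SAMPLE_FUNC macro. For readability, here is roughly what DUAL_SAMPLE_FUNC(DualLineSamplerBGRA, VP8YuvToBgra) expands to (a manual expansion for illustration, not code taken from the commit):

// Point-sampling fallback: each U/V sample drives two horizontal luma samples.
static void DualLineSamplerBGRA(const uint8_t* top_y, const uint8_t* bot_y,
                                const uint8_t* top_u, const uint8_t* top_v,
                                const uint8_t* bot_u, const uint8_t* bot_v,
                                uint8_t* top_dst, uint8_t* bot_dst, int len) {
  const int half_len = len >> 1;
  int x;
  if (top_dst != NULL) {
    for (x = 0; x < half_len; ++x) {
      VP8YuvToBgra(top_y[2 * x + 0], top_u[x], top_v[x], top_dst + 8 * x + 0);
      VP8YuvToBgra(top_y[2 * x + 1], top_u[x], top_v[x], top_dst + 8 * x + 4);
    }
    // Handle the last pixel of an odd-width row.
    if (len & 1) VP8YuvToBgra(top_y[2 * x + 0], top_u[x], top_v[x], top_dst + 8 * x);
  }
  if (bot_dst != NULL) {
    for (x = 0; x < half_len; ++x) {
      VP8YuvToBgra(bot_y[2 * x + 0], bot_u[x], bot_v[x], bot_dst + 8 * x + 0);
      VP8YuvToBgra(bot_y[2 * x + 1], bot_u[x], bot_v[x], bot_dst + 8 * x + 4);
    }
    if (len & 1) VP8YuvToBgra(bot_y[2 * x + 0], bot_u[x], bot_v[x], bot_dst + 8 * x);
  }
}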
@@ -15,6 +15,7 @@
 
 #include "./vp8enci.h"
 #include "../utils/rescaler.h"
+#include "../dsp/dsp.h"
 
 #if defined(__cplusplus) || defined(c_plusplus)
 extern "C" {
@@ -23,6 +24,12 @@ extern "C" {
 #define HALVE(x) (((x) + 1) >> 1)
 #define IS_YUV_CSP(csp, YUV_CSP) (((csp) & WEBP_CSP_UV_MASK) == (YUV_CSP))
 
+static const union {
+  uint32_t argb;
+  uint8_t bytes[4];
+} test_endian = { 0xff000000u };
+#define ALPHA_IS_LAST (test_endian.bytes[3] == 0xff)
+
 //------------------------------------------------------------------------------
 // WebPPicture
 //------------------------------------------------------------------------------
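Standalone sketch (not from the commit) showing what the endianness probe above evaluates to on a given machine; the union mirrors the test_endian definition in the diff:

#include <stdint.h>
#include <stdio.h>

int main(void) {
  // Same probe as the test_endian union above.
  const union { uint32_t argb; uint8_t bytes[4]; } probe = { 0xff000000u };
  const int alpha_is_last = (probe.bytes[3] == 0xff);
  // Little-endian: 0xff000000u is stored as 00 00 00 ff -> alpha_is_last == 1,
  // i.e. the alpha byte of a native uint32_t ARGB pixel is last in memory.
  // Big-endian: stored as ff 00 00 00 -> alpha_is_last == 0.
  printf("ALPHA_IS_LAST would be %d on this machine\n", alpha_is_last);
  return 0;
}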
@@ -79,16 +86,17 @@ int WebPPictureAlloc(WebPPicture* const picture) {
         (size_t)total_size != total_size) {  // overflow on 32bit
       return 0;
     }
-    picture->y_stride = y_stride;
-    picture->uv_stride = uv_stride;
-    picture->a_stride = a_stride;
-    picture->uv0_stride = uv0_stride;
+    // Clear previous buffer and allocate a new one.
     WebPPictureFree(picture);   // erase previous buffer
     mem = (uint8_t*)malloc((size_t)total_size);
     if (mem == NULL) return 0;
 
+    // From now on, we're in the clear, we can no longer fail...
     picture->memory_ = (void*)mem;
-
+    picture->y_stride = y_stride;
+    picture->uv_stride = uv_stride;
+    picture->a_stride = a_stride;
+    picture->uv0_stride = uv0_stride;
     // TODO(skal): we could align the y/u/v planes and adjust stride.
     picture->y = mem;
     mem += y_size;
@@ -117,12 +125,13 @@ int WebPPictureAlloc(WebPPicture* const picture) {
         (size_t)total_size != total_size) {
       return 0;
     }
+    // Clear previous buffer and allocate a new one.
     WebPPictureFree(picture);   // erase previous buffer
     memory = malloc((size_t)total_size);
     if (memory == NULL) return 0;
-    picture->memory_argb_ = memory;
 
     // TODO(skal): align plane to cache line?
+    picture->memory_argb_ = memory;
     picture->argb = (uint32_t*)memory;
     picture->argb_stride = width;
   }
@@ -130,26 +139,57 @@ int WebPPictureAlloc(WebPPicture* const picture) {
   return 1;
 }
 
-// Grab the 'specs' (writer, *opaque, width, height...) from 'src' and copy them
-// into 'dst'. Mark 'dst' as not owning any memory. 'src' can be NULL.
-static void WebPPictureGrabSpecs(const WebPPicture* const src,
-                                 WebPPicture* const dst) {
-  assert(dst != NULL);
-  if (src != NULL) *dst = *src;
-  dst->y = dst->u = dst->v = NULL;
-  dst->u0 = dst->v0 = NULL;
-  dst->a = NULL;
-  dst->argb = NULL;
-  dst->memory_ = NULL;
-  dst->memory_argb_ = NULL;
+// Remove reference to the ARGB buffer (doesn't free anything).
+static void PictureResetARGB(WebPPicture* const picture) {
+  picture->memory_argb_ = NULL;
+  picture->argb = NULL;
+  picture->argb_stride = 0;
 }
 
-// Release memory owned by 'picture'.
+// Remove reference to the YUVA buffer (doesn't free anything).
+static void PictureResetYUVA(WebPPicture* const picture) {
+  picture->memory_ = NULL;
+  picture->y = picture->u = picture->v = picture->a = NULL;
+  picture->u0 = picture->v0 = NULL;
+  picture->y_stride = picture->uv_stride = 0;
+  picture->a_stride = 0;
+  picture->uv0_stride = 0;
+}
+
+// Grab the 'specs' (writer, *opaque, width, height...) from 'src' and copy them
+// into 'dst'. Mark 'dst' as not owning any memory.
+static void WebPPictureGrabSpecs(const WebPPicture* const src,
+                                 WebPPicture* const dst) {
+  assert(src != NULL && dst != NULL);
+  *dst = *src;
+  PictureResetYUVA(dst);
+  PictureResetARGB(dst);
+}
+
+// Allocate a new argb buffer, discarding any existing one and preserving
+// the other YUV(A) buffer.
+static int PictureAllocARGB(WebPPicture* const picture) {
+  WebPPicture tmp;
+  free(picture->memory_argb_);
+  PictureResetARGB(picture);
+  picture->use_argb_input = 1;
+  WebPPictureGrabSpecs(picture, &tmp);
+  if (!WebPPictureAlloc(&tmp)) {
+    return WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
+  }
+  picture->memory_argb_ = tmp.memory_argb_;
+  picture->argb = tmp.argb;
+  picture->argb_stride = tmp.argb_stride;
+  return 1;
+}
+
+// Release memory owned by 'picture' (both YUV and ARGB buffers).
 void WebPPictureFree(WebPPicture* const picture) {
   if (picture != NULL) {
     free(picture->memory_);
     free(picture->memory_argb_);
-    WebPPictureGrabSpecs(NULL, picture);
+    PictureResetYUVA(picture);
+    PictureResetARGB(picture);
   }
 }
 
@@ -468,6 +508,43 @@ int WebPMemoryWrite(const uint8_t* data, size_t data_size,
   return 1;
 }
 
+//------------------------------------------------------------------------------
+// Detection of non-trivial transparency
+
+// Returns true if alpha[] has non-0xff values.
+static int CheckNonOpaque(const uint8_t* alpha, int width, int height,
+                          int x_step, int y_step) {
+  if (alpha == NULL) return 0;
+  while (height-- > 0) {
+    int x;
+    for (x = 0; x < width * x_step; x += x_step) {
+      if (alpha[x] != 0xff) return 1;  // TODO(skal): check 4/8 bytes at a time.
+    }
+    alpha += y_step;
+  }
+  return 0;
+}
+
+// Checking for the presence of non-opaque alpha.
+int WebPPictureHasTransparency(const WebPPicture* const picture) {
+  if (picture == NULL) return 0;
+  if (!picture->use_argb_input) {
+    return CheckNonOpaque(picture->a, picture->width, picture->height,
+                          1, picture->a_stride);
+  } else {
+    int x, y;
+    const uint32_t* argb = picture->argb;
+    if (argb == NULL) return 0;
+    for (y = 0; y < picture->height; ++y) {
+      for (x = 0; x < picture->width; ++x) {
+        if (argb[x] < 0xff000000u) return 1;   // test any alpha values != 0xff
+      }
+      argb += picture->argb_stride;
+    }
+  }
+  return 0;
+}
+
 //------------------------------------------------------------------------------
 // RGB -> YUV conversion
 // The exact naming is Y'CbCr, following the ITU-R BT.601 standard.
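A hedged sketch of how this relocated transparency check might be used together with the existing cleanup helper (the wrapper function is an illustrative assumption, not part of this change):

#include "webp/encode.h"

// Sketch only: CleanupIfTransparent is illustrative, not part of the commit.
// 'pic' is assumed to be an already-imported picture in YUVA form, since
// WebPCleanupTransparentArea() operates on the YUV samples.
static void CleanupIfTransparent(WebPPicture* const pic) {
  if (WebPPictureHasTransparency(pic)) {
    // Some alpha values are below 0xff: flatten the samples hidden under
    // fully transparent pixels so they compress better.
    WebPCleanupTransparentArea(pic);
  }
}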
@@ -534,25 +611,26 @@ static void MakeGray(WebPPicture* const picture) {
   }
 }
 
-static int Import(WebPPicture* const picture,
-                  const uint8_t* const rgb, int rgb_stride,
-                  int step, int swap_rb, int import_alpha) {
+static int ImportYUVAFromRGBA(const uint8_t* const r_ptr,
+                              const uint8_t* const g_ptr,
+                              const uint8_t* const b_ptr,
+                              const uint8_t* const a_ptr,
+                              int step,         // bytes per pixel
+                              int rgb_stride,   // bytes per scanline
+                              WebPPicture* const picture) {
   const WebPEncCSP uv_csp = picture->colorspace & WEBP_CSP_UV_MASK;
   int x, y;
-  const uint8_t* const r_ptr = rgb + (swap_rb ? 2 : 0);
-  const uint8_t* const g_ptr = rgb + 1;
-  const uint8_t* const b_ptr = rgb + (swap_rb ? 0 : 2);
   const int width = picture->width;
   const int height = picture->height;
+  const int has_alpha = CheckNonOpaque(a_ptr, width, height, step, rgb_stride);
 
-  if (import_alpha) {
+  picture->colorspace = uv_csp;
+  picture->use_argb_input = 0;
+  if (has_alpha) {
     picture->colorspace |= WEBP_CSP_ALPHA_BIT;
-  } else {
-    picture->colorspace &= ~WEBP_CSP_ALPHA_BIT;
   }
   if (!WebPPictureAlloc(picture)) return 0;
 
-  if (!picture->use_argb_input) {
   // Import luma plane
   for (y = 0; y < height; ++y) {
     for (x = 0; x < width; ++x) {
@@ -604,8 +682,7 @@ static int Import(WebPPicture* const picture,
     MakeGray(picture);
   }
 
-  if (import_alpha) {
-    const uint8_t* const a_ptr = rgb + 3;
+  if (has_alpha) {
     assert(step >= 4);
     for (y = 0; y < height; ++y) {
       for (x = 0; x < width; ++x) {
@@ -614,13 +691,37 @@ static int Import(WebPPicture* const picture,
       }
     }
   }
+  return 1;
+}
+
+static int Import(WebPPicture* const picture,
+                  const uint8_t* const rgb, int rgb_stride,
+                  int step, int swap_rb, int import_alpha) {
+  const uint8_t* const r_ptr = rgb + (swap_rb ? 2 : 0);
+  const uint8_t* const g_ptr = rgb + 1;
+  const uint8_t* const b_ptr = rgb + (swap_rb ? 0 : 2);
+  const uint8_t* const a_ptr = import_alpha ? rgb + 3 : NULL;
+  const int width = picture->width;
+  const int height = picture->height;
+
+  if (!picture->use_argb_input) {
+    return ImportYUVAFromRGBA(r_ptr, g_ptr, b_ptr, a_ptr, step, rgb_stride,
+                              picture);
+  }
+  if (import_alpha) {
+    picture->colorspace |= WEBP_CSP_ALPHA_BIT;
   } else {
+    picture->colorspace &= ~WEBP_CSP_ALPHA_BIT;
+  }
+  if (!WebPPictureAlloc(picture)) return 0;
+
   if (!import_alpha) {
+    int x, y;
     for (y = 0; y < height; ++y) {
       for (x = 0; x < width; ++x) {
         const int offset = step * x + y * rgb_stride;
         const uint32_t argb =
-            0xff000000 |
+            0xff000000u |
             (r_ptr[offset] << 16) |
             (g_ptr[offset] << 8) |
             (b_ptr[offset]);
@@ -628,13 +729,12 @@ static int Import(WebPPicture* const picture,
       }
     }
   } else {
-    const uint8_t* const a_ptr = rgb + 3;
+    int x, y;
     assert(step >= 4);
     for (y = 0; y < height; ++y) {
       for (x = 0; x < width; ++x) {
         const int offset = step * x + y * rgb_stride;
-        const uint32_t argb =
-            (a_ptr[offset] << 24) |
+        const uint32_t argb = (a_ptr[offset] << 24) |
             (r_ptr[offset] << 16) |
             (g_ptr[offset] << 8) |
             (b_ptr[offset]);
@@ -642,7 +742,6 @@ static int Import(WebPPicture* const picture,
       }
     }
   }
-  }
   return 1;
 }
 #undef SUM4
@@ -681,6 +780,96 @@ int WebPPictureImportBGRX(WebPPicture* const picture,
   return Import(picture, rgba, rgba_stride, 4, 1, 0);
 }
 
+//------------------------------------------------------------------------------
+// Automatic YUV <-> ARGB conversions.
+
+int WebPPictureYUVAToARGB(WebPPicture* const picture) {
+  if (picture == NULL) return 0;
+  if (picture->memory_ == NULL || picture->y == NULL ||
+      picture->u == NULL || picture->v == NULL) {
+    return WebPEncodingSetError(picture, VP8_ENC_ERROR_NULL_PARAMETER);
+  }
+  if ((picture->colorspace & WEBP_CSP_ALPHA_BIT) && picture->a == NULL) {
+    return WebPEncodingSetError(picture, VP8_ENC_ERROR_NULL_PARAMETER);
+  }
+  if ((picture->colorspace & WEBP_CSP_UV_MASK) != WEBP_YUV420) {
+    return WebPEncodingSetError(picture, VP8_ENC_ERROR_INVALID_CONFIGURATION);
+  }
+  // Allocate a new argb buffer (discarding the previous one).
+  if (!PictureAllocARGB(picture)) return 0;
+
+  // Convert
+  {
+    int y;
+    const int width = picture->width;
+    const int height = picture->height;
+    const int argb_stride = 4 * picture->argb_stride;
+    uint8_t* dst = (uint8_t*)picture->argb;
+    const uint8_t *cur_u = picture->u, *cur_v = picture->v, *cur_y = picture->y;
+    WebPUpsampleLinePairFunc upsample = WebPGetLinePairConverter(ALPHA_IS_LAST);
+
+    // First row, with replicated top samples.
+    upsample(NULL, cur_y, cur_u, cur_v, cur_u, cur_v, NULL, dst, width);
+    cur_y += picture->y_stride;
+    dst += argb_stride;
+    // Center rows.
+    for (y = 1; y + 1 < height; y += 2) {
+      const uint8_t* const top_u = cur_u;
+      const uint8_t* const top_v = cur_v;
+      cur_u += picture->uv_stride;
+      cur_v += picture->uv_stride;
+      upsample(cur_y, cur_y + picture->y_stride, top_u, top_v, cur_u, cur_v,
+               dst, dst + argb_stride, width);
+      cur_y += 2 * picture->y_stride;
+      dst += 2 * argb_stride;
+    }
+    // Last row (if needed), with replicated bottom samples.
+    if (height > 1 && !(height & 1)) {
+      upsample(cur_y, NULL, cur_u, cur_v, cur_u, cur_v, dst, NULL, width);
+    }
+    // Insert alpha values if needed, in replacement for the default 0xff ones.
+    if (picture->colorspace & WEBP_CSP_ALPHA_BIT) {
+      for (y = 0; y < height; ++y) {
+        uint32_t* const dst = picture->argb + y * picture->argb_stride;
+        const uint8_t* const src = picture->a + y * picture->a_stride;
+        int x;
+        for (x = 0; x < width; ++x) {
+          dst[x] = (dst[x] & 0x00ffffffu) | (src[x] << 24);
+        }
+      }
+    }
+  }
+  return 1;
+}
+
+int WebPPictureARGBToYUVA(WebPPicture* const picture, WebPEncCSP colorspace) {
+  if (picture == NULL) return 0;
+  if (picture->argb == NULL) {
+    return WebPEncodingSetError(picture, VP8_ENC_ERROR_NULL_PARAMETER);
+  } else {
+    const uint8_t* const argb = (const uint8_t*)picture->argb;
+    const uint8_t* const r = ALPHA_IS_LAST ? argb + 2 : argb + 1;
+    const uint8_t* const g = ALPHA_IS_LAST ? argb + 1 : argb + 2;
+    const uint8_t* const b = ALPHA_IS_LAST ? argb + 0 : argb + 3;
+    const uint8_t* const a = ALPHA_IS_LAST ? argb + 3 : argb + 0;
+    // We work on a tmp copy of 'picture', because ImportYUVAFromRGBA()
+    // would be calling WebPPictureFree(picture) otherwise.
+    WebPPicture tmp = *picture;
+    PictureResetARGB(&tmp);  // reset ARGB buffer so that it's not free()'d.
+    tmp.use_argb_input = 0;
+    tmp.colorspace = colorspace & WEBP_CSP_UV_MASK;
+    if (!ImportYUVAFromRGBA(r, g, b, a, 4, 4 * picture->argb_stride, &tmp)) {
+      return WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
+    }
+    // Copy back the YUV specs into 'picture'.
+    tmp.argb = picture->argb;
+    tmp.argb_stride = picture->argb_stride;
+    tmp.memory_argb_ = picture->memory_argb_;
+    *picture = tmp;
+  }
+  return 1;
+}
+
 //------------------------------------------------------------------------------
 // Helper: clean up fully transparent area to help compressibility.
 
@@ -746,32 +935,6 @@ void WebPCleanupTransparentArea(WebPPicture* const pic) {
 #undef SIZE
 #undef SIZE2
 
-// Checking for the presence of non-opaque alpha.
-int WebPPictureHasTransparency(const WebPPicture* const pic) {
-  if (pic == NULL) return 0;
-  if (!pic->use_argb_input) {
-    int x, y;
-    const uint8_t* alpha = pic->a;
-    if (alpha == NULL) return 0;
-    for (y = 0; y < pic->height; ++y) {
-      for (x = 0; x < pic->width; ++x) {
-        if (alpha[x] != 0xff) return 1;
-      }
-      alpha += pic->a_stride;
-    }
-  } else {
-    int x, y;
-    const uint32_t* argb = pic->argb;
-    if (argb == NULL) return 1;
-    for (y = 0; y < pic->height; ++y) {
-      for (x = 0; x < pic->width; ++x) {
-        if (argb[x] < 0xff000000) return 1;  // test any alpha values != 0xff
-      }
-      argb += pic->argb_stride;
-    }
-  }
-  return 0;
-}
-
 //------------------------------------------------------------------------------
 // Distortion
@@ -794,6 +957,11 @@ int WebPPictureDistortion(const WebPPicture* const pic1,
       result == NULL) {
     return 0;
   }
+  // TODO(skal): provide distortion for ARGB too.
+  if (pic1->use_argb_input == 1 ||
+      pic1->use_argb_input != pic2->use_argb_input) {
+    return 0;
+  }
 
   has_alpha = !!(pic1->colorspace & WEBP_CSP_ALPHA_BIT);
   if (has_alpha != !!(pic2->colorspace & WEBP_CSP_ALPHA_BIT) ||
@@ -345,8 +345,13 @@ int WebPEncode(const WebPConfig* const config, WebPPicture* const pic) {
 
   if (!config->lossless) {
     VP8Encoder* enc = NULL;
-    if (pic->y == NULL || pic->u == NULL || pic->v == NULL)
+    if (pic->y == NULL || pic->u == NULL || pic->v == NULL) {
+      if (pic->argb != NULL) {
+        if (!WebPPictureARGBToYUVA(pic, WEBP_YUV420)) return 0;
+      } else {
       return WebPEncodingSetError(pic, VP8_ENC_ERROR_NULL_PARAMETER);
+      }
+    }
 
     enc = InitVP8Encoder(config, pic);
     if (enc == NULL) return 0;  // pic->error is already set.
@@ -349,15 +349,31 @@ WEBP_EXTERN(int) WebPPictureImportBGRA(
 WEBP_EXTERN(int) WebPPictureImportBGRX(
     WebPPicture* const picture, const uint8_t* const bgrx, int bgrx_stride);
 
+// Converts picture->argb data to the YUVA format specified by 'colorspace'.
+// Upon return, picture->use_argb_input is set to false. The presence of
+// real non-opaque transparent values is detected, and 'colorspace' will be
+// adjusted accordingly. Note that this method is lossy.
+// Returns false in case of error.
+WEBP_EXTERN(int) WebPPictureARGBToYUVA(WebPPicture* const picture,
+                                       WebPEncCSP colorspace);
+
+// Converts picture->yuv to picture->argb and sets picture->use_argb_input
+// to true. The input format must be YUV_420 or YUV_420A.
+// Note that the use of this method is discouraged if one has access to the
+// raw ARGB samples, since using YUV420 is comparatively lossy. Also, the
+// conversion from YUV420 to ARGB incurs a small loss too.
+// Returns false in case of error.
+WEBP_EXTERN(int) WebPPictureYUVAToARGB(WebPPicture* const picture);
+
 // Helper function: given a width x height plane of YUV(A) samples
 // (with stride 'stride'), clean-up the YUV samples under fully transparent
 // area, to help compressibility (no guarantee, though).
 WEBP_EXTERN(void) WebPCleanupTransparentArea(WebPPicture* const picture);
 
-// Scan the picture 'pic' for the presence of non fully opaque alpha values.
+// Scan the picture 'picture' for the presence of non fully opaque alpha values.
 // Returns true in such case. Otherwise returns false (indicating that the
 // alpha plane can be ignored altogether e.g.).
-WEBP_EXTERN(int) WebPPictureHasTransparency(const WebPPicture* const pic);
+WEBP_EXTERN(int) WebPPictureHasTransparency(const WebPPicture* const picture);
 
 //------------------------------------------------------------------------------
 // Main call
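A hedged sketch of calling the two new conversion entry points directly, instead of relying on the automatic conversion inside WebPEncode() (the helper and its flow are illustrative assumptions, not part of this change):

#include "webp/encode.h"

// Sketch only: RoundTripSketch is illustrative, not part of the library.
// 'pic' is assumed to have been imported with use_argb_input=1, so pic->argb
// holds the samples.
static int RoundTripSketch(WebPPicture* const pic) {
  // Lossy encoding wants YUV420(A); this is the same conversion WebPEncode()
  // now performs automatically when it only finds ARGB samples.
  if (!WebPPictureARGBToYUVA(pic, WEBP_YUV420)) return 0;  // RGB -> YUV is lossy

  // ... work on the YUVA planes here ...

  // Converting back is lossy too: the 4:2:0 chroma subsampling is not undone.
  if (!WebPPictureYUVAToARGB(pic)) return 0;
  return 1;
}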
@@ -367,8 +383,13 @@ WEBP_EXTERN(int) WebPPictureHasTransparency(const WebPPicture* const pic);
 // and the 'config' object must be a valid one.
 // Returns false in case of error, true otherwise.
 // In case of error, picture->error_code is updated accordingly.
-WEBP_EXTERN(int) WebPEncode(
-    const WebPConfig* const config, WebPPicture* const picture);
+// 'picture' can hold the source samples in both YUV(A) or ARGB input, depending
+// on the value of 'picture->use_argb_input'. It is highly recommended to
+// use the former for lossy encoding, and the latter for lossless encoding
+// (when config.lossless is true). Automatic conversion from one format to
+// another is provided but they both incur some loss.
+WEBP_EXTERN(int) WebPEncode(const WebPConfig* const config,
+                            WebPPicture* const picture);
 
 //------------------------------------------------------------------------------
 