// Copyright 2011 Google Inc. All Rights Reserved.
//
// This code is licensed under the same terms as WebM:
//  Software License Agreement:  http://www.webmproject.org/license/software/
//  Additional IP Rights Grant:  http://www.webmproject.org/license/additional/
// -----------------------------------------------------------------------------
//
// WebPPicture utils: colorspace conversion, crop, ...
//
// Author: Skal (pascal.massimino@gmail.com)

#include <assert.h>
#include <stdlib.h>
#include <math.h>

#include "./vp8enci.h"
#include "../utils/rescaler.h"
#include "../utils/utils.h"
#include "../dsp/dsp.h"
#include "../dsp/yuv.h"

#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif

#define HALVE(x) (((x) + 1) >> 1)
#define IS_YUV_CSP(csp, YUV_CSP) (((csp) & WEBP_CSP_UV_MASK) == (YUV_CSP))

static const union {
  uint32_t argb;
  uint8_t bytes[4];
} test_endian = { 0xff000000u };
#define ALPHA_IS_LAST (test_endian.bytes[3] == 0xff)

//------------------------------------------------------------------------------
// WebPPicture
//------------------------------------------------------------------------------

int WebPPictureAlloc(WebPPicture* picture) {
  if (picture != NULL) {
    const WebPEncCSP uv_csp = picture->colorspace & WEBP_CSP_UV_MASK;
    const int has_alpha = picture->colorspace & WEBP_CSP_ALPHA_BIT;
    const int width = picture->width;
    const int height = picture->height;

    if (!picture->use_argb) {
      const int y_stride = width;
      const int uv_width = HALVE(width);
      const int uv_height = HALVE(height);
      const int uv_stride = uv_width;
      int uv0_stride = 0;
      int a_width, a_stride;
      uint64_t y_size, uv_size, uv0_size, a_size, total_size;
      uint8_t* mem;

      // U/V
      switch (uv_csp) {
        case WEBP_YUV420:
          break;
#ifdef WEBP_EXPERIMENTAL_FEATURES
        case WEBP_YUV400:    // for now, we'll just reset the U/V samples
          break;
        case WEBP_YUV422:
          uv0_stride = uv_width;
          break;
        case WEBP_YUV444:
          uv0_stride = width;
          break;
#endif
        default:
          return 0;
      }
      uv0_size = height * uv0_stride;

      // alpha
      a_width = has_alpha ? width : 0;
      a_stride = a_width;
      y_size = (uint64_t)y_stride * height;
      uv_size = (uint64_t)uv_stride * uv_height;
      a_size = (uint64_t)a_stride * height;

      total_size = y_size + a_size + 2 * uv_size + 2 * uv0_size;

      // Security and validation checks
      if (width <= 0 || height <= 0 ||       // luma/alpha param error
          uv_width < 0 || uv_height < 0) {   // u/v param error
        return 0;
      }
      // Clear previous buffer and allocate a new one.
      WebPPictureFree(picture);   // erase previous buffer
      mem = (uint8_t*)WebPSafeMalloc(total_size, sizeof(*mem));
      if (mem == NULL) return 0;

      // From now on, we're in the clear, we can no longer fail...
      picture->memory_ = (void*)mem;
      picture->y_stride  = y_stride;
      picture->uv_stride = uv_stride;
      picture->a_stride  = a_stride;
      picture->uv0_stride = uv0_stride;
      // TODO(skal): we could align the y/u/v planes and adjust stride.
      picture->y = mem;
      mem += y_size;

      picture->u = mem;
      mem += uv_size;
      picture->v = mem;
      mem += uv_size;

      if (a_size) {
        picture->a = mem;
        mem += a_size;
      }
      if (uv0_size) {
        picture->u0 = mem;
        mem += uv0_size;
        picture->v0 = mem;
        mem += uv0_size;
      }
    } else {
      void* memory;
      const uint64_t argb_size = (uint64_t)width * height;
      if (width <= 0 || height <= 0) {
        return 0;
      }
      // Clear previous buffer and allocate a new one.
      WebPPictureFree(picture);   // erase previous buffer
      memory = WebPSafeMalloc(argb_size, sizeof(*picture->argb));
      if (memory == NULL) return 0;

      // TODO(skal): align plane to cache line?
      picture->memory_argb_ = memory;
      picture->argb = (uint32_t*)memory;
      picture->argb_stride = width;
    }
  }
  return 1;
}
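
// Usage sketch (illustrative only, not part of the library; variable names
// and values are hypothetical): the caller initializes the structure, sets
// the dimensions and buffer mode, and only then allocates.
//
//   WebPPicture pic;
//   if (!WebPPictureInit(&pic)) return 0;
//   pic.width = 640;
//   pic.height = 480;
//   pic.use_argb = 1;                       // request the ARGB buffer
//   if (!WebPPictureAlloc(&pic)) return 0;  // out of memory
//   // ... fill pic.argb, encode, etc. ...
//   WebPPictureFree(&pic);                  // release the owned buffer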

// Remove reference to the ARGB buffer (doesn't free anything).
static void PictureResetARGB(WebPPicture* const picture) {
  picture->memory_argb_ = NULL;
  picture->argb = NULL;
  picture->argb_stride = 0;
}

// Remove reference to the YUVA buffer (doesn't free anything).
static void PictureResetYUVA(WebPPicture* const picture) {
  picture->memory_ = NULL;
  picture->y = picture->u = picture->v = picture->a = NULL;
  picture->u0 = picture->v0 = NULL;
  picture->y_stride = picture->uv_stride = 0;
  picture->a_stride = 0;
  picture->uv0_stride = 0;
}

// Grab the 'specs' (writer, *opaque, width, height...) from 'src' and copy them
// into 'dst'. Mark 'dst' as not owning any memory.
static void WebPPictureGrabSpecs(const WebPPicture* const src,
                                 WebPPicture* const dst) {
  assert(src != NULL && dst != NULL);
  *dst = *src;
  PictureResetYUVA(dst);
  PictureResetARGB(dst);
}

// Allocate a new argb buffer, discarding any existing one and preserving
// the other YUV(A) buffer.
static int PictureAllocARGB(WebPPicture* const picture) {
  WebPPicture tmp;
  free(picture->memory_argb_);
  PictureResetARGB(picture);
  picture->use_argb = 1;
  WebPPictureGrabSpecs(picture, &tmp);
  if (!WebPPictureAlloc(&tmp)) {
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
  }
  picture->memory_argb_ = tmp.memory_argb_;
  picture->argb = tmp.argb;
  picture->argb_stride = tmp.argb_stride;
  return 1;
}

// Release memory owned by 'picture' (both YUV and ARGB buffers).
void WebPPictureFree(WebPPicture* picture) {
  if (picture != NULL) {
    free(picture->memory_);
    free(picture->memory_argb_);
    PictureResetYUVA(picture);
    PictureResetARGB(picture);
  }
}

//------------------------------------------------------------------------------
// Picture copying

// Not worth moving to dsp/enc.c (only used here).
static void CopyPlane(const uint8_t* src, int src_stride,
                      uint8_t* dst, int dst_stride, int width, int height) {
  while (height-- > 0) {
    memcpy(dst, src, width);
    src += src_stride;
    dst += dst_stride;
  }
}

// Adjust top-left corner to chroma sample position.
static void SnapTopLeftPosition(const WebPPicture* const pic,
                                int* const left, int* const top) {
  if (!pic->use_argb) {
    const int is_yuv422 = IS_YUV_CSP(pic->colorspace, WEBP_YUV422);
    if (IS_YUV_CSP(pic->colorspace, WEBP_YUV420) || is_yuv422) {
      *left &= ~1;
      if (!is_yuv422) *top &= ~1;
    }
  }
}

// Adjust top-left corner and verify that the sub-rectangle is valid.
static int AdjustAndCheckRectangle(const WebPPicture* const pic,
                                   int* const left, int* const top,
                                   int width, int height) {
  SnapTopLeftPosition(pic, left, top);
  if ((*left) < 0 || (*top) < 0) return 0;
  if (width <= 0 || height <= 0) return 0;
  if ((*left) + width > pic->width) return 0;
  if ((*top) + height > pic->height) return 0;
  return 1;
}

int WebPPictureCopy(const WebPPicture* src, WebPPicture* dst) {
  if (src == NULL || dst == NULL) return 0;
  if (src == dst) return 1;

  WebPPictureGrabSpecs(src, dst);
  if (!WebPPictureAlloc(dst)) return 0;

  if (!src->use_argb) {
    CopyPlane(src->y, src->y_stride,
              dst->y, dst->y_stride, dst->width, dst->height);
    CopyPlane(src->u, src->uv_stride,
              dst->u, dst->uv_stride, HALVE(dst->width), HALVE(dst->height));
    CopyPlane(src->v, src->uv_stride,
              dst->v, dst->uv_stride, HALVE(dst->width), HALVE(dst->height));
    if (dst->a != NULL) {
      CopyPlane(src->a, src->a_stride,
                dst->a, dst->a_stride, dst->width, dst->height);
    }
#ifdef WEBP_EXPERIMENTAL_FEATURES
    if (dst->u0 != NULL) {
      int uv0_width = src->width;
      if (IS_YUV_CSP(dst->colorspace, WEBP_YUV422)) {
        uv0_width = HALVE(uv0_width);
      }
      CopyPlane(src->u0, src->uv0_stride,
                dst->u0, dst->uv0_stride, uv0_width, dst->height);
      CopyPlane(src->v0, src->uv0_stride,
                dst->v0, dst->uv0_stride, uv0_width, dst->height);
    }
#endif
  } else {
    CopyPlane((const uint8_t*)src->argb, 4 * src->argb_stride,
              (uint8_t*)dst->argb, 4 * dst->argb_stride,
              4 * dst->width, dst->height);
  }
  return 1;
}

int WebPPictureIsView(const WebPPicture* picture) {
  if (picture == NULL) return 0;
  if (picture->use_argb) {
    return (picture->memory_argb_ == NULL);
  }
  return (picture->memory_ == NULL);
}

int WebPPictureView(const WebPPicture* src,
                    int left, int top, int width, int height,
                    WebPPicture* dst) {
  if (src == NULL || dst == NULL) return 0;

  // verify rectangle position.
  if (!AdjustAndCheckRectangle(src, &left, &top, width, height)) return 0;

  if (src != dst) {   // beware of aliasing! We don't want to leak 'memory_'.
    WebPPictureGrabSpecs(src, dst);
  }
  dst->width = width;
  dst->height = height;
  if (!src->use_argb) {
    dst->y = src->y + top * src->y_stride + left;
    dst->u = src->u + (top >> 1) * src->uv_stride + (left >> 1);
    dst->v = src->v + (top >> 1) * src->uv_stride + (left >> 1);
    if (src->a != NULL) {
      dst->a = src->a + top * src->a_stride + left;
    }
#ifdef WEBP_EXPERIMENTAL_FEATURES
    if (src->u0 != NULL) {
      const int left_pos =
          IS_YUV_CSP(dst->colorspace, WEBP_YUV422) ? (left >> 1) : left;
      dst->u0 = src->u0 + top * src->uv0_stride + left_pos;
      dst->v0 = src->v0 + top * src->uv0_stride + left_pos;
    }
#endif
  } else {
    dst->argb = src->argb + top * src->argb_stride + left;
  }
  return 1;
}
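
// Usage sketch (illustrative, hypothetical names): a view shares the source
// buffers instead of copying them, so the source picture must outlive the
// view; calling WebPPictureFree() on the view releases nothing, since the
// view owns no memory.
//
//   WebPPicture region;
//   // 'full' is an already-allocated WebPPicture; for YUV pictures the
//   // top-left corner is snapped to the chroma grid.
//   if (WebPPictureView(&full, 32, 32, 256, 256, &region)) {
//     assert(WebPPictureIsView(&region));   // no memory is owned here
//     // ... encode or inspect 'region' ...
//   }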

//------------------------------------------------------------------------------
// Picture cropping

int WebPPictureCrop(WebPPicture* pic,
                    int left, int top, int width, int height) {
  WebPPicture tmp;

  if (pic == NULL) return 0;
  if (!AdjustAndCheckRectangle(pic, &left, &top, width, height)) return 0;

  WebPPictureGrabSpecs(pic, &tmp);
  tmp.width = width;
  tmp.height = height;
  if (!WebPPictureAlloc(&tmp)) return 0;

  if (!pic->use_argb) {
    const int y_offset = top * pic->y_stride + left;
    const int uv_offset = (top / 2) * pic->uv_stride + left / 2;
    CopyPlane(pic->y + y_offset, pic->y_stride,
              tmp.y, tmp.y_stride, width, height);
    CopyPlane(pic->u + uv_offset, pic->uv_stride,
              tmp.u, tmp.uv_stride, HALVE(width), HALVE(height));
    CopyPlane(pic->v + uv_offset, pic->uv_stride,
              tmp.v, tmp.uv_stride, HALVE(width), HALVE(height));

    if (tmp.a != NULL) {
      const int a_offset = top * pic->a_stride + left;
      CopyPlane(pic->a + a_offset, pic->a_stride,
                tmp.a, tmp.a_stride, width, height);
    }
#ifdef WEBP_EXPERIMENTAL_FEATURES
    if (tmp.u0 != NULL) {
      int w = width;
      int left_pos = left;
      if (IS_YUV_CSP(tmp.colorspace, WEBP_YUV422)) {
        w = HALVE(w);
        left_pos = HALVE(left_pos);
      }
      CopyPlane(pic->u0 + top * pic->uv0_stride + left_pos, pic->uv0_stride,
                tmp.u0, tmp.uv0_stride, w, height);
      CopyPlane(pic->v0 + top * pic->uv0_stride + left_pos, pic->uv0_stride,
                tmp.v0, tmp.uv0_stride, w, height);
    }
#endif
  } else {
    const uint8_t* const src =
        (const uint8_t*)(pic->argb + top * pic->argb_stride + left);
    CopyPlane(src, pic->argb_stride * 4,
              (uint8_t*)tmp.argb, tmp.argb_stride * 4,
              width * 4, height);
  }
  WebPPictureFree(pic);
  *pic = tmp;
  return 1;
}

//------------------------------------------------------------------------------
// Simple picture rescaler

static void RescalePlane(const uint8_t* src,
                         int src_width, int src_height, int src_stride,
                         uint8_t* dst,
                         int dst_width, int dst_height, int dst_stride,
                         int32_t* const work,
                         int num_channels) {
  WebPRescaler rescaler;
  int y = 0;
  WebPRescalerInit(&rescaler, src_width, src_height,
                   dst, dst_width, dst_height, dst_stride,
                   num_channels,
                   src_width, dst_width,
                   src_height, dst_height,
                   work);
  memset(work, 0, 2 * dst_width * num_channels * sizeof(*work));
  while (y < src_height) {
    y += WebPRescalerImport(&rescaler, src_height - y,
                            src + y * src_stride, src_stride);
    WebPRescalerExport(&rescaler);
  }
}

int WebPPictureRescale(WebPPicture* pic, int width, int height) {
  WebPPicture tmp;
  int prev_width, prev_height;
  int32_t* work;

  if (pic == NULL) return 0;
  prev_width = pic->width;
  prev_height = pic->height;
  // if width is unspecified, scale original proportionally to height ratio.
  if (width == 0) {
    width = (prev_width * height + prev_height / 2) / prev_height;
  }
  // if height is unspecified, scale original proportionally to width ratio.
  if (height == 0) {
    height = (prev_height * width + prev_width / 2) / prev_width;
  }
  // Check if the overall dimensions still make sense.
  if (width <= 0 || height <= 0) return 0;

  WebPPictureGrabSpecs(pic, &tmp);
  tmp.width = width;
  tmp.height = height;
  if (!WebPPictureAlloc(&tmp)) return 0;

  if (!pic->use_argb) {
    work = (int32_t*)WebPSafeMalloc(2ULL * width, sizeof(*work));
    if (work == NULL) {
      WebPPictureFree(&tmp);
      return 0;
    }

    RescalePlane(pic->y, prev_width, prev_height, pic->y_stride,
                 tmp.y, width, height, tmp.y_stride, work, 1);
    RescalePlane(pic->u,
                 HALVE(prev_width), HALVE(prev_height), pic->uv_stride,
                 tmp.u,
                 HALVE(width), HALVE(height), tmp.uv_stride, work, 1);
    RescalePlane(pic->v,
                 HALVE(prev_width), HALVE(prev_height), pic->uv_stride,
                 tmp.v,
                 HALVE(width), HALVE(height), tmp.uv_stride, work, 1);

    if (tmp.a != NULL) {
      RescalePlane(pic->a, prev_width, prev_height, pic->a_stride,
                   tmp.a, width, height, tmp.a_stride, work, 1);
    }
#ifdef WEBP_EXPERIMENTAL_FEATURES
    if (tmp.u0 != NULL) {
      const int s = IS_YUV_CSP(tmp.colorspace, WEBP_YUV422) ? 2 : 1;
      RescalePlane(
          pic->u0, (prev_width + s / 2) / s, prev_height, pic->uv0_stride,
          tmp.u0, (width + s / 2) / s, height, tmp.uv0_stride, work, 1);
      RescalePlane(
          pic->v0, (prev_width + s / 2) / s, prev_height, pic->uv0_stride,
          tmp.v0, (width + s / 2) / s, height, tmp.uv0_stride, work, 1);
    }
#endif
  } else {
    work = (int32_t*)WebPSafeMalloc(2ULL * width * 4, sizeof(*work));
    if (work == NULL) {
      WebPPictureFree(&tmp);
      return 0;
    }

    RescalePlane((const uint8_t*)pic->argb, prev_width, prev_height,
                 pic->argb_stride * 4,
                 (uint8_t*)tmp.argb, width, height,
                 tmp.argb_stride * 4,
                 work, 4);
  }
  WebPPictureFree(pic);
  free(work);
  *pic = tmp;
  return 1;
}
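
// Usage sketch (hypothetical values): passing 0 for one dimension keeps the
// aspect ratio, as implemented above.
//
//   // Shrink 'pic' so that its width becomes 320, with the height following
//   // proportionally (rounded to nearest).
//   if (!WebPPictureRescale(&pic, 320, 0)) {
//     // allocation failed or bad dimensions
//   }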

//------------------------------------------------------------------------------
// WebPMemoryWriter: Write-to-memory

void WebPMemoryWriterInit(WebPMemoryWriter* writer) {
  writer->mem = NULL;
  writer->size = 0;
  writer->max_size = 0;
}

int WebPMemoryWrite(const uint8_t* data, size_t data_size,
                    const WebPPicture* picture) {
  WebPMemoryWriter* const w = (WebPMemoryWriter*)picture->custom_ptr;
  uint64_t next_size;
  if (w == NULL) {
    return 1;
  }
  next_size = (uint64_t)w->size + data_size;
  if (next_size > w->max_size) {
    uint8_t* new_mem;
    uint64_t next_max_size = 2ULL * w->max_size;
    if (next_max_size < next_size) next_max_size = next_size;
    if (next_max_size < 8192ULL) next_max_size = 8192ULL;
    new_mem = (uint8_t*)WebPSafeMalloc(next_max_size, 1);
    if (new_mem == NULL) {
      return 0;
    }
    if (w->size > 0) {
      memcpy(new_mem, w->mem, w->size);
    }
    free(w->mem);
    w->mem = new_mem;
    // down-cast is ok, thanks to WebPSafeMalloc
    w->max_size = (size_t)next_max_size;
  }
  if (data_size > 0) {
    memcpy(w->mem + w->size, data, data_size);
    w->size += data_size;
  }
  return 1;
}
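
// Usage sketch (mirrors the Encode() helper further below; 'config' and 'pic'
// are assumed to be already set up): the writer grows a malloc'ed buffer that
// the caller must eventually free().
//
//   WebPMemoryWriter wrt;
//   WebPMemoryWriterInit(&wrt);
//   pic.writer = WebPMemoryWrite;
//   pic.custom_ptr = &wrt;
//   if (WebPEncode(&config, &pic)) {
//     // wrt.mem / wrt.size hold the bitstream; free(wrt.mem) when done.
//   } else {
//     free(wrt.mem);
//   }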

//------------------------------------------------------------------------------
// Detection of non-trivial transparency

// Returns true if alpha[] has non-0xff values.
static int CheckNonOpaque(const uint8_t* alpha, int width, int height,
                          int x_step, int y_step) {
  if (alpha == NULL) return 0;
  while (height-- > 0) {
    int x;
    for (x = 0; x < width * x_step; x += x_step) {
      if (alpha[x] != 0xff) return 1;  // TODO(skal): check 4/8 bytes at a time.
    }
    alpha += y_step;
  }
  return 0;
}

// Check for the presence of non-opaque alpha.
int WebPPictureHasTransparency(const WebPPicture* picture) {
  if (picture == NULL) return 0;
  if (!picture->use_argb) {
    return CheckNonOpaque(picture->a, picture->width, picture->height,
                          1, picture->a_stride);
  } else {
    int x, y;
    const uint32_t* argb = picture->argb;
    if (argb == NULL) return 0;
    for (y = 0; y < picture->height; ++y) {
      for (x = 0; x < picture->width; ++x) {
        if (argb[x] < 0xff000000u) return 1;   // test any alpha values != 0xff
      }
      argb += picture->argb_stride;
    }
  }
  return 0;
}

//------------------------------------------------------------------------------
// RGB -> YUV conversion

// TODO: we can do better than simply 2x2 averaging on U/V samples.
#define SUM4(ptr) ((ptr)[0] + (ptr)[step] + \
                   (ptr)[rgb_stride] + (ptr)[rgb_stride + step])
#define SUM2H(ptr) (2 * (ptr)[0] + 2 * (ptr)[step])
#define SUM2V(ptr) (2 * (ptr)[0] + 2 * (ptr)[rgb_stride])
#define SUM1(ptr)  (4 * (ptr)[0])
#define RGB_TO_UV(x, y, SUM) {                            \
  const int src = (2 * (step * (x) + (y) * rgb_stride));  \
  const int dst = (x) + (y) * picture->uv_stride;         \
  const int r = SUM(r_ptr + src);                         \
  const int g = SUM(g_ptr + src);                         \
  const int b = SUM(b_ptr + src);                         \
  picture->u[dst] = VP8RGBToU(r, g, b);                   \
  picture->v[dst] = VP8RGBToV(r, g, b);                   \
}

#define RGB_TO_UV0(x_in, x_out, y, SUM) {                 \
  const int src = (step * (x_in) + (y) * rgb_stride);     \
  const int dst = (x_out) + (y) * picture->uv0_stride;    \
  const int r = SUM(r_ptr + src);                         \
  const int g = SUM(g_ptr + src);                         \
  const int b = SUM(b_ptr + src);                         \
  picture->u0[dst] = VP8RGBToU(r, g, b);                  \
  picture->v0[dst] = VP8RGBToV(r, g, b);                  \
}
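
// Note on scaling (descriptive, derived from the macros above): each SUM*
// macro yields a value summed over four samples -- SUM4 adds a full 2x2
// block, SUM2H/SUM2V double a horizontal or vertical pair at odd borders,
// and SUM1 quadruples a single corner sample -- so every path feeds
// VP8RGBToU()/VP8RGBToV() values at the same x4 scale.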

static void MakeGray(WebPPicture* const picture) {
  int y;
  const int uv_width = HALVE(picture->width);
  const int uv_height = HALVE(picture->height);
  for (y = 0; y < uv_height; ++y) {
    memset(picture->u + y * picture->uv_stride, 128, uv_width);
    memset(picture->v + y * picture->uv_stride, 128, uv_width);
  }
}

static int ImportYUVAFromRGBA(const uint8_t* const r_ptr,
                              const uint8_t* const g_ptr,
                              const uint8_t* const b_ptr,
                              const uint8_t* const a_ptr,
                              int step,         // bytes per pixel
                              int rgb_stride,   // bytes per scanline
                              WebPPicture* const picture) {
  const WebPEncCSP uv_csp = picture->colorspace & WEBP_CSP_UV_MASK;
  int x, y;
  const int width = picture->width;
  const int height = picture->height;
  const int has_alpha = CheckNonOpaque(a_ptr, width, height, step, rgb_stride);

  picture->colorspace = uv_csp;
  picture->use_argb = 0;
  if (has_alpha) {
    picture->colorspace |= WEBP_CSP_ALPHA_BIT;
  }
  if (!WebPPictureAlloc(picture)) return 0;

  // Import luma plane
  for (y = 0; y < height; ++y) {
    for (x = 0; x < width; ++x) {
      const int offset = step * x + y * rgb_stride;
      picture->y[x + y * picture->y_stride] =
          VP8RGBToY(r_ptr[offset], g_ptr[offset], b_ptr[offset]);
    }
  }

  // Downsample U/V plane
  if (uv_csp != WEBP_YUV400) {
    for (y = 0; y < (height >> 1); ++y) {
      for (x = 0; x < (width >> 1); ++x) {
        RGB_TO_UV(x, y, SUM4);
      }
      if (width & 1) {
        RGB_TO_UV(x, y, SUM2V);
      }
    }
    if (height & 1) {
      for (x = 0; x < (width >> 1); ++x) {
        RGB_TO_UV(x, y, SUM2H);
      }
      if (width & 1) {
        RGB_TO_UV(x, y, SUM1);
      }
    }

#ifdef WEBP_EXPERIMENTAL_FEATURES
    // Store original U/V samples too
    if (uv_csp == WEBP_YUV422) {
      for (y = 0; y < height; ++y) {
        for (x = 0; x < (width >> 1); ++x) {
          RGB_TO_UV0(2 * x, x, y, SUM2H);
        }
        if (width & 1) {
          RGB_TO_UV0(2 * x, x, y, SUM1);
        }
      }
    } else if (uv_csp == WEBP_YUV444) {
      for (y = 0; y < height; ++y) {
        for (x = 0; x < width; ++x) {
          RGB_TO_UV0(x, x, y, SUM1);
        }
      }
    }
#endif
  } else {
    MakeGray(picture);
  }

  if (has_alpha) {
    assert(step >= 4);
    for (y = 0; y < height; ++y) {
      for (x = 0; x < width; ++x) {
        picture->a[x + y * picture->a_stride] =
            a_ptr[step * x + y * rgb_stride];
      }
    }
  }
  return 1;
}

static int Import(WebPPicture* const picture,
                  const uint8_t* const rgb, int rgb_stride,
                  int step, int swap_rb, int import_alpha) {
  const uint8_t* const r_ptr = rgb + (swap_rb ? 2 : 0);
  const uint8_t* const g_ptr = rgb + 1;
  const uint8_t* const b_ptr = rgb + (swap_rb ? 0 : 2);
  const uint8_t* const a_ptr = import_alpha ? rgb + 3 : NULL;
  const int width = picture->width;
  const int height = picture->height;

  if (!picture->use_argb) {
    return ImportYUVAFromRGBA(r_ptr, g_ptr, b_ptr, a_ptr, step, rgb_stride,
                              picture);
  }
  if (import_alpha) {
    picture->colorspace |= WEBP_CSP_ALPHA_BIT;
  } else {
    picture->colorspace &= ~WEBP_CSP_ALPHA_BIT;
  }
  if (!WebPPictureAlloc(picture)) return 0;

  if (!import_alpha) {
    int x, y;
    for (y = 0; y < height; ++y) {
      for (x = 0; x < width; ++x) {
        const int offset = step * x + y * rgb_stride;
        const uint32_t argb =
            0xff000000u |
            (r_ptr[offset] << 16) |
            (g_ptr[offset] <<  8) |
            (b_ptr[offset]);
        picture->argb[x + y * picture->argb_stride] = argb;
      }
    }
  } else {
    int x, y;
    assert(step >= 4);
    for (y = 0; y < height; ++y) {
      for (x = 0; x < width; ++x) {
        const int offset = step * x + y * rgb_stride;
        const uint32_t argb = (a_ptr[offset] << 24) |
                              (r_ptr[offset] << 16) |
                              (g_ptr[offset] <<  8) |
                              (b_ptr[offset]);
        picture->argb[x + y * picture->argb_stride] = argb;
      }
    }
  }
  return 1;
}
#undef SUM4
#undef SUM2V
#undef SUM2H
#undef SUM1
#undef RGB_TO_UV

int WebPPictureImportRGB(WebPPicture* picture,
                         const uint8_t* rgb, int rgb_stride) {
  return Import(picture, rgb, rgb_stride, 3, 0, 0);
}

int WebPPictureImportBGR(WebPPicture* picture,
                         const uint8_t* rgb, int rgb_stride) {
  return Import(picture, rgb, rgb_stride, 3, 1, 0);
}

int WebPPictureImportRGBA(WebPPicture* picture,
                          const uint8_t* rgba, int rgba_stride) {
  return Import(picture, rgba, rgba_stride, 4, 0, 1);
}

int WebPPictureImportBGRA(WebPPicture* picture,
                          const uint8_t* rgba, int rgba_stride) {
  return Import(picture, rgba, rgba_stride, 4, 1, 1);
}

int WebPPictureImportRGBX(WebPPicture* picture,
                          const uint8_t* rgba, int rgba_stride) {
  return Import(picture, rgba, rgba_stride, 4, 0, 0);
}

int WebPPictureImportBGRX(WebPPicture* picture,
                          const uint8_t* rgba, int rgba_stride) {
  return Import(picture, rgba, rgba_stride, 4, 1, 0);
}
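
// Usage sketch (hypothetical buffer names, tightly packed RGBA input):
// dimensions must be set before importing; the importer allocates the YUVA
// or ARGB buffer itself depending on pic.use_argb.
//
//   WebPPicture pic;
//   WebPPictureInit(&pic);
//   pic.width = w;
//   pic.height = h;
//   pic.use_argb = 0;   // import straight to YUV(A)
//   if (!WebPPictureImportRGBA(&pic, rgba_buffer, 4 * w)) {
//     // import/allocation failed
//   }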

//------------------------------------------------------------------------------
// Automatic YUV <-> ARGB conversions.

int WebPPictureYUVAToARGB(WebPPicture* picture) {
  if (picture == NULL) return 0;
  if (picture->memory_ == NULL || picture->y == NULL ||
      picture->u == NULL || picture->v == NULL) {
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_NULL_PARAMETER);
  }
  if ((picture->colorspace & WEBP_CSP_ALPHA_BIT) && picture->a == NULL) {
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_NULL_PARAMETER);
  }
  if ((picture->colorspace & WEBP_CSP_UV_MASK) != WEBP_YUV420) {
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_INVALID_CONFIGURATION);
  }
  // Allocate a new argb buffer (discarding the previous one).
  if (!PictureAllocARGB(picture)) return 0;

  // Convert
  {
    int y;
    const int width = picture->width;
    const int height = picture->height;
    const int argb_stride = 4 * picture->argb_stride;
    uint8_t* dst = (uint8_t*)picture->argb;
    const uint8_t *cur_u = picture->u, *cur_v = picture->v, *cur_y = picture->y;
    WebPUpsampleLinePairFunc upsample = WebPGetLinePairConverter(ALPHA_IS_LAST);

    // First row, with replicated top samples.
    upsample(NULL, cur_y, cur_u, cur_v, cur_u, cur_v, NULL, dst, width);
    cur_y += picture->y_stride;
    dst += argb_stride;
    // Center rows.
    for (y = 1; y + 1 < height; y += 2) {
      const uint8_t* const top_u = cur_u;
      const uint8_t* const top_v = cur_v;
      cur_u += picture->uv_stride;
      cur_v += picture->uv_stride;
      upsample(cur_y, cur_y + picture->y_stride, top_u, top_v, cur_u, cur_v,
               dst, dst + argb_stride, width);
      cur_y += 2 * picture->y_stride;
      dst += 2 * argb_stride;
    }
    // Last row (if needed), with replicated bottom samples.
    if (height > 1 && !(height & 1)) {
      upsample(cur_y, NULL, cur_u, cur_v, cur_u, cur_v, dst, NULL, width);
    }
    // Insert alpha values if needed, replacing the default 0xff ones.
    if (picture->colorspace & WEBP_CSP_ALPHA_BIT) {
      for (y = 0; y < height; ++y) {
        uint32_t* const dst = picture->argb + y * picture->argb_stride;
        const uint8_t* const src = picture->a + y * picture->a_stride;
        int x;
        for (x = 0; x < width; ++x) {
          dst[x] = (dst[x] & 0x00ffffffu) | (src[x] << 24);
        }
      }
    }
  }
  return 1;
}

int WebPPictureARGBToYUVA(WebPPicture* picture, WebPEncCSP colorspace) {
  if (picture == NULL) return 0;
  if (picture->argb == NULL) {
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_NULL_PARAMETER);
  } else {
    const uint8_t* const argb = (const uint8_t*)picture->argb;
    const uint8_t* const r = ALPHA_IS_LAST ? argb + 2 : argb + 1;
    const uint8_t* const g = ALPHA_IS_LAST ? argb + 1 : argb + 2;
    const uint8_t* const b = ALPHA_IS_LAST ? argb + 0 : argb + 3;
    const uint8_t* const a = ALPHA_IS_LAST ? argb + 3 : argb + 0;
    // We work on a tmp copy of 'picture', because ImportYUVAFromRGBA()
    // would be calling WebPPictureFree(picture) otherwise.
    WebPPicture tmp = *picture;
    PictureResetARGB(&tmp);  // reset ARGB buffer so that it's not free()'d.
    tmp.use_argb = 0;
    tmp.colorspace = colorspace & WEBP_CSP_UV_MASK;
    if (!ImportYUVAFromRGBA(r, g, b, a, 4, 4 * picture->argb_stride, &tmp)) {
      return WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
    }
    // Copy back the YUV specs into 'picture'.
    tmp.argb = picture->argb;
    tmp.argb_stride = picture->argb_stride;
    tmp.memory_argb_ = picture->memory_argb_;
    *picture = tmp;
  }
  return 1;
}
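
// Usage sketch (illustrative): a picture imported as ARGB can be converted
// to 4:2:0 YUV(A) before lossy encoding, and converted back for inspection.
//
//   if (!WebPPictureARGBToYUVA(&pic, WEBP_YUV420)) { /* conversion failed */ }
//   // ... lossy encoding works on the YUV(A) planes ...
//   if (!WebPPictureYUVAToARGB(&pic)) { /* conversion failed */ }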

//------------------------------------------------------------------------------
// Helper: clean up fully transparent area to help compressibility.

#define SIZE 8
#define SIZE2 (SIZE / 2)
static int is_transparent_area(const uint8_t* ptr, int stride, int size) {
  int y, x;
  for (y = 0; y < size; ++y) {
    for (x = 0; x < size; ++x) {
      if (ptr[x]) {
        return 0;
      }
    }
    ptr += stride;
  }
  return 1;
}

static WEBP_INLINE void flatten(uint8_t* ptr, int v, int stride, int size) {
  int y;
  for (y = 0; y < size; ++y) {
    memset(ptr, v, size);
    ptr += stride;
  }
}

void WebPCleanupTransparentArea(WebPPicture* pic) {
  int x, y, w, h;
  const uint8_t* a_ptr;
  int values[3] = { 0 };

  if (pic == NULL) return;

  a_ptr = pic->a;
  if (a_ptr == NULL) return;    // nothing to do

  w = pic->width / SIZE;
  h = pic->height / SIZE;
  for (y = 0; y < h; ++y) {
    int need_reset = 1;
    for (x = 0; x < w; ++x) {
      const int off_a = (y * pic->a_stride + x) * SIZE;
      const int off_y = (y * pic->y_stride + x) * SIZE;
      const int off_uv = (y * pic->uv_stride + x) * SIZE2;
      if (is_transparent_area(a_ptr + off_a, pic->a_stride, SIZE)) {
        if (need_reset) {
          values[0] = pic->y[off_y];
          values[1] = pic->u[off_uv];
          values[2] = pic->v[off_uv];
          need_reset = 0;
        }
        flatten(pic->y + off_y, values[0], pic->y_stride, SIZE);
        flatten(pic->u + off_uv, values[1], pic->uv_stride, SIZE2);
        flatten(pic->v + off_uv, values[2], pic->uv_stride, SIZE2);
      } else {
        need_reset = 1;
      }
    }
    // ignore the left-overs on right/bottom
  }
}

#undef SIZE
#undef SIZE2

//------------------------------------------------------------------------------
// Distortion

// Max value returned in case of exact similarity.
static const double kMinDistortion_dB = 99.;

int WebPPictureDistortion(const WebPPicture* pic1, const WebPPicture* pic2,
                          int type, float result[5]) {
  int c;
  DistoStats stats[5];
  int has_alpha;

  if (pic1 == NULL || pic2 == NULL ||
      pic1->width != pic2->width || pic1->height != pic2->height ||
      pic1->y == NULL || pic2->y == NULL ||
      pic1->u == NULL || pic2->u == NULL ||
      pic1->v == NULL || pic2->v == NULL ||
      result == NULL) {
    return 0;
  }
  // TODO(skal): provide distortion for ARGB too.
  if (pic1->use_argb == 1 || pic1->use_argb != pic2->use_argb) {
    return 0;
  }

  has_alpha = !!(pic1->colorspace & WEBP_CSP_ALPHA_BIT);
  if (has_alpha != !!(pic2->colorspace & WEBP_CSP_ALPHA_BIT) ||
      (has_alpha && (pic1->a == NULL || pic2->a == NULL))) {
    return 0;
  }

  memset(stats, 0, sizeof(stats));
  VP8SSIMAccumulatePlane(pic1->y, pic1->y_stride,
                         pic2->y, pic2->y_stride,
                         pic1->width, pic1->height, &stats[0]);
  VP8SSIMAccumulatePlane(pic1->u, pic1->uv_stride,
                         pic2->u, pic2->uv_stride,
                         (pic1->width + 1) >> 1, (pic1->height + 1) >> 1,
                         &stats[1]);
  VP8SSIMAccumulatePlane(pic1->v, pic1->uv_stride,
                         pic2->v, pic2->uv_stride,
                         (pic1->width + 1) >> 1, (pic1->height + 1) >> 1,
                         &stats[2]);
  if (has_alpha) {
    VP8SSIMAccumulatePlane(pic1->a, pic1->a_stride,
                           pic2->a, pic2->a_stride,
                           pic1->width, pic1->height, &stats[3]);
  }
  for (c = 0; c <= 4; ++c) {
    if (type == 1) {
      const double v = VP8SSIMGet(&stats[c]);
      result[c] = (float)((v < 1.) ? -10.0 * log10(1. - v)
                                   : kMinDistortion_dB);
    } else {
      const double v = VP8SSIMGetSquaredError(&stats[c]);
      result[c] = (float)((v > 0.) ? -4.3429448 * log(v / (255 * 255.))
                                   : kMinDistortion_dB);
    }
    // Accumulate forward
    if (c < 4) VP8SSIMAddStats(&stats[c], &stats[4]);
  }
  return 1;
}
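
// Usage sketch (illustrative; interpretation follows the code above): 'result'
// receives one dB value per plane plus an aggregate -- Y, U, V, A and the
// overall figure accumulated in stats[4] -- capped at kMinDistortion_dB for
// identical planes. type == 1 takes the SSIM-based path, anything else the
// PSNR-style squared-error path.
//
//   float disto[5];
//   if (WebPPictureDistortion(&original, &decoded, 0, disto)) {
//     // disto[4] is the overall value; pass 1 instead of 0 for SSIM.
//   }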

//------------------------------------------------------------------------------
// Simplest high-level calls:

typedef int (*Importer)(WebPPicture* const, const uint8_t* const, int);

static size_t Encode(const uint8_t* rgba, int width, int height, int stride,
                     Importer import, float quality_factor, int lossless,
                     uint8_t** output) {
  WebPPicture pic;
  WebPConfig config;
  WebPMemoryWriter wrt;
  int ok;

  if (!WebPConfigPreset(&config, WEBP_PRESET_DEFAULT, quality_factor) ||
      !WebPPictureInit(&pic)) {
    return 0;  // shouldn't happen, except if system installation is broken
  }

  config.lossless = !!lossless;
  pic.use_argb = !!lossless;
  pic.width = width;
  pic.height = height;
  pic.writer = WebPMemoryWrite;
  pic.custom_ptr = &wrt;
  WebPMemoryWriterInit(&wrt);

  ok = import(&pic, rgba, stride) && WebPEncode(&config, &pic);
  WebPPictureFree(&pic);
  if (!ok) {
    free(wrt.mem);
    *output = NULL;
    return 0;
  }
  *output = wrt.mem;
  return wrt.size;
}

#define ENCODE_FUNC(NAME, IMPORTER)                                     \
size_t NAME(const uint8_t* in, int w, int h, int bps, float q,          \
            uint8_t** out) {                                            \
  return Encode(in, w, h, bps, IMPORTER, q, 0, out);                    \
}

ENCODE_FUNC(WebPEncodeRGB, WebPPictureImportRGB);
ENCODE_FUNC(WebPEncodeBGR, WebPPictureImportBGR);
ENCODE_FUNC(WebPEncodeRGBA, WebPPictureImportRGBA);
ENCODE_FUNC(WebPEncodeBGRA, WebPPictureImportBGRA);

#undef ENCODE_FUNC

#define LOSSLESS_DEFAULT_QUALITY 70.
#define LOSSLESS_ENCODE_FUNC(NAME, IMPORTER)                                 \
size_t NAME(const uint8_t* in, int w, int h, int bps, uint8_t** out) {       \
  return Encode(in, w, h, bps, IMPORTER, LOSSLESS_DEFAULT_QUALITY, 1, out);  \
}

LOSSLESS_ENCODE_FUNC(WebPEncodeLosslessRGB, WebPPictureImportRGB);
LOSSLESS_ENCODE_FUNC(WebPEncodeLosslessBGR, WebPPictureImportBGR);
LOSSLESS_ENCODE_FUNC(WebPEncodeLosslessRGBA, WebPPictureImportRGBA);
LOSSLESS_ENCODE_FUNC(WebPEncodeLosslessBGRA, WebPPictureImportBGRA);

#undef LOSSLESS_ENCODE_FUNC
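
// Usage sketch (hypothetical buffer and size names): the one-shot helpers
// return the compressed size (0 on failure) and hand back a malloc'ed buffer
// that the caller must free().
//
//   uint8_t* out = NULL;
//   const size_t out_size =
//       WebPEncodeRGBA(rgba_buffer, w, h, 4 * w, 75.f, &out);
//   if (out_size > 0) {
//     // ... write 'out' somewhere ...
//   }
//   free(out);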

//------------------------------------------------------------------------------

#if defined(__cplusplus) || defined(c_plusplus)
}    // extern "C"
#endif