// Copyright 2011 Google Inc. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the COPYING file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
// -----------------------------------------------------------------------------
//
// WebPPicture utils: colorspace conversion, crop, ...
//
// Author: Skal (pascal.massimino@gmail.com)
#include <assert.h>
#include <stdlib.h>
#include <math.h>

#include "./vp8enci.h"
#include "../utils/alpha_processing.h"
#include "../utils/rescaler.h"
#include "../utils/utils.h"
#include "../dsp/dsp.h"
#include "../dsp/yuv.h"

#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
#define HALVE(x) (((x) + 1) >> 1)
#define IS_YUV_CSP(csp, YUV_CSP) (((csp) & WEBP_CSP_UV_MASK) == (YUV_CSP))

static const union {
  uint32_t argb;
  uint8_t bytes[4];
} test_endian = { 0xff000000u };
#define ALPHA_IS_LAST (test_endian.bytes[3] == 0xff)

static WEBP_INLINE uint32_t MakeARGB32(int r, int g, int b) {
  return (0xff000000u | (r << 16) | (g << 8) | b);
}

//------------------------------------------------------------------------------
// WebPPicture
//------------------------------------------------------------------------------
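// Allocates the pixel buffers owned by 'picture'. Depending on
// picture->use_argb this is either a single ARGB buffer, or one contiguous
// block carved into the Y/U/V planes, the optional alpha plane and the
// optional full-resolution U0/V0 planes. Any previously owned buffer is
// released first through WebPPictureFree().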
int WebPPictureAlloc(WebPPicture* picture) {
  if (picture != NULL) {
    const WebPEncCSP uv_csp = picture->colorspace & WEBP_CSP_UV_MASK;
    const int has_alpha = picture->colorspace & WEBP_CSP_ALPHA_BIT;
    const int width = picture->width;
    const int height = picture->height;

    if (!picture->use_argb) {
      const int y_stride = width;
      const int uv_width = HALVE(width);
      const int uv_height = HALVE(height);
      const int uv_stride = uv_width;
      int uv0_stride = 0;
      int a_width, a_stride;
      uint64_t y_size, uv_size, uv0_size, a_size, total_size;
      uint8_t* mem;

      // U/V
      switch (uv_csp) {
        case WEBP_YUV420:
          break;
#ifdef WEBP_EXPERIMENTAL_FEATURES
        case WEBP_YUV400:    // for now, we'll just reset the U/V samples
          break;
        case WEBP_YUV422:
          uv0_stride = uv_width;
          break;
        case WEBP_YUV444:
          uv0_stride = width;
          break;
#endif
        default:
          return 0;
      }
      uv0_size = height * uv0_stride;

      // alpha
      a_width = has_alpha ? width : 0;
      a_stride = a_width;
      y_size = (uint64_t)y_stride * height;
      uv_size = (uint64_t)uv_stride * uv_height;
      a_size = (uint64_t)a_stride * height;

      total_size = y_size + a_size + 2 * uv_size + 2 * uv0_size;

      // Security and validation checks
      if (width <= 0 || height <= 0 ||       // luma/alpha param error
          uv_width < 0 || uv_height < 0) {   // u/v param error
        return 0;
      }
      // Clear previous buffer and allocate a new one.
      WebPPictureFree(picture);   // erase previous buffer
      mem = (uint8_t*)WebPSafeMalloc(total_size, sizeof(*mem));
      if (mem == NULL) return 0;

      // From now on, we're in the clear, we can no longer fail...
      picture->memory_ = (void*)mem;
      picture->y_stride = y_stride;
      picture->uv_stride = uv_stride;
      picture->a_stride = a_stride;
      picture->uv0_stride = uv0_stride;
      // TODO(skal): we could align the y/u/v planes and adjust stride.
      picture->y = mem;
      mem += y_size;

      picture->u = mem;
      mem += uv_size;
      picture->v = mem;
      mem += uv_size;

      if (a_size) {
        picture->a = mem;
        mem += a_size;
      }
      if (uv0_size) {
        picture->u0 = mem;
        mem += uv0_size;
        picture->v0 = mem;
        mem += uv0_size;
      }
      (void)mem;  // makes the static analyzer happy
    } else {
      void* memory;
      const uint64_t argb_size = (uint64_t)width * height;
      if (width <= 0 || height <= 0) {
        return 0;
      }
      // Clear previous buffer and allocate a new one.
      WebPPictureFree(picture);   // erase previous buffer
      memory = WebPSafeMalloc(argb_size, sizeof(*picture->argb));
      if (memory == NULL) return 0;

      // TODO(skal): align plane to cache line?
      picture->memory_argb_ = memory;
      picture->argb = (uint32_t*)memory;
      picture->argb_stride = width;
    }
  }
  return 1;
}

// Remove reference to the ARGB buffer (doesn't free anything).
static void PictureResetARGB(WebPPicture* const picture) {
  picture->memory_argb_ = NULL;
  picture->argb = NULL;
  picture->argb_stride = 0;
}

// Remove reference to the YUVA buffer (doesn't free anything).
static void PictureResetYUVA(WebPPicture* const picture) {
  picture->memory_ = NULL;
  picture->y = picture->u = picture->v = picture->a = NULL;
  picture->u0 = picture->v0 = NULL;
  picture->y_stride = picture->uv_stride = 0;
  picture->a_stride = 0;
  picture->uv0_stride = 0;
}

// Grab the 'specs' (writer, *opaque, width, height...) from 'src' and copy them
// into 'dst'. Mark 'dst' as not owning any memory.
static void WebPPictureGrabSpecs(const WebPPicture* const src,
                                 WebPPicture* const dst) {
  assert(src != NULL && dst != NULL);
  *dst = *src;
  PictureResetYUVA(dst);
  PictureResetARGB(dst);
}

// Allocate a new argb buffer, discarding any existing one and preserving
// the other YUV(A) buffer.
static int PictureAllocARGB(WebPPicture* const picture) {
  WebPPicture tmp;
  free(picture->memory_argb_);
  PictureResetARGB(picture);
  picture->use_argb = 1;
  WebPPictureGrabSpecs(picture, &tmp);
  if (!WebPPictureAlloc(&tmp)) {
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
  }
  picture->memory_argb_ = tmp.memory_argb_;
  picture->argb = tmp.argb;
  picture->argb_stride = tmp.argb_stride;
  return 1;
}

// Release memory owned by 'picture' (both YUV and ARGB buffers).
void WebPPictureFree(WebPPicture* picture) {
  if (picture != NULL) {
    free(picture->memory_);
    free(picture->memory_argb_);
    PictureResetYUVA(picture);
    PictureResetARGB(picture);
  }
}
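// A minimal lifecycle sketch (hypothetical caller code, for illustration
// only):
//   WebPPicture pic;
//   if (!WebPPictureInit(&pic)) return 0;
//   pic.width = width;
//   pic.height = height;
//   if (!WebPPictureAlloc(&pic)) return 0;  // the picture owns its buffers
//   ... fill pic.y/u/v (or pic.argb when use_argb is set) ...
//   WebPPictureFree(&pic);                  // releases everything it owns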
//------------------------------------------------------------------------------
// Picture copying

// Not worth moving to dsp/enc.c (only used here).
static void CopyPlane(const uint8_t* src, int src_stride,
                      uint8_t* dst, int dst_stride, int width, int height) {
  while (height-- > 0) {
    memcpy(dst, src, width);
    src += src_stride;
    dst += dst_stride;
  }
}

// Adjust top-left corner to chroma sample position.
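// For 4:2:0 both 'left' and 'top' are forced to even values, and for 4:2:2
// only 'left' is, so that the cropped or viewed area stays aligned with the
// sub-sampled chroma grid.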
static void SnapTopLeftPosition(const WebPPicture* const pic,
                                int* const left, int* const top) {
  if (!pic->use_argb) {
    const int is_yuv422 = IS_YUV_CSP(pic->colorspace, WEBP_YUV422);
    if (IS_YUV_CSP(pic->colorspace, WEBP_YUV420) || is_yuv422) {
      *left &= ~1;
      if (!is_yuv422) *top &= ~1;
    }
  }
}

// Adjust top-left corner and verify that the sub-rectangle is valid.
static int AdjustAndCheckRectangle(const WebPPicture* const pic,
                                   int* const left, int* const top,
                                   int width, int height) {
  SnapTopLeftPosition(pic, left, top);
  if ((*left) < 0 || (*top) < 0) return 0;
  if (width <= 0 || height <= 0) return 0;
  if ((*left) + width > pic->width) return 0;
  if ((*top) + height > pic->height) return 0;
  return 1;
}
int WebPPictureCopy(const WebPPicture* src, WebPPicture* dst) {
  if (src == NULL || dst == NULL) return 0;
  if (src == dst) return 1;

  WebPPictureGrabSpecs(src, dst);
  if (!WebPPictureAlloc(dst)) return 0;

  if (!src->use_argb) {
    CopyPlane(src->y, src->y_stride,
              dst->y, dst->y_stride, dst->width, dst->height);
    CopyPlane(src->u, src->uv_stride,
              dst->u, dst->uv_stride, HALVE(dst->width), HALVE(dst->height));
    CopyPlane(src->v, src->uv_stride,
              dst->v, dst->uv_stride, HALVE(dst->width), HALVE(dst->height));
    if (dst->a != NULL) {
      CopyPlane(src->a, src->a_stride,
                dst->a, dst->a_stride, dst->width, dst->height);
    }
#ifdef WEBP_EXPERIMENTAL_FEATURES
    if (dst->u0 != NULL) {
      int uv0_width = src->width;
      if (IS_YUV_CSP(dst->colorspace, WEBP_YUV422)) {
        uv0_width = HALVE(uv0_width);
      }
      CopyPlane(src->u0, src->uv0_stride,
                dst->u0, dst->uv0_stride, uv0_width, dst->height);
      CopyPlane(src->v0, src->uv0_stride,
                dst->v0, dst->uv0_stride, uv0_width, dst->height);
    }
#endif
  } else {
    CopyPlane((const uint8_t*)src->argb, 4 * src->argb_stride,
              (uint8_t*)dst->argb, 4 * dst->argb_stride,
              4 * dst->width, dst->height);
  }
  return 1;
}
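// A picture is a 'view' when it does not own its pixel memory (memory_ and
// memory_argb_ are NULL), e.g. when it was set up by WebPPictureView() below.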
int WebPPictureIsView(const WebPPicture* picture) {
  if (picture == NULL) return 0;
  if (picture->use_argb) {
    return (picture->memory_argb_ == NULL);
  }
  return (picture->memory_ == NULL);
}
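// The view set up below shares the source's buffers instead of copying them:
// 'dst' points straight into 'src' memory, owns nothing, and must not be used
// after the source buffers have been freed. Calling WebPPictureFree() on the
// view is harmless since it owns no memory.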
int WebPPictureView(const WebPPicture* src,
                    int left, int top, int width, int height,
                    WebPPicture* dst) {
  if (src == NULL || dst == NULL) return 0;

  // verify rectangle position.
  if (!AdjustAndCheckRectangle(src, &left, &top, width, height)) return 0;

  if (src != dst) {  // beware of aliasing! We don't want to leak 'memory_'.
    WebPPictureGrabSpecs(src, dst);
  }
  dst->width = width;
  dst->height = height;
  if (!src->use_argb) {
    dst->y = src->y + top * src->y_stride + left;
    dst->u = src->u + (top >> 1) * src->uv_stride + (left >> 1);
    dst->v = src->v + (top >> 1) * src->uv_stride + (left >> 1);
    dst->y_stride = src->y_stride;
    dst->uv_stride = src->uv_stride;
    if (src->a != NULL) {
      dst->a = src->a + top * src->a_stride + left;
      dst->a_stride = src->a_stride;
    }
#ifdef WEBP_EXPERIMENTAL_FEATURES
    if (src->u0 != NULL) {
      const int left_pos =
          IS_YUV_CSP(dst->colorspace, WEBP_YUV422) ? (left >> 1) : left;
      dst->u0 = src->u0 + top * src->uv0_stride + left_pos;
      dst->v0 = src->v0 + top * src->uv0_stride + left_pos;
      dst->uv0_stride = src->uv0_stride;
    }
#endif
  } else {
    dst->argb = src->argb + top * src->argb_stride + left;
    dst->argb_stride = src->argb_stride;
  }
  return 1;
}
//------------------------------------------------------------------------------
// Picture cropping

int WebPPictureCrop(WebPPicture* pic,
                    int left, int top, int width, int height) {
  WebPPicture tmp;

  if (pic == NULL) return 0;
  if (!AdjustAndCheckRectangle(pic, &left, &top, width, height)) return 0;

  WebPPictureGrabSpecs(pic, &tmp);
  tmp.width = width;
  tmp.height = height;
  if (!WebPPictureAlloc(&tmp)) return 0;

  if (!pic->use_argb) {
    const int y_offset = top * pic->y_stride + left;
    const int uv_offset = (top / 2) * pic->uv_stride + left / 2;
    CopyPlane(pic->y + y_offset, pic->y_stride,
              tmp.y, tmp.y_stride, width, height);
    CopyPlane(pic->u + uv_offset, pic->uv_stride,
              tmp.u, tmp.uv_stride, HALVE(width), HALVE(height));
    CopyPlane(pic->v + uv_offset, pic->uv_stride,
              tmp.v, tmp.uv_stride, HALVE(width), HALVE(height));

    if (tmp.a != NULL) {
      const int a_offset = top * pic->a_stride + left;
      CopyPlane(pic->a + a_offset, pic->a_stride,
                tmp.a, tmp.a_stride, width, height);
    }
#ifdef WEBP_EXPERIMENTAL_FEATURES
    if (tmp.u0 != NULL) {
      int w = width;
      int left_pos = left;
      if (IS_YUV_CSP(tmp.colorspace, WEBP_YUV422)) {
        w = HALVE(w);
        left_pos = HALVE(left_pos);
      }
      CopyPlane(pic->u0 + top * pic->uv0_stride + left_pos, pic->uv0_stride,
                tmp.u0, tmp.uv0_stride, w, height);
      CopyPlane(pic->v0 + top * pic->uv0_stride + left_pos, pic->uv0_stride,
                tmp.v0, tmp.uv0_stride, w, height);
    }
#endif
  } else {
    const uint8_t* const src =
        (const uint8_t*)(pic->argb + top * pic->argb_stride + left);
    CopyPlane(src, pic->argb_stride * 4,
              (uint8_t*)tmp.argb, tmp.argb_stride * 4,
              width * 4, height);
  }
  WebPPictureFree(pic);
  *pic = tmp;
  return 1;
}
//------------------------------------------------------------------------------
// Simple picture rescaler
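// 'work' is the scratch buffer required by WebPRescalerInit(); it must hold
// at least 2 * dst_width * num_channels int32 values, which is what the
// callers below allocate.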
static void RescalePlane(const uint8_t* src,
                         int src_width, int src_height, int src_stride,
                         uint8_t* dst,
                         int dst_width, int dst_height, int dst_stride,
                         int32_t* const work,
                         int num_channels) {
  WebPRescaler rescaler;
  int y = 0;
  WebPRescalerInit(&rescaler, src_width, src_height,
                   dst, dst_width, dst_height, dst_stride,
                   num_channels,
                   src_width, dst_width,
                   src_height, dst_height,
                   work);
  memset(work, 0, 2 * dst_width * num_channels * sizeof(*work));
  while (y < src_height) {
    y += WebPRescalerImport(&rescaler, src_height - y,
                            src + y * src_stride, src_stride);
    WebPRescalerExport(&rescaler);
  }
}
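// The two helpers below apply (inverse == 0) or remove (inverse == 1) the
// alpha pre-multiplication, row by row, on the ARGB samples resp. the luma
// plane. WebPPictureRescale() wraps them around RescalePlane() so that the
// interpolation happens on alpha-weighted values.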
static void AlphaMultiplyARGB(WebPPicture* const pic, int inverse) {
  uint32_t* ptr = pic->argb;
  int y;
  for (y = 0; y < pic->height; ++y) {
    WebPMultARGBRow(ptr, pic->width, inverse);
    ptr += pic->argb_stride;
  }
}

static void AlphaMultiplyY(WebPPicture* const pic, int inverse) {
  const uint8_t* ptr_a = pic->a;
  if (ptr_a != NULL) {
    uint8_t* ptr_y = pic->y;
    int y;
    for (y = 0; y < pic->height; ++y) {
      WebPMultRow(ptr_y, ptr_a, pic->width, inverse);
      ptr_y += pic->y_stride;
      ptr_a += pic->a_stride;
    }
  }
}
int WebPPictureRescale(WebPPicture* pic, int width, int height) {
  WebPPicture tmp;
  int prev_width, prev_height;
  int32_t* work;

  if (pic == NULL) return 0;
  prev_width = pic->width;
  prev_height = pic->height;
  // if width is unspecified, scale original proportionally to height ratio.
  if (width == 0) {
    width = (prev_width * height + prev_height / 2) / prev_height;
  }
  // if height is unspecified, scale original proportionally to width ratio.
  if (height == 0) {
    height = (prev_height * width + prev_width / 2) / prev_width;
  }
  // Check if the overall dimensions still make sense.
  if (width <= 0 || height <= 0) return 0;

  WebPPictureGrabSpecs(pic, &tmp);
  tmp.width = width;
  tmp.height = height;
  if (!WebPPictureAlloc(&tmp)) return 0;

  if (!pic->use_argb) {
    work = (int32_t*)WebPSafeMalloc(2ULL * width, sizeof(*work));
    if (work == NULL) {
      WebPPictureFree(&tmp);
      return 0;
    }
    // If present, we need to rescale alpha first (for AlphaMultiplyY).
    if (pic->a != NULL) {
      RescalePlane(pic->a, prev_width, prev_height, pic->a_stride,
                   tmp.a, width, height, tmp.a_stride, work, 1);
    }

    // We take transparency into account on the luma plane only. That's not
    // totally exact blending, but still is a good approximation.
    AlphaMultiplyY(pic, 0);
    RescalePlane(pic->y, prev_width, prev_height, pic->y_stride,
                 tmp.y, width, height, tmp.y_stride, work, 1);
    AlphaMultiplyY(&tmp, 1);

    RescalePlane(pic->u,
                 HALVE(prev_width), HALVE(prev_height), pic->uv_stride,
                 tmp.u,
                 HALVE(width), HALVE(height), tmp.uv_stride, work, 1);
    RescalePlane(pic->v,
                 HALVE(prev_width), HALVE(prev_height), pic->uv_stride,
                 tmp.v,
                 HALVE(width), HALVE(height), tmp.uv_stride, work, 1);

#ifdef WEBP_EXPERIMENTAL_FEATURES
    if (tmp.u0 != NULL) {
      const int s = IS_YUV_CSP(tmp.colorspace, WEBP_YUV422) ? 2 : 1;
      RescalePlane(
          pic->u0, (prev_width + s / 2) / s, prev_height, pic->uv0_stride,
          tmp.u0, (width + s / 2) / s, height, tmp.uv0_stride, work, 1);
      RescalePlane(
          pic->v0, (prev_width + s / 2) / s, prev_height, pic->uv0_stride,
          tmp.v0, (width + s / 2) / s, height, tmp.uv0_stride, work, 1);
    }
#endif
  } else {
    work = (int32_t*)WebPSafeMalloc(2ULL * width * 4, sizeof(*work));
    if (work == NULL) {
      WebPPictureFree(&tmp);
      return 0;
    }
    // In order to correctly interpolate colors, we need to apply the alpha
    // weighting first (black-matting), scale the RGB values, and remove
    // the premultiplication afterward (while preserving the alpha channel).
    AlphaMultiplyARGB(pic, 0);
    RescalePlane((const uint8_t*)pic->argb, prev_width, prev_height,
                 pic->argb_stride * 4,
                 (uint8_t*)tmp.argb, width, height,
                 tmp.argb_stride * 4,
                 work, 4);
    AlphaMultiplyARGB(&tmp, 1);
  }
  WebPPictureFree(pic);
  free(work);
  *pic = tmp;
  return 1;
}
//------------------------------------------------------------------------------
// WebPMemoryWriter: Write-to-memory

void WebPMemoryWriterInit(WebPMemoryWriter* writer) {
  writer->mem = NULL;
  writer->size = 0;
  writer->max_size = 0;
}
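// Appends 'data' to the memory writer held in picture->custom_ptr, growing
// the buffer geometrically when needed (at least doubled, never below
// 8192 bytes and always large enough for the new data).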
int WebPMemoryWrite(const uint8_t* data, size_t data_size,
                    const WebPPicture* picture) {
  WebPMemoryWriter* const w = (WebPMemoryWriter*)picture->custom_ptr;
  uint64_t next_size;
  if (w == NULL) {
    return 1;
  }
  next_size = (uint64_t)w->size + data_size;
  if (next_size > w->max_size) {
    uint8_t* new_mem;
    uint64_t next_max_size = 2ULL * w->max_size;
    if (next_max_size < next_size) next_max_size = next_size;
    if (next_max_size < 8192ULL) next_max_size = 8192ULL;
    new_mem = (uint8_t*)WebPSafeMalloc(next_max_size, 1);
    if (new_mem == NULL) {
      return 0;
    }
    if (w->size > 0) {
      memcpy(new_mem, w->mem, w->size);
    }
    free(w->mem);
    w->mem = new_mem;
    // down-cast is ok, thanks to WebPSafeMalloc
    w->max_size = (size_t)next_max_size;
  }
  if (data_size > 0) {
    memcpy(w->mem + w->size, data, data_size);
    w->size += data_size;
  }
  return 1;
}
//------------------------------------------------------------------------------
// Detection of non-trivial transparency

// Returns true if alpha[] has non-0xff values.
static int CheckNonOpaque(const uint8_t* alpha, int width, int height,
                          int x_step, int y_step) {
  if (alpha == NULL) return 0;
  while (height-- > 0) {
    int x;
    for (x = 0; x < width * x_step; x += x_step) {
      if (alpha[x] != 0xff) return 1;  // TODO(skal): check 4/8 bytes at a time.
    }
    alpha += y_step;
  }
  return 0;
}

// Checking for the presence of non-opaque alpha.
int WebPPictureHasTransparency(const WebPPicture* picture) {
  if (picture == NULL) return 0;
  if (!picture->use_argb) {
    return CheckNonOpaque(picture->a, picture->width, picture->height,
                          1, picture->a_stride);
  } else {
    int x, y;
    const uint32_t* argb = picture->argb;
    if (argb == NULL) return 0;
    for (y = 0; y < picture->height; ++y) {
      for (x = 0; x < picture->width; ++x) {
        if (argb[x] < 0xff000000u) return 1;   // test any alpha values != 0xff
      }
      argb += picture->argb_stride;
    }
  }
  return 0;
}
//------------------------------------------------------------------------------
// RGB -> YUV conversion

#define DITHER_FIX 8   // fixed-point precision for dithering

#define kRandomTableSize 55
static const uint32_t kRandomTable[kRandomTableSize] = {  // 31b-range values
  0x0de15230, 0x03b31886, 0x775faccb, 0x1c88626a, 0x68385c55, 0x14b3b828,
  0x4a85fef8, 0x49ddb84b, 0x64fcf397, 0x5c550289, 0x4a290000, 0x0d7ec1da,
  0x5940b7ab, 0x5492577d, 0x4e19ca72, 0x38d38c69, 0x0c01ee65, 0x32a1755f,
  0x5437f652, 0x5abb2c32, 0x0faa57b1, 0x73f533e7, 0x685feeda, 0x7563cce2,
  0x6e990e83, 0x4730a7ed, 0x4fc0d9c6, 0x496b153c, 0x4f1403fa, 0x541afb0c,
  0x73990b32, 0x26d7cb1c, 0x6fcc3706, 0x2cbb77d8, 0x75762f2a, 0x6425ccdd,
  0x24b35461, 0x0a7d8715, 0x220414a8, 0x141ebf67, 0x56b41583, 0x73e502e3,
  0x44cab16f, 0x28264d42, 0x73baaefb, 0x0a50ebed, 0x1d6ab6fb, 0x0d3ad40b,
  0x35db3b68, 0x2b081e83, 0x77ce6b95, 0x5181e5f0, 0x78853bbc, 0x009f9494,
  0x27e5ed3c
};

typedef struct {
  int index1_, index2_;
  uint32_t tab_[kRandomTableSize];
  int amp_;
} VP8Random;
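// Initializes the generator state from kRandomTable. 'dithering' is clamped
// to [0, 1] and stored as a DITHER_FIX fixed-point amplitude in amp_
// (0 disables dithering entirely).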
static void InitRandom(VP8Random* const rg, float dithering) {
  memcpy(rg->tab_, kRandomTable, sizeof(rg->tab_));
  rg->index1_ = 0;
  rg->index2_ = 31;
  rg->amp_ = (dithering < 0.0) ? 0
           : (dithering > 1.0) ? (1 << DITHER_FIX)
           : (uint32_t)((1 << DITHER_FIX) * dithering);
}

// D.Knuth's Difference-based random generator.
static WEBP_INLINE int Random(VP8Random* const rg, int num_bits) {
  int diff;
  assert(num_bits + DITHER_FIX <= 31);
  diff = rg->tab_[rg->index1_] - rg->tab_[rg->index2_];
  if (diff < 0) diff += (1u << 31);
  rg->tab_[rg->index1_] = diff;
  if (++rg->index1_ == kRandomTableSize) rg->index1_ = 0;
  if (++rg->index2_ == kRandomTableSize) rg->index2_ = 0;
  diff = (diff << 1) >> (32 - num_bits);    // sign-extend, 0-center
  diff = (diff * rg->amp_) >> DITHER_FIX;   // restrict range
  diff += 1 << (num_bits - 1);              // shift back to 0.5-center
  return diff;
}

static int RGBToY(int r, int g, int b, VP8Random* const rg) {
  return VP8RGBToY(r, g, b, Random(rg, YUV_FIX));
}

static int RGBToU(int r, int g, int b, VP8Random* const rg) {
  return VP8RGBToU(r, g, b, Random(rg, YUV_FIX + 2));
}

static int RGBToV(int r, int g, int b, VP8Random* const rg) {
  return VP8RGBToV(r, g, b, Random(rg, YUV_FIX + 2));
}
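// The SUM* helpers below all produce a value scaled as a sum of four 8-bit
// samples (SUM2H/SUM2V double two samples, SUM1 quadruples one), so that the
// result can be fed to RGBToU()/RGBToV(), which work at YUV_FIX + 2 precision.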
// TODO: we can do better than simply 2x2 averaging on U/V samples.
#define SUM4(ptr) ((ptr)[0] + (ptr)[step] + \
                   (ptr)[rgb_stride] + (ptr)[rgb_stride + step])
#define SUM2H(ptr) (2 * (ptr)[0] + 2 * (ptr)[step])
#define SUM2V(ptr) (2 * (ptr)[0] + 2 * (ptr)[rgb_stride])
#define SUM1(ptr)  (4 * (ptr)[0])
#define RGB_TO_UV(x, y, SUM) {                           \
  const int src = (2 * (step * (x) + (y) * rgb_stride)); \
  const int dst = (x) + (y) * picture->uv_stride;        \
  const int r = SUM(r_ptr + src);                        \
  const int g = SUM(g_ptr + src);                        \
  const int b = SUM(b_ptr + src);                        \
  picture->u[dst] = RGBToU(r, g, b, &rg);                \
  picture->v[dst] = RGBToV(r, g, b, &rg);                \
}

#define RGB_TO_UV0(x_in, x_out, y, SUM) {                \
  const int src = (step * (x_in) + (y) * rgb_stride);    \
  const int dst = (x_out) + (y) * picture->uv0_stride;   \
  const int r = SUM(r_ptr + src);                        \
  const int g = SUM(g_ptr + src);                        \
  const int b = SUM(b_ptr + src);                        \
  picture->u0[dst] = RGBToU(r, g, b, &rg);               \
  picture->v0[dst] = RGBToV(r, g, b, &rg);               \
}
static void MakeGray(WebPPicture* const picture) {
  int y;
  const int uv_width = HALVE(picture->width);
  const int uv_height = HALVE(picture->height);
  for (y = 0; y < uv_height; ++y) {
    memset(picture->u + y * picture->uv_stride, 128, uv_width);
    memset(picture->v + y * picture->uv_stride, 128, uv_width);
  }
}
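// Converts the RGB(A) samples pointed to by r_ptr/g_ptr/b_ptr/a_ptr into the
// picture's YUV(A) planes: luma is converted per pixel, chroma is 2x2
// box-averaged (with edge replication for odd widths/heights), and the alpha
// bytes, when present, are copied as-is. 'dithering' in [0, 1] adds a small
// random offset through VP8Random before the fixed-point rounding.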
static int ImportYUVAFromRGBA(const uint8_t* const r_ptr,
                              const uint8_t* const g_ptr,
                              const uint8_t* const b_ptr,
                              const uint8_t* const a_ptr,
                              int step,         // bytes per pixel
                              int rgb_stride,   // bytes per scanline
                              float dithering,
                              WebPPicture* const picture) {
  const WebPEncCSP uv_csp = picture->colorspace & WEBP_CSP_UV_MASK;
  int x, y;
  const int width = picture->width;
  const int height = picture->height;
  const int has_alpha = CheckNonOpaque(a_ptr, width, height, step, rgb_stride);
  VP8Random rg;

  picture->colorspace = uv_csp;
  picture->use_argb = 0;
  if (has_alpha) {
    picture->colorspace |= WEBP_CSP_ALPHA_BIT;
  }
  if (!WebPPictureAlloc(picture)) return 0;

  InitRandom(&rg, dithering);

  // Import luma plane
  for (y = 0; y < height; ++y) {
    for (x = 0; x < width; ++x) {
      const int offset = step * x + y * rgb_stride;
      picture->y[x + y * picture->y_stride] =
          RGBToY(r_ptr[offset], g_ptr[offset], b_ptr[offset], &rg);
    }
  }

  // Downsample U/V plane
  if (uv_csp != WEBP_YUV400) {
    for (y = 0; y < (height >> 1); ++y) {
      for (x = 0; x < (width >> 1); ++x) {
        RGB_TO_UV(x, y, SUM4);
      }
      if (width & 1) {
        RGB_TO_UV(x, y, SUM2V);
      }
    }
    if (height & 1) {
      for (x = 0; x < (width >> 1); ++x) {
        RGB_TO_UV(x, y, SUM2H);
      }
      if (width & 1) {
        RGB_TO_UV(x, y, SUM1);
      }
    }

#ifdef WEBP_EXPERIMENTAL_FEATURES
    // Store original U/V samples too
    if (uv_csp == WEBP_YUV422) {
      for (y = 0; y < height; ++y) {
        for (x = 0; x < (width >> 1); ++x) {
          RGB_TO_UV0(2 * x, x, y, SUM2H);
        }
        if (width & 1) {
          RGB_TO_UV0(2 * x, x, y, SUM1);
        }
      }
    } else if (uv_csp == WEBP_YUV444) {
      for (y = 0; y < height; ++y) {
        for (x = 0; x < width; ++x) {
          RGB_TO_UV0(x, x, y, SUM1);
        }
      }
    }
#endif
  } else {
    MakeGray(picture);
  }

  if (has_alpha) {
    assert(step >= 4);
    assert(picture->a != NULL);
    for (y = 0; y < height; ++y) {
      for (x = 0; x < width; ++x) {
        picture->a[x + y * picture->a_stride] =
            a_ptr[step * x + y * rgb_stride];
      }
    }
  }
  return 1;
}
static int Import(WebPPicture* const picture,
                  const uint8_t* const rgb, int rgb_stride,
                  int step, int swap_rb, int import_alpha) {
  const uint8_t* const r_ptr = rgb + (swap_rb ? 2 : 0);
  const uint8_t* const g_ptr = rgb + 1;
  const uint8_t* const b_ptr = rgb + (swap_rb ? 0 : 2);
  const uint8_t* const a_ptr = import_alpha ? rgb + 3 : NULL;
  const int width = picture->width;
  const int height = picture->height;

  if (!picture->use_argb) {
    return ImportYUVAFromRGBA(r_ptr, g_ptr, b_ptr, a_ptr, step, rgb_stride,
                              0.f /* no dithering */, picture);
  }
  if (import_alpha) {
    picture->colorspace |= WEBP_CSP_ALPHA_BIT;
  } else {
    picture->colorspace &= ~WEBP_CSP_ALPHA_BIT;
  }
  if (!WebPPictureAlloc(picture)) return 0;

  if (!import_alpha) {
    int x, y;
    for (y = 0; y < height; ++y) {
      for (x = 0; x < width; ++x) {
        const int offset = step * x + y * rgb_stride;
        const uint32_t argb =
            MakeARGB32(r_ptr[offset], g_ptr[offset], b_ptr[offset]);
        picture->argb[x + y * picture->argb_stride] = argb;
      }
    }
  } else {
    int x, y;
    assert(step >= 4);
    for (y = 0; y < height; ++y) {
      for (x = 0; x < width; ++x) {
        const int offset = step * x + y * rgb_stride;
        const uint32_t argb = ((uint32_t)a_ptr[offset] << 24) |
                              (r_ptr[offset] << 16) |
                              (g_ptr[offset] <<  8) |
                              (b_ptr[offset]);
        picture->argb[x + y * picture->argb_stride] = argb;
      }
    }
  }
  return 1;
}
#undef SUM4
#undef SUM2V
#undef SUM2H
#undef SUM1
#undef RGB_TO_UV
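// The public importers below only differ in the bytes-per-pixel 'step',
// whether R and B are swapped, and whether the fourth byte is imported as
// alpha (RGBX/BGRX ignore it).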
int WebPPictureImportRGB(WebPPicture* picture,
                         const uint8_t* rgb, int rgb_stride) {
  return Import(picture, rgb, rgb_stride, 3, 0, 0);
}

int WebPPictureImportBGR(WebPPicture* picture,
                         const uint8_t* rgb, int rgb_stride) {
  return Import(picture, rgb, rgb_stride, 3, 1, 0);
}

int WebPPictureImportRGBA(WebPPicture* picture,
                          const uint8_t* rgba, int rgba_stride) {
  return Import(picture, rgba, rgba_stride, 4, 0, 1);
}

int WebPPictureImportBGRA(WebPPicture* picture,
                          const uint8_t* rgba, int rgba_stride) {
  return Import(picture, rgba, rgba_stride, 4, 1, 1);
}

int WebPPictureImportRGBX(WebPPicture* picture,
                          const uint8_t* rgba, int rgba_stride) {
  return Import(picture, rgba, rgba_stride, 4, 0, 0);
}

int WebPPictureImportBGRX(WebPPicture* picture,
                          const uint8_t* rgba, int rgba_stride) {
  return Import(picture, rgba, rgba_stride, 4, 1, 0);
}
//------------------------------------------------------------------------------
// Automatic YUV <-> ARGB conversions.
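// Converts the YUV(A) planes back to a freshly allocated ARGB buffer.
// The input is expected to be YUV420 (with an alpha plane if the colorspace
// has WEBP_CSP_ALPHA_BIT set); chroma is upsampled with the line-pair
// upsampler selected by WebPGetLinePairConverter().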
int WebPPictureYUVAToARGB(WebPPicture* picture) {
  if (picture == NULL) return 0;
  if (picture->memory_ == NULL || picture->y == NULL ||
      picture->u == NULL || picture->v == NULL) {
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_NULL_PARAMETER);
  }
  if ((picture->colorspace & WEBP_CSP_ALPHA_BIT) && picture->a == NULL) {
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_NULL_PARAMETER);
  }
  if ((picture->colorspace & WEBP_CSP_UV_MASK) != WEBP_YUV420) {
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_INVALID_CONFIGURATION);
  }
  // Allocate a new argb buffer (discarding the previous one).
  if (!PictureAllocARGB(picture)) return 0;

  // Convert
  {
    int y;
    const int width = picture->width;
    const int height = picture->height;
    const int argb_stride = 4 * picture->argb_stride;
    uint8_t* dst = (uint8_t*)picture->argb;
    const uint8_t *cur_u = picture->u, *cur_v = picture->v, *cur_y = picture->y;
    WebPUpsampleLinePairFunc upsample = WebPGetLinePairConverter(ALPHA_IS_LAST);

    // First row, with replicated top samples.
    upsample(cur_y, NULL, cur_u, cur_v, cur_u, cur_v, dst, NULL, width);
    cur_y += picture->y_stride;
    dst += argb_stride;
    // Center rows.
    for (y = 1; y + 1 < height; y += 2) {
      const uint8_t* const top_u = cur_u;
      const uint8_t* const top_v = cur_v;
      cur_u += picture->uv_stride;
      cur_v += picture->uv_stride;
      upsample(cur_y, cur_y + picture->y_stride, top_u, top_v, cur_u, cur_v,
               dst, dst + argb_stride, width);
      cur_y += 2 * picture->y_stride;
      dst += 2 * argb_stride;
    }
    // Last row (if needed), with replicated bottom samples.
    if (height > 1 && !(height & 1)) {
      upsample(cur_y, NULL, cur_u, cur_v, cur_u, cur_v, dst, NULL, width);
    }
    // Insert alpha values if needed, in replacement for the default 0xff ones.
    if (picture->colorspace & WEBP_CSP_ALPHA_BIT) {
      for (y = 0; y < height; ++y) {
        uint32_t* const argb_dst = picture->argb + y * picture->argb_stride;
        const uint8_t* const src = picture->a + y * picture->a_stride;
        int x;
        for (x = 0; x < width; ++x) {
          argb_dst[x] = (argb_dst[x] & 0x00ffffffu) | ((uint32_t)src[x] << 24);
        }
      }
    }
  }
  return 1;
}
int WebPPictureARGBToYUVADithered(WebPPicture* picture, WebPEncCSP colorspace,
                                  float dithering) {
  if (picture == NULL) return 0;
  if (picture->argb == NULL) {
    return WebPEncodingSetError(picture, VP8_ENC_ERROR_NULL_PARAMETER);
  } else {
    const uint8_t* const argb = (const uint8_t*)picture->argb;
    const uint8_t* const r = ALPHA_IS_LAST ? argb + 2 : argb + 1;
    const uint8_t* const g = ALPHA_IS_LAST ? argb + 1 : argb + 2;
    const uint8_t* const b = ALPHA_IS_LAST ? argb + 0 : argb + 3;
    const uint8_t* const a = ALPHA_IS_LAST ? argb + 3 : argb + 0;
    // We work on a tmp copy of 'picture', because ImportYUVAFromRGBA()
    // would be calling WebPPictureFree(picture) otherwise.
    WebPPicture tmp = *picture;
    PictureResetARGB(&tmp);  // reset ARGB buffer so that it's not free()'d.
    tmp.use_argb = 0;
    tmp.colorspace = colorspace & WEBP_CSP_UV_MASK;
    if (!ImportYUVAFromRGBA(r, g, b, a, 4, 4 * picture->argb_stride, dithering,
                            &tmp)) {
      return WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY);
    }
    // Copy back the YUV specs into 'picture'.
    tmp.argb = picture->argb;
    tmp.argb_stride = picture->argb_stride;
    tmp.memory_argb_ = picture->memory_argb_;
    *picture = tmp;
  }
  return 1;
}

int WebPPictureARGBToYUVA(WebPPicture* picture, WebPEncCSP colorspace) {
  return WebPPictureARGBToYUVADithered(picture, colorspace, 0.f);
}
//------------------------------------------------------------------------------
// Helper: clean up fully transparent area to help compressibility.

#define SIZE 8
#define SIZE2 (SIZE / 2)
static int is_transparent_area(const uint8_t* ptr, int stride, int size) {
  int y, x;
  for (y = 0; y < size; ++y) {
    for (x = 0; x < size; ++x) {
      if (ptr[x]) {
        return 0;
      }
    }
    ptr += stride;
  }
  return 1;
}

static WEBP_INLINE void flatten(uint8_t* ptr, int v, int stride, int size) {
  int y;
  for (y = 0; y < size; ++y) {
    memset(ptr, v, size);
    ptr += stride;
  }
}
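// Scans the alpha plane in SIZE x SIZE blocks: blocks that are fully
// transparent get their Y/U/V samples flattened to constants sampled at the
// start of the current run of transparent blocks, which improves
// compressibility without any visible effect. The alpha plane itself is left
// untouched.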
void WebPCleanupTransparentArea(WebPPicture* pic) {
  int x, y, w, h;
  const uint8_t* a_ptr;
  int values[3] = { 0 };

  if (pic == NULL) return;

  a_ptr = pic->a;
  if (a_ptr == NULL) return;    // nothing to do

  w = pic->width / SIZE;
  h = pic->height / SIZE;
  for (y = 0; y < h; ++y) {
    int need_reset = 1;
    for (x = 0; x < w; ++x) {
      const int off_a = (y * pic->a_stride + x) * SIZE;
      const int off_y = (y * pic->y_stride + x) * SIZE;
      const int off_uv = (y * pic->uv_stride + x) * SIZE2;
      if (is_transparent_area(a_ptr + off_a, pic->a_stride, SIZE)) {
        if (need_reset) {
          values[0] = pic->y[off_y];
          values[1] = pic->u[off_uv];
          values[2] = pic->v[off_uv];
          need_reset = 0;
        }
        flatten(pic->y + off_y, values[0], pic->y_stride, SIZE);
        flatten(pic->u + off_uv, values[1], pic->uv_stride, SIZE2);
        flatten(pic->v + off_uv, values[2], pic->uv_stride, SIZE2);
      } else {
        need_reset = 1;
      }
    }
    // ignore the left-overs on right/bottom
  }
}

#undef SIZE
#undef SIZE2
//------------------------------------------------------------------------------
// Blend color and remove transparency info
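// Note on the macros below: multiplying by 0x101 and shifting right by 16 is
// a close approximation of dividing by 255. BLEND_10BIT shifts by 18 because
// its ALPHA argument is a sum of four 8-bit alpha values (at most 1020).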
#define BLEND(V0, V1, ALPHA) \
    ((((V0) * (255 - (ALPHA)) + (V1) * (ALPHA)) * 0x101) >> 16)
#define BLEND_10BIT(V0, V1, ALPHA) \
    ((((V0) * (1020 - (ALPHA)) + (V1) * (ALPHA)) * 0x101) >> 18)

void WebPBlendAlpha(WebPPicture* pic, uint32_t background_rgb) {
  const int red = (background_rgb >> 16) & 0xff;
  const int green = (background_rgb >> 8) & 0xff;
  const int blue = (background_rgb >> 0) & 0xff;
  VP8Random rg;
  int x, y;
  if (pic == NULL) return;
  InitRandom(&rg, 0.f);
  if (!pic->use_argb) {
    const int uv_width = (pic->width >> 1);  // omit last pixel during u/v loop
    const int Y0 = RGBToY(red, green, blue, &rg);
    // VP8RGBToU/V expects the u/v values summed over four pixels
    const int U0 = RGBToU(4 * red, 4 * green, 4 * blue, &rg);
    const int V0 = RGBToV(4 * red, 4 * green, 4 * blue, &rg);
    const int has_alpha = pic->colorspace & WEBP_CSP_ALPHA_BIT;
    if (!has_alpha || pic->a == NULL) return;    // nothing to do
    for (y = 0; y < pic->height; ++y) {
      // Luma blending
      uint8_t* const y_ptr = pic->y + y * pic->y_stride;
      uint8_t* const a_ptr = pic->a + y * pic->a_stride;
      for (x = 0; x < pic->width; ++x) {
        const int alpha = a_ptr[x];
        if (alpha < 0xff) {
          y_ptr[x] = BLEND(Y0, y_ptr[x], a_ptr[x]);
        }
      }
      // Chroma blending every even line
      if ((y & 1) == 0) {
        uint8_t* const u = pic->u + (y >> 1) * pic->uv_stride;
        uint8_t* const v = pic->v + (y >> 1) * pic->uv_stride;
        uint8_t* const a_ptr2 =
            (y + 1 == pic->height) ? a_ptr : a_ptr + pic->a_stride;
        for (x = 0; x < uv_width; ++x) {
          // Average four alpha values into a single blending weight.
          // TODO(skal): might lead to visible contouring. Can we do better?
          const int alpha =
              a_ptr[2 * x + 0] + a_ptr[2 * x + 1] +
              a_ptr2[2 * x + 0] + a_ptr2[2 * x + 1];
          u[x] = BLEND_10BIT(U0, u[x], alpha);
          v[x] = BLEND_10BIT(V0, v[x], alpha);
        }
        if (pic->width & 1) {   // rightmost pixel
          const int alpha = 2 * (a_ptr[2 * x + 0] + a_ptr2[2 * x + 0]);
          u[x] = BLEND_10BIT(U0, u[x], alpha);
          v[x] = BLEND_10BIT(V0, v[x], alpha);
        }
      }
      memset(a_ptr, 0xff, pic->width);
    }
  } else {
    uint32_t* argb = pic->argb;
    const uint32_t background = MakeARGB32(red, green, blue);
    for (y = 0; y < pic->height; ++y) {
      for (x = 0; x < pic->width; ++x) {
        const int alpha = (argb[x] >> 24) & 0xff;
        if (alpha != 0xff) {
          if (alpha > 0) {
            int r = (argb[x] >> 16) & 0xff;
            int g = (argb[x] >>  8) & 0xff;
            int b = (argb[x] >>  0) & 0xff;
            r = BLEND(red, r, alpha);
            g = BLEND(green, g, alpha);
            b = BLEND(blue, b, alpha);
            argb[x] = MakeARGB32(r, g, b);
          } else {
            argb[x] = background;
          }
        }
      }
      argb += pic->argb_stride;
    }
  }
}
#undef BLEND
#undef BLEND_10BIT

//------------------------------------------------------------------------------
// local-min distortion
//
// For every pixel in the *reference* picture, we search for the local best
// match in the compressed image. This is not a symmetrical measure.

// search radius. Shouldn't be too large.
#define RADIUS 2
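// Accumulates, over all reference pixels, the smallest squared difference
// found within a (2 * RADIUS + 1) x (2 * RADIUS + 1) window of 'src'
// (clipped at the borders).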
static float AccumulateLSIM(const uint8_t* src, int src_stride,
                            const uint8_t* ref, int ref_stride,
                            int w, int h) {
  int x, y;
  double total_sse = 0.;
  for (y = 0; y < h; ++y) {
    const int y_0 = (y - RADIUS < 0) ? 0 : y - RADIUS;
    const int y_1 = (y + RADIUS + 1 >= h) ? h : y + RADIUS + 1;
    for (x = 0; x < w; ++x) {
      const int x_0 = (x - RADIUS < 0) ? 0 : x - RADIUS;
      const int x_1 = (x + RADIUS + 1 >= w) ? w : x + RADIUS + 1;
      double best_sse = 255. * 255.;
      const double value = (double)ref[y * ref_stride + x];
      int i, j;
      for (j = y_0; j < y_1; ++j) {
        const uint8_t* s = src + j * src_stride;
        for (i = x_0; i < x_1; ++i) {
          const double sse = (double)(s[i] - value) * (s[i] - value);
          if (sse < best_sse) best_sse = sse;
        }
      }
      total_sse += best_sse;
    }
  }
  return (float)total_sse;
}
#undef RADIUS
//------------------------------------------------------------------------------
// Distortion

// Max value returned in case of exact similarity.
static const double kMinDistortion_dB = 99.;
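// 4.3429448... is 10 / ln(10), so for a mean squared error 'v' this evaluates
// to 10 * log10(255^2 / v), i.e. the PSNR in dB, with kMinDistortion_dB
// returned when the error is exactly zero.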
static float GetPSNR(const double v) {
  return (float)((v > 0.) ? -4.3429448 * log(v / (255 * 255.))
                          : kMinDistortion_dB);
}
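// Computes the distortion between 'src' and 'ref': 'type' selects the metric
// (1 = SSIM, 2 or more = LSIM, anything else = PSNR) and the five results are
// reported in dB for Y, U, V, alpha and an overall value, in that order.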
int WebPPictureDistortion(const WebPPicture* src, const WebPPicture* ref,
                          int type, float result[5]) {
  DistoStats stats[5];
  int has_alpha;
  int uv_w, uv_h;

  if (src == NULL || ref == NULL ||
      src->width != ref->width || src->height != ref->height ||
      src->y == NULL || ref->y == NULL ||
      src->u == NULL || ref->u == NULL ||
      src->v == NULL || ref->v == NULL ||
      result == NULL) {
    return 0;
  }
  // TODO(skal): provide distortion for ARGB too.
  if (src->use_argb == 1 || src->use_argb != ref->use_argb) {
    return 0;
  }

  has_alpha = !!(src->colorspace & WEBP_CSP_ALPHA_BIT);
  if (has_alpha != !!(ref->colorspace & WEBP_CSP_ALPHA_BIT) ||
      (has_alpha && (src->a == NULL || ref->a == NULL))) {
    return 0;
  }

  memset(stats, 0, sizeof(stats));

  uv_w = HALVE(src->width);
  uv_h = HALVE(src->height);
  if (type >= 2) {
    float sse[4];
    sse[0] = AccumulateLSIM(src->y, src->y_stride,
                            ref->y, ref->y_stride, src->width, src->height);
    sse[1] = AccumulateLSIM(src->u, src->uv_stride,
                            ref->u, ref->uv_stride, uv_w, uv_h);
    sse[2] = AccumulateLSIM(src->v, src->uv_stride,
                            ref->v, ref->uv_stride, uv_w, uv_h);
    sse[3] = has_alpha ? AccumulateLSIM(src->a, src->a_stride,
                                        ref->a, ref->a_stride,
                                        src->width, src->height)
                       : 0.f;
    result[0] = GetPSNR(sse[0] / (src->width * src->height));
    result[1] = GetPSNR(sse[1] / (uv_w * uv_h));
    result[2] = GetPSNR(sse[2] / (uv_w * uv_h));
    result[3] = GetPSNR(sse[3] / (src->width * src->height));
    {
      double total_sse = sse[0] + sse[1] + sse[2];
      int total_pixels = src->width * src->height + 2 * uv_w * uv_h;
      if (has_alpha) {
        total_pixels += src->width * src->height;
        total_sse += sse[3];
      }
      result[4] = GetPSNR(total_sse / total_pixels);
    }
  } else {
    int c;
    VP8SSIMAccumulatePlane(src->y, src->y_stride,
                           ref->y, ref->y_stride,
                           src->width, src->height, &stats[0]);
    VP8SSIMAccumulatePlane(src->u, src->uv_stride,
                           ref->u, ref->uv_stride,
                           uv_w, uv_h, &stats[1]);
    VP8SSIMAccumulatePlane(src->v, src->uv_stride,
                           ref->v, ref->uv_stride,
                           uv_w, uv_h, &stats[2]);
    if (has_alpha) {
      VP8SSIMAccumulatePlane(src->a, src->a_stride,
                             ref->a, ref->a_stride,
                             src->width, src->height, &stats[3]);
    }
    for (c = 0; c <= 4; ++c) {
      if (type == 1) {
        const double v = VP8SSIMGet(&stats[c]);
        result[c] = (float)((v < 1.) ? -10.0 * log10(1. - v)
                                     : kMinDistortion_dB);
      } else {
        const double v = VP8SSIMGetSquaredError(&stats[c]);
        result[c] = GetPSNR(v);
      }
      // Accumulate forward
      if (c < 4) VP8SSIMAddStats(&stats[c], &stats[4]);
    }
  }
  return 1;
}
//------------------------------------------------------------------------------
// Simplest high-level calls:

typedef int (*Importer)(WebPPicture* const, const uint8_t* const, int);
static size_t Encode(const uint8_t* rgba, int width, int height, int stride,
                     Importer import, float quality_factor, int lossless,
                     uint8_t** output) {
  WebPPicture pic;
  WebPConfig config;
  WebPMemoryWriter wrt;
  int ok;

  if (!WebPConfigPreset(&config, WEBP_PRESET_DEFAULT, quality_factor) ||
      !WebPPictureInit(&pic)) {
    return 0;  // shouldn't happen, except if system installation is broken
  }

  config.lossless = !!lossless;
  pic.use_argb = !!lossless;
  pic.width = width;
  pic.height = height;
  pic.writer = WebPMemoryWrite;
  pic.custom_ptr = &wrt;
  WebPMemoryWriterInit(&wrt);

  ok = import(&pic, rgba, stride) && WebPEncode(&config, &pic);
  WebPPictureFree(&pic);
  if (!ok) {
    free(wrt.mem);
    *output = NULL;
    return 0;
  }
  *output = wrt.mem;
  return wrt.size;
}
#define ENCODE_FUNC(NAME, IMPORTER)                                     \
size_t NAME(const uint8_t* in, int w, int h, int bps, float q,          \
            uint8_t** out) {                                            \
  return Encode(in, w, h, bps, IMPORTER, q, 0, out);                    \
}

ENCODE_FUNC(WebPEncodeRGB, WebPPictureImportRGB)
ENCODE_FUNC(WebPEncodeBGR, WebPPictureImportBGR)
ENCODE_FUNC(WebPEncodeRGBA, WebPPictureImportRGBA)
ENCODE_FUNC(WebPEncodeBGRA, WebPPictureImportBGRA)

#undef ENCODE_FUNC
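// A minimal usage sketch for these one-shot helpers (illustrative caller
// code; the returned buffer is assumed to be released with free(), which
// matches the allocation done in WebPMemoryWrite() above):
//   uint8_t* out = NULL;
//   const size_t size = WebPEncodeRGBA(rgba, width, height, width * 4,
//                                      75.f, &out);
//   if (size > 0) { /* write out[0 .. size - 1] to a .webp file */ }
//   free(out);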
#define LOSSLESS_DEFAULT_QUALITY 70.
#define LOSSLESS_ENCODE_FUNC(NAME, IMPORTER)                                 \
size_t NAME(const uint8_t* in, int w, int h, int bps, uint8_t** out) {       \
  return Encode(in, w, h, bps, IMPORTER, LOSSLESS_DEFAULT_QUALITY, 1, out);  \
}

LOSSLESS_ENCODE_FUNC(WebPEncodeLosslessRGB, WebPPictureImportRGB)
LOSSLESS_ENCODE_FUNC(WebPEncodeLosslessBGR, WebPPictureImportBGR)
LOSSLESS_ENCODE_FUNC(WebPEncodeLosslessRGBA, WebPPictureImportRGBA)
LOSSLESS_ENCODE_FUNC(WebPEncodeLosslessBGRA, WebPPictureImportBGRA)

#undef LOSSLESS_ENCODE_FUNC
//------------------------------------------------------------------------------

#if defined(__cplusplus) || defined(c_plusplus)
}    // extern "C"
#endif