summary refs log tree commit diff stats
path: root/remoting/codec
diff options
context:
space:
mode:
author    wez@chromium.org <wez@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2014-05-12 13:19:13 +0000
committer wez@chromium.org <wez@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2014-05-12 13:19:13 +0000
commit    3684eb98ab743236aabfce4b1efebe4d25df4d3c (patch)
tree      d9241f1c5cb861c94c403d6e938d5ad880295b03 /remoting/codec
parent    dd1ae25d386049557ae066fdb4b148c1297c3d1e (diff)
download  chromium_src-3684eb98ab743236aabfce4b1efebe4d25df4d3c.zip
          chromium_src-3684eb98ab743236aabfce4b1efebe4d25df4d3c.tar.gz
          chromium_src-3684eb98ab743236aabfce4b1efebe4d25df4d3c.tar.bz2
Implement VP9/I444 encode support in the Chromoting host.
This will be selectable by clients that want to avoid I420 artefacts. BUG=260879,134202 Review URL: https://codereview.chromium.org/261753013 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@269774 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'remoting/codec')
-rw-r--r--  remoting/codec/video_decoder_vpx_unittest.cc  2
-rw-r--r--  remoting/codec/video_encoder_vpx.cc           194
-rw-r--r--  remoting/codec/video_encoder_vpx.h            25
3 files changed, 147 insertions, 74 deletions
diff --git a/remoting/codec/video_decoder_vpx_unittest.cc b/remoting/codec/video_decoder_vpx_unittest.cc
index 63cf870..fe77351 100644
--- a/remoting/codec/video_decoder_vpx_unittest.cc
+++ b/remoting/codec/video_decoder_vpx_unittest.cc
@@ -45,7 +45,7 @@ class VideoDecoderVp8Test : public VideoDecoderVpxTest {
class VideoDecoderVp9Test : public VideoDecoderVpxTest {
protected:
VideoDecoderVp9Test() {
- encoder_ = VideoEncoderVpx::CreateForVP9();
+ encoder_ = VideoEncoderVpx::CreateForVP9I420();
decoder_ = VideoDecoderVpx::CreateForVP9();
}
};
diff --git a/remoting/codec/video_encoder_vpx.cc b/remoting/codec/video_encoder_vpx.cc
index 59bb5dc..3b27172 100644
--- a/remoting/codec/video_encoder_vpx.cc
+++ b/remoting/codec/video_encoder_vpx.cc
@@ -7,9 +7,9 @@
#include "base/bind.h"
#include "base/logging.h"
#include "base/sys_info.h"
-#include "media/base/yuv_convert.h"
#include "remoting/base/util.h"
#include "remoting/proto/video.pb.h"
+#include "third_party/libyuv/include/libyuv/convert_from_argb.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_region.h"
@@ -24,10 +24,17 @@ namespace remoting {
namespace {
+// Number of bytes in an RGBx pixel.
+const int kBytesPerRgbPixel = 4;
+
// Defines the dimension of a macro block. This is used to compute the active
// map for the encoder.
const int kMacroBlockSize = 16;
+// Magic encoder profile numbers for I420 and I444 input formats.
+const int kVp9I420ProfileNumber = 0;
+const int kVp9I444ProfileNumber = 1;
+
void SetCommonCodecParameters(const webrtc::DesktopSize& size,
vpx_codec_enc_cfg_t* config) {
// Use millisecond granularity time base.
@@ -90,7 +97,7 @@ ScopedVpxCodec CreateVP8Codec(const webrtc::DesktopSize& size) {
return codec.Pass();
}
-ScopedVpxCodec CreateVP9Codec(const webrtc::DesktopSize& size) {
+ScopedVpxCodec CreateVP9Codec(bool use_i444, const webrtc::DesktopSize& size) {
ScopedVpxCodec codec(new vpx_codec_ctx_t);
// Configure the encoder.
@@ -103,8 +110,8 @@ ScopedVpxCodec CreateVP9Codec(const webrtc::DesktopSize& size) {
SetCommonCodecParameters(size, &config);
- // Configure VP9 for I420 source frames.
- config.g_profile = 0;
+ // Configure VP9 for I420 or I444 source frames.
+ config.g_profile = use_i444 ? kVp9I444ProfileNumber : kVp9I420ProfileNumber;
// Disable quantization entirely, putting the encoder in "lossless" mode.
config.rc_min_quantizer = 0;
@@ -128,18 +135,90 @@ ScopedVpxCodec CreateVP9Codec(const webrtc::DesktopSize& size) {
return codec.Pass();
}
-} // namespace
+void CreateImage(bool use_i444,
+ const webrtc::DesktopSize& size,
+ scoped_ptr<vpx_image_t>* out_image,
+ scoped_ptr<uint8[]>* out_image_buffer) {
+ DCHECK(!size.is_empty());
+
+ scoped_ptr<vpx_image_t> image(new vpx_image_t());
+ memset(image.get(), 0, sizeof(vpx_image_t));
+
+ // libvpx seems to require both to be assigned.
+ image->d_w = size.width();
+ image->w = size.width();
+ image->d_h = size.height();
+ image->h = size.height();
+
+  // libvpx should derive chroma shifts from |fmt| but currently has a bug:
+ // https://code.google.com/p/webm/issues/detail?id=627
+ if (use_i444) {
+ image->fmt = VPX_IMG_FMT_I444;
+ image->x_chroma_shift = 0;
+ image->y_chroma_shift = 0;
+ } else { // I420
+ image->fmt = VPX_IMG_FMT_YV12;
+ image->x_chroma_shift = 1;
+ image->y_chroma_shift = 1;
+ }
+
+ // libyuv's fast-path requires 16-byte aligned pointers and strides, so pad
+ // the Y, U and V planes' strides to multiples of 16 bytes.
+ const int y_stride = ((image->w - 1) & ~15) + 16;
+ const int uv_unaligned_stride = y_stride >> image->x_chroma_shift;
+ const int uv_stride = ((uv_unaligned_stride - 1) & ~15) + 16;
+
+ // libvpx accesses the source image in macro blocks, and will over-read
+ // if the image is not padded out to the next macroblock: crbug.com/119633.
+ // Pad the Y, U and V planes' height out to compensate.
+ // Assuming macroblocks are 16x16, aligning the planes' strides above also
+ // macroblock aligned them.
+ DCHECK_EQ(16, kMacroBlockSize);
+ const int y_rows = ((image->h - 1) & ~(kMacroBlockSize-1)) + kMacroBlockSize;
+ const int uv_rows = y_rows >> image->y_chroma_shift;
+
+ // Allocate a YUV buffer large enough for the aligned data & padding.
+ const int buffer_size = y_stride * y_rows + 2*uv_stride * uv_rows;
+ scoped_ptr<uint8[]> image_buffer(new uint8[buffer_size]);
+
+ // Reset image value to 128 so we just need to fill in the y plane.
+ memset(image_buffer.get(), 128, buffer_size);
+
+ // Fill in the information for |image_|.
+ unsigned char* uchar_buffer =
+ reinterpret_cast<unsigned char*>(image_buffer.get());
+ image->planes[0] = uchar_buffer;
+ image->planes[1] = image->planes[0] + y_stride * y_rows;
+ image->planes[2] = image->planes[1] + uv_stride * uv_rows;
+ image->stride[0] = y_stride;
+ image->stride[1] = uv_stride;
+ image->stride[2] = uv_stride;
+
+ *out_image = image.Pass();
+ *out_image_buffer = image_buffer.Pass();
+}
+
+} // namespace
// static
scoped_ptr<VideoEncoderVpx> VideoEncoderVpx::CreateForVP8() {
return scoped_ptr<VideoEncoderVpx>(
- new VideoEncoderVpx(base::Bind(&CreateVP8Codec)));
+ new VideoEncoderVpx(base::Bind(&CreateVP8Codec),
+ base::Bind(&CreateImage, false)));
+}
+
+// static
+scoped_ptr<VideoEncoderVpx> VideoEncoderVpx::CreateForVP9I420() {
+ return scoped_ptr<VideoEncoderVpx>(
+ new VideoEncoderVpx(base::Bind(&CreateVP9Codec, false),
+ base::Bind(&CreateImage, false)));
}
// static
-scoped_ptr<VideoEncoderVpx> VideoEncoderVpx::CreateForVP9() {
+scoped_ptr<VideoEncoderVpx> VideoEncoderVpx::CreateForVP9I444() {
return scoped_ptr<VideoEncoderVpx>(
- new VideoEncoderVpx(base::Bind(&CreateVP9Codec)));
+ new VideoEncoderVpx(base::Bind(&CreateVP9Codec, true),
+ base::Bind(&CreateImage, true)));
}
VideoEncoderVpx::~VideoEncoderVpx() {}
@@ -233,8 +312,10 @@ scoped_ptr<VideoPacket> VideoEncoderVpx::Encode(
return packet.Pass();
}
-VideoEncoderVpx::VideoEncoderVpx(const InitializeCodecCallback& init_codec)
- : init_codec_(init_codec),
+VideoEncoderVpx::VideoEncoderVpx(const CreateCodecCallback& create_codec,
+ const CreateImageCallback& create_image)
+ : create_codec_(create_codec),
+ create_image_(create_image),
active_map_width_(0),
active_map_height_(0) {
}
@@ -242,60 +323,16 @@ VideoEncoderVpx::VideoEncoderVpx(const InitializeCodecCallback& init_codec)
bool VideoEncoderVpx::Initialize(const webrtc::DesktopSize& size) {
codec_.reset();
- image_.reset(new vpx_image_t());
- memset(image_.get(), 0, sizeof(vpx_image_t));
-
- image_->fmt = VPX_IMG_FMT_YV12;
-
- // libvpx seems to require both to be assigned.
- image_->d_w = size.width();
- image_->w = size.width();
- image_->d_h = size.height();
- image_->h = size.height();
-
- // libvpx should derive this from|fmt| but currently has a bug:
- // https://code.google.com/p/webm/issues/detail?id=627
- image_->x_chroma_shift = 1;
- image_->y_chroma_shift = 1;
+ // (Re)Create the VPX image structure and pixel buffer.
+ create_image_.Run(size, &image_, &image_buffer_);
// Initialize active map.
active_map_width_ = (image_->w + kMacroBlockSize - 1) / kMacroBlockSize;
active_map_height_ = (image_->h + kMacroBlockSize - 1) / kMacroBlockSize;
active_map_.reset(new uint8[active_map_width_ * active_map_height_]);
- // libyuv's fast-path requires 16-byte aligned pointers and strides, so pad
- // the Y, U and V planes' strides to multiples of 16 bytes.
- const int y_stride = ((image_->w - 1) & ~15) + 16;
- const int uv_unaligned_stride = y_stride / 2;
- const int uv_stride = ((uv_unaligned_stride - 1) & ~15) + 16;
-
- // libvpx accesses the source image in macro blocks, and will over-read
- // if the image is not padded out to the next macroblock: crbug.com/119633.
- // Pad the Y, U and V planes' height out to compensate.
- // Assuming macroblocks are 16x16, aligning the planes' strides above also
- // macroblock aligned them.
- DCHECK_EQ(16, kMacroBlockSize);
- const int y_rows = active_map_height_ * kMacroBlockSize;
- const int uv_rows = y_rows / 2;
-
- // Allocate a YUV buffer large enough for the aligned data & padding.
- const int buffer_size = y_stride * y_rows + 2 * uv_stride * uv_rows;
- yuv_image_.reset(new uint8[buffer_size]);
-
- // Reset image value to 128 so we just need to fill in the y plane.
- memset(yuv_image_.get(), 128, buffer_size);
-
- // Fill in the information for |image_|.
- unsigned char* image = reinterpret_cast<unsigned char*>(yuv_image_.get());
- image_->planes[0] = image;
- image_->planes[1] = image_->planes[0] + y_stride * y_rows;
- image_->planes[2] = image_->planes[1] + uv_stride * uv_rows;
- image_->stride[0] = y_stride;
- image_->stride[1] = uv_stride;
- image_->stride[2] = uv_stride;
-
- // Initialize the codec.
- codec_ = init_codec_.Run(size);
+ // (Re)Initialize the codec.
+ codec_ = create_codec_.Run(size);
return codec_;
}
@@ -336,13 +373,40 @@ void VideoEncoderVpx::PrepareImage(const webrtc::DesktopFrame& frame,
uint8* y_data = image_->planes[0];
uint8* u_data = image_->planes[1];
uint8* v_data = image_->planes[2];
- for (webrtc::DesktopRegion::Iterator r(*updated_region); !r.IsAtEnd();
- r.Advance()) {
- const webrtc::DesktopRect& rect = r.rect();
- ConvertRGB32ToYUVWithRect(
- rgb_data, y_data, u_data, v_data,
- rect.left(), rect.top(), rect.width(), rect.height(),
- rgb_stride, y_stride, uv_stride);
+
+ switch (image_->fmt) {
+ case VPX_IMG_FMT_I444:
+ for (webrtc::DesktopRegion::Iterator r(*updated_region); !r.IsAtEnd();
+ r.Advance()) {
+ const webrtc::DesktopRect& rect = r.rect();
+ int rgb_offset = rgb_stride * rect.top() +
+ rect.left() * kBytesPerRgbPixel;
+ int yuv_offset = uv_stride * rect.top() + rect.left();
+ libyuv::ARGBToI444(rgb_data + rgb_offset, rgb_stride,
+ y_data + yuv_offset, y_stride,
+ u_data + yuv_offset, uv_stride,
+ v_data + yuv_offset, uv_stride,
+ rect.width(), rect.height());
+ }
+ break;
+ case VPX_IMG_FMT_YV12:
+ for (webrtc::DesktopRegion::Iterator r(*updated_region); !r.IsAtEnd();
+ r.Advance()) {
+ const webrtc::DesktopRect& rect = r.rect();
+ int rgb_offset = rgb_stride * rect.top() +
+ rect.left() * kBytesPerRgbPixel;
+ int y_offset = y_stride * rect.top() + rect.left();
+ int uv_offset = uv_stride * rect.top() / 2 + rect.left() / 2;
+ libyuv::ARGBToI420(rgb_data + rgb_offset, rgb_stride,
+ y_data + y_offset, y_stride,
+ u_data + uv_offset, uv_stride,
+ v_data + uv_offset, uv_stride,
+ rect.width(), rect.height());
+ }
+ break;
+ default:
+ NOTREACHED();
+ break;
}
}
diff --git a/remoting/codec/video_encoder_vpx.h b/remoting/codec/video_encoder_vpx.h
index dd7fb2e..8a8d68b 100644
--- a/remoting/codec/video_encoder_vpx.h
+++ b/remoting/codec/video_encoder_vpx.h
@@ -23,7 +23,8 @@ class VideoEncoderVpx : public VideoEncoder {
public:
// Create encoder for the specified protocol.
static scoped_ptr<VideoEncoderVpx> CreateForVP8();
- static scoped_ptr<VideoEncoderVpx> CreateForVP9();
+ static scoped_ptr<VideoEncoderVpx> CreateForVP9I420();
+ static scoped_ptr<VideoEncoderVpx> CreateForVP9I444();
virtual ~VideoEncoderVpx();
@@ -33,9 +34,14 @@ class VideoEncoderVpx : public VideoEncoder {
private:
typedef base::Callback<ScopedVpxCodec(const webrtc::DesktopSize&)>
- InitializeCodecCallback;
+ CreateCodecCallback;
+ typedef base::Callback<void(const webrtc::DesktopSize&,
+ scoped_ptr<vpx_image_t>* out_image,
+ scoped_ptr<uint8[]>* out_image_buffer)>
+ CreateImageCallback;
- VideoEncoderVpx(const InitializeCodecCallback& init_codec);
+ VideoEncoderVpx(const CreateCodecCallback& create_codec,
+ const CreateImageCallback& create_image);
// Initializes the codec for frames of |size|. Returns true if successful.
bool Initialize(const webrtc::DesktopSize& size);
@@ -49,17 +55,20 @@ class VideoEncoderVpx : public VideoEncoder {
// given to the encoder to speed up encoding.
void PrepareActiveMap(const webrtc::DesktopRegion& updated_region);
- InitializeCodecCallback init_codec_;
+ CreateCodecCallback create_codec_;
+ CreateImageCallback create_image_;
ScopedVpxCodec codec_;
+ base::TimeTicks timestamp_base_;
+
+ // VPX image and buffer to hold the actual YUV planes.
scoped_ptr<vpx_image_t> image_;
+ scoped_ptr<uint8[]> image_buffer_;
+
+ // Active map used to optimize out processing of un-changed macroblocks.
scoped_ptr<uint8[]> active_map_;
int active_map_width_;
int active_map_height_;
- base::TimeTicks timestamp_base_;
-
- // Buffer for storing the yuv image.
- scoped_ptr<uint8[]> yuv_image_;
DISALLOW_COPY_AND_ASSIGN(VideoEncoderVpx);
};