| field | value | date |
|---|---|---|
| author | ralphl@chromium.org <ralphl@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2009-03-11 01:06:36 +0000 |
| committer | ralphl@chromium.org <ralphl@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2009-03-11 01:06:36 +0000 |
| commit | 193e1cdbcaac17d0fac6093f9bd63cd5477e4325 (patch) | |
| tree | 12a1a18a0ceca1084c86d436c215b92ff6620597 /media/base | |
| parent | 0b0fe3b6803ba1ed2d6b565474d93ccec34fedb9 (diff) | |
Implementation of the VideoFrame interface plus unit tests. Modified the mock video decoder to use the new frame implementation.
Review URL: http://codereview.chromium.org/42038
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@11410 0039d316-1c4b-4281-b951-d872f2087c98
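For orientation, here is a minimal caller-side sketch (not part of the patch; the function name, dimensions, and timestamps are illustrative) showing how the new `VideoFrameImpl::CreateFrame()` factory and the `Lock()`/`Unlock()` surface contract introduced by this change are used. It mirrors what the unit test and the updated mock decoder below do.

```cpp
// Sketch only: allocate a YV12 frame on the heap via the new factory, then
// lock it to reach the raw planes.  Dimensions and timestamps are arbitrary.
#include "base/time.h"
#include "media/base/buffers.h"
#include "media/base/video_frame_impl.h"

void CreateAndLockSketch() {
  scoped_refptr<media::VideoFrame> frame;
  media::VideoFrameImpl::CreateFrame(media::VideoSurface::YV12,
                                     320, 240,           // width, height
                                     base::TimeDelta(),  // timestamp
                                     base::TimeDelta(),  // duration
                                     &frame);
  if (!frame)
    return;  // Allocation failed; CreateFrame() leaves the pointer NULL.

  media::VideoSurface surface;
  if (frame->Lock(&surface)) {
    // surface.data[media::VideoSurface::kYPlane] and friends are now valid;
    // a frame may only be locked by one caller at a time.
    frame->Unlock();
  }
}
```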
Diffstat (limited to 'media/base')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | media/base/buffers.h | 8 |
| -rw-r--r-- | media/base/mock_media_filters.h | 134 |
| -rw-r--r-- | media/base/video_frame_impl.cc | 140 |
| -rw-r--r-- | media/base/video_frame_impl.h | 48 |
| -rw-r--r-- | media/base/video_frame_impl_unittest.cc | 107 |
5 files changed, 355 insertions, 82 deletions
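The buffers.h hunk that follows adds named plane indices (`kYPlane`, `kUPlane`, `kVPlane`, `kRGBPlane`) to `VideoSurface`. Below is a short sketch (an assumed helper, not in the patch) of walking a locked YV12 surface with those constants instead of magic indices, in the same spirit as `InitializeYV12Frame()` further down.

```cpp
// Sketch only: paint the luma plane of an already-locked YV12 surface using
// the named plane constants added to VideoSurface in this change.
#include <string.h>
#include "media/base/buffers.h"

void FillLumaPlane(media::VideoSurface* surface, uint8 luma) {
  uint8* row = surface->data[media::VideoSurface::kYPlane];
  for (size_t y = 0; y < surface->height; ++y) {
    memset(row, luma, surface->width);  // one scan line of Y samples
    row += surface->strides[media::VideoSurface::kYPlane];
  }
}
```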
diff --git a/media/base/buffers.h b/media/base/buffers.h
index 972a124..dddc5d3 100644
--- a/media/base/buffers.h
+++ b/media/base/buffers.h
@@ -121,6 +121,14 @@ class WritableBuffer : public Buffer {
 struct VideoSurface {
   static const size_t kMaxPlanes = 3;

+  static const size_t kNumRGBPlanes = 1;
+  static const size_t kRGBPlane = 0;
+
+  static const size_t kNumYUVPlanes = 3;
+  static const size_t kYPlane = 0;
+  static const size_t kUPlane = 1;
+  static const size_t kVPlane = 2;
+
   // Surface formats roughly based on FOURCC labels, see:
   //   http://www.fourcc.org/rgb.php
   //   http://www.fourcc.org/yuv.php
diff --git a/media/base/mock_media_filters.h b/media/base/mock_media_filters.h
index 0620eff..5a3c6bf 100644
--- a/media/base/mock_media_filters.h
+++ b/media/base/mock_media_filters.h
@@ -14,6 +14,7 @@
 #include "media/base/filters.h"
 #include "media/base/media_format.h"
 #include "media/base/pipeline.h"
+#include "media/base/video_frame_impl.h"
 #include "testing/gtest/include/gtest/gtest.h"

 namespace media {
@@ -366,75 +367,6 @@ class MockAudioRenderer : public AudioRenderer {

 //------------------------------------------------------------------------------

-class MockVideoFrame : public VideoFrame {
- public:
-  MockVideoFrame(size_t video_width,
-                 size_t video_height,
-                 VideoSurface::Format video_surface_format,
-                 base::TimeDelta timestamp,
-                 base::TimeDelta duration,
-                 double ratio_white_to_black) {
-    surface_locked_ = false;
-    SetTimestamp(timestamp);
-    SetDuration(duration);
-    size_t y_byte_count = video_width * video_height;
-    size_t uv_byte_count = y_byte_count / 4;
-    surface_.format = video_surface_format;
-    surface_.width = video_width;
-    surface_.height = video_height;
-    surface_.planes = 3;
-    surface_.data[0] = new uint8[y_byte_count];
-    surface_.data[1] = new uint8[uv_byte_count];
-    surface_.data[2] = new uint8[uv_byte_count];
-    surface_.strides[0] = video_width;
-    surface_.strides[1] = video_width / 2;
-    surface_.strides[2] = video_width / 2;
-    memset(surface_.data[0], 0, y_byte_count);
-    memset(surface_.data[1], 0x80, uv_byte_count);
-    memset(surface_.data[2], 0x80, uv_byte_count);
-    int64 num_white_pixels = static_cast<int64>(y_byte_count *
-                                                ratio_white_to_black);
-    if (num_white_pixels > y_byte_count) {
-      ADD_FAILURE();
-      num_white_pixels = y_byte_count;
-    }
-    if (num_white_pixels < 0) {
-      ADD_FAILURE();
-      num_white_pixels = 0;
-    }
-    memset(surface_.data[0], 0xFF, static_cast<size_t>(num_white_pixels));
-  }
-
-  virtual ~MockVideoFrame() {
-    delete[] surface_.data[0];
-    delete[] surface_.data[1];
-    delete[] surface_.data[2];
-  }
-
-  virtual bool Lock(VideoSurface* surface) {
-    EXPECT_FALSE(surface_locked_);
-    if (surface_locked_) {
-      memset(surface, 0, sizeof(*surface));
-      return false;
-    }
-    surface_locked_ = true;
-    COMPILE_ASSERT(sizeof(*surface) == sizeof(surface_), surface_size_mismatch);
-    memcpy(surface, &surface_, sizeof(*surface));
-    return true;
-  }
-
-  virtual void Unlock() {
-    EXPECT_TRUE(surface_locked_);
-    surface_locked_ = false;
-  }
-
- private:
-  bool surface_locked_;
-  VideoSurface surface_;
-
-  DISALLOW_COPY_AND_ASSIGN(MockVideoFrame);
-};
-
 class MockVideoDecoder : public VideoDecoder {
  public:
   static FilterFactory* CreateFactory(const MockFilterConfig* config) {
@@ -446,6 +378,35 @@ class MockVideoDecoder : public VideoDecoder {
     return true;  // TODO(ralphl): check for a supported format.
   }

+  // Helper function that initializes a YV12 frame with white and black scan
+  // lines based on the |white_to_black| parameter.  If 0, then the entire
+  // frame will be black, if 1 then the entire frame will be white.
+  static void InitializeYV12Frame(VideoFrame* frame, double white_to_black) {
+    VideoSurface surface;
+    if (!frame->Lock(&surface)) {
+      ADD_FAILURE();
+    } else {
+      EXPECT_EQ(surface.format, VideoSurface::YV12);
+      size_t first_black_row = static_cast<size_t>(surface.height *
+                                                   white_to_black);
+      uint8* y_plane = surface.data[VideoSurface::kYPlane];
+      for (size_t row = 0; row < surface.height; ++row) {
+        int color = (row < first_black_row) ? 0xFF : 0x00;
+        memset(y_plane, color, surface.width);
+        y_plane += surface.strides[VideoSurface::kYPlane];
+      }
+      uint8* u_plane = surface.data[VideoSurface::kUPlane];
+      uint8* v_plane = surface.data[VideoSurface::kVPlane];
+      for (size_t row = 0; row < surface.height; row += 2) {
+        memset(u_plane, 0x80, surface.width / 2);
+        memset(v_plane, 0x80, surface.width / 2);
+        u_plane += surface.strides[VideoSurface::kUPlane];
+        v_plane += surface.strides[VideoSurface::kVPlane];
+      }
+      frame->Unlock();
+    }
+  }
+
   explicit MockVideoDecoder(const MockFilterConfig* config)
       : config_(config) {
     media_format_.SetAsString(MediaFormat::kMimeType,
@@ -477,20 +438,29 @@ class MockVideoDecoder : public VideoDecoder {

   void DoRead(Assignable<VideoFrame>* buffer) {
     if (mock_frame_time_ < config_->media_duration) {
-      VideoFrame* frame = new MockVideoFrame(
-          config_->video_width,
-          config_->video_height,
-          config_->video_surface_format,
-          mock_frame_time_,
-          config_->frame_duration,
-          (mock_frame_time_.InSecondsF() /
-           config_->media_duration.InSecondsF()));
-      mock_frame_time_ += config_->frame_duration;
-      if (mock_frame_time_ >= config_->media_duration) {
-        frame->SetEndOfStream(true);
+      // TODO(ralphl): Mock video decoder only works with YV12.  Implement other
+      // formats as needed.
+      EXPECT_EQ(config_->video_surface_format, VideoSurface::YV12);
+      scoped_refptr<VideoFrame> frame;
+      VideoFrameImpl::CreateFrame(config_->video_surface_format,
+                                  config_->video_width,
+                                  config_->video_height,
+                                  mock_frame_time_,
+                                  config_->frame_duration,
+                                  &frame);
+      if (!frame) {
+        host_->Error(PIPELINE_ERROR_OUT_OF_MEMORY);
+        ADD_FAILURE();
+      } else {
+        mock_frame_time_ += config_->frame_duration;
+        if (mock_frame_time_ >= config_->media_duration) {
+          frame->SetEndOfStream(true);
+        }
+        InitializeYV12Frame(frame, (mock_frame_time_.InSecondsF() /
+                                    config_->media_duration.InSecondsF()));
+        buffer->SetBuffer(frame);
+        buffer->OnAssignment();
       }
-      buffer->SetBuffer(frame);
-      buffer->OnAssignment();
     }
     buffer->Release();
   }
diff --git a/media/base/video_frame_impl.cc b/media/base/video_frame_impl.cc
new file mode 100644
index 0000000..8d9ae59
--- /dev/null
+++ b/media/base/video_frame_impl.cc
@@ -0,0 +1,140 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/video_frame_impl.h"
+
+namespace media {
+
+// static
+void VideoFrameImpl::CreateFrame(VideoSurface::Format format,
+                                 size_t width,
+                                 size_t height,
+                                 base::TimeDelta timestamp,
+                                 base::TimeDelta duration,
+                                 scoped_refptr<VideoFrame>* frame_out) {
+  DCHECK(width > 0 && height > 0);
+  DCHECK(width * height < 100000000);
+  DCHECK(frame_out);
+  bool alloc_worked = false;
+  scoped_refptr<VideoFrameImpl> frame =
+      new VideoFrameImpl(format, width, height);
+  if (frame) {
+    frame->SetTimestamp(timestamp);
+    frame->SetDuration(duration);
+    switch (format) {
+      case VideoSurface::RGB555:
+      case VideoSurface::RGB565:
+        alloc_worked = frame->AllocateRGB(2u);
+        break;
+      case VideoSurface::RGB24:
+        alloc_worked = frame->AllocateRGB(3u);
+        break;
+      case VideoSurface::RGB32:
+      case VideoSurface::RGBA:
+        alloc_worked = frame->AllocateRGB(4u);
+        break;
+      case VideoSurface::YV12:
+      case VideoSurface::YV16:
+        alloc_worked = frame->AllocateYUV();
+        break;
+      default:
+        NOTREACHED();
+        alloc_worked = false;
+        break;
+    }
+  }
+  *frame_out = alloc_worked ? frame : NULL;
+}
+
+static inline size_t RoundUp(size_t value, size_t alignment) {
+  // Check that |alignment| is a power of 2.
+  DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
+  return ((value + (alignment - 1)) & ~(alignment-1));
+}
+
+bool VideoFrameImpl::AllocateRGB(size_t bytes_per_pixel) {
+  // Round up to align at a 64-bit (8 byte) boundary for each row.  This
+  // is sufficient for MMX reads (movq).
+  size_t bytes_per_row = RoundUp(surface_.width * bytes_per_pixel, 8);
+  surface_.planes = VideoSurface::kNumRGBPlanes;
+  surface_.strides[VideoSurface::kRGBPlane] = bytes_per_row;
+  surface_.data[VideoSurface::kRGBPlane] = new uint8[bytes_per_row *
+                                                     surface_.height];
+  DCHECK(surface_.data[VideoSurface::kRGBPlane]);
+  DCHECK(!(reinterpret_cast<int>(surface_.data[VideoSurface::kRGBPlane]) & 7));
+  COMPILE_ASSERT(0 == VideoSurface::kRGBPlane, RGB_data_must_be_index_0);
+  return (NULL != surface_.data[VideoSurface::kRGBPlane]);
+}
+
+bool VideoFrameImpl::AllocateYUV() {
+  DCHECK(surface_.format == VideoSurface::YV12 ||
+         surface_.format == VideoSurface::YV16);
+  // Align Y rows at 32-bit (4 byte) boundaries.  The stride for both YV12 and
+  // YV16 is 1/2 of the stride of Y.  For YV12, every row of bytes for U and V
+  // applies to two rows of Y (one byte of UV for 4 bytes of Y), so in the
+  // case of YV12 the strides are identical for the same width surface, but the
+  // number of bytes allocated for YV12 is 1/2 the amount for U & V as YV16.
+  // We also round the height of the surface allocated to be an even number
+  // to avoid any potential of faulting by code that attempts to access the Y
+  // values of the final row, but assumes that the last row of U & V applies to
+  // a full two rows of Y.
+  size_t alloc_height = RoundUp(surface_.height, 2);
+  size_t y_bytes_per_row = RoundUp(surface_.width, 4);
+  size_t uv_stride = RoundUp(y_bytes_per_row / 2, 4);
+  size_t y_bytes = alloc_height * y_bytes_per_row;
+  size_t uv_bytes = alloc_height * uv_stride;
+  if (surface_.format == VideoSurface::YV12) {
+    uv_bytes /= 2;
+  }
+  uint8* data = new uint8[y_bytes + (uv_bytes * 2)];
+  if (data) {
+    surface_.planes = VideoSurface::kNumYUVPlanes;
+    COMPILE_ASSERT(0 == VideoSurface::kYPlane, y_plane_data_must_be_index_0);
+    surface_.data[VideoSurface::kYPlane] = data;
+    surface_.data[VideoSurface::kUPlane] = data + y_bytes;
+    surface_.data[VideoSurface::kVPlane] = data + y_bytes + uv_bytes;
+    surface_.strides[VideoSurface::kYPlane] = y_bytes_per_row;
+    surface_.strides[VideoSurface::kUPlane] = uv_stride;
+    surface_.strides[VideoSurface::kVPlane] = uv_stride;
+    return true;
+  }
+  NOTREACHED();
+  return false;
+}
+
+VideoFrameImpl::VideoFrameImpl(VideoSurface::Format format,
+                               size_t width,
+                               size_t height) {
+  locked_ = false;
+  memset(&surface_, 0, sizeof(surface_));
+  surface_.format = format;
+  surface_.width = width;
+  surface_.height = height;
+}
+
+VideoFrameImpl::~VideoFrameImpl() {
+  // In multi-plane allocations, only a single block of memory is allocated
+  // on the heap, and other |data| pointers point inside the same, single block
+  // so just delete index 0.
+  delete[] surface_.data[0];
+}
+
+bool VideoFrameImpl::Lock(VideoSurface* surface) {
+  DCHECK(!locked_);
+  if (locked_) {
+    memset(surface, 0, sizeof(*surface));
+    return false;
+  }
+  locked_ = true;
+  COMPILE_ASSERT(sizeof(*surface) == sizeof(surface_), surface_size_mismatch);
+  memcpy(surface, &surface_, sizeof(*surface));
+  return true;
+}
+
+void VideoFrameImpl::Unlock() {
+  DCHECK(locked_);
+  locked_ = false;
+}
+
+}  // namespace media
diff --git a/media/base/video_frame_impl.h b/media/base/video_frame_impl.h
new file mode 100644
index 0000000..36a2f2c
--- /dev/null
+++ b/media/base/video_frame_impl.h
@@ -0,0 +1,48 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Simple class that implements the VideoFrame interface with memory allocated
+// on the system heap.  This class supports every format defined in the
+// VideoSurface::Format enum.  The implementation attempts to properly align
+// allocations for maximum system bus efficency.
+#ifndef MEDIA_BASE_VIDEO_FRAME_IMPL_H_
+#define MEDIA_BASE_VIDEO_FRAME_IMPL_H_
+
+#include "media/base/buffers.h"
+
+namespace media {
+
+class VideoFrameImpl : public VideoFrame {
+ public:
+  static void CreateFrame(VideoSurface::Format format,
+                          size_t width,
+                          size_t height,
+                          base::TimeDelta timestamp,
+                          base::TimeDelta duration,
+                          scoped_refptr<VideoFrame>* frame_out);
+
+  // Implementation of VideoFrame.
+  virtual bool Lock(VideoSurface* surface);
+  virtual void Unlock();
+
+ private:
+  // Clients must use the static CreateFrame() method to create a new frame.
+  VideoFrameImpl(VideoSurface::Format format,
+                 size_t video_width,
+                 size_t video_height);
+
+  virtual ~VideoFrameImpl();
+
+  bool AllocateRGB(size_t bytes_per_pixel);
+  bool AllocateYUV();
+
+  bool locked_;
+  VideoSurface surface_;
+
+  DISALLOW_COPY_AND_ASSIGN(VideoFrameImpl);
+};
+
+}  // namespace media
+
+#endif  // MEDIA_BASE_VIDEO_FRAME_IMPL_H_
diff --git a/media/base/video_frame_impl_unittest.cc b/media/base/video_frame_impl_unittest.cc
new file mode 100644
index 0000000..352e2368
--- /dev/null
+++ b/media/base/video_frame_impl_unittest.cc
@@ -0,0 +1,107 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/buffers.h"
+#include "media/base/mock_media_filters.h"
+#include "media/base/video_frame_impl.h"
+#include "media/base/yuv_convert.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using media::VideoFrameImpl;
+using media::VideoSurface;
+
+namespace {
+
+// Given a |yv12_frame| this method converts the YV12 frame to RGBA and
+// makes sure that all the pixels of the RBG frame equal |expect_rgb_color|.
+void ExpectFrameColor(media::VideoFrame* yv12_frame, uint32 expect_rgb_color) {
+  // On linux and mac builds if you directly compare using EXPECT_EQ and use
+  // the VideoSurface::kNumxxxPlanes constants, it generates an error when
+  // linking.  These are declared so that we can compare against locals.
+  const size_t expect_yuv_planes = VideoSurface::kNumYUVPlanes;
+  const size_t expect_rgb_planes = VideoSurface::kNumRGBPlanes;
+
+  VideoSurface yuv_surface;
+  EXPECT_TRUE(yv12_frame->Lock(&yuv_surface));
+  EXPECT_EQ(yuv_surface.format, VideoSurface::YV12);
+  EXPECT_EQ(yuv_surface.planes, expect_yuv_planes);
+  EXPECT_EQ(yuv_surface.strides[VideoSurface::kUPlane],
+            yuv_surface.strides[VideoSurface::kVPlane]);
+
+  scoped_refptr<media::VideoFrame> rgb_frame;
+  media::VideoFrameImpl::CreateFrame(VideoSurface::RGBA,
+                                     yuv_surface.width,
+                                     yuv_surface.height,
+                                     yv12_frame->GetTimestamp(),
+                                     yv12_frame->GetDuration(),
+                                     &rgb_frame);
+  media::VideoSurface rgb_surface;
+  EXPECT_TRUE(rgb_frame->Lock(&rgb_surface));
+  EXPECT_EQ(rgb_surface.width, yuv_surface.width);
+  EXPECT_EQ(rgb_surface.height, yuv_surface.height);
+  EXPECT_EQ(rgb_surface.planes, expect_rgb_planes);
+
+  media::ConvertYV12ToRGB32(yuv_surface.data[VideoSurface::kYPlane],
+                            yuv_surface.data[VideoSurface::kUPlane],
+                            yuv_surface.data[VideoSurface::kVPlane],
+                            rgb_surface.data[VideoSurface::kRGBPlane],
+                            rgb_surface.width,
+                            rgb_surface.height,
+                            yuv_surface.strides[VideoSurface::kYPlane],
+                            yuv_surface.strides[VideoSurface::kUPlane],
+                            rgb_surface.strides[VideoSurface::kRGBPlane]);
+
+  for (size_t row = 0; row < rgb_surface.height; ++row) {
+    uint32* rgb_row_data = reinterpret_cast<uint32*>(
+        rgb_surface.data[VideoSurface::kRGBPlane] +
+        (rgb_surface.strides[VideoSurface::kRGBPlane] * row));
+    for (size_t col = 0; col < rgb_surface.width; ++col) {
+      EXPECT_EQ(rgb_row_data[col], expect_rgb_color);
+    }
+  }
+  rgb_frame->Unlock();
+  yv12_frame->Unlock();
+}
+
+}  // namespace
+
+
+TEST(VideoFrameImpl, Basic) {
+  const size_t kWidth = 64;
+  const size_t kHeight = 48;
+  const base::TimeDelta kTimestampA = base::TimeDelta::FromMicroseconds(1337);
+  const base::TimeDelta kDurationA = base::TimeDelta::FromMicroseconds(1667);
+  const base::TimeDelta kTimestampB = base::TimeDelta::FromMicroseconds(1234);
+  const base::TimeDelta kDurationB = base::TimeDelta::FromMicroseconds(5678);
+
+  // Create a YV12 Video Frame.
+  scoped_refptr<media::VideoFrame> frame;
+  media::VideoFrameImpl::CreateFrame(media::VideoSurface::YV12, kWidth, kHeight,
+                                     kTimestampA, kDurationA, &frame);
+  ASSERT_TRUE(frame);
+
+  // Test StreamSample implementation.
+  EXPECT_TRUE(kTimestampA == frame->GetTimestamp());
+  EXPECT_TRUE(kDurationA == frame->GetDuration());
+  EXPECT_FALSE(frame->IsEndOfStream());
+  EXPECT_FALSE(frame->IsDiscontinuous());
+  frame->SetTimestamp(kTimestampB);
+  frame->SetDuration(kDurationB);
+  EXPECT_TRUE(kTimestampB == frame->GetTimestamp());
+  EXPECT_TRUE(kDurationB == frame->GetDuration());
+  frame->SetEndOfStream(true);
+  EXPECT_TRUE(frame->IsEndOfStream());
+  frame->SetEndOfStream(false);
+  EXPECT_FALSE(frame->IsEndOfStream());
+  frame->SetDiscontinuous(true);
+  EXPECT_TRUE(frame->IsDiscontinuous());
+  frame->SetDiscontinuous(false);
+  EXPECT_FALSE(frame->IsDiscontinuous());
+
+  // Test VideoFrame implementation.
+  media::MockVideoDecoder::InitializeYV12Frame(frame, 0.0f);
+  ExpectFrameColor(frame, 0xFF000000);
+  media::MockVideoDecoder::InitializeYV12Frame(frame, 1.0f);
+  ExpectFrameColor(frame, 0xFFFFFFFF);
+}
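As a worked example of the size arithmetic documented in `AllocateYUV()` above, here is a standalone sketch (illustrative only; the helper name is hypothetical) that reproduces the stride and plane-size computation for the 64x48 YV12 frame the unit test allocates.

```cpp
// Sketch only: recompute AllocateYUV()'s layout for a 64x48 YV12 frame.
#include <cassert>
#include <cstddef>

// Same power-of-two rounding as the RoundUp() helper in video_frame_impl.cc.
size_t RoundUpTo(size_t value, size_t alignment) {
  return (value + (alignment - 1)) & ~(alignment - 1);
}

int main() {
  const size_t width = 64, height = 48;
  const size_t alloc_height = RoundUpTo(height, 2);            // 48
  const size_t y_bytes_per_row = RoundUpTo(width, 4);          // 64
  const size_t uv_stride = RoundUpTo(y_bytes_per_row / 2, 4);  // 32
  const size_t y_bytes = alloc_height * y_bytes_per_row;       // 3072
  size_t uv_bytes = alloc_height * uv_stride;                  // 1536 for YV16...
  uv_bytes /= 2;                                               // ...halved to 768 for YV12
  // One heap block holds all three planes: Y first, then U, then V.
  assert(y_bytes + 2 * uv_bytes == 4608);
  return 0;
}
```

The single allocation and the plane offsets (`data + y_bytes`, `data + y_bytes + uv_bytes`) are also why the destructor only deletes index 0.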