author    acolwell@chromium.org <acolwell@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2011-03-23 21:28:59 +0000
committer acolwell@chromium.org <acolwell@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2011-03-23 21:28:59 +0000
commit    d82b18ae3cf1a5976e85f773e9212431e46146cb (patch)
tree      7547c63a852a7e9164985fedd9de7622a59234a5 /media
parent    238ca86df5ced41e7de04492fb547259803a93a4 (diff)
Build a raw video pipeline for the <video> tag with a special src attribute (media://...).
The raw video pipeline graph only has two filters - one pass-thru decoder filter and one renderer filter.

Contributed by ronghuawu@google.com

Original code reviews:
http://codereview.chromium.org/6658001/ (pipeline changes)
http://codereview.chromium.org/6621049/ (pass-thru filter)

BUG=none
TEST=media_unittests

Committed: http://src.chromium.org/viewvc/chrome?view=rev&revision=79149

Review URL: http://codereview.chromium.org/6726006

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@79185 0039d316-1c4b-4281-b951-d872f2087c98
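For orientation before reading the diff: the new control flow in PipelineImpl::StartTask checks the URL scheme and, for media:// URLs, skips demuxer initialization entirely, wiring the pass-thru RTCVideoDecoder straight to the renderer. The standalone sketch below only illustrates that branch; IsRawVideoUrl and StartPipeline are simplified, hypothetical stand-ins for RTCVideoDecoder::IsUrlSupported (which uses GURL::SchemeIs) and PipelineImpl::StartTask, not the actual Chromium code.

// sketch.cc - illustrative only; names are stand-ins for the real
// PipelineImpl / RTCVideoDecoder code shown in the diff below.
#include <iostream>
#include <string>

// Mirrors the intent of RTCVideoDecoder::IsUrlSupported(): true for media://.
static bool IsRawVideoUrl(const std::string& url) {
  return url.compare(0, 8, "media://") == 0;
}

static void StartPipeline(const std::string& url) {
  if (IsRawVideoUrl(url)) {
    // Raw video path: no demuxer; the pass-thru decoder is initialized with
    // a NULL demuxer stream and feeds the video renderer directly.
    std::cout << url << " -> state = kInitVideoDecoder (demuxer skipped)\n";
  } else {
    // Normal path: build the demuxer first, then decoders and renderers.
    std::cout << url << " -> state = kInitDemuxer\n";
  }
}

int main() {
  StartPipeline("media://raw_video_stream");       // raw video pipeline
  StartPipeline("http://example.com/clip.webm");   // regular pipeline
  return 0;
}

Running the sketch prints which initialization state each URL would start in, mirroring the RawVideoStream case added to pipeline_impl_unittest.cc.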
Diffstat (limited to 'media')
-rw-r--r--media/base/mock_filters.cc5
-rw-r--r--media/base/mock_filters.h3
-rw-r--r--media/base/pipeline_impl.cc32
-rw-r--r--media/base/pipeline_impl_unittest.cc43
-rw-r--r--media/filters/rtc_video_decoder.cc281
-rw-r--r--media/filters/rtc_video_decoder.h94
-rw-r--r--media/filters/rtc_video_decoder_unittest.cc170
-rw-r--r--media/media.gyp5
8 files changed, 617 insertions, 16 deletions
diff --git a/media/base/mock_filters.cc b/media/base/mock_filters.cc
index 5fc5b55..da601c4 100644
--- a/media/base/mock_filters.cc
+++ b/media/base/mock_filters.cc
@@ -135,6 +135,7 @@ MockFilterCollection::~MockFilterCollection() {}
FilterCollection* MockFilterCollection::filter_collection(
bool include_demuxer,
bool run_build_callback,
+ bool run_build,
PipelineStatus build_status) const {
FilterCollection* collection = new FilterCollection();
@@ -151,7 +152,9 @@ FilterCollection* MockFilterCollection::filter_collection(
ON_CALL(*demuxer_factory, Build(_, NotNull())).WillByDefault(Invoke(
demuxer_factory, &MockDemuxerFactory::DestroyBuildCallback));
}
- EXPECT_CALL(*demuxer_factory, Build(_, NotNull()));
+
+ if (run_build)
+ EXPECT_CALL(*demuxer_factory, Build(_, NotNull()));
collection->SetDemuxerFactory(demuxer_factory);
collection->AddVideoDecoder(video_decoder_);
diff --git a/media/base/mock_filters.h b/media/base/mock_filters.h
index c108c57..44d3236 100644
--- a/media/base/mock_filters.h
+++ b/media/base/mock_filters.h
@@ -289,11 +289,12 @@ class MockFilterCollection {
MockAudioRenderer* audio_renderer() const { return audio_renderer_; }
FilterCollection* filter_collection() const {
- return filter_collection(true, true, PIPELINE_OK);
+ return filter_collection(true, true, true, PIPELINE_OK);
}
FilterCollection* filter_collection(bool include_demuxer,
bool run_build_callback,
+ bool run_build,
PipelineStatus build_status) const;
private:
diff --git a/media/base/pipeline_impl.cc b/media/base/pipeline_impl.cc
index 2b66661..7f5f3f3 100644
--- a/media/base/pipeline_impl.cc
+++ b/media/base/pipeline_impl.cc
@@ -5,14 +5,18 @@
// TODO(scherkus): clean up PipelineImpl... too many crazy function names,
// potential deadlocks, etc...
+#include "media/base/pipeline_impl.h"
+
+#include <algorithm>
+
#include "base/callback.h"
#include "base/compiler_specific.h"
#include "base/stl_util-inl.h"
#include "base/synchronization/condition_variable.h"
+#include "media/filters/rtc_video_decoder.h"
#include "media/base/clock.h"
#include "media/base/filter_collection.h"
#include "media/base/media_format.h"
-#include "media/base/pipeline_impl.h"
namespace media {
@@ -578,12 +582,17 @@ void PipelineImpl::StartTask(FilterCollection* filter_collection,
seek_callback_.reset(start_callback);
// Kick off initialization.
- set_state(kInitDemuxer);
pipeline_init_state_.reset(new PipelineInitState());
pipeline_init_state_->composite_ = new CompositeFilter(message_loop_);
pipeline_init_state_->composite_->set_host(this);
- InitializeDemuxer();
+ if (RTCVideoDecoder::IsUrlSupported(url)) {
+ set_state(kInitVideoDecoder);
+ InitializeVideoDecoder(NULL);
+ } else {
+ set_state(kInitDemuxer);
+ InitializeDemuxer();
+ }
}
// Main initialization method called on the pipeline thread. This code attempts
@@ -914,7 +923,7 @@ void PipelineImpl::FilterStateTransitionTask() {
NewCallback(this, &PipelineImpl::OnFilterStateTransition));
} else if (state_ == kStarting) {
pipeline_filter_->Play(
- NewCallback(this,&PipelineImpl::OnFilterStateTransition));
+ NewCallback(this, &PipelineImpl::OnFilterStateTransition));
} else if (state_ == kStopping) {
pipeline_filter_->Stop(
NewCallback(this, &PipelineImpl::OnFilterStateTransition));
@@ -946,7 +955,7 @@ void PipelineImpl::FilterStateTransitionTask() {
void PipelineImpl::TeardownStateTransitionTask() {
DCHECK(IsPipelineTearingDown());
- switch(state_) {
+ switch (state_) {
case kStopping:
set_state(error_caused_teardown_ ? kError : kStopped);
FinishDestroyingFiltersTask();
@@ -1087,11 +1096,14 @@ bool PipelineImpl::InitializeVideoDecoder(
DCHECK_EQ(MessageLoop::current(), message_loop_);
DCHECK(IsPipelineOk());
- scoped_refptr<DemuxerStream> stream =
- demuxer->GetStream(DemuxerStream::VIDEO);
+ scoped_refptr<DemuxerStream> stream;
- if (!stream)
- return false;
+ if (demuxer) {
+ stream = demuxer->GetStream(DemuxerStream::VIDEO);
+
+ if (!stream)
+ return false;
+ }
scoped_refptr<VideoDecoder> video_decoder;
filter_collection_->SelectVideoDecoder(&video_decoder);
@@ -1169,7 +1181,7 @@ void PipelineImpl::TearDownPipeline() {
// Mark that we already start tearing down operation.
tearing_down_ = true;
- switch(state_) {
+ switch (state_) {
case kCreated:
case kError:
set_state(kStopped);
diff --git a/media/base/pipeline_impl_unittest.cc b/media/base/pipeline_impl_unittest.cc
index 396b69c..87f3e96 100644
--- a/media/base/pipeline_impl_unittest.cc
+++ b/media/base/pipeline_impl_unittest.cc
@@ -33,6 +33,9 @@ static const int kTotalBytes = 1024;
// Buffered bytes of the data source.
static const int kBufferedBytes = 1024;
+// Test url for raw video pipeline.
+static const std::string kUrlMedia = "media://raw_video_stream";
+
// Used for setting expectations on pipeline callbacks. Using a StrictMock
// also lets us test for missing callbacks.
class CallbackHelper {
@@ -187,11 +190,27 @@ class PipelineImplTest : public ::testing::Test {
// But some tests require different statuses in build & Start.
void InitializePipeline(PipelineStatus build_status,
PipelineStatus start_status) {
+ InitializePipeline(build_status, start_status, "");
+ }
+
+ void InitializePipeline(PipelineStatus build_status,
+ PipelineStatus start_status,
+ const std::string& url) {
// Expect an initialization callback.
EXPECT_CALL(callbacks_, OnStart(start_status));
- pipeline_->Start(mocks_->filter_collection(true, true, build_status), "",
+
+ bool run_build = true;
+ if (url.compare(kUrlMedia) == 0)
+ run_build = false;
+
+ pipeline_->Start(mocks_->filter_collection(true,
+ true,
+ run_build,
+ build_status),
+ url,
NewCallback(reinterpret_cast<CallbackHelper*>(&callbacks_),
&CallbackHelper::OnStart));
+
message_loop_.RunAllPending();
}
@@ -232,7 +251,6 @@ class PipelineImplTest : public ::testing::Test {
// We expect a successful seek callback.
EXPECT_CALL(callbacks_, OnSeek(PIPELINE_OK));
-
}
void DoSeek(const base::TimeDelta& seek_time) {
@@ -309,7 +327,11 @@ TEST_F(PipelineImplTest, NeverInitializes) {
// This test hangs during initialization by never calling
// InitializationComplete(). StrictMock<> will ensure that the callback is
// never executed.
- pipeline_->Start(mocks_->filter_collection(false, false, PIPELINE_OK), "",
+ pipeline_->Start(mocks_->filter_collection(false,
+ false,
+ true,
+ PIPELINE_OK),
+ "",
NewCallback(reinterpret_cast<CallbackHelper*>(&callbacks_),
&CallbackHelper::OnStart));
message_loop_.RunAllPending();
@@ -333,7 +355,9 @@ TEST_F(PipelineImplTest, RequiredFilterMissing) {
// Create a filter collection with missing filter.
FilterCollection* collection =
- mocks_->filter_collection(false, true,
+ mocks_->filter_collection(false,
+ true,
+ true,
PIPELINE_ERROR_REQUIRED_FILTER_MISSING);
pipeline_->Start(collection, "",
NewCallback(reinterpret_cast<CallbackHelper*>(&callbacks_),
@@ -396,6 +420,16 @@ TEST_F(PipelineImplTest, VideoStream) {
EXPECT_TRUE(pipeline_->HasVideo());
}
+TEST_F(PipelineImplTest, RawVideoStream) {
+ InitializeVideoDecoder(NULL);
+ InitializeVideoRenderer();
+
+ InitializePipeline(PIPELINE_OK, PIPELINE_OK, kUrlMedia);
+ EXPECT_TRUE(pipeline_->IsInitialized());
+ EXPECT_FALSE(pipeline_->HasAudio());
+ EXPECT_TRUE(pipeline_->HasVideo());
+}
+
TEST_F(PipelineImplTest, AudioVideoStream) {
CreateAudioStream();
CreateVideoStream();
@@ -816,3 +850,4 @@ TEST(PipelineStatusNotificationTest, DelayedCallback) {
}
} // namespace media
+
diff --git a/media/filters/rtc_video_decoder.cc b/media/filters/rtc_video_decoder.cc
new file mode 100644
index 0000000..0ddf630
--- /dev/null
+++ b/media/filters/rtc_video_decoder.cc
@@ -0,0 +1,281 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/filters/rtc_video_decoder.h"
+
+#include <deque>
+
+#include "base/task.h"
+#include "googleurl/src/gurl.h"
+#include "media/base/callback.h"
+#include "media/base/filter_host.h"
+#include "media/base/filters.h"
+#include "media/base/limits.h"
+#include "media/base/media_format.h"
+#include "media/base/video_frame.h"
+
+namespace media {
+
+static const char kMediaScheme[] = "media";
+
+RTCVideoDecoder::RTCVideoDecoder(MessageLoop* message_loop,
+ const std::string& url)
+ : message_loop_(message_loop),
+ width_(176),
+ height_(144),
+ url_(url),
+ state_(kUnInitialized) {
+}
+
+RTCVideoDecoder::~RTCVideoDecoder() {
+}
+
+void RTCVideoDecoder::Initialize(DemuxerStream* demuxer_stream,
+ FilterCallback* filter_callback,
+ StatisticsCallback* stat_callback) {
+ if (MessageLoop::current() != message_loop_) {
+ message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &RTCVideoDecoder::Initialize,
+ make_scoped_refptr(demuxer_stream),
+ filter_callback, stat_callback));
+ return;
+ }
+
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+
+ lock_.Acquire();
+ frame_queue_available_.clear();
+ lock_.Release();
+ media_format_.SetAsInteger(MediaFormat::kWidth, width_);
+ media_format_.SetAsInteger(MediaFormat::kHeight, height_);
+ media_format_.SetAsInteger(MediaFormat::kSurfaceType,
+ static_cast<int>(VideoFrame::YV12));
+ media_format_.SetAsInteger(MediaFormat::kSurfaceFormat,
+ static_cast<int>(VideoFrame::TYPE_SYSTEM_MEMORY));
+
+ state_ = kNormal;
+
+ filter_callback->Run();
+ delete filter_callback;
+
+ // TODO(acolwell): Implement stats.
+ delete stat_callback;
+}
+
+void RTCVideoDecoder::Play(FilterCallback* callback) {
+ if (MessageLoop::current() != message_loop_) {
+ message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this,
+ &RTCVideoDecoder::Play,
+ callback));
+ return;
+ }
+
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+
+ VideoDecoder::Play(callback);
+}
+
+void RTCVideoDecoder::Pause(FilterCallback* callback) {
+ if (MessageLoop::current() != message_loop_) {
+ message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this,
+ &RTCVideoDecoder::Pause,
+ callback));
+ return;
+ }
+
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+
+ state_ = kPaused;
+
+ VideoDecoder::Pause(callback);
+}
+
+void RTCVideoDecoder::Stop(FilterCallback* callback) {
+ if (MessageLoop::current() != message_loop_) {
+ message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this,
+ &RTCVideoDecoder::Stop,
+ callback));
+ return;
+ }
+
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+
+ state_ = kStopped;
+
+ VideoDecoder::Stop(callback);
+
+ // TODO(ronghuawu): Stop rtc
+}
+
+void RTCVideoDecoder::Seek(base::TimeDelta time,
+ FilterCallback* callback) {
+ if (MessageLoop::current() != message_loop_) {
+ message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this,
+ &RTCVideoDecoder::Seek,
+ time,
+ callback));
+ return;
+ }
+
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+
+ state_ = kSeeking;
+ // Create output buffer pool and pass the frames to renderer
+ // so that the renderer can complete the seeking
+ for (size_t i = 0; i < Limits::kMaxVideoFrames; ++i) {
+ scoped_refptr<VideoFrame> video_frame;
+ VideoFrame::CreateFrame(VideoFrame::YV12,
+ width_,
+ height_,
+ kNoTimestamp,
+ kNoTimestamp,
+ &video_frame);
+ if (!video_frame.get()) {
+ break;
+ }
+
+ // Create black frame
+ const uint8 kBlackY = 0x00;
+ const uint8 kBlackUV = 0x80;
+ // Fill the Y plane.
+ uint8* y_plane = video_frame->data(VideoFrame::kYPlane);
+ for (size_t i = 0; i < height_; ++i) {
+ memset(y_plane, kBlackY, width_);
+ y_plane += video_frame->stride(VideoFrame::kYPlane);
+ }
+ // Fill the U and V planes.
+ uint8* u_plane = video_frame->data(VideoFrame::kUPlane);
+ uint8* v_plane = video_frame->data(VideoFrame::kVPlane);
+ for (size_t i = 0; i < (height_ / 2); ++i) {
+ memset(u_plane, kBlackUV, width_ / 2);
+ memset(v_plane, kBlackUV, width_ / 2);
+ u_plane += video_frame->stride(VideoFrame::kUPlane);
+ v_plane += video_frame->stride(VideoFrame::kVPlane);
+ }
+
+ VideoFrameReady(video_frame);
+ }
+
+ state_ = kNormal;
+
+ callback->Run();
+ delete callback;
+
+ // TODO(ronghuawu): Start rtc
+}
+
+const MediaFormat& RTCVideoDecoder::media_format() {
+ return media_format_;
+}
+
+void RTCVideoDecoder::ProduceVideoFrame(
+ scoped_refptr<VideoFrame> video_frame) {
+ if (MessageLoop::current() != message_loop_) {
+ message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &RTCVideoDecoder::ProduceVideoFrame, video_frame));
+ return;
+ }
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+ lock_.Acquire();
+ frame_queue_available_.push_back(video_frame);
+ lock_.Release();
+}
+
+bool RTCVideoDecoder::ProvidesBuffer() {
+ return true;
+}
+
+int RTCVideoDecoder::FrameSizeChange(unsigned int width,
+ unsigned int height,
+ unsigned int number_of_streams) {
+ width_ = width;
+ height_ = height;
+
+ media_format_.SetAsInteger(MediaFormat::kWidth, width_);
+ media_format_.SetAsInteger(MediaFormat::kHeight, height_);
+ host()->SetVideoSize(width_, height_);
+ return 0;
+}
+
+int RTCVideoDecoder::DeliverFrame(unsigned char* buffer,
+ int buffer_size) {
+ DCHECK(buffer);
+
+ if (frame_queue_available_.size() == 0)
+ return 0;
+
+ if (state_ != kNormal)
+ return 0;
+
+ // This is called from another thread
+ lock_.Acquire();
+ scoped_refptr<VideoFrame> video_frame = frame_queue_available_.front();
+ frame_queue_available_.pop_front();
+ lock_.Release();
+
+ // Check if there's a size change
+ if (video_frame->width() != width_ || video_frame->height() != height_) {
+ video_frame.release();
+ // Allocate new buffer based on the new size
+ VideoFrame::CreateFrame(VideoFrame::YV12,
+ width_,
+ height_,
+ kNoTimestamp,
+ kNoTimestamp,
+ &video_frame);
+ if (!video_frame.get()) {
+ return -1;
+ }
+ }
+
+ video_frame->SetTimestamp(host()->GetTime());
+ video_frame->SetDuration(base::TimeDelta::FromMilliseconds(30));
+
+ uint8* y_plane = video_frame->data(VideoFrame::kYPlane);
+ for (size_t row = 0; row < video_frame->height(); ++row) {
+ memcpy(y_plane, buffer, width_);
+ y_plane += video_frame->stride(VideoFrame::kYPlane);
+ buffer += width_;
+ }
+ size_t uv_width = width_/2;
+ uint8* u_plane = video_frame->data(VideoFrame::kUPlane);
+ for (size_t row = 0; row < video_frame->height(); row += 2) {
+ memcpy(u_plane, buffer, uv_width);
+ u_plane += video_frame->stride(VideoFrame::kUPlane);
+ buffer += uv_width;
+ }
+ uint8* v_plane = video_frame->data(VideoFrame::kVPlane);
+ for (size_t row = 0; row < video_frame->height(); row += 2) {
+ memcpy(v_plane, buffer, uv_width);
+ v_plane += video_frame->stride(VideoFrame::kVPlane);
+ buffer += uv_width;
+ }
+
+ if (MessageLoop::current() != message_loop_) {
+ message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &RTCVideoDecoder::VideoFrameReady,
+ video_frame));
+ } else {
+ VideoFrameReady(video_frame);
+ }
+
+ return 0;
+}
+
+bool RTCVideoDecoder::IsUrlSupported(const std::string& url) {
+ GURL gurl(url);
+ return gurl.SchemeIs(kMediaScheme);
+}
+
+} // namespace media
diff --git a/media/filters/rtc_video_decoder.h b/media/filters/rtc_video_decoder.h
new file mode 100644
index 0000000..02bd96f
--- /dev/null
+++ b/media/filters/rtc_video_decoder.h
@@ -0,0 +1,94 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FILTERS_RTC_VIDEO_DECODER_H_
+#define MEDIA_FILTERS_RTC_VIDEO_DECODER_H_
+
+#include <deque>
+#include <string>
+
+#include "base/gtest_prod_util.h"
+#include "base/time.h"
+#include "media/base/filters.h"
+#include "media/base/video_frame.h"
+#include "media/filters/decoder_base.h"
+
+// TODO(ronghuawu) ExternalRenderer should be defined in WebRtc
+class ExternalRenderer {
+ public:
+ virtual int FrameSizeChange(unsigned int width,
+ unsigned int height,
+ unsigned int number_of_streams) = 0;
+ virtual int DeliverFrame(unsigned char* buffer, int buffer_size) = 0;
+
+ protected:
+ virtual ~ExternalRenderer() {}
+};
+
+namespace media {
+
+class RTCVideoDecoder : public VideoDecoder,
+ public ExternalRenderer {
+ public:
+ RTCVideoDecoder(MessageLoop* message_loop, const std::string& url);
+ virtual ~RTCVideoDecoder();
+
+ // Filter implementation.
+ virtual void Play(FilterCallback* callback);
+ virtual void Seek(base::TimeDelta time, FilterCallback* callback);
+ virtual void Pause(FilterCallback* callback);
+ virtual void Stop(FilterCallback* callback);
+
+ // Decoder implementation.
+ virtual void Initialize(DemuxerStream* demuxer_stream,
+ FilterCallback* filter_callback,
+ StatisticsCallback* stat_callback);
+ virtual const MediaFormat& media_format();
+ virtual void ProduceVideoFrame(scoped_refptr<VideoFrame> video_frame);
+ virtual bool ProvidesBuffer();
+
+ // ExternalRenderer implementation
+ virtual int FrameSizeChange(unsigned int width,
+ unsigned int height,
+ unsigned int number_of_streams);
+
+ virtual int DeliverFrame(unsigned char* buffer,
+ int buffer_size);
+
+ // TODO(ronghuawu): maybe move this function to a
+ // base class (RawVideoDecoder) so that the camera preview may share this.
+ static bool IsUrlSupported(const std::string& url);
+
+ private:
+ friend class RTCVideoDecoderTest;
+ FRIEND_TEST_ALL_PREFIXES(RTCVideoDecoderTest, Initialize_Successful);
+ FRIEND_TEST_ALL_PREFIXES(RTCVideoDecoderTest, DoSeek);
+ FRIEND_TEST_ALL_PREFIXES(RTCVideoDecoderTest, DoDeliverFrame);
+ FRIEND_TEST_ALL_PREFIXES(RTCVideoDecoderTest, DoFrameSizeChange);
+
+ enum DecoderState {
+ kUnInitialized,
+ kNormal,
+ kSeeking,
+ kPaused,
+ kStopped
+ };
+
+ MessageLoop* message_loop_;
+ size_t width_;
+ size_t height_;
+ std::string url_;
+ DecoderState state_;
+ MediaFormat media_format_;
+ std::deque<scoped_refptr<VideoFrame> > frame_queue_available_;
+ // Used for accessing frame queue from another thread.
+ base::Lock lock_;
+
+ DISALLOW_COPY_AND_ASSIGN(RTCVideoDecoder);
+};
+
+} // namespace media
+
+#endif // MEDIA_FILTERS_RTC_VIDEO_DECODER_H_
+
diff --git a/media/filters/rtc_video_decoder_unittest.cc b/media/filters/rtc_video_decoder_unittest.cc
new file mode 100644
index 0000000..f3bf704
--- /dev/null
+++ b/media/filters/rtc_video_decoder_unittest.cc
@@ -0,0 +1,170 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <deque>
+
+#include "base/singleton.h"
+#include "base/string_util.h"
+#include "media/base/data_buffer.h"
+#include "media/base/filters.h"
+#include "media/base/limits.h"
+#include "media/base/mock_callback.h"
+#include "media/base/mock_filter_host.h"
+#include "media/base/mock_filters.h"
+#include "media/base/mock_task.h"
+#include "media/base/video_frame.h"
+#include "media/filters/rtc_video_decoder.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::DoAll;
+using ::testing::Message;
+using ::testing::Return;
+using ::testing::ReturnNull;
+using ::testing::SetArgumentPointee;
+using ::testing::StrictMock;
+using ::testing::WithArg;
+using ::testing::Invoke;
+
+namespace media {
+
+class RTCVideoDecoderTest : public testing::Test {
+ protected:
+ static const int kWidth;
+ static const int kHeight;
+ static const char* kUrl;
+ static const PipelineStatistics kStatistics;
+
+ RTCVideoDecoderTest() {
+ MediaFormat media_format;
+ decoder_ = new RTCVideoDecoder(&message_loop_, kUrl);
+ renderer_ = new MockVideoRenderer();
+
+ DCHECK(decoder_);
+
+ // Inject mocks and prepare a demuxer stream.
+ decoder_->set_host(&host_);
+
+ EXPECT_CALL(stats_callback_object_, OnStatistics(_))
+ .Times(AnyNumber());
+ }
+
+ virtual ~RTCVideoDecoderTest() {
+ // Finish up any remaining tasks.
+ message_loop_.RunAllPending();
+ }
+
+ void InitializeDecoderSuccessfully() {
+ // Test successful initialization.
+ decoder_->Initialize(NULL,
+ NewExpectedCallback(), NewStatisticsCallback());
+ message_loop_.RunAllPending();
+ }
+
+ StatisticsCallback* NewStatisticsCallback() {
+ return NewCallback(&stats_callback_object_,
+ &MockStatisticsCallback::OnStatistics);
+ }
+
+ // Fixture members.
+ scoped_refptr<RTCVideoDecoder> decoder_;
+ scoped_refptr<MockVideoRenderer> renderer_;
+ MockStatisticsCallback stats_callback_object_;
+ StrictMock<MockFilterHost> host_;
+ MessageLoop message_loop_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(RTCVideoDecoderTest);
+};
+
+const int RTCVideoDecoderTest::kWidth = 176;
+const int RTCVideoDecoderTest::kHeight = 144;
+const char* RTCVideoDecoderTest::kUrl = "media://remote/0";
+const PipelineStatistics RTCVideoDecoderTest::kStatistics;
+
+TEST_F(RTCVideoDecoderTest, Initialize_Successful) {
+ InitializeDecoderSuccessfully();
+
+ // Test that the output media format is an uncompressed video surface that
+ // matches the dimensions specified by rtc.
+ const MediaFormat& media_format = decoder_->media_format();
+ int width = 0;
+ int height = 0;
+ EXPECT_TRUE(media_format.GetAsInteger(MediaFormat::kWidth, &width));
+ EXPECT_EQ(kWidth, width);
+ EXPECT_TRUE(media_format.GetAsInteger(MediaFormat::kHeight, &height));
+ EXPECT_EQ(kHeight, height);
+}
+
+TEST_F(RTCVideoDecoderTest, DoSeek) {
+ const base::TimeDelta kZero;
+
+ InitializeDecoderSuccessfully();
+
+ decoder_->set_consume_video_frame_callback(
+ NewCallback(renderer_.get(), &MockVideoRenderer::ConsumeVideoFrame));
+
+ // Expect Seek and verify the results.
+ EXPECT_CALL(*renderer_.get(), ConsumeVideoFrame(_))
+ .Times(Limits::kMaxVideoFrames);
+ decoder_->Seek(kZero, NewExpectedCallback());
+
+ message_loop_.RunAllPending();
+ EXPECT_EQ(RTCVideoDecoder::kNormal, decoder_->state_);
+}
+
+TEST_F(RTCVideoDecoderTest, DoDeliverFrame) {
+ const base::TimeDelta kZero;
+ EXPECT_CALL(host_, GetTime()).WillRepeatedly(Return(base::TimeDelta()));
+
+ InitializeDecoderSuccessfully();
+
+ // Pass the frame back to decoder
+ decoder_->set_consume_video_frame_callback(
+ NewCallback(decoder_.get(), &RTCVideoDecoder::ProduceVideoFrame));
+ decoder_->Seek(kZero, NewExpectedCallback());
+
+ decoder_->set_consume_video_frame_callback(
+ NewCallback(renderer_.get(), &MockVideoRenderer::ConsumeVideoFrame));
+ EXPECT_CALL(*renderer_.get(), ConsumeVideoFrame(_))
+ .Times(Limits::kMaxVideoFrames);
+
+ unsigned int video_frame_size = decoder_->width_*decoder_->height_*3/2;
+ unsigned char* video_frame = new unsigned char[video_frame_size];
+
+ for (size_t i = 0; i < Limits::kMaxVideoFrames; ++i) {
+ decoder_->DeliverFrame(video_frame, video_frame_size);
+ }
+ delete [] video_frame;
+
+ message_loop_.RunAllPending();
+ EXPECT_EQ(RTCVideoDecoder::kNormal, decoder_->state_);
+}
+
+TEST_F(RTCVideoDecoderTest, DoFrameSizeChange) {
+ InitializeDecoderSuccessfully();
+
+ int new_width = kWidth * 2;
+ int new_height = kHeight * 2;
+ int new_number_of_streams = 0;
+
+ EXPECT_CALL(host_,
+ SetVideoSize(new_width, new_height)).WillRepeatedly(Return());
+
+ decoder_->FrameSizeChange(new_width, new_height, new_number_of_streams);
+
+ const MediaFormat& media_format = decoder_->media_format();
+ int width = 0;
+ int height = 0;
+ EXPECT_TRUE(media_format.GetAsInteger(MediaFormat::kWidth, &width));
+ EXPECT_EQ(new_width, width);
+ EXPECT_TRUE(media_format.GetAsInteger(MediaFormat::kHeight, &height));
+ EXPECT_EQ(new_height, height);
+
+ message_loop_.RunAllPending();
+}
+
+
+} // namespace media
diff --git a/media/media.gyp b/media/media.gyp
index 311e24f..482e8e8 100644
--- a/media/media.gyp
+++ b/media/media.gyp
@@ -15,6 +15,7 @@
'yuv_convert',
'../base/base.gyp:base',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ '../build/temp_gyp/googleurl.gyp:googleurl',
],
'include_dirs': [
'..',
@@ -161,6 +162,8 @@
'filters/null_audio_renderer.h',
'filters/null_video_renderer.cc',
'filters/null_video_renderer.h',
+ 'filters/rtc_video_decoder.cc',
+ 'filters/rtc_video_decoder.h',
'filters/video_renderer_base.cc',
'filters/video_renderer_base.h',
'video/ffmpeg_video_allocator.cc',
@@ -335,6 +338,7 @@
'../base/base.gyp:base',
'../base/base.gyp:base_i18n',
'../base/base.gyp:test_support_base',
+ '../build/temp_gyp/googleurl.gyp:googleurl',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
@@ -381,6 +385,7 @@
'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
'filters/ffmpeg_video_decoder_unittest.cc',
'filters/file_data_source_unittest.cc',
+ 'filters/rtc_video_decoder_unittest.cc',
'filters/video_renderer_base_unittest.cc',
'omx/mock_omx.cc',
'omx/mock_omx.h',