 content/renderer/media/capture_video_decoder.cc          |  50
 content/renderer/media/capture_video_decoder.h           |   3
 content/renderer/media/capture_video_decoder_unittest.cc | 132
 3 files changed, 131 insertions(+), 54 deletions(-)
diff --git a/content/renderer/media/capture_video_decoder.cc b/content/renderer/media/capture_video_decoder.cc
index 4d1f683..eef752e 100644
--- a/content/renderer/media/capture_video_decoder.cc
+++ b/content/renderer/media/capture_video_decoder.cc
@@ -119,14 +119,17 @@ void CaptureVideoDecoder::OnBufferReady(
 void CaptureVideoDecoder::OnDeviceInfoReceived(
     media::VideoCapture* capture,
     const media::VideoCaptureParams& device_info) {
-  NOTIMPLEMENTED();
+  message_loop_proxy_->PostTask(
+      FROM_HERE,
+      base::Bind(&CaptureVideoDecoder::OnDeviceInfoReceivedOnDecoderThread,
+                 this, capture, device_info));
 }

 void CaptureVideoDecoder::InitializeOnDecoderThread(
     media::DemuxerStream* demuxer_stream,
     const base::Closure& filter_callback,
     const media::StatisticsCallback& stat_callback) {
-  VLOG(1) << "InitializeOnDecoderThread.";
+  DVLOG(1) << "InitializeOnDecoderThread";
   DCHECK(message_loop_proxy_->BelongsToCurrentThread());

   capture_engine_ = vc_manager_->AddDevice(video_stream_id_, this);
@@ -143,20 +146,20 @@ void CaptureVideoDecoder::ReadOnDecoderThread(const ReadCB& callback) {
 }

 void CaptureVideoDecoder::PlayOnDecoderThread(const base::Closure& callback) {
-  VLOG(1) << "PlayOnDecoderThread.";
+  DVLOG(1) << "PlayOnDecoderThread";
   DCHECK(message_loop_proxy_->BelongsToCurrentThread());
   callback.Run();
 }

 void CaptureVideoDecoder::PauseOnDecoderThread(const base::Closure& callback) {
-  VLOG(1) << "PauseOnDecoderThread.";
+  DVLOG(1) << "PauseOnDecoderThread";
   DCHECK(message_loop_proxy_->BelongsToCurrentThread());
   state_ = kPaused;
   media::VideoDecoder::Pause(callback);
 }

 void CaptureVideoDecoder::StopOnDecoderThread(const base::Closure& callback) {
-  VLOG(1) << "StopOnDecoderThread.";
+  DVLOG(1) << "StopOnDecoderThread";
   DCHECK(message_loop_proxy_->BelongsToCurrentThread());
   pending_stop_cb_ = callback;
   state_ = kStopped;
@@ -165,7 +168,7 @@ void CaptureVideoDecoder::StopOnDecoderThread(const base::Closure& callback) {

 void CaptureVideoDecoder::SeekOnDecoderThread(base::TimeDelta time,
                                               const media::FilterStatusCB& cb) {
-  VLOG(1) << "SeekOnDecoderThread.";
+  DVLOG(1) << "SeekOnDecoderThread";
   DCHECK(message_loop_proxy_->BelongsToCurrentThread());

   cb.Run(media::PIPELINE_OK);
@@ -175,13 +178,24 @@ void CaptureVideoDecoder::SeekOnDecoderThread(base::TimeDelta time,

 void CaptureVideoDecoder::OnStoppedOnDecoderThread(
     media::VideoCapture* capture) {
-  VLOG(1) << "OnStoppedOnDecoderThread.";
+  DVLOG(1) << "OnStoppedOnDecoderThread";
   DCHECK(message_loop_proxy_->BelongsToCurrentThread());
   if (!pending_stop_cb_.is_null())
     media::ResetAndRunCB(&pending_stop_cb_);
   vc_manager_->RemoveDevice(video_stream_id_, this);
 }

+void CaptureVideoDecoder::OnDeviceInfoReceivedOnDecoderThread(
+    media::VideoCapture* capture,
+    const media::VideoCaptureParams& device_info) {
+  DCHECK(message_loop_proxy_->BelongsToCurrentThread());
+  if (device_info.width != natural_size_.width() ||
+      device_info.height != natural_size_.height()) {
+    natural_size_.SetSize(device_info.width, device_info.height);
+    host()->SetNaturalVideoSize(natural_size_);
+  }
+}
+
 void CaptureVideoDecoder::OnBufferReadyOnDecoderThread(
     media::VideoCapture* capture,
     scoped_refptr<media::VideoCapture::VideoFrameBuffer> buf) {
@@ -192,9 +206,11 @@ void CaptureVideoDecoder::OnBufferReadyOnDecoderThread(
     return;
   }

-  if (buf->width != capability_.width || buf->height != capability_.height) {
-    capability_.width = buf->width;
-    capability_.height = buf->height;
+  // TODO(wjia): should we always expect device to send device info before
+  // any buffer, and buffers should have dimension stated in device info?
+  // Or should we be flexible as in following code?
+  if (buf->width != natural_size_.width() ||
+      buf->height != natural_size_.height()) {
     natural_size_.SetSize(buf->width, buf->height);
     host()->SetNaturalVideoSize(natural_size_);
   }
@@ -211,13 +227,13 @@ void CaptureVideoDecoder::OnBufferReadyOnDecoderThread(

   uint8* buffer = buf->memory_pointer;

-  // Assume YV12 format.
-  // TODO(vrk): This DCHECK fails in content_unittests ... it should not!
-  // DCHECK(capability_.raw_type == media::VideoFrame::YV12);
-  int y_width = capability_.width;
-  int y_height = capability_.height;
-  int uv_width = capability_.width / 2;
-  int uv_height = capability_.height / 2;  // YV12 format.
+  // Assume YV12 format. Note that camera gives YUV and media pipeline video
+  // renderer asks for YVU. The following code did the conversion.
+  DCHECK_EQ(capability_.raw_type, media::VideoFrame::I420);
+  int y_width = buf->width;
+  int y_height = buf->height;
+  int uv_width = buf->width / 2;
+  int uv_height = buf->height / 2;  // YV12 format.
   CopyYPlane(buffer, y_width, y_height, video_frame);
   buffer += y_width * y_height;
   CopyUPlane(buffer, uv_width, uv_height, video_frame);
diff --git a/content/renderer/media/capture_video_decoder.h b/content/renderer/media/capture_video_decoder.h
index f3e79be..cfcdbad 100644
--- a/content/renderer/media/capture_video_decoder.h
+++ b/content/renderer/media/capture_video_decoder.h
@@ -86,6 +86,9 @@ class CONTENT_EXPORT CaptureVideoDecoder
   void OnBufferReadyOnDecoderThread(
       media::VideoCapture* capture,
       scoped_refptr<media::VideoCapture::VideoFrameBuffer> buf);
+  void OnDeviceInfoReceivedOnDecoderThread(
+      media::VideoCapture* capture,
+      const media::VideoCaptureParams& device_info);

   // Delivers the frame to |read_cb_| and resets the callback.
   void DeliverFrame(const scoped_refptr<media::VideoFrame>& video_frame);
diff --git a/content/renderer/media/capture_video_decoder_unittest.cc b/content/renderer/media/capture_video_decoder_unittest.cc
index 1b7cd11..ad785a7 100644
--- a/content/renderer/media/capture_video_decoder_unittest.cc
+++ b/content/renderer/media/capture_video_decoder_unittest.cc
@@ -12,6 +12,7 @@
 #include "media/base/mock_filter_host.h"
 #include "media/base/mock_filters.h"
 #include "media/base/pipeline_status.h"
+#include "media/video/capture/video_capture_types.h"
 #include "testing/gtest/include/gtest/gtest.h"

 using ::testing::_;
@@ -19,6 +20,9 @@ using ::testing::AnyNumber;
 using ::testing::Return;
 using ::testing::StrictMock;

+static const int kWidth = 176;
+static const int kHeight = 144;
+static const int kFPS = 30;
 static const media::VideoCaptureSessionId kVideoStreamId = 1;

 ACTION_P3(CreateDataBufferFromCapture, decoder, vc_impl, data_buffer_number) {
@@ -85,9 +89,9 @@ class CaptureVideoDecoderTest : public ::testing::Test {
         base::MessageLoopProxy::current().get();
     vc_manager_ = new MockVideoCaptureImplManager();
     media::VideoCapture::VideoCaptureCapability capability;
-    capability.width = 176;
-    capability.height = 144;
-    capability.max_fps = 30;
+    capability.width = kWidth;
+    capability.height = kHeight;
+    capability.max_fps = kFPS;
     capability.expected_capture_delay = 0;
     capability.raw_type = media::VideoFrame::I420;
     capability.interlaced = false;
@@ -100,6 +104,9 @@ class CaptureVideoDecoderTest : public ::testing::Test {

     read_cb_ = base::Bind(&CaptureVideoDecoderTest::FrameReady,
                           base::Unretained(this));
+
+    vc_impl_.reset(new MockVideoCaptureImpl(
+        kVideoStreamId, message_loop_proxy_, new VideoCaptureMessageFilter()));
   }

   virtual ~CaptureVideoDecoderTest() {
@@ -111,11 +118,60 @@ class CaptureVideoDecoderTest : public ::testing::Test {
                       base::Unretained(&statistics_callback_object_));
   }

+  void Initialize() {
+    EXPECT_CALL(*vc_manager_, AddDevice(_, _))
+        .WillOnce(Return(vc_impl_.get()));
+    decoder_->Initialize(NULL,
+                         media::NewExpectedClosure(),
+                         NewStatisticsCallback());
+    message_loop_->RunAllPending();
+  }
+
+  void Start() {
+    // Issue a read.
+    EXPECT_CALL(*this, FrameReady(_));
+    decoder_->Read(read_cb_);
+
+    // Issue a seek.
+    int buffer_count = 1;
+    EXPECT_CALL(*vc_impl_, StartCapture(capture_client(), _))
+        .Times(1)
+        .WillOnce(CreateDataBufferFromCapture(capture_client(),
+                                              vc_impl_.get(),
+                                              buffer_count));
+    EXPECT_CALL(*vc_impl_, FeedBuffer(_))
+        .Times(buffer_count)
+        .WillRepeatedly(DeleteDataBuffer());
+    decoder_->Seek(base::TimeDelta(),
+                   media::NewExpectedStatusCB(media::PIPELINE_OK));
+    message_loop_->RunAllPending();
+  }
+
+  void Play() {
+    decoder_->Play(media::NewExpectedClosure());
+    message_loop_->RunAllPending();
+  }
+
+  void Stop() {
+    EXPECT_CALL(*vc_impl_, StopCapture(capture_client()))
+        .Times(1)
+        .WillOnce(CaptureStopped(capture_client(), vc_impl_.get()));
+    EXPECT_CALL(*vc_manager_, RemoveDevice(_, _))
+        .WillOnce(Return());
+    decoder_->Stop(media::NewExpectedClosure());
+    message_loop_->RunAllPending();
+  }
+
+  media::VideoCapture::EventHandler* capture_client() {
+    return static_cast<media::VideoCapture::EventHandler*>(decoder_);
+  }
+
   MOCK_METHOD1(FrameReady, void(scoped_refptr<media::VideoFrame>));

   // Fixture members.
   scoped_refptr<CaptureVideoDecoder> decoder_;
   scoped_refptr<MockVideoCaptureImplManager> vc_manager_;
+  scoped_ptr<MockVideoCaptureImpl> vc_impl_;
   media::MockStatisticsCallback statistics_callback_object_;
   StrictMock<media::MockFilterHost> host_;
   scoped_ptr<MessageLoop> message_loop_;
@@ -126,42 +182,44 @@ class CaptureVideoDecoderTest : public ::testing::Test {
   DISALLOW_COPY_AND_ASSIGN(CaptureVideoDecoderTest);
 };

+TEST_F(CaptureVideoDecoderTest, Initialize) {
+  // Test basic initialize and teardown.
+  Initialize();
+
+  // Natural size should be initialized to default capability.
+  EXPECT_EQ(kWidth, decoder_->natural_size().width());
+  EXPECT_EQ(kHeight, decoder_->natural_size().height());
+
+  Stop();
+}
+
 TEST_F(CaptureVideoDecoderTest, Play) {
-  int data_buffer_number = 1;
-  media::VideoCapture::EventHandler* capture_client =
-      static_cast<media::VideoCapture::EventHandler*>(decoder_);
-  scoped_ptr<MockVideoCaptureImpl> vc_impl(
-      new MockVideoCaptureImpl(kVideoStreamId,
-                               message_loop_proxy_,
-                               new VideoCaptureMessageFilter()));
-
-  EXPECT_CALL(*vc_manager_, AddDevice(_, _))
-      .WillOnce(Return(vc_impl.get()));
-  decoder_->Initialize(NULL,
-                       media::NewExpectedClosure(),
-                       NewStatisticsCallback());
-  message_loop_->RunAllPending();
+  // Test basic initialize, play, and teardown sequence.
+  Initialize();
+  Start();
+  Play();
+  Stop();
+}

-  EXPECT_CALL(*this, FrameReady(_));
-  decoder_->Read(read_cb_);
-
-  EXPECT_CALL(*vc_impl, StartCapture(capture_client, _))
-      .Times(1)
-      .WillOnce(CreateDataBufferFromCapture(capture_client, vc_impl.get(),
-                                            data_buffer_number));
-  EXPECT_CALL(*vc_impl, FeedBuffer(_))
-      .Times(data_buffer_number)
-      .WillRepeatedly(DeleteDataBuffer());
-  decoder_->Seek(base::TimeDelta(),
-                 media::NewExpectedStatusCB(media::PIPELINE_OK));
-  decoder_->Play(media::NewExpectedClosure());
-  message_loop_->RunAllPending();
+TEST_F(CaptureVideoDecoderTest, OnDeviceInfoReceived) {
+  // Test that natural size gets updated as device information is sent.
+  Initialize();
+  Start();
+
+  gfx::Size expected_size(kWidth * 2, kHeight * 2);

-  EXPECT_CALL(*vc_impl, StopCapture(capture_client))
-      .Times(1)
-      .WillOnce(CaptureStopped(capture_client, vc_impl.get()));
-  EXPECT_CALL(*vc_manager_, RemoveDevice(_, _))
-      .WillOnce(Return());
-  decoder_->Stop(media::NewExpectedClosure());
+  media::VideoCaptureParams params;
+  params.width = expected_size.width();
+  params.height = expected_size.height();
+  params.frame_per_second = kFPS;
+  params.session_id = kVideoStreamId;
+
+  EXPECT_CALL(host_, SetNaturalVideoSize(expected_size));
+  decoder_->OnDeviceInfoReceived(vc_impl_.get(), params);
   message_loop_->RunAllPending();
+
+  EXPECT_EQ(expected_size.width(), decoder_->natural_size().width());
+  EXPECT_EQ(expected_size.height(), decoder_->natural_size().height());
+
+  Stop();
 }
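
The comment added to OnBufferReadyOnDecoderThread notes that the capture device hands over planar YUV (I420: Y plane, then U, then V) while the media pipeline's video renderer expects YVU ordering (YV12: Y plane, then V, then U), with each chroma plane subsampled to width/2 x height/2; the actual plane copies stay in the existing CopyYPlane/CopyUPlane/CopyVPlane helpers. As a minimal standalone sketch of what that reordering amounts to (the function below and its packed, unpadded-buffer assumption are illustrative, not the Chromium helpers):

  // Hypothetical helper: repack an I420 buffer (Y, U, V) as YV12 (Y, V, U).
  // Assumes tightly packed planes with no row padding.
  #include <cstdint>
  #include <cstring>
  #include <vector>

  std::vector<uint8_t> I420ToYV12(const uint8_t* src, int width, int height) {
    const size_t y_size = static_cast<size_t>(width) * height;
    const size_t uv_size = static_cast<size_t>(width / 2) * (height / 2);
    std::vector<uint8_t> dst(y_size + 2 * uv_size);
    std::memcpy(dst.data(), src, y_size);                               // Y plane is unchanged.
    std::memcpy(dst.data() + y_size, src + y_size + uv_size, uv_size);  // Source V becomes the second plane.
    std::memcpy(dst.data() + y_size + uv_size, src + y_size, uv_size);  // Source U becomes the third plane.
    return dst;
  }

For the 176x144 default capability used in the unit test, that is a 25344-byte Y plane followed by two 6336-byte chroma planes.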
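Throughout the patch, the VideoCapture::EventHandler callbacks (OnDeviceInfoReceived, OnBufferReady, OnStopped) may arrive on an arbitrary thread and are bounced onto the decoder thread with message_loop_proxy_->PostTask(FROM_HERE, base::Bind(...)), after which each *OnDecoderThread method asserts DCHECK(message_loop_proxy_->BelongsToCurrentThread()). A rough standard-library sketch of that single-threaded task-runner pattern (not the base::MessageLoopProxy API) looks like:

  #include <condition_variable>
  #include <functional>
  #include <mutex>
  #include <queue>
  #include <thread>

  class DecoderThread {
   public:
    DecoderThread() : running_(true), thread_([this] { Run(); }) {}
    ~DecoderThread() {
      PostTask([this] { running_ = false; });  // Quit task runs on the worker.
      thread_.join();
    }

    // Callable from any thread; |task| runs on the decoder thread, in order.
    void PostTask(std::function<void()> task) {
      {
        std::lock_guard<std::mutex> lock(mutex_);
        tasks_.push(std::move(task));
      }
      cv_.notify_one();
    }

    // Counterpart of MessageLoopProxy::BelongsToCurrentThread().
    bool BelongsToCurrentThread() const {
      return std::this_thread::get_id() == thread_.get_id();
    }

   private:
    void Run() {
      while (running_) {
        std::function<void()> task;
        {
          std::unique_lock<std::mutex> lock(mutex_);
          cv_.wait(lock, [this] { return !tasks_.empty(); });
          task = std::move(tasks_.front());
          tasks_.pop();
        }
        task();  // Run outside the lock so tasks may post further tasks.
      }
    }

    bool running_;
    std::mutex mutex_;
    std::condition_variable cv_;
    std::queue<std::function<void()>> tasks_;
    std::thread thread_;  // Declared last so Run() only sees initialized members.
  };

  // Usage, mirroring OnDeviceInfoReceived() hopping to the decoder thread:
  //   DecoderThread decoder_thread;
  //   decoder_thread.PostTask([&] {
  //     // Same idea as DCHECK(message_loop_proxy_->BelongsToCurrentThread()).
  //     bool on_decoder_thread = decoder_thread.BelongsToCurrentThread();
  //     // ...update the natural size and notify the host here...
  //   });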