55 files changed, 749 insertions, 441 deletions
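The diffs that follow all implement one refactoring: per-frame data that used to be passed alongside each frame as a media::VideoCaptureFormat (frame size, frame rate, pixel format) now travels on the media::VideoFrame itself, as coded_size()/visible_rect() plus media::VideoFrameMetadata entries, which cross the browser/renderer IPC boundary as a base::DictionaryValue. Below is a minimal sketch of the resulting producer/consumer pattern, assuming the VideoFrameMetadata accessors exactly as they are used in the hunks that follow; the two helper functions are illustrative only and are not part of the change.

// Sketch only: the producer side attaches per-frame facts to the frame,
// mirroring ThreadSafeCaptureOracle::DidCaptureFrame in this change.
#include "base/memory/ref_counted.h"
#include "base/time/time.h"
#include "media/base/video_frame.h"
#include "media/base/video_frame_metadata.h"

void AttachCaptureMetadata(const scoped_refptr<media::VideoFrame>& frame,
                           double frame_rate,
                           base::TimeTicks capture_begin_time) {
  frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
                               frame_rate);
  frame->metadata()->SetTimeTicks(
      media::VideoFrameMetadata::CAPTURE_BEGIN_TIME, capture_begin_time);
  frame->metadata()->SetTimeTicks(
      media::VideoFrameMetadata::CAPTURE_END_TIME, base::TimeTicks::Now());
}

// Consumer side, mirroring VideoTrackAdapter: read the frame rate back and
// fall back to a caller-supplied default when the producer did not set it.
double GetFrameRateOrDefault(const scoped_refptr<media::VideoFrame>& frame,
                             double default_frame_rate) {
  double frame_rate = 0.0;
  if (!frame->metadata()->GetDouble(media::VideoFrameMetadata::FRAME_RATE,
                                    &frame_rate)) {
    frame_rate = default_frame_rate;
  }
  return frame_rate;
}

Carrying this data on the frame removes the ambiguity the old code flagged in its TODO, where VideoCaptureFormat::frame_size sometimes meant coded size and sometimes visible size.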
diff --git a/chrome/renderer/media/cast_receiver_session_delegate.cc b/chrome/renderer/media/cast_receiver_session_delegate.cc index 400dbf3..5fc5844 100644 --- a/chrome/renderer/media/cast_receiver_session_delegate.cc +++ b/chrome/renderer/media/cast_receiver_session_delegate.cc @@ -96,6 +96,6 @@ void CastReceiverSessionDelegate::OnDecodedVideoFrame( bool is_continous) { if (frame_callback_.is_null()) return; - frame_callback_.Run(video_frame, format_, playout_time); + frame_callback_.Run(video_frame, playout_time); cast_receiver_->RequestDecodedVideoFrame(on_video_decoded_cb_); } diff --git a/chrome/renderer/media/cast_rtp_stream.cc b/chrome/renderer/media/cast_rtp_stream.cc index a3f970d..6cc8e6f 100644 --- a/chrome/renderer/media/cast_rtp_stream.cc +++ b/chrome/renderer/media/cast_rtp_stream.cc @@ -272,7 +272,6 @@ class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>, const scoped_refptr<media::cast::VideoFrameInput> frame_input, // These parameters are passed for each frame. const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time) { base::TimeTicks timestamp; if (estimated_capture_time.is_null()) diff --git a/content/browser/media/capture/content_video_capture_device_core.cc b/content/browser/media/capture/content_video_capture_device_core.cc index b470bcd..d9956f1 100644 --- a/content/browser/media/capture/content_video_capture_device_core.cc +++ b/content/browser/media/capture/content_video_capture_device_core.cc @@ -25,6 +25,7 @@ #include "media/base/bind_to_current_loop.h" #include "media/base/video_capture_types.h" #include "media/base/video_frame.h" +#include "media/base/video_frame_metadata.h" #include "media/base/video_util.h" #include "ui/gfx/geometry/rect.h" @@ -71,6 +72,9 @@ bool ThreadSafeCaptureOracle::ObserveEventAndDecideCapture( base::TimeTicks event_time, scoped_refptr<media::VideoFrame>* storage, CaptureFrameCallback* callback) { + // Grab the current time before waiting to acquire the |lock_|. + const base::TimeTicks capture_begin_time = base::TimeTicks::Now(); + base::AutoLock guard(lock_); if (!client_) @@ -142,7 +146,8 @@ bool ThreadSafeCaptureOracle::ObserveEventAndDecideCapture( *callback = base::Bind(&ThreadSafeCaptureOracle::DidCaptureFrame, this, frame_number, - output_buffer); + output_buffer, + capture_begin_time); return true; } @@ -189,6 +194,7 @@ void ThreadSafeCaptureOracle::ReportError(const std::string& reason) { void ThreadSafeCaptureOracle::DidCaptureFrame( int frame_number, const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>& buffer, + base::TimeTicks capture_begin_time, const scoped_refptr<media::VideoFrame>& frame, base::TimeTicks timestamp, bool success) { @@ -202,13 +208,14 @@ void ThreadSafeCaptureOracle::DidCaptureFrame( if (success) { if (oracle_.CompleteCapture(frame_number, &timestamp)) { - media::VideoCaptureFormat format = params_.requested_format; - // TODO(miu): Passing VideoCaptureFormat here introduces ambiguities. The - // following is a hack where frame_size takes on a different meaning than - // everywhere else (i.e., coded size, not visible size). Will fix in - // soon-upcoming code change. - format.frame_size = frame->coded_size(); - client_->OnIncomingCapturedVideoFrame(buffer, format, frame, timestamp); + // TODO(miu): Use the locked-in frame rate from AnimatedContentSampler. 
+ frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, + params_.requested_format.frame_rate); + frame->metadata()->SetTimeTicks( + media::VideoFrameMetadata::CAPTURE_BEGIN_TIME, capture_begin_time); + frame->metadata()->SetTimeTicks( + media::VideoFrameMetadata::CAPTURE_END_TIME, base::TimeTicks::Now()); + client_->OnIncomingCapturedVideoFrame(buffer, frame, timestamp); } } } diff --git a/content/browser/media/capture/content_video_capture_device_core.h b/content/browser/media/capture/content_video_capture_device_core.h index f56c109..1543028 100644 --- a/content/browser/media/capture/content_video_capture_device_core.h +++ b/content/browser/media/capture/content_video_capture_device_core.h @@ -83,6 +83,7 @@ class ThreadSafeCaptureOracle void DidCaptureFrame( int frame_number, const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>& buffer, + base::TimeTicks capture_begin_time, const scoped_refptr<media::VideoFrame>& frame, base::TimeTicks timestamp, bool success); diff --git a/content/browser/media/capture/desktop_capture_device_aura_unittest.cc b/content/browser/media/capture/desktop_capture_device_aura_unittest.cc index fb06dad..9f259eb 100644 --- a/content/browser/media/capture/desktop_capture_device_aura_unittest.cc +++ b/content/browser/media/capture/desktop_capture_device_aura_unittest.cc @@ -44,9 +44,8 @@ class MockDeviceClient : public media::VideoCaptureDevice::Client { MOCK_METHOD2(ReserveOutputBuffer, scoped_refptr<Buffer>(media::VideoFrame::Format format, const gfx::Size& dimensions)); - MOCK_METHOD4(OnIncomingCapturedVideoFrame, + MOCK_METHOD3(OnIncomingCapturedVideoFrame, void(const scoped_refptr<Buffer>& buffer, - const media::VideoCaptureFormat& buffer_format, const scoped_refptr<media::VideoFrame>& frame, const base::TimeTicks& timestamp)); MOCK_METHOD1(OnError, void(const std::string& reason)); diff --git a/content/browser/media/capture/desktop_capture_device_unittest.cc b/content/browser/media/capture/desktop_capture_device_unittest.cc index 842b57e..49a4dfd 100644 --- a/content/browser/media/capture/desktop_capture_device_unittest.cc +++ b/content/browser/media/capture/desktop_capture_device_unittest.cc @@ -62,9 +62,8 @@ class MockDeviceClient : public media::VideoCaptureDevice::Client { MOCK_METHOD2(ReserveOutputBuffer, scoped_refptr<Buffer>(media::VideoFrame::Format format, const gfx::Size& dimensions)); - MOCK_METHOD4(OnIncomingCapturedVideoFrame, + MOCK_METHOD3(OnIncomingCapturedVideoFrame, void(const scoped_refptr<Buffer>& buffer, - const media::VideoCaptureFormat& buffer_format, const scoped_refptr<media::VideoFrame>& frame, const base::TimeTicks& timestamp)); MOCK_METHOD1(OnError, void(const std::string& reason)); diff --git a/content/browser/media/capture/web_contents_video_capture_device_unittest.cc b/content/browser/media/capture/web_contents_video_capture_device_unittest.cc index 59dffc4..84813da 100644 --- a/content/browser/media/capture/web_contents_video_capture_device_unittest.cc +++ b/content/browser/media/capture/web_contents_video_capture_device_unittest.cc @@ -343,15 +343,18 @@ class StubClient : public media::VideoCaptureDevice::Client { void OnIncomingCapturedVideoFrame( const scoped_refptr<Buffer>& buffer, - const media::VideoCaptureFormat& buffer_format, const scoped_refptr<media::VideoFrame>& frame, const base::TimeTicks& timestamp) override { - EXPECT_EQ(gfx::Size(kTestWidth, kTestHeight), buffer_format.frame_size); - EXPECT_EQ(media::PIXEL_FORMAT_I420, buffer_format.pixel_format); + EXPECT_EQ(gfx::Size(kTestWidth, 
kTestHeight), frame->visible_rect().size()); EXPECT_EQ(media::VideoFrame::I420, frame->format()); + double frame_rate = 0; + EXPECT_TRUE( + frame->metadata()->GetDouble(media::VideoFrameMetadata::FRAME_RATE, + &frame_rate)); + EXPECT_EQ(kTestFramesPerSecond, frame_rate); uint8 yuv[3]; for (int plane = 0; plane < 3; ++plane) - yuv[plane] = frame->data(plane)[0]; + yuv[plane] = frame->visible_data(plane)[0]; // TODO(nick): We just look at the first pixel presently, because if // the analysis is too slow, the backlog of frames will grow without bound // and trouble erupts. http://crbug.com/174519 diff --git a/content/browser/renderer_host/media/video_capture_controller.cc b/content/browser/renderer_host/media/video_capture_controller.cc index 7f4a1f6..3fbb2dc 100644 --- a/content/browser/renderer_host/media/video_capture_controller.cc +++ b/content/browser/renderer_host/media/video_capture_controller.cc @@ -175,7 +175,6 @@ class VideoCaptureController::VideoCaptureDeviceClient const gfx::Size& size) override; void OnIncomingCapturedVideoFrame( const scoped_refptr<Buffer>& buffer, - const VideoCaptureFormat& buffer_format, const scoped_refptr<media::VideoFrame>& frame, const base::TimeTicks& timestamp) override; void OnError(const std::string& reason) override; @@ -547,9 +546,9 @@ void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedData( base::TimeDelta(), base::Closure()); DCHECK(frame.get()); + frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, + frame_format.frame_rate); - VideoCaptureFormat format( - dimensions, frame_format.frame_rate, media::PIXEL_FORMAT_I420); BrowserThread::PostTask( BrowserThread::IO, FROM_HERE, @@ -557,7 +556,6 @@ void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedData( &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, controller_, buffer, - format, frame, timestamp)); } @@ -565,7 +563,6 @@ void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedData( void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame( const scoped_refptr<Buffer>& buffer, - const VideoCaptureFormat& buffer_format, const scoped_refptr<media::VideoFrame>& frame, const base::TimeTicks& timestamp) { BrowserThread::PostTask( @@ -575,7 +572,6 @@ VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame( &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, controller_, buffer, - buffer_format, frame, timestamp)); } @@ -606,7 +602,6 @@ VideoCaptureController::~VideoCaptureController() { void VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread( const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>& buffer, - const media::VideoCaptureFormat& buffer_format, const scoped_refptr<media::VideoFrame>& frame, const base::TimeTicks& timestamp) { DCHECK_CURRENTLY_ON(BrowserThread::IO); @@ -614,17 +609,33 @@ void VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread( int count = 0; if (state_ == VIDEO_CAPTURE_STATE_STARTED) { + if (!frame->metadata()->HasKey(media::VideoFrameMetadata::FRAME_RATE)) { + frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, + video_capture_format_.frame_rate); + } + scoped_ptr<base::DictionaryValue> metadata(new base::DictionaryValue()); + frame->metadata()->MergeInternalValuesInto(metadata.get()); + for (const auto& client : controller_clients_) { if (client->session_closed || client->paused) continue; + scoped_ptr<base::DictionaryValue> copy_of_metadata; + if (client == 
controller_clients_.back()) + copy_of_metadata = metadata.Pass(); + else + copy_of_metadata.reset(metadata->DeepCopy()); + if (frame->format() == media::VideoFrame::NATIVE_TEXTURE) { + DCHECK(frame->coded_size() == frame->visible_rect().size()) + << "Textures are always supposed to be tightly packed."; client->event_handler->OnMailboxBufferReady(client->controller_id, buffer->id(), *frame->mailbox_holder(), - buffer_format, - timestamp); - } else { + frame->coded_size(), + timestamp, + copy_of_metadata.Pass()); + } else if (frame->format() == media::VideoFrame::I420) { bool is_new_buffer = client->known_buffers.insert(buffer->id()).second; if (is_new_buffer) { // On the first use of a buffer on a client, share the memory handle. @@ -636,8 +647,12 @@ void VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread( } client->event_handler->OnBufferReady( - client->controller_id, buffer->id(), buffer_format, - frame->visible_rect(), timestamp); + client->controller_id, buffer->id(), frame->coded_size(), + frame->visible_rect(), timestamp, copy_of_metadata.Pass()); + } else { + // VideoFrame format not supported. + NOTREACHED(); + break; } bool inserted = @@ -650,14 +665,17 @@ void VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread( if (!has_received_frames_) { UMA_HISTOGRAM_COUNTS("Media.VideoCapture.Width", - buffer_format.frame_size.width()); + frame->visible_rect().width()); UMA_HISTOGRAM_COUNTS("Media.VideoCapture.Height", - buffer_format.frame_size.height()); + frame->visible_rect().height()); UMA_HISTOGRAM_ASPECT_RATIO("Media.VideoCapture.AspectRatio", - buffer_format.frame_size.width(), - buffer_format.frame_size.height()); - UMA_HISTOGRAM_COUNTS("Media.VideoCapture.FrameRate", - buffer_format.frame_rate); + frame->visible_rect().width(), + frame->visible_rect().height()); + double frame_rate; + if (!frame->metadata()->GetDouble(media::VideoFrameMetadata::FRAME_RATE, + &frame_rate)) + frame_rate = video_capture_format_.frame_rate; + UMA_HISTOGRAM_COUNTS("Media.VideoCapture.FrameRate", frame_rate); has_received_frames_ = true; } diff --git a/content/browser/renderer_host/media/video_capture_controller.h b/content/browser/renderer_host/media/video_capture_controller.h index 43d3004..7b4a75d 100644 --- a/content/browser/renderer_host/media/video_capture_controller.h +++ b/content/browser/renderer_host/media/video_capture_controller.h @@ -133,7 +133,6 @@ class CONTENT_EXPORT VideoCaptureController { // Worker functions on IO thread. Called by the VideoCaptureDeviceClient. 
void DoIncomingCapturedVideoFrameOnIOThread( const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>& buffer, - const media::VideoCaptureFormat& format, const scoped_refptr<media::VideoFrame>& frame, const base::TimeTicks& timestamp); void DoErrorOnIOThread(); diff --git a/content/browser/renderer_host/media/video_capture_controller_event_handler.h b/content/browser/renderer_host/media/video_capture_controller_event_handler.h index 2bddf69..45dbbea 100644 --- a/content/browser/renderer_host/media/video_capture_controller_event_handler.h +++ b/content/browser/renderer_host/media/video_capture_controller_event_handler.h @@ -5,12 +5,15 @@ #ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_CONTROLLER_EVENT_HANDLER_H_ #define CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_CONTROLLER_EVENT_HANDLER_H_ +#include "base/memory/scoped_ptr.h" #include "base/memory/shared_memory.h" #include "base/time/time.h" +#include "base/values.h" #include "content/common/content_export.h" namespace gfx { class Rect; +class Size; } // namespace gfx namespace gpu { @@ -54,16 +57,19 @@ class CONTENT_EXPORT VideoCaptureControllerEventHandler { // A buffer has been filled with I420 video. virtual void OnBufferReady(const VideoCaptureControllerID& id, int buffer_id, - const media::VideoCaptureFormat& format, + const gfx::Size& coded_size, const gfx::Rect& visible_rect, - base::TimeTicks timestamp) = 0; + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata) = 0; // A texture mailbox buffer has been filled with data. - virtual void OnMailboxBufferReady(const VideoCaptureControllerID& id, - int buffer_id, - const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp) = 0; + virtual void OnMailboxBufferReady( + const VideoCaptureControllerID& id, + int buffer_id, + const gpu::MailboxHolder& mailbox_holder, + const gfx::Size& packed_frame_size, + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata) = 0; // The capture session has ended and no more frames will be sent. 
virtual void OnEnded(const VideoCaptureControllerID& id) = 0; diff --git a/content/browser/renderer_host/media/video_capture_controller_unittest.cc b/content/browser/renderer_host/media/video_capture_controller_unittest.cc index abea827..69f83ae 100644 --- a/content/browser/renderer_host/media/video_capture_controller_unittest.cc +++ b/content/browser/renderer_host/media/video_capture_controller_unittest.cc @@ -63,11 +63,13 @@ class MockVideoCaptureControllerEventHandler int buffer_id) override { DoBufferDestroyed(id); } - virtual void OnBufferReady(const VideoCaptureControllerID& id, - int buffer_id, - const media::VideoCaptureFormat& format, - const gfx::Rect& visible_rect, - base::TimeTicks timestamp) override { + virtual void OnBufferReady( + const VideoCaptureControllerID& id, + int buffer_id, + const gfx::Size& coded_size, + const gfx::Rect& visible_rect, + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata) override { DoBufferReady(id); base::MessageLoop::current()->PostTask( FROM_HERE, @@ -78,11 +80,13 @@ class MockVideoCaptureControllerEventHandler buffer_id, 0)); } - virtual void OnMailboxBufferReady(const VideoCaptureControllerID& id, - int buffer_id, - const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp) override { + virtual void OnMailboxBufferReady( + const VideoCaptureControllerID& id, + int buffer_id, + const gpu::MailboxHolder& mailbox_holder, + const gfx::Size& packed_frame_size, + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata) override { DoMailboxBufferReady(id); base::MessageLoop::current()->PostTask( FROM_HERE, @@ -339,9 +343,6 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { } device_->OnIncomingCapturedVideoFrame( buffer, - media::VideoCaptureFormat(capture_resolution, - device_format.frame_rate, - media::PIXEL_FORMAT_I420), WrapI420Buffer(buffer, capture_resolution), base::TimeTicks()); buffer = NULL; @@ -359,9 +360,6 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { memset(buffer->data(), buffer_no++, buffer->size()); device_->OnIncomingCapturedVideoFrame( buffer, - media::VideoCaptureFormat(capture_resolution, - device_format.frame_rate, - media::PIXEL_FORMAT_I420), WrapI420Buffer(buffer, capture_resolution), base::TimeTicks()); buffer = NULL; @@ -390,9 +388,6 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { memset(buffer->data(), buffer_no++, buffer->size()); device_->OnIncomingCapturedVideoFrame( buffer, - media::VideoCaptureFormat(capture_resolution, - device_format.frame_rate, - media::PIXEL_FORMAT_I420), WrapI420Buffer(buffer, capture_resolution), base::TimeTicks()); buffer = NULL; @@ -430,9 +425,6 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { memset(buffer->data(), buffer_no++, buffer->size()); device_->OnIncomingCapturedVideoFrame( buffer, - media::VideoCaptureFormat(capture_resolution, - device_format.frame_rate, - media::PIXEL_FORMAT_I420), WrapI420Buffer(buffer, capture_resolution), base::TimeTicks()); buffer = NULL; @@ -448,9 +440,6 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { memset(buffer->data(), buffer_no++, buffer->size()); device_->OnIncomingCapturedVideoFrame( buffer, - media::VideoCaptureFormat(capture_resolution, - device_format.frame_rate, - media::PIXEL_FORMAT_I420), WrapI420Buffer(buffer, capture_resolution), base::TimeTicks()); buffer = NULL; @@ -481,9 +470,6 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) 
{ ASSERT_TRUE(buffer.get()); device_->OnIncomingCapturedVideoFrame( buffer, - media::VideoCaptureFormat(capture_resolution, - device_format.frame_rate, - media::PIXEL_FORMAT_I420), WrapI420Buffer(buffer, capture_resolution), base::TimeTicks()); buffer = NULL; @@ -499,9 +485,6 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) { #endif device_->OnIncomingCapturedVideoFrame( buffer, - media::VideoCaptureFormat(capture_resolution, - device_format.frame_rate, - media::PIXEL_FORMAT_TEXTURE), WrapMailboxBuffer(make_scoped_ptr(new gpu::MailboxHolder( gpu::Mailbox(), 0, mailbox_syncpoints[i])), base::Bind(&CacheSyncPoint, &release_syncpoints[i]), @@ -568,8 +551,6 @@ TEST_F(VideoCaptureControllerTest, ErrorBeforeDeviceCreation) { device_->OnIncomingCapturedVideoFrame( buffer, - media::VideoCaptureFormat( - capture_resolution, 30, media::PIXEL_FORMAT_I420), WrapI420Buffer(buffer, capture_resolution), base::TimeTicks()); buffer = NULL; @@ -608,8 +589,6 @@ TEST_F(VideoCaptureControllerTest, ErrorAfterDeviceCreation) { device_->OnError("Test error"); device_->OnIncomingCapturedVideoFrame( buffer, - media::VideoCaptureFormat( - dims, device_format.frame_rate, media::PIXEL_FORMAT_I420), WrapI420Buffer(buffer, dims), base::TimeTicks()); buffer = NULL; diff --git a/content/browser/renderer_host/media/video_capture_host.cc b/content/browser/renderer_host/media/video_capture_host.cc index cc340e3..12bb227 100644 --- a/content/browser/renderer_host/media/video_capture_host.cc +++ b/content/browser/renderer_host/media/video_capture_host.cc @@ -5,6 +5,7 @@ #include "content/browser/renderer_host/media/video_capture_host.h" #include "base/bind.h" +#include "base/bind_helpers.h" #include "base/memory/scoped_ptr.h" #include "content/browser/browser_main_loop.h" #include "content/browser/renderer_host/media/media_stream_manager.h" @@ -76,9 +77,10 @@ void VideoCaptureHost::OnBufferDestroyed( void VideoCaptureHost::OnBufferReady( const VideoCaptureControllerID& controller_id, int buffer_id, - const media::VideoCaptureFormat& frame_format, + const gfx::Size& coded_size, const gfx::Rect& visible_rect, - base::TimeTicks timestamp) { + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata) { BrowserThread::PostTask( BrowserThread::IO, FROM_HERE, @@ -86,17 +88,19 @@ void VideoCaptureHost::OnBufferReady( this, controller_id, buffer_id, - frame_format, + coded_size, visible_rect, - timestamp)); + timestamp, + base::Passed(&metadata))); } void VideoCaptureHost::OnMailboxBufferReady( const VideoCaptureControllerID& controller_id, int buffer_id, const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& frame_format, - base::TimeTicks timestamp) { + const gfx::Size& packed_frame_size, + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata) { BrowserThread::PostTask( BrowserThread::IO, FROM_HERE, @@ -105,8 +109,9 @@ void VideoCaptureHost::OnMailboxBufferReady( controller_id, buffer_id, mailbox_holder, - frame_format, - timestamp)); + packed_frame_size, + timestamp, + base::Passed(&metadata))); } void VideoCaptureHost::OnEnded(const VideoCaptureControllerID& controller_id) { @@ -144,31 +149,47 @@ void VideoCaptureHost::DoSendFreeBufferOnIOThread( void VideoCaptureHost::DoSendFilledBufferOnIOThread( const VideoCaptureControllerID& controller_id, int buffer_id, - const media::VideoCaptureFormat& format, + const gfx::Size& coded_size, const gfx::Rect& visible_rect, - base::TimeTicks timestamp) { + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> 
metadata) { DCHECK_CURRENTLY_ON(BrowserThread::IO); if (entries_.find(controller_id) == entries_.end()) return; - Send(new VideoCaptureMsg_BufferReady( - controller_id.device_id, buffer_id, format, visible_rect, timestamp)); + VideoCaptureMsg_BufferReady_Params params; + params.device_id = controller_id.device_id; + params.buffer_id = buffer_id; + params.coded_size = coded_size; + params.visible_rect = visible_rect; + params.timestamp = timestamp; + if (metadata) + params.metadata.Swap(metadata.get()); + Send(new VideoCaptureMsg_BufferReady(params)); } void VideoCaptureHost::DoSendFilledMailboxBufferOnIOThread( const VideoCaptureControllerID& controller_id, int buffer_id, const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp) { + const gfx::Size& packed_frame_size, + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata) { DCHECK_CURRENTLY_ON(BrowserThread::IO); if (entries_.find(controller_id) == entries_.end()) return; - Send(new VideoCaptureMsg_MailboxBufferReady( - controller_id.device_id, buffer_id, mailbox_holder, format, timestamp)); + VideoCaptureMsg_MailboxBufferReady_Params params; + params.device_id = controller_id.device_id; + params.buffer_id = buffer_id; + params.mailbox_holder = mailbox_holder; + params.packed_frame_size = packed_frame_size; + params.timestamp = timestamp; + if (metadata) + params.metadata.Swap(metadata.get()); + Send(new VideoCaptureMsg_MailboxBufferReady(params)); } void VideoCaptureHost::DoHandleErrorOnIOThread( diff --git a/content/browser/renderer_host/media/video_capture_host.h b/content/browser/renderer_host/media/video_capture_host.h index 1fd9813..2434abc 100644 --- a/content/browser/renderer_host/media/video_capture_host.h +++ b/content/browser/renderer_host/media/video_capture_host.h @@ -85,14 +85,17 @@ class CONTENT_EXPORT VideoCaptureHost int buffer_id) override; void OnBufferReady(const VideoCaptureControllerID& id, int buffer_id, - const media::VideoCaptureFormat& format, + const gfx::Size& coded_size, const gfx::Rect& visible_rect, - base::TimeTicks timestamp) override; - void OnMailboxBufferReady(const VideoCaptureControllerID& id, - int buffer_id, - const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp) override; + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata) override; + void OnMailboxBufferReady( + const VideoCaptureControllerID& id, + int buffer_id, + const gpu::MailboxHolder& mailbox_holder, + const gfx::Size& packed_frame_size, + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata) override; void OnEnded(const VideoCaptureControllerID& id) override; private: @@ -159,17 +162,19 @@ class CONTENT_EXPORT VideoCaptureHost void DoSendFilledBufferOnIOThread( const VideoCaptureControllerID& controller_id, int buffer_id, - const media::VideoCaptureFormat& format, + const gfx::Size& coded_size, const gfx::Rect& visible_rect, - base::TimeTicks timestamp); + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata); // Sends a filled texture mailbox buffer to the VideoCaptureMessageFilter. 
void DoSendFilledMailboxBufferOnIOThread( const VideoCaptureControllerID& controller_id, int buffer_id, const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp); + const gfx::Size& packed_frame_size, + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata); // Handles error coming from VideoCaptureDevice. void DoHandleErrorOnIOThread(const VideoCaptureControllerID& controller_id); diff --git a/content/browser/renderer_host/media/video_capture_host_unittest.cc b/content/browser/renderer_host/media/video_capture_host_unittest.cc index 57a47c8..aace519 100644 --- a/content/browser/renderer_host/media/video_capture_host_unittest.cc +++ b/content/browser/renderer_host/media/video_capture_host_unittest.cc @@ -64,23 +64,27 @@ static const int kDeviceId = 555; // verifying the output. class DumpVideo { public: - DumpVideo() : expected_size_(0) {} - void StartDump(int width, int height) { + DumpVideo() {} + const gfx::Size& coded_size() const { return coded_size_; } + void StartDump(const gfx::Size& coded_size) { base::FilePath file_name = base::FilePath(base::StringPrintf( - FILE_PATH_LITERAL("dump_w%d_h%d.yuv"), width, height)); + FILE_PATH_LITERAL("dump_w%d_h%d.yuv"), + coded_size.width(), + coded_size.height())); file_.reset(base::OpenFile(file_name, "wb")); - expected_size_ = media::VideoFrame::AllocationSize( - media::VideoFrame::I420, gfx::Size(width, height)); + coded_size_ = coded_size; } void NewVideoFrame(const void* buffer) { if (file_.get() != NULL) { - ASSERT_EQ(1U, fwrite(buffer, expected_size_, 1, file_.get())); + const int size = media::VideoFrame::AllocationSize( + media::VideoFrame::I420, coded_size_); + ASSERT_EQ(1U, fwrite(buffer, size, 1, file_.get())); } } private: base::ScopedFILE file_; - int expected_size_; + gfx::Size coded_size_; }; class MockMediaStreamRequester : public MediaStreamRequester { @@ -130,18 +134,20 @@ class MockVideoCaptureHost : public VideoCaptureHost { int buffer_id)); MOCK_METHOD2(OnBufferFreed, void(int device_id, int buffer_id)); - MOCK_METHOD5(OnBufferFilled, + MOCK_METHOD6(OnBufferFilled, void(int device_id, int buffer_id, - const media::VideoCaptureFormat& format, + const gfx::Size& coded_size, const gfx::Rect& visible_rect, - base::TimeTicks timestamp)); - MOCK_METHOD5(OnMailboxBufferFilled, + base::TimeTicks timestamp, + const base::DictionaryValue& metadata)); + MOCK_METHOD6(OnMailboxBufferFilled, void(int device_id, int buffer_id, const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp)); + const gfx::Size& packed_frame_size, + base::TimeTicks timestamp, + const base::DictionaryValue& metadata)); MOCK_METHOD2(OnStateChanged, void(int device_id, VideoCaptureState state)); // Use class DumpVideo to write I420 video to file. 
@@ -224,41 +230,34 @@ class MockVideoCaptureHost : public VideoCaptureHost { filled_dib_.erase(it); } - void OnBufferFilledDispatch(int device_id, - int buffer_id, - const media::VideoCaptureFormat& frame_format, - const gfx::Rect& visible_rect, - base::TimeTicks timestamp) { - base::SharedMemory* dib = filled_dib_[buffer_id]; + void OnBufferFilledDispatch( + const VideoCaptureMsg_BufferReady_Params& params) { + base::SharedMemory* dib = filled_dib_[params.buffer_id]; ASSERT_TRUE(dib != NULL); if (dump_video_) { - if (!format_.IsValid()) { - dumper_.StartDump(frame_format.frame_size.width(), - frame_format.frame_size.height()); - format_ = frame_format; - } - ASSERT_EQ(format_.frame_size.width(), frame_format.frame_size.width()) - << "Dump format does not handle variable resolution."; - ASSERT_EQ(format_.frame_size.height(), frame_format.frame_size.height()) + if (dumper_.coded_size().IsEmpty()) + dumper_.StartDump(params.coded_size); + ASSERT_TRUE(dumper_.coded_size() == params.coded_size) << "Dump format does not handle variable resolution."; dumper_.NewVideoFrame(dib->memory()); } - OnBufferFilled(device_id, buffer_id, frame_format, visible_rect, timestamp); + OnBufferFilled(params.device_id, params.buffer_id, params.coded_size, + params.visible_rect, params.timestamp, params.metadata); if (return_buffers_) { - VideoCaptureHost::OnReceiveEmptyBuffer(device_id, buffer_id, 0); + VideoCaptureHost::OnReceiveEmptyBuffer( + params.device_id, params.buffer_id, 0); } } - void OnMailboxBufferFilledDispatch(int device_id, - int buffer_id, - const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp) { - OnMailboxBufferFilled( - device_id, buffer_id, mailbox_holder, format, timestamp); + void OnMailboxBufferFilledDispatch( + const VideoCaptureMsg_MailboxBufferReady_Params& params) { + OnMailboxBufferFilled(params.device_id, params.buffer_id, + params.mailbox_holder, params.packed_frame_size, + params.timestamp, params.metadata); if (return_buffers_) { - VideoCaptureHost::OnReceiveEmptyBuffer(device_id, buffer_id, 0); + VideoCaptureHost::OnReceiveEmptyBuffer( + params.device_id, params.buffer_id, 0); } } @@ -407,7 +406,7 @@ class VideoCaptureHostTest : public testing::Test { .WillRepeatedly(Return()); base::RunLoop run_loop; - EXPECT_CALL(*host_.get(), OnBufferFilled(kDeviceId, _, _, _, _)) + EXPECT_CALL(*host_.get(), OnBufferFilled(kDeviceId, _, _, _, _, _)) .Times(AnyNumber()) .WillOnce(ExitMessageLoop(message_loop_, run_loop.QuitClosure())); @@ -441,7 +440,7 @@ class VideoCaptureHostTest : public testing::Test { .Times(AnyNumber()).WillRepeatedly(Return()); base::RunLoop run_loop; - EXPECT_CALL(*host_, OnBufferFilled(kDeviceId, _, _, _, _)) + EXPECT_CALL(*host_, OnBufferFilled(kDeviceId, _, _, _, _, _)) .Times(AnyNumber()) .WillOnce(ExitMessageLoop(message_loop_, run_loop.QuitClosure())); @@ -473,7 +472,7 @@ class VideoCaptureHostTest : public testing::Test { void NotifyPacketReady() { base::RunLoop run_loop; - EXPECT_CALL(*host_.get(), OnBufferFilled(kDeviceId, _, _, _, _)) + EXPECT_CALL(*host_.get(), OnBufferFilled(kDeviceId, _, _, _, _, _)) .Times(AnyNumber()) .WillOnce(ExitMessageLoop(message_loop_, run_loop.QuitClosure())) .RetiresOnSaturation(); diff --git a/content/browser/renderer_host/media/video_capture_manager_unittest.cc b/content/browser/renderer_host/media/video_capture_manager_unittest.cc index f41984b..126186c 100644 --- a/content/browser/renderer_host/media/video_capture_manager_unittest.cc +++ 
b/content/browser/renderer_host/media/video_capture_manager_unittest.cc @@ -51,16 +51,20 @@ class MockFrameObserver : public VideoCaptureControllerEventHandler { int length, int buffer_id) override {} virtual void OnBufferDestroyed(const VideoCaptureControllerID& id, int buffer_id) override {} - virtual void OnBufferReady(const VideoCaptureControllerID& id, - int buffer_id, - const media::VideoCaptureFormat& format, - const gfx::Rect& visible_rect, - base::TimeTicks timestamp) override {} - virtual void OnMailboxBufferReady(const VideoCaptureControllerID& id, - int buffer_id, - const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp) override {} + virtual void OnBufferReady( + const VideoCaptureControllerID& id, + int buffer_id, + const gfx::Size& coded_size, + const gfx::Rect& visible_rect, + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata) override {} + virtual void OnMailboxBufferReady( + const VideoCaptureControllerID& id, + int buffer_id, + const gpu::MailboxHolder& mailbox_holder, + const gfx::Size& packed_frame_size, + base::TimeTicks timestamp, + scoped_ptr<base::DictionaryValue> metadata) override {} virtual void OnEnded(const VideoCaptureControllerID& id) override {} void OnGotControllerCallback(VideoCaptureControllerID) {} diff --git a/content/common/media/video_capture_messages.h b/content/common/media/video_capture_messages.h index bebe487..77f95bf 100644 --- a/content/common/media/video_capture_messages.h +++ b/content/common/media/video_capture_messages.h @@ -25,6 +25,24 @@ IPC_STRUCT_TRAITS_BEGIN(media::VideoCaptureParams) IPC_STRUCT_TRAITS_MEMBER(resolution_change_policy) IPC_STRUCT_TRAITS_END() +IPC_STRUCT_BEGIN(VideoCaptureMsg_BufferReady_Params) + IPC_STRUCT_MEMBER(int, device_id) + IPC_STRUCT_MEMBER(int, buffer_id) + IPC_STRUCT_MEMBER(gfx::Size, coded_size) + IPC_STRUCT_MEMBER(gfx::Rect, visible_rect) + IPC_STRUCT_MEMBER(base::TimeTicks, timestamp) + IPC_STRUCT_MEMBER(base::DictionaryValue, metadata) +IPC_STRUCT_END() + +IPC_STRUCT_BEGIN(VideoCaptureMsg_MailboxBufferReady_Params) + IPC_STRUCT_MEMBER(int, device_id) + IPC_STRUCT_MEMBER(int, buffer_id) + IPC_STRUCT_MEMBER(gpu::MailboxHolder, mailbox_holder) + IPC_STRUCT_MEMBER(gfx::Size, packed_frame_size) + IPC_STRUCT_MEMBER(base::TimeTicks, timestamp) + IPC_STRUCT_MEMBER(base::DictionaryValue, metadata) +IPC_STRUCT_END() + // TODO(nick): device_id in these messages is basically just a route_id. We // should shift to IPC_MESSAGE_ROUTED and use MessageRouter in the filter impls. @@ -48,21 +66,13 @@ IPC_MESSAGE_CONTROL2(VideoCaptureMsg_FreeBuffer, int /* buffer_id */) // Tell the renderer process that a buffer is available from video capture. -IPC_MESSAGE_CONTROL5(VideoCaptureMsg_BufferReady, - int /* device id */, - int /* buffer_id */, - media::VideoCaptureFormat /* format */, - gfx::Rect /* visible_rect */, - base::TimeTicks /* timestamp */) +IPC_MESSAGE_CONTROL1(VideoCaptureMsg_BufferReady, + VideoCaptureMsg_BufferReady_Params) // Tell the renderer process that a texture mailbox buffer is available from // video capture. 
-IPC_MESSAGE_CONTROL5(VideoCaptureMsg_MailboxBufferReady, - int /* device_id */, - int /* buffer_id */, - gpu::MailboxHolder /* mailbox_holder */, - media::VideoCaptureFormat /* format */, - base::TimeTicks /* timestamp */) +IPC_MESSAGE_CONTROL1(VideoCaptureMsg_MailboxBufferReady, + VideoCaptureMsg_MailboxBufferReady_Params) // Notify the renderer about a device's supported formats; this is a response // to a VideoCaptureHostMsg_GetDeviceSupportedFormats request. diff --git a/content/renderer/media/media_stream_video_capture_source_unittest.cc b/content/renderer/media/media_stream_video_capture_source_unittest.cc index b6f4833..f01f42f 100644 --- a/content/renderer/media/media_stream_video_capture_source_unittest.cc +++ b/content/renderer/media/media_stream_video_capture_source_unittest.cc @@ -172,24 +172,30 @@ TEST_F(MediaStreamVideoCapturerSourceTest, Ended) { class FakeMediaStreamVideoSink : public MediaStreamVideoSink { public: FakeMediaStreamVideoSink(base::TimeTicks* capture_time, + media::VideoFrameMetadata* metadata, base::Closure got_frame_cb) : capture_time_(capture_time), + metadata_(metadata), got_frame_cb_(got_frame_cb) { } void OnVideoFrame(const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& capture_time) { *capture_time_ = capture_time; + metadata_->Clear(); + base::DictionaryValue tmp; + frame->metadata()->MergeInternalValuesInto(&tmp); + metadata_->MergeInternalValuesFrom(tmp); base::ResetAndReturn(&got_frame_cb_).Run(); } private: - base::TimeTicks* capture_time_; + base::TimeTicks* const capture_time_; + media::VideoFrameMetadata* const metadata_; base::Closure got_frame_cb_; }; -TEST_F(MediaStreamVideoCapturerSourceTest, CaptureTime) { +TEST_F(MediaStreamVideoCapturerSourceTest, CaptureTimeAndMetadataPlumbing) { StreamDeviceInfo device_info; device_info.device.type = MEDIA_DESKTOP_VIDEO_CAPTURE; InitWithDeviceInfo(device_info); @@ -213,23 +219,28 @@ TEST_F(MediaStreamVideoCapturerSourceTest, CaptureTime) { base::TimeTicks reference_capture_time = base::TimeTicks::FromInternalValue(60013); base::TimeTicks capture_time; + media::VideoFrameMetadata metadata; FakeMediaStreamVideoSink fake_sink( &capture_time, + &metadata, media::BindToCurrentLoop(run_loop.QuitClosure())); FakeMediaStreamVideoSink::AddToVideoTrack( &fake_sink, base::Bind(&FakeMediaStreamVideoSink::OnVideoFrame, base::Unretained(&fake_sink)), track); + const scoped_refptr<media::VideoFrame> frame = + media::VideoFrame::CreateBlackFrame(gfx::Size(2, 2)); + frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, 30.0); child_process_->io_message_loop()->PostTask( - FROM_HERE, - base::Bind(deliver_frame_cb, - media::VideoFrame::CreateBlackFrame(gfx::Size(2, 2)), - media::VideoCaptureFormat(), - reference_capture_time)); + FROM_HERE, base::Bind(deliver_frame_cb, frame, reference_capture_time)); run_loop.Run(); FakeMediaStreamVideoSink::RemoveFromVideoTrack(&fake_sink, track); EXPECT_EQ(reference_capture_time, capture_time); + double metadata_value; + EXPECT_TRUE(metadata.GetDouble(media::VideoFrameMetadata::FRAME_RATE, + &metadata_value)); + EXPECT_EQ(30.0, metadata_value); } } // namespace content diff --git a/content/renderer/media/media_stream_video_track.cc b/content/renderer/media/media_stream_video_track.cc index 04d8817..8a049a6 100644 --- a/content/renderer/media/media_stream_video_track.cc +++ b/content/renderer/media/media_stream_video_track.cc @@ -45,7 +45,6 @@ class MediaStreamVideoTrack::FrameDeliverer // Triggers all registered 
callbacks with |frame|, |format| and // |estimated_capture_time| as parameters. Must be called on the IO-thread. void DeliverFrameOnIO(const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time); private: @@ -145,16 +144,12 @@ void MediaStreamVideoTrack::FrameDeliverer::SetEnabledOnIO(bool enabled) { void MediaStreamVideoTrack::FrameDeliverer::DeliverFrameOnIO( const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time) { DCHECK(io_message_loop_->BelongsToCurrentThread()); const scoped_refptr<media::VideoFrame>& video_frame = enabled_ ? frame : GetBlackFrame(frame); - - for (std::vector<VideoIdCallbackPair>::iterator it = callbacks_.begin(); - it != callbacks_.end(); ++it) { - it->second.Run(video_frame, format, estimated_capture_time); - } + for (const auto& entry : callbacks_) + entry.second.Run(video_frame, estimated_capture_time); } const scoped_refptr<media::VideoFrame>& diff --git a/content/renderer/media/media_stream_video_track_unittest.cc b/content/renderer/media/media_stream_video_track_unittest.cc index eed4fa29..2fe8503 100644 --- a/content/renderer/media/media_stream_video_track_unittest.cc +++ b/content/renderer/media/media_stream_video_track_unittest.cc @@ -139,7 +139,6 @@ class CheckThreadHelper { void CheckThreadVideoFrameReceiver( CheckThreadHelper* helper, const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time) { // Do nothing. } diff --git a/content/renderer/media/mock_media_stream_video_sink.cc b/content/renderer/media/mock_media_stream_video_sink.cc index 7a054ab..2ded48a 100644 --- a/content/renderer/media/mock_media_stream_video_sink.cc +++ b/content/renderer/media/mock_media_stream_video_sink.cc @@ -29,7 +29,6 @@ MockMediaStreamVideoSink::GetDeliverFrameCB() { void MockMediaStreamVideoSink::DeliverVideoFrame( const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time) { last_frame_ = frame; ++number_of_frames_; diff --git a/content/renderer/media/mock_media_stream_video_sink.h b/content/renderer/media/mock_media_stream_video_sink.h index 2f59fcf..e82020b 100644 --- a/content/renderer/media/mock_media_stream_video_sink.h +++ b/content/renderer/media/mock_media_stream_video_sink.h @@ -38,10 +38,8 @@ class MockMediaStreamVideoSink : public MediaStreamVideoSink { blink::WebMediaStreamSource::ReadyState state() const { return state_; } private: - void DeliverVideoFrame( - const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, - const base::TimeTicks& estimated_capture_time); + void DeliverVideoFrame(const scoped_refptr<media::VideoFrame>& frame, + const base::TimeTicks& estimated_capture_time); int number_of_frames_; bool enabled_; diff --git a/content/renderer/media/mock_media_stream_video_source.cc b/content/renderer/media/mock_media_stream_video_source.cc index 19f7b6f..b99e4c3 100644 --- a/content/renderer/media/mock_media_stream_video_source.cc +++ b/content/renderer/media/mock_media_stream_video_source.cc @@ -78,17 +78,7 @@ void MockMediaStreamVideoSource::DeliverVideoFrame( DCHECK(!frame_callback_.is_null()); io_message_loop()->PostTask( FROM_HERE, - base::Bind(&MockMediaStreamVideoSource::DeliverVideoFrameOnIO, - base::Unretained(this), frame, format_, - base::TimeTicks(), frame_callback_)); -} - -void 
MockMediaStreamVideoSource::DeliverVideoFrameOnIO( - const scoped_refptr<media::VideoFrame>& frame, - media::VideoCaptureFormat format, - const base::TimeTicks& estimated_capture_time, - const VideoCaptureDeliverFrameCB& frame_callback) { - frame_callback.Run(frame, format, estimated_capture_time); + base::Bind(frame_callback_, frame, base::TimeTicks())); } } // namespace content diff --git a/content/renderer/media/mock_media_stream_video_source.h b/content/renderer/media/mock_media_stream_video_source.h index 9602b2f..0ae7507 100644 --- a/content/renderer/media/mock_media_stream_video_source.h +++ b/content/renderer/media/mock_media_stream_video_source.h @@ -50,11 +50,6 @@ class MockMediaStreamVideoSource : public MediaStreamVideoSource { } protected: - void DeliverVideoFrameOnIO(const scoped_refptr<media::VideoFrame>& frame, - media::VideoCaptureFormat format, - const base::TimeTicks& estimated_capture_time, - const VideoCaptureDeliverFrameCB& frame_callback); - // Implements MediaStreamVideoSource. virtual void GetCurrentSupportedFormats( int max_requested_height, diff --git a/content/renderer/media/rtc_video_renderer.cc b/content/renderer/media/rtc_video_renderer.cc index 66190cb..0f32598 100644 --- a/content/renderer/media/rtc_video_renderer.cc +++ b/content/renderer/media/rtc_video_renderer.cc @@ -83,7 +83,6 @@ void RTCVideoRenderer::OnReadyStateChanged( void RTCVideoRenderer::OnVideoFrame( const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time) { DCHECK(message_loop_proxy_->BelongsToCurrentThread()); if (state_ != STARTED) { @@ -109,7 +108,7 @@ void RTCVideoRenderer::RenderSignalingFrame() { // originates from a video camera. scoped_refptr<media::VideoFrame> video_frame = media::VideoFrame::CreateBlackFrame(frame_size_); - OnVideoFrame(video_frame, media::VideoCaptureFormat(), base::TimeTicks()); + OnVideoFrame(video_frame, base::TimeTicks()); } } // namespace content diff --git a/content/renderer/media/rtc_video_renderer.h b/content/renderer/media/rtc_video_renderer.h index 924b003..19550e1 100644 --- a/content/renderer/media/rtc_video_renderer.h +++ b/content/renderer/media/rtc_video_renderer.h @@ -55,7 +55,6 @@ class CONTENT_EXPORT RTCVideoRenderer }; void OnVideoFrame(const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time); // VideoTrackSink implementation. Called on the main thread. diff --git a/content/renderer/media/video_capture_impl.cc b/content/renderer/media/video_capture_impl.cc index 7f395cd..c4796ce 100644 --- a/content/renderer/media/video_capture_impl.cc +++ b/content/renderer/media/video_capture_impl.cc @@ -208,20 +208,17 @@ void VideoCaptureImpl::OnBufferDestroyed(int buffer_id) { } void VideoCaptureImpl::OnBufferReceived(int buffer_id, - const media::VideoCaptureFormat& format, + const gfx::Size& coded_size, const gfx::Rect& visible_rect, - base::TimeTicks timestamp) { + base::TimeTicks timestamp, + const base::DictionaryValue& metadata) { DCHECK(thread_checker_.CalledOnValidThread()); - // The capture pipeline supports only I420 for now. 
- DCHECK_EQ(format.pixel_format, media::PIXEL_FORMAT_I420); - if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_) { Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 0)); return; } - last_frame_format_ = format; if (first_frame_timestamp_.is_null()) first_frame_timestamp_ = timestamp; @@ -238,7 +235,7 @@ void VideoCaptureImpl::OnBufferReceived(int buffer_id, scoped_refptr<media::VideoFrame> frame = media::VideoFrame::WrapExternalPackedMemory( media::VideoFrame::I420, - last_frame_format_.frame_size, + coded_size, visible_rect, gfx::Size(visible_rect.width(), visible_rect.height()), reinterpret_cast<uint8*>(buffer->buffer->memory()), @@ -252,18 +249,18 @@ void VideoCaptureImpl::OnBufferReceived(int buffer_id, buffer_id, buffer, 0))); + frame->metadata()->MergeInternalValuesFrom(metadata); - for (ClientInfoMap::iterator it = clients_.begin(); it != clients_.end(); - ++it) { - it->second.deliver_frame_cb.Run(frame, format, timestamp); - } + for (const auto& entry : clients_) + entry.second.deliver_frame_cb.Run(frame, timestamp); } void VideoCaptureImpl::OnMailboxBufferReceived( int buffer_id, const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp) { + const gfx::Size& packed_frame_size, + base::TimeTicks timestamp, + const base::DictionaryValue& metadata) { DCHECK(thread_checker_.CalledOnValidThread()); if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_) { @@ -271,7 +268,6 @@ void VideoCaptureImpl::OnMailboxBufferReceived( return; } - last_frame_format_ = format; if (first_frame_timestamp_.is_null()) first_frame_timestamp_ = timestamp; @@ -280,13 +276,12 @@ void VideoCaptureImpl::OnMailboxBufferReceived( media::BindToCurrentLoop(base::Bind( &VideoCaptureImpl::OnClientBufferFinished, weak_factory_.GetWeakPtr(), buffer_id, scoped_refptr<ClientBuffer>())), - last_frame_format_.frame_size, gfx::Rect(last_frame_format_.frame_size), - last_frame_format_.frame_size, timestamp - first_frame_timestamp_, false); + packed_frame_size, gfx::Rect(packed_frame_size), packed_frame_size, + timestamp - first_frame_timestamp_, false); + frame->metadata()->MergeInternalValuesFrom(metadata); - for (ClientInfoMap::iterator it = clients_.begin(); it != clients_.end(); - ++it) { - it->second.deliver_frame_cb.Run(frame, format, timestamp); - } + for (const auto& entry : clients_) + entry.second.deliver_frame_cb.Run(frame, timestamp); } void VideoCaptureImpl::OnClientBufferFinished( diff --git a/content/renderer/media/video_capture_impl.h b/content/renderer/media/video_capture_impl.h index 86b1e4b..d7d6eb0 100644 --- a/content/renderer/media/video_capture_impl.h +++ b/content/renderer/media/video_capture_impl.h @@ -113,13 +113,15 @@ class CONTENT_EXPORT VideoCaptureImpl int buffer_id) override; void OnBufferDestroyed(int buffer_id) override; void OnBufferReceived(int buffer_id, - const media::VideoCaptureFormat& format, + const gfx::Size& coded_size, const gfx::Rect& visible_rect, - base::TimeTicks) override; + base::TimeTicks timestamp, + const base::DictionaryValue& metadata) override; void OnMailboxBufferReceived(int buffer_id, const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp) override; + const gfx::Size& packed_frame_size, + base::TimeTicks timestamp, + const base::DictionaryValue& metadata) override; void OnStateChanged(VideoCaptureState state) override; void OnDeviceSupportedFormatsEnumerated( const media::VideoCaptureFormats& supported_formats) override; @@ -165,9 
+167,6 @@ class CONTENT_EXPORT VideoCaptureImpl // client to this class via StartCapture(). media::VideoCaptureParams params_; - // The device's video capture format sent from browser process side. - media::VideoCaptureFormat last_frame_format_; - // The device's first captured frame timestamp sent from browser process side. base::TimeTicks first_frame_timestamp_; diff --git a/content/renderer/media/video_capture_impl_manager_unittest.cc b/content/renderer/media/video_capture_impl_manager_unittest.cc index a688c12..1258c76 100644 --- a/content/renderer/media/video_capture_impl_manager_unittest.cc +++ b/content/renderer/media/video_capture_impl_manager_unittest.cc @@ -91,9 +91,8 @@ class VideoCaptureImplManagerTest : public ::testing::Test { } protected: - MOCK_METHOD3(OnFrameReady, + MOCK_METHOD2(OnFrameReady, void(const scoped_refptr<media::VideoFrame>&, - const media::VideoCaptureFormat&, const base::TimeTicks& estimated_capture_time)); MOCK_METHOD0(OnStarted, void()); MOCK_METHOD0(OnStopped, void()); diff --git a/content/renderer/media/video_capture_impl_unittest.cc b/content/renderer/media/video_capture_impl_unittest.cc index 8c75e47..282cc3e 100644 --- a/content/renderer/media/video_capture_impl_unittest.cc +++ b/content/renderer/media/video_capture_impl_unittest.cc @@ -129,9 +129,8 @@ class VideoCaptureImplTest : public ::testing::Test { } protected: - MOCK_METHOD3(OnFrameReady, + MOCK_METHOD2(OnFrameReady, void(const scoped_refptr<media::VideoFrame>&, - const media::VideoCaptureFormat&, const base::TimeTicks&)); MOCK_METHOD1(OnStateUpdate, void(VideoCaptureState)); MOCK_METHOD1(OnDeviceFormatsInUse, diff --git a/content/renderer/media/video_capture_message_filter.cc b/content/renderer/media/video_capture_message_filter.cc index 7157407..dcdcc24 100644 --- a/content/renderer/media/video_capture_message_filter.cc +++ b/content/renderer/media/video_capture_message_filter.cc @@ -122,32 +122,29 @@ void VideoCaptureMessageFilter::OnBufferCreated( } void VideoCaptureMessageFilter::OnBufferReceived( - int device_id, - int buffer_id, - const media::VideoCaptureFormat& format, - const gfx::Rect& visible_rect, - base::TimeTicks timestamp) { - Delegate* delegate = find_delegate(device_id); + const VideoCaptureMsg_BufferReady_Params& params) { + Delegate* delegate = find_delegate(params.device_id); if (!delegate) { DLOG(WARNING) << "OnBufferReceived: Got video SHM buffer for a " "non-existent or removed video capture."; // Send the buffer back to Host in case it's waiting for all buffers // to be returned. - Send(new VideoCaptureHostMsg_BufferReady(device_id, buffer_id, 0)); + Send(new VideoCaptureHostMsg_BufferReady( + params.device_id, params.buffer_id, 0)); return; } - delegate->OnBufferReceived(buffer_id, format, visible_rect, timestamp); + delegate->OnBufferReceived(params.buffer_id, + params.coded_size, + params.visible_rect, + params.timestamp, + params.metadata); } void VideoCaptureMessageFilter::OnMailboxBufferReceived( - int device_id, - int buffer_id, - const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp) { - Delegate* delegate = find_delegate(device_id); + const VideoCaptureMsg_MailboxBufferReady_Params& params) { + Delegate* delegate = find_delegate(params.device_id); if (!delegate) { DLOG(WARNING) << "OnMailboxBufferReceived: Got video mailbox buffer for a " @@ -155,12 +152,17 @@ void VideoCaptureMessageFilter::OnMailboxBufferReceived( // Send the buffer back to Host in case it's waiting for all buffers // to be returned. 
- Send(new VideoCaptureHostMsg_BufferReady(device_id, buffer_id, 0)); + Send(new VideoCaptureHostMsg_BufferReady( + params.device_id, params.buffer_id, 0)); return; } delegate->OnMailboxBufferReceived( - buffer_id, mailbox_holder, format, timestamp); + params.buffer_id, + params.mailbox_holder, + params.packed_frame_size, + params.timestamp, + params.metadata); } void VideoCaptureMessageFilter::OnBufferDestroyed( diff --git a/content/renderer/media/video_capture_message_filter.h b/content/renderer/media/video_capture_message_filter.h index 311bcba..3171237 100644 --- a/content/renderer/media/video_capture_message_filter.h +++ b/content/renderer/media/video_capture_message_filter.h @@ -13,11 +13,15 @@ #include <map> #include "base/memory/shared_memory.h" +#include "base/values.h" #include "content/common/content_export.h" #include "content/common/media/video_capture.h" #include "ipc/message_filter.h" #include "media/base/video_capture_types.h" +struct VideoCaptureMsg_BufferReady_Params; +struct VideoCaptureMsg_MailboxBufferReady_Params; + namespace gpu { struct MailboxHolder; } // namespace gpu @@ -36,17 +40,20 @@ class CONTENT_EXPORT VideoCaptureMessageFilter : public IPC::MessageFilter { virtual void OnBufferDestroyed(int buffer_id) = 0; // Called when a video frame buffer is received from the browser process. - virtual void OnBufferReceived(int buffer_id, - const media::VideoCaptureFormat& format, - const gfx::Rect& visible_rect, - base::TimeTicks timestamp) = 0; + virtual void OnBufferReceived( + int buffer_id, + const gfx::Size& coded_size, + const gfx::Rect& visible_rect, + base::TimeTicks timestamp, + const base::DictionaryValue& metadata) = 0; // Called when a video mailbox buffer is received from the browser process. virtual void OnMailboxBufferReceived( int buffer_id, const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp) = 0; + const gfx::Size& packed_frame_size, + base::TimeTicks timestamp, + const base::DictionaryValue& metadata) = 0; // Called when state of a video capture device has changed in the browser // process. @@ -102,18 +109,11 @@ class CONTENT_EXPORT VideoCaptureMessageFilter : public IPC::MessageFilter { int buffer_id); // Receive a filled buffer from browser process. - void OnBufferReceived(int device_id, - int buffer_id, - const media::VideoCaptureFormat& format, - const gfx::Rect& visible_rect, - base::TimeTicks timestamp); + void OnBufferReceived(const VideoCaptureMsg_BufferReady_Params& params); // Receive a filled texture mailbox buffer from browser process. - void OnMailboxBufferReceived(int device_id, - int buffer_id, - const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp); + void OnMailboxBufferReceived( + const VideoCaptureMsg_MailboxBufferReady_Params& params); // State of browser process' video capture device has changed. 
void OnDeviceStateChanged(int device_id, VideoCaptureState state); diff --git a/content/renderer/media/video_capture_message_filter_unittest.cc b/content/renderer/media/video_capture_message_filter_unittest.cc index 1298e11..72fd235 100644 --- a/content/renderer/media/video_capture_message_filter_unittest.cc +++ b/content/renderer/media/video_capture_message_filter_unittest.cc @@ -12,10 +12,13 @@ using ::testing::_; using ::testing::AnyNumber; +using ::testing::DoAll; +using ::testing::Invoke; using ::testing::Mock; using ::testing::Return; using ::testing::SaveArg; using ::testing::StrictMock; +using ::testing::WithArg; namespace content { namespace { @@ -29,16 +32,18 @@ class MockVideoCaptureDelegate : public VideoCaptureMessageFilter::Delegate { int length, int buffer_id)); MOCK_METHOD1(OnBufferDestroyed, void(int buffer_id)); - MOCK_METHOD4(OnBufferReceived, + MOCK_METHOD5(OnBufferReceived, void(int buffer_id, - const media::VideoCaptureFormat& format, + const gfx::Size& coded_size, const gfx::Rect& visible_rect, - base::TimeTicks timestamp)); - MOCK_METHOD4(OnMailboxBufferReceived, + base::TimeTicks timestamp, + const base::DictionaryValue& metadata)); + MOCK_METHOD5(OnMailboxBufferReceived, void(int buffer_id, const gpu::MailboxHolder& mailbox_holder, - const media::VideoCaptureFormat& format, - base::TimeTicks timestamp)); + const gfx::Size& packed_frame_size, + base::TimeTicks timestamp, + const base::DictionaryValue& metadata)); MOCK_METHOD1(OnStateChanged, void(VideoCaptureState state)); MOCK_METHOD1(OnDeviceSupportedFormatsEnumerated, void(const media::VideoCaptureFormats& formats)); @@ -57,6 +62,16 @@ class MockVideoCaptureDelegate : public VideoCaptureMessageFilter::Delegate { int device_id_; }; +void ExpectMetadataContainsFooBarBaz(const base::DictionaryValue& metadata) { + std::string value; + if (metadata.GetString("foo", &value)) + EXPECT_EQ(std::string("bar"), value); + else if (metadata.GetString("bar", &value)) + EXPECT_EQ(std::string("baz"), value); + else + FAIL() << "Missing key 'foo' or key 'bar'."; +} + } // namespace TEST(VideoCaptureMessageFilterTest, Basic) { @@ -89,56 +104,55 @@ TEST(VideoCaptureMessageFilterTest, Basic) { Mock::VerifyAndClearExpectations(&delegate); // VideoCaptureMsg_BufferReady - int buffer_id = 22; - base::TimeTicks timestamp = base::TimeTicks::FromInternalValue(1); - - const media::VideoCaptureFormat shm_format( - gfx::Size(234, 512), 30, media::PIXEL_FORMAT_I420); - media::VideoCaptureFormat saved_format; - EXPECT_CALL(delegate, OnBufferReceived(buffer_id, _, _, timestamp)) - .WillRepeatedly(SaveArg<1>(&saved_format)); - filter->OnMessageReceived(VideoCaptureMsg_BufferReady( - delegate.device_id(), buffer_id, shm_format, gfx::Rect(234, 512), - timestamp)); + VideoCaptureMsg_BufferReady_Params params; + params.device_id = delegate.device_id(); + params.buffer_id = 22; + params.coded_size = gfx::Size(234, 512); + params.visible_rect = gfx::Rect(100, 200, 300, 400); + params.timestamp = base::TimeTicks::FromInternalValue(1); + params.metadata.SetString("foo", "bar"); + + EXPECT_CALL(delegate, OnBufferReceived(params.buffer_id, + params.coded_size, + params.visible_rect, + params.timestamp, + _)) + .WillRepeatedly(WithArg<4>(Invoke(&ExpectMetadataContainsFooBarBaz))); + filter->OnMessageReceived(VideoCaptureMsg_BufferReady(params)); Mock::VerifyAndClearExpectations(&delegate); - EXPECT_EQ(shm_format.frame_size, saved_format.frame_size); - EXPECT_EQ(shm_format.frame_rate, saved_format.frame_rate); - EXPECT_EQ(shm_format.pixel_format, 
saved_format.pixel_format); // VideoCaptureMsg_MailboxBufferReady - buffer_id = 33; - timestamp = base::TimeTicks::FromInternalValue(2); - - const media::VideoCaptureFormat mailbox_format( - gfx::Size(234, 512), 30, media::PIXEL_FORMAT_TEXTURE); + VideoCaptureMsg_MailboxBufferReady_Params params_m; + params_m.device_id = delegate.device_id(); + params_m.buffer_id = 33; gpu::Mailbox mailbox; const int8 mailbox_name[arraysize(mailbox.name)] = "TEST MAILBOX"; mailbox.SetName(mailbox_name); - unsigned int syncpoint = 44; + params_m.mailbox_holder = gpu::MailboxHolder(mailbox, 0, 44); + params_m.packed_frame_size = gfx::Size(345, 256); + params_m.timestamp = base::TimeTicks::FromInternalValue(2); + params_m.metadata.SetString("bar", "baz"); + gpu::MailboxHolder saved_mailbox_holder; - EXPECT_CALL(delegate, OnMailboxBufferReceived(buffer_id, _, _, timestamp)) - .WillRepeatedly( - DoAll(SaveArg<1>(&saved_mailbox_holder), SaveArg<2>(&saved_format))); - gpu::MailboxHolder mailbox_holder(mailbox, 0, syncpoint); - filter->OnMessageReceived( - VideoCaptureMsg_MailboxBufferReady(delegate.device_id(), - buffer_id, - mailbox_holder, - mailbox_format, - timestamp)); + EXPECT_CALL(delegate, OnMailboxBufferReceived(params_m.buffer_id, + _, + params_m.packed_frame_size, + params_m.timestamp, + _)) + .WillRepeatedly(DoAll( + SaveArg<1>(&saved_mailbox_holder), + WithArg<4>(Invoke(&ExpectMetadataContainsFooBarBaz)))); + filter->OnMessageReceived(VideoCaptureMsg_MailboxBufferReady(params_m)); Mock::VerifyAndClearExpectations(&delegate); - EXPECT_EQ(mailbox_format.frame_size, saved_format.frame_size); - EXPECT_EQ(mailbox_format.frame_rate, saved_format.frame_rate); - EXPECT_EQ(mailbox_format.pixel_format, saved_format.pixel_format); EXPECT_EQ(memcmp(mailbox.name, saved_mailbox_holder.mailbox.name, sizeof(mailbox.name)), 0); // VideoCaptureMsg_FreeBuffer - EXPECT_CALL(delegate, OnBufferDestroyed(buffer_id)); + EXPECT_CALL(delegate, OnBufferDestroyed(params_m.buffer_id)); filter->OnMessageReceived(VideoCaptureMsg_FreeBuffer( - delegate.device_id(), buffer_id)); + delegate.device_id(), params_m.buffer_id)); Mock::VerifyAndClearExpectations(&delegate); } diff --git a/content/renderer/media/video_source_handler.cc b/content/renderer/media/video_source_handler.cc index b48f8e2..13af886 100644 --- a/content/renderer/media/video_source_handler.cc +++ b/content/renderer/media/video_source_handler.cc @@ -53,10 +53,8 @@ class PpFrameReceiver : public MediaStreamVideoSink { reader_ = reader; } - void OnVideoFrame( - const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, - const base::TimeTicks& estimated_capture_time) { + void OnVideoFrame(const scoped_refptr<media::VideoFrame>& frame, + const base::TimeTicks& estimated_capture_time) { TRACE_EVENT0("video", "PpFrameReceiver::OnVideoFrame"); if (reader_) { reader_->GotFrame(frame); @@ -141,8 +139,7 @@ void VideoSourceHandler::DeliverFrameForTesting( return; } PpFrameReceiver* receiver = it->second->receiver_.get(); - receiver->OnVideoFrame(frame, media::VideoCaptureFormat(), - base::TimeTicks()); + receiver->OnVideoFrame(frame, base::TimeTicks()); } VideoSourceHandler::SourceInfo::SourceInfo( diff --git a/content/renderer/media/video_track_adapter.cc b/content/renderer/media/video_track_adapter.cc index 855cf76..d56e8d4 100644 --- a/content/renderer/media/video_track_adapter.cc +++ b/content/renderer/media/video_track_adapter.cc @@ -76,7 +76,6 @@ class VideoTrackAdapter::VideoFrameResolutionAdapter void RemoveCallback(const 
MediaStreamVideoTrack* track); void DeliverFrame(const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time); // Returns true if all arguments match with the output of this adapter. @@ -93,7 +92,6 @@ class VideoTrackAdapter::VideoFrameResolutionAdapter virtual void DoDeliverFrame( const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time); // Returns |true| if the input frame rate is higher that the requested max @@ -174,17 +172,21 @@ VideoFrameResolutionAdapter::~VideoFrameResolutionAdapter() { void VideoTrackAdapter::VideoFrameResolutionAdapter::DeliverFrame( const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time) { DCHECK(io_thread_checker_.CalledOnValidThread()); - if (MaybeDropFrame(frame, format.frame_rate)) + double frame_rate; + if (!frame->metadata()->GetDouble(media::VideoFrameMetadata::FRAME_RATE, + &frame_rate)) { + frame_rate = MediaStreamVideoSource::kUnknownFrameRate; + } + if (MaybeDropFrame(frame, frame_rate)) return; // TODO(perkj): Allow cropping / scaling of textures once // http://crbug/362521 is fixed. if (frame->format() == media::VideoFrame::NATIVE_TEXTURE) { - DoDeliverFrame(frame, format, estimated_capture_time); + DoDeliverFrame(frame, estimated_capture_time); return; } scoped_refptr<media::VideoFrame> video_frame(frame); @@ -246,7 +248,7 @@ void VideoTrackAdapter::VideoFrameResolutionAdapter::DeliverFrame( << " output visible rect " << video_frame->visible_rect().ToString(); } - DoDeliverFrame(video_frame, format, estimated_capture_time); + DoDeliverFrame(video_frame, estimated_capture_time); } bool VideoTrackAdapter::VideoFrameResolutionAdapter::MaybeDropFrame( @@ -311,13 +313,10 @@ bool VideoTrackAdapter::VideoFrameResolutionAdapter::MaybeDropFrame( void VideoTrackAdapter:: VideoFrameResolutionAdapter::DoDeliverFrame( const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time) { DCHECK(io_thread_checker_.CalledOnValidThread()); - for (std::vector<VideoIdCallbackPair>::const_iterator it = callbacks_.begin(); - it != callbacks_.end(); ++it) { - it->second.Run(frame, format, estimated_capture_time); - } + for (const auto& entry : callbacks_) + entry.second.Run(frame, estimated_capture_time); } void VideoTrackAdapter::VideoFrameResolutionAdapter::AddCallback( @@ -494,15 +493,12 @@ void VideoTrackAdapter::RemoveTrackOnIO(const MediaStreamVideoTrack* track) { void VideoTrackAdapter::DeliverFrameOnIO( const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time) { DCHECK(io_message_loop_->BelongsToCurrentThread()); TRACE_EVENT0("video", "VideoTrackAdapter::DeliverFrameOnIO"); ++frame_counter_; - for (FrameAdapters::iterator it = adapters_.begin(); - it != adapters_.end(); ++it) { - (*it)->DeliverFrame(frame, format, estimated_capture_time); - } + for (const auto& adapter : adapters_) + adapter->DeliverFrame(frame, estimated_capture_time); } void VideoTrackAdapter::CheckFramesReceivedOnIO( diff --git a/content/renderer/media/video_track_adapter.h b/content/renderer/media/video_track_adapter.h index 1a61612..a2f441c 100644 --- a/content/renderer/media/video_track_adapter.h +++ b/content/renderer/media/video_track_adapter.h @@ -50,7 +50,6 @@ class VideoTrackAdapter // Must be called on the 
IO-thread. void DeliverFrameOnIO( const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time); const scoped_refptr<base::MessageLoopProxy>& io_message_loop() { diff --git a/content/renderer/media/webrtc/media_stream_remote_video_source.cc b/content/renderer/media/webrtc/media_stream_remote_video_source.cc index b752721..786ed84 100644 --- a/content/renderer/media/webrtc/media_stream_remote_video_source.cc +++ b/content/renderer/media/webrtc/media_stream_remote_video_source.cc @@ -40,8 +40,9 @@ class MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate void SetSize(int width, int height) override; void RenderFrame(const cricket::VideoFrame* frame) override; - void DoRenderFrameOnIOThread(scoped_refptr<media::VideoFrame> video_frame, - const media::VideoCaptureFormat& format); + void DoRenderFrameOnIOThread( + const scoped_refptr<media::VideoFrame>& video_frame); + private: // Bound to the render thread. base::ThreadChecker thread_checker_; @@ -103,30 +104,19 @@ RemoteVideoSourceDelegate::RenderFrame( frame->GetVPlane(), frame->GetVPitch(), uv_rows, video_frame.get()); } - media::VideoPixelFormat pixel_format = - (video_frame->format() == media::VideoFrame::YV12) ? - media::PIXEL_FORMAT_YV12 : media::PIXEL_FORMAT_TEXTURE; - - media::VideoCaptureFormat format( - gfx::Size(video_frame->natural_size().width(), - video_frame->natural_size().height()), - MediaStreamVideoSource::kUnknownFrameRate, - pixel_format); - io_message_loop_->PostTask( FROM_HERE, base::Bind(&RemoteVideoSourceDelegate::DoRenderFrameOnIOThread, - this, video_frame, format)); + this, video_frame)); } void MediaStreamRemoteVideoSource:: RemoteVideoSourceDelegate::DoRenderFrameOnIOThread( - scoped_refptr<media::VideoFrame> video_frame, - const media::VideoCaptureFormat& format) { + const scoped_refptr<media::VideoFrame>& video_frame) { DCHECK(io_message_loop_->BelongsToCurrentThread()); TRACE_EVENT0("webrtc", "RemoteVideoSourceDelegate::DoRenderFrameOnIOThread"); // TODO(hclam): Give the estimated capture time. 
- frame_callback_.Run(video_frame, format, base::TimeTicks()); + frame_callback_.Run(video_frame, base::TimeTicks()); } MediaStreamRemoteVideoSource::MediaStreamRemoteVideoSource( diff --git a/content/renderer/media/webrtc/video_destination_handler.cc b/content/renderer/media/webrtc/video_destination_handler.cc index 45579f0..0684aa6 100644 --- a/content/renderer/media/webrtc/video_destination_handler.cc +++ b/content/renderer/media/webrtc/video_destination_handler.cc @@ -32,14 +32,12 @@ class PpFrameWriter::FrameWriterDelegate const scoped_refptr<base::MessageLoopProxy>& io_message_loop_proxy, const VideoCaptureDeliverFrameCB& new_frame_callback); - void DeliverFrame(const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format); + void DeliverFrame(const scoped_refptr<media::VideoFrame>& frame); private: friend class base::RefCountedThreadSafe<FrameWriterDelegate>; virtual ~FrameWriterDelegate(); - void DeliverFrameOnIO(const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format); + void DeliverFrameOnIO(const scoped_refptr<media::VideoFrame>& frame); scoped_refptr<base::MessageLoopProxy> io_message_loop_; VideoCaptureDeliverFrameCB new_frame_callback_; @@ -56,21 +54,18 @@ PpFrameWriter::FrameWriterDelegate::~FrameWriterDelegate() { } void PpFrameWriter::FrameWriterDelegate::DeliverFrame( - const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format) { + const scoped_refptr<media::VideoFrame>& frame) { io_message_loop_->PostTask( FROM_HERE, - base::Bind(&FrameWriterDelegate::DeliverFrameOnIO, - this, frame, format)); + base::Bind(&FrameWriterDelegate::DeliverFrameOnIO, this, frame)); } void PpFrameWriter::FrameWriterDelegate::DeliverFrameOnIO( - const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format) { + const scoped_refptr<media::VideoFrame>& frame) { DCHECK(io_message_loop_->BelongsToCurrentThread()); // The local time when this frame is generated is unknown so give a null // value to |estimated_capture_time|. - new_frame_callback_.Run(frame, format, base::TimeTicks()); + new_frame_callback_.Run(frame, base::TimeTicks()); } PpFrameWriter::PpFrameWriter() { @@ -152,10 +147,6 @@ void PpFrameWriter::PutFrame(PPB_ImageData_Impl* image_data, scoped_refptr<media::VideoFrame> new_frame = frame_pool_.CreateFrame(media::VideoFrame::YV12, frame_size, gfx::Rect(frame_size), frame_size, timestamp); - media::VideoCaptureFormat format( - frame_size, - MediaStreamVideoSource::kUnknownFrameRate, - media::PIXEL_FORMAT_YV12); // TODO(magjed): Chrome OS is not ready for switching from BGRA to ARGB. // Remove this once http://crbug/434007 is fixed. 
We have a corresponding @@ -176,7 +167,7 @@ void PpFrameWriter::PutFrame(PPB_ImageData_Impl* image_data, width, height); - delegate_->DeliverFrame(new_frame, format); + delegate_->DeliverFrame(new_frame); } // PpFrameWriterProxy is a helper class to make sure the user won't use diff --git a/content/renderer/media/webrtc/webrtc_video_track_adapter.cc b/content/renderer/media/webrtc/webrtc_video_track_adapter.cc index 6a9c2f4..d3c2036 100644 --- a/content/renderer/media/webrtc/webrtc_video_track_adapter.cc +++ b/content/renderer/media/webrtc/webrtc_video_track_adapter.cc @@ -44,14 +44,11 @@ class WebRtcVideoTrackAdapter::WebRtcVideoSourceAdapter void ReleaseSourceOnMainThread(); void OnVideoFrameOnIO(const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time); private: void OnVideoFrameOnWorkerThread( - const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, - const base::TimeTicks& estimated_capture_time); + const scoped_refptr<media::VideoFrame>& frame); friend class base::RefCountedThreadSafe<WebRtcVideoSourceAdapter>; virtual ~WebRtcVideoSourceAdapter(); @@ -113,20 +110,18 @@ ReleaseSourceOnMainThread() { void WebRtcVideoTrackAdapter::WebRtcVideoSourceAdapter::OnVideoFrameOnIO( const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time) { DCHECK(io_thread_checker_.CalledOnValidThread()); libjingle_worker_thread_->PostTask( FROM_HERE, base::Bind(&WebRtcVideoSourceAdapter::OnVideoFrameOnWorkerThread, - this, frame, format, estimated_capture_time)); + this, + frame)); } void WebRtcVideoTrackAdapter::WebRtcVideoSourceAdapter::OnVideoFrameOnWorkerThread( - const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, - const base::TimeTicks& estimated_capture_time) { + const scoped_refptr<media::VideoFrame>& frame) { DCHECK(libjingle_worker_thread_->BelongsToCurrentThread()); base::AutoLock auto_lock(capture_adapter_stop_lock_); if (capture_adapter_) diff --git a/content/renderer/pepper/pepper_media_stream_video_track_host.cc b/content/renderer/pepper/pepper_media_stream_video_track_host.cc index 096820b..258ee56 100644 --- a/content/renderer/pepper/pepper_media_stream_video_track_host.cc +++ b/content/renderer/pepper/pepper_media_stream_video_track_host.cc @@ -97,24 +97,24 @@ void ConvertFromMediaVideoFrame(const scoped_refptr<media::VideoFrame>& src, uint8_t* dst) { CHECK(src->format() == VideoFrame::YV12 || src->format() == VideoFrame::I420); if (dst_format == PP_VIDEOFRAME_FORMAT_BGRA) { - if (src->coded_size() == dst_size) { - libyuv::I420ToARGB(src->data(VideoFrame::kYPlane), + if (src->visible_rect().size() == dst_size) { + libyuv::I420ToARGB(src->visible_data(VideoFrame::kYPlane), src->stride(VideoFrame::kYPlane), - src->data(VideoFrame::kUPlane), + src->visible_data(VideoFrame::kUPlane), src->stride(VideoFrame::kUPlane), - src->data(VideoFrame::kVPlane), + src->visible_data(VideoFrame::kVPlane), src->stride(VideoFrame::kVPlane), dst, dst_size.width() * 4, dst_size.width(), dst_size.height()); } else { - media::ScaleYUVToRGB32(src->data(VideoFrame::kYPlane), - src->data(VideoFrame::kUPlane), - src->data(VideoFrame::kVPlane), + media::ScaleYUVToRGB32(src->visible_data(VideoFrame::kYPlane), + src->visible_data(VideoFrame::kUPlane), + src->visible_data(VideoFrame::kVPlane), dst, - src->coded_size().width(), - src->coded_size().height(), + src->visible_rect().width(), + 
src->visible_rect().height(), dst_size.width(), dst_size.height(), src->stride(VideoFrame::kYPlane), @@ -135,21 +135,21 @@ void ConvertFromMediaVideoFrame(const scoped_refptr<media::VideoFrame>& src, const int plane_order = (dst_format == PP_VIDEOFRAME_FORMAT_YV12) ? 0 : 1; int dst_width = dst_size.width(); int dst_height = dst_size.height(); - libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][0]), + libyuv::ScalePlane(src->visible_data(kPlanesOrder[plane_order][0]), src->stride(kPlanesOrder[plane_order][0]), - src->coded_size().width(), - src->coded_size().height(), + src->visible_rect().width(), + src->visible_rect().height(), dst, dst_width, dst_width, dst_height, kFilterMode); dst += dst_width * dst_height; - const int src_halfwidth = (src->coded_size().width() + 1) >> 1; - const int src_halfheight = (src->coded_size().height() + 1) >> 1; + const int src_halfwidth = (src->visible_rect().width() + 1) >> 1; + const int src_halfheight = (src->visible_rect().height() + 1) >> 1; const int dst_halfwidth = (dst_width + 1) >> 1; const int dst_halfheight = (dst_height + 1) >> 1; - libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][1]), + libyuv::ScalePlane(src->visible_data(kPlanesOrder[plane_order][1]), src->stride(kPlanesOrder[plane_order][1]), src_halfwidth, src_halfheight, @@ -159,7 +159,7 @@ void ConvertFromMediaVideoFrame(const scoped_refptr<media::VideoFrame>& src, dst_halfheight, kFilterMode); dst += dst_halfwidth * dst_halfheight; - libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][2]), + libyuv::ScalePlane(src->visible_data(kPlanesOrder[plane_order][2]), src->stride(kPlanesOrder[plane_order][2]), src_halfwidth, src_halfheight, @@ -186,15 +186,13 @@ class PepperMediaStreamVideoTrackHost::FrameDeliverer const scoped_refptr<base::MessageLoopProxy>& io_message_loop_proxy, const VideoCaptureDeliverFrameCB& new_frame_callback); - void DeliverVideoFrame(const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format); + void DeliverVideoFrame(const scoped_refptr<media::VideoFrame>& frame); private: friend class base::RefCountedThreadSafe<FrameDeliverer>; virtual ~FrameDeliverer(); - void DeliverFrameOnIO(const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format); + void DeliverFrameOnIO(const scoped_refptr<media::VideoFrame>& frame); scoped_refptr<base::MessageLoopProxy> io_message_loop_; VideoCaptureDeliverFrameCB new_frame_callback_; @@ -213,21 +211,18 @@ PepperMediaStreamVideoTrackHost::FrameDeliverer::~FrameDeliverer() { } void PepperMediaStreamVideoTrackHost::FrameDeliverer::DeliverVideoFrame( - const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format) { + const scoped_refptr<media::VideoFrame>& frame) { io_message_loop_->PostTask( FROM_HERE, - base::Bind(&FrameDeliverer::DeliverFrameOnIO, - this, frame, format)); + base::Bind(&FrameDeliverer::DeliverFrameOnIO, this, frame)); } void PepperMediaStreamVideoTrackHost::FrameDeliverer::DeliverFrameOnIO( - const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format) { + const scoped_refptr<media::VideoFrame>& frame) { DCHECK(io_message_loop_->BelongsToCurrentThread()); // The time when this frame is generated is unknown so give a null value to // |estimated_capture_time|. 
- new_frame_callback_.Run(frame, format, base::TimeTicks()); + new_frame_callback_.Run(frame, base::TimeTicks()); } PepperMediaStreamVideoTrackHost::PepperMediaStreamVideoTrackHost( @@ -369,11 +364,7 @@ int32_t PepperMediaStreamVideoTrackHost::SendFrameToTrack(int32_t index) { base::TimeDelta::FromMilliseconds(ts_ms), base::Closure()); - frame_deliverer_->DeliverVideoFrame( - frame, - media::VideoCaptureFormat(plugin_frame_size_, - kDefaultOutputFrameRate, - ToPixelFormat(plugin_frame_format_))); + frame_deliverer_->DeliverVideoFrame(frame); } // Makes the frame available again for plugin. @@ -383,7 +374,6 @@ int32_t PepperMediaStreamVideoTrackHost::SendFrameToTrack(int32_t index) { void PepperMediaStreamVideoTrackHost::OnVideoFrame( const scoped_refptr<VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time) { DCHECK(frame.get()); // TODO(penghuang): Check |frame->end_of_stream()| and close the track. @@ -392,7 +382,7 @@ void PepperMediaStreamVideoTrackHost::OnVideoFrame( return; if (source_frame_size_.IsEmpty()) { - source_frame_size_ = frame->coded_size(); + source_frame_size_ = frame->visible_rect().size(); source_frame_format_ = ppformat; InitBuffers(); } diff --git a/content/renderer/pepper/pepper_media_stream_video_track_host.h b/content/renderer/pepper/pepper_media_stream_video_track_host.h index e9a562b..74455ee 100644 --- a/content/renderer/pepper/pepper_media_stream_video_track_host.h +++ b/content/renderer/pepper/pepper_media_stream_video_track_host.h @@ -56,7 +56,6 @@ class PepperMediaStreamVideoTrackHost : public PepperMediaStreamTrackHostBase, int32_t SendFrameToTrack(int32_t index); void OnVideoFrame(const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time); // MediaStreamVideoSource overrides: diff --git a/content/renderer/pepper/pepper_platform_video_capture.cc b/content/renderer/pepper/pepper_platform_video_capture.cc index 0d011f3..68ef480 100644 --- a/content/renderer/pepper/pepper_platform_video_capture.cc +++ b/content/renderer/pepper/pepper_platform_video_capture.cc @@ -142,10 +142,9 @@ void PepperPlatformVideoCapture::OnStateUpdate(VideoCaptureState state) { void PepperPlatformVideoCapture::OnFrameReady( const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time) { if (handler_ && !stop_capture_cb_.is_null()) - handler_->OnFrameReady(frame, format); + handler_->OnFrameReady(frame); } PepperMediaDeviceManager* PepperPlatformVideoCapture::GetMediaDeviceManager() { diff --git a/content/renderer/pepper/pepper_platform_video_capture.h b/content/renderer/pepper/pepper_platform_video_capture.h index 601c4f1..dd6ed78 100644 --- a/content/renderer/pepper/pepper_platform_video_capture.h +++ b/content/renderer/pepper/pepper_platform_video_capture.h @@ -42,7 +42,6 @@ class PepperPlatformVideoCapture { void OnDeviceOpened(int request_id, bool succeeded, const std::string& label); void OnStateUpdate(VideoCaptureState state); void OnFrameReady(const scoped_refptr<media::VideoFrame>& frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time); // Can return NULL if the RenderFrame referenced by |render_frame_id_| has diff --git a/content/renderer/pepper/pepper_video_capture_host.cc b/content/renderer/pepper/pepper_video_capture_host.cc index ed76dc3..820dd12 100644 --- a/content/renderer/pepper/pepper_video_capture_host.cc +++ 
b/content/renderer/pepper/pepper_video_capture_host.cc @@ -4,6 +4,7 @@ #include "content/renderer/pepper/pepper_video_capture_host.h" +#include "content/renderer/media/media_stream_video_source.h" #include "content/renderer/pepper/host_globals.h" #include "content/renderer/pepper/pepper_media_device_manager.h" #include "content/renderer/pepper/pepper_platform_video_capture.h" @@ -120,21 +121,26 @@ void PepperVideoCaptureHost::PostErrorReply() { } void PepperVideoCaptureHost::OnFrameReady( - const scoped_refptr<media::VideoFrame>& frame, - media::VideoCaptureFormat format) { + const scoped_refptr<media::VideoFrame>& frame) { DCHECK(frame.get()); - if (alloc_size_ != frame->coded_size() || buffers_.empty()) { - AllocBuffers(frame->coded_size(), format.frame_rate); - alloc_size_ = frame->coded_size(); + if (alloc_size_ != frame->visible_rect().size() || buffers_.empty()) { + alloc_size_ = frame->visible_rect().size(); + double frame_rate; + int rounded_frame_rate; + if (frame->metadata()->GetDouble(media::VideoFrameMetadata::FRAME_RATE, + &frame_rate)) + rounded_frame_rate = static_cast<int>(frame_rate + 0.5 /* round */); + else + rounded_frame_rate = MediaStreamVideoSource::kUnknownFrameRate; + AllocBuffers(alloc_size_, rounded_frame_rate); } for (uint32_t i = 0; i < buffers_.size(); ++i) { if (!buffers_[i].in_use) { DCHECK_EQ(frame->format(), media::VideoFrame::I420); if (buffers_[i].buffer->size() < - media::VideoFrame::AllocationSize(frame->format(), - frame->coded_size())) { + media::VideoFrame::AllocationSize(frame->format(), alloc_size_)) { // TODO(ihf): handle size mismatches gracefully here. return; } @@ -144,7 +150,7 @@ void PepperVideoCaptureHost::OnFrameReady( static_assert(media::VideoFrame::kVPlane == 2, "v plane should be 2"); for (size_t j = 0; j < media::VideoFrame::NumPlanes(frame->format()); ++j) { - const uint8* src = frame->data(j); + const uint8* src = frame->visible_data(j); const size_t row_bytes = frame->row_bytes(j); const size_t src_stride = frame->stride(j); for (int k = 0; k < frame->rows(j); ++k) { diff --git a/content/renderer/pepper/pepper_video_capture_host.h b/content/renderer/pepper/pepper_video_capture_host.h index cfa11ec..24061b5e7 100644 --- a/content/renderer/pepper/pepper_video_capture_host.h +++ b/content/renderer/pepper/pepper_video_capture_host.h @@ -59,8 +59,7 @@ class PepperVideoCaptureHost : public ppapi::host::ResourceHost { void OnError(); // Called when a video frame is ready. 
- void OnFrameReady(const scoped_refptr<media::VideoFrame>& frame, - media::VideoCaptureFormat format); + void OnFrameReady(const scoped_refptr<media::VideoFrame>& frame); private: int32_t OnOpen(ppapi::host::HostMessageContext* context, diff --git a/media/base/BUILD.gn b/media/base/BUILD.gn index 11d87f2..235ef60 100644 --- a/media/base/BUILD.gn +++ b/media/base/BUILD.gn @@ -172,6 +172,8 @@ source_set("base") { "video_decoder_config.h", "video_frame.cc", "video_frame.h", + "video_frame_metadata.cc", + "video_frame_metadata.h", "video_frame_pool.cc", "video_frame_pool.h", "video_renderer.cc", @@ -272,6 +274,8 @@ source_set("base_for_cast_ios") { sources = [ "video_frame.cc", "video_frame.h", + "video_frame_metadata.cc", + "video_frame_metadata.h", ] configs += [ "//build/config/compiler:no_size_t_to_int_warning", diff --git a/media/base/video_capturer_source.h b/media/base/video_capturer_source.h index 56ec729..774f28d 100644 --- a/media/base/video_capturer_source.h +++ b/media/base/video_capturer_source.h @@ -42,7 +42,6 @@ class MEDIA_EXPORT VideoCapturerSource { // the first video frame delivered may not have timestamp equal to 0. typedef base::Callback< void(const scoped_refptr<media::VideoFrame>& video_frame, - const media::VideoCaptureFormat& format, const base::TimeTicks& estimated_capture_time)> VideoCaptureDeliverFrameCB; diff --git a/media/base/video_frame.h b/media/base/video_frame.h index 366a356..cebed0a 100644 --- a/media/base/video_frame.h +++ b/media/base/video_frame.h @@ -12,6 +12,7 @@ #include "base/memory/shared_memory.h" #include "base/synchronization/lock.h" #include "media/base/buffers.h" +#include "media/base/video_frame_metadata.h" #include "ui/gfx/geometry/rect.h" #include "ui/gfx/geometry/size.h" @@ -288,6 +289,15 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> { // Returns the offset into the shared memory where the frame data begins. size_t shared_memory_offset() const; + // Returns a dictionary of optional metadata. This contains information + // associated with the frame that downstream clients might use for frame-level + // logging, quality/performance optimizations, signaling, etc. + // + // TODO(miu): Move some of the "extra" members of VideoFrame (below) into + // here as a later clean-up step. + const VideoFrameMetadata* metadata() const { return &metadata_; } + VideoFrameMetadata* metadata() { return &metadata_; } + bool allow_overlay() const { return allow_overlay_; } #if defined(OS_POSIX) @@ -403,6 +413,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> { const bool end_of_stream_; + VideoFrameMetadata metadata_; + bool allow_overlay_; DISALLOW_IMPLICIT_CONSTRUCTORS(VideoFrame); diff --git a/media/base/video_frame_metadata.cc b/media/base/video_frame_metadata.cc new file mode 100644 index 0000000..d14bbe9 --- /dev/null +++ b/media/base/video_frame_metadata.cc @@ -0,0 +1,125 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "base/logging.h" +#include "base/strings/string_number_conversions.h" +#include "media/base/video_frame_metadata.h" + +namespace media { + +namespace { + +// Map enum key to internal std::string key used by base::DictionaryValue. 
+inline std::string ToInternalKey(VideoFrameMetadata::Key key) { + DCHECK_LT(key, VideoFrameMetadata::NUM_KEYS); + return base::IntToString(static_cast<int>(key)); +} + +} // namespace + +VideoFrameMetadata::VideoFrameMetadata() {} + +VideoFrameMetadata::~VideoFrameMetadata() {} + +bool VideoFrameMetadata::HasKey(Key key) const { + return dictionary_.HasKey(ToInternalKey(key)); +} + +void VideoFrameMetadata::SetBoolean(Key key, bool value) { + dictionary_.SetBooleanWithoutPathExpansion(ToInternalKey(key), value); +} + +void VideoFrameMetadata::SetInteger(Key key, int value) { + dictionary_.SetIntegerWithoutPathExpansion(ToInternalKey(key), value); +} + +void VideoFrameMetadata::SetDouble(Key key, double value) { + dictionary_.SetDoubleWithoutPathExpansion(ToInternalKey(key), value); +} + +void VideoFrameMetadata::SetString(Key key, const std::string& value) { + dictionary_.SetWithoutPathExpansion( + ToInternalKey(key), + // Using BinaryValue since we don't want the |value| interpreted as having + // any particular character encoding (e.g., UTF-8) by + // base::DictionaryValue. + base::BinaryValue::CreateWithCopiedBuffer(value.data(), value.size())); +} + +void VideoFrameMetadata::SetTimeTicks(Key key, const base::TimeTicks& value) { + const int64 internal_value = value.ToInternalValue(); + dictionary_.SetWithoutPathExpansion( + ToInternalKey(key), + base::BinaryValue::CreateWithCopiedBuffer( + reinterpret_cast<const char*>(&internal_value), + sizeof(internal_value))); +} + +void VideoFrameMetadata::SetValue(Key key, scoped_ptr<base::Value> value) { + dictionary_.SetWithoutPathExpansion(ToInternalKey(key), value.Pass()); +} + +bool VideoFrameMetadata::GetBoolean(Key key, bool* value) const { + DCHECK(value); + return dictionary_.GetBooleanWithoutPathExpansion(ToInternalKey(key), value); +} + +bool VideoFrameMetadata::GetInteger(Key key, int* value) const { + DCHECK(value); + return dictionary_.GetIntegerWithoutPathExpansion(ToInternalKey(key), value); +} + +bool VideoFrameMetadata::GetDouble(Key key, double* value) const { + DCHECK(value); + return dictionary_.GetDoubleWithoutPathExpansion(ToInternalKey(key), value); +} + +bool VideoFrameMetadata::GetString(Key key, std::string* value) const { + DCHECK(value); + const base::BinaryValue* const binary_value = GetBinaryValue(key); + if (binary_value) + value->assign(binary_value->GetBuffer(), binary_value->GetSize()); + return !!binary_value; +} + +bool VideoFrameMetadata::GetTimeTicks(Key key, base::TimeTicks* value) const { + DCHECK(value); + const base::BinaryValue* const binary_value = GetBinaryValue(key); + if (binary_value && binary_value->GetSize() == sizeof(int64)) { + int64 internal_value; + memcpy(&internal_value, binary_value->GetBuffer(), sizeof(internal_value)); + *value = base::TimeTicks::FromInternalValue(internal_value); + return true; + } + return false; +} + +const base::Value* VideoFrameMetadata::GetValue(Key key) const { + const base::Value* result = nullptr; + if (!dictionary_.GetWithoutPathExpansion(ToInternalKey(key), &result)) + return nullptr; + return result; +} + +void VideoFrameMetadata::MergeInternalValuesInto( + base::DictionaryValue* out) const { + out->MergeDictionary(&dictionary_); +} + +void VideoFrameMetadata::MergeInternalValuesFrom( + const base::DictionaryValue& in) { + dictionary_.MergeDictionary(&in); +} + +const base::BinaryValue* VideoFrameMetadata::GetBinaryValue(Key key) const { + const base::Value* internal_value = nullptr; + if (dictionary_.GetWithoutPathExpansion(ToInternalKey(key), + 
&internal_value) && + internal_value->GetType() == base::Value::TYPE_BINARY) { + return static_cast<const base::BinaryValue*>(internal_value); + } + return nullptr; +} + +} // namespace media diff --git a/media/base/video_frame_metadata.h b/media/base/video_frame_metadata.h new file mode 100644 index 0000000..31fbe74 --- /dev/null +++ b/media/base/video_frame_metadata.h @@ -0,0 +1,70 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef MEDIA_BASE_VIDEO_FRAME_METADATA_H_ +#define MEDIA_BASE_VIDEO_FRAME_METADATA_H_ + +#include "base/compiler_specific.h" +#include "base/time/time.h" +#include "base/values.h" +#include "media/base/media_export.h" + +namespace media { + +class MEDIA_EXPORT VideoFrameMetadata { + public: + enum Key { + // Video capture begin/end timestamps. Consumers can use these values for + // dynamic optimizations, logging stats, etc. Use Get/SetTimeTicks() for + // these keys. + CAPTURE_BEGIN_TIME, + CAPTURE_END_TIME, + + // Represents either the fixed frame rate, or the maximum frame rate to + // expect from a variable-rate source. Use Get/SetDouble() for this key. + FRAME_RATE, + + NUM_KEYS + }; + + VideoFrameMetadata(); + ~VideoFrameMetadata(); + + bool HasKey(Key key) const; + + void Clear() { dictionary_.Clear(); } + + // Setters. Overwrites existing value, if present. + void SetBoolean(Key key, bool value); + void SetInteger(Key key, int value); + void SetDouble(Key key, double value); + void SetString(Key key, const std::string& value); + void SetTimeTicks(Key key, const base::TimeTicks& value); + void SetValue(Key key, scoped_ptr<base::Value> value); + + // Getters. Returns true if |key| is present and its value has been set. + bool GetBoolean(Key key, bool* value) const WARN_UNUSED_RESULT; + bool GetInteger(Key key, int* value) const WARN_UNUSED_RESULT; + bool GetDouble(Key key, double* value) const WARN_UNUSED_RESULT; + bool GetString(Key key, std::string* value) const WARN_UNUSED_RESULT; + bool GetTimeTicks(Key key, base::TimeTicks* value) const WARN_UNUSED_RESULT; + + // Returns null if |key| was not present. + const base::Value* GetValue(Key key) const WARN_UNUSED_RESULT; + + // For serialization.
+ void MergeInternalValuesInto(base::DictionaryValue* out) const; + void MergeInternalValuesFrom(const base::DictionaryValue& in); + + private: + const base::BinaryValue* GetBinaryValue(Key key) const; + + base::DictionaryValue dictionary_; + + DISALLOW_COPY_AND_ASSIGN(VideoFrameMetadata); +}; + +} // namespace media + +#endif // MEDIA_BASE_VIDEO_FRAME_METADATA_H_ diff --git a/media/base/video_frame_unittest.cc b/media/base/video_frame_unittest.cc index f2635f9..5c2159f 100644 --- a/media/base/video_frame_unittest.cc +++ b/media/base/video_frame_unittest.cc @@ -328,4 +328,83 @@ TEST(VideoFrame, ZeroInitialized) { EXPECT_EQ(0, frame->data(i)[0]); } +TEST(VideoFrameMetadata, SetAndThenGetAllKeysForAllTypes) { + VideoFrameMetadata metadata; + + for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) { + const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i); + + EXPECT_FALSE(metadata.HasKey(key)); + metadata.SetBoolean(key, true); + EXPECT_TRUE(metadata.HasKey(key)); + bool bool_value = false; + EXPECT_TRUE(metadata.GetBoolean(key, &bool_value)); + EXPECT_EQ(true, bool_value); + metadata.Clear(); + + EXPECT_FALSE(metadata.HasKey(key)); + metadata.SetInteger(key, i); + EXPECT_TRUE(metadata.HasKey(key)); + int int_value = -999; + EXPECT_TRUE(metadata.GetInteger(key, &int_value)); + EXPECT_EQ(i, int_value); + metadata.Clear(); + + EXPECT_FALSE(metadata.HasKey(key)); + metadata.SetDouble(key, 3.14 * i); + EXPECT_TRUE(metadata.HasKey(key)); + double double_value = -999.99; + EXPECT_TRUE(metadata.GetDouble(key, &double_value)); + EXPECT_EQ(3.14 * i, double_value); + metadata.Clear(); + + EXPECT_FALSE(metadata.HasKey(key)); + metadata.SetString(key, base::StringPrintf("\xfe%d\xff", i)); + EXPECT_TRUE(metadata.HasKey(key)); + std::string string_value; + EXPECT_TRUE(metadata.GetString(key, &string_value)); + EXPECT_EQ(base::StringPrintf("\xfe%d\xff", i), string_value); + metadata.Clear(); + + EXPECT_FALSE(metadata.HasKey(key)); + metadata.SetTimeTicks(key, base::TimeTicks::FromInternalValue(~(0LL) + i)); + EXPECT_TRUE(metadata.HasKey(key)); + base::TimeTicks ticks_value; + EXPECT_TRUE(metadata.GetTimeTicks(key, &ticks_value)); + EXPECT_EQ(base::TimeTicks::FromInternalValue(~(0LL) + i), ticks_value); + metadata.Clear(); + + EXPECT_FALSE(metadata.HasKey(key)); + metadata.SetValue(key, + scoped_ptr<base::Value>(base::Value::CreateNullValue())); + EXPECT_TRUE(metadata.HasKey(key)); + const base::Value* const null_value = metadata.GetValue(key); + EXPECT_TRUE(null_value); + EXPECT_EQ(base::Value::TYPE_NULL, null_value->GetType()); + metadata.Clear(); + } +} + +TEST(VideoFrameMetadata, PassMetadataViaIntermediary) { + VideoFrameMetadata expected; + for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) { + const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i); + expected.SetInteger(key, i); + } + + base::DictionaryValue tmp; + expected.MergeInternalValuesInto(&tmp); + EXPECT_EQ(static_cast<size_t>(VideoFrameMetadata::NUM_KEYS), tmp.size()); + + VideoFrameMetadata result; + result.MergeInternalValuesFrom(tmp); + + for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) { + const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i); + int value = -1; + EXPECT_TRUE(result.GetInteger(key, &value)); + EXPECT_EQ(i, value); + } +} + } // namespace media diff --git a/media/cast/sender/video_sender.cc b/media/cast/sender/video_sender.cc index 2389e9f..e4e3a45 100644 --- a/media/cast/sender/video_sender.cc +++ b/media/cast/sender/video_sender.cc @@ -30,6 
+30,30 @@ const int kRoundTripsNeeded = 4; // time). const int kConstantTimeMs = 75; +// Extract capture begin/end timestamps from |video_frame|'s metadata and log +// it. +void LogVideoCaptureTimestamps(const CastEnvironment& cast_environment, + const media::VideoFrame& video_frame, + RtpTimestamp rtp_timestamp) { + base::TimeTicks capture_begin_time; + base::TimeTicks capture_end_time; + if (!video_frame.metadata()->GetTimeTicks( + media::VideoFrameMetadata::CAPTURE_BEGIN_TIME, &capture_begin_time) || + !video_frame.metadata()->GetTimeTicks( + media::VideoFrameMetadata::CAPTURE_END_TIME, &capture_end_time)) { + // The frame capture timestamps were not provided by the video capture + // source. Simply log the events as happening right now. + capture_begin_time = capture_end_time = + cast_environment.Clock()->NowTicks(); + } + cast_environment.Logging()->InsertFrameEvent( + capture_begin_time, FRAME_CAPTURE_BEGIN, VIDEO_EVENT, rtp_timestamp, + kFrameIdUnknown); + cast_environment.Logging()->InsertFrameEvent( + capture_end_time, FRAME_CAPTURE_END, VIDEO_EVENT, rtp_timestamp, + kFrameIdUnknown); +} + } // namespace // Note, we use a fixed bitrate value when external video encoder is used. @@ -108,15 +132,7 @@ void VideoSender::InsertRawVideoFrame( const RtpTimestamp rtp_timestamp = TimeDeltaToRtpDelta(video_frame->timestamp(), kVideoFrequency); - const base::TimeTicks insertion_time = cast_environment_->Clock()->NowTicks(); - // TODO(miu): Plumb in capture timestamps. For now, make it look like capture - // took zero time by setting the BEGIN and END event to the same timestamp. - cast_environment_->Logging()->InsertFrameEvent( - insertion_time, FRAME_CAPTURE_BEGIN, VIDEO_EVENT, rtp_timestamp, - kFrameIdUnknown); - cast_environment_->Logging()->InsertFrameEvent( - insertion_time, FRAME_CAPTURE_END, VIDEO_EVENT, rtp_timestamp, - kFrameIdUnknown); + LogVideoCaptureTimestamps(*cast_environment_, *video_frame, rtp_timestamp); // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc TRACE_EVENT_INSTANT2( diff --git a/media/media.gyp b/media/media.gyp index 3f95cc8..ea044b9 100644 --- a/media/media.gyp +++ b/media/media.gyp @@ -388,6 +388,8 @@ 'base/video_decoder_config.h', 'base/video_frame.cc', 'base/video_frame.h', + 'base/video_frame_metadata.cc', + 'base/video_frame_metadata.h', 'base/video_frame_pool.cc', 'base/video_frame_pool.h', 'base/video_renderer.cc', @@ -1889,6 +1891,8 @@ 'base/mac/videotoolbox_glue.mm', 'base/video_frame.cc', 'base/video_frame.h', + 'base/video_frame_metadata.cc', + 'base/video_frame_metadata.h', ], 'link_settings': { 'libraries': [ diff --git a/media/video/capture/fake_video_capture_device_unittest.cc b/media/video/capture/fake_video_capture_device_unittest.cc index ace1a28..3ad349f 100644 --- a/media/video/capture/fake_video_capture_device_unittest.cc +++ b/media/video/capture/fake_video_capture_device_unittest.cc @@ -26,9 +26,8 @@ class MockClient : public VideoCaptureDevice::Client { MOCK_METHOD2(ReserveOutputBuffer, scoped_refptr<Buffer>(VideoFrame::Format format, const gfx::Size& dimensions)); - MOCK_METHOD4(OnIncomingCapturedVideoFrame, + MOCK_METHOD3(OnIncomingCapturedVideoFrame, void(const scoped_refptr<Buffer>& buffer, - const VideoCaptureFormat& buffer_format, const scoped_refptr<media::VideoFrame>& frame, const base::TimeTicks& timestamp)); MOCK_METHOD1(OnError, void(const std::string& reason)); @@ -82,7 +81,7 @@ class FakeVideoCaptureDeviceTest : public testing::Test { void SetUp() override { EXPECT_CALL(*client_, 
ReserveOutputBuffer(_,_)).Times(0); - EXPECT_CALL(*client_, OnIncomingCapturedVideoFrame(_,_,_,_)).Times(0); + EXPECT_CALL(*client_, OnIncomingCapturedVideoFrame(_,_,_)).Times(0); } void OnFrameCaptured(const VideoCaptureFormat& format) { diff --git a/media/video/capture/video_capture_device.h b/media/video/capture/video_capture_device.h index 87de975..747f1b0 100644 --- a/media/video/capture/video_capture_device.h +++ b/media/video/capture/video_capture_device.h @@ -219,7 +219,6 @@ class MEDIA_EXPORT VideoCaptureDevice { // additional copies in the browser process. virtual void OnIncomingCapturedVideoFrame( const scoped_refptr<Buffer>& buffer, - const VideoCaptureFormat& buffer_format, const scoped_refptr<media::VideoFrame>& frame, const base::TimeTicks& timestamp) = 0; diff --git a/media/video/capture/video_capture_device_unittest.cc b/media/video/capture/video_capture_device_unittest.cc index c4e94a0..0c66c34 100644 --- a/media/video/capture/video_capture_device_unittest.cc +++ b/media/video/capture/video_capture_device_unittest.cc @@ -67,9 +67,8 @@ class MockClient : public VideoCaptureDevice::Client { MOCK_METHOD2(ReserveOutputBuffer, scoped_refptr<Buffer>(VideoFrame::Format format, const gfx::Size& dimensions)); - MOCK_METHOD4(OnIncomingCapturedVideoFrame, + MOCK_METHOD3(OnIncomingCapturedVideoFrame, void(const scoped_refptr<Buffer>& buffer, - const VideoCaptureFormat& buffer_format, const scoped_refptr<VideoFrame>& frame, const base::TimeTicks& timestamp)); MOCK_METHOD1(OnError, void(const std::string& reason)); @@ -129,7 +128,7 @@ class VideoCaptureDeviceTest : public testing::Test { base::android::AttachCurrentThread()); #endif EXPECT_CALL(*client_, ReserveOutputBuffer(_,_)).Times(0); - EXPECT_CALL(*client_, OnIncomingCapturedVideoFrame(_,_,_,_)).Times(0); + EXPECT_CALL(*client_, OnIncomingCapturedVideoFrame(_,_,_)).Times(0); } void ResetWithNewClient() { |
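
For orientation, a minimal usage sketch of the VideoFrameMetadata API that this patch introduces and plumbs through the capture pipeline. It is not part of the change: the helper names, the calling context, and the 30.0 fps fallback are illustrative assumptions; only metadata(), SetTimeTicks()/SetDouble(), and GetDouble() come from the patch itself.

// Illustrative sketch only; not part of the patch above.
#include "base/memory/ref_counted.h"
#include "base/time/time.h"
#include "media/base/video_frame.h"
#include "media/base/video_frame_metadata.h"

// Producer side: a capture source stamps the frame before delivering it.
// |capture_begin_time| and |frame_rate| are assumed to come from the source.
void StampCaptureMetadata(const scoped_refptr<media::VideoFrame>& frame,
                          base::TimeTicks capture_begin_time,
                          double frame_rate) {
  frame->metadata()->SetTimeTicks(
      media::VideoFrameMetadata::CAPTURE_BEGIN_TIME, capture_begin_time);
  frame->metadata()->SetTimeTicks(
      media::VideoFrameMetadata::CAPTURE_END_TIME, base::TimeTicks::Now());
  frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
                               frame_rate);
}

// Consumer side: the getters return false when a key was never set, so the
// caller picks its own fallback (30.0 here is a hypothetical default).
double FrameRateOrDefault(const scoped_refptr<media::VideoFrame>& frame) {
  double frame_rate;
  if (!frame->metadata()->GetDouble(media::VideoFrameMetadata::FRAME_RATE,
                                    &frame_rate)) {
    frame_rate = 30.0;
  }
  return frame_rate;
}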