author    dongseong.hwang@intel.com <dongseong.hwang@intel.com@0039d316-1c4b-4281-b951-d872f2087c98>    2014-04-30 19:15:28 +0000
committer dongseong.hwang@intel.com <dongseong.hwang@intel.com@0039d316-1c4b-4281-b951-d872f2087c98>    2014-04-30 19:15:28 +0000
commit  c37a4cc4ca8b30e5c5c47fe831d5a35f6b8461bf (patch)
tree    e51b1458cb8e5cc03ef2b8e5098c21e8ebe3ce6f /content
parent  bac64671bef27d0b35d4a7b099bb61fe9944c5d7 (diff)
HW Video: Make media::VideoFrame handle the sync point of the compositor as well as WebGL and canvas.

This makes it work when the video HW texture and the WebGL destination texture are in different share groups. media::VideoFrame must receive multiple sync points from its clients, because there can be many clients. In the WebGL case, only the compositor is a client of the mailbox for a frame, so the single MailboxHolder::sync_point can be reused as the release sync point. media::VideoFrame, however, has multiple clients, so it must handle multiple release sync points.

Let me explain the lifecycle of the mailbox of a video frame in detail:
1. The video decoder receives a new mailbox from the GPU process. The decoder doesn't insert a sync point, because all GPU operations on the mailbox were already executed in the GPU process.
2. Blink or the compositor reads the mailbox. Afterwards, every client must insert a release sync point.
3. When the ref count of the video frame reaches 0, the video frame's destructor calls the video decoder's recycle callback.
4. The video decoder returns reusable mailboxes to the GPU process after waiting for the release sync points.

Currently, three providers can create a texture-type video frame: GpuVideoDecoder, RTCVideoDecoder, and VideoCapture. The video frame of VideoCapture is created in the browser process, not the GPU process, so VideoCapture inserts a sync point before handing the frame to clients.

BUG=127940, 350925, 362521

Review URL: https://codereview.chromium.org/175223003

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@267290 0039d316-1c4b-4281-b951-d872f2087c98
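To make the lifecycle above concrete, here is a minimal standalone C++ sketch of the multiple-release-sync-point flow. The types FakeGpuChannel and SketchVideoFrame are hypothetical stand-ins invented for illustration, not the real media::VideoFrame or GPU command-buffer APIs; they only mirror the shape of AppendReleaseSyncPoint() and the std::vector<uint32> release callback that this patch introduces.

    // Minimal sketch, assuming simplified stand-ins for media::VideoFrame and
    // the GPU channel. Steps 1-4 refer to the lifecycle in the commit message.
    #include <cstdint>
    #include <functional>
    #include <iostream>
    #include <utility>
    #include <vector>

    // Hypothetical stand-in for the GPU command-buffer channel.
    struct FakeGpuChannel {
      uint32_t next_sync_point = 1;
      uint32_t InsertSyncPoint() { return next_sync_point++; }
      void WaitSyncPoint(uint32_t sp) {
        std::cout << "decoder waits on sync point " << sp << "\n";
      }
    };

    // Hypothetical stand-in for a NATIVE_TEXTURE video frame: clients append
    // release sync points; when the last reference goes away, the provider's
    // recycle callback receives all collected sync points at once.
    class SketchVideoFrame {
     public:
      using ReleaseCB = std::function<void(const std::vector<uint32_t>&)>;
      explicit SketchVideoFrame(ReleaseCB release_cb)
          : release_cb_(std::move(release_cb)) {}
      ~SketchVideoFrame() { release_cb_(release_sync_points_); }  // step 3
      void AppendReleaseSyncPoint(uint32_t sync_point) {          // step 2
        release_sync_points_.push_back(sync_point);
      }
     private:
      std::vector<uint32_t> release_sync_points_;
      ReleaseCB release_cb_;
    };

    int main() {
      FakeGpuChannel gpu;
      {
        // Step 1: the decoder hands out a frame without inserting a sync point.
        SketchVideoFrame frame([&gpu](const std::vector<uint32_t>& sync_points) {
          // Step 4: wait for every client's release sync point, then recycle.
          for (uint32_t sp : sync_points)
            gpu.WaitSyncPoint(sp);
          std::cout << "mailbox recycled\n";
        });
        // Step 2: each client appends its own release sync point.
        frame.AppendReleaseSyncPoint(gpu.InsertSyncPoint());  // compositor
        frame.AppendReleaseSyncPoint(gpu.InsertSyncPoint());  // WebGL copy
      }  // Step 3: last reference dropped here.
    }

In the patch itself, the same pattern shows up in RTCVideoDecoder::ReleaseMailbox and VideoCaptureController::ReturnBuffer below, which iterate over the collected release sync points before reusing the picture buffer or relinquishing the consumer hold.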
Diffstat (limited to 'content')
-rw-r--r--  content/browser/renderer_host/media/video_capture_controller.cc           8
-rw-r--r--  content/browser/renderer_host/media/video_capture_controller.h            2
-rw-r--r--  content/browser/renderer_host/media/video_capture_controller_unittest.cc  22
-rw-r--r--  content/browser/renderer_host/media/video_capture_host.cc                 9
-rw-r--r--  content/browser/renderer_host/media/video_capture_host.h                  4
-rw-r--r--  content/browser/renderer_host/media/video_capture_host_unittest.cc        7
-rw-r--r--  content/common/media/video_capture_messages.h                             2
-rw-r--r--  content/renderer/media/android/webmediaplayer_android.cc                  9
-rw-r--r--  content/renderer/media/rtc_video_decoder.cc                               6
-rw-r--r--  content/renderer/media/rtc_video_decoder.h                                6
-rw-r--r--  content/renderer/media/video_capture_impl.cc                              23
-rw-r--r--  content/renderer/media/video_capture_impl.h                               2
-rw-r--r--  content/renderer/media/video_capture_impl_unittest.cc                     2
-rw-r--r--  content/renderer/media/video_capture_message_filter.cc                    9
-rw-r--r--  content/renderer/media/webmediaplayer_impl.cc                             3
15 files changed, 64 insertions, 50 deletions
diff --git a/content/browser/renderer_host/media/video_capture_controller.cc b/content/browser/renderer_host/media/video_capture_controller.cc
index fb1ff7a..9c70806 100644
--- a/content/browser/renderer_host/media/video_capture_controller.cc
+++ b/content/browser/renderer_host/media/video_capture_controller.cc
@@ -251,7 +251,7 @@ void VideoCaptureController::ReturnBuffer(
const VideoCaptureControllerID& id,
VideoCaptureControllerEventHandler* event_handler,
int buffer_id,
- uint32 sync_point) {
+ const std::vector<uint32>& sync_points) {
DCHECK_CURRENTLY_ON(BrowserThread::IO);
ControllerClient* client = FindClient(id, event_handler, controller_clients_);
@@ -267,8 +267,10 @@ void VideoCaptureController::ReturnBuffer(
scoped_refptr<media::VideoFrame> frame = iter->second;
client->active_buffers.erase(iter);
- if (frame->format() == media::VideoFrame::NATIVE_TEXTURE)
- frame->mailbox_holder()->sync_point = sync_point;
+ if (frame->format() == media::VideoFrame::NATIVE_TEXTURE) {
+ for (size_t i = 0; i < sync_points.size(); i++)
+ frame->AppendReleaseSyncPoint(sync_points[i]);
+ }
buffer_pool_->RelinquishConsumerHold(buffer_id, 1);
}
diff --git a/content/browser/renderer_host/media/video_capture_controller.h b/content/browser/renderer_host/media/video_capture_controller.h
index c0c3bed..9b82870 100644
--- a/content/browser/renderer_host/media/video_capture_controller.h
+++ b/content/browser/renderer_host/media/video_capture_controller.h
@@ -105,7 +105,7 @@ class CONTENT_EXPORT VideoCaptureController {
void ReturnBuffer(const VideoCaptureControllerID& id,
VideoCaptureControllerEventHandler* event_handler,
int buffer_id,
- uint32 sync_point);
+ const std::vector<uint32>& sync_points);
const media::VideoCaptureFormat& GetVideoCaptureFormat() const;
diff --git a/content/browser/renderer_host/media/video_capture_controller_unittest.cc b/content/browser/renderer_host/media/video_capture_controller_unittest.cc
index 5fbb515..f650397 100644
--- a/content/browser/renderer_host/media/video_capture_controller_unittest.cc
+++ b/content/browser/renderer_host/media/video_capture_controller_unittest.cc
@@ -70,7 +70,7 @@ class MockVideoCaptureControllerEventHandler
id,
this,
buffer_id,
- 0));
+ std::vector<uint32>()));
}
virtual void OnMailboxBufferReady(const VideoCaptureControllerID& id,
int buffer_id,
@@ -79,7 +79,8 @@ class MockVideoCaptureControllerEventHandler
base::TimeTicks timestamp) OVERRIDE {
DoMailboxBufferReady(id);
// Use a very different syncpoint value when returning a new syncpoint.
- const uint32 new_sync_point = ~mailbox_holder.sync_point;
+ std::vector<uint32> release_sync_points;
+ release_sync_points.push_back(~mailbox_holder.sync_point);
base::MessageLoop::current()->PostTask(
FROM_HERE,
base::Bind(&VideoCaptureController::ReturnBuffer,
@@ -87,7 +88,7 @@ class MockVideoCaptureControllerEventHandler
id,
this,
buffer_id,
- new_sync_point));
+ release_sync_points));
}
virtual void OnEnded(const VideoCaptureControllerID& id) OVERRIDE {
DoEnded(id);
@@ -262,9 +263,11 @@ TEST_F(VideoCaptureControllerTest, AddAndRemoveClients) {
<< "Client count should return to zero after all clients are gone.";
}
-static void CacheSyncPoint(uint32* sync_value,
- scoped_ptr<gpu::MailboxHolder> mailbox_holder) {
- *sync_value = mailbox_holder->sync_point;
+static void CacheSyncPoint(std::vector<uint32>* called_release_sync_points,
+ const std::vector<uint32>& release_sync_points) {
+ DCHECK(called_release_sync_points->empty());
+ called_release_sync_points->assign(release_sync_points.begin(),
+ release_sync_points.end());
}
// This test will connect and disconnect several clients while simulating an
@@ -481,7 +484,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) {
buffer = NULL;
}
std::vector<uint32> mailbox_syncpoints(mailbox_buffers);
- std::vector<uint32> mailbox_syncpoints_new(mailbox_buffers);
+ std::vector<std::vector<uint32> > release_syncpoint_vectors(mailbox_buffers);
for (int i = 0; i < mailbox_buffers; ++i) {
buffer = device_->ReserveOutputBuffer(media::VideoFrame::NATIVE_TEXTURE,
gfx::Size(0, 0));
@@ -496,7 +499,7 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) {
buffer,
make_scoped_ptr(new gpu::MailboxHolder(
gpu::Mailbox(), 0, mailbox_syncpoints[i])),
- base::Bind(&CacheSyncPoint, &mailbox_syncpoints_new[i]),
+ base::Bind(&CacheSyncPoint, &release_syncpoint_vectors[i]),
capture_resolution),
base::TimeTicks());
buffer = NULL;
@@ -513,7 +516,8 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) {
base::RunLoop().RunUntilIdle();
for (size_t i = 0; i < mailbox_syncpoints.size(); ++i) {
// See: MockVideoCaptureControllerEventHandler::OnMailboxBufferReady()
- ASSERT_EQ(mailbox_syncpoints[i], ~mailbox_syncpoints_new[i]);
+ ASSERT_EQ(1u, release_syncpoint_vectors[i].size());
+ ASSERT_EQ(mailbox_syncpoints[i], ~release_syncpoint_vectors[i][0]);
}
Mock::VerifyAndClearExpectations(client_b_.get());
}
diff --git a/content/browser/renderer_host/media/video_capture_host.cc b/content/browser/renderer_host/media/video_capture_host.cc
index 43bc404..14ab5d4 100644
--- a/content/browser/renderer_host/media/video_capture_host.cc
+++ b/content/browser/renderer_host/media/video_capture_host.cc
@@ -295,9 +295,10 @@ void VideoCaptureHost::OnPauseCapture(int device_id) {
Send(new VideoCaptureMsg_StateChanged(device_id, VIDEO_CAPTURE_STATE_ERROR));
}
-void VideoCaptureHost::OnReceiveEmptyBuffer(int device_id,
- int buffer_id,
- uint32 sync_point) {
+void VideoCaptureHost::OnReceiveEmptyBuffer(
+ int device_id,
+ int buffer_id,
+ const std::vector<uint32>& sync_points) {
DCHECK_CURRENTLY_ON(BrowserThread::IO);
VideoCaptureControllerID controller_id(device_id);
@@ -305,7 +306,7 @@ void VideoCaptureHost::OnReceiveEmptyBuffer(int device_id,
if (it != entries_.end()) {
const base::WeakPtr<VideoCaptureController>& controller = it->second;
if (controller)
- controller->ReturnBuffer(controller_id, this, buffer_id, sync_point);
+ controller->ReturnBuffer(controller_id, this, buffer_id, sync_points);
}
}
diff --git a/content/browser/renderer_host/media/video_capture_host.h b/content/browser/renderer_host/media/video_capture_host.h
index 89dd1d6..25d6947 100644
--- a/content/browser/renderer_host/media/video_capture_host.h
+++ b/content/browser/renderer_host/media/video_capture_host.h
@@ -125,7 +125,9 @@ class CONTENT_EXPORT VideoCaptureHost
// IPC message: Receive an empty buffer from renderer. Send it to device
// referenced by |device_id|.
- void OnReceiveEmptyBuffer(int device_id, int buffer_id, uint32 sync_point);
+ void OnReceiveEmptyBuffer(int device_id,
+ int buffer_id,
+ const std::vector<uint32>& sync_points);
// IPC message: Get supported formats referenced by |capture_session_id|.
// |device_id| is needed for message back-routing purposes.
diff --git a/content/browser/renderer_host/media/video_capture_host_unittest.cc b/content/browser/renderer_host/media/video_capture_host_unittest.cc
index 48a3563..171d300 100644
--- a/content/browser/renderer_host/media/video_capture_host_unittest.cc
+++ b/content/browser/renderer_host/media/video_capture_host_unittest.cc
@@ -151,7 +151,7 @@ class MockVideoCaptureHost : public VideoCaptureHost {
void ReturnReceivedDibs(int device_id) {
int handle = GetReceivedDib();
while (handle) {
- this->OnReceiveEmptyBuffer(device_id, handle, 0);
+ this->OnReceiveEmptyBuffer(device_id, handle, std::vector<uint32>());
handle = GetReceivedDib();
}
}
@@ -239,7 +239,8 @@ class MockVideoCaptureHost : public VideoCaptureHost {
OnBufferFilled(device_id, buffer_id, frame_format, timestamp);
if (return_buffers_) {
- VideoCaptureHost::OnReceiveEmptyBuffer(device_id, buffer_id, 0);
+ VideoCaptureHost::OnReceiveEmptyBuffer(
+ device_id, buffer_id, std::vector<uint32>());
}
}
@@ -252,7 +253,7 @@ class MockVideoCaptureHost : public VideoCaptureHost {
device_id, buffer_id, mailbox_holder, format, timestamp);
if (return_buffers_) {
VideoCaptureHost::OnReceiveEmptyBuffer(
- device_id, buffer_id, mailbox_holder.sync_point);
+ device_id, buffer_id, std::vector<uint32>());
}
}
diff --git a/content/common/media/video_capture_messages.h b/content/common/media/video_capture_messages.h
index abfc46c..c3970df 100644
--- a/content/common/media/video_capture_messages.h
+++ b/content/common/media/video_capture_messages.h
@@ -92,7 +92,7 @@ IPC_MESSAGE_CONTROL1(VideoCaptureHostMsg_Stop,
IPC_MESSAGE_CONTROL3(VideoCaptureHostMsg_BufferReady,
int /* device_id */,
int /* buffer_id */,
- uint32 /* syncpoint */)
+ std::vector<uint32> /* syncpoints */)
// Get the formats supported by a device referenced by |capture_session_id|.
IPC_MESSAGE_CONTROL2(VideoCaptureHostMsg_GetDeviceSupportedFormats,
diff --git a/content/renderer/media/android/webmediaplayer_android.cc b/content/renderer/media/android/webmediaplayer_android.cc
index a725840..c8c8ae4 100644
--- a/content/renderer/media/android/webmediaplayer_android.cc
+++ b/content/renderer/media/android/webmediaplayer_android.cc
@@ -71,10 +71,10 @@ const char* kMediaEme = "Media.EME.";
void OnReleaseTexture(
const scoped_refptr<content::StreamTextureFactory>& factories,
uint32 texture_id,
- scoped_ptr<gpu::MailboxHolder> mailbox_holder) {
+ const std::vector<uint32>& release_sync_points) {
GLES2Interface* gl = factories->ContextGL();
- if (mailbox_holder->sync_point)
- gl->WaitSyncPointCHROMIUM(mailbox_holder->sync_point);
+ for (size_t i = 0; i < release_sync_points.size(); i++)
+ gl->WaitSyncPointCHROMIUM(release_sync_points[i]);
gl->DeleteTextures(1, &texture_id);
}
} // namespace
@@ -468,7 +468,7 @@ bool WebMediaPlayerAndroid::copyVideoTextureToPlatformTexture(
if (!video_frame ||
video_frame->format() != media::VideoFrame::NATIVE_TEXTURE)
return false;
- gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
+ const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
DCHECK((!is_remote_ &&
mailbox_holder->texture_target == GL_TEXTURE_EXTERNAL_OES) ||
(is_remote_ && mailbox_holder->texture_target == GL_TEXTURE_2D));
@@ -516,6 +516,7 @@ bool WebMediaPlayerAndroid::copyVideoTextureToPlatformTexture(
web_graphics_context->bindTexture(GL_TEXTURE_2D, texture);
web_graphics_context->deleteTexture(source_texture);
web_graphics_context->flush();
+ video_frame->AppendReleaseSyncPoint(web_graphics_context->insertSyncPoint());
return true;
}
diff --git a/content/renderer/media/rtc_video_decoder.cc b/content/renderer/media/rtc_video_decoder.cc
index 81ca813..3987767 100644
--- a/content/renderer/media/rtc_video_decoder.cc
+++ b/content/renderer/media/rtc_video_decoder.cc
@@ -639,9 +639,11 @@ void RTCVideoDecoder::ReleaseMailbox(
const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
int64 picture_buffer_id,
uint32 texture_id,
- scoped_ptr<gpu::MailboxHolder> mailbox_holder) {
+ const std::vector<uint32>& release_sync_points) {
DCHECK(factories->GetTaskRunner()->BelongsToCurrentThread());
- factories->WaitSyncPoint(mailbox_holder->sync_point);
+
+ for (size_t i = 0; i < release_sync_points.size(); i++)
+ factories->WaitSyncPoint(release_sync_points[i]);
if (decoder) {
decoder->ReusePictureBuffer(picture_buffer_id);
diff --git a/content/renderer/media/rtc_video_decoder.h b/content/renderer/media/rtc_video_decoder.h
index e0070b7..d1a04e5 100644
--- a/content/renderer/media/rtc_video_decoder.h
+++ b/content/renderer/media/rtc_video_decoder.h
@@ -28,10 +28,6 @@ class WaitableEvent;
class MessageLoopProxy;
};
-namespace gpu {
-struct MailboxHolder;
-}
-
namespace media {
class DecoderBuffer;
class GpuVideoAcceleratorFactories;
@@ -156,7 +152,7 @@ class CONTENT_EXPORT RTCVideoDecoder
const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
int64 picture_buffer_id,
uint32 texture_id,
- scoped_ptr<gpu::MailboxHolder> mailbox_holder);
+ const std::vector<uint32>& release_sync_points);
// Tells VDA that a picture buffer can be recycled.
void ReusePictureBuffer(int64 picture_buffer_id);
diff --git a/content/renderer/media/video_capture_impl.cc b/content/renderer/media/video_capture_impl.cc
index bda203a..465c9fe 100644
--- a/content/renderer/media/video_capture_impl.cc
+++ b/content/renderer/media/video_capture_impl.cc
@@ -214,7 +214,8 @@ void VideoCaptureImpl::OnBufferReceived(int buffer_id,
DCHECK_EQ(format.pixel_format, media::PIXEL_FORMAT_I420);
if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_) {
- Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 0));
+ Send(new VideoCaptureHostMsg_BufferReady(
+ device_id_, buffer_id, std::vector<uint32>()));
return;
}
@@ -242,12 +243,12 @@ void VideoCaptureImpl::OnBufferReceived(int buffer_id,
buffer->buffer_size,
buffer->buffer->handle(),
timestamp - first_frame_timestamp_,
- media::BindToCurrentLoop(base::Bind(
- &VideoCaptureImpl::OnClientBufferFinished,
- weak_factory_.GetWeakPtr(),
- buffer_id,
- buffer,
- base::Passed(scoped_ptr<gpu::MailboxHolder>().Pass()))));
+ media::BindToCurrentLoop(
+ base::Bind(&VideoCaptureImpl::OnClientBufferFinished,
+ weak_factory_.GetWeakPtr(),
+ buffer_id,
+ buffer,
+ std::vector<uint32>())));
for (ClientInfoMap::iterator it = clients_.begin(); it != clients_.end();
++it) {
@@ -266,7 +267,7 @@ void VideoCaptureImpl::OnMailboxBufferReceived(
if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_) {
Send(new VideoCaptureHostMsg_BufferReady(
- device_id_, buffer_id, mailbox_holder.sync_point));
+ device_id_, buffer_id, std::vector<uint32>()));
return;
}
@@ -296,10 +297,10 @@ void VideoCaptureImpl::OnMailboxBufferReceived(
void VideoCaptureImpl::OnClientBufferFinished(
int buffer_id,
const scoped_refptr<ClientBuffer>& /* ignored_buffer */,
- scoped_ptr<gpu::MailboxHolder> mailbox_holder) {
+ const std::vector<uint32>& release_sync_points) {
DCHECK(thread_checker_.CalledOnValidThread());
- const uint32 sync_point = (mailbox_holder ? mailbox_holder->sync_point : 0);
- Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, sync_point));
+ Send(new VideoCaptureHostMsg_BufferReady(
+ device_id_, buffer_id, release_sync_points));
}
void VideoCaptureImpl::OnStateChanged(VideoCaptureState state) {
diff --git a/content/renderer/media/video_capture_impl.h b/content/renderer/media/video_capture_impl.h
index 66c393c..872bfd6 100644
--- a/content/renderer/media/video_capture_impl.h
+++ b/content/renderer/media/video_capture_impl.h
@@ -129,7 +129,7 @@ class CONTENT_EXPORT VideoCaptureImpl
// buffer.
void OnClientBufferFinished(int buffer_id,
const scoped_refptr<ClientBuffer>& buffer,
- scoped_ptr<gpu::MailboxHolder> mailbox_holder);
+ const std::vector<uint32>& release_sync_points);
void StopDevice();
void RestartCapture();
diff --git a/content/renderer/media/video_capture_impl_unittest.cc b/content/renderer/media/video_capture_impl_unittest.cc
index 13c71af..1f54b76 100644
--- a/content/renderer/media/video_capture_impl_unittest.cc
+++ b/content/renderer/media/video_capture_impl_unittest.cc
@@ -80,7 +80,7 @@ class VideoCaptureImplTest : public ::testing::Test {
void DeviceReceiveEmptyBuffer(int device_id,
int buffer_id,
- uint32 sync_point) {}
+ const std::vector<uint32>& sync_points) {}
void DeviceGetSupportedFormats(int device_id,
media::VideoCaptureSessionId session_id) {
diff --git a/content/renderer/media/video_capture_message_filter.cc b/content/renderer/media/video_capture_message_filter.cc
index 78d25e7..f58347b 100644
--- a/content/renderer/media/video_capture_message_filter.cc
+++ b/content/renderer/media/video_capture_message_filter.cc
@@ -114,7 +114,8 @@ void VideoCaptureMessageFilter::OnBufferCreated(
// Send the buffer back to Host in case it's waiting for all buffers
// to be returned.
base::SharedMemory::CloseHandle(handle);
- Send(new VideoCaptureHostMsg_BufferReady(device_id, buffer_id, 0));
+ Send(new VideoCaptureHostMsg_BufferReady(
+ device_id, buffer_id, std::vector<uint32>()));
return;
}
@@ -133,7 +134,8 @@ void VideoCaptureMessageFilter::OnBufferReceived(
// Send the buffer back to Host in case it's waiting for all buffers
// to be returned.
- Send(new VideoCaptureHostMsg_BufferReady(device_id, buffer_id, 0));
+ Send(new VideoCaptureHostMsg_BufferReady(
+ device_id, buffer_id, std::vector<uint32>()));
return;
}
@@ -154,7 +156,8 @@ void VideoCaptureMessageFilter::OnMailboxBufferReceived(
// Send the buffer back to Host in case it's waiting for all buffers
// to be returned.
- Send(new VideoCaptureHostMsg_BufferReady(device_id, buffer_id, 0));
+ Send(new VideoCaptureHostMsg_BufferReady(
+ device_id, buffer_id, std::vector<uint32>()));
return;
}
diff --git a/content/renderer/media/webmediaplayer_impl.cc b/content/renderer/media/webmediaplayer_impl.cc
index fa12b0f..75c1e7a 100644
--- a/content/renderer/media/webmediaplayer_impl.cc
+++ b/content/renderer/media/webmediaplayer_impl.cc
@@ -635,7 +635,7 @@ bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture(
if (video_frame->format() != media::VideoFrame::NATIVE_TEXTURE)
return false;
- gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
+ const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
if (mailbox_holder->texture_target != GL_TEXTURE_2D)
return false;
@@ -682,6 +682,7 @@ bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture(
web_graphics_context->deleteTexture(source_texture);
web_graphics_context->flush();
+ video_frame->AppendReleaseSyncPoint(web_graphics_context->insertSyncPoint());
return true;
}