author     miu <miu@chromium.org>                    2015-11-16 22:18:18 -0800
committer  Commit bot <commit-bot@chromium.org>      2015-11-17 06:19:10 +0000
commit     d3ec5867d26f67b821489476179ff1a9b9996d2a (patch)
tree       dd9991336fbf42cbc171390f55354adfdda3bc3c
parent     f0f61dec25d904bf8f8377b33603817ae1a64d38 (diff)
Remove dead code paths around PIXEL_STORAGE_TEXTURE in capture pipeline.
Removes code to capture a result as a GPU texture (i.e., tab/desktop
capture without read-back into main memory). This code has no activation
path and is being removed to make future refactoring/clean-up efforts go
more smoothly.

This change also includes a few minor clean-ups/comment changes in
downstream code, based on revelations during the code removal process.

BUG=552570

Review URL: https://codereview.chromium.org/1439533004

Cr-Commit-Position: refs/heads/master@{#360021}
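For readers skimming the patch below, the overall shape of the change can be summarized in a short standalone C++ sketch (illustrative only, not Chromium code; the names are stand-ins for VideoCaptureBufferPool::Tracker and the media::VideoPixelStorage enum): with PIXEL_STORAGE_TEXTURE gone, storage dispatch collapses to an exhaustive switch over the two remaining storage types, and unsupported input is rejected up front rather than handled inline.

    // Standalone sketch of the post-patch dispatch pattern (assumed names,
    // modern C++ used for brevity; the real code uses scoped_ptr/NOTREACHED()).
    #include <cassert>
    #include <memory>

    enum class PixelStorage { kCpu, kGpuMemoryBuffer };  // TEXTURE value removed

    struct Tracker { virtual ~Tracker() = default; };
    struct SharedMemTracker : Tracker {};
    struct GpuMemoryBufferTracker : Tracker {};

    std::unique_ptr<Tracker> CreateTracker(PixelStorage storage) {
      switch (storage) {  // exhaustive over the two remaining storage types
        case PixelStorage::kCpu:
          return std::make_unique<SharedMemTracker>();
        case PixelStorage::kGpuMemoryBuffer:
          return std::make_unique<GpuMemoryBufferTracker>();
      }
      assert(false && "unsupported storage");  // stands in for NOTREACHED()
      return nullptr;
    }

A caller would simply write auto tracker = CreateTracker(PixelStorage::kCpu); the same switch-plus-NOTREACHED() pattern recurs in VideoCaptureDeviceClient and VideoCaptureImpl in the hunks below.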
-rw-r--r--  content/browser/media/capture/aura_window_capture_machine.cc  35
-rw-r--r--  content/browser/media/capture/desktop_capture_device_aura_unittest.cc  6
-rw-r--r--  content/browser/renderer_host/media/video_capture_buffer_pool.cc  16
-rw-r--r--  content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc  12
-rw-r--r--  content/browser/renderer_host/media/video_capture_controller.cc  93
-rw-r--r--  content/browser/renderer_host/media/video_capture_controller.h  2
-rw-r--r--  content/browser/renderer_host/media/video_capture_controller_unittest.cc  119
-rw-r--r--  content/browser/renderer_host/media/video_capture_device_client.cc  100
-rw-r--r--  content/browser/renderer_host/media/video_capture_gpu_jpeg_decoder.h  2
-rw-r--r--  content/browser/renderer_host/media/video_capture_host.cc  5
-rw-r--r--  content/common/media/video_capture_messages.h  2
-rw-r--r--  content/renderer/media/video_capture_impl.cc  103
-rw-r--r--  content/renderer/media/video_capture_impl.h  7
-rw-r--r--  content/renderer/media/video_capture_impl_unittest.cc  48
-rw-r--r--  content/renderer/media/video_capture_message_filter.cc  3
-rw-r--r--  content/renderer/media/video_capture_message_filter.h  7
-rw-r--r--  content/renderer/media/video_capture_message_filter_unittest.cc  36
-rw-r--r--  media/base/video_capture_types.cc  16
-rw-r--r--  media/base/video_capture_types.h  5
-rw-r--r--  media/base/video_frame.cc  6
-rw-r--r--  media/capture/content/screen_capture_device_core.cc  6
-rw-r--r--  media/capture/content/thread_safe_capture_oracle.cc  33
22 files changed, 227 insertions, 435 deletions
diff --git a/content/browser/media/capture/aura_window_capture_machine.cc b/content/browser/media/capture/aura_window_capture_machine.cc
index d6578c9..6cb3e77 100644
--- a/content/browser/media/capture/aura_window_capture_machine.cc
+++ b/content/browser/media/capture/aura_window_capture_machine.cc
@@ -36,15 +36,6 @@
namespace content {
-namespace {
-
-void RunSingleReleaseCallback(scoped_ptr<cc::SingleReleaseCallback> cb,
- const gpu::SyncToken& sync_token) {
- cb->Run(sync_token, false);
-}
-
-} // namespace
-
AuraWindowCaptureMachine::AuraWindowCaptureMachine()
: desktop_window_(NULL),
timer_(true, true),
@@ -246,31 +237,7 @@ bool AuraWindowCaptureMachine::ProcessCopyOutputResponse(
if (result->IsEmpty() || result->size().IsEmpty() || !desktop_window_)
return false;
-
- if (capture_params_.requested_format.pixel_storage ==
- media::PIXEL_STORAGE_TEXTURE) {
- DCHECK_EQ(media::PIXEL_FORMAT_ARGB,
- capture_params_.requested_format.pixel_format);
- DCHECK(!video_frame.get());
- cc::TextureMailbox texture_mailbox;
- scoped_ptr<cc::SingleReleaseCallback> release_callback;
- result->TakeTexture(&texture_mailbox, &release_callback);
- DCHECK(texture_mailbox.IsTexture());
- if (!texture_mailbox.IsTexture())
- return false;
- video_frame = media::VideoFrame::WrapNativeTexture(
- media::PIXEL_FORMAT_ARGB,
- gpu::MailboxHolder(texture_mailbox.mailbox(),
- texture_mailbox.sync_token(),
- texture_mailbox.target()),
- base::Bind(&RunSingleReleaseCallback, base::Passed(&release_callback)),
- result->size(), gfx::Rect(result->size()), result->size(),
- base::TimeDelta());
- capture_frame_cb.Run(video_frame, start_time, true);
- return true;
- } else {
- DCHECK(video_frame.get());
- }
+ DCHECK(video_frame.get());
// Compute the dest size we want after the letterboxing resize. Make the
// coordinates and sizes even because we letterbox in YUV space
diff --git a/content/browser/media/capture/desktop_capture_device_aura_unittest.cc b/content/browser/media/capture/desktop_capture_device_aura_unittest.cc
index dd12640..c0ee3db 100644
--- a/content/browser/media/capture/desktop_capture_device_aura_unittest.cc
+++ b/content/browser/media/capture/desktop_capture_device_aura_unittest.cc
@@ -64,10 +64,8 @@ class MockDeviceClient : public media::VideoCaptureDevice::Client {
const gfx::Size& dimensions,
media::VideoPixelFormat format,
media::VideoPixelStorage storage) override {
- EXPECT_TRUE((format == media::PIXEL_FORMAT_I420 &&
- storage == media::PIXEL_STORAGE_CPU) ||
- (format == media::PIXEL_FORMAT_ARGB &&
- storage == media::PIXEL_STORAGE_TEXTURE));
+ EXPECT_EQ(media::PIXEL_FORMAT_I420, format);
+ EXPECT_EQ(media::PIXEL_STORAGE_CPU, storage);
DoReserveOutputBuffer();
return scoped_ptr<Buffer>();
}
diff --git a/content/browser/renderer_host/media/video_capture_buffer_pool.cc b/content/browser/renderer_host/media/video_capture_buffer_pool.cc
index b358021..76cc9b6 100644
--- a/content/browser/renderer_host/media/video_capture_buffer_pool.cc
+++ b/content/browser/renderer_host/media/video_capture_buffer_pool.cc
@@ -260,14 +260,14 @@ bool VideoCaptureBufferPool::GpuMemoryBufferTracker::ShareToProcess2(
scoped_ptr<VideoCaptureBufferPool::Tracker>
VideoCaptureBufferPool::Tracker::CreateTracker(
media::VideoPixelStorage storage) {
- DCHECK(storage == media::PIXEL_STORAGE_GPUMEMORYBUFFER ||
- storage == media::PIXEL_STORAGE_CPU ||
- storage == media::PIXEL_STORAGE_TEXTURE);
-
- if (storage == media::PIXEL_STORAGE_GPUMEMORYBUFFER)
- return make_scoped_ptr(new GpuMemoryBufferTracker());
- else
- return make_scoped_ptr(new SharedMemTracker());
+ switch (storage) {
+ case media::PIXEL_STORAGE_GPUMEMORYBUFFER:
+ return make_scoped_ptr(new GpuMemoryBufferTracker());
+ case media::PIXEL_STORAGE_CPU:
+ return make_scoped_ptr(new SharedMemTracker());
+ }
+ NOTREACHED();
+ return scoped_ptr<VideoCaptureBufferPool::Tracker>();
}
VideoCaptureBufferPool::Tracker::~Tracker() {}
diff --git a/content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc b/content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc
index 856f532..5ebb610 100644
--- a/content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc
+++ b/content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc
@@ -16,7 +16,6 @@
#include "content/browser/gpu/browser_gpu_memory_buffer_manager.h"
#include "content/browser/renderer_host/media/video_capture_controller.h"
#include "media/base/video_frame.h"
-#include "media/base/video_util.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -30,7 +29,6 @@ struct PixelFormatAndStorage {
static const PixelFormatAndStorage kCapturePixelFormatAndStorages[] = {
{media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU},
{media::PIXEL_FORMAT_ARGB, media::PIXEL_STORAGE_CPU},
- {media::PIXEL_FORMAT_ARGB, media::PIXEL_STORAGE_TEXTURE},
#if !defined(OS_ANDROID)
{media::PIXEL_FORMAT_I420,
media::PIXEL_STORAGE_GPUMEMORYBUFFER},
@@ -225,12 +223,10 @@ TEST_P(VideoCaptureBufferPoolTest, BufferPool) {
ASSERT_LE(format_lo.ImageAllocationSize(), buffer3->mapped_size());
}
- // Texture backed Frames cannot be manipulated via mapping.
- if (GetParam().pixel_storage != media::PIXEL_STORAGE_TEXTURE) {
- ASSERT_NE(nullptr, buffer1->data());
- ASSERT_NE(nullptr, buffer2->data());
- ASSERT_NE(nullptr, buffer3->data());
- }
+ ASSERT_NE(nullptr, buffer1->data());
+ ASSERT_NE(nullptr, buffer2->data());
+ ASSERT_NE(nullptr, buffer3->data());
+
// Touch the memory.
if (buffer1->data() != nullptr)
memset(buffer1->data(), 0x11, buffer1->mapped_size());
diff --git a/content/browser/renderer_host/media/video_capture_controller.cc b/content/browser/renderer_host/media/video_capture_controller.cc
index 01a7772..72a7a62 100644
--- a/content/browser/renderer_host/media/video_capture_controller.cc
+++ b/content/browser/renderer_host/media/video_capture_controller.cc
@@ -19,7 +19,6 @@
#include "content/common/gpu/client/gl_helper.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/common/content_switches.h"
-#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/video_frame.h"
#if !defined(OS_ANDROID)
@@ -152,11 +151,25 @@ void VideoCaptureController::AddClient(
media::VideoCaptureSessionId session_id,
const media::VideoCaptureParams& params) {
DCHECK_CURRENTLY_ON(BrowserThread::IO);
- DVLOG(1) << "VideoCaptureController::AddClient, id " << id
- << ", " << params.requested_format.frame_size.ToString()
- << ", " << params.requested_format.frame_rate
- << ", " << session_id
- << ")";
+ DVLOG(1) << "VideoCaptureController::AddClient() -- id=" << id
+ << ", session_id=" << session_id
+ << ", params.requested_format="
+ << media::VideoCaptureFormat::ToString(params.requested_format);
+
+ // Check that requested VideoCaptureParams are valid and supported. If not,
+ // report an error immediately and punt.
+ if (!params.IsValid() ||
+ params.requested_format.pixel_format != media::PIXEL_FORMAT_I420 ||
+ (params.requested_format.pixel_storage != media::PIXEL_STORAGE_CPU &&
+ params.requested_format.pixel_storage !=
+ media::PIXEL_STORAGE_GPUMEMORYBUFFER)) {
+ // Crash in debug builds since the renderer should not have asked for
+ // invalid or unsupported parameters.
+ LOG(DFATAL) << "Invalid or unsupported video capture parameters requested: "
+ << media::VideoCaptureFormat::ToString(params.requested_format);
+ event_handler->OnError(id);
+ return;
+ }
// If this is the first client added to the controller, cache the parameters.
if (!controller_clients_.size())
@@ -343,11 +356,20 @@ void VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread(
scoped_ptr<base::DictionaryValue> metadata(new base::DictionaryValue());
frame->metadata()->MergeInternalValuesInto(metadata.get());
- DCHECK(
- (frame->IsMappable() && frame->format() == media::PIXEL_FORMAT_I420) ||
- (frame->HasTextures() && frame->format() == media::PIXEL_FORMAT_ARGB))
- << "Format and/or storage type combination not supported (received: "
- << media::VideoPixelFormatToString(frame->format()) << ")";
+ // Only I420 pixel format is currently supported.
+ DCHECK_EQ(frame->format(), media::PIXEL_FORMAT_I420)
+ << "Unsupported pixel format: "
+ << media::VideoPixelFormatToString(frame->format());
+
+ // Sanity-checks to confirm |frame| is actually being backed by |buffer|.
+ DCHECK(frame->storage_type() == media::VideoFrame::STORAGE_SHMEM ||
+ (frame->storage_type() ==
+ media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS));
+ DCHECK(frame->data(media::VideoFrame::kYPlane) >= buffer->data(0) &&
+ (frame->data(media::VideoFrame::kYPlane) <
+ (reinterpret_cast<const uint8*>(buffer->data(0)) +
+ buffer->mapped_size())))
+ << "VideoFrame does not appear to be backed by Buffer";
for (const auto& client : controller_clients_) {
if (client->session_closed || client->paused)
@@ -428,31 +450,32 @@ void VideoCaptureController::DoNewBufferOnIOThread(
DCHECK_CURRENTLY_ON(BrowserThread::IO);
const int buffer_id = buffer->id();
- if (frame->HasTextures()) {
- DCHECK_EQ(frame->format(), media::PIXEL_FORMAT_ARGB);
- DCHECK(frame->coded_size() == frame->visible_rect().size())
- << "Textures shouldn't be crop-marked or letterboxed.";
- return;
- }
-
- DCHECK_EQ(frame->format(), media::PIXEL_FORMAT_I420);
- if (frame->storage_type() == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS) {
- std::vector<gfx::GpuMemoryBufferHandle> handles;
- for (size_t i = 0; i < media::VideoFrame::NumPlanes(frame->format()); ++i) {
- gfx::GpuMemoryBufferHandle remote_handle;
- buffer_pool_->ShareToProcess2(buffer_id, i, client->render_process_handle,
- &remote_handle);
- handles.push_back(remote_handle);
+ switch (frame->storage_type()) {
+ case media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS: {
+ std::vector<gfx::GpuMemoryBufferHandle> handles;
+ const size_t num_planes = media::VideoFrame::NumPlanes(frame->format());
+ for (size_t i = 0; i < num_planes; ++i) {
+ gfx::GpuMemoryBufferHandle remote_handle;
+ buffer_pool_->ShareToProcess2(
+ buffer_id, i, client->render_process_handle, &remote_handle);
+ handles.push_back(remote_handle);
+ }
+ client->event_handler->OnBufferCreated2(client->controller_id, handles,
+ buffer->dimensions(), buffer_id);
+ break;
+ }
+ case media::VideoFrame::STORAGE_SHMEM: {
+ base::SharedMemoryHandle remote_handle;
+ buffer_pool_->ShareToProcess(buffer_id, client->render_process_handle,
+ &remote_handle);
+ client->event_handler->OnBufferCreated(
+ client->controller_id, remote_handle, buffer->mapped_size(),
+ buffer_id);
+ break;
}
- client->event_handler->OnBufferCreated2(client->controller_id, handles,
- buffer->dimensions(), buffer_id);
- } else {
- base::SharedMemoryHandle remote_handle;
- buffer_pool_->ShareToProcess(buffer_id, client->render_process_handle,
- &remote_handle);
-
- client->event_handler->OnBufferCreated(client->controller_id, remote_handle,
- buffer->mapped_size(), buffer_id);
+ default:
+ NOTREACHED();
+ break;
}
}
diff --git a/content/browser/renderer_host/media/video_capture_controller.h b/content/browser/renderer_host/media/video_capture_controller.h
index b51bd25..9f97ad3 100644
--- a/content/browser/renderer_host/media/video_capture_controller.h
+++ b/content/browser/renderer_host/media/video_capture_controller.h
@@ -107,7 +107,7 @@ class CONTENT_EXPORT VideoCaptureController {
// Return a buffer with id |buffer_id| previously given in
// VideoCaptureControllerEventHandler::OnBufferReady. In the case that the
- // buffer was backed by a texture, |sync_point| will be waited on before
+ // buffer was backed by a texture, |sync_token| will be waited on before
// destroying or recycling the texture, to synchronize with texture users in
// the renderer process. If the consumer provided resource utilization
// feedback, this will be passed here (-1.0 indicates no feedback).
diff --git a/content/browser/renderer_host/media/video_capture_controller_unittest.cc b/content/browser/renderer_host/media/video_capture_controller_unittest.cc
index 27eb172..9af6feea 100644
--- a/content/browser/renderer_host/media/video_capture_controller_unittest.cc
+++ b/content/browser/renderer_host/media/video_capture_controller_unittest.cc
@@ -18,20 +18,14 @@
#include "content/browser/renderer_host/media/video_capture_controller.h"
#include "content/browser/renderer_host/media/video_capture_controller_event_handler.h"
#include "content/browser/renderer_host/media/video_capture_manager.h"
-#include "content/common/gpu/client/gl_helper.h"
#include "content/common/media/media_stream_options.h"
#include "content/public/test/test_browser_thread_bundle.h"
-#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/video_capture_types.h"
#include "media/base/video_frame_metadata.h"
#include "media/base/video_util.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#if !defined(OS_ANDROID)
-#include "content/browser/compositor/test/no_transport_image_transport_factory.h"
-#endif
-
using ::testing::_;
using ::testing::InSequence;
using ::testing::Mock;
@@ -55,8 +49,6 @@ class MockVideoCaptureControllerEventHandler
MOCK_METHOD1(DoBufferDestroyed, void(VideoCaptureControllerID));
MOCK_METHOD2(DoI420BufferReady,
void(VideoCaptureControllerID, const gfx::Size&));
- MOCK_METHOD2(DoTextureBufferReady,
- void(VideoCaptureControllerID, const gfx::Size&));
MOCK_METHOD1(DoEnded, void(VideoCaptureControllerID));
MOCK_METHOD1(DoError, void(VideoCaptureControllerID));
@@ -82,7 +74,6 @@ class MockVideoCaptureControllerEventHandler
int buffer_id,
const scoped_refptr<media::VideoFrame>& frame,
const base::TimeTicks& timestamp) override {
- if (!frame->HasTextures()) {
EXPECT_EQ(frame->format(), media::PIXEL_FORMAT_I420);
DoI420BufferReady(id, frame->coded_size());
base::ThreadTaskRunnerHandle::Get()->PostTask(
@@ -90,15 +81,6 @@ class MockVideoCaptureControllerEventHandler
base::Bind(&VideoCaptureController::ReturnBuffer,
base::Unretained(controller_), id, this, buffer_id,
gpu::SyncToken(), resource_utilization_));
- } else {
- EXPECT_EQ(frame->format(), media::PIXEL_FORMAT_ARGB);
- DoTextureBufferReady(id, frame->coded_size());
- base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE, base::Bind(&VideoCaptureController::ReturnBuffer,
- base::Unretained(controller_), id, this,
- buffer_id, frame->mailbox_holder(0).sync_token,
- resource_utilization_));
- }
}
void OnEnded(VideoCaptureControllerID id) override {
DoEnded(id);
@@ -135,20 +117,11 @@ class VideoCaptureControllerTest : public testing::Test {
scoped_refptr<media::VideoFrame> WrapI420Buffer(gfx::Size dimensions,
uint8* data) {
- return media::VideoFrame::WrapExternalData(
+ return media::VideoFrame::WrapExternalSharedMemory(
media::PIXEL_FORMAT_I420, dimensions, gfx::Rect(dimensions), dimensions,
data,
media::VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420, dimensions),
- base::TimeDelta());
- }
-
- scoped_refptr<media::VideoFrame> WrapMailboxBuffer(
- const gpu::MailboxHolder& holder,
- const media::VideoFrame::ReleaseMailboxCB& release_cb,
- gfx::Size dimensions) {
- return media::VideoFrame::WrapNativeTexture(
- media::PIXEL_FORMAT_ARGB, holder, release_cb, dimensions,
- gfx::Rect(dimensions), dimensions, base::TimeDelta());
+ base::SharedMemory::NULLHandle(), 0u, base::TimeDelta());
}
TestBrowserThreadBundle bundle_;
@@ -261,21 +234,10 @@ TEST_F(VideoCaptureControllerTest, AddAndRemoveClients) {
<< "Client count should return to zero after all clients are gone.";
}
-static void CacheSyncToken(gpu::SyncToken* called_release_sync_token,
- const gpu::SyncToken& release_sync_token) {
- *called_release_sync_token = release_sync_token;
-}
-
// This test will connect and disconnect several clients while simulating an
// active capture device being started and generating frames. It runs on one
// thread and is intended to behave deterministically.
TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) {
-// VideoCaptureController::ReturnBuffer() uses ImageTransportFactory.
-#if !defined(OS_ANDROID)
- ImageTransportFactory::InitializeForUnitTests(
- scoped_ptr<ImageTransportFactory>(new NoTransportImageTransportFactory));
-#endif
-
media::VideoCaptureParams session_100;
session_100.requested_format = media::VideoCaptureFormat(
gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420);
@@ -500,83 +462,6 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) {
base::RunLoop().RunUntilIdle();
Mock::VerifyAndClearExpectations(client_a_.get());
Mock::VerifyAndClearExpectations(client_b_.get());
-
- // Allocate all buffers from the buffer pool, half as SHM buffer and half as
- // mailbox buffers. Make sure of different counts though.
-#if defined(OS_ANDROID)
- int mailbox_buffers = 0;
-#else
- int mailbox_buffers = kPoolSize / 2;
-#endif
- int shm_buffers = kPoolSize - mailbox_buffers;
- if (shm_buffers == mailbox_buffers) {
- shm_buffers--;
- mailbox_buffers++;
- }
-
- for (int i = 0; i < shm_buffers; ++i) {
- scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer =
- device_->ReserveOutputBuffer(capture_resolution,
- media::PIXEL_FORMAT_I420,
- media::PIXEL_STORAGE_CPU);
- ASSERT_TRUE(buffer.get());
- video_frame =
- WrapI420Buffer(capture_resolution, static_cast<uint8*>(buffer->data()));
- device_->OnIncomingCapturedVideoFrame(buffer.Pass(), video_frame,
- base::TimeTicks());
- }
- std::vector<gpu::SyncToken> mailbox_synctokens(mailbox_buffers);
- std::vector<gpu::SyncToken> release_synctokens(mailbox_buffers);
- for (int i = 0; i < mailbox_buffers; ++i) {
- scoped_ptr<media::VideoCaptureDevice::Client::Buffer> buffer =
- device_->ReserveOutputBuffer(capture_resolution,
- media::PIXEL_FORMAT_ARGB,
- media::PIXEL_STORAGE_TEXTURE);
- ASSERT_TRUE(buffer.get());
-#if !defined(OS_ANDROID)
- ImageTransportFactory::GetInstance()->GetGLHelper()->GenerateSyncToken(
- &mailbox_synctokens[i]);
-#endif
- device_->OnIncomingCapturedVideoFrame(
- buffer.Pass(),
- WrapMailboxBuffer(gpu::MailboxHolder(gpu::Mailbox::Generate(),
- mailbox_synctokens[i], 0),
- base::Bind(&CacheSyncToken, &release_synctokens[i]),
- capture_resolution),
- base::TimeTicks());
- }
- // ReserveOutputBuffers ought to fail now regardless of buffer format, because
- // the pool is depleted.
- ASSERT_FALSE(
- device_->ReserveOutputBuffer(capture_resolution,
- media::PIXEL_FORMAT_I420,
- media::PIXEL_STORAGE_CPU).get());
- ASSERT_FALSE(
- device_->ReserveOutputBuffer(capture_resolution,
- media::PIXEL_FORMAT_ARGB,
- media::PIXEL_STORAGE_TEXTURE).get());
- EXPECT_CALL(*client_b_,
- DoI420BufferReady(client_b_route_2, capture_resolution))
- .Times(shm_buffers);
- EXPECT_CALL(*client_b_,
- DoTextureBufferReady(client_b_route_2, capture_resolution))
- .Times(mailbox_buffers);
-#if !defined(OS_ANDROID)
- EXPECT_CALL(*client_b_, DoBufferDestroyed(client_b_route_2));
-#endif
- base::RunLoop().RunUntilIdle();
- for (size_t i = 0; i < mailbox_synctokens.size(); ++i) {
- // A new release sync point must be inserted when the video frame is
- // returned to the Browser process.
- // See: MockVideoCaptureControllerEventHandler::OnMailboxBufferReady() and
- // VideoCaptureController::ReturnBuffer()
- ASSERT_NE(mailbox_synctokens[i], release_synctokens[i]);
- }
- Mock::VerifyAndClearExpectations(client_b_.get());
-
-#if !defined(OS_ANDROID)
- ImageTransportFactory::Terminate();
-#endif
}
// Exercises the OnError() codepath of VideoCaptureController, and tests the
diff --git a/content/browser/renderer_host/media/video_capture_device_client.cc b/content/browser/renderer_host/media/video_capture_device_client.cc
index 0cd2d4f..c484b19 100644
--- a/content/browser/renderer_host/media/video_capture_device_client.cc
+++ b/content/browser/renderer_host/media/video_capture_device_client.cc
@@ -11,25 +11,14 @@
#include "base/location.h"
#include "base/strings/stringprintf.h"
#include "base/trace_event/trace_event.h"
-#include "content/browser/compositor/image_transport_factory.h"
-#include "content/browser/gpu/browser_gpu_channel_host_factory.h"
-#include "content/browser/gpu/browser_gpu_memory_buffer_manager.h"
-#include "content/browser/gpu/gpu_data_manager_impl.h"
#include "content/browser/renderer_host/media/video_capture_buffer_pool.h"
#include "content/browser/renderer_host/media/video_capture_controller.h"
#include "content/browser/renderer_host/media/video_capture_gpu_jpeg_decoder.h"
-#include "content/common/gpu/client/context_provider_command_buffer.h"
-#include "content/common/gpu/client/gl_helper.h"
-#include "content/common/gpu/client/gpu_channel_host.h"
-#include "content/common/gpu/client/webgraphicscontext3d_command_buffer_impl.h"
-#include "content/common/gpu/gpu_process_launch_causes.h"
#include "content/public/browser/browser_thread.h"
-#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/media_switches.h"
#include "media/base/video_capture_types.h"
#include "media/base/video_frame.h"
-#include "third_party/khronos/GLES2/gl2ext.h"
#include "third_party/libyuv/include/libyuv.h"
using media::VideoCaptureFormat;
@@ -321,10 +310,10 @@ VideoCaptureDeviceClient::ReserveOutputBuffer(
const gfx::Size& frame_size,
media::VideoPixelFormat pixel_format,
media::VideoPixelStorage pixel_storage) {
- DCHECK(pixel_format == media::PIXEL_FORMAT_I420 ||
- pixel_format == media::PIXEL_FORMAT_ARGB);
DCHECK_GT(frame_size.width(), 0);
DCHECK_GT(frame_size.height(), 0);
+ // Currently, only I420 pixel format is supported.
+ DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format);
// TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if
// it's a ShMem GMB or a DmaBuf GMB.
@@ -351,26 +340,32 @@ void VideoCaptureDeviceClient::OnIncomingCapturedBuffer(
scoped_ptr<Buffer> buffer,
const VideoCaptureFormat& frame_format,
const base::TimeTicks& timestamp) {
- DCHECK(frame_format.pixel_format == media::PIXEL_FORMAT_I420);
+ // Currently, only I420 pixel format is supported.
+ DCHECK_EQ(media::PIXEL_FORMAT_I420, frame_format.pixel_format);
+
scoped_refptr<VideoFrame> frame;
- if (frame_format.pixel_storage == media::PIXEL_STORAGE_GPUMEMORYBUFFER) {
- // Create a VideoFrame to set the correct storage_type and pixel_format.
- gfx::GpuMemoryBufferHandle handle;
- frame = VideoFrame::WrapExternalYuvGpuMemoryBuffers(
- media::PIXEL_FORMAT_I420, frame_format.frame_size,
- gfx::Rect(frame_format.frame_size), frame_format.frame_size, 0, 0, 0,
- reinterpret_cast<uint8*>(buffer->data(media::VideoFrame::kYPlane)),
- reinterpret_cast<uint8*>(buffer->data(media::VideoFrame::kUPlane)),
- reinterpret_cast<uint8*>(buffer->data(media::VideoFrame::kVPlane)),
- handle, handle, handle, base::TimeDelta());
- } else {
- frame = VideoFrame::WrapExternalData(
- media::PIXEL_FORMAT_I420, frame_format.frame_size,
- gfx::Rect(frame_format.frame_size), frame_format.frame_size,
- reinterpret_cast<uint8*>(buffer->data()),
- VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420,
- frame_format.frame_size),
- base::TimeDelta());
+ switch (frame_format.pixel_storage) {
+ case media::PIXEL_STORAGE_GPUMEMORYBUFFER: {
+ // Create a VideoFrame to set the correct storage_type and pixel_format.
+ gfx::GpuMemoryBufferHandle handle;
+ frame = VideoFrame::WrapExternalYuvGpuMemoryBuffers(
+ media::PIXEL_FORMAT_I420, frame_format.frame_size,
+ gfx::Rect(frame_format.frame_size), frame_format.frame_size, 0, 0, 0,
+ reinterpret_cast<uint8*>(buffer->data(media::VideoFrame::kYPlane)),
+ reinterpret_cast<uint8*>(buffer->data(media::VideoFrame::kUPlane)),
+ reinterpret_cast<uint8*>(buffer->data(media::VideoFrame::kVPlane)),
+ handle, handle, handle, base::TimeDelta());
+ break;
+ }
+ case media::PIXEL_STORAGE_CPU:
+ frame = VideoFrame::WrapExternalSharedMemory(
+ media::PIXEL_FORMAT_I420, frame_format.frame_size,
+ gfx::Rect(frame_format.frame_size), frame_format.frame_size,
+ reinterpret_cast<uint8*>(buffer->data()),
+ VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420,
+ frame_format.frame_size),
+ base::SharedMemory::NULLHandle(), 0u, base::TimeDelta());
+ break;
}
DCHECK(frame.get());
frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
@@ -438,24 +433,31 @@ VideoCaptureDeviceClient::ReserveI420OutputBuffer(
if (!buffer)
return scoped_ptr<Buffer>();
- if (storage == media::PIXEL_STORAGE_CPU) {
- // TODO(emircan): See http://crbug.com/521068, move this pointer arithmetic
- // inside Buffer::data() when this bug is resolved.
- *y_plane_data = reinterpret_cast<uint8*>(buffer->data());
- *u_plane_data =
- *y_plane_data +
- VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions)
- .GetArea();
- *v_plane_data =
- *u_plane_data +
- VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions)
- .GetArea();
- } else if (storage == media::PIXEL_STORAGE_GPUMEMORYBUFFER) {
- *y_plane_data = reinterpret_cast<uint8*>(buffer->data(VideoFrame::kYPlane));
- *u_plane_data = reinterpret_cast<uint8*>(buffer->data(VideoFrame::kUPlane));
- *v_plane_data = reinterpret_cast<uint8*>(buffer->data(VideoFrame::kVPlane));
+ switch (storage) {
+ case media::PIXEL_STORAGE_CPU:
+ // TODO(emircan): See http://crbug.com/521068, move this pointer
+ // arithmetic inside Buffer::data() when this bug is resolved.
+ *y_plane_data = reinterpret_cast<uint8*>(buffer->data());
+ *u_plane_data =
+ *y_plane_data +
+ VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions)
+ .GetArea();
+ *v_plane_data =
+ *u_plane_data +
+ VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions)
+ .GetArea();
+ return buffer.Pass();
+ case media::PIXEL_STORAGE_GPUMEMORYBUFFER:
+ *y_plane_data =
+ reinterpret_cast<uint8*>(buffer->data(VideoFrame::kYPlane));
+ *u_plane_data =
+ reinterpret_cast<uint8*>(buffer->data(VideoFrame::kUPlane));
+ *v_plane_data =
+ reinterpret_cast<uint8*>(buffer->data(VideoFrame::kVPlane));
+ return buffer.Pass();
}
- return buffer.Pass();
+ NOTREACHED();
+ return scoped_ptr<Buffer>();
}
} // namespace content
diff --git a/content/browser/renderer_host/media/video_capture_gpu_jpeg_decoder.h b/content/browser/renderer_host/media/video_capture_gpu_jpeg_decoder.h
index c7040ce..f87c288b 100644
--- a/content/browser/renderer_host/media/video_capture_gpu_jpeg_decoder.h
+++ b/content/browser/renderer_host/media/video_capture_gpu_jpeg_decoder.h
@@ -53,7 +53,7 @@ class CONTENT_EXPORT VideoCaptureGpuJpegDecoder
// |decode_done_cb| is called on the IO thread when decode succeed. This can
// be on any thread. |decode_done_cb| is never called after
// VideoCaptureGpuJpegDecoder is destroyed.
- VideoCaptureGpuJpegDecoder(const DecodeDoneCB& decode_done_cb);
+ explicit VideoCaptureGpuJpegDecoder(const DecodeDoneCB& decode_done_cb);
~VideoCaptureGpuJpegDecoder() override;
// Creates and intializes decoder asynchronously.
diff --git a/content/browser/renderer_host/media/video_capture_host.cc b/content/browser/renderer_host/media/video_capture_host.cc
index f07dfbc..ed3fea6 100644
--- a/content/browser/renderer_host/media/video_capture_host.cc
+++ b/content/browser/renderer_host/media/video_capture_host.cc
@@ -105,11 +105,6 @@ void VideoCaptureHost::OnBufferReady(
params.storage_type = video_frame->storage_type();
params.coded_size = video_frame->coded_size();
params.visible_rect = video_frame->visible_rect();
- if (video_frame->HasTextures()) {
- DCHECK_EQ(1u, media::VideoFrame::NumPlanes(video_frame->format()))
- << "Only single planar textures are supported";
- params.mailbox_holder = video_frame->mailbox_holder(0);
- }
Send(new VideoCaptureMsg_BufferReady(params));
}
diff --git a/content/common/media/video_capture_messages.h b/content/common/media/video_capture_messages.h
index 3c89883..be1a179 100644
--- a/content/common/media/video_capture_messages.h
+++ b/content/common/media/video_capture_messages.h
@@ -6,7 +6,6 @@
#include "content/common/content_export.h"
#include "content/common/media/video_capture.h"
#include "content/public/common/common_param_traits.h"
-#include "gpu/command_buffer/common/mailbox_holder.h"
#include "ipc/ipc_message_macros.h"
#include "media/base/video_capture_types.h"
#include "media/base/video_frame.h"
@@ -42,7 +41,6 @@ IPC_STRUCT_BEGIN(VideoCaptureMsg_BufferReady_Params)
IPC_STRUCT_MEMBER(media::VideoFrame::StorageType, storage_type)
IPC_STRUCT_MEMBER(gfx::Size, coded_size)
IPC_STRUCT_MEMBER(gfx::Rect, visible_rect)
- IPC_STRUCT_MEMBER(gpu::MailboxHolder, mailbox_holder)
IPC_STRUCT_END()
// TODO(nick): device_id in these messages is basically just a route_id. We
diff --git a/content/renderer/media/video_capture_impl.cc b/content/renderer/media/video_capture_impl.cc
index 1e99fd8..f9e13b6 100644
--- a/content/renderer/media/video_capture_impl.cc
+++ b/content/renderer/media/video_capture_impl.cc
@@ -23,21 +23,6 @@
namespace content {
-namespace {
-
-// This is called on an unknown thread when the VideoFrame destructor executes.
-// As of this writing, this callback mechanism is the only interface in
-// VideoFrame to provide the final value for |release_sync_token|.
-// VideoCaptureImpl::DidFinishConsumingFrame() will read the value saved here,
-// and pass it back to the IO thread to pass back to the host via the
-// BufferReady IPC.
-void SaveReleaseSyncToken(gpu::SyncToken* sync_token_storage,
- const gpu::SyncToken& release_sync_token) {
- *sync_token_storage = release_sync_token;
-}
-
-} // namespace
-
// A holder of a memory-backed buffer and accessors to it.
class VideoCaptureImpl::ClientBuffer
: public base::RefCountedThreadSafe<ClientBuffer> {
@@ -312,10 +297,17 @@ void VideoCaptureImpl::OnBufferReceived(
media::VideoPixelFormat pixel_format,
media::VideoFrame::StorageType storage_type,
const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gpu::MailboxHolder& mailbox_holder) {
+ const gfx::Rect& visible_rect) {
DCHECK(io_task_runner_->BelongsToCurrentThread());
- if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_) {
+ if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_ ||
+ pixel_format != media::PIXEL_FORMAT_I420 ||
+ (storage_type != media::VideoFrame::STORAGE_SHMEM &&
+ storage_type != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS)) {
+ // Crash in debug builds since the host should not have provided a buffer
+ // with an unsupported pixel format or storage type.
+ DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format);
+ DCHECK(storage_type == media::VideoFrame::STORAGE_SHMEM ||
+ storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS);
Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
gpu::SyncToken(), -1.0));
return;
@@ -332,52 +324,36 @@ void VideoCaptureImpl::OnBufferReceived(
scoped_refptr<media::VideoFrame> frame;
BufferFinishedCallback buffer_finished_callback;
scoped_ptr<gpu::SyncToken> release_sync_token(new gpu::SyncToken);
- if (storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS) {
- DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format);
- const auto& iter = client_buffer2s_.find(buffer_id);
- DCHECK(iter != client_buffer2s_.end());
- scoped_refptr<ClientBuffer2> buffer = iter->second;
- const auto& handles = buffer->gpu_memory_buffer_handles();
- frame = media::VideoFrame::WrapExternalYuvGpuMemoryBuffers(
- media::PIXEL_FORMAT_I420,
- coded_size,
- gfx::Rect(coded_size),
- coded_size,
- buffer->stride(media::VideoFrame::kYPlane),
- buffer->stride(media::VideoFrame::kUPlane),
- buffer->stride(media::VideoFrame::kVPlane),
- buffer->data(media::VideoFrame::kYPlane),
- buffer->data(media::VideoFrame::kUPlane),
- buffer->data(media::VideoFrame::kVPlane),
- handles[media::VideoFrame::kYPlane],
- handles[media::VideoFrame::kUPlane],
- handles[media::VideoFrame::kVPlane],
- timestamp - first_frame_timestamp_);
- DCHECK(frame);
- buffer_finished_callback = media::BindToCurrentLoop(
- base::Bind(&VideoCaptureImpl::OnClientBufferFinished2,
- weak_factory_.GetWeakPtr(), buffer_id, buffer));
- } else {
- scoped_refptr<ClientBuffer> buffer;
- if (storage_type == media::VideoFrame::STORAGE_OPAQUE) {
- DCHECK(mailbox_holder.mailbox.Verify());
- DCHECK_EQ(media::PIXEL_FORMAT_ARGB, pixel_format);
- frame = media::VideoFrame::WrapNativeTexture(
- pixel_format,
- mailbox_holder,
- base::Bind(&SaveReleaseSyncToken, release_sync_token.get()),
+ switch (storage_type) {
+ case media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS: {
+ const auto& iter = client_buffer2s_.find(buffer_id);
+ DCHECK(iter != client_buffer2s_.end());
+ scoped_refptr<ClientBuffer2> buffer = iter->second;
+ const auto& handles = buffer->gpu_memory_buffer_handles();
+ frame = media::VideoFrame::WrapExternalYuvGpuMemoryBuffers(
+ media::PIXEL_FORMAT_I420,
coded_size,
gfx::Rect(coded_size),
coded_size,
+ buffer->stride(media::VideoFrame::kYPlane),
+ buffer->stride(media::VideoFrame::kUPlane),
+ buffer->stride(media::VideoFrame::kVPlane),
+ buffer->data(media::VideoFrame::kYPlane),
+ buffer->data(media::VideoFrame::kUPlane),
+ buffer->data(media::VideoFrame::kVPlane),
+ handles[media::VideoFrame::kYPlane],
+ handles[media::VideoFrame::kUPlane],
+ handles[media::VideoFrame::kVPlane],
timestamp - first_frame_timestamp_);
+ buffer_finished_callback = media::BindToCurrentLoop(
+ base::Bind(&VideoCaptureImpl::OnClientBufferFinished2,
+ weak_factory_.GetWeakPtr(), buffer_id, buffer));
+ break;
}
- else {
- DCHECK(storage_type == media::VideoFrame::STORAGE_UNOWNED_MEMORY ||
- storage_type == media::VideoFrame::STORAGE_SHMEM);
- DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format);
+ case media::VideoFrame::STORAGE_SHMEM: {
const auto& iter = client_buffers_.find(buffer_id);
DCHECK(iter != client_buffers_.end());
- buffer = iter->second;
+ const scoped_refptr<ClientBuffer> buffer = iter->second;
frame = media::VideoFrame::WrapExternalSharedMemory(
pixel_format,
coded_size,
@@ -389,12 +365,17 @@ void VideoCaptureImpl::OnBufferReceived(
buffer->buffer()->handle(),
0 /* shared_memory_offset */,
timestamp - first_frame_timestamp_);
+ buffer_finished_callback = media::BindToCurrentLoop(
+ base::Bind(&VideoCaptureImpl::OnClientBufferFinished,
+ weak_factory_.GetWeakPtr(), buffer_id, buffer));
+ break;
}
- DCHECK(frame);
- buffer_finished_callback = media::BindToCurrentLoop(
- base::Bind(&VideoCaptureImpl::OnClientBufferFinished,
- weak_factory_.GetWeakPtr(), buffer_id, buffer));
+ default:
+ NOTREACHED();
+ break;
}
+ DCHECK(frame);
+
frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME,
timestamp);
frame->AddDestructionObserver(
diff --git a/content/renderer/media/video_capture_impl.h b/content/renderer/media/video_capture_impl.h
index 0563117..52c2eb3 100644
--- a/content/renderer/media/video_capture_impl.h
+++ b/content/renderer/media/video_capture_impl.h
@@ -20,10 +20,6 @@ namespace base {
class SingleThreadTaskRunner;
} // namespace base
-namespace gpu {
-struct MailboxHolder;
-} // namespace gpu
-
namespace media {
class VideoFrame;
} // namespace media
@@ -123,8 +119,7 @@ class CONTENT_EXPORT VideoCaptureImpl
media::VideoPixelFormat pixel_format,
media::VideoFrame::StorageType storage_type,
const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gpu::MailboxHolder& mailbox_holder) override;
+ const gfx::Rect& visible_rect) override;
void OnStateChanged(VideoCaptureState state) override;
void OnDeviceSupportedFormatsEnumerated(
const media::VideoCaptureFormats& supported_formats) override;
diff --git a/content/renderer/media/video_capture_impl_unittest.cc b/content/renderer/media/video_capture_impl_unittest.cc
index 261aa5b..80bca69 100644
--- a/content/renderer/media/video_capture_impl_unittest.cc
+++ b/content/renderer/media/video_capture_impl_unittest.cc
@@ -33,26 +33,7 @@ class MockVideoCaptureMessageFilter : public VideoCaptureMessageFilter {
DISALLOW_COPY_AND_ASSIGN(MockVideoCaptureMessageFilter);
};
-struct BufferReceivedTestArg {
- BufferReceivedTestArg(media::VideoPixelFormat pixel_format,
- const gpu::MailboxHolder& holder)
- : pixel_format(pixel_format), mailbox_holder(holder) {}
-
- BufferReceivedTestArg(media::VideoPixelFormat pixel_format)
- : pixel_format(pixel_format) {}
-
- media::VideoPixelFormat pixel_format;
- gpu::MailboxHolder mailbox_holder;
-};
-
-static const BufferReceivedTestArg kBufferFormats[] = {
- BufferReceivedTestArg(media::PIXEL_FORMAT_I420),
- BufferReceivedTestArg(
- media::PIXEL_FORMAT_ARGB,
- gpu::MailboxHolder(gpu::Mailbox::Generate(), gpu::SyncToken(), 0))};
-
-class VideoCaptureImplTest
- : public ::testing::TestWithParam<BufferReceivedTestArg> {
+class VideoCaptureImplTest : public ::testing::Test {
public:
class MockVideoCaptureImpl : public VideoCaptureImpl {
public:
@@ -183,16 +164,11 @@ class VideoCaptureImplTest
shm.mapped_size(), buffer_id);
}
- void BufferReceived(int buffer_id, const gfx::Size& size,
- media::VideoPixelFormat pixel_format,
- const gpu::MailboxHolder& mailbox_holder) {
- const media::VideoFrame::StorageType storage_type =
- pixel_format != media::PIXEL_FORMAT_ARGB
- ? media::VideoFrame::STORAGE_UNOWNED_MEMORY
- : media::VideoFrame::STORAGE_OPAQUE;
+ void BufferReceived(int buffer_id, const gfx::Size& size) {
video_capture_impl_->OnBufferReceived(
buffer_id, base::TimeTicks::Now(), base::DictionaryValue(),
- pixel_format, storage_type, size, gfx::Rect(size), mailbox_holder);
+ media::PIXEL_FORMAT_I420, media::VideoFrame::STORAGE_SHMEM, size,
+ gfx::Rect(size));
}
void BufferDestroyed(int buffer_id) {
@@ -309,37 +285,32 @@ TEST_F(VideoCaptureImplTest, GetDeviceFormatsInUse) {
DeInit();
}
-TEST_P(VideoCaptureImplTest, BufferReceivedWithFormat) {
+TEST_F(VideoCaptureImplTest, BufferReceived) {
EXPECT_CALL(*this, OnStateUpdate(VIDEO_CAPTURE_STATE_STARTED)).Times(1);
EXPECT_CALL(*this, OnStateUpdate(VIDEO_CAPTURE_STATE_STOPPED)).Times(1);
EXPECT_CALL(*this, OnFrameReady(_, _)).Times(1);
- const BufferReceivedTestArg& buffer_arg = GetParam();
const gfx::Size size(1280, 720);
// Create a fake shared memory for buffer.
base::SharedMemory shm;
const size_t frame_size = media::VideoFrame::AllocationSize(
- buffer_arg.pixel_format, size);
+ media::PIXEL_FORMAT_I420, size);
ASSERT_TRUE(shm.CreateAndMapAnonymous(frame_size));
media::VideoCaptureParams params;
params.requested_format = media::VideoCaptureFormat(
- size, 30, buffer_arg.pixel_format);
+ size, 30, media::PIXEL_FORMAT_I420);
Init();
StartCapture(0, params);
NewBuffer(0, shm);
- BufferReceived(0, size, buffer_arg.pixel_format, buffer_arg.mailbox_holder);
+ BufferReceived(0, size);
StopCapture(0);
BufferDestroyed(0);
DeInit();
}
-INSTANTIATE_TEST_CASE_P(I420AndARGB,
- VideoCaptureImplTest,
- testing::ValuesIn(kBufferFormats));
-
TEST_F(VideoCaptureImplTest, BufferReceivedAfterStop) {
EXPECT_CALL(*this, OnStateUpdate(VIDEO_CAPTURE_STATE_STARTED)).Times(1);
EXPECT_CALL(*this, OnStateUpdate(VIDEO_CAPTURE_STATE_STOPPED)).Times(1);
@@ -355,8 +326,7 @@ TEST_F(VideoCaptureImplTest, BufferReceivedAfterStop) {
StartCapture(0, params_large_);
NewBuffer(0, shm);
StopCapture(0);
- BufferReceived(0, params_large_.requested_format.frame_size,
- media::PIXEL_FORMAT_I420, gpu::MailboxHolder());
+ BufferReceived(0, params_large_.requested_format.frame_size);
BufferDestroyed(0);
DeInit();
diff --git a/content/renderer/media/video_capture_message_filter.cc b/content/renderer/media/video_capture_message_filter.cc
index a75e369..14afb71 100644
--- a/content/renderer/media/video_capture_message_filter.cc
+++ b/content/renderer/media/video_capture_message_filter.cc
@@ -159,8 +159,7 @@ void VideoCaptureMessageFilter::OnBufferReceived(
params.pixel_format,
params.storage_type,
params.coded_size,
- params.visible_rect,
- params.mailbox_holder);
+ params.visible_rect);
}
void VideoCaptureMessageFilter::OnBufferDestroyed(int device_id,
diff --git a/content/renderer/media/video_capture_message_filter.h b/content/renderer/media/video_capture_message_filter.h
index 2d6053e..c8cb872 100644
--- a/content/renderer/media/video_capture_message_filter.h
+++ b/content/renderer/media/video_capture_message_filter.h
@@ -23,10 +23,6 @@
struct VideoCaptureMsg_BufferReady_Params;
-namespace gpu {
-struct MailboxHolder;
-} // namespace gpu
-
namespace content {
class CONTENT_EXPORT VideoCaptureMessageFilter : public IPC::MessageFilter {
@@ -56,8 +52,7 @@ class CONTENT_EXPORT VideoCaptureMessageFilter : public IPC::MessageFilter {
media::VideoPixelFormat pixel_format,
media::VideoFrame::StorageType storage_type,
const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gpu::MailboxHolder& mailbox_holder) = 0;
+ const gfx::Rect& visible_rect) = 0;
// Called when state of a video capture device has changed in the browser
// process.
diff --git a/content/renderer/media/video_capture_message_filter_unittest.cc b/content/renderer/media/video_capture_message_filter_unittest.cc
index c36f3d3..a3d4e90 100644
--- a/content/renderer/media/video_capture_message_filter_unittest.cc
+++ b/content/renderer/media/video_capture_message_filter_unittest.cc
@@ -38,15 +38,14 @@ class MockVideoCaptureDelegate : public VideoCaptureMessageFilter::Delegate {
const gfx::Size& size,
int buffer_id));
MOCK_METHOD1(OnBufferDestroyed, void(int buffer_id));
- MOCK_METHOD8(OnBufferReceived,
+ MOCK_METHOD7(OnBufferReceived,
void(int buffer_id,
base::TimeTicks timestamp,
const base::DictionaryValue& metadata,
media::VideoPixelFormat pixel_format,
media::VideoFrame::StorageType storage_type,
const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gpu::MailboxHolder& mailbox_holder));
+ const gfx::Rect& visible_rect));
MOCK_METHOD1(OnStateChanged, void(VideoCaptureState state));
MOCK_METHOD1(OnDeviceSupportedFormatsEnumerated,
void(const media::VideoCaptureFormats& formats));
@@ -122,40 +121,15 @@ TEST(VideoCaptureMessageFilterTest, Basic) {
OnBufferReceived(params.buffer_id, params.timestamp, _,
media::PIXEL_FORMAT_I420,
media::VideoFrame::STORAGE_SHMEM,
- params.coded_size, params.visible_rect, _))
+ params.coded_size, params.visible_rect))
.WillRepeatedly(WithArg<2>(Invoke(&ExpectMetadataContainsFooBarBaz)));
filter->OnMessageReceived(VideoCaptureMsg_BufferReady(params));
Mock::VerifyAndClearExpectations(&delegate);
- // VideoCaptureMsg_BufferReady_Params with a Texture
- VideoCaptureMsg_BufferReady_Params params_m;
- params_m.device_id = delegate.device_id();
- params_m.buffer_id = 33;
- params_m.timestamp = base::TimeTicks::FromInternalValue(2);
- params_m.metadata.SetString("bar", "baz");
- params_m.pixel_format = media::PIXEL_FORMAT_ARGB;
- params_m.storage_type = media::VideoFrame::STORAGE_OPAQUE;
- params_m.coded_size = gfx::Size(345, 256);
- params_m.mailbox_holder =
- gpu::MailboxHolder(gpu::Mailbox::Generate(), gpu::SyncToken(), 44);
-
- gpu::MailboxHolder received_mailbox_holder;
- EXPECT_CALL(delegate, OnBufferReceived(params_m.buffer_id, params_m.timestamp,
- _, media::PIXEL_FORMAT_ARGB,
- media::VideoFrame::STORAGE_OPAQUE,
- params_m.coded_size, _, _))
- .WillRepeatedly(
- DoAll(SaveArg<7>(&received_mailbox_holder),
- WithArg<2>(Invoke(&ExpectMetadataContainsFooBarBaz))));
- filter->OnMessageReceived(VideoCaptureMsg_BufferReady(params_m));
- Mock::VerifyAndClearExpectations(&delegate);
- EXPECT_EQ(params_m.mailbox_holder.mailbox,
- received_mailbox_holder.mailbox);
-
// VideoCaptureMsg_FreeBuffer
- EXPECT_CALL(delegate, OnBufferDestroyed(params_m.buffer_id));
+ EXPECT_CALL(delegate, OnBufferDestroyed(params.buffer_id));
filter->OnMessageReceived(VideoCaptureMsg_FreeBuffer(
- delegate.device_id(), params_m.buffer_id));
+ delegate.device_id(), params.buffer_id));
Mock::VerifyAndClearExpectations(&delegate);
}
diff --git a/media/base/video_capture_types.cc b/media/base/video_capture_types.cc
index 0bc6ae6..f68eeb3 100644
--- a/media/base/video_capture_types.cc
+++ b/media/base/video_capture_types.cc
@@ -57,8 +57,10 @@ bool VideoCaptureFormat::IsValid() const {
(frame_size.GetArea() < media::limits::kMaxCanvas) &&
(frame_rate >= 0.0f) &&
(frame_rate < media::limits::kMaxFramesPerSecond) &&
- (pixel_storage != PIXEL_STORAGE_TEXTURE ||
- pixel_format == PIXEL_FORMAT_ARGB);
+ (pixel_format >= PIXEL_FORMAT_UNKNOWN &&
+ pixel_format <= PIXEL_FORMAT_MAX) &&
+ (pixel_storage == PIXEL_STORAGE_CPU ||
+ pixel_storage == PIXEL_STORAGE_GPUMEMORYBUFFER);
}
size_t VideoCaptureFormat::ImageAllocationSize() const {
@@ -80,8 +82,6 @@ std::string VideoCaptureFormat::PixelStorageToString(
switch (storage) {
case PIXEL_STORAGE_CPU:
return "CPU";
- case PIXEL_STORAGE_TEXTURE:
- return "TEXTURE";
case PIXEL_STORAGE_GPUMEMORYBUFFER:
return "GPUMEMORYBUFFER";
}
@@ -109,4 +109,12 @@ VideoCaptureParams::VideoCaptureParams()
: resolution_change_policy(RESOLUTION_POLICY_FIXED_RESOLUTION),
power_line_frequency(PowerLineFrequency::FREQUENCY_DEFAULT) {}
+bool VideoCaptureParams::IsValid() const {
+ return requested_format.IsValid() &&
+ resolution_change_policy >= RESOLUTION_POLICY_FIXED_RESOLUTION &&
+ resolution_change_policy <= RESOLUTION_POLICY_LAST &&
+ power_line_frequency >= PowerLineFrequency::FREQUENCY_DEFAULT &&
+ power_line_frequency <= PowerLineFrequency::FREQUENCY_MAX;
+}
+
} // namespace media
diff --git a/media/base/video_capture_types.h b/media/base/video_capture_types.h
index d6ad5e3..fa631a0 100644
--- a/media/base/video_capture_types.h
+++ b/media/base/video_capture_types.h
@@ -24,7 +24,6 @@ typedef int VideoCaptureSessionId;
// TODO(mcasas): http://crbug.com/504160 Consider making this an enum class.
enum VideoPixelStorage {
PIXEL_STORAGE_CPU,
- PIXEL_STORAGE_TEXTURE,
PIXEL_STORAGE_GPUMEMORYBUFFER,
PIXEL_STORAGE_MAX = PIXEL_STORAGE_GPUMEMORYBUFFER,
};
@@ -126,6 +125,10 @@ typedef std::vector<VideoCaptureFormat> VideoCaptureFormats;
struct MEDIA_EXPORT VideoCaptureParams {
VideoCaptureParams();
+ // Returns true if requested_format.IsValid() and all other values are within
+ // their expected ranges.
+ bool IsValid() const;
+
bool operator==(const VideoCaptureParams& other) const {
return requested_format == other.requested_format &&
resolution_change_policy == other.resolution_change_policy &&
diff --git a/media/base/video_frame.cc b/media/base/video_frame.cc
index c0ed3ca..3be20da 100644
--- a/media/base/video_frame.cc
+++ b/media/base/video_frame.cc
@@ -829,6 +829,8 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalStorage(
size_t data_offset) {
DCHECK(IsStorageTypeMappable(storage_type));
+ // TODO(miu): This function should support any pixel format.
+ // http://crbug.com/555909
if (format != PIXEL_FORMAT_I420) {
DLOG(ERROR) << "Only PIXEL_FORMAT_I420 format supported: "
<< VideoPixelFormatToString(format);
@@ -852,6 +854,10 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalStorage(
natural_size, timestamp);
}
frame->strides_[kYPlane] = coded_size.width();
+ // TODO(miu): This always rounds widths down, whereas VideoFrame::RowBytes()
+ // always rounds up. This inconsistency must be resolved. Perhaps a
+ // CommonAlignment() check should be made in IsValidConfig()?
+ // http://crbug.com/555909
frame->strides_[kUPlane] = coded_size.width() / 2;
frame->strides_[kVPlane] = coded_size.width() / 2;
frame->data_[kYPlane] = data;
diff --git a/media/capture/content/screen_capture_device_core.cc b/media/capture/content/screen_capture_device_core.cc
index d089eae..5ea0450 100644
--- a/media/capture/content/screen_capture_device_core.cc
+++ b/media/capture/content/screen_capture_device_core.cc
@@ -42,10 +42,8 @@ void ScreenCaptureDeviceCore::AllocateAndStart(
return;
}
- if (!(params.requested_format.pixel_format == PIXEL_FORMAT_I420 &&
- params.requested_format.pixel_storage == PIXEL_STORAGE_CPU) &&
- !(params.requested_format.pixel_format == PIXEL_FORMAT_ARGB &&
- params.requested_format.pixel_storage == PIXEL_STORAGE_TEXTURE)) {
+ if (params.requested_format.pixel_format != PIXEL_FORMAT_I420 ||
+ params.requested_format.pixel_storage != PIXEL_STORAGE_CPU) {
client->OnError(
FROM_HERE,
base::StringPrintf(
diff --git a/media/capture/content/thread_safe_capture_oracle.cc b/media/capture/content/thread_safe_capture_oracle.cc
index bda73de..0c62e69 100644
--- a/media/capture/content/thread_safe_capture_oracle.cc
+++ b/media/capture/content/thread_safe_capture_oracle.cc
@@ -6,6 +6,7 @@
#include "base/basictypes.h"
#include "base/bind.h"
+#include "base/bits.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/synchronization/lock.h"
@@ -63,17 +64,15 @@ bool ThreadSafeCaptureOracle::ObserveEventAndDecideCapture(
const bool should_capture =
oracle_.ObserveEventAndDecideCapture(event, damage_rect, event_time);
const gfx::Size visible_size = oracle_.capture_size();
- // Always round up the coded size to multiple of 16 pixels.
- // See http://crbug.com/402151.
- const gfx::Size coded_size((visible_size.width() + 15) & ~15,
- (visible_size.height() + 15) & ~15);
+ // TODO(miu): Clients should request exact padding, instead of this
+ // memory-wasting hack to make frames that are compatible with all HW
+ // encoders. http://crbug.com/555911
+ const gfx::Size coded_size(base::bits::Align(visible_size.width(), 16),
+ base::bits::Align(visible_size.height(), 16));
scoped_ptr<media::VideoCaptureDevice::Client::Buffer> output_buffer(
client_->ReserveOutputBuffer(coded_size,
- (params_.requested_format.pixel_storage !=
- media::PIXEL_STORAGE_TEXTURE)
- ? media::PIXEL_FORMAT_I420
- : media::PIXEL_FORMAT_ARGB,
+ params_.requested_format.pixel_format,
params_.requested_format.pixel_storage));
// Get the current buffer pool utilization and attenuate it: The utilization
// reported to the oracle is in terms of a maximum sustainable amount (not the
@@ -110,18 +109,18 @@ bool ThreadSafeCaptureOracle::ObserveEventAndDecideCapture(
TRACE_EVENT_SCOPE_THREAD, "trigger", event_name);
return false;
}
+
const int frame_number = oracle_.RecordCapture(attenuated_utilization);
TRACE_EVENT_ASYNC_BEGIN2("gpu.capture", "Capture", output_buffer.get(),
"frame_number", frame_number, "trigger", event_name);
- // Texture frames wrap a texture mailbox, which we don't have at the moment.
- // We do not construct those frames.
- if (params_.requested_format.pixel_storage != media::PIXEL_STORAGE_TEXTURE) {
- *storage = VideoFrame::WrapExternalData(
- media::PIXEL_FORMAT_I420, coded_size, gfx::Rect(visible_size),
- visible_size, static_cast<uint8*>(output_buffer->data()),
- output_buffer->mapped_size(), base::TimeDelta());
- DCHECK(*storage);
- }
+
+ DCHECK_EQ(media::PIXEL_STORAGE_CPU, params_.requested_format.pixel_storage);
+ *storage = VideoFrame::WrapExternalSharedMemory(
+ params_.requested_format.pixel_format, coded_size,
+ gfx::Rect(visible_size), visible_size,
+ static_cast<uint8*>(output_buffer->data()), output_buffer->mapped_size(),
+ base::SharedMemory::NULLHandle(), 0u, base::TimeDelta());
+ DCHECK(*storage);
*callback =
base::Bind(&ThreadSafeCaptureOracle::DidCaptureFrame, this, frame_number,
base::Passed(&output_buffer), capture_begin_time,