-rw-r--r--  content/browser/media/media_internals_unittest.cc                             1
-rw-r--r--  content/browser/renderer_host/media/video_capture_device_client.cc           58
-rw-r--r--  content/browser/renderer_host/media/video_capture_device_client.h             9
-rw-r--r--  content/browser/renderer_host/media/video_capture_device_client_unittest.cc  63
-rw-r--r--  media/BUILD.gn                                                                 4
-rw-r--r--  media/capture/video/fake_video_capture_device.cc                              31
-rw-r--r--  media/capture/video/fake_video_capture_device.h                               10
-rw-r--r--  media/capture/video/fake_video_capture_device_factory.cc                       8
-rw-r--r--  media/capture/video/fake_video_capture_device_factory.h                        1
-rw-r--r--  media/capture/video/fake_video_capture_device_unittest.cc                     25
-rw-r--r--  media/capture/video/linux/v4l2_capture_delegate.cc                           214
-rw-r--r--  media/capture/video/linux/v4l2_capture_delegate.h                            101
-rw-r--r--  media/capture/video/linux/v4l2_capture_delegate_multi_plane.cc               100
-rw-r--r--  media/capture/video/linux/v4l2_capture_delegate_multi_plane.h                 64
-rw-r--r--  media/capture/video/linux/v4l2_capture_delegate_single_plane.cc               61
-rw-r--r--  media/capture/video/linux/v4l2_capture_delegate_single_plane.h                58
-rw-r--r--  media/capture/video/linux/video_capture_device_factory_linux.cc               50
-rw-r--r--  media/capture/video/linux/video_capture_device_linux.cc                        5
-rw-r--r--  media/capture/video/video_capture_device.cc                                    2
-rw-r--r--  media/capture/video/video_capture_device.h                                    14
-rw-r--r--  media/media.gyp                                                                4
21 files changed, 190 insertions, 693 deletions
diff --git a/content/browser/media/media_internals_unittest.cc b/content/browser/media/media_internals_unittest.cc
index 2ab0539..33d0cb8 100644
--- a/content/browser/media/media_internals_unittest.cc
+++ b/content/browser/media/media_internals_unittest.cc
@@ -122,7 +122,6 @@ TEST_F(MediaInternalsVideoCaptureDeviceTest,
CaptureApiTypeStringMap m;
#if defined(OS_LINUX)
m[VideoCaptureDeviceName::V4L2_SINGLE_PLANE] = "V4L2 SPLANE";
- m[VideoCaptureDeviceName::V4L2_MULTI_PLANE] = "V4L2 MPLANE";
#elif defined(OS_WIN)
m[VideoCaptureDeviceName::MEDIA_FOUNDATION] = "Media Foundation";
m[VideoCaptureDeviceName::DIRECT_SHOW] = "Direct Show";
diff --git a/content/browser/renderer_host/media/video_capture_device_client.cc b/content/browser/renderer_host/media/video_capture_device_client.cc
index b4d142e..03d64d2 100644
--- a/content/browser/renderer_host/media/video_capture_device_client.cc
+++ b/content/browser/renderer_host/media/video_capture_device_client.cc
@@ -188,10 +188,10 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
NOTREACHED() << "RGB24 is only available in Linux and Windows platforms";
#endif
#if defined(OS_WIN)
- // TODO(wjia): Currently, for RGB24 on WIN, capture device always
- // passes in positive src_width and src_height. Remove this hardcoded
- // value when nagative src_height is supported. The negative src_height
- // indicates that vertical flipping is needed.
+ // TODO(wjia): Currently, for RGB24 on WIN, capture device always passes
+ // in positive src_width and src_height. Remove this hardcoded value when
+ // negative src_height is supported. The negative src_height indicates
+ // that vertical flipping is needed.
flip = true;
#endif
break;
@@ -256,56 +256,6 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
OnIncomingCapturedBuffer(std::move(buffer), output_format, timestamp);
}
-void VideoCaptureDeviceClient::OnIncomingCapturedYuvData(
- const uint8_t* y_data,
- const uint8_t* u_data,
- const uint8_t* v_data,
- size_t y_stride,
- size_t u_stride,
- size_t v_stride,
- const VideoCaptureFormat& frame_format,
- int clockwise_rotation,
- const base::TimeTicks& timestamp) {
- TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedYuvData");
- DCHECK_EQ(media::PIXEL_FORMAT_I420, frame_format.pixel_format);
- DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage);
- DCHECK_EQ(0, clockwise_rotation) << "Rotation not supported";
-
- uint8_t *y_plane_data, *u_plane_data, *v_plane_data;
- scoped_ptr<Buffer> buffer(ReserveI420OutputBuffer(
- frame_format.frame_size, frame_format.pixel_storage, &y_plane_data,
- &u_plane_data, &v_plane_data));
- if (!buffer.get())
- return;
-
- const size_t dst_y_stride =
- VideoFrame::RowBytes(VideoFrame::kYPlane, media::PIXEL_FORMAT_I420,
- frame_format.frame_size.width());
- const size_t dst_u_stride =
- VideoFrame::RowBytes(VideoFrame::kUPlane, media::PIXEL_FORMAT_I420,
- frame_format.frame_size.width());
- const size_t dst_v_stride =
- VideoFrame::RowBytes(VideoFrame::kVPlane, media::PIXEL_FORMAT_I420,
- frame_format.frame_size.width());
- DCHECK_GE(y_stride, dst_y_stride);
- DCHECK_GE(u_stride, dst_u_stride);
- DCHECK_GE(v_stride, dst_v_stride);
-
- if (libyuv::I420Copy(y_data, y_stride,
- u_data, u_stride,
- v_data, v_stride,
- y_plane_data, dst_y_stride,
- u_plane_data, dst_u_stride,
- v_plane_data, dst_v_stride,
- frame_format.frame_size.width(),
- frame_format.frame_size.height())) {
- DLOG(WARNING) << "Failed to copy buffer";
- return;
- }
-
- OnIncomingCapturedBuffer(std::move(buffer), frame_format, timestamp);
-};
-
scoped_ptr<media::VideoCaptureDevice::Client::Buffer>
VideoCaptureDeviceClient::ReserveOutputBuffer(
const gfx::Size& frame_size,
diff --git a/content/browser/renderer_host/media/video_capture_device_client.h b/content/browser/renderer_host/media/video_capture_device_client.h
index 8215f564..1b982a2 100644
--- a/content/browser/renderer_host/media/video_capture_device_client.h
+++ b/content/browser/renderer_host/media/video_capture_device_client.h
@@ -49,15 +49,6 @@ class CONTENT_EXPORT VideoCaptureDeviceClient
const media::VideoCaptureFormat& frame_format,
int rotation,
const base::TimeTicks& timestamp) override;
- void OnIncomingCapturedYuvData(const uint8_t* y_data,
- const uint8_t* u_data,
- const uint8_t* v_data,
- size_t y_stride,
- size_t u_stride,
- size_t v_stride,
- const media::VideoCaptureFormat& frame_format,
- int clockwise_rotation,
- const base::TimeTicks& timestamp) override;
scoped_ptr<Buffer> ReserveOutputBuffer(
const gfx::Size& dimensions,
media::VideoPixelFormat format,
diff --git a/content/browser/renderer_host/media/video_capture_device_client_unittest.cc b/content/browser/renderer_host/media/video_capture_device_client_unittest.cc
index 1463b26..e5847f1 100644
--- a/content/browser/renderer_host/media/video_capture_device_client_unittest.cc
+++ b/content/browser/renderer_host/media/video_capture_device_client_unittest.cc
@@ -5,6 +5,7 @@
#include <stddef.h>
#include "base/bind.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/scoped_ptr.h"
#include "base/run_loop.h"
@@ -79,6 +80,7 @@ TEST_F(VideoCaptureDeviceClientTest, Minimal) {
media::PIXEL_FORMAT_I420,
media::PIXEL_STORAGE_CPU);
DCHECK(device_client_.get());
+ EXPECT_CALL(*controller_, DoLogOnIOThread(_)).Times(1);
EXPECT_CALL(*controller_, MockDoIncomingCapturedVideoFrameOnIOThread(_))
.Times(1);
device_client_->OnIncomingCapturedData(data, kScratchpadSizeInBytes,
@@ -100,6 +102,7 @@ TEST_F(VideoCaptureDeviceClientTest, FailsSilentlyGivenInvalidFrameFormat) {
media::VideoPixelStorage::PIXEL_STORAGE_CPU);
DCHECK(device_client_.get());
// Expect the call to fail silently inside the VideoCaptureDeviceClient.
+ EXPECT_CALL(*controller_, DoLogOnIOThread(_)).Times(1);
EXPECT_CALL(*controller_, MockDoIncomingCapturedVideoFrameOnIOThread(_))
.Times(0);
device_client_->OnIncomingCapturedData(data, kScratchpadSizeInBytes,
@@ -119,6 +122,7 @@ TEST_F(VideoCaptureDeviceClientTest, DropsFrameIfNoBuffer) {
media::PIXEL_STORAGE_CPU);
// We expect the second frame to be silently dropped, so these should
// only be called once despite the two frames.
+ EXPECT_CALL(*controller_, DoLogOnIOThread(_)).Times(1);
EXPECT_CALL(*controller_, MockDoIncomingCapturedVideoFrameOnIOThread(_))
.Times(1);
// Pass two frames. The second will be dropped.
@@ -132,45 +136,42 @@ TEST_F(VideoCaptureDeviceClientTest, DropsFrameIfNoBuffer) {
Mock::VerifyAndClearExpectations(controller_.get());
}
-// Tests that buffer-based capture API accepts all memory-backed pixel formats.
-TEST_F(VideoCaptureDeviceClientTest, DataCaptureInEachVideoFormatInSequence) {
+// Tests that the buffer-based capture API accepts some memory-backed pixel formats.
+TEST_F(VideoCaptureDeviceClientTest, DataCaptureGoodPixelFormats) {
// The usual ReserveOutputBuffer() -> OnIncomingCapturedVideoFrame() cannot
// be used since it does not accept all pixel formats. The memory backed
// buffer OnIncomingCapturedData() is used instead, with a dummy scratchpad
// buffer.
const size_t kScratchpadSizeInBytes = 400;
unsigned char data[kScratchpadSizeInBytes] = {};
- const gfx::Size capture_resolution(10, 10);
- ASSERT_GE(kScratchpadSizeInBytes, capture_resolution.GetArea() * 4u)
+ const gfx::Size kCaptureResolution(10, 10);
+ ASSERT_GE(kScratchpadSizeInBytes, kCaptureResolution.GetArea() * 4u)
<< "Scratchpad is too small to hold the largest pixel format (ARGB).";
- for (int format = 0; format < media::PIXEL_FORMAT_MAX;
- ++format) {
- // Conversion from some formats are unsupported.
- if (format == media::PIXEL_FORMAT_UNKNOWN ||
- format == media::PIXEL_FORMAT_YV16 ||
- format == media::PIXEL_FORMAT_YV12A ||
- format == media::PIXEL_FORMAT_YV24 ||
- format == media::PIXEL_FORMAT_ARGB ||
- format == media::PIXEL_FORMAT_XRGB ||
- format == media::PIXEL_FORMAT_MJPEG ||
- format == media::PIXEL_FORMAT_MT21 ||
- format == media::PIXEL_FORMAT_YUV420P9 ||
- format == media::PIXEL_FORMAT_YUV420P10 ||
- format == media::PIXEL_FORMAT_YUV422P9 ||
- format == media::PIXEL_FORMAT_YUV422P10 ||
- format == media::PIXEL_FORMAT_YUV444P9 ||
- format == media::PIXEL_FORMAT_YUV444P10) {
- continue;
- }
-#if !defined(OS_LINUX) && !defined(OS_WIN)
- if (format == media::PIXEL_FORMAT_RGB24) {
- continue;
- }
+ media::VideoCaptureParams params;
+ params.requested_format = media::VideoCaptureFormat(
+ kCaptureResolution, 30.0f, media::PIXEL_FORMAT_UNKNOWN);
+
+ // Only use the VideoPixelFormats that we know are supported. Do not add
+ // PIXEL_FORMAT_MJPEG since it would need a real JPEG header.
+ const media::VideoPixelFormat kSupportedFormats[] = {
+ media::PIXEL_FORMAT_I420,
+ media::PIXEL_FORMAT_YV12,
+ media::PIXEL_FORMAT_NV12,
+ media::PIXEL_FORMAT_NV21,
+ media::PIXEL_FORMAT_YUY2,
+ media::PIXEL_FORMAT_UYVY,
+#if defined(OS_WIN) || defined(OS_LINUX)
+ media::PIXEL_FORMAT_RGB24,
#endif
- media::VideoCaptureParams params;
- params.requested_format = media::VideoCaptureFormat(
- capture_resolution, 30.0f, media::VideoPixelFormat(format));
+ media::PIXEL_FORMAT_RGB32,
+ media::PIXEL_FORMAT_ARGB
+ };
+
+ for (media::VideoPixelFormat format : kSupportedFormats) {
+ params.requested_format.pixel_format = format;
+
+ EXPECT_CALL(*controller_, DoLogOnIOThread(_)).Times(1);
EXPECT_CALL(*controller_, MockDoIncomingCapturedVideoFrameOnIOThread(_))
.Times(1);
device_client_->OnIncomingCapturedData(
@@ -204,6 +205,8 @@ TEST_F(VideoCaptureDeviceClientTest, CheckRotationsAndCrops) {
const size_t kScratchpadSizeInBytes = 400;
unsigned char data[kScratchpadSizeInBytes] = {};
+ EXPECT_CALL(*controller_, DoLogOnIOThread(_)).Times(1);
+
media::VideoCaptureParams params;
for (const auto& size_and_rotation : kSizeAndRotations) {
ASSERT_GE(kScratchpadSizeInBytes,
diff --git a/media/BUILD.gn b/media/BUILD.gn
index b92755a..1ca0c08 100644
--- a/media/BUILD.gn
+++ b/media/BUILD.gn
@@ -125,10 +125,6 @@ component("media") {
"capture/video/file_video_capture_device_factory.h",
"capture/video/linux/v4l2_capture_delegate.cc",
"capture/video/linux/v4l2_capture_delegate.h",
- "capture/video/linux/v4l2_capture_delegate_multi_plane.cc",
- "capture/video/linux/v4l2_capture_delegate_multi_plane.h",
- "capture/video/linux/v4l2_capture_delegate_single_plane.cc",
- "capture/video/linux/v4l2_capture_delegate_single_plane.h",
"capture/video/linux/video_capture_device_chromeos.cc",
"capture/video/linux/video_capture_device_chromeos.h",
"capture/video/linux/video_capture_device_factory_linux.cc",
diff --git a/media/capture/video/fake_video_capture_device.cc b/media/capture/video/fake_video_capture_device.cc
index f4ef6ad..5b61f9c 100644
--- a/media/capture/video/fake_video_capture_device.cc
+++ b/media/capture/video/fake_video_capture_device.cc
@@ -73,10 +73,8 @@ void DrawPacman(bool use_argb,
}
FakeVideoCaptureDevice::FakeVideoCaptureDevice(BufferOwnership buffer_ownership,
- BufferPlanarity planarity,
float fake_capture_rate)
: buffer_ownership_(buffer_ownership),
- planarity_(planarity),
fake_capture_rate_(fake_capture_rate),
weak_factory_(this) {}
@@ -105,15 +103,9 @@ void FakeVideoCaptureDevice::AllocateAndStart(
capture_format_.frame_size.SetSize(320, 240);
if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS) {
- if (planarity_ == BufferPlanarity::PACKED) {
- capture_format_.pixel_storage = PIXEL_STORAGE_CPU;
- capture_format_.pixel_format = PIXEL_FORMAT_ARGB;
- DVLOG(1) << "starting with client argb buffers";
- } else if (planarity_ == BufferPlanarity::TRIPLANAR) {
- capture_format_.pixel_storage = PIXEL_STORAGE_GPUMEMORYBUFFER;
- capture_format_.pixel_format = PIXEL_FORMAT_I420;
- DVLOG(1) << "starting with gmb I420 buffers";
- }
+ capture_format_.pixel_storage = PIXEL_STORAGE_CPU;
+ capture_format_.pixel_format = PIXEL_FORMAT_ARGB;
+ DVLOG(1) << "starting with client argb buffers";
} else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) {
capture_format_.pixel_storage = PIXEL_STORAGE_CPU;
capture_format_.pixel_format = PIXEL_FORMAT_I420;
@@ -155,20 +147,9 @@ void FakeVideoCaptureDevice::CaptureUsingOwnBuffers(
fake_capture_rate_, capture_format_.frame_size);
// Give the captured frame to the client.
- if (planarity_ == BufferPlanarity::PACKED) {
- client_->OnIncomingCapturedData(fake_frame_.get(), frame_size,
- capture_format_, 0 /* rotation */,
- base::TimeTicks::Now());
- } else if (planarity_ == BufferPlanarity::TRIPLANAR) {
- client_->OnIncomingCapturedYuvData(
- fake_frame_.get(),
- fake_frame_.get() + capture_format_.frame_size.GetArea(),
- fake_frame_.get() + capture_format_.frame_size.GetArea() * 5 / 4,
- capture_format_.frame_size.width(),
- capture_format_.frame_size.width() / 2,
- capture_format_.frame_size.width() / 2, capture_format_,
- 0 /* rotation */, base::TimeTicks::Now());
- }
+ client_->OnIncomingCapturedData(fake_frame_.get(), frame_size,
+ capture_format_, 0 /* rotation */,
+ base::TimeTicks::Now());
BeepAndScheduleNextCapture(
expected_execution_time,
base::Bind(&FakeVideoCaptureDevice::CaptureUsingOwnBuffers,
diff --git a/media/capture/video/fake_video_capture_device.h b/media/capture/video/fake_video_capture_device.h
index f6cc44b..0112642 100644
--- a/media/capture/video/fake_video_capture_device.h
+++ b/media/capture/video/fake_video_capture_device.h
@@ -16,7 +16,6 @@
#include "base/macros.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/weak_ptr.h"
-#include "base/threading/thread.h"
#include "base/threading/thread_checker.h"
#include "base/time/time.h"
#include "media/capture/video/video_capture_device.h"
@@ -30,15 +29,7 @@ class MEDIA_EXPORT FakeVideoCaptureDevice : public VideoCaptureDevice {
CLIENT_BUFFERS,
};
- enum class BufferPlanarity {
- PACKED,
- TRIPLANAR,
- };
-
- FakeVideoCaptureDevice(BufferOwnership buffer_ownership,
- BufferPlanarity planarity);
FakeVideoCaptureDevice(BufferOwnership buffer_ownership,
- BufferPlanarity planarity,
float fake_capture_rate);
~FakeVideoCaptureDevice() override;
@@ -59,7 +50,6 @@ class MEDIA_EXPORT FakeVideoCaptureDevice : public VideoCaptureDevice {
base::ThreadChecker thread_checker_;
const BufferOwnership buffer_ownership_;
- const BufferPlanarity planarity_;
// Frame rate of the fake video device.
const float fake_capture_rate_;
diff --git a/media/capture/video/fake_video_capture_device_factory.cc b/media/capture/video/fake_video_capture_device_factory.cc
index e97f29b..e228a5d 100644
--- a/media/capture/video/fake_video_capture_device_factory.cc
+++ b/media/capture/video/fake_video_capture_device_factory.cc
@@ -24,7 +24,6 @@ static const float kFakeCaptureDefaultFrameRate = 20.0f;
FakeVideoCaptureDeviceFactory::FakeVideoCaptureDeviceFactory()
: number_of_devices_(1),
fake_vcd_ownership_(FakeVideoCaptureDevice::BufferOwnership::OWN_BUFFERS),
- fake_vcd_planarity_(FakeVideoCaptureDevice::BufferPlanarity::PACKED),
frame_rate_(kFakeCaptureDefaultFrameRate) {}
scoped_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceFactory::Create(
@@ -36,8 +35,8 @@ scoped_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceFactory::Create(
for (int n = 0; n < number_of_devices_; ++n) {
std::string possible_id = base::StringPrintf("/dev/video%d", n);
if (device_name.id().compare(possible_id) == 0) {
- return scoped_ptr<VideoCaptureDevice>(new FakeVideoCaptureDevice(
- fake_vcd_ownership_, fake_vcd_planarity_, frame_rate_));
+ return scoped_ptr<VideoCaptureDevice>(
+ new FakeVideoCaptureDevice(fake_vcd_ownership_, frame_rate_));
}
}
return scoped_ptr<VideoCaptureDevice>();
@@ -108,9 +107,6 @@ void FakeVideoCaptureDeviceFactory::parse_command_line() {
base::EqualsCaseInsensitiveASCII(param.back(), "client")) {
fake_vcd_ownership_ =
FakeVideoCaptureDevice::BufferOwnership::CLIENT_BUFFERS;
- } else if (base::EqualsCaseInsensitiveASCII(param.front(), "planarity") &&
- base::EqualsCaseInsensitiveASCII(param.back(), "triplanar")) {
- fake_vcd_planarity_ = FakeVideoCaptureDevice::BufferPlanarity::TRIPLANAR;
} else if (base::EqualsCaseInsensitiveASCII(param.front(), "fps")) {
double fps = 0;
if (base::StringToDouble(param.back(), &fps)) {
diff --git a/media/capture/video/fake_video_capture_device_factory.h b/media/capture/video/fake_video_capture_device_factory.h
index 9402580..e780f2a 100644
--- a/media/capture/video/fake_video_capture_device_factory.h
+++ b/media/capture/video/fake_video_capture_device_factory.h
@@ -41,7 +41,6 @@ class MEDIA_EXPORT FakeVideoCaptureDeviceFactory
int number_of_devices_;
FakeVideoCaptureDevice::BufferOwnership fake_vcd_ownership_;
- FakeVideoCaptureDevice::BufferPlanarity fake_vcd_planarity_;
float frame_rate_;
};
diff --git a/media/capture/video/fake_video_capture_device_unittest.cc b/media/capture/video/fake_video_capture_device_unittest.cc
index f61b085..eff85fc 100644
--- a/media/capture/video/fake_video_capture_device_unittest.cc
+++ b/media/capture/video/fake_video_capture_device_unittest.cc
@@ -75,18 +75,6 @@ class MockClient : public VideoCaptureDevice::Client {
const base::TimeTicks& timestamp) {
frame_cb_.Run(format);
}
- void OnIncomingCapturedYuvData(const uint8_t* y_data,
- const uint8_t* u_data,
- const uint8_t* v_data,
- size_t y_stride,
- size_t u_stride,
- size_t v_stride,
- const VideoCaptureFormat& frame_format,
- int clockwise_rotation,
- const base::TimeTicks& timestamp) {
- frame_cb_.Run(frame_format);
- }
-
// Virtual methods for capturing using Client's Buffers.
scoped_ptr<Buffer> ReserveOutputBuffer(const gfx::Size& dimensions,
media::VideoPixelFormat format,
@@ -187,9 +175,7 @@ class FakeVideoCaptureDeviceBase : public ::testing::Test {
class FakeVideoCaptureDeviceTest
: public FakeVideoCaptureDeviceBase,
public ::testing::WithParamInterface<
- ::testing::tuple<FakeVideoCaptureDevice::BufferOwnership,
- FakeVideoCaptureDevice::BufferPlanarity,
- float>> {};
+ ::testing::tuple<FakeVideoCaptureDevice::BufferOwnership, float>> {};
struct CommandLineTestData {
// Command line argument
@@ -207,19 +193,18 @@ TEST_P(FakeVideoCaptureDeviceTest, CaptureUsing) {
ASSERT_FALSE(names->empty());
scoped_ptr<VideoCaptureDevice> device(new FakeVideoCaptureDevice(
- testing::get<0>(GetParam()), testing::get<1>(GetParam()),
- testing::get<2>(GetParam())));
+ testing::get<0>(GetParam()), testing::get<1>(GetParam())));
ASSERT_TRUE(device);
VideoCaptureParams capture_params;
capture_params.requested_format.frame_size.SetSize(640, 480);
- capture_params.requested_format.frame_rate = testing::get<2>(GetParam());
+ capture_params.requested_format.frame_rate = testing::get<1>(GetParam());
device->AllocateAndStart(capture_params, std::move(client_));
WaitForCapturedFrame();
EXPECT_EQ(last_format().frame_size.width(), 640);
EXPECT_EQ(last_format().frame_size.height(), 480);
- EXPECT_EQ(last_format().frame_rate, testing::get<2>(GetParam()));
+ EXPECT_EQ(last_format().frame_rate, testing::get<1>(GetParam()));
device->StopAndDeAllocate();
}
@@ -228,8 +213,6 @@ INSTANTIATE_TEST_CASE_P(
FakeVideoCaptureDeviceTest,
Combine(Values(FakeVideoCaptureDevice::BufferOwnership::OWN_BUFFERS,
FakeVideoCaptureDevice::BufferOwnership::CLIENT_BUFFERS),
- Values(FakeVideoCaptureDevice::BufferPlanarity::PACKED,
- FakeVideoCaptureDevice::BufferPlanarity::TRIPLANAR),
Values(20, 29.97, 30, 50, 60)));
TEST_F(FakeVideoCaptureDeviceTest, GetDeviceSupportedFormats) {
diff --git a/media/capture/video/linux/v4l2_capture_delegate.cc b/media/capture/video/linux/v4l2_capture_delegate.cc
index 612672b..c49ecb7 100644
--- a/media/capture/video/linux/v4l2_capture_delegate.cc
+++ b/media/capture/video/linux/v4l2_capture_delegate.cc
@@ -16,8 +16,6 @@
#include "base/strings/stringprintf.h"
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
-#include "media/capture/video/linux/v4l2_capture_delegate_multi_plane.h"
-#include "media/capture/video/linux/v4l2_capture_delegate_single_plane.h"
#include "media/capture/video/linux/video_capture_device_linux.h"
namespace media {
@@ -48,44 +46,76 @@ static struct {
{V4L2_PIX_FMT_YUYV, PIXEL_FORMAT_YUY2, 1},
{V4L2_PIX_FMT_UYVY, PIXEL_FORMAT_UYVY, 1},
{V4L2_PIX_FMT_RGB24, PIXEL_FORMAT_RGB24, 1},
-#if !defined(OS_OPENBSD)
- // TODO(mcasas): add V4L2_PIX_FMT_YVU420M when available in bots.
- {V4L2_PIX_FMT_YUV420M, PIXEL_FORMAT_I420, 3},
-#endif
// MJPEG is usually sitting fairly low since we don't want to have to
- // decode.
- // However, is needed for large resolutions due to USB bandwidth
- // limitations,
- // so GetListOfUsableFourCcs() can duplicate it on top, see that method.
+ // decode. However, it is needed for large resolutions due to USB bandwidth
+ // limitations, so GetListOfUsableFourCcs() can duplicate it on top, see
+ // that method.
{V4L2_PIX_FMT_MJPEG, PIXEL_FORMAT_MJPEG, 1},
// JPEG works as MJPEG on some gspca webcams from field reports, see
// https://code.google.com/p/webrtc/issues/detail?id=529, put it as the
- // least
- // preferred format.
+ // least preferred format.
{V4L2_PIX_FMT_JPEG, PIXEL_FORMAT_MJPEG, 1},
};
-// static
-scoped_refptr<V4L2CaptureDelegate>
-V4L2CaptureDelegate::CreateV4L2CaptureDelegate(
- const VideoCaptureDevice::Name& device_name,
- const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
- int power_line_frequency) {
- switch (device_name.capture_api_type()) {
- case VideoCaptureDevice::Name::V4L2_SINGLE_PLANE:
- return make_scoped_refptr(new V4L2CaptureDelegateSinglePlane(
- device_name, v4l2_task_runner, power_line_frequency));
- case VideoCaptureDevice::Name::V4L2_MULTI_PLANE:
-#if !defined(OS_OPENBSD)
- return make_scoped_refptr(new V4L2CaptureDelegateMultiPlane(
- device_name, v4l2_task_runner, power_line_frequency));
- default:
-#endif
- NOTIMPLEMENTED() << "Unknown V4L2 capture API type";
- return scoped_refptr<V4L2CaptureDelegate>();
- }
+// Fill in |format| with the given parameters.
+static void FillV4L2Format(v4l2_format* format,
+ uint32_t width,
+ uint32_t height,
+ uint32_t pixelformat_fourcc) {
+ memset(format, 0, sizeof(*format));
+ format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ format->fmt.pix.width = width;
+ format->fmt.pix.height = height;
+ format->fmt.pix.pixelformat = pixelformat_fourcc;
+}
+
+// Fills all parts of |buffer|.
+static void FillV4L2Buffer(v4l2_buffer* buffer, int index) {
+ memset(buffer, 0, sizeof(*buffer));
+ buffer->memory = V4L2_MEMORY_MMAP;
+ buffer->index = index;
+ buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+}
+
+static void FillV4L2RequestBuffer(v4l2_requestbuffers* request_buffer,
+ int count) {
+ memset(request_buffer, 0, sizeof(*request_buffer));
+ request_buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ request_buffer->memory = V4L2_MEMORY_MMAP;
+ request_buffer->count = count;
+}
+
+// Returns the input |fourcc| as a std::string four char representation.
+static std::string FourccToString(uint32_t fourcc) {
+ return base::StringPrintf("%c%c%c%c", fourcc & 0xFF, (fourcc >> 8) & 0xFF,
+ (fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF);
}
+// Class keeping track of a SPLANE V4L2 buffer, mmap()ed on construction and
+// munmap()ed on destruction.
+class V4L2CaptureDelegate::BufferTracker
+ : public base::RefCounted<BufferTracker> {
+ public:
+ BufferTracker();
+ // mmap()s the given |fd| according to |buffer|.
+ bool Init(int fd, const v4l2_buffer& buffer);
+
+ const uint8_t* start() const { return start_; }
+ size_t payload_size() const { return payload_size_; }
+ void set_payload_size(size_t payload_size) {
+ DCHECK_LE(payload_size, length_);
+ payload_size_ = payload_size;
+ }
+
+ private:
+ friend class base::RefCounted<BufferTracker>;
+ virtual ~BufferTracker();
+
+ uint8_t* start_;
+ size_t length_;
+ size_t payload_size_;
+};
+
// static
size_t V4L2CaptureDelegate::GetNumPlanesForFourCc(uint32_t fourcc) {
for (const auto& fourcc_and_pixel_format : kSupportedFormatsAndPlanarity) {
@@ -124,51 +154,16 @@ std::list<uint32_t> V4L2CaptureDelegate::GetListOfUsableFourCcs(
return supported_formats;
}
-// static
-std::string V4L2CaptureDelegate::FourccToString(uint32_t fourcc) {
- return base::StringPrintf("%c%c%c%c", fourcc & 0xFF, (fourcc >> 8) & 0xFF,
- (fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF);
-}
-
-V4L2CaptureDelegate::BufferTracker::BufferTracker() {
-}
-
-V4L2CaptureDelegate::BufferTracker::~BufferTracker() {
- for (const auto& plane : planes_) {
- if (plane.start == nullptr)
- continue;
- const int result = munmap(plane.start, plane.length);
- PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer";
- }
-}
-
-void V4L2CaptureDelegate::BufferTracker::AddMmapedPlane(uint8_t* const start,
- size_t length) {
- Plane plane;
- plane.start = start;
- plane.length = length;
- plane.payload_size = 0;
- planes_.push_back(plane);
-}
-
V4L2CaptureDelegate::V4L2CaptureDelegate(
const VideoCaptureDevice::Name& device_name,
const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
int power_line_frequency)
- : capture_type_((device_name.capture_api_type() ==
- VideoCaptureDevice::Name::V4L2_SINGLE_PLANE)
- ? V4L2_BUF_TYPE_VIDEO_CAPTURE
- : V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE),
- v4l2_task_runner_(v4l2_task_runner),
+ : v4l2_task_runner_(v4l2_task_runner),
device_name_(device_name),
power_line_frequency_(power_line_frequency),
is_capturing_(false),
timeout_count_(0),
- rotation_(0) {
-}
-
-V4L2CaptureDelegate::~V4L2CaptureDelegate() {
-}
+ rotation_(0) {}
void V4L2CaptureDelegate::AllocateAndStart(
int width,
@@ -188,23 +183,21 @@ void V4L2CaptureDelegate::AllocateAndStart(
v4l2_capability cap = {};
if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) &&
- ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE ||
- cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) &&
- !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) &&
- !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) {
+ ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
+ !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)))) {
device_fd_.reset();
SetErrorState(FROM_HERE, "This is not a V4L2 video capture device");
return;
}
- // Get supported video formats in preferred order.
- // For large resolutions, favour mjpeg over raw formats.
+ // Get supported video formats in preferred order. For large resolutions,
+ // favour mjpeg over raw formats.
const std::list<uint32_t>& desired_v4l2_formats =
GetListOfUsableFourCcs(width > kMjpegWidth || height > kMjpegHeight);
std::list<uint32_t>::const_iterator best = desired_v4l2_formats.end();
v4l2_fmtdesc fmtdesc = {};
- fmtdesc.type = capture_type_;
+ fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0;
++fmtdesc.index) {
best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat);
@@ -215,12 +208,7 @@ void V4L2CaptureDelegate::AllocateAndStart(
}
DVLOG(1) << "Chosen pixel format is " << FourccToString(*best);
-
- video_fmt_.type = capture_type_;
- if (!FillV4L2Format(&video_fmt_, width, height, *best)) {
- SetErrorState(FROM_HERE, "Failed filling in V4L2 Format");
- return;
- }
+ FillV4L2Format(&video_fmt_, width, height, *best);
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt_)) < 0) {
SetErrorState(FROM_HERE, "Failed to set video capture format");
@@ -235,7 +223,7 @@ void V4L2CaptureDelegate::AllocateAndStart(
// Set capture framerate in the form of capture interval.
v4l2_streamparm streamparm = {};
- streamparm.type = capture_type_;
+ streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
// The following line checks that the driver knows about framerate get/set.
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) {
// Now check if the device is able to accept a capture framerate set.
@@ -279,10 +267,8 @@ void V4L2CaptureDelegate::AllocateAndStart(
capture_format_.frame_rate = frame_rate;
capture_format_.pixel_format = pixel_format;
- v4l2_requestbuffers r_buffer = {};
- r_buffer.type = capture_type_;
- r_buffer.memory = V4L2_MEMORY_MMAP;
- r_buffer.count = kNumVideoBuffers;
+ v4l2_requestbuffers r_buffer;
+ FillV4L2RequestBuffer(&r_buffer, kNumVideoBuffers);
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) {
SetErrorState(FROM_HERE, "Error requesting MMAP buffers from V4L2");
return;
@@ -294,7 +280,8 @@ void V4L2CaptureDelegate::AllocateAndStart(
}
}
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type_)) <
+ v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type)) <
0) {
SetErrorState(FROM_HERE, "VIDIOC_STREAMON failed");
return;
@@ -310,7 +297,8 @@ void V4L2CaptureDelegate::StopAndDeAllocate() {
DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
// The order is important: stop streaming, clear |buffer_pool_|,
// thus munmap()ing the v4l2_buffers, and then return them to the OS.
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type_)) <
+ v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type)) <
0) {
SetErrorState(FROM_HERE, "VIDIOC_STREAMOFF failed");
return;
@@ -318,10 +306,8 @@ void V4L2CaptureDelegate::StopAndDeAllocate() {
buffer_tracker_pool_.clear();
- v4l2_requestbuffers r_buffer = {};
- r_buffer.type = capture_type_;
- r_buffer.memory = V4L2_MEMORY_MMAP;
- r_buffer.count = 0;
+ v4l2_requestbuffers r_buffer;
+ FillV4L2RequestBuffer(&r_buffer, 0);
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0)
SetErrorState(FROM_HERE, "Failed to VIDIOC_REQBUFS with count = 0");
@@ -338,6 +324,8 @@ void V4L2CaptureDelegate::SetRotation(int rotation) {
rotation_ = rotation;
}
+V4L2CaptureDelegate::~V4L2CaptureDelegate() {}
+
bool V4L2CaptureDelegate::MapAndQueueBuffer(int index) {
v4l2_buffer buffer;
FillV4L2Buffer(&buffer, index);
@@ -347,7 +335,7 @@ bool V4L2CaptureDelegate::MapAndQueueBuffer(int index) {
return false;
}
- const scoped_refptr<BufferTracker>& buffer_tracker = CreateBufferTracker();
+ const scoped_refptr<BufferTracker> buffer_tracker(new BufferTracker());
if (!buffer_tracker->Init(device_fd_.get(), buffer)) {
DLOG(ERROR) << "Error creating BufferTracker";
return false;
@@ -362,13 +350,6 @@ bool V4L2CaptureDelegate::MapAndQueueBuffer(int index) {
return true;
}
-void V4L2CaptureDelegate::FillV4L2Buffer(v4l2_buffer* buffer, int i) const {
- memset(buffer, 0, sizeof(*buffer));
- buffer->memory = V4L2_MEMORY_MMAP;
- buffer->index = i;
- FinishFillingV4L2Buffer(buffer);
-}
-
void V4L2CaptureDelegate::DoCapture() {
DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
if (!is_capturing_)
@@ -406,8 +387,12 @@ void V4L2CaptureDelegate::DoCapture() {
return;
}
- SetPayloadSize(buffer_tracker_pool_[buffer.index], buffer);
- SendBuffer(buffer_tracker_pool_[buffer.index], video_fmt_);
+ buffer_tracker_pool_[buffer.index]->set_payload_size(buffer.bytesused);
+ const scoped_refptr<BufferTracker>& buffer_tracker =
+ buffer_tracker_pool_[buffer.index];
+ client_->OnIncomingCapturedData(
+ buffer_tracker->start(), buffer_tracker->payload_size(),
+ capture_format_, rotation_, base::TimeTicks::Now());
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) {
SetErrorState(FROM_HERE, "Failed to enqueue capture buffer");
@@ -427,4 +412,29 @@ void V4L2CaptureDelegate::SetErrorState(
client_->OnError(from_here, reason);
}
+V4L2CaptureDelegate::BufferTracker::BufferTracker() {}
+
+V4L2CaptureDelegate::BufferTracker::~BufferTracker() {
+ if (start_ == nullptr)
+ return;
+ const int result = munmap(start_, length_);
+ PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer";
+}
+
+bool V4L2CaptureDelegate::BufferTracker::Init(int fd,
+ const v4l2_buffer& buffer) {
+ // Some devices require mmap() to be called with both READ and WRITE.
+ // See http://crbug.com/178582.
+ void* const start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
+ MAP_SHARED, fd, buffer.m.offset);
+ if (start == MAP_FAILED) {
+ DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace";
+ return false;
+ }
+ start_ = static_cast<uint8_t*>(start);
+ length_ = buffer.length;
+ payload_size_ = 0;
+ return true;
+}
+
} // namespace media
diff --git a/media/capture/video/linux/v4l2_capture_delegate.h b/media/capture/video/linux/v4l2_capture_delegate.h
index 56af8e5..9d4a8b3 100644
--- a/media/capture/video/linux/v4l2_capture_delegate.h
+++ b/media/capture/video/linux/v4l2_capture_delegate.h
@@ -29,15 +29,10 @@ namespace media {
// Class doing the actual Linux capture using V4L2 API. V4L2 SPLANE/MPLANE
// capture specifics are implemented in derived classes. Created and destroyed
// on the owner's thread, otherwise living and operating on |v4l2_task_runner_|.
-class V4L2CaptureDelegate
+// TODO(mcasas): Make this class non-ref-counted.
+class V4L2CaptureDelegate final
: public base::RefCountedThreadSafe<V4L2CaptureDelegate> {
public:
- // Creates the appropiate VideoCaptureDelegate according to parameters.
- static scoped_refptr<V4L2CaptureDelegate> CreateV4L2CaptureDelegate(
- const VideoCaptureDevice::Name& device_name,
- const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
- int power_line_frequency);
-
// Retrieves the #planes for a given |fourcc|, or 0 if unknown.
static size_t GetNumPlanesForFourCc(uint32_t fourcc);
// Returns the Chrome pixel format for |v4l2_fourcc| or PIXEL_FORMAT_UNKNOWN.
@@ -48,6 +43,11 @@ class V4L2CaptureDelegate
// preference, with MJPEG prioritised depending on |prefer_mjpeg|.
static std::list<uint32_t> GetListOfUsableFourCcs(bool prefer_mjpeg);
+ V4L2CaptureDelegate(
+ const VideoCaptureDevice::Name& device_name,
+ const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
+ int power_line_frequency);
+
// Forward-to versions of VideoCaptureDevice virtual methods.
void AllocateAndStart(int width,
int height,
@@ -57,99 +57,19 @@ class V4L2CaptureDelegate
void SetRotation(int rotation);
- protected:
- // Class keeping track of SPLANE/MPLANE V4L2 buffers, mmap()ed on construction
- // and munmap()ed on destruction. Destruction is syntactically equal for
- // S/MPLANE but not construction, so this is implemented in derived classes.
- // Internally it has a vector of planes, which for SPLANE will contain only
- // one element.
- class BufferTracker : public base::RefCounted<BufferTracker> {
- public:
- BufferTracker();
- // Abstract method to mmap() given |fd| according to |buffer|, planarity
- // specific.
- virtual bool Init(int fd, const v4l2_buffer& buffer) = 0;
-
- const uint8_t* GetPlaneStart(size_t plane) const {
- DCHECK_LT(plane, planes_.size());
- return planes_[plane].start;
- }
-
- size_t GetPlanePayloadSize(size_t plane) const {
- DCHECK_LT(plane, planes_.size());
- return planes_[plane].payload_size;
- }
-
- void SetPlanePayloadSize(size_t plane, size_t payload_size) {
- DCHECK_LT(plane, planes_.size());
- DCHECK_LE(payload_size, planes_[plane].length);
- planes_[plane].payload_size = payload_size;
- }
-
- protected:
- friend class base::RefCounted<BufferTracker>;
- virtual ~BufferTracker();
- // Adds a given mmap()ed plane to |planes_|.
- void AddMmapedPlane(uint8_t* const start, size_t length);
-
- private:
- struct Plane {
- uint8_t* start;
- size_t length;
- size_t payload_size;
- };
- std::vector<Plane> planes_;
- };
-
- V4L2CaptureDelegate(
- const VideoCaptureDevice::Name& device_name,
- const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
- int power_line_frequency);
- virtual ~V4L2CaptureDelegate();
-
- // Creates the necessary, planarity-specific, internal tracking schemes,
- virtual scoped_refptr<BufferTracker> CreateBufferTracker() const = 0;
-
- // Fill in |format| with the given parameters, in a planarity dependent way.
- virtual bool FillV4L2Format(v4l2_format* format,
- uint32_t width,
- uint32_t height,
- uint32_t pixelformat_fourcc) const = 0;
-
- // Finish filling |buffer| struct with planarity-dependent data.
- virtual void FinishFillingV4L2Buffer(v4l2_buffer* buffer) const = 0;
-
- // Fetch the number of bytes occupied by data in |buffer| and set to
- // |buffer_tracker|.
- virtual void SetPayloadSize(
- const scoped_refptr<BufferTracker>& buffer_tracker,
- const v4l2_buffer& buffer) const = 0;
-
- // Sends the captured |buffer| to the |client_|, synchronously.
- virtual void SendBuffer(const scoped_refptr<BufferTracker>& buffer_tracker,
- const v4l2_format& format) const = 0;
-
- // A few accessors for SendBuffer()'s to access private member variables.
- VideoCaptureFormat capture_format() const { return capture_format_; }
- VideoCaptureDevice::Client* client() const { return client_.get(); }
- int rotation() const { return rotation_; }
-
private:
friend class base::RefCountedThreadSafe<V4L2CaptureDelegate>;
+ ~V4L2CaptureDelegate();
- // Returns the input |fourcc| as a std::string four char representation.
- static std::string FourccToString(uint32_t fourcc);
// VIDIOC_QUERYBUFs a buffer from V4L2, creates a BufferTracker for it and
// enqueues it (VIDIOC_QBUF) back into V4L2.
bool MapAndQueueBuffer(int index);
- // Fills all common parts of |buffer|. Delegates to FinishFillingV4L2Buffer()
- // for filling in the planar-dependent parts.
- void FillV4L2Buffer(v4l2_buffer* buffer, int i) const;
+
void DoCapture();
+
void SetErrorState(const tracked_objects::Location& from_here,
const std::string& reason);
- const v4l2_buf_type capture_type_;
const scoped_refptr<base::SingleThreadTaskRunner> v4l2_task_runner_;
const VideoCaptureDevice::Name device_name_;
const int power_line_frequency_;
@@ -161,6 +81,7 @@ class V4L2CaptureDelegate
base::ScopedFD device_fd_;
// Vector of BufferTracker to keep track of mmap()ed pointers and their use.
+ class BufferTracker;
std::vector<scoped_refptr<BufferTracker>> buffer_tracker_pool_;
bool is_capturing_;
diff --git a/media/capture/video/linux/v4l2_capture_delegate_multi_plane.cc b/media/capture/video/linux/v4l2_capture_delegate_multi_plane.cc
deleted file mode 100644
index 22e2af9..0000000
--- a/media/capture/video/linux/v4l2_capture_delegate_multi_plane.cc
+++ /dev/null
@@ -1,100 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/video/linux/v4l2_capture_delegate_multi_plane.h"
-
-#include <stddef.h>
-#include <sys/mman.h>
-
-namespace media {
-
-V4L2CaptureDelegateMultiPlane::V4L2CaptureDelegateMultiPlane(
- const VideoCaptureDevice::Name& device_name,
- const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
- int power_line_frequency)
- : V4L2CaptureDelegate(device_name, v4l2_task_runner, power_line_frequency) {
-}
-
-V4L2CaptureDelegateMultiPlane::~V4L2CaptureDelegateMultiPlane() {
-}
-
-scoped_refptr<V4L2CaptureDelegate::BufferTracker>
-V4L2CaptureDelegateMultiPlane::CreateBufferTracker() const {
- return make_scoped_refptr(new BufferTrackerMPlane());
-}
-
-bool V4L2CaptureDelegateMultiPlane::FillV4L2Format(
- v4l2_format* format,
- uint32_t width,
- uint32_t height,
- uint32_t pixelformat_fourcc) const {
- format->fmt.pix_mp.width = width;
- format->fmt.pix_mp.height = height;
- format->fmt.pix_mp.pixelformat = pixelformat_fourcc;
-
- const size_t num_v4l2_planes =
- V4L2CaptureDelegate::GetNumPlanesForFourCc(pixelformat_fourcc);
- if (num_v4l2_planes == 0u)
- return false;
- DCHECK_LE(num_v4l2_planes, static_cast<size_t>(VIDEO_MAX_PLANES));
- format->fmt.pix_mp.num_planes = num_v4l2_planes;
-
- v4l2_planes_.resize(num_v4l2_planes);
- return true;
-}
-
-void V4L2CaptureDelegateMultiPlane::FinishFillingV4L2Buffer(
- v4l2_buffer* buffer) const {
- buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- buffer->length = v4l2_planes_.size();
-
- static const struct v4l2_plane empty_plane = {};
- std::fill(v4l2_planes_.begin(), v4l2_planes_.end(), empty_plane);
- buffer->m.planes = v4l2_planes_.data();
-}
-
-void V4L2CaptureDelegateMultiPlane::SetPayloadSize(
- const scoped_refptr<BufferTracker>& buffer_tracker,
- const v4l2_buffer& buffer) const {
- for (size_t i = 0; i < v4l2_planes_.size() && i < buffer.length; i++)
- buffer_tracker->SetPlanePayloadSize(i, buffer.m.planes[i].bytesused);
-}
-
-void V4L2CaptureDelegateMultiPlane::SendBuffer(
- const scoped_refptr<BufferTracker>& buffer_tracker,
- const v4l2_format& format) const {
- DCHECK_EQ(capture_format().pixel_format, PIXEL_FORMAT_I420);
- const size_t y_stride = format.fmt.pix_mp.plane_fmt[0].bytesperline;
- const size_t u_stride = format.fmt.pix_mp.plane_fmt[1].bytesperline;
- const size_t v_stride = format.fmt.pix_mp.plane_fmt[2].bytesperline;
- DCHECK_GE(y_stride, 1u * capture_format().frame_size.width());
- DCHECK_GE(u_stride, 1u * capture_format().frame_size.width() / 2);
- DCHECK_GE(v_stride, 1u * capture_format().frame_size.width() / 2);
- client()->OnIncomingCapturedYuvData(
- buffer_tracker->GetPlaneStart(0), buffer_tracker->GetPlaneStart(1),
- buffer_tracker->GetPlaneStart(2), y_stride, u_stride, v_stride,
- capture_format(), rotation(), base::TimeTicks::Now());
-}
-
-bool V4L2CaptureDelegateMultiPlane::BufferTrackerMPlane::Init(
- int fd,
- const v4l2_buffer& buffer) {
- for (size_t p = 0; p < buffer.length; ++p) {
- // Some devices require mmap() to be called with both READ and WRITE.
- // See http://crbug.com/178582.
- void* const start =
- mmap(NULL, buffer.m.planes[p].length, PROT_READ | PROT_WRITE,
- MAP_SHARED, fd, buffer.m.planes[p].m.mem_offset);
- if (start == MAP_FAILED) {
- DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace";
- return false;
- }
- AddMmapedPlane(static_cast<uint8_t*>(start), buffer.m.planes[p].length);
- DVLOG(3) << "Mmap()ed plane #" << p << " of " << buffer.m.planes[p].length
- << "B";
- }
- return true;
-}
-
-} // namespace media
diff --git a/media/capture/video/linux/v4l2_capture_delegate_multi_plane.h b/media/capture/video/linux/v4l2_capture_delegate_multi_plane.h
deleted file mode 100644
index 265f6b5..0000000
--- a/media/capture/video/linux/v4l2_capture_delegate_multi_plane.h
+++ /dev/null
@@ -1,64 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_VIDEO_CAPTURE_LINUX_V4L2_CAPTURE_DELEGATE_MULTI_PLANE_H_
-#define MEDIA_VIDEO_CAPTURE_LINUX_V4L2_CAPTURE_DELEGATE_MULTI_PLANE_H_
-
-#include <stdint.h>
-
-#include "base/memory/ref_counted.h"
-#include "build/build_config.h"
-#include "media/capture/video/linux/v4l2_capture_delegate.h"
-
-#if defined(OS_OPENBSD)
-#error "OpenBSD does not support MPlane capture API."
-#endif
-
-namespace base {
-class SingleThreadTaskRunner;
-} // namespace base
-
-namespace media {
-
-// V4L2 specifics for MPLANE API.
-class V4L2CaptureDelegateMultiPlane final : public V4L2CaptureDelegate {
- public:
- V4L2CaptureDelegateMultiPlane(
- const VideoCaptureDevice::Name& device_name,
- const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
- int power_line_frequency);
-
- private:
- // BufferTracker derivation to implement construction semantics for MPLANE.
- class BufferTrackerMPlane final : public BufferTracker {
- public:
- bool Init(int fd, const v4l2_buffer& buffer) override;
-
- private:
- ~BufferTrackerMPlane() override {}
- };
-
- ~V4L2CaptureDelegateMultiPlane() override;
-
- // V4L2CaptureDelegate virtual methods implementation.
- scoped_refptr<BufferTracker> CreateBufferTracker() const override;
- bool FillV4L2Format(v4l2_format* format,
- uint32_t width,
- uint32_t height,
- uint32_t pixelformat_fourcc) const override;
- void FinishFillingV4L2Buffer(v4l2_buffer* buffer) const override;
- void SetPayloadSize(const scoped_refptr<BufferTracker>& buffer_tracker,
- const v4l2_buffer& buffer) const override;
- void SendBuffer(const scoped_refptr<BufferTracker>& buffer_tracker,
- const v4l2_format& format) const override;
-
- // Vector to allocate and track as many v4l2_plane structs as planes, needed
- // for v4l2_buffer.m.planes. This is a scratchpad marked mutable to enable
- // using it in otherwise const methods.
- mutable std::vector<struct v4l2_plane> v4l2_planes_;
-};
-
-} // namespace media
-
-#endif // MEDIA_VIDEO_CAPTURE_LINUX_V4L2_CAPTURE_DELEGATE_SINGLE_PLANE_H_
diff --git a/media/capture/video/linux/v4l2_capture_delegate_single_plane.cc b/media/capture/video/linux/v4l2_capture_delegate_single_plane.cc
deleted file mode 100644
index 722eedc..0000000
--- a/media/capture/video/linux/v4l2_capture_delegate_single_plane.cc
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/video/linux/v4l2_capture_delegate_single_plane.h"
-
-#include <sys/mman.h>
-
-namespace media {
-
-scoped_refptr<V4L2CaptureDelegate::BufferTracker>
-V4L2CaptureDelegateSinglePlane::CreateBufferTracker() const {
- return make_scoped_refptr(new BufferTrackerSPlane());
-}
-
-bool V4L2CaptureDelegateSinglePlane::FillV4L2Format(
- v4l2_format* format,
- uint32_t width,
- uint32_t height,
- uint32_t pixelformat_fourcc) const {
- format->fmt.pix.width = width;
- format->fmt.pix.height = height;
- format->fmt.pix.pixelformat = pixelformat_fourcc;
- return true;
-}
-
-void V4L2CaptureDelegateSinglePlane::FinishFillingV4L2Buffer(
- v4l2_buffer* buffer) const {
- buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-}
-
-void V4L2CaptureDelegateSinglePlane::SetPayloadSize(
- const scoped_refptr<BufferTracker>& buffer_tracker,
- const v4l2_buffer& buffer) const {
- buffer_tracker->SetPlanePayloadSize(0, buffer.bytesused);
-}
-
-void V4L2CaptureDelegateSinglePlane::SendBuffer(
- const scoped_refptr<BufferTracker>& buffer_tracker,
- const v4l2_format& format) const {
- client()->OnIncomingCapturedData(
- buffer_tracker->GetPlaneStart(0), buffer_tracker->GetPlanePayloadSize(0),
- capture_format(), rotation(), base::TimeTicks::Now());
-}
-
-bool V4L2CaptureDelegateSinglePlane::BufferTrackerSPlane::Init(
- int fd,
- const v4l2_buffer& buffer) {
- // Some devices require mmap() to be called with both READ and WRITE.
- // See http://crbug.com/178582.
- void* const start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
- MAP_SHARED, fd, buffer.m.offset);
- if (start == MAP_FAILED) {
- DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace";
- return false;
- }
- AddMmapedPlane(static_cast<uint8_t*>(start), buffer.length);
- return true;
-}
-
-} // namespace media
diff --git a/media/capture/video/linux/v4l2_capture_delegate_single_plane.h b/media/capture/video/linux/v4l2_capture_delegate_single_plane.h
deleted file mode 100644
index 9ddb9b6..0000000
--- a/media/capture/video/linux/v4l2_capture_delegate_single_plane.h
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_VIDEO_CAPTURE_LINUX_V4L2_CAPTURE_DELEGATE_SINGLE_PLANE_H_
-#define MEDIA_VIDEO_CAPTURE_LINUX_V4L2_CAPTURE_DELEGATE_SINGLE_PLANE_H_
-
-#include <stdint.h>
-
-#include "base/memory/ref_counted.h"
-#include "media/capture/video/linux/v4l2_capture_delegate.h"
-#include "media/capture/video/video_capture_device.h"
-
-namespace base {
-class SingleThreadTaskRunner;
-} // namespace base
-
-namespace media {
-
-// V4L2 specifics for SPLANE API.
-class V4L2CaptureDelegateSinglePlane final : public V4L2CaptureDelegate {
- public:
- V4L2CaptureDelegateSinglePlane(
- const VideoCaptureDevice::Name& device_name,
- const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
- int power_line_frequency)
- : V4L2CaptureDelegate(device_name,
- v4l2_task_runner,
- power_line_frequency) {}
-
- private:
- // BufferTracker derivation to implement construction semantics for SPLANE.
- class BufferTrackerSPlane final : public BufferTracker {
- public:
- bool Init(int fd, const v4l2_buffer& buffer) override;
-
- private:
- ~BufferTrackerSPlane() override {}
- };
-
- ~V4L2CaptureDelegateSinglePlane() override {}
-
- // V4L2CaptureDelegate virtual methods implementation.
- scoped_refptr<BufferTracker> CreateBufferTracker() const override;
- bool FillV4L2Format(v4l2_format* format,
- uint32_t width,
- uint32_t height,
- uint32_t pixelformat_fourcc) const override;
- void FinishFillingV4L2Buffer(v4l2_buffer* buffer) const override;
- void SetPayloadSize(const scoped_refptr<BufferTracker>& buffer_tracker,
- const v4l2_buffer& buffer) const override;
- void SendBuffer(const scoped_refptr<BufferTracker>& buffer_tracker,
- const v4l2_format& format) const override;
-};
-
-} // namespace media
-
-#endif // MEDIA_VIDEO_CAPTURE_LINUX_V4L2_CAPTURE_DELEGATE_MULTI_PLANE_H_
diff --git a/media/capture/video/linux/video_capture_device_factory_linux.cc b/media/capture/video/linux/video_capture_device_factory_linux.cc
index 901b071..000f548 100644
--- a/media/capture/video/linux/video_capture_device_factory_linux.cc
+++ b/media/capture/video/linux/video_capture_device_factory_linux.cc
@@ -28,28 +28,21 @@
namespace media {
static bool HasUsableFormats(int fd, uint32_t capabilities) {
+ if (!(capabilities & V4L2_CAP_VIDEO_CAPTURE))
+ return false;
+
const std::list<uint32_t>& usable_fourccs =
VideoCaptureDeviceLinux::GetListOfUsableFourCCs(false);
-
- static const struct {
- int capability;
- v4l2_buf_type buf_type;
- } kCapabilityAndBufferTypes[] = {
- {V4L2_CAP_VIDEO_CAPTURE, V4L2_BUF_TYPE_VIDEO_CAPTURE},
- {V4L2_CAP_VIDEO_CAPTURE_MPLANE, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE}};
-
- for (const auto& capability_and_buffer_type : kCapabilityAndBufferTypes) {
- v4l2_fmtdesc fmtdesc = {};
- if (capabilities & capability_and_buffer_type.capability) {
- fmtdesc.type = capability_and_buffer_type.buf_type;
- for (; HANDLE_EINTR(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) == 0;
- ++fmtdesc.index) {
- if (std::find(usable_fourccs.begin(), usable_fourccs.end(),
- fmtdesc.pixelformat) != usable_fourccs.end())
- return true;
- }
+ v4l2_fmtdesc fmtdesc = {};
+ fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ for (; HANDLE_EINTR(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) == 0;
+ ++fmtdesc.index) {
+ if (std::find(usable_fourccs.begin(), usable_fourccs.end(),
+ fmtdesc.pixelformat) != usable_fourccs.end()) {
+ return true;
}
}
+
DLOG(ERROR) << "No usable formats found";
return false;
}
@@ -89,10 +82,9 @@ static std::list<float> GetFrameRateList(int fd,
static void GetSupportedFormatsForV4L2BufferType(
int fd,
- v4l2_buf_type buf_type,
media::VideoCaptureFormats* supported_formats) {
v4l2_fmtdesc v4l2_format = {};
- v4l2_format.type = buf_type;
+ v4l2_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
for (; HANDLE_EINTR(ioctl(fd, VIDIOC_ENUM_FMT, &v4l2_format)) == 0;
++v4l2_format.index) {
VideoCaptureFormat supported_format;
@@ -182,16 +174,12 @@ void VideoCaptureDeviceFactoryLinux::GetDeviceNames(
// http://crbug.com/139356.
v4l2_capability cap;
if ((HANDLE_EINTR(ioctl(fd.get(), VIDIOC_QUERYCAP, &cap)) == 0) &&
- ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE ||
- cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) &&
- !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) &&
- !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)) &&
+ (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE &&
+ !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)) &&
HasUsableFormats(fd.get(), cap.capabilities)) {
device_names->push_back(VideoCaptureDevice::Name(
reinterpret_cast<char*>(cap.card), unique_id,
- (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
- ? VideoCaptureDevice::Name::V4L2_MULTI_PLANE
- : VideoCaptureDevice::Name::V4L2_SINGLE_PLANE));
+ VideoCaptureDevice::Name::V4L2_SINGLE_PLANE));
}
}
}
@@ -209,13 +197,7 @@ void VideoCaptureDeviceFactoryLinux::GetDeviceSupportedFormats(
DCHECK_NE(device.capture_api_type(),
VideoCaptureDevice::Name::API_TYPE_UNKNOWN);
- const v4l2_buf_type buf_type =
- (device.capture_api_type() == VideoCaptureDevice::Name::V4L2_MULTI_PLANE)
- ? V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE
- : V4L2_BUF_TYPE_VIDEO_CAPTURE;
- GetSupportedFormatsForV4L2BufferType(fd.get(), buf_type, supported_formats);
-
- return;
+ GetSupportedFormatsForV4L2BufferType(fd.get(), supported_formats);
}
// static
diff --git a/media/capture/video/linux/video_capture_device_linux.cc b/media/capture/video/linux/video_capture_device_linux.cc
index 32f30e0..f2a1d1d 100644
--- a/media/capture/video/linux/video_capture_device_linux.cc
+++ b/media/capture/video/linux/video_capture_device_linux.cc
@@ -81,8 +81,7 @@ const std::string VideoCaptureDevice::Name::GetModel() const {
}
VideoCaptureDeviceLinux::VideoCaptureDeviceLinux(const Name& device_name)
- : v4l2_thread_("V4L2CaptureThread"), device_name_(device_name) {
-}
+ : v4l2_thread_("V4L2CaptureThread"), device_name_(device_name) {}
VideoCaptureDeviceLinux::~VideoCaptureDeviceLinux() {
// Check if the thread is running.
@@ -101,7 +100,7 @@ void VideoCaptureDeviceLinux::AllocateAndStart(
const int line_frequency =
TranslatePowerLineFrequencyToV4L2(GetPowerLineFrequency(params));
- capture_impl_ = V4L2CaptureDelegate::CreateV4L2CaptureDelegate(
+ capture_impl_ = new V4L2CaptureDelegate(
device_name_, v4l2_thread_.task_runner(), line_frequency);
if (!capture_impl_) {
client->OnError(FROM_HERE, "Failed to create VideoCaptureDelegate");
diff --git a/media/capture/video/video_capture_device.cc b/media/capture/video/video_capture_device.cc
index d624775..f3fc565 100644
--- a/media/capture/video/video_capture_device.cc
+++ b/media/capture/video/video_capture_device.cc
@@ -87,8 +87,6 @@ const char* VideoCaptureDevice::Name::GetCaptureApiTypeString() const {
switch (capture_api_type()) {
case V4L2_SINGLE_PLANE:
return "V4L2 SPLANE";
- case V4L2_MULTI_PLANE:
- return "V4L2 MPLANE";
default:
NOTREACHED() << "Unknown Video Capture API type!";
return "Unknown API";
diff --git a/media/capture/video/video_capture_device.h b/media/capture/video/video_capture_device.h
index 2caa669..7d8ac1f 100644
--- a/media/capture/video/video_capture_device.h
+++ b/media/capture/video/video_capture_device.h
@@ -55,7 +55,6 @@ class MEDIA_EXPORT VideoCaptureDevice {
// Linux/CrOS targets Capture Api type: it can only be set on construction.
enum CaptureApiType {
V4L2_SINGLE_PLANE,
- V4L2_MULTI_PLANE,
API_TYPE_UNKNOWN
};
#elif defined(OS_WIN)
@@ -213,19 +212,6 @@ class MEDIA_EXPORT VideoCaptureDevice {
int clockwise_rotation,
const base::TimeTicks& timestamp) = 0;
- // Captured a 3 planar YUV frame. Planes are possibly disjoint.
- // |frame_format| must indicate I420.
- virtual void OnIncomingCapturedYuvData(
- const uint8_t* y_data,
- const uint8_t* u_data,
- const uint8_t* v_data,
- size_t y_stride,
- size_t u_stride,
- size_t v_stride,
- const VideoCaptureFormat& frame_format,
- int clockwise_rotation,
- const base::TimeTicks& timestamp) = 0;
-
// Reserve an output buffer into which contents can be captured directly.
// The returned Buffer will always be allocated with a memory size suitable
// for holding a packed video frame with pixels of |format| format, of
diff --git a/media/media.gyp b/media/media.gyp
index 8cbf9c4..de4de9d 100644
--- a/media/media.gyp
+++ b/media/media.gyp
@@ -471,10 +471,6 @@
'capture/video/file_video_capture_device_factory.h',
'capture/video/linux/v4l2_capture_delegate.cc',
'capture/video/linux/v4l2_capture_delegate.h',
- 'capture/video/linux/v4l2_capture_delegate_multi_plane.cc',
- 'capture/video/linux/v4l2_capture_delegate_multi_plane.h',
- 'capture/video/linux/v4l2_capture_delegate_single_plane.cc',
- 'capture/video/linux/v4l2_capture_delegate_single_plane.h',
'capture/video/linux/video_capture_device_chromeos.cc',
'capture/video/linux/video_capture_device_chromeos.h',
'capture/video/linux/video_capture_device_factory_linux.cc',