author     sheu@chromium.org <sheu@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-11-23 02:22:44 +0000
committer  sheu@chromium.org <sheu@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-11-23 02:22:44 +0000
commit     1251c46bd12dbed396ec0e8c0d09761b536139de (patch)
tree       0fdaa7c4af306dba526acadb1445ed0e3ac57b1f
parent     9f54263155acdec4ad09830578a90cb3a2871c9c (diff)
Reorganize media::VideoCapture* types
The purpose of this CL is to clean up the distinction between VideoCaptureFormat (which identifies the captured format of a frame), VideoCaptureParams (which identifies the requested format of a capture), and VideoCaptureCapability (which identifies the capture capabilities of a device). Notably:

* VideoCaptureFormat::frame_size_type -> VideoCaptureParams::allow_resolution_change, since variable-resolution capability is a per-session, not a per-frame, property.
* VideoCaptureCapability::color -> VideoCaptureFormat::pixel_format, since frame color format is a per-frame property.
* Because VideoCaptureParams now holds a VideoCaptureFormat member, capture requests can request a particular capture color format.

BUG=269312
TEST=local build, run unittests, chrome on CrOS snow, desktop Linux

Review URL: https://codereview.chromium.org/68503005

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@236927 0039d316-1c4b-4281-b951-d872f2087c98
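To make the reorganization easier to follow, here is a simplified sketch of how the reorganized types relate after this CL, together with a typical call site, assembled from the hunks that follow. Field names, the gfx::Size frame_size member, and the three-argument VideoCaptureFormat constructor all appear in the diff; the struct bodies are abridged (the real media/video/capture/video_capture_types.h carries additional constructors and helpers), so treat this as an illustration rather than the actual header.

    // Sketch only -- abridged from the hunks below, not the verbatim header.
    namespace media {

    // Per-frame properties of captured video.
    struct VideoCaptureFormat {
      VideoCaptureFormat(const gfx::Size& frame_size,
                         int frame_rate,
                         VideoPixelFormat pixel_format);
      bool IsValid() const;

      gfx::Size frame_size;
      int frame_rate;
      VideoPixelFormat pixel_format;   // was VideoCaptureCapability::color
    };

    // Per-session capture request. The session id is no longer a member; it is
    // now passed alongside the params (e.g. VideoCaptureHost::OnStartCapture).
    struct VideoCaptureParams {
      VideoCaptureFormat requested_format;
      bool allow_resolution_change;    // was VideoCaptureFormat::frame_size_type
    };

    }  // namespace media

    // Typical call site after this CL (cf. desktop_capture_device_unittest.cc):
    media::VideoCaptureParams capture_params;
    capture_params.requested_format = media::VideoCaptureFormat(
        gfx::Size(640, 480), kFrameRate, media::PIXEL_FORMAT_I420);
    capture_params.allow_resolution_change = false;
    capture_device.AllocateAndStart(
        capture_params, client.PassAs<media::VideoCaptureDevice::Client>());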
-rw-r--r--  content/browser/renderer_host/media/desktop_capture_device.cc  65
-rw-r--r--  content/browser/renderer_host/media/desktop_capture_device.h  5
-rw-r--r--  content/browser/renderer_host/media/desktop_capture_device_unittest.cc  80
-rw-r--r--  content/browser/renderer_host/media/video_capture_controller.cc  66
-rw-r--r--  content/browser/renderer_host/media/video_capture_controller.h  1
-rw-r--r--  content/browser/renderer_host/media/video_capture_controller_unittest.cc  104
-rw-r--r--  content/browser/renderer_host/media/video_capture_host.cc  33
-rw-r--r--  content/browser/renderer_host/media/video_capture_host.h  11
-rw-r--r--  content/browser/renderer_host/media/video_capture_host_unittest.cc  26
-rw-r--r--  content/browser/renderer_host/media/video_capture_manager.cc  43
-rw-r--r--  content/browser/renderer_host/media/video_capture_manager.h  7
-rw-r--r--  content/browser/renderer_host/media/video_capture_manager_unittest.cc  13
-rw-r--r--  content/browser/renderer_host/media/web_contents_video_capture_device.cc  42
-rw-r--r--  content/browser/renderer_host/media/web_contents_video_capture_device.h  5
-rw-r--r--  content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc  115
-rw-r--r--  content/common/media/media_param_traits.cc  21
-rw-r--r--  content/common/media/video_capture_messages.h  11
-rw-r--r--  content/renderer/media/rtc_video_capture_delegate.cc  1
-rw-r--r--  content/renderer/media/rtc_video_capturer.cc  10
-rw-r--r--  content/renderer/media/video_capture_impl.cc  44
-rw-r--r--  content/renderer/media/video_capture_impl_unittest.cc  5
-rw-r--r--  content/renderer/media/video_capture_message_filter_unittest.cc  17
-rw-r--r--  content/renderer/pepper/pepper_video_capture_host.cc  10
-rw-r--r--  media/video/capture/android/video_capture_device_android.cc  54
-rw-r--r--  media/video/capture/android/video_capture_device_android.h  7
-rw-r--r--  media/video/capture/fake_video_capture_device.cc  113
-rw-r--r--  media/video/capture/fake_video_capture_device.h  14
-rw-r--r--  media/video/capture/linux/video_capture_device_linux.cc  38
-rw-r--r--  media/video/capture/linux/video_capture_device_linux.h  4
-rw-r--r--  media/video/capture/mac/video_capture_device_mac.h  10
-rw-r--r--  media/video/capture/mac/video_capture_device_mac.mm  66
-rw-r--r--  media/video/capture/mac/video_capture_device_qtkit_mac.mm  11
-rw-r--r--  media/video/capture/video_capture_device.h  13
-rw-r--r--  media/video/capture/video_capture_device_unittest.cc  182
-rw-r--r--  media/video/capture/video_capture_proxy.cc  1
-rw-r--r--  media/video/capture/video_capture_types.cc  48
-rw-r--r--  media/video/capture/video_capture_types.h  44
-rw-r--r--  media/video/capture/win/capability_list_win.cc  10
-rw-r--r--  media/video/capture/win/capability_list_win.h  8
-rw-r--r--  media/video/capture/win/sink_filter_win.cc  9
-rw-r--r--  media/video/capture/win/sink_filter_win.h  7
-rw-r--r--  media/video/capture/win/sink_input_pin_win.cc  60
-rw-r--r--  media/video/capture/win/sink_input_pin_win.h  8
-rw-r--r--  media/video/capture/win/video_capture_device_mf_win.cc  30
-rw-r--r--  media/video/capture/win/video_capture_device_mf_win.h  8
-rw-r--r--  media/video/capture/win/video_capture_device_win.cc  66
-rw-r--r--  media/video/capture/win/video_capture_device_win.h  8
47 files changed, 743 insertions, 811 deletions
diff --git a/content/browser/renderer_host/media/desktop_capture_device.cc b/content/browser/renderer_host/media/desktop_capture_device.cc
index 7f3e779..fe9e9bb 100644
--- a/content/browser/renderer_host/media/desktop_capture_device.cc
+++ b/content/browser/renderer_host/media/desktop_capture_device.cc
@@ -53,7 +53,7 @@ class DesktopCaptureDevice::Core
scoped_ptr<webrtc::DesktopCapturer> capturer);
// Implementation of VideoCaptureDevice methods.
- void AllocateAndStart(const media::VideoCaptureCapability& capture_format,
+ void AllocateAndStart(const media::VideoCaptureParams& params,
scoped_ptr<Client> client);
void StopAndDeAllocate();
@@ -67,7 +67,7 @@ class DesktopCaptureDevice::Core
// Helper methods that run on the |task_runner_|. Posted from the
// corresponding public methods.
- void DoAllocateAndStart(const media::VideoCaptureCapability& capture_format,
+ void DoAllocateAndStart(const media::VideoCaptureParams& params,
scoped_ptr<Client> client);
void DoStopAndDeAllocate();
@@ -97,10 +97,10 @@ class DesktopCaptureDevice::Core
scoped_ptr<Client> client_;
// Requested video capture format (width, height, frame rate, etc).
- media::VideoCaptureCapability requested_format_;
+ media::VideoCaptureParams requested_params_;
// Actual video capture format being generated.
- media::VideoCaptureCapability capture_format_;
+ media::VideoCaptureFormat capture_format_;
// Size of frame most recently captured from the source.
webrtc::DesktopSize previous_frame_size_;
@@ -136,18 +136,15 @@ DesktopCaptureDevice::Core::~Core() {
}
void DesktopCaptureDevice::Core::AllocateAndStart(
- const media::VideoCaptureCapability& capture_format,
+ const media::VideoCaptureParams& params,
scoped_ptr<Client> client) {
- DCHECK_GT(capture_format.width, 0);
- DCHECK_GT(capture_format.height, 0);
- DCHECK_GT(capture_format.frame_rate, 0);
+ DCHECK_GT(params.requested_format.frame_size.GetArea(), 0);
+ DCHECK_GT(params.requested_format.frame_rate, 0);
task_runner_->PostTask(
FROM_HERE,
- base::Bind(&Core::DoAllocateAndStart,
- this,
- capture_format,
- base::Passed(&client)));
+ base::Bind(
+ &Core::DoAllocateAndStart, this, params, base::Passed(&client)));
}
void DesktopCaptureDevice::Core::StopAndDeAllocate() {
@@ -181,8 +178,8 @@ void DesktopCaptureDevice::Core::OnCaptureCompleted(
// Handle initial frame size and size changes.
RefreshCaptureFormat(frame->size());
- webrtc::DesktopSize output_size(capture_format_.width,
- capture_format_.height);
+ webrtc::DesktopSize output_size(capture_format_.frame_size.width(),
+ capture_format_.frame_size.height());
size_t output_bytes = output_size.width() * output_size.height() *
webrtc::DesktopFrame::kBytesPerPixel;
const uint8_t* output_data = NULL;
@@ -228,7 +225,7 @@ void DesktopCaptureDevice::Core::OnCaptureCompleted(
}
void DesktopCaptureDevice::Core::DoAllocateAndStart(
- const media::VideoCaptureCapability& capture_format,
+ const media::VideoCaptureParams& params,
scoped_ptr<Client> client) {
DCHECK(task_runner_->RunsTasksOnCurrentThread());
DCHECK(desktop_capturer_);
@@ -236,19 +233,12 @@ void DesktopCaptureDevice::Core::DoAllocateAndStart(
DCHECK(!client_.get());
client_ = client.Pass();
- requested_format_ = capture_format;
+ requested_params_ = params;
- capture_format_.frame_rate = requested_format_.frame_rate;
-
- // Support dynamic changes in resolution only if requester also does.
- if (requested_format_.frame_size_type ==
- media::VariableResolutionVideoCaptureDevice) {
- capture_format_.frame_size_type =
- media::VariableResolutionVideoCaptureDevice;
- }
+ capture_format_ = requested_params_.requested_format;
// This capturer always outputs ARGB, non-interlaced.
- capture_format_.color = media::PIXEL_FORMAT_ARGB;
+ capture_format_.pixel_format = media::PIXEL_FORMAT_ARGB;
desktop_capturer_->Start(this);
@@ -273,28 +263,31 @@ void DesktopCaptureDevice::Core::RefreshCaptureFormat(
output_frame_.reset();
if (previous_frame_size_.is_empty() ||
- requested_format_.frame_size_type ==
- media::VariableResolutionVideoCaptureDevice) {
+ requested_params_.allow_resolution_change) {
// If this is the first frame, or the receiver supports variable resolution
// then determine the output size by treating the requested width & height
// as maxima.
- if (frame_size.width() > requested_format_.width ||
- frame_size.height() > requested_format_.height) {
+ if (frame_size.width() >
+ requested_params_.requested_format.frame_size.width() ||
+ frame_size.height() >
+ requested_params_.requested_format.frame_size.height()) {
output_rect_ = ComputeLetterboxRect(
- webrtc::DesktopSize(requested_format_.width,
- requested_format_.height),
+ webrtc::DesktopSize(
+ requested_params_.requested_format.frame_size.width(),
+ requested_params_.requested_format.frame_size.height()),
frame_size);
output_rect_.Translate(-output_rect_.left(), -output_rect_.top());
} else {
output_rect_ = webrtc::DesktopRect::MakeSize(frame_size);
}
- capture_format_.width = output_rect_.width();
- capture_format_.height = output_rect_.height();
+ capture_format_.frame_size.SetSize(output_rect_.width(),
+ output_rect_.height());
} else {
// Otherwise the output frame size cannot change, so just scale and
// letterbox.
output_rect_ = ComputeLetterboxRect(
- webrtc::DesktopSize(capture_format_.width, capture_format_.height),
+ webrtc::DesktopSize(capture_format_.frame_size.width(),
+ capture_format_.frame_size.height()),
frame_size);
}
@@ -411,9 +404,9 @@ DesktopCaptureDevice::~DesktopCaptureDevice() {
}
void DesktopCaptureDevice::AllocateAndStart(
- const media::VideoCaptureCapability& capture_format,
+ const media::VideoCaptureParams& params,
scoped_ptr<Client> client) {
- core_->AllocateAndStart(capture_format, client.Pass());
+ core_->AllocateAndStart(params, client.Pass());
}
void DesktopCaptureDevice::StopAndDeAllocate() {
diff --git a/content/browser/renderer_host/media/desktop_capture_device.h b/content/browser/renderer_host/media/desktop_capture_device.h
index 6a12d84..b418a29 100644
--- a/content/browser/renderer_host/media/desktop_capture_device.h
+++ b/content/browser/renderer_host/media/desktop_capture_device.h
@@ -38,9 +38,8 @@ class CONTENT_EXPORT DesktopCaptureDevice : public media::VideoCaptureDevice {
virtual ~DesktopCaptureDevice();
// VideoCaptureDevice interface.
- virtual void AllocateAndStart(
- const media::VideoCaptureCapability& capture_format,
- scoped_ptr<Client> client) OVERRIDE;
+ virtual void AllocateAndStart(const media::VideoCaptureParams& params,
+ scoped_ptr<Client> client) OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
private:
diff --git a/content/browser/renderer_host/media/desktop_capture_device_unittest.cc b/content/browser/renderer_host/media/desktop_capture_device_unittest.cc
index a3926e9..c8843e6 100644
--- a/content/browser/renderer_host/media/desktop_capture_device_unittest.cc
+++ b/content/browser/renderer_host/media/desktop_capture_device_unittest.cc
@@ -51,7 +51,7 @@ class MockDeviceClient : public media::VideoCaptureDevice::Client {
int rotation,
bool flip_vert,
bool flip_horiz,
- const media::VideoCaptureCapability& frame_info));
+ const media::VideoCaptureFormat& frame_format));
MOCK_METHOD5(OnIncomingCapturedBuffer,
void(const scoped_refptr<Buffer>& buffer,
media::VideoFrame::Format format,
@@ -120,7 +120,7 @@ TEST_F(DesktopCaptureDeviceTest, MAYBE_Capture) {
DesktopCaptureDevice capture_device(
worker_pool_->GetSequencedTaskRunner(worker_pool_->GetSequenceToken()),
capturer.Pass());
- media::VideoCaptureCapability caps;
+ media::VideoCaptureFormat format;
base::WaitableEvent done_event(false, false);
int frame_size;
@@ -129,23 +129,25 @@ TEST_F(DesktopCaptureDeviceTest, MAYBE_Capture) {
EXPECT_CALL(*client, OnIncomingCapturedFrame(_, _, _, _, _, _, _))
.WillRepeatedly(
DoAll(SaveArg<1>(&frame_size),
- SaveArg<6>(&caps),
+ SaveArg<6>(&format),
InvokeWithoutArgs(&done_event, &base::WaitableEvent::Signal)));
- media::VideoCaptureCapability capture_format(
- 640, 480, kFrameRate, media::PIXEL_FORMAT_I420,
- media::ConstantResolutionVideoCaptureDevice);
+ media::VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(640, 480);
+ capture_params.requested_format.frame_rate = kFrameRate;
+ capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
capture_device.AllocateAndStart(
- capture_format, client.PassAs<media::VideoCaptureDevice::Client>());
+ capture_params, client.PassAs<media::VideoCaptureDevice::Client>());
EXPECT_TRUE(done_event.TimedWait(TestTimeouts::action_max_timeout()));
capture_device.StopAndDeAllocate();
- EXPECT_GT(caps.width, 0);
- EXPECT_GT(caps.height, 0);
- EXPECT_EQ(kFrameRate, caps.frame_rate);
- EXPECT_EQ(media::PIXEL_FORMAT_ARGB, caps.color);
+ EXPECT_GT(format.frame_size.width(), 0);
+ EXPECT_GT(format.frame_size.height(), 0);
+ EXPECT_EQ(kFrameRate, format.frame_rate);
+ EXPECT_EQ(media::PIXEL_FORMAT_ARGB, format.pixel_format);
- EXPECT_EQ(caps.width * caps.height * 4, frame_size);
+ EXPECT_EQ(format.frame_size.GetArea() * 4, frame_size);
worker_pool_->FlushForTesting();
}
@@ -158,7 +160,7 @@ TEST_F(DesktopCaptureDeviceTest, ScreenResolutionChangeConstantResolution) {
worker_pool_->GetSequencedTaskRunner(worker_pool_->GetSequenceToken()),
scoped_ptr<webrtc::DesktopCapturer>(mock_capturer));
- media::VideoCaptureCapability caps;
+ media::VideoCaptureFormat format;
base::WaitableEvent done_event(false, false);
int frame_size;
@@ -167,18 +169,18 @@ TEST_F(DesktopCaptureDeviceTest, ScreenResolutionChangeConstantResolution) {
EXPECT_CALL(*client, OnIncomingCapturedFrame(_, _, _, _, _, _, _))
.WillRepeatedly(
DoAll(SaveArg<1>(&frame_size),
- SaveArg<6>(&caps),
+ SaveArg<6>(&format),
InvokeWithoutArgs(&done_event, &base::WaitableEvent::Signal)));
- media::VideoCaptureCapability capture_format(
- kTestFrameWidth1,
- kTestFrameHeight1,
- kFrameRate,
- media::PIXEL_FORMAT_I420,
- media::ConstantResolutionVideoCaptureDevice);
+ media::VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(kTestFrameWidth1,
+ kTestFrameHeight1);
+ capture_params.requested_format.frame_rate = kFrameRate;
+ capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
capture_device.AllocateAndStart(
- capture_format, client.PassAs<media::VideoCaptureDevice::Client>());
+ capture_params, client.PassAs<media::VideoCaptureDevice::Client>());
// Capture at least two frames, to ensure that the source frame size has
// changed while capturing.
@@ -188,12 +190,12 @@ TEST_F(DesktopCaptureDeviceTest, ScreenResolutionChangeConstantResolution) {
capture_device.StopAndDeAllocate();
- EXPECT_EQ(kTestFrameWidth1, caps.width);
- EXPECT_EQ(kTestFrameHeight1, caps.height);
- EXPECT_EQ(kFrameRate, caps.frame_rate);
- EXPECT_EQ(media::PIXEL_FORMAT_ARGB, caps.color);
+ EXPECT_EQ(kTestFrameWidth1, format.frame_size.width());
+ EXPECT_EQ(kTestFrameHeight1, format.frame_size.height());
+ EXPECT_EQ(kFrameRate, format.frame_rate);
+ EXPECT_EQ(media::PIXEL_FORMAT_ARGB, format.pixel_format);
- EXPECT_EQ(caps.width * caps.height * 4, frame_size);
+ EXPECT_EQ(format.frame_size.GetArea() * 4, frame_size);
worker_pool_->FlushForTesting();
}
@@ -206,25 +208,25 @@ TEST_F(DesktopCaptureDeviceTest, ScreenResolutionChangeVariableResolution) {
worker_pool_->GetSequencedTaskRunner(worker_pool_->GetSequenceToken()),
scoped_ptr<webrtc::DesktopCapturer>(mock_capturer));
- media::VideoCaptureCapability caps;
+ media::VideoCaptureFormat format;
base::WaitableEvent done_event(false, false);
scoped_ptr<MockDeviceClient> client(new MockDeviceClient());
EXPECT_CALL(*client, OnError()).Times(0);
EXPECT_CALL(*client, OnIncomingCapturedFrame(_, _, _, _, _, _, _))
.WillRepeatedly(
- DoAll(SaveArg<6>(&caps),
+ DoAll(SaveArg<6>(&format),
InvokeWithoutArgs(&done_event, &base::WaitableEvent::Signal)));
- media::VideoCaptureCapability capture_format(
- kTestFrameWidth2,
- kTestFrameHeight2,
- kFrameRate,
- media::PIXEL_FORMAT_I420,
- media::VariableResolutionVideoCaptureDevice);
+ media::VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(kTestFrameWidth2,
+ kTestFrameHeight2);
+ capture_params.requested_format.frame_rate = kFrameRate;
+ capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
capture_device.AllocateAndStart(
- capture_format, client.PassAs<media::VideoCaptureDevice::Client>());
+ capture_params, client.PassAs<media::VideoCaptureDevice::Client>());
// Capture at least three frames, to ensure that the source frame size has
// changed at least twice while capturing.
@@ -236,10 +238,10 @@ TEST_F(DesktopCaptureDeviceTest, ScreenResolutionChangeVariableResolution) {
capture_device.StopAndDeAllocate();
- EXPECT_EQ(kTestFrameWidth1, caps.width);
- EXPECT_EQ(kTestFrameHeight1, caps.height);
- EXPECT_EQ(kFrameRate, caps.frame_rate);
- EXPECT_EQ(media::PIXEL_FORMAT_ARGB, caps.color);
+ EXPECT_EQ(kTestFrameWidth1, format.frame_size.width());
+ EXPECT_EQ(kTestFrameHeight1, format.frame_size.height());
+ EXPECT_EQ(kFrameRate, format.frame_rate);
+ EXPECT_EQ(media::PIXEL_FORMAT_ARGB, format.pixel_format);
worker_pool_->FlushForTesting();
}
diff --git a/content/browser/renderer_host/media/video_capture_controller.cc b/content/browser/renderer_host/media/video_capture_controller.cc
index 2930141..ea83696 100644
--- a/content/browser/renderer_host/media/video_capture_controller.cc
+++ b/content/browser/renderer_host/media/video_capture_controller.cc
@@ -20,7 +20,7 @@
#include "third_party/libyuv/include/libyuv.h"
#endif
-using media::VideoCaptureCapability;
+using media::VideoCaptureFormat;
namespace content {
@@ -48,27 +48,28 @@ class PoolBuffer : public media::VideoCaptureDevice::Client::Buffer {
} // anonymous namespace
struct VideoCaptureController::ControllerClient {
- ControllerClient(
- const VideoCaptureControllerID& id,
- VideoCaptureControllerEventHandler* handler,
- base::ProcessHandle render_process,
- const media::VideoCaptureParams& params)
+ ControllerClient(const VideoCaptureControllerID& id,
+ VideoCaptureControllerEventHandler* handler,
+ base::ProcessHandle render_process,
+ media::VideoCaptureSessionId session_id,
+ const media::VideoCaptureParams& params)
: controller_id(id),
event_handler(handler),
render_process_handle(render_process),
+ session_id(session_id),
parameters(params),
- session_closed(false) {
- }
+ session_closed(false) {}
~ControllerClient() {}
// ID used for identifying this object.
- VideoCaptureControllerID controller_id;
- VideoCaptureControllerEventHandler* event_handler;
+ const VideoCaptureControllerID controller_id;
+ VideoCaptureControllerEventHandler* const event_handler;
// Handle to the render process that will receive the capture buffers.
- base::ProcessHandle render_process_handle;
- media::VideoCaptureParams parameters;
+ const base::ProcessHandle render_process_handle;
+ const media::VideoCaptureSessionId session_id;
+ const media::VideoCaptureParams parameters;
// Buffers that are currently known to this client.
std::set<int> known_buffers;
@@ -115,7 +116,7 @@ class VideoCaptureController::VideoCaptureDeviceClient
int rotation,
bool flip_vert,
bool flip_horiz,
- const VideoCaptureCapability& frame_info)
+ const VideoCaptureFormat& frame_format)
OVERRIDE;
virtual void OnIncomingCapturedBuffer(const scoped_refptr<Buffer>& buffer,
media::VideoFrame::Format format,
@@ -167,13 +168,13 @@ void VideoCaptureController::AddClient(
const VideoCaptureControllerID& id,
VideoCaptureControllerEventHandler* event_handler,
base::ProcessHandle render_process,
+ media::VideoCaptureSessionId session_id,
const media::VideoCaptureParams& params) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
DVLOG(1) << "VideoCaptureController::AddClient, id " << id.device_id
- << ", (" << params.requested_format.width
- << ", " << params.requested_format.height
+ << ", " << params.requested_format.frame_size.ToString()
<< ", " << params.requested_format.frame_rate
- << ", " << params.session_id
+ << ", " << session_id
<< ")";
// Signal error in case device is already in error state.
@@ -186,8 +187,8 @@ void VideoCaptureController::AddClient(
if (FindClient(id, event_handler, controller_clients_))
return;
- ControllerClient* client = new ControllerClient(id, event_handler,
- render_process, params);
+ ControllerClient* client = new ControllerClient(
+ id, event_handler, render_process, session_id, params);
// If we already have gotten frame_info from the device, repeat it to the new
// client.
if (state_ == VIDEO_CAPTURE_STATE_STARTED) {
@@ -215,7 +216,7 @@ int VideoCaptureController::RemoveClient(
}
client->active_buffers.clear();
- int session_id = client->parameters.session_id;
+ int session_id = client->session_id;
controller_clients_.remove(client);
delete client;
@@ -266,24 +267,24 @@ void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedFrame(
int rotation,
bool flip_vert,
bool flip_horiz,
- const VideoCaptureCapability& frame_info) {
+ const VideoCaptureFormat& frame_format) {
TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedFrame");
- if (!frame_info.IsValid())
+ if (!frame_format.IsValid())
return;
// Chopped pixels in width/height in case video capture device has odd
// numbers for width/height.
int chopped_width = 0;
int chopped_height = 0;
- int new_width = frame_info.width;
- int new_height = frame_info.height;
+ int new_width = frame_format.frame_size.width();
+ int new_height = frame_format.frame_size.height();
- if (frame_info.width & 1) {
+ if (new_width & 1) {
--new_width;
chopped_width = 1;
}
- if (frame_info.height & 1) {
+ if (new_height & 1) {
--new_height;
chopped_height = 1;
}
@@ -324,7 +325,7 @@ void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedFrame(
else if (new_rotation_angle == 270)
rotation_mode = libyuv::kRotate270;
- switch (frame_info.color) {
+ switch (frame_format.pixel_format) {
case media::PIXEL_FORMAT_UNKNOWN: // Color format not set.
break;
case media::PIXEL_FORMAT_I420:
@@ -364,7 +365,7 @@ void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedFrame(
#if defined(OS_WIN)
// kRGB24 on Windows start at the bottom line and has a negative stride. This
// is not supported by libyuv, so the media API is used instead.
- if (frame_info.color == media::PIXEL_FORMAT_RGB24) {
+ if (frame_format.pixel_format == media::PIXEL_FORMAT_RGB24) {
// Rotation and flipping is not supported in kRGB24 and OS_WIN case.
DCHECK(!rotation && !flip_vert && !flip_horiz);
need_convert_rgb24_on_win = true;
@@ -434,7 +435,7 @@ void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedFrame(
controller_,
buffer,
dimensions,
- frame_info.frame_rate,
+ frame_format.frame_rate,
timestamp));
}
@@ -524,11 +525,8 @@ void VideoCaptureController::DoIncomingCapturedI420BufferOnIOThread(
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
DCHECK_NE(buffer->id(), VideoCaptureBufferPool::kInvalidId);
- media::VideoCaptureFormat frame_format(
- dimensions.width(),
- dimensions.height(),
- frame_rate,
- media::VariableResolutionVideoCaptureDevice);
+ VideoCaptureFormat frame_format(
+ dimensions, frame_rate, media::PIXEL_FORMAT_I420);
int count = 0;
if (state_ == VIDEO_CAPTURE_STATE_STARTED) {
@@ -611,7 +609,7 @@ VideoCaptureController::FindClient(
const ControllerClients& clients) {
for (ControllerClients::const_iterator client_it = clients.begin();
client_it != clients.end(); ++client_it) {
- if ((*client_it)->parameters.session_id == session_id) {
+ if ((*client_it)->session_id == session_id) {
return *client_it;
}
}
diff --git a/content/browser/renderer_host/media/video_capture_controller.h b/content/browser/renderer_host/media/video_capture_controller.h
index 99fcde6..badab3b 100644
--- a/content/browser/renderer_host/media/video_capture_controller.h
+++ b/content/browser/renderer_host/media/video_capture_controller.h
@@ -82,6 +82,7 @@ class CONTENT_EXPORT VideoCaptureController {
void AddClient(const VideoCaptureControllerID& id,
VideoCaptureControllerEventHandler* event_handler,
base::ProcessHandle render_process,
+ media::VideoCaptureSessionId session_id,
const media::VideoCaptureParams& params);
// Stop video capture. This will take back all buffers held by by
diff --git a/content/browser/renderer_host/media/video_capture_controller_unittest.cc b/content/browser/renderer_host/media/video_capture_controller_unittest.cc
index db90c46..20cc4e4 100644
--- a/content/browser/renderer_host/media/video_capture_controller_unittest.cc
+++ b/content/browser/renderer_host/media/video_capture_controller_unittest.cc
@@ -113,18 +113,13 @@ class VideoCaptureControllerTest : public testing::Test {
// track of clients.
TEST_F(VideoCaptureControllerTest, AddAndRemoveClients) {
media::VideoCaptureParams session_100;
- session_100.session_id = 100;
session_100.requested_format = media::VideoCaptureFormat(
- 320, 240, 30, media::ConstantResolutionVideoCaptureDevice);
-
+ gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420);
media::VideoCaptureParams session_200 = session_100;
- session_200.session_id = 200;
media::VideoCaptureParams session_300 = session_100;
- session_300.session_id = 300;
media::VideoCaptureParams session_400 = session_100;
- session_400.session_id = 400;
// Intentionally use the same route ID for two of the clients: the device_ids
// are a per-VideoCaptureHost namespace, and can overlap across hosts.
@@ -136,18 +131,27 @@ TEST_F(VideoCaptureControllerTest, AddAndRemoveClients) {
// Clients in controller: []
ASSERT_EQ(0, controller_->GetClientCount())
<< "Client count should initially be zero.";
- controller_->AddClient(client_a_route_1, client_a_.get(),
- base::kNullProcessHandle, session_100);
+ controller_->AddClient(client_a_route_1,
+ client_a_.get(),
+ base::kNullProcessHandle,
+ 100,
+ session_100);
// Clients in controller: [A/1]
ASSERT_EQ(1, controller_->GetClientCount())
<< "Adding client A/1 should bump client count.";;
- controller_->AddClient(client_a_route_2, client_a_.get(),
- base::kNullProcessHandle, session_200);
+ controller_->AddClient(client_a_route_2,
+ client_a_.get(),
+ base::kNullProcessHandle,
+ 200,
+ session_200);
// Clients in controller: [A/1, A/2]
ASSERT_EQ(2, controller_->GetClientCount())
<< "Adding client A/2 should bump client count.";
- controller_->AddClient(client_b_route_1, client_b_.get(),
- base::kNullProcessHandle, session_300);
+ controller_->AddClient(client_b_route_1,
+ client_b_.get(),
+ base::kNullProcessHandle,
+ 300,
+ session_300);
// Clients in controller: [A/1, A/2, B/1]
ASSERT_EQ(3, controller_->GetClientCount())
<< "Adding client B/1 should bump client count.";
@@ -166,8 +170,11 @@ TEST_F(VideoCaptureControllerTest, AddAndRemoveClients) {
<< "Removing client B/1 should return its session_id.";
// Clients in controller: [A/1]
ASSERT_EQ(1, controller_->GetClientCount());
- controller_->AddClient(client_b_route_2, client_b_.get(),
- base::kNullProcessHandle, session_400);
+ controller_->AddClient(client_b_route_2,
+ client_b_.get(),
+ base::kNullProcessHandle,
+ 400,
+ session_400);
// Clients in controller: [A/1, B/2]
EXPECT_CALL(*client_a_, DoEnded(client_a_route_1)).Times(1);
@@ -207,26 +214,21 @@ TEST_F(VideoCaptureControllerTest, AddAndRemoveClients) {
// thread and is intended to behave deterministically.
TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) {
media::VideoCaptureParams session_100;
- session_100.session_id = 100;
session_100.requested_format = media::VideoCaptureFormat(
- 320, 240, 30, media::ConstantResolutionVideoCaptureDevice);
+ gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420);
media::VideoCaptureParams session_200 = session_100;
- session_200.session_id = 200;
media::VideoCaptureParams session_300 = session_100;
- session_300.session_id = 300;
media::VideoCaptureParams session_1 = session_100;
- session_1.session_id = 1;
gfx::Size capture_resolution(444, 200);
// The device format needn't match the VideoCaptureParams (the camera can do
// what it wants). Pick something random.
- media::VideoCaptureCapability device_format(
- 10, 10, 25, media::PIXEL_FORMAT_RGB24,
- media::ConstantResolutionVideoCaptureDevice);
+ media::VideoCaptureFormat device_format(
+ gfx::Size(10, 10), 25, media::PIXEL_FORMAT_RGB24);
const VideoCaptureControllerID client_a_route_1(0xa1a1a1a1);
const VideoCaptureControllerID client_a_route_2(0xa2a2a2a2);
@@ -234,12 +236,21 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) {
const VideoCaptureControllerID client_b_route_2(0xb2b2b2b2);
// Start with two clients.
- controller_->AddClient(client_a_route_1, client_a_.get(),
- base::kNullProcessHandle, session_100);
- controller_->AddClient(client_b_route_1, client_b_.get(),
- base::kNullProcessHandle, session_300);
- controller_->AddClient(client_a_route_2, client_a_.get(),
- base::kNullProcessHandle, session_200);
+ controller_->AddClient(client_a_route_1,
+ client_a_.get(),
+ base::kNullProcessHandle,
+ 100,
+ session_100);
+ controller_->AddClient(client_b_route_1,
+ client_b_.get(),
+ base::kNullProcessHandle,
+ 300,
+ session_300);
+ controller_->AddClient(client_a_route_2,
+ client_a_.get(),
+ base::kNullProcessHandle,
+ 200,
+ session_200);
ASSERT_EQ(3, controller_->GetClientCount());
// Now, simulate an incoming captured buffer from the capture device. As a
@@ -299,8 +310,11 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) {
Mock::VerifyAndClearExpectations(client_b_.get());
// Add a fourth client now that some buffers have come through.
- controller_->AddClient(client_b_route_2, client_b_.get(),
- base::kNullProcessHandle, session_1);
+ controller_->AddClient(client_b_route_2,
+ client_b_.get(),
+ base::kNullProcessHandle,
+ 1,
+ session_1);
Mock::VerifyAndClearExpectations(client_b_.get());
// Third, fourth, and fifth buffers. Pretend they all arrive at the same time.
@@ -381,20 +395,18 @@ TEST_F(VideoCaptureControllerTest, NormalCaptureMultipleClients) {
// behavior of various operations after the error state has been signalled.
TEST_F(VideoCaptureControllerTest, ErrorBeforeDeviceCreation) {
media::VideoCaptureParams session_100;
- session_100.session_id = 100;
session_100.requested_format = media::VideoCaptureFormat(
- 320, 240, 30, media::ConstantResolutionVideoCaptureDevice);
+ gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420);
media::VideoCaptureParams session_200 = session_100;
- session_200.session_id = 200;
const gfx::Size capture_resolution(320, 240);
const VideoCaptureControllerID route_id(0x99);
// Start with one client.
- controller_->AddClient(route_id, client_a_.get(),
- base::kNullProcessHandle, session_100);
+ controller_->AddClient(
+ route_id, client_a_.get(), base::kNullProcessHandle, 100, session_100);
device_->OnError();
EXPECT_CALL(*client_a_, DoError(route_id)).Times(1);
base::RunLoop().RunUntilIdle();
@@ -403,8 +415,8 @@ TEST_F(VideoCaptureControllerTest, ErrorBeforeDeviceCreation) {
// Second client connects after the error state. It also should get told of
// the error.
EXPECT_CALL(*client_b_, DoError(route_id)).Times(1);
- controller_->AddClient(route_id, client_b_.get(),
- base::kNullProcessHandle, session_200);
+ controller_->AddClient(
+ route_id, client_b_.get(), base::kNullProcessHandle, 200, session_200);
base::RunLoop().RunUntilIdle();
Mock::VerifyAndClearExpectations(client_b_.get());
@@ -423,22 +435,18 @@ TEST_F(VideoCaptureControllerTest, ErrorBeforeDeviceCreation) {
// behavior of various operations after the error state has been signalled.
TEST_F(VideoCaptureControllerTest, ErrorAfterDeviceCreation) {
media::VideoCaptureParams session_100;
- session_100.session_id = 100;
session_100.requested_format = media::VideoCaptureFormat(
- 320, 240, 30, media::ConstantResolutionVideoCaptureDevice);
+ gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420);
media::VideoCaptureParams session_200 = session_100;
- session_200.session_id = 200;
const VideoCaptureControllerID route_id(0x99);
// Start with one client.
- controller_->AddClient(route_id, client_a_.get(),
- base::kNullProcessHandle, session_100);
- // OnFrameInfo from the VCD should become a no-op after the error occurs.
- media::VideoCaptureCapability device_format(
- 10, 10, 25, media::PIXEL_FORMAT_ARGB,
- media::ConstantResolutionVideoCaptureDevice);
+ controller_->AddClient(
+ route_id, client_a_.get(), base::kNullProcessHandle, 100, session_100);
+ media::VideoCaptureFormat device_format(
+ gfx::Size(10, 10), 25, media::PIXEL_FORMAT_ARGB);
// Start the device. Then, before the first buffer, signal an error and
// deliver the buffer. The error should be propagated to clients; the buffer
@@ -466,8 +474,8 @@ TEST_F(VideoCaptureControllerTest, ErrorAfterDeviceCreation) {
// Second client connects after the error state. It also should get told of
// the error.
EXPECT_CALL(*client_b_, DoError(route_id)).Times(1);
- controller_->AddClient(route_id, client_b_.get(),
- base::kNullProcessHandle, session_200);
+ controller_->AddClient(
+ route_id, client_b_.get(), base::kNullProcessHandle, 200, session_200);
Mock::VerifyAndClearExpectations(client_b_.get());
}
diff --git a/content/browser/renderer_host/media/video_capture_host.cc b/content/browser/renderer_host/media/video_capture_host.cc
index eb51246..039d981 100644
--- a/content/browser/renderer_host/media/video_capture_host.cc
+++ b/content/browser/renderer_host/media/video_capture_host.cc
@@ -164,36 +164,43 @@ bool VideoCaptureHost::OnMessageReceived(const IPC::Message& message,
}
void VideoCaptureHost::OnStartCapture(int device_id,
+ media::VideoCaptureSessionId session_id,
const media::VideoCaptureParams& params) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
- DVLOG(1) << "VideoCaptureHost::OnStartCapture, device_id " << device_id
- << ", (" << params.requested_format.width
- << ", " << params.requested_format.height
- << ", " << params.requested_format.frame_rate
- << ", " << params.session_id << ", variable resolution device:"
- << ((params.requested_format.frame_size_type ==
- media::VariableResolutionVideoCaptureDevice) ? "yes" : "no")
+ DVLOG(1) << "VideoCaptureHost::OnStartCapture:"
+ << " session_id=" << session_id
+ << ", device_id=" << device_id
+ << ", format=" << params.requested_format.frame_size.ToString()
+ << "@" << params.requested_format.frame_rate
+ << " (" << (params.allow_resolution_change ? "variable" : "constant")
<< ")";
VideoCaptureControllerID controller_id(device_id);
DCHECK(entries_.find(controller_id) == entries_.end());
entries_[controller_id] = base::WeakPtr<VideoCaptureController>();
media_stream_manager_->video_capture_manager()->StartCaptureForClient(
- params, PeerHandle(), controller_id, this, base::Bind(
- &VideoCaptureHost::OnControllerAdded, this, device_id, params));
+ session_id,
+ params,
+ PeerHandle(),
+ controller_id,
+ this,
+ base::Bind(&VideoCaptureHost::OnControllerAdded, this, device_id));
}
void VideoCaptureHost::OnControllerAdded(
- int device_id, const media::VideoCaptureParams& params,
+ int device_id,
const base::WeakPtr<VideoCaptureController>& controller) {
BrowserThread::PostTask(
- BrowserThread::IO, FROM_HERE,
+ BrowserThread::IO,
+ FROM_HERE,
base::Bind(&VideoCaptureHost::DoControllerAddedOnIOThread,
- this, device_id, params, controller));
+ this,
+ device_id,
+ controller));
}
void VideoCaptureHost::DoControllerAddedOnIOThread(
- int device_id, const media::VideoCaptureParams& params,
+ int device_id,
const base::WeakPtr<VideoCaptureController>& controller) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
VideoCaptureControllerID controller_id(device_id);
diff --git a/content/browser/renderer_host/media/video_capture_host.h b/content/browser/renderer_host/media/video_capture_host.h
index 4648466..c1f8e3c 100644
--- a/content/browser/renderer_host/media/video_capture_host.h
+++ b/content/browser/renderer_host/media/video_capture_host.h
@@ -104,16 +104,17 @@ class CONTENT_EXPORT VideoCaptureHost
virtual ~VideoCaptureHost();
// IPC message: Start capture on the VideoCaptureDevice referenced by
- // VideoCaptureParams::session_id. |device_id| is an id created by
- // VideoCaptureMessageFilter to identify a session
- // between a VideoCaptureMessageFilter and a VideoCaptureHost.
+ // |session_id|. |device_id| is an id created by VideoCaptureMessageFilter
+ // to identify a session between a VideoCaptureMessageFilter and a
+ // VideoCaptureHost.
void OnStartCapture(int device_id,
+ media::VideoCaptureSessionId session_id,
const media::VideoCaptureParams& params);
void OnControllerAdded(
- int device_id, const media::VideoCaptureParams& params,
+ int device_id,
const base::WeakPtr<VideoCaptureController>& controller);
void DoControllerAddedOnIOThread(
- int device_id, const media::VideoCaptureParams& params,
+ int device_id,
const base::WeakPtr<VideoCaptureController>& controller);
// IPC message: Stop capture on device referenced by |device_id|.
diff --git a/content/browser/renderer_host/media/video_capture_host_unittest.cc b/content/browser/renderer_host/media/video_capture_host_unittest.cc
index cdb465b..43a9d8d 100644
--- a/content/browser/renderer_host/media/video_capture_host_unittest.cc
+++ b/content/browser/renderer_host/media/video_capture_host_unittest.cc
@@ -199,13 +199,14 @@ class MockVideoCaptureHost : public VideoCaptureHost {
ASSERT_TRUE(dib != NULL);
if (dump_video_) {
if (!format_.IsValid()) {
- dumper_.StartDump(frame_format.width, frame_format.height);
+ dumper_.StartDump(frame_format.frame_size.width(),
+ frame_format.frame_size.height());
format_ = frame_format;
}
- ASSERT_EQ(format_.width, frame_format.width)
+ ASSERT_EQ(format_.frame_size.width(), frame_format.frame_size.width())
+ << "Dump format does not handle variable resolution.";
+ ASSERT_EQ(format_.frame_size.height(), frame_format.frame_size.height())
<< "Dump format does not handle variable resolution.";
- ASSERT_EQ(format_.height, frame_format.height)
- << "Dump format does not handle variable resolution.";;
dumper_.NewVideoFrame(dib->memory());
}
@@ -347,9 +348,8 @@ class VideoCaptureHostTest : public testing::Test {
media::VideoCaptureParams params;
params.requested_format = media::VideoCaptureFormat(
- 352, 288, 30, media::ConstantResolutionVideoCaptureDevice);
- params.session_id = opened_session_id_;
- host_->OnStartCapture(kDeviceId, params);
+ gfx::Size(352, 288), 30, media::PIXEL_FORMAT_I420);
+ host_->OnStartCapture(kDeviceId, opened_session_id_, params);
run_loop.Run();
}
@@ -361,9 +361,8 @@ class VideoCaptureHostTest : public testing::Test {
EXPECT_CALL(*host_, OnStateChanged(kDeviceId, VIDEO_CAPTURE_STATE_STOPPED));
media::VideoCaptureParams params;
params.requested_format = media::VideoCaptureFormat(
- 352, 288, 30, media::ConstantResolutionVideoCaptureDevice);
- params.session_id = opened_session_id_;
- host_->OnStartCapture(kDeviceId, params);
+ gfx::Size(352, 288), 30, media::PIXEL_FORMAT_I420);
+ host_->OnStartCapture(kDeviceId, opened_session_id_, params);
host_->OnStopCapture(kDeviceId);
run_loop.RunUntilIdle();
}
@@ -380,11 +379,10 @@ class VideoCaptureHostTest : public testing::Test {
.WillOnce(ExitMessageLoop(message_loop_, run_loop.QuitClosure()));
media::VideoCaptureParams params;
- params.requested_format = media::VideoCaptureFormat(
- width, height, frame_rate, media::ConstantResolutionVideoCaptureDevice);
- params.session_id = opened_session_id_;
+ params.requested_format =
+ media::VideoCaptureFormat(gfx::Size(width, height), frame_rate);
host_->SetDumpVideo(true);
- host_->OnStartCapture(kDeviceId, params);
+ host_->OnStartCapture(kDeviceId, opened_session_id_, params);
run_loop.Run();
}
#endif
diff --git a/content/browser/renderer_host/media/video_capture_manager.cc b/content/browser/renderer_host/media/video_capture_manager.cc
index a1fcad7..18a7f2b 100644
--- a/content/browser/renderer_host/media/video_capture_manager.cc
+++ b/content/browser/renderer_host/media/video_capture_manager.cc
@@ -133,7 +133,7 @@ void VideoCaptureManager::UseFakeDevice() {
void VideoCaptureManager::DoStartDeviceOnDeviceThread(
DeviceEntry* entry,
- const media::VideoCaptureCapability& capture_params,
+ const media::VideoCaptureParams& params,
scoped_ptr<media::VideoCaptureDevice::Client> device_client) {
SCOPED_UMA_HISTOGRAM_TIMER("Media.VideoCaptureManager.StartDeviceTime");
DCHECK(IsOnDeviceThread());
@@ -178,25 +178,23 @@ void VideoCaptureManager::DoStartDeviceOnDeviceThread(
return;
}
- video_capture_device->AllocateAndStart(capture_params, device_client.Pass());
+ video_capture_device->AllocateAndStart(params, device_client.Pass());
entry->video_capture_device = video_capture_device.Pass();
}
void VideoCaptureManager::StartCaptureForClient(
+ media::VideoCaptureSessionId session_id,
const media::VideoCaptureParams& params,
base::ProcessHandle client_render_process,
VideoCaptureControllerID client_id,
VideoCaptureControllerEventHandler* client_handler,
const DoneCB& done_cb) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
- DVLOG(1) << "VideoCaptureManager::StartCaptureForClient, ("
- << params.requested_format.width
- << ", " << params.requested_format.height
- << ", " << params.requested_format.frame_rate
- << ", #" << params.session_id
- << ")";
-
- DeviceEntry* entry = GetOrCreateDeviceEntry(params.session_id);
+ DVLOG(1) << "VideoCaptureManager::StartCaptureForClient, "
+ << params.requested_format.frame_size.ToString() << ", "
+ << params.requested_format.frame_rate << ", #" << session_id << ")";
+
+ DeviceEntry* entry = GetOrCreateDeviceEntry(session_id);
if (!entry) {
done_cb.Run(base::WeakPtr<VideoCaptureController>());
return;
@@ -209,24 +207,19 @@ void VideoCaptureManager::StartCaptureForClient(
DVLOG(1) << "VideoCaptureManager starting device (type = "
<< entry->stream_type << ", id = " << entry->id << ")";
- media::VideoCaptureCapability params_as_capability;
- params_as_capability.width = params.requested_format.width;
- params_as_capability.height = params.requested_format.height;
- params_as_capability.frame_rate = params.requested_format.frame_rate;
- params_as_capability.frame_size_type =
- params.requested_format.frame_size_type;
-
- device_loop_->PostTask(FROM_HERE, base::Bind(
- &VideoCaptureManager::DoStartDeviceOnDeviceThread, this,
- entry, params_as_capability,
- base::Passed(entry->video_capture_controller->NewDeviceClient())));
+ device_loop_->PostTask(
+ FROM_HERE,
+ base::Bind(
+ &VideoCaptureManager::DoStartDeviceOnDeviceThread,
+ this,
+ entry,
+ params,
+ base::Passed(entry->video_capture_controller->NewDeviceClient())));
}
// Run the callback first, as AddClient() may trigger OnFrameInfo().
done_cb.Run(entry->video_capture_controller->GetWeakPtr());
- entry->video_capture_controller->AddClient(client_id,
- client_handler,
- client_render_process,
- params);
+ entry->video_capture_controller->AddClient(
+ client_id, client_handler, client_render_process, session_id, params);
}
void VideoCaptureManager::StopCaptureForClient(
diff --git a/content/browser/renderer_host/media/video_capture_manager.h b/content/browser/renderer_host/media/video_capture_manager.h
index 6f0b0ec..67dbb75 100644
--- a/content/browser/renderer_host/media/video_capture_manager.h
+++ b/content/browser/renderer_host/media/video_capture_manager.h
@@ -58,7 +58,7 @@ class CONTENT_EXPORT VideoCaptureManager : public MediaStreamProvider {
// Called by VideoCaptureHost to locate a capture device for |capture_params|,
// adding the Host as a client of the device's controller if successful. The
- // value of |capture_params.session_id| controls which device is selected;
+ // value of |session_id| controls which device is selected;
// this value should be a session id previously returned by Open().
//
// If the device is not already started (i.e., no other client is currently
@@ -68,7 +68,8 @@ class CONTENT_EXPORT VideoCaptureManager : public MediaStreamProvider {
// On success, the controller is returned via calling |done_cb|, indicating
// that the client was successfully added. A NULL controller is passed to
// the callback on failure.
- void StartCaptureForClient(const media::VideoCaptureParams& capture_params,
+ void StartCaptureForClient(media::VideoCaptureSessionId session_id,
+ const media::VideoCaptureParams& capture_params,
base::ProcessHandle client_render_process,
VideoCaptureControllerID client_id,
VideoCaptureControllerEventHandler* client_handler,
@@ -120,7 +121,7 @@ class CONTENT_EXPORT VideoCaptureManager : public MediaStreamProvider {
// the device.
void DoStartDeviceOnDeviceThread(
DeviceEntry* entry,
- const media::VideoCaptureCapability& capture_params,
+ const media::VideoCaptureParams& params,
scoped_ptr<media::VideoCaptureDevice::Client> client);
// Stop and destroy the VideoCaptureDevice held in
diff --git a/content/browser/renderer_host/media/video_capture_manager_unittest.cc b/content/browser/renderer_host/media/video_capture_manager_unittest.cc
index a986e0b..e4d3bb2 100644
--- a/content/browser/renderer_host/media/video_capture_manager_unittest.cc
+++ b/content/browser/renderer_host/media/video_capture_manager_unittest.cc
@@ -99,16 +99,21 @@ class VideoCaptureManagerTest : public testing::Test {
VideoCaptureControllerID StartClient(int session_id, bool expect_success) {
media::VideoCaptureParams params;
- params.session_id = session_id;
params.requested_format = media::VideoCaptureFormat(
- 320, 240, 30, media::ConstantResolutionVideoCaptureDevice);
+ gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420);
VideoCaptureControllerID client_id(next_client_id_++);
base::RunLoop run_loop;
vcm_->StartCaptureForClient(
- params, base::kNullProcessHandle, client_id, frame_observer_.get(),
+ session_id,
+ params,
+ base::kNullProcessHandle,
+ client_id,
+ frame_observer_.get(),
base::Bind(&VideoCaptureManagerTest::OnGotControllerCallback,
- base::Unretained(this), client_id, run_loop.QuitClosure(),
+ base::Unretained(this),
+ client_id,
+ run_loop.QuitClosure(),
expect_success));
run_loop.Run();
return client_id;
diff --git a/content/browser/renderer_host/media/web_contents_video_capture_device.cc b/content/browser/renderer_host/media/web_contents_video_capture_device.cc
index d45f543..5a5efc2f 100644
--- a/content/browser/renderer_host/media/web_contents_video_capture_device.cc
+++ b/content/browser/renderer_host/media/web_contents_video_capture_device.cc
@@ -411,7 +411,6 @@ ThreadSafeCaptureOracle::ThreadSafeCaptureOracle(
capture_size_(capture_size),
frame_rate_(frame_rate) {}
-
bool ThreadSafeCaptureOracle::ObserveEventAndDecideCapture(
VideoCaptureOracle::Event event,
base::Time event_time,
@@ -961,9 +960,7 @@ class WebContentsVideoCaptureDevice::Impl : public base::SupportsWeakPtr<Impl> {
virtual ~Impl();
// Asynchronous requests to change WebContentsVideoCaptureDevice::Impl state.
- void AllocateAndStart(int width,
- int height,
- int frame_rate,
+ void AllocateAndStart(const media::VideoCaptureParams& params,
scoped_ptr<media::VideoCaptureDevice::Client> client);
void StopAndDeAllocate();
@@ -1025,9 +1022,7 @@ WebContentsVideoCaptureDevice::Impl::Impl(int render_process_id,
render_thread_("WebContentsVideo_RenderThread") {}
void WebContentsVideoCaptureDevice::Impl::AllocateAndStart(
- int width,
- int height,
- int frame_rate,
+ const media::VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
DCHECK(thread_checker_.CalledOnValidThread());
@@ -1036,8 +1031,8 @@ void WebContentsVideoCaptureDevice::Impl::AllocateAndStart(
return;
}
- if (frame_rate <= 0) {
- DVLOG(1) << "invalid frame_rate: " << frame_rate;
+ if (params.requested_format.frame_rate <= 0) {
+ DVLOG(1) << "invalid frame_rate: " << params.requested_format.frame_rate;
client->OnError();
return;
}
@@ -1050,23 +1045,26 @@ void WebContentsVideoCaptureDevice::Impl::AllocateAndStart(
// Frame dimensions must each be a positive, even integer, since the client
// wants (or will convert to) YUV420.
- width = MakeEven(width);
- height = MakeEven(height);
- if (width < kMinFrameWidth || height < kMinFrameHeight) {
- DVLOG(1) << "invalid width (" << width << ") and/or height ("
- << height << ")";
+ gfx::Size frame_size(MakeEven(params.requested_format.frame_size.width()),
+ MakeEven(params.requested_format.frame_size.height()));
+ if (frame_size.width() < kMinFrameWidth ||
+ frame_size.height() < kMinFrameHeight) {
+ DVLOG(1) << "invalid frame size: " << frame_size.ToString();
client->OnError();
return;
}
base::TimeDelta capture_period = base::TimeDelta::FromMicroseconds(
- 1000000.0 / frame_rate + 0.5);
+ 1000000.0 / params.requested_format.frame_rate + 0.5);
scoped_ptr<VideoCaptureOracle> oracle(
new VideoCaptureOracle(capture_period,
kAcceleratedSubscriberIsSupported));
- oracle_proxy_ = new ThreadSafeCaptureOracle(
- client.Pass(), oracle.Pass(), gfx::Size(width, height), frame_rate);
+ oracle_proxy_ =
+ new ThreadSafeCaptureOracle(client.Pass(),
+ oracle.Pass(),
+ frame_size,
+ params.requested_format.frame_rate);
// Allocates the CaptureMachine. The CaptureMachine will be tracking render
// view swapping over its lifetime, and we don't want to lose our reference to
@@ -1189,14 +1187,10 @@ media::VideoCaptureDevice* WebContentsVideoCaptureDevice::Create(
}
void WebContentsVideoCaptureDevice::AllocateAndStart(
- const media::VideoCaptureCapability& capture_format,
+ const media::VideoCaptureParams& params,
scoped_ptr<Client> client) {
- DVLOG(1) << "Allocating " << capture_format.width << "x"
- << capture_format.height;
- impl_->AllocateAndStart(capture_format.width,
- capture_format.height,
- capture_format.frame_rate,
- client.Pass());
+ DVLOG(1) << "Allocating " << params.requested_format.frame_size.ToString();
+ impl_->AllocateAndStart(params, client.Pass());
}
void WebContentsVideoCaptureDevice::StopAndDeAllocate() {
diff --git a/content/browser/renderer_host/media/web_contents_video_capture_device.h b/content/browser/renderer_host/media/web_contents_video_capture_device.h
index 35d42d5..cd99a22 100644
--- a/content/browser/renderer_host/media/web_contents_video_capture_device.h
+++ b/content/browser/renderer_host/media/web_contents_video_capture_device.h
@@ -42,9 +42,8 @@ class CONTENT_EXPORT WebContentsVideoCaptureDevice
virtual ~WebContentsVideoCaptureDevice();
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(
- const media::VideoCaptureCapability& capture_format,
- scoped_ptr<Client> client) OVERRIDE;
+ virtual void AllocateAndStart(const media::VideoCaptureParams& params,
+ scoped_ptr<Client> client) OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
private:
diff --git a/content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc b/content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc
index 9ba692d..5ef2ef9 100644
--- a/content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc
+++ b/content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc
@@ -339,7 +339,7 @@ class StubClient : public media::VideoCaptureDevice::Client {
int rotation,
bool flip_vert,
bool flip_horiz,
- const media::VideoCaptureCapability& frame_info) OVERRIDE {
+ const media::VideoCaptureFormat& frame_format) OVERRIDE {
FAIL();
}
@@ -594,14 +594,12 @@ TEST_F(WebContentsVideoCaptureDeviceTest, InvalidInitialWebContentsError) {
// practice; we should be able to recover gracefully.
ResetWebContents();
- media::VideoCaptureCapability capture_format(
- kTestWidth,
- kTestHeight,
- kTestFramesPerSecond,
- media::PIXEL_FORMAT_I420,
- media::ConstantResolutionVideoCaptureDevice);
- device()->AllocateAndStart(
- capture_format, client_observer()->PassClient());
+ media::VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight);
+ capture_params.requested_format.frame_rate = kTestFramesPerSecond;
+ capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device()->AllocateAndStart(capture_params, client_observer()->PassClient());
ASSERT_NO_FATAL_FAILURE(client_observer()->WaitForError());
device()->StopAndDeAllocate();
}
@@ -609,14 +607,12 @@ TEST_F(WebContentsVideoCaptureDeviceTest, InvalidInitialWebContentsError) {
TEST_F(WebContentsVideoCaptureDeviceTest, WebContentsDestroyed) {
// We'll simulate the tab being closed after the capture pipeline is up and
// running.
- media::VideoCaptureCapability capture_format(
- kTestWidth,
- kTestHeight,
- kTestFramesPerSecond,
- media::PIXEL_FORMAT_I420,
- media::ConstantResolutionVideoCaptureDevice);
- device()->AllocateAndStart(
- capture_format, client_observer()->PassClient());
+ media::VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight);
+ capture_params.requested_format.frame_rate = kTestFramesPerSecond;
+ capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device()->AllocateAndStart(capture_params, client_observer()->PassClient());
// Do one capture to prove
source()->SetSolidColor(SK_ColorRED);
SimulateDrawEvent();
@@ -635,14 +631,12 @@ TEST_F(WebContentsVideoCaptureDeviceTest, WebContentsDestroyed) {
TEST_F(WebContentsVideoCaptureDeviceTest,
StopDeviceBeforeCaptureMachineCreation) {
- media::VideoCaptureCapability capture_format(
- kTestWidth,
- kTestHeight,
- kTestFramesPerSecond,
- media::PIXEL_FORMAT_I420,
- media::ConstantResolutionVideoCaptureDevice);
- device()->AllocateAndStart(
- capture_format, client_observer()->PassClient());
+ media::VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight);
+ capture_params.requested_format.frame_rate = kTestFramesPerSecond;
+ capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device()->AllocateAndStart(capture_params, client_observer()->PassClient());
// Make a point of not running the UI messageloop here.
device()->StopAndDeAllocate();
@@ -659,14 +653,12 @@ TEST_F(WebContentsVideoCaptureDeviceTest, StopWithRendererWorkToDo) {
// Set up the test to use RGB copies and an normal
source()->SetCanCopyToVideoFrame(false);
source()->SetUseFrameSubscriber(false);
- media::VideoCaptureCapability capture_format(
- kTestWidth,
- kTestHeight,
- kTestFramesPerSecond,
- media::PIXEL_FORMAT_I420,
- media::ConstantResolutionVideoCaptureDevice);
- device()->AllocateAndStart(
- capture_format, client_observer()->PassClient());
+ media::VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight);
+ capture_params.requested_format.frame_rate = kTestFramesPerSecond;
+ capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device()->AllocateAndStart(capture_params, client_observer()->PassClient());
base::RunLoop().RunUntilIdle();
@@ -681,14 +673,12 @@ TEST_F(WebContentsVideoCaptureDeviceTest, StopWithRendererWorkToDo) {
}
TEST_F(WebContentsVideoCaptureDeviceTest, DeviceRestart) {
- media::VideoCaptureCapability capture_format(
- kTestWidth,
- kTestHeight,
- kTestFramesPerSecond,
- media::PIXEL_FORMAT_I420,
- media::ConstantResolutionVideoCaptureDevice);
- device()->AllocateAndStart(
- capture_format, client_observer()->PassClient());
+ media::VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight);
+ capture_params.requested_format.frame_rate = kTestFramesPerSecond;
+ capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device()->AllocateAndStart(capture_params, client_observer()->PassClient());
base::RunLoop().RunUntilIdle();
source()->SetSolidColor(SK_ColorRED);
SimulateDrawEvent();
@@ -707,7 +697,7 @@ TEST_F(WebContentsVideoCaptureDeviceTest, DeviceRestart) {
base::RunLoop().RunUntilIdle();
StubClientObserver observer2;
- device()->AllocateAndStart(capture_format, observer2.PassClient());
+ device()->AllocateAndStart(capture_params, observer2.PassClient());
source()->SetSolidColor(SK_ColorBLUE);
SimulateDrawEvent();
ASSERT_NO_FATAL_FAILURE(observer2.WaitForNextColor(SK_ColorBLUE));
@@ -722,14 +712,12 @@ TEST_F(WebContentsVideoCaptureDeviceTest, DeviceRestart) {
// consumer. The test will alternate between the three capture paths, simulating
// falling in and out of accelerated compositing.
TEST_F(WebContentsVideoCaptureDeviceTest, GoesThroughAllTheMotions) {
- media::VideoCaptureCapability capture_format(
- kTestWidth,
- kTestHeight,
- kTestFramesPerSecond,
- media::PIXEL_FORMAT_I420,
- media::ConstantResolutionVideoCaptureDevice);
- device()->AllocateAndStart(
- capture_format, client_observer()->PassClient());
+ media::VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight);
+ capture_params.requested_format.frame_rate = kTestFramesPerSecond;
+ capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device()->AllocateAndStart(capture_params, client_observer()->PassClient());
for (int i = 0; i < 6; i++) {
const char* name = NULL;
@@ -775,18 +763,17 @@ TEST_F(WebContentsVideoCaptureDeviceTest, GoesThroughAllTheMotions) {
}
TEST_F(WebContentsVideoCaptureDeviceTest, RejectsInvalidAllocateParams) {
- media::VideoCaptureCapability capture_format(
- 1280,
- 720,
- -2,
- media::PIXEL_FORMAT_I420,
- media::ConstantResolutionVideoCaptureDevice);
+ media::VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(1280, 720);
+ capture_params.requested_format.frame_rate = -2;
+ capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
BrowserThread::PostTask(
BrowserThread::UI,
FROM_HERE,
base::Bind(&media::VideoCaptureDevice::AllocateAndStart,
base::Unretained(device()),
- capture_format,
+ capture_params,
base::Passed(client_observer()->PassClient())));
ASSERT_NO_FATAL_FAILURE(client_observer()->WaitForError());
BrowserThread::PostTask(
@@ -798,17 +785,15 @@ TEST_F(WebContentsVideoCaptureDeviceTest, RejectsInvalidAllocateParams) {
}
TEST_F(WebContentsVideoCaptureDeviceTest, BadFramesGoodFrames) {
- media::VideoCaptureCapability capture_format(
- kTestWidth,
- kTestHeight,
- kTestFramesPerSecond,
- media::PIXEL_FORMAT_I420,
- media::ConstantResolutionVideoCaptureDevice);
+ media::VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(kTestWidth, kTestHeight);
+ capture_params.requested_format.frame_rate = kTestFramesPerSecond;
+ capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
// 1x1 is too small to process; we intend for this to result in an error.
source()->SetCopyResultSize(1, 1);
source()->SetSolidColor(SK_ColorRED);
- device()->AllocateAndStart(
- capture_format, client_observer()->PassClient());
+ device()->AllocateAndStart(capture_params, client_observer()->PassClient());
// These frames ought to be dropped during the Render stage. Let
// several captures happen.
diff --git a/content/common/media/media_param_traits.cc b/content/common/media/media_param_traits.cc
index 18058d8..f6eaec1 100644
--- a/content/common/media/media_param_traits.cc
+++ b/content/common/media/media_param_traits.cc
@@ -12,7 +12,7 @@
using media::AudioParameters;
using media::ChannelLayout;
using media::VideoCaptureFormat;
-using media::VideoCaptureSessionId;
+using media::VideoPixelFormat;
namespace IPC {
@@ -56,25 +56,24 @@ void ParamTraits<AudioParameters>::Log(const AudioParameters& p,
void ParamTraits<VideoCaptureFormat>::Write(Message* m,
const VideoCaptureFormat& p) {
- m->WriteInt(p.width);
- m->WriteInt(p.height);
+ m->WriteInt(p.frame_size.width());
+ m->WriteInt(p.frame_size.height());
m->WriteInt(p.frame_rate);
- m->WriteInt(static_cast<int>(p.frame_size_type));
+ m->WriteInt(static_cast<int>(p.pixel_format));
}
bool ParamTraits<VideoCaptureFormat>::Read(const Message* m,
PickleIterator* iter,
VideoCaptureFormat* r) {
- int frame_size_type;
- if (!m->ReadInt(iter, &r->width) ||
- !m->ReadInt(iter, &r->height) ||
+ int frame_size_width, frame_size_height, pixel_format;
+ if (!m->ReadInt(iter, &frame_size_width) ||
+ !m->ReadInt(iter, &frame_size_height) ||
!m->ReadInt(iter, &r->frame_rate) ||
- !m->ReadInt(iter, &frame_size_type))
+ !m->ReadInt(iter, &pixel_format))
return false;
- r->frame_size_type =
- static_cast<media::VideoCaptureResolutionType>(
- frame_size_type);
+ r->frame_size.SetSize(frame_size_width, frame_size_height);
+ r->pixel_format = static_cast<VideoPixelFormat>(pixel_format);
if (!r->IsValid())
return false;
return true;
diff --git a/content/common/media/video_capture_messages.h b/content/common/media/video_capture_messages.h
index 50e6f9a..30ac62c 100644
--- a/content/common/media/video_capture_messages.h
+++ b/content/common/media/video_capture_messages.h
@@ -14,12 +14,14 @@
#define IPC_MESSAGE_START VideoCaptureMsgStart
IPC_ENUM_TRAITS(content::VideoCaptureState)
-IPC_ENUM_TRAITS_MAX_VALUE(media::VideoCaptureResolutionType,
- media::MaxVideoCaptureResolutionType - 1)
IPC_STRUCT_TRAITS_BEGIN(media::VideoCaptureParams)
- IPC_STRUCT_TRAITS_MEMBER(session_id)
IPC_STRUCT_TRAITS_MEMBER(requested_format)
+ IPC_STRUCT_TRAITS_MEMBER(allow_resolution_change)
+IPC_STRUCT_TRAITS_END()
+
+IPC_STRUCT_TRAITS_BEGIN(media::VideoCaptureCapability)
+ IPC_STRUCT_TRAITS_MEMBER(supported_format)
IPC_STRUCT_TRAITS_END()
// TODO(nick): device_id in these messages is basically just a route_id. We
@@ -53,8 +55,9 @@ IPC_MESSAGE_CONTROL4(VideoCaptureMsg_BufferReady,
// Start a video capture as |device_id|, a new id picked by the renderer
// process. The session to be started is determined by |session_id|.
-IPC_MESSAGE_CONTROL2(VideoCaptureHostMsg_Start,
+IPC_MESSAGE_CONTROL3(VideoCaptureHostMsg_Start,
int /* device_id */,
+                     media::VideoCaptureSessionId /* session_id */,
media::VideoCaptureParams /* params */)
// Pause the video capture specified by |device_id|.
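
The Start message above now carries the session id as a separate argument rather than inside |params|. A minimal sketch of the renderer-side send, assuming the VideoCaptureImpl members (device_id_, session_id_) shown later in this CL; the format values are illustrative only:

  media::VideoCaptureParams params;
  params.requested_format = media::VideoCaptureFormat(
      gfx::Size(640, 480), 30, media::PIXEL_FORMAT_I420);
  params.allow_resolution_change = false;
  // Sent from the capture thread once a renderer-chosen device_id_ exists.
  Send(new VideoCaptureHostMsg_Start(device_id_, session_id_, params));
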
diff --git a/content/renderer/media/rtc_video_capture_delegate.cc b/content/renderer/media/rtc_video_capture_delegate.cc
index 9567e2c..4cc2b59 100644
--- a/content/renderer/media/rtc_video_capture_delegate.cc
+++ b/content/renderer/media/rtc_video_capture_delegate.cc
@@ -5,6 +5,7 @@
#include "content/renderer/media/rtc_video_capture_delegate.h"
#include "base/bind.h"
+#include "media/base/video_frame.h"
namespace content {
diff --git a/content/renderer/media/rtc_video_capturer.cc b/content/renderer/media/rtc_video_capturer.cc
index 2cf0d91..0a7a82c 100644
--- a/content/renderer/media/rtc_video_capturer.cc
+++ b/content/renderer/media/rtc_video_capturer.cc
@@ -6,6 +6,7 @@
#include "base/bind.h"
#include "base/debug/trace_event.h"
+#include "media/base/video_frame.h"
namespace content {
@@ -30,11 +31,10 @@ cricket::CaptureState RtcVideoCapturer::Start(
}
media::VideoCaptureParams request;
- request.requested_format =
- media::VideoCaptureFormat(capture_format.width,
- capture_format.height,
- capture_format.framerate(),
- media::ConstantResolutionVideoCaptureDevice);
+ request.requested_format = media::VideoCaptureFormat(
+ gfx::Size(capture_format.width, capture_format.height),
+ capture_format.framerate(),
+ media::PIXEL_FORMAT_I420);
SetCaptureFormat(&capture_format);
diff --git a/content/renderer/media/video_capture_impl.cc b/content/renderer/media/video_capture_impl.cc
index 8fd24f8..30b43dd 100644
--- a/content/renderer/media/video_capture_impl.cc
+++ b/content/renderer/media/video_capture_impl.cc
@@ -10,6 +10,7 @@
#include "content/common/media/video_capture_messages.h"
#include "media/base/bind_to_loop.h"
#include "media/base/limits.h"
+#include "media/base/video_frame.h"
namespace content {
@@ -159,24 +160,23 @@ void VideoCaptureImpl::DoStartCaptureOnCaptureThread(
clients_[handler] = params;
} else if (state_ == VIDEO_CAPTURE_STATE_STOPPING) {
clients_pending_on_restart_[handler] = params;
- DVLOG(1) << "StartCapture: Got new resolution ("
- << params.requested_format.width << ", "
- << params.requested_format.height << ") "
- << ", during stopping.";
+ DVLOG(1) << "StartCapture: Got new resolution "
+ << params.requested_format.frame_size.ToString()
+ << " during stopping.";
} else {
- DCHECK_EQ(params.session_id, 0);
+ // TODO(sheu): Allowing resolution change will require that all
+ // outstanding clients of a capture session support resolution change.
+ DCHECK(!params.allow_resolution_change);
clients_[handler] = params;
DCHECK_EQ(1ul, clients_.size());
params_ = params;
- params_.session_id = session_id_;
if (params_.requested_format.frame_rate >
media::limits::kMaxFramesPerSecond) {
params_.requested_format.frame_rate =
media::limits::kMaxFramesPerSecond;
}
- DVLOG(1) << "StartCapture: starting with first resolution ("
- << params_.requested_format.width << ","
- << params_.requested_format.height << ")";
+ DVLOG(1) << "StartCapture: starting with first resolution "
+ << params_.requested_format.frame_size.ToString();
StartCaptureInternal();
}
@@ -252,7 +252,6 @@ void VideoCaptureImpl::DoBufferReceivedOnCaptureThread(
}
last_frame_format_ = format;
- gfx::Size size(format.width, format.height);
ClientBufferMap::iterator iter = client_buffers_.find(buffer_id);
DCHECK(iter != client_buffers_.end());
@@ -260,9 +259,9 @@ void VideoCaptureImpl::DoBufferReceivedOnCaptureThread(
scoped_refptr<media::VideoFrame> frame =
media::VideoFrame::WrapExternalPackedMemory(
media::VideoFrame::I420,
- size,
- gfx::Rect(size),
- size,
+ last_frame_format_.frame_size,
+ gfx::Rect(last_frame_format_.frame_size),
+ last_frame_format_.frame_size,
reinterpret_cast<uint8*>(buffer->buffer->memory()),
buffer->buffer_size,
buffer->buffer->handle(),
@@ -360,7 +359,7 @@ void VideoCaptureImpl::StopDevice() {
if (state_ == VIDEO_CAPTURE_STATE_STARTED) {
state_ = VIDEO_CAPTURE_STATE_STOPPING;
Send(new VideoCaptureHostMsg_Stop(device_id_));
- params_.requested_format.width = params_.requested_format.height = 0;
+ params_.requested_format.frame_size.SetSize(0, 0);
}
}
@@ -372,20 +371,19 @@ void VideoCaptureImpl::RestartCapture() {
int height = 0;
for (ClientInfo::iterator it = clients_.begin();
it != clients_.end(); ++it) {
- width = std::max(width, it->second.requested_format.width);
- height = std::max(height, it->second.requested_format.height);
+ width = std::max(width, it->second.requested_format.frame_size.width());
+ height = std::max(height, it->second.requested_format.frame_size.height());
}
for (ClientInfo::iterator it = clients_pending_on_restart_.begin();
it != clients_pending_on_restart_.end(); ) {
- width = std::max(width, it->second.requested_format.width);
- height = std::max(height, it->second.requested_format.height);
+ width = std::max(width, it->second.requested_format.frame_size.width());
+ height = std::max(height, it->second.requested_format.frame_size.height());
clients_[it->first] = it->second;
clients_pending_on_restart_.erase(it++);
}
- params_.requested_format.width = width;
- params_.requested_format.height = height;
- DVLOG(1) << "RestartCapture, " << params_.requested_format.width << ", "
- << params_.requested_format.height;
+ params_.requested_format.frame_size.SetSize(width, height);
+ DVLOG(1) << "RestartCapture, "
+ << params_.requested_format.frame_size.ToString();
StartCaptureInternal();
}
@@ -393,7 +391,7 @@ void VideoCaptureImpl::StartCaptureInternal() {
DCHECK(capture_message_loop_proxy_->BelongsToCurrentThread());
DCHECK(device_id_);
- Send(new VideoCaptureHostMsg_Start(device_id_, params_));
+ Send(new VideoCaptureHostMsg_Start(device_id_, session_id_, params_));
state_ = VIDEO_CAPTURE_STATE_STARTED;
}
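
RestartCapture() above folds every outstanding client request into one capture size by taking the per-dimension maximum. A small standalone sketch of that merge, with illustrative names that are not part of the CL (assumes the usual <algorithm>, <vector>, gfx::Size and media includes):

  // Per-dimension max over all pending requests; e.g. 320x240 and 1280x720
  // merge to 1280x720.
  gfx::Size MergeRequestedSizes(
      const std::vector<media::VideoCaptureParams>& requests) {
    int width = 0;
    int height = 0;
    for (size_t i = 0; i < requests.size(); ++i) {
      width = std::max(width, requests[i].requested_format.frame_size.width());
      height =
          std::max(height, requests[i].requested_format.frame_size.height());
    }
    return gfx::Size(width, height);
  }
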
diff --git a/content/renderer/media/video_capture_impl_unittest.cc b/content/renderer/media/video_capture_impl_unittest.cc
index f0bf333..713b3a0 100644
--- a/content/renderer/media/video_capture_impl_unittest.cc
+++ b/content/renderer/media/video_capture_impl_unittest.cc
@@ -81,6 +81,7 @@ class VideoCaptureImplTest : public ::testing::Test {
}
void DeviceStartCapture(int device_id,
+ media::VideoCaptureSessionId session_id,
const media::VideoCaptureParams& params) {
OnStateChanged(VIDEO_CAPTURE_STATE_STARTED);
}
@@ -96,10 +97,10 @@ class VideoCaptureImplTest : public ::testing::Test {
VideoCaptureImplTest() {
params_small_.requested_format = media::VideoCaptureFormat(
- 176, 144, 30, media::ConstantResolutionVideoCaptureDevice);
+ gfx::Size(176, 144), 30, media::PIXEL_FORMAT_I420);
params_large_.requested_format = media::VideoCaptureFormat(
- 320, 240, 30, media::ConstantResolutionVideoCaptureDevice);
+ gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420);
message_loop_.reset(new base::MessageLoop(base::MessageLoop::TYPE_IO));
message_loop_proxy_ = base::MessageLoopProxy::current().get();
diff --git a/content/renderer/media/video_capture_message_filter_unittest.cc b/content/renderer/media/video_capture_message_filter_unittest.cc
index 069673f..366bcb0 100644
--- a/content/renderer/media/video_capture_message_filter_unittest.cc
+++ b/content/renderer/media/video_capture_message_filter_unittest.cc
@@ -11,11 +11,10 @@
#include "testing/gtest/include/gtest/gtest.h"
using ::testing::_;
-using ::testing::AllOf;
using ::testing::AnyNumber;
-using ::testing::Field;
using ::testing::Mock;
using ::testing::Return;
+using ::testing::SaveArg;
using ::testing::StrictMock;
namespace content {
@@ -82,15 +81,17 @@ TEST(VideoCaptureMessageFilterTest, Basic) {
int buffer_id = 22;
base::Time timestamp = base::Time::FromInternalValue(1);
- media::VideoCaptureFormat format(234, 512, 30,
- media::ConstantResolutionVideoCaptureDevice);
- EXPECT_CALL(delegate, OnBufferReceived(buffer_id, timestamp,
- AllOf(Field(&media::VideoCaptureFormat::width, 234),
- Field(&media::VideoCaptureFormat::height, 512),
- Field(&media::VideoCaptureFormat::frame_rate, 30))));
+ media::VideoCaptureFormat format(
+ gfx::Size(234, 512), 30, media::PIXEL_FORMAT_I420);
+ media::VideoCaptureFormat saved_format;
+ EXPECT_CALL(delegate, OnBufferReceived(buffer_id, timestamp, _))
+ .WillRepeatedly(SaveArg<2>(&saved_format));
filter->OnMessageReceived(VideoCaptureMsg_BufferReady(
delegate.device_id(), buffer_id, timestamp, format));
Mock::VerifyAndClearExpectations(&delegate);
+ EXPECT_EQ(234, saved_format.frame_size.width());
+ EXPECT_EQ(512, saved_format.frame_size.height());
+ EXPECT_EQ(30, saved_format.frame_rate);
// VideoCaptureMsg_FreeBuffer
EXPECT_CALL(delegate, OnBufferDestroyed(buffer_id));
diff --git a/content/renderer/pepper/pepper_video_capture_host.cc b/content/renderer/pepper/pepper_video_capture_host.cc
index 86a7da4..7328424 100644
--- a/content/renderer/pepper/pepper_video_capture_host.cc
+++ b/content/renderer/pepper/pepper_video_capture_host.cc
@@ -365,11 +365,11 @@ void PepperVideoCaptureHost::SetRequestedInfo(
// Clamp the buffer count to between 1 and |kMaxBuffers|.
buffer_count_hint_ = std::min(std::max(buffer_count, 1U), kMaxBuffers);
- video_capture_params_.requested_format =
- media::VideoCaptureFormat(device_info.width,
- device_info.height,
- device_info.frames_per_second,
- media::ConstantResolutionVideoCaptureDevice);
+ video_capture_params_.requested_format = media::VideoCaptureFormat(
+ gfx::Size(device_info.width, device_info.height),
+ device_info.frames_per_second,
+ media::PIXEL_FORMAT_I420);
+ video_capture_params_.allow_resolution_change = false;
}
void PepperVideoCaptureHost::DetachPlatformVideoCapture() {
diff --git a/media/video/capture/android/video_capture_device_android.cc b/media/video/capture/android/video_capture_device_android.cc
index 06c4604..c4c7034 100644
--- a/media/video/capture/android/video_capture_device_android.cc
+++ b/media/video/capture/android/video_capture_device_android.cc
@@ -86,11 +86,7 @@ bool VideoCaptureDeviceAndroid::RegisterVideoCaptureDevice(JNIEnv* env) {
}
VideoCaptureDeviceAndroid::VideoCaptureDeviceAndroid(const Name& device_name)
- : state_(kIdle),
- got_first_frame_(false),
- device_name_(device_name),
- current_settings_() {
-}
+ : state_(kIdle), got_first_frame_(false), device_name_(device_name) {}
VideoCaptureDeviceAndroid::~VideoCaptureDeviceAndroid() {
StopAndDeAllocate();
@@ -111,7 +107,7 @@ bool VideoCaptureDeviceAndroid::Init() {
}
void VideoCaptureDeviceAndroid::AllocateAndStart(
- const VideoCaptureCapability& capture_format,
+ const VideoCaptureParams& params,
scoped_ptr<Client> client) {
DVLOG(1) << "VideoCaptureDeviceAndroid::AllocateAndStart";
{
@@ -124,40 +120,38 @@ void VideoCaptureDeviceAndroid::AllocateAndStart(
JNIEnv* env = AttachCurrentThread();
- jboolean ret = Java_VideoCapture_allocate(env,
- j_capture_.obj(),
- capture_format.width,
- capture_format.height,
- capture_format.frame_rate);
+ jboolean ret =
+ Java_VideoCapture_allocate(env,
+ j_capture_.obj(),
+ params.requested_format.frame_size.width(),
+ params.requested_format.frame_size.height(),
+ params.requested_format.frame_rate);
if (!ret) {
SetErrorState("failed to allocate");
return;
}
// Store current width and height.
- current_settings_.width =
- Java_VideoCapture_queryWidth(env, j_capture_.obj());
- current_settings_.height =
- Java_VideoCapture_queryHeight(env, j_capture_.obj());
- current_settings_.frame_rate =
+ capture_format_.frame_size.SetSize(
+ Java_VideoCapture_queryWidth(env, j_capture_.obj()),
+ Java_VideoCapture_queryHeight(env, j_capture_.obj()));
+ capture_format_.frame_rate =
Java_VideoCapture_queryFrameRate(env, j_capture_.obj());
- current_settings_.color = GetColorspace();
- DCHECK_NE(current_settings_.color, media::PIXEL_FORMAT_UNKNOWN);
- CHECK(current_settings_.width > 0 && !(current_settings_.width % 2));
- CHECK(current_settings_.height > 0 && !(current_settings_.height % 2));
+ capture_format_.pixel_format = GetColorspace();
+ DCHECK_NE(capture_format_.pixel_format, media::PIXEL_FORMAT_UNKNOWN);
+ CHECK(capture_format_.frame_size.GetArea() > 0);
+ CHECK(!(capture_format_.frame_size.width() % 2));
+ CHECK(!(capture_format_.frame_size.height() % 2));
- if (capture_format.frame_rate > 0) {
+ if (capture_format_.frame_rate > 0) {
frame_interval_ = base::TimeDelta::FromMicroseconds(
- (base::Time::kMicrosecondsPerSecond + capture_format.frame_rate - 1) /
- capture_format.frame_rate);
+ (base::Time::kMicrosecondsPerSecond + capture_format_.frame_rate - 1) /
+ capture_format_.frame_rate);
}
- DVLOG(1) << "VideoCaptureDeviceAndroid::Allocate: queried width="
- << current_settings_.width
- << ", height="
- << current_settings_.height
- << ", frame_rate="
- << current_settings_.frame_rate;
+ DVLOG(1) << "VideoCaptureDeviceAndroid::Allocate: queried frame_size="
+ << capture_format_.frame_size.ToString()
+ << ", frame_rate=" << capture_format_.frame_rate;
jint result = Java_VideoCapture_startCapture(env, j_capture_.obj());
if (result < 0) {
@@ -234,7 +228,7 @@ void VideoCaptureDeviceAndroid::OnFrameAvailable(
rotation,
flip_vert,
flip_horiz,
- current_settings_);
+ capture_format_);
}
env->ReleaseByteArrayElements(data, buffer, JNI_ABORT);
diff --git a/media/video/capture/android/video_capture_device_android.h b/media/video/capture/android/video_capture_device_android.h
index c9ab5d7..3039a79 100644
--- a/media/video/capture/android/video_capture_device_android.h
+++ b/media/video/capture/android/video_capture_device_android.h
@@ -29,9 +29,8 @@ class MEDIA_EXPORT VideoCaptureDeviceAndroid : public VideoCaptureDevice {
static bool RegisterVideoCaptureDevice(JNIEnv* env);
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(
- const VideoCaptureCapability& capture_format,
- scoped_ptr<Client> client) OVERRIDE;
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
+ scoped_ptr<Client> client) OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
// Implement org.chromium.media.VideoCapture.nativeOnFrameAvailable.
@@ -73,7 +72,7 @@ class MEDIA_EXPORT VideoCaptureDeviceAndroid : public VideoCaptureDevice {
scoped_ptr<VideoCaptureDevice::Client> client_;
Name device_name_;
- VideoCaptureCapability current_settings_;
+ VideoCaptureFormat capture_format_;
// Java VideoCaptureAndroid instance.
base::android::ScopedJavaGlobalRef<jobject> j_capture_;
diff --git a/media/video/capture/fake_video_capture_device.cc b/media/video/capture/fake_video_capture_device.cc
index c36670c..ef44d86 100644
--- a/media/video/capture/fake_video_capture_device.cc
+++ b/media/video/capture/fake_video_capture_device.cc
@@ -37,12 +37,13 @@ void FakeVideoCaptureDevice::GetDeviceNames(Names* const device_names) {
void FakeVideoCaptureDevice::GetDeviceSupportedFormats(
const Name& device,
VideoCaptureCapabilities* formats) {
- VideoCaptureCapability capture_format;
- capture_format.color = media::PIXEL_FORMAT_I420;
- capture_format.width = 640;
- capture_format.height = 480;
- capture_format.frame_rate = 1000 / kFakeCaptureTimeoutMs;
- formats->push_back(capture_format);
+ VideoCaptureCapability capture_format_640x480;
+ capture_format_640x480.supported_format.frame_size.SetSize(640, 480);
+ capture_format_640x480.supported_format.frame_rate =
+ 1000 / kFakeCaptureTimeoutMs;
+ capture_format_640x480.supported_format.pixel_format =
+ media::PIXEL_FORMAT_I420;
+ formats->push_back(capture_format_640x480);
}
VideoCaptureDevice* FakeVideoCaptureDevice::Create(const Name& device_name) {
@@ -67,8 +68,7 @@ FakeVideoCaptureDevice::FakeVideoCaptureDevice()
: state_(kIdle),
capture_thread_("CaptureThread"),
frame_count_(0),
- capabilities_roster_index_(0) {
-}
+ format_roster_index_(0) {}
FakeVideoCaptureDevice::~FakeVideoCaptureDevice() {
// Check if the thread is running.
@@ -77,31 +77,27 @@ FakeVideoCaptureDevice::~FakeVideoCaptureDevice() {
}
void FakeVideoCaptureDevice::AllocateAndStart(
- const VideoCaptureCapability& capture_format,
+ const VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
- capture_format_.frame_size_type = capture_format.frame_size_type;
- if (capture_format.frame_size_type == VariableResolutionVideoCaptureDevice)
- PopulateCapabilitiesRoster();
+ if (params.allow_resolution_change)
+ PopulateFormatRoster();
if (state_ != kIdle) {
return; // Wrong state.
}
client_ = client.Pass();
- capture_format_.color = PIXEL_FORMAT_I420;
- if (capture_format.width > 320) { // VGA
- capture_format_.width = 640;
- capture_format_.height = 480;
+ capture_format_.pixel_format = PIXEL_FORMAT_I420;
+ if (params.requested_format.frame_size.width() > 320) { // VGA
+ capture_format_.frame_size.SetSize(640, 480);
capture_format_.frame_rate = 30;
} else { // QVGA
- capture_format_.width = 320;
- capture_format_.height = 240;
+ capture_format_.frame_size.SetSize(320, 240);
capture_format_.frame_rate = 30;
}
- const size_t fake_frame_size = VideoFrame::AllocationSize(
- VideoFrame::I420,
- gfx::Size(capture_format_.width, capture_format_.height));
+ const size_t fake_frame_size =
+ VideoFrame::AllocationSize(VideoFrame::I420, capture_format_.frame_size);
fake_frame_.reset(new uint8[fake_frame_size]);
state_ = kCapturing;
@@ -114,15 +110,14 @@ void FakeVideoCaptureDevice::AllocateAndStart(
void FakeVideoCaptureDevice::Reallocate() {
DCHECK_EQ(state_, kCapturing);
- capture_format_ = capabilities_roster_.at(++capabilities_roster_index_ %
- capabilities_roster_.size());
- DCHECK_EQ(capture_format_.color, PIXEL_FORMAT_I420);
- DVLOG(3) << "Reallocating FakeVideoCaptureDevice, new capture resolution ("
- << capture_format_.width << "x" << capture_format_.height << ")";
-
- const size_t fake_frame_size = VideoFrame::AllocationSize(
- VideoFrame::I420,
- gfx::Size(capture_format_.width, capture_format_.height));
+ capture_format_ =
+ format_roster_.at(++format_roster_index_ % format_roster_.size());
+ DCHECK_EQ(capture_format_.pixel_format, PIXEL_FORMAT_I420);
+ DVLOG(3) << "Reallocating FakeVideoCaptureDevice, new capture resolution "
+ << capture_format_.frame_size.ToString();
+
+ const size_t fake_frame_size =
+ VideoFrame::AllocationSize(VideoFrame::I420, capture_format_.frame_size);
fake_frame_.reset(new uint8[fake_frame_size]);
}
@@ -139,25 +134,28 @@ void FakeVideoCaptureDevice::OnCaptureTask() {
return;
}
- const size_t frame_size = VideoFrame::AllocationSize(
- VideoFrame::I420,
- gfx::Size(capture_format_.width, capture_format_.height));
+ const size_t frame_size =
+ VideoFrame::AllocationSize(VideoFrame::I420, capture_format_.frame_size);
memset(fake_frame_.get(), 0, frame_size);
SkBitmap bitmap;
bitmap.setConfig(SkBitmap::kA8_Config,
- capture_format_.width,
- capture_format_.height,
- capture_format_.width);
- bitmap.setPixels(fake_frame_.get());
+ capture_format_.frame_size.width(),
+ capture_format_.frame_size.height(),
+                   capture_format_.frame_size.width());
+ bitmap.setPixels(fake_frame_.get());
SkCanvas canvas(bitmap);
// Draw a sweeping circle to show an animation.
- int radius = std::min(capture_format_.width, capture_format_.height) / 4;
- SkRect rect = SkRect::MakeXYWH(
- capture_format_.width / 2 - radius, capture_format_.height / 2 - radius,
- 2 * radius, 2 * radius);
+ int radius = std::min(capture_format_.frame_size.width(),
+ capture_format_.frame_size.height()) /
+ 4;
+ SkRect rect =
+ SkRect::MakeXYWH(capture_format_.frame_size.width() / 2 - radius,
+ capture_format_.frame_size.height() / 2 - radius,
+ 2 * radius,
+ 2 * radius);
SkPaint paint;
paint.setStyle(SkPaint::kFill_Style);
@@ -203,8 +201,7 @@ void FakeVideoCaptureDevice::OnCaptureTask() {
false,
capture_format_);
if (!(frame_count_ % kFakeCaptureCapabilityChangePeriod) &&
- (capture_format_.frame_size_type ==
- VariableResolutionVideoCaptureDevice)) {
+ format_roster_.size() > 0U) {
Reallocate();
}
// Reschedule next CaptureTask.
@@ -215,27 +212,15 @@ void FakeVideoCaptureDevice::OnCaptureTask() {
base::TimeDelta::FromMilliseconds(kFakeCaptureTimeoutMs));
}
-void FakeVideoCaptureDevice::PopulateCapabilitiesRoster() {
- capabilities_roster_.push_back(
- media::VideoCaptureCapability(320,
- 240,
- 30,
- PIXEL_FORMAT_I420,
- VariableResolutionVideoCaptureDevice));
- capabilities_roster_.push_back(
- media::VideoCaptureCapability(640,
- 480,
- 30,
- PIXEL_FORMAT_I420,
- VariableResolutionVideoCaptureDevice));
- capabilities_roster_.push_back(
- media::VideoCaptureCapability(800,
- 600,
- 30,
- PIXEL_FORMAT_I420,
- VariableResolutionVideoCaptureDevice));
-
- capabilities_roster_index_ = 0;
+void FakeVideoCaptureDevice::PopulateFormatRoster() {
+ format_roster_.push_back(
+ media::VideoCaptureFormat(gfx::Size(320, 240), 30, PIXEL_FORMAT_I420));
+ format_roster_.push_back(
+ media::VideoCaptureFormat(gfx::Size(640, 480), 30, PIXEL_FORMAT_I420));
+ format_roster_.push_back(
+ media::VideoCaptureFormat(gfx::Size(800, 600), 30, PIXEL_FORMAT_I420));
+
+ format_roster_index_ = 0;
}
} // namespace media
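
For reference, a sketch of how the fake device's variable-resolution path is driven, mirroring the FakeCaptureVariableResolution test further down in this CL; |device| and |client| stand in for a FakeVideoCaptureDevice and a scoped_ptr to its Client:

  media::VideoCaptureParams params;
  params.requested_format.frame_size.SetSize(640, 480);
  params.requested_format.frame_rate = 30;
  params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
  params.allow_resolution_change = true;  // Populates the format roster.
  device->AllocateAndStart(params, client.Pass());
  // Reallocate() then cycles 320x240 -> 640x480 -> 800x600, switching every
  // kFakeCaptureCapabilityChangePeriod frames.
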
diff --git a/media/video/capture/fake_video_capture_device.h b/media/video/capture/fake_video_capture_device.h
index 174ba06a..7d5de57 100644
--- a/media/video/capture/fake_video_capture_device.h
+++ b/media/video/capture/fake_video_capture_device.h
@@ -29,9 +29,9 @@ class MEDIA_EXPORT FakeVideoCaptureDevice : public VideoCaptureDevice {
VideoCaptureCapabilities* formats);
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(
- const VideoCaptureCapability& capture_format,
- scoped_ptr<VideoCaptureDevice::Client> client) OVERRIDE;
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
+ scoped_ptr<VideoCaptureDevice::Client> client)
+ OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
private:
@@ -49,19 +49,19 @@ class MEDIA_EXPORT FakeVideoCaptureDevice : public VideoCaptureDevice {
// EXPERIMENTAL, similar to allocate, but changes resolution and calls
// client->OnFrameInfoChanged(VideoCaptureCapability&)
void Reallocate();
- void PopulateCapabilitiesRoster();
+ void PopulateFormatRoster();
scoped_ptr<VideoCaptureDevice::Client> client_;
InternalState state_;
base::Thread capture_thread_;
scoped_ptr<uint8[]> fake_frame_;
int frame_count_;
- VideoCaptureCapability capture_format_;
+ VideoCaptureFormat capture_format_;
// When the device is configured for variable-resolution capture, this vector
// holds the available formats, used in sequence and restarting at the end.
- std::vector<VideoCaptureCapability> capabilities_roster_;
- int capabilities_roster_index_;
+ std::vector<VideoCaptureFormat> format_roster_;
+ int format_roster_index_;
static bool fail_next_create_;
diff --git a/media/video/capture/linux/video_capture_device_linux.cc b/media/video/capture/linux/video_capture_device_linux.cc
index 66aecff..1946961 100644
--- a/media/video/capture/linux/video_capture_device_linux.cc
+++ b/media/video/capture/linux/video_capture_device_linux.cc
@@ -159,22 +159,25 @@ void VideoCaptureDevice::GetDeviceSupportedFormats(
formats->clear();
- VideoCaptureCapability capture_format;
+ VideoCaptureCapability capture_capability;
// Retrieve the caps one by one, first get colorspace, then sizes, then
// framerates. See http://linuxtv.org/downloads/v4l-dvb-apis for reference.
v4l2_fmtdesc pixel_format = {};
pixel_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (ioctl(fd, VIDIOC_ENUM_FMT, &pixel_format) == 0) {
- capture_format.color =
+ capture_capability.supported_format.pixel_format =
V4l2ColorToVideoCaptureColorFormat((int32)pixel_format.pixelformat);
- if (capture_format.color == PIXEL_FORMAT_UNKNOWN) continue;
+ if (capture_capability.supported_format.pixel_format ==
+ PIXEL_FORMAT_UNKNOWN) {
+ continue;
+ }
v4l2_frmsizeenum frame_size = {};
frame_size.pixel_format = pixel_format.pixelformat;
while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frame_size) == 0) {
if (frame_size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
- capture_format.width = frame_size.discrete.width;
- capture_format.height = frame_size.discrete.height;
+ capture_capability.supported_format.frame_size.SetSize(
+ frame_size.discrete.width, frame_size.discrete.height);
} else if (frame_size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
// TODO(mcasas): see http://crbug.com/249953, support these devices.
NOTIMPLEMENTED();
@@ -189,11 +192,11 @@ void VideoCaptureDevice::GetDeviceSupportedFormats(
while (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frame_interval) == 0) {
if (frame_interval.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
if (frame_interval.discrete.numerator != 0) {
- capture_format.frame_rate =
+ capture_capability.supported_format.frame_rate =
static_cast<float>(frame_interval.discrete.denominator) /
static_cast<float>(frame_interval.discrete.numerator);
} else {
- capture_format.frame_rate = 0;
+ capture_capability.supported_format.frame_rate = 0;
}
} else if (frame_interval.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
// TODO(mcasas): see http://crbug.com/249953, support these devices.
@@ -204,7 +207,7 @@ void VideoCaptureDevice::GetDeviceSupportedFormats(
NOTIMPLEMENTED();
break;
}
- formats->push_back(capture_format);
+ formats->push_back(capture_capability);
++frame_interval.index;
}
++frame_size.index;
@@ -291,7 +294,7 @@ VideoCaptureDeviceLinux::~VideoCaptureDeviceLinux() {
}
void VideoCaptureDeviceLinux::AllocateAndStart(
- const VideoCaptureCapability& capture_format,
+ const VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
if (v4l2_thread_.IsRunning()) {
return; // Wrong state.
@@ -301,9 +304,9 @@ void VideoCaptureDeviceLinux::AllocateAndStart(
FROM_HERE,
base::Bind(&VideoCaptureDeviceLinux::OnAllocateAndStart,
base::Unretained(this),
- capture_format.width,
- capture_format.height,
- capture_format.frame_rate,
+ params.requested_format.frame_size.width(),
+ params.requested_format.frame_size.height(),
+ params.requested_format.frame_rate,
base::Passed(&client)));
}
@@ -409,12 +412,11 @@ void VideoCaptureDeviceLinux::OnAllocateAndStart(int width,
// framerate configuration, or the actual one is different from the desired?
// Store our current width and height.
- frame_info_.color =
+ capture_format_.frame_size.SetSize(video_fmt.fmt.pix.width,
+ video_fmt.fmt.pix.height);
+ capture_format_.frame_rate = frame_rate;
+ capture_format_.pixel_format =
V4l2ColorToVideoCaptureColorFormat(video_fmt.fmt.pix.pixelformat);
- frame_info_.width = video_fmt.fmt.pix.width;
- frame_info_.height = video_fmt.fmt.pix.height;
- frame_info_.frame_rate = frame_rate;
- frame_info_.frame_size_type = VariableResolutionVideoCaptureDevice;
// Start capturing.
if (!AllocateVideoBuffers()) {
@@ -519,7 +521,7 @@ void VideoCaptureDeviceLinux::OnCaptureTask() {
0,
false,
false,
- frame_info_);
+ capture_format_);
// Enqueue the buffer again.
if (ioctl(device_fd_, VIDIOC_QBUF, &buffer) == -1) {
diff --git a/media/video/capture/linux/video_capture_device_linux.h b/media/video/capture/linux/video_capture_device_linux.h
index ad93fc0..a5917b7 100644
--- a/media/video/capture/linux/video_capture_device_linux.h
+++ b/media/video/capture/linux/video_capture_device_linux.h
@@ -24,7 +24,7 @@ class VideoCaptureDeviceLinux : public VideoCaptureDevice {
virtual ~VideoCaptureDeviceLinux();
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(const VideoCaptureCapability& capture_format,
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
scoped_ptr<Client> client) OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
@@ -64,7 +64,7 @@ class VideoCaptureDeviceLinux : public VideoCaptureDevice {
Buffer* buffer_pool_;
int buffer_pool_size_; // Number of allocated buffers.
int timeout_count_;
- VideoCaptureCapability frame_info_;
+ VideoCaptureFormat capture_format_;
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceLinux);
};
diff --git a/media/video/capture/mac/video_capture_device_mac.h b/media/video/capture/mac/video_capture_device_mac.h
index adfd4c7..474e7e1 100644
--- a/media/video/capture/mac/video_capture_device_mac.h
+++ b/media/video/capture/mac/video_capture_device_mac.h
@@ -30,9 +30,9 @@ class VideoCaptureDeviceMac : public VideoCaptureDevice {
virtual ~VideoCaptureDeviceMac();
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(
- const VideoCaptureCapability& capture_format,
- scoped_ptr<VideoCaptureDevice::Client> client) OVERRIDE;
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
+ scoped_ptr<VideoCaptureDevice::Client> client)
+ OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
bool Init();
@@ -40,7 +40,7 @@ class VideoCaptureDeviceMac : public VideoCaptureDevice {
// Called to deliver captured video frames.
void ReceiveFrame(const uint8* video_frame,
int video_frame_length,
- const VideoCaptureCapability& frame_info,
+ const VideoCaptureFormat& frame_format,
int aspect_numerator,
int aspect_denominator);
@@ -61,7 +61,7 @@ class VideoCaptureDeviceMac : public VideoCaptureDevice {
Name device_name_;
scoped_ptr<VideoCaptureDevice::Client> client_;
- VideoCaptureCapability current_settings_;
+ VideoCaptureFormat capture_format_;
bool sent_frame_info_;
bool tried_to_square_pixels_;
diff --git a/media/video/capture/mac/video_capture_device_mac.mm b/media/video/capture/mac/video_capture_device_mac.mm
index b353b12..9ef29d2 100644
--- a/media/video/capture/mac/video_capture_device_mac.mm
+++ b/media/video/capture/mac/video_capture_device_mac.mm
@@ -134,15 +134,15 @@ VideoCaptureDeviceMac::~VideoCaptureDeviceMac() {
}
void VideoCaptureDeviceMac::AllocateAndStart(
- const VideoCaptureCapability& capture_format,
+ const VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
DCHECK_EQ(loop_proxy_, base::MessageLoopProxy::current());
if (state_ != kIdle) {
return;
}
- int width = capture_format.width;
- int height = capture_format.height;
- int frame_rate = capture_format.frame_rate;
+ int width = params.requested_format.frame_size.width();
+ int height = params.requested_format.frame_size.height();
+ int frame_rate = params.requested_format.frame_rate;
// The OS API can scale the captured frame to any size requested, which would
// lead to an undesired aspect ratio change. Try to open the camera with a natively
@@ -164,10 +164,9 @@ void VideoCaptureDeviceMac::AllocateAndStart(
else if (frame_rate > kMaxFrameRate)
frame_rate = kMaxFrameRate;
- current_settings_.color = PIXEL_FORMAT_UYVY;
- current_settings_.width = width;
- current_settings_.height = height;
- current_settings_.frame_rate = frame_rate;
+ capture_format_.frame_size.SetSize(width, height);
+ capture_format_.frame_rate = frame_rate;
+ capture_format_.pixel_format = PIXEL_FORMAT_UYVY;
if (width <= kVGA.width || height <= kVGA.height) {
// If the resolution is VGA or QVGA, set the capture resolution to the
@@ -226,7 +225,7 @@ bool VideoCaptureDeviceMac::Init() {
void VideoCaptureDeviceMac::ReceiveFrame(
const uint8* video_frame,
int video_frame_length,
- const VideoCaptureCapability& frame_info,
+ const VideoCaptureFormat& frame_format,
int aspect_numerator,
int aspect_denominator) {
// This method is safe to call from a device capture thread,
@@ -234,23 +233,24 @@ void VideoCaptureDeviceMac::ReceiveFrame(
if (!sent_frame_info_) {
// Final resolution has not yet been selected.
- if (current_settings_.width > kVGA.width ||
- current_settings_.height > kVGA.height) {
+ if (capture_format_.frame_size.width() > kVGA.width ||
+ capture_format_.frame_size.height() > kVGA.height) {
// We are requesting HD. Make sure that the picture is good, otherwise
// drop down to VGA.
bool change_to_vga = false;
- if (frame_info.width < current_settings_.width ||
- frame_info.height < current_settings_.height) {
+ if (frame_format.frame_size.width() <
+ capture_format_.frame_size.width() ||
+ frame_format.frame_size.height() <
+ capture_format_.frame_size.height()) {
// These are the default capture settings, not yet configured to match
- // |current_settings_|.
- DCHECK(frame_info.frame_rate == 0);
+ // |capture_format_|.
+ DCHECK(frame_format.frame_rate == 0);
DVLOG(1) << "Switching to VGA because the default resolution is " <<
- frame_info.width << "x" << frame_info.height;
+ frame_format.frame_size.ToString();
change_to_vga = true;
}
- if (frame_info.width == current_settings_.width &&
- frame_info.height == current_settings_.height &&
+ if (capture_format_.frame_size == frame_format.frame_size &&
aspect_numerator != aspect_denominator) {
DVLOG(1) << "Switching to VGA because HD has nonsquare pixel " <<
"aspect ratio " << aspect_numerator << ":" << aspect_denominator;
@@ -258,33 +258,29 @@ void VideoCaptureDeviceMac::ReceiveFrame(
}
if (change_to_vga) {
- current_settings_.width = kVGA.width;
- current_settings_.height = kVGA.height;
+ capture_format_.frame_size.SetSize(kVGA.width, kVGA.height);
}
}
- if (current_settings_.width == frame_info.width &&
- current_settings_.height == frame_info.height &&
+ if (capture_format_.frame_size == frame_format.frame_size &&
!tried_to_square_pixels_ &&
(aspect_numerator > kMaxPixelAspectRatio * aspect_denominator ||
aspect_denominator > kMaxPixelAspectRatio * aspect_numerator)) {
// The requested size results in non-square PAR.
// Shrink the frame to 1:1 PAR (assuming QTKit selects the same input
// mode, which is not guaranteed).
- int new_width = current_settings_.width;
- int new_height = current_settings_.height;
+ int new_width = capture_format_.frame_size.width();
+ int new_height = capture_format_.frame_size.height();
if (aspect_numerator < aspect_denominator) {
new_width = (new_width * aspect_numerator) / aspect_denominator;
} else {
new_height = (new_height * aspect_denominator) / aspect_numerator;
}
- current_settings_.width = new_width;
- current_settings_.height = new_height;
+ capture_format_.frame_size.SetSize(new_width, new_height);
tried_to_square_pixels_ = true;
}
- if (current_settings_.width == frame_info.width &&
- current_settings_.height == frame_info.height) {
+ if (capture_format_.frame_size == frame_format.frame_size) {
sent_frame_info_ = true;
} else {
UpdateCaptureResolution();
@@ -294,8 +290,10 @@ void VideoCaptureDeviceMac::ReceiveFrame(
}
}
- DCHECK(current_settings_.width == frame_info.width &&
- current_settings_.height == frame_info.height);
+ DCHECK_EQ(capture_format_.frame_size.width(),
+ frame_format.frame_size.width());
+ DCHECK_EQ(capture_format_.frame_size.height(),
+ frame_format.frame_size.height());
client_->OnIncomingCapturedFrame(video_frame,
video_frame_length,
@@ -303,7 +301,7 @@ void VideoCaptureDeviceMac::ReceiveFrame(
0,
false,
false,
- current_settings_);
+ capture_format_);
}
void VideoCaptureDeviceMac::ReceiveError(const std::string& reason) {
@@ -320,9 +318,9 @@ void VideoCaptureDeviceMac::SetErrorState(const std::string& reason) {
}
bool VideoCaptureDeviceMac::UpdateCaptureResolution() {
- if (![capture_device_ setCaptureHeight:current_settings_.height
- width:current_settings_.width
- frameRate:current_settings_.frame_rate]) {
+ if (![capture_device_ setCaptureHeight:capture_format_.frame_size.height()
+ width:capture_format_.frame_size.width()
+ frameRate:capture_format_.frame_rate]) {
ReceiveError("Could not configure capture device.");
return false;
}
diff --git a/media/video/capture/mac/video_capture_device_qtkit_mac.mm b/media/video/capture/mac/video_capture_device_qtkit_mac.mm
index 8a778a1..19ee7dc 100644
--- a/media/video/capture/mac/video_capture_device_qtkit_mac.mm
+++ b/media/video/capture/mac/video_capture_device_qtkit_mac.mm
@@ -281,11 +281,10 @@
addressToPass = adjustedAddress;
frameSize = frameHeight * expectedBytesPerRow;
}
- media::VideoCaptureCapability captureCapability;
- captureCapability.width = frameWidth;
- captureCapability.height = frameHeight;
- captureCapability.frame_rate = frameRate_;
- captureCapability.color = media::PIXEL_FORMAT_UYVY;
+
+ media::VideoCaptureFormat captureFormat(gfx::Size(frameWidth, frameHeight),
+ frameRate_,
+ media::PIXEL_FORMAT_UYVY);
// The aspect ratio dictionary is often missing, in which case we report
// a pixel aspect ratio of 0:0.
@@ -305,7 +304,7 @@
}
// Deliver the captured video frame.
- frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureCapability,
+ frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureFormat,
aspectNumerator, aspectDenominator);
CVPixelBufferUnlockBaseAddress(videoFrame, kLockFlags);
diff --git a/media/video/capture/video_capture_device.h b/media/video/capture/video_capture_device.h
index 74a8d27..c4930be 100644
--- a/media/video/capture/video_capture_device.h
+++ b/media/video/capture/video_capture_device.h
@@ -20,6 +20,7 @@
#include "base/memory/scoped_ptr.h"
#include "base/time/time.h"
#include "media/base/media_export.h"
+#include "media/base/video_frame.h"
#include "media/video/capture/video_capture_types.h"
namespace media {
@@ -172,7 +173,7 @@ class MEDIA_EXPORT VideoCaptureDevice {
int rotation, // Clockwise.
bool flip_vert,
bool flip_horiz,
- const VideoCaptureCapability& frame_info) = 0;
+ const VideoCaptureFormat& frame_format) = 0;
// Captured a new video frame, held in |buffer|.
//
@@ -208,12 +209,10 @@ class MEDIA_EXPORT VideoCaptureDevice {
VideoCaptureCapabilities* formats);
// Prepare the camera for use. After this function has been called no other
- // applications can use the camera. On completion Client::OnFrameInfo()
- // is called informing of the resulting resolution and frame rate.
- // StopAndDeAllocate() must be called before the object is deleted.
- virtual void AllocateAndStart(
- const VideoCaptureCapability& capture_format,
- scoped_ptr<Client> client) = 0;
+ // applications can use the camera. StopAndDeAllocate() must be called before
+ // the object is deleted.
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
+ scoped_ptr<Client> client) = 0;
// Deallocates the camera, possibly asynchronously.
//
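
A minimal caller sketch for the reworked interface, mirroring the device unit tests later in this CL rather than prescribing new API; |device| and |client| stand in for a concrete VideoCaptureDevice and a scoped_ptr to its Client:

  media::VideoCaptureParams params;
  params.requested_format = media::VideoCaptureFormat(
      gfx::Size(1280, 720), 30, media::PIXEL_FORMAT_I420);
  params.allow_resolution_change = false;
  device->AllocateAndStart(params, client.Pass());
  // Frames are now delivered tagged with a per-frame VideoCaptureFormat via
  // Client::OnIncomingCapturedFrame().
  device->StopAndDeAllocate();
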
diff --git a/media/video/capture/video_capture_device_unittest.cc b/media/video/capture/video_capture_device_unittest.cc
index 3b470d1..705167d 100644
--- a/media/video/capture/video_capture_device_unittest.cc
+++ b/media/video/capture/video_capture_device_unittest.cc
@@ -58,7 +58,6 @@ using ::testing::_;
using ::testing::AnyNumber;
using ::testing::Return;
using ::testing::AtLeast;
-using ::testing::SaveArg;
namespace media {
@@ -69,8 +68,7 @@ class MockClient : public media::VideoCaptureDevice::Client {
const gfx::Size& dimensions));
MOCK_METHOD0(OnErr, void());
- explicit MockClient(
- base::Callback<void(const VideoCaptureCapability&)> frame_cb)
+ explicit MockClient(base::Callback<void(const VideoCaptureFormat&)> frame_cb)
: main_thread_(base::MessageLoopProxy::current()), frame_cb_(frame_cb) {}
virtual void OnError() OVERRIDE {
@@ -83,9 +81,9 @@ class MockClient : public media::VideoCaptureDevice::Client {
int rotation,
bool flip_vert,
bool flip_horiz,
- const VideoCaptureCapability& frame_info)
+ const VideoCaptureFormat& format)
OVERRIDE {
- main_thread_->PostTask(FROM_HERE, base::Bind(frame_cb_, frame_info));
+ main_thread_->PostTask(FROM_HERE, base::Bind(frame_cb_, format));
}
virtual void OnIncomingCapturedBuffer(const scoped_refptr<Buffer>& buffer,
@@ -98,7 +96,7 @@ class MockClient : public media::VideoCaptureDevice::Client {
private:
scoped_refptr<base::MessageLoopProxy> main_thread_;
- base::Callback<void(const VideoCaptureCapability&)> frame_cb_;
+ base::Callback<void(const VideoCaptureFormat&)> frame_cb_;
};
class VideoCaptureDeviceTest : public testing::Test {
@@ -123,8 +121,8 @@ class VideoCaptureDeviceTest : public testing::Test {
&VideoCaptureDeviceTest::OnFrameCaptured, base::Unretained(this))));
}
- void OnFrameCaptured(const VideoCaptureCapability& frame_info) {
- last_frame_info_ = frame_info;
+ void OnFrameCaptured(const VideoCaptureFormat& format) {
+ last_format_ = format;
run_loop_->QuitClosure().Run();
}
@@ -133,9 +131,7 @@ class VideoCaptureDeviceTest : public testing::Test {
run_loop_->Run();
}
- const VideoCaptureCapability& last_frame_info() const {
- return last_frame_info_;
- }
+ const VideoCaptureFormat& last_format() const { return last_format_; }
#if defined(OS_WIN)
base::win::ScopedCOMInitializer initialize_com_;
@@ -144,7 +140,7 @@ class VideoCaptureDeviceTest : public testing::Test {
scoped_ptr<base::MessageLoop> loop_;
scoped_ptr<base::RunLoop> run_loop_;
scoped_ptr<MockClient> client_;
- VideoCaptureCapability last_frame_info_;
+ VideoCaptureFormat last_format_;
};
TEST_F(VideoCaptureDeviceTest, OpenInvalidDevice) {
@@ -176,17 +172,16 @@ TEST_F(VideoCaptureDeviceTest, CaptureVGA) {
EXPECT_CALL(*client_, OnErr())
.Times(0);
- VideoCaptureCapability capture_format(640,
- 480,
- 30,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
- device->AllocateAndStart(capture_format,
- client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(640, 480);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
// Get captured video frames.
WaitForCapturedFrame();
- EXPECT_EQ(last_frame_info().width, 640);
- EXPECT_EQ(last_frame_info().height, 480);
+ EXPECT_EQ(last_format().frame_size.width(), 640);
+ EXPECT_EQ(last_format().frame_size.height(), 480);
device->StopAndDeAllocate();
}
@@ -204,13 +199,12 @@ TEST_F(VideoCaptureDeviceTest, Capture720p) {
EXPECT_CALL(*client_, OnErr())
.Times(0);
- VideoCaptureCapability capture_format(1280,
- 720,
- 30,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
- device->AllocateAndStart(capture_format,
- client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(1280, 720);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
// Get captured video frames.
WaitForCapturedFrame();
device->StopAndDeAllocate();
@@ -229,17 +223,16 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_AllocateBadSize) {
EXPECT_CALL(*client_, OnErr())
.Times(0);
- VideoCaptureCapability capture_format(637,
- 472,
- 35,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
- device->AllocateAndStart(capture_format,
- client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(637, 472);
+ capture_params.requested_format.frame_rate = 35;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
WaitForCapturedFrame();
device->StopAndDeAllocate();
- EXPECT_EQ(last_frame_info().width, 640);
- EXPECT_EQ(last_frame_info().height, 480);
+ EXPECT_EQ(last_format().frame_size.width(), 640);
+ EXPECT_EQ(last_format().frame_size.height(), 480);
}
TEST_F(VideoCaptureDeviceTest, ReAllocateCamera) {
@@ -260,36 +253,32 @@ TEST_F(VideoCaptureDeviceTest, ReAllocateCamera) {
} else {
resolution = gfx::Size(1280, 1024);
}
- VideoCaptureCapability requested_format(
- resolution.width(),
- resolution.height(),
- 30,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
-
- device->AllocateAndStart(requested_format, client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size = resolution;
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
device->StopAndDeAllocate();
}
// Finally, do a device start and wait for it to finish.
- gfx::Size resolution;
- VideoCaptureCapability requested_format(
- 320,
- 240,
- 30,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(320, 240);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
ResetWithNewClient();
scoped_ptr<VideoCaptureDevice> device(
VideoCaptureDevice::Create(names_.front()));
- device->AllocateAndStart(requested_format, client_.PassAs<Client>());
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
WaitForCapturedFrame();
device->StopAndDeAllocate();
device.reset();
- EXPECT_EQ(last_frame_info().width, 320);
- EXPECT_EQ(last_frame_info().height, 240);
+ EXPECT_EQ(last_format().frame_size.width(), 320);
+ EXPECT_EQ(last_format().frame_size.height(), 240);
}
TEST_F(VideoCaptureDeviceTest, DeAllocateCameraWhileRunning) {
@@ -305,17 +294,17 @@ TEST_F(VideoCaptureDeviceTest, DeAllocateCameraWhileRunning) {
EXPECT_CALL(*client_, OnErr())
.Times(0);
- VideoCaptureCapability capture_format(640,
- 480,
- 30,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
- device->AllocateAndStart(capture_format, client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(640, 480);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
// Get captured video frames.
WaitForCapturedFrame();
- EXPECT_EQ(last_frame_info().width, 640);
- EXPECT_EQ(last_frame_info().height, 480);
- EXPECT_EQ(last_frame_info().frame_rate, 30);
+ EXPECT_EQ(last_format().frame_size.width(), 640);
+ EXPECT_EQ(last_format().frame_size.height(), 480);
+ EXPECT_EQ(last_format().frame_rate, 30);
device->StopAndDeAllocate();
}
@@ -333,17 +322,16 @@ TEST_F(VideoCaptureDeviceTest, FakeCapture) {
EXPECT_CALL(*client_, OnErr())
.Times(0);
- VideoCaptureCapability capture_format(640,
- 480,
- 30,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
- device->AllocateAndStart(capture_format,
- client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(640, 480);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
WaitForCapturedFrame();
- EXPECT_EQ(last_frame_info().width, 640);
- EXPECT_EQ(last_frame_info().height, 480);
- EXPECT_EQ(last_frame_info().frame_rate, 30);
+ EXPECT_EQ(last_format().frame_size.width(), 640);
+ EXPECT_EQ(last_format().frame_size.height(), 480);
+ EXPECT_EQ(last_format().frame_rate, 30);
device->StopAndDeAllocate();
}
@@ -361,17 +349,17 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_CaptureMjpeg) {
EXPECT_CALL(*client_, OnErr())
.Times(0);
- VideoCaptureCapability capture_format(1280,
- 720,
- 30,
- PIXEL_FORMAT_MJPEG,
- ConstantResolutionVideoCaptureDevice);
- device->AllocateAndStart(capture_format, client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(1280, 720);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_MJPEG;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
// Get captured video frames.
WaitForCapturedFrame();
// Verify we get MJPEG from the device. Not all devices can capture 1280x720
// @ 30 fps, so we don't care about the exact resolution we get.
- EXPECT_EQ(last_frame_info().color, PIXEL_FORMAT_MJPEG);
+ EXPECT_EQ(last_format().pixel_format, PIXEL_FORMAT_MJPEG);
device->StopAndDeAllocate();
}
@@ -381,12 +369,12 @@ TEST_F(VideoCaptureDeviceTest, GetDeviceSupportedFormats) {
DVLOG(1) << "No camera available. Exiting test.";
return;
}
- VideoCaptureCapabilities capture_formats;
+ VideoCaptureCapabilities capture_capabilities;
VideoCaptureDevice::Names::iterator names_iterator;
for (names_iterator = names_.begin(); names_iterator != names_.end();
++names_iterator) {
VideoCaptureDevice::GetDeviceSupportedFormats(*names_iterator,
- &capture_formats);
+ &capture_capabilities);
// Nothing to test here since we cannot forecast the hardware capabilities.
}
}
@@ -395,11 +383,11 @@ TEST_F(VideoCaptureDeviceTest, FakeCaptureVariableResolution) {
VideoCaptureDevice::Names names;
FakeVideoCaptureDevice::GetDeviceNames(&names);
- media::VideoCaptureCapability capture_format;
- capture_format.width = 640;
- capture_format.height = 480;
- capture_format.frame_rate = 30;
- capture_format.frame_size_type = media::VariableResolutionVideoCaptureDevice;
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(640, 480);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = true;
ASSERT_GT(static_cast<int>(names.size()), 0);
@@ -411,7 +399,7 @@ TEST_F(VideoCaptureDeviceTest, FakeCaptureVariableResolution) {
.Times(0);
int action_count = 200;
- device->AllocateAndStart(capture_format, client_.PassAs<Client>());
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
// We set TimeWait to 200 action timeouts and this should be enough for at
// least action_count/kFakeCaptureCapabilityChangePeriod calls.
@@ -425,18 +413,20 @@ TEST_F(VideoCaptureDeviceTest, FakeGetDeviceSupportedFormats) {
VideoCaptureDevice::Names names;
FakeVideoCaptureDevice::GetDeviceNames(&names);
- VideoCaptureCapabilities capture_formats;
+ VideoCaptureCapabilities capture_capabilities;
VideoCaptureDevice::Names::iterator names_iterator;
for (names_iterator = names.begin(); names_iterator != names.end();
++names_iterator) {
FakeVideoCaptureDevice::GetDeviceSupportedFormats(*names_iterator,
- &capture_formats);
- EXPECT_GE(capture_formats.size(), 1u);
- EXPECT_EQ(capture_formats[0].width, 640);
- EXPECT_EQ(capture_formats[0].height, 480);
- EXPECT_EQ(capture_formats[0].color, media::PIXEL_FORMAT_I420);
- EXPECT_GE(capture_formats[0].frame_rate, 20);
+ &capture_capabilities);
+ EXPECT_GE(capture_capabilities.size(), 1u);
+ EXPECT_EQ(capture_capabilities[0].supported_format.frame_size.width(), 640);
+ EXPECT_EQ(capture_capabilities[0].supported_format.frame_size.height(),
+ 480);
+ EXPECT_EQ(capture_capabilities[0].supported_format.pixel_format,
+ media::PIXEL_FORMAT_I420);
+ EXPECT_GE(capture_capabilities[0].supported_format.frame_rate, 20);
}
}
diff --git a/media/video/capture/video_capture_proxy.cc b/media/video/capture/video_capture_proxy.cc
index bbbd610..d488c50 100644
--- a/media/video/capture/video_capture_proxy.cc
+++ b/media/video/capture/video_capture_proxy.cc
@@ -7,6 +7,7 @@
#include "base/bind.h"
#include "base/location.h"
#include "base/message_loop/message_loop_proxy.h"
+#include "media/base/video_frame.h"
namespace {
diff --git a/media/video/capture/video_capture_types.cc b/media/video/capture/video_capture_types.cc
index 48f03f1..96e5236 100644
--- a/media/video/capture/video_capture_types.cc
+++ b/media/video/capture/video_capture_types.cc
@@ -9,44 +9,28 @@
namespace media {
VideoCaptureFormat::VideoCaptureFormat()
- : width(0),
- height(0),
- frame_rate(0),
- frame_size_type(ConstantResolutionVideoCaptureDevice) {}
-
-VideoCaptureFormat::VideoCaptureFormat(
- int width,
- int height,
- int frame_rate,
- VideoCaptureResolutionType frame_size_type)
- : width(width),
- height(height),
+ : frame_rate(0), pixel_format(PIXEL_FORMAT_UNKNOWN) {}
+
+VideoCaptureFormat::VideoCaptureFormat(const gfx::Size& frame_size,
+ int frame_rate,
+ VideoPixelFormat pixel_format)
+ : frame_size(frame_size),
frame_rate(frame_rate),
- frame_size_type(frame_size_type) {}
+ pixel_format(pixel_format) {}
bool VideoCaptureFormat::IsValid() const {
- return (width > 0) && (height > 0) && (frame_rate > 0) &&
+ return (frame_size.width() < media::limits::kMaxDimension) &&
+ (frame_size.height() < media::limits::kMaxDimension) &&
+ (frame_size.GetArea() > 0) &&
+ (frame_size.GetArea() < media::limits::kMaxCanvas) &&
+ (frame_rate > 0) &&
(frame_rate < media::limits::kMaxFramesPerSecond) &&
- (width < media::limits::kMaxDimension) &&
- (height < media::limits::kMaxDimension) &&
- (width * height < media::limits::kMaxCanvas) &&
- (frame_size_type >= 0) &&
- (frame_size_type < media::MaxVideoCaptureResolutionType);
+ (pixel_format >= PIXEL_FORMAT_UNKNOWN) &&
+ (pixel_format < PIXEL_FORMAT_MAX);
}
-VideoCaptureParams::VideoCaptureParams()
- : session_id(0) {}
-
-VideoCaptureCapability::VideoCaptureCapability()
- : color(PIXEL_FORMAT_UNKNOWN) {}
+VideoCaptureParams::VideoCaptureParams() : allow_resolution_change(false) {}
-VideoCaptureCapability::VideoCaptureCapability(
- int width,
- int height,
- int frame_rate,
- VideoPixelFormat color,
- VideoCaptureResolutionType frame_size_type)
- : VideoCaptureFormat(width, height, frame_rate, frame_size_type),
- color(color) {}
+VideoCaptureCapability::VideoCaptureCapability() {}
} // namespace media
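
IsValid() now checks the gfx::Size-based geometry and the per-frame pixel format in one place. A minimal sketch of what passes and what fails under the checks above (the media::limits constants are assumed to be in scope):

  media::VideoCaptureFormat hd(gfx::Size(1280, 720), 30, media::PIXEL_FORMAT_I420);
  CHECK(hd.IsValid());              // positive area, sane rate, known pixel format

  media::VideoCaptureFormat unset;  // default: 0x0, 0 fps, PIXEL_FORMAT_UNKNOWN
  CHECK(!unset.IsValid());          // zero area and zero frame rate both fail
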
diff --git a/media/video/capture/video_capture_types.h b/media/video/capture/video_capture_types.h
index 489befe..d06c1b4 100644
--- a/media/video/capture/video_capture_types.h
+++ b/media/video/capture/video_capture_types.h
@@ -5,7 +5,10 @@
#ifndef MEDIA_VIDEO_CAPTURE_VIDEO_CAPTURE_TYPES_H_
#define MEDIA_VIDEO_CAPTURE_VIDEO_CAPTURE_TYPES_H_
-#include "media/base/video_frame.h"
+#include <vector>
+
+#include "media/base/media_export.h"
+#include "ui/gfx/size.h"
namespace media {
@@ -13,12 +16,6 @@ namespace media {
// shared with device manager.
typedef int VideoCaptureSessionId;
-enum VideoCaptureResolutionType {
- ConstantResolutionVideoCaptureDevice = 0,
- VariableResolutionVideoCaptureDevice,
- MaxVideoCaptureResolutionType, // Must be last.
-};
-
// Color formats from camera.
enum VideoPixelFormat {
PIXEL_FORMAT_UNKNOWN, // Color format not set.
@@ -30,49 +27,52 @@ enum VideoPixelFormat {
PIXEL_FORMAT_MJPEG,
PIXEL_FORMAT_NV21,
PIXEL_FORMAT_YV12,
+ PIXEL_FORMAT_MAX,
};
// Video capture format specification.
+// This class is used by the video capture device to specify the format of every
+// frame captured and returned to a client.
class MEDIA_EXPORT VideoCaptureFormat {
public:
VideoCaptureFormat();
- VideoCaptureFormat(int width,
- int height,
+ VideoCaptureFormat(const gfx::Size& frame_size,
int frame_rate,
- VideoCaptureResolutionType frame_size_type);
+ VideoPixelFormat pixel_format);
// Checks that all values are in the expected range. All limits are specified
// in media::Limits.
bool IsValid() const;
- int width;
- int height;
+ gfx::Size frame_size;
int frame_rate;
- VideoCaptureResolutionType frame_size_type;
+ VideoPixelFormat pixel_format;
};
// Parameters for starting video capture.
+// This class is used by the client of a video capture device to specify the
+// format in which it would like captured frames to be returned.
class MEDIA_EXPORT VideoCaptureParams {
public:
VideoCaptureParams();
- // Identifies which device is to be started.
- VideoCaptureSessionId session_id;
// Requests a resolution and format at which the capture will occur.
VideoCaptureFormat requested_format;
+
+ // Allow mid-capture resolution change.
+ bool allow_resolution_change;
};
// Capabilities describe the format a camera captures video in.
-class MEDIA_EXPORT VideoCaptureCapability : public VideoCaptureFormat {
+// This class is used by the video capture device to report the formats in which
+// it is capable of capturing frames.
+class MEDIA_EXPORT VideoCaptureCapability {
public:
VideoCaptureCapability();
- VideoCaptureCapability(int width,
- int height,
- int frame_rate,
- VideoPixelFormat color,
- VideoCaptureResolutionType frame_size_type);
- VideoPixelFormat color; // Desired video type.
+ // Supported resolution and format.
+ VideoCaptureFormat supported_format;
};
typedef std::vector<VideoCaptureCapability> VideoCaptureCapabilities;
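
After this split each type has a single role: VideoCaptureFormat describes one frame, VideoCaptureCapability describes what a device can produce, and VideoCaptureParams describes what a client asks for. A minimal sketch of that division of labor (only the media:: types come from this header; the variable names are illustrative):

  media::VideoCaptureCapability hd_cap;
  hd_cap.supported_format =
      media::VideoCaptureFormat(gfx::Size(1280, 720), 30, media::PIXEL_FORMAT_I420);
  media::VideoCaptureCapabilities device_caps;
  device_caps.push_back(hd_cap);            // reported by the device

  media::VideoCaptureParams params;         // requested by the client
  params.requested_format = device_caps[0].supported_format;
  params.allow_resolution_change = false;
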
diff --git a/media/video/capture/win/capability_list_win.cc b/media/video/capture/win/capability_list_win.cc
index 18325bb..bfa58ed 100644
--- a/media/video/capture/win/capability_list_win.cc
+++ b/media/video/capture/win/capability_list_win.cc
@@ -33,7 +33,8 @@ bool CompareFrameRate(const ResolutionDiff& item1,
}
bool CompareColor(const ResolutionDiff& item1, const ResolutionDiff& item2) {
- return item1.capability->color < item2.capability->color;
+ return item1.capability->supported_format.pixel_format <
+ item2.capability->supported_format.pixel_format;
}
} // namespace.
@@ -50,7 +51,7 @@ void CapabilityList::Add(const VideoCaptureCapabilityWin& capability) {
capabilities_.push_back(capability);
}
-const VideoCaptureCapabilityWin& CapabilityList::GetBestMatchedCapability(
+const VideoCaptureCapabilityWin& CapabilityList::GetBestMatchedFormat(
int requested_width,
int requested_height,
int requested_frame_rate) const {
@@ -65,8 +66,9 @@ const VideoCaptureCapabilityWin& CapabilityList::GetBestMatchedCapability(
it != capabilities_.end(); ++it) {
ResolutionDiff diff;
diff.capability = &(*it);
- diff.diff_width = it->width - requested_width;
- diff.diff_height = it->height - requested_height;
+ diff.diff_width = it->supported_format.frame_size.width() - requested_width;
+ diff.diff_height =
+ it->supported_format.frame_size.height() - requested_height;
// The 1000 allows using integer arithmetic for f.i. 29.971 fps.
diff.diff_frame_rate =
1000 * ((static_cast<float>(it->frame_rate_numerator) /
diff --git a/media/video/capture/win/capability_list_win.h b/media/video/capture/win/capability_list_win.h
index c07b220..05ce8e8 100644
--- a/media/video/capture/win/capability_list_win.h
+++ b/media/video/capture/win/capability_list_win.h
@@ -16,7 +16,7 @@
namespace media {
-struct VideoCaptureCapabilityWin : public VideoCaptureCapability {
+struct VideoCaptureCapabilityWin {
explicit VideoCaptureCapabilityWin(int index)
: stream_index(index),
frame_rate_numerator(0),
@@ -26,6 +26,7 @@ struct VideoCaptureCapabilityWin : public VideoCaptureCapability {
// so framerates can be properly represented, f.i. 29.971fps= 30000/1001.
int frame_rate_numerator;
int frame_rate_denominator;
+ VideoCaptureFormat supported_format;
};
class CapabilityList : public base::NonThreadSafe {
@@ -41,8 +42,9 @@ class CapabilityList : public base::NonThreadSafe {
// Loops through the list of capabilities and returns an index of the best
// matching capability. The algorithm prioritizes height, width, frame rate
// and color format in that order.
- const VideoCaptureCapabilityWin& GetBestMatchedCapability(
- int requested_width, int requested_height,
+ const VideoCaptureCapabilityWin& GetBestMatchedFormat(
+ int requested_width,
+ int requested_height,
int requested_frame_rate) const;
private:
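
GetBestMatchedFormat() keeps the old matching order (height, width, frame rate, then color) but now reads every field through supported_format. A minimal sketch of how a Windows device backend would use it, assuming CapabilityList is default-constructible as before:

  media::CapabilityList capabilities;
  media::VideoCaptureCapabilityWin cap(0);              // stream index 0
  cap.supported_format = media::VideoCaptureFormat(
      gfx::Size(640, 480), 30, media::PIXEL_FORMAT_YUY2);
  cap.frame_rate_numerator = 30000;                     // 29.97 fps as 30000/1001
  cap.frame_rate_denominator = 1001;
  capabilities.Add(cap);

  const media::VideoCaptureCapabilityWin& best =
      capabilities.GetBestMatchedFormat(640, 480, 30);
  // best.supported_format is what then gets handed to the sink filter.
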
diff --git a/media/video/capture/win/sink_filter_win.cc b/media/video/capture/win/sink_filter_win.cc
index c3fc410..e3bb0a5 100644
--- a/media/video/capture/win/sink_filter_win.cc
+++ b/media/video/capture/win/sink_filter_win.cc
@@ -28,13 +28,12 @@ SinkFilter::~SinkFilter() {
input_pin_->SetOwner(NULL);
}
-void SinkFilter::SetRequestedMediaCapability(
- const VideoCaptureCapability& capability) {
- input_pin_->SetRequestedMediaCapability(capability);
+void SinkFilter::SetRequestedMediaFormat(const VideoCaptureFormat& format) {
+ input_pin_->SetRequestedMediaFormat(format);
}
-const VideoCaptureCapability& SinkFilter::ResultingCapability() {
- return input_pin_->ResultingCapability();
+const VideoCaptureFormat& SinkFilter::ResultingFormat() {
+ return input_pin_->ResultingFormat();
}
size_t SinkFilter::NoOfPins() {
diff --git a/media/video/capture/win/sink_filter_win.h b/media/video/capture/win/sink_filter_win.h
index 36bb124..e454f0b 100644
--- a/media/video/capture/win/sink_filter_win.h
+++ b/media/video/capture/win/sink_filter_win.h
@@ -32,11 +32,10 @@ class __declspec(uuid("88cdbbdc-a73b-4afa-acbf-15d5e2ce12c3"))
explicit SinkFilter(SinkFilterObserver* observer);
virtual ~SinkFilter();
- void SetRequestedMediaCapability(
- const VideoCaptureCapability& capability);
- // Returns the capability that is negotiated when this
+ void SetRequestedMediaFormat(const VideoCaptureFormat& format);
+ // Returns the format that is negotiated when this
// filter is connected to a media filter.
- const VideoCaptureCapability& ResultingCapability();
+ const VideoCaptureFormat& ResultingFormat();
// Implement FilterBase.
virtual size_t NoOfPins();
diff --git a/media/video/capture/win/sink_input_pin_win.cc b/media/video/capture/win/sink_input_pin_win.cc
index 9d97918..0126e13 100644
--- a/media/video/capture/win/sink_input_pin_win.cc
+++ b/media/video/capture/win/sink_input_pin_win.cc
@@ -20,8 +20,6 @@ SinkInputPin::SinkInputPin(IBaseFilter* filter,
SinkFilterObserver* observer)
: observer_(observer),
PinBase(filter) {
- memset(&requested_capability_, 0, sizeof(requested_capability_));
- memset(&resulting_capability_, 0, sizeof(resulting_capability_));
}
SinkInputPin::~SinkInputPin() {}
@@ -38,9 +36,9 @@ bool SinkInputPin::GetValidMediaType(int index, AM_MEDIA_TYPE* media_type) {
pvi->bmiHeader.biPlanes = 1;
pvi->bmiHeader.biClrImportant = 0;
pvi->bmiHeader.biClrUsed = 0;
- if (requested_capability_.frame_rate > 0) {
- pvi->AvgTimePerFrame = kSecondsToReferenceTime /
- requested_capability_.frame_rate;
+ if (requested_format_.frame_rate > 0) {
+ pvi->AvgTimePerFrame =
+ kSecondsToReferenceTime / requested_format_.frame_rate;
}
media_type->majortype = MEDIATYPE_Video;
@@ -51,30 +49,28 @@ bool SinkInputPin::GetValidMediaType(int index, AM_MEDIA_TYPE* media_type) {
case 0: {
pvi->bmiHeader.biCompression = MAKEFOURCC('I', '4', '2', '0');
pvi->bmiHeader.biBitCount = 12; // bit per pixel
- pvi->bmiHeader.biWidth = requested_capability_.width;
- pvi->bmiHeader.biHeight = requested_capability_.height;
- pvi->bmiHeader.biSizeImage = 3 * requested_capability_.height *
- requested_capability_.width / 2;
+ pvi->bmiHeader.biWidth = requested_format_.frame_size.width();
+ pvi->bmiHeader.biHeight = requested_format_.frame_size.height();
+ pvi->bmiHeader.biSizeImage =
+ requested_format_.frame_size.GetArea() * 3 / 2;
media_type->subtype = kMediaSubTypeI420;
break;
}
case 1: {
pvi->bmiHeader.biCompression = MAKEFOURCC('Y', 'U', 'Y', '2');
pvi->bmiHeader.biBitCount = 16;
- pvi->bmiHeader.biWidth = requested_capability_.width;
- pvi->bmiHeader.biHeight = requested_capability_.height;
- pvi->bmiHeader.biSizeImage = 2 * requested_capability_.width *
- requested_capability_.height;
+ pvi->bmiHeader.biWidth = requested_format_.frame_size.width();
+ pvi->bmiHeader.biHeight = requested_format_.frame_size.height();
+ pvi->bmiHeader.biSizeImage = requested_format_.frame_size.GetArea() * 2;
media_type->subtype = MEDIASUBTYPE_YUY2;
break;
}
case 2: {
pvi->bmiHeader.biCompression = BI_RGB;
pvi->bmiHeader.biBitCount = 24;
- pvi->bmiHeader.biWidth = requested_capability_.width;
- pvi->bmiHeader.biHeight = requested_capability_.height;
- pvi->bmiHeader.biSizeImage = 3 * requested_capability_.height *
- requested_capability_.width;
+ pvi->bmiHeader.biWidth = requested_format_.frame_size.width();
+ pvi->bmiHeader.biHeight = requested_format_.frame_size.height();
+ pvi->bmiHeader.biSizeImage = requested_format_.frame_size.GetArea() * 3;
media_type->subtype = MEDIASUBTYPE_RGB24;
break;
}
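
The biSizeImage values above are just bytes-per-pixel times frame area: I420 is 12 bits per pixel, YUY2 is 16, and RGB24 is 24. A minimal sketch of the same arithmetic (the helpers are illustrative, not part of the pin):

  int I420ImageSize(const gfx::Size& s)  { return s.GetArea() * 12 / 8; }  // == area * 3 / 2
  int YUY2ImageSize(const gfx::Size& s)  { return s.GetArea() * 16 / 8; }  // == area * 2
  int RGB24ImageSize(const gfx::Size& s) { return s.GetArea() * 24 / 8; }  // == area * 3
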
@@ -104,27 +100,27 @@ bool SinkInputPin::IsMediaTypeValid(const AM_MEDIA_TYPE* media_type) {
return false;
// Store the incoming width and height.
- resulting_capability_.width = pvi->bmiHeader.biWidth;
- resulting_capability_.height = abs(pvi->bmiHeader.biHeight);
+ resulting_format_.frame_size.SetSize(pvi->bmiHeader.biWidth,
+ abs(pvi->bmiHeader.biHeight));
if (pvi->AvgTimePerFrame > 0) {
- resulting_capability_.frame_rate =
+ resulting_format_.frame_rate =
static_cast<int>(kSecondsToReferenceTime / pvi->AvgTimePerFrame);
} else {
- resulting_capability_.frame_rate = requested_capability_.frame_rate;
+ resulting_format_.frame_rate = requested_format_.frame_rate;
}
if (sub_type == kMediaSubTypeI420 &&
pvi->bmiHeader.biCompression == MAKEFOURCC('I', '4', '2', '0')) {
- resulting_capability_.color = PIXEL_FORMAT_I420;
+ resulting_format_.pixel_format = PIXEL_FORMAT_I420;
return true; // This format is acceptable.
}
if (sub_type == MEDIASUBTYPE_YUY2 &&
pvi->bmiHeader.biCompression == MAKEFOURCC('Y', 'U', 'Y', '2')) {
- resulting_capability_.color = PIXEL_FORMAT_YUY2;
+ resulting_format_.pixel_format = PIXEL_FORMAT_YUY2;
return true; // This format is acceptable.
}
if (sub_type == MEDIASUBTYPE_RGB24 &&
pvi->bmiHeader.biCompression == BI_RGB) {
- resulting_capability_.color = PIXEL_FORMAT_RGB24;
+ resulting_format_.pixel_format = PIXEL_FORMAT_RGB24;
return true; // This format is acceptable.
}
return false;
@@ -140,17 +136,15 @@ HRESULT SinkInputPin::Receive(IMediaSample* sample) {
return S_OK;
}
-void SinkInputPin::SetRequestedMediaCapability(
- const VideoCaptureCapability& capability) {
- requested_capability_ = capability;
- resulting_capability_.width = 0;
- resulting_capability_.height = 0;
- resulting_capability_.frame_rate = 0;
- resulting_capability_.color = PIXEL_FORMAT_UNKNOWN;
+void SinkInputPin::SetRequestedMediaFormat(const VideoCaptureFormat& format) {
+ requested_format_ = format;
+ resulting_format_.frame_size.SetSize(0, 0);
+ resulting_format_.frame_rate = 0;
+ resulting_format_.pixel_format = PIXEL_FORMAT_UNKNOWN;
}
-const VideoCaptureCapability& SinkInputPin::ResultingCapability() {
- return resulting_capability_;
+const VideoCaptureFormat& SinkInputPin::ResultingFormat() {
+ return resulting_format_;
}
} // namespace media
diff --git a/media/video/capture/win/sink_input_pin_win.h b/media/video/capture/win/sink_input_pin_win.h
index 16168a3..f14ca33 100644
--- a/media/video/capture/win/sink_input_pin_win.h
+++ b/media/video/capture/win/sink_input_pin_win.h
@@ -24,10 +24,10 @@ class SinkInputPin : public PinBase {
SinkInputPin(IBaseFilter* filter, SinkFilterObserver* observer);
virtual ~SinkInputPin();
- void SetRequestedMediaCapability(const VideoCaptureCapability& capability);
+ void SetRequestedMediaFormat(const VideoCaptureFormat& format);
// Returns the capability that is negotiated when this
// pin is connected to a media filter.
- const VideoCaptureCapability& ResultingCapability();
+ const VideoCaptureFormat& ResultingFormat();
// Implement PinBase.
virtual bool IsMediaTypeValid(const AM_MEDIA_TYPE* media_type);
@@ -36,8 +36,8 @@ class SinkInputPin : public PinBase {
STDMETHOD(Receive)(IMediaSample* media_sample);
private:
- VideoCaptureCapability requested_capability_;
- VideoCaptureCapability resulting_capability_;
+ VideoCaptureFormat requested_format_;
+ VideoCaptureFormat resulting_format_;
SinkFilterObserver* observer_;
DISALLOW_IMPLICIT_CONSTRUCTORS(SinkInputPin);
diff --git a/media/video/capture/win/video_capture_device_mf_win.cc b/media/video/capture/win/video_capture_device_mf_win.cc
index 12694de..f9cfb0a 100644
--- a/media/video/capture/win/video_capture_device_mf_win.cc
+++ b/media/video/capture/win/video_capture_device_mf_win.cc
@@ -94,12 +94,11 @@ bool FormatFromGuid(const GUID& guid, VideoPixelFormat* format) {
return false;
}
-bool GetFrameSize(IMFMediaType* type, int* width, int* height) {
+bool GetFrameSize(IMFMediaType* type, gfx::Size* frame_size) {
UINT32 width32, height32;
if (FAILED(MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width32, &height32)))
return false;
- *width = width32;
- *height = height32;
+ frame_size->SetSize(width32, height32);
return true;
}
@@ -121,15 +120,15 @@ bool FillCapabilitiesFromType(IMFMediaType* type,
VideoCaptureCapabilityWin* capability) {
GUID type_guid;
if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &type_guid)) ||
- !FormatFromGuid(type_guid, &capability->color) ||
- !GetFrameSize(type, &capability->width, &capability->height) ||
+ !GetFrameSize(type, &capability->supported_format.frame_size) ||
!GetFrameRate(type,
&capability->frame_rate_numerator,
- &capability->frame_rate_denominator)) {
+ &capability->frame_rate_denominator) ||
+ !FormatFromGuid(type_guid, &capability->supported_format.pixel_format)) {
return false;
}
// Keep the integer version of the frame_rate for (potential) returns.
- capability->frame_rate =
+ capability->supported_format.frame_rate =
capability->frame_rate_numerator / capability->frame_rate_denominator;
return true;
@@ -337,7 +336,7 @@ bool VideoCaptureDeviceMFWin::Init() {
}
void VideoCaptureDeviceMFWin::AllocateAndStart(
- const VideoCaptureCapability& capture_format,
+ const VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
DCHECK(CalledOnValidThread());
@@ -354,13 +353,10 @@ void VideoCaptureDeviceMFWin::AllocateAndStart(
}
VideoCaptureCapabilityWin found_capability =
- capabilities.GetBestMatchedCapability(capture_format.width,
- capture_format.height,
- capture_format.frame_rate);
- DLOG(INFO) << "Chosen capture format= (" << found_capability.width << "x"
- << found_capability.height << ")@("
- << found_capability.frame_rate_numerator << "/"
- << found_capability.frame_rate_denominator << ")fps";
+ capabilities.GetBestMatchedFormat(
+ params.requested_format.frame_size.width(),
+ params.requested_format.frame_size.height(),
+ params.requested_format.frame_rate);
ScopedComPtr<IMFMediaType> type;
if (FAILED(hr = reader_->GetNativeMediaType(
@@ -377,7 +373,7 @@ void VideoCaptureDeviceMFWin::AllocateAndStart(
OnError(hr);
return;
}
- current_setting_ = found_capability;
+ capture_format_ = found_capability.supported_format;
capture_ = true;
}
@@ -424,7 +420,7 @@ void VideoCaptureDeviceMFWin::OnIncomingCapturedFrame(
rotation,
flip_vert,
flip_horiz,
- current_setting_);
+ capture_format_);
if (capture_) {
HRESULT hr = reader_->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0,
diff --git a/media/video/capture/win/video_capture_device_mf_win.h b/media/video/capture/win/video_capture_device_mf_win.h
index a9191bd..c9da58c 100644
--- a/media/video/capture/win/video_capture_device_mf_win.h
+++ b/media/video/capture/win/video_capture_device_mf_win.h
@@ -38,9 +38,9 @@ class MEDIA_EXPORT VideoCaptureDeviceMFWin
bool Init();
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(
- const VideoCaptureCapability& capture_format,
- scoped_ptr<VideoCaptureDevice::Client> client) OVERRIDE;
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
+ scoped_ptr<VideoCaptureDevice::Client> client)
+ OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
// Returns true iff the current platform supports the Media Foundation API
@@ -71,7 +71,7 @@ class MEDIA_EXPORT VideoCaptureDeviceMFWin
base::Lock lock_; // Used to guard the below variables.
scoped_ptr<VideoCaptureDevice::Client> client_;
base::win::ScopedComPtr<IMFSourceReader> reader_;
- VideoCaptureCapability current_setting_;
+ VideoCaptureFormat capture_format_;
bool capture_;
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceMFWin);
diff --git a/media/video/capture/win/video_capture_device_win.cc b/media/video/capture/win/video_capture_device_win.cc
index a4f0b83..5b24d72 100644
--- a/media/video/capture/win/video_capture_device_win.cc
+++ b/media/video/capture/win/video_capture_device_win.cc
@@ -341,7 +341,7 @@ bool VideoCaptureDeviceWin::Init() {
}
void VideoCaptureDeviceWin::AllocateAndStart(
- const VideoCaptureCapability& capture_format,
+ const VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
DCHECK(CalledOnValidThread());
if (state_ != kIdle)
@@ -351,15 +351,16 @@ void VideoCaptureDeviceWin::AllocateAndStart(
// Get the camera capability that best match the requested resolution.
const VideoCaptureCapabilityWin& found_capability =
- capabilities_.GetBestMatchedCapability(capture_format.width,
- capture_format.height,
- capture_format.frame_rate);
- VideoCaptureCapability capability = found_capability;
+ capabilities_.GetBestMatchedFormat(
+ params.requested_format.frame_size.width(),
+ params.requested_format.frame_size.height(),
+ params.requested_format.frame_rate);
+ VideoCaptureFormat format = found_capability.supported_format;
// Reduce the frame rate if the requested frame rate is lower
// than the capability.
- if (capability.frame_rate > capture_format.frame_rate)
- capability.frame_rate = capture_format.frame_rate;
+ if (format.frame_rate > params.requested_format.frame_rate)
+ format.frame_rate = params.requested_format.frame_rate;
AM_MEDIA_TYPE* pmt = NULL;
VIDEO_STREAM_CONFIG_CAPS caps;
@@ -377,20 +378,19 @@ void VideoCaptureDeviceWin::AllocateAndStart(
if (SUCCEEDED(hr)) {
if (pmt->formattype == FORMAT_VideoInfo) {
VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
- if (capability.frame_rate > 0)
- h->AvgTimePerFrame = kSecondsToReferenceTime / capability.frame_rate;
+ if (format.frame_rate > 0)
+ h->AvgTimePerFrame = kSecondsToReferenceTime / format.frame_rate;
}
- // Set the sink filter to request this capability.
- sink_filter_->SetRequestedMediaCapability(capability);
- // Order the capture device to use this capability.
+ // Set the sink filter to request this format.
+ sink_filter_->SetRequestedMediaFormat(format);
+ // Order the capture device to use this format.
hr = stream_config->SetFormat(pmt);
}
if (FAILED(hr))
SetErrorState("Failed to set capture device output format");
- if (capability.color == PIXEL_FORMAT_MJPEG &&
- !mjpg_filter_.get()) {
+ if (format.pixel_format == PIXEL_FORMAT_MJPEG && !mjpg_filter_.get()) {
// Create MJPG filter if we need it.
hr = mjpg_filter_.CreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC);
@@ -408,8 +408,7 @@ void VideoCaptureDeviceWin::AllocateAndStart(
}
}
- if (capability.color == PIXEL_FORMAT_MJPEG &&
- mjpg_filter_.get()) {
+ if (format.pixel_format == PIXEL_FORMAT_MJPEG && mjpg_filter_.get()) {
// Connect the camera to the MJPEG decoder.
hr = graph_builder_->ConnectDirect(output_capture_pin_, input_mjpg_pin_,
NULL);
@@ -433,9 +432,9 @@ void VideoCaptureDeviceWin::AllocateAndStart(
return;
}
- // Get the capability back from the sink filter after the filter have been
+ // Get the format back from the sink filter after the filter has been
// connected.
- current_setting_ = sink_filter_->ResultingCapability();
+ capture_format_ = sink_filter_->ResultingFormat();
// Start capturing.
hr = media_control_->Run();
@@ -479,7 +478,7 @@ void VideoCaptureDeviceWin::StopAndDeAllocate() {
void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer,
int length) {
client_->OnIncomingCapturedFrame(
- buffer, length, base::Time::Now(), 0, false, false, current_setting_);
+ buffer, length, base::Time::Now(), 0, false, false, capture_format_);
}
bool VideoCaptureDeviceWin::CreateCapabilityMap() {
@@ -522,8 +521,8 @@ bool VideoCaptureDeviceWin::CreateCapabilityMap() {
VideoCaptureCapabilityWin capability(i);
VIDEOINFOHEADER* h =
reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
- capability.width = h->bmiHeader.biWidth;
- capability.height = h->bmiHeader.biHeight;
+ capability.supported_format.frame_size.SetSize(h->bmiHeader.biWidth,
+ h->bmiHeader.biHeight);
// Try to get a better |time_per_frame| from IAMVideoControl. If not, use
// the value from VIDEOINFOHEADER.
@@ -531,7 +530,8 @@ bool VideoCaptureDeviceWin::CreateCapabilityMap() {
if (video_control) {
ScopedCoMem<LONGLONG> max_fps;
LONG list_size = 0;
- SIZE size = { capability.width, capability.height };
+ SIZE size = {capability.supported_format.frame_size.width(),
+ capability.supported_format.frame_size.height()};
// GetFrameRateList doesn't return max frame rate always
// eg: Logitech Notebook. This may be due to a bug in that API
@@ -549,30 +549,32 @@ bool VideoCaptureDeviceWin::CreateCapabilityMap() {
}
}
- capability.frame_rate = (time_per_frame > 0) ?
- static_cast<int>(kSecondsToReferenceTime / time_per_frame) : 0;
+ capability.supported_format.frame_rate =
+ (time_per_frame > 0)
+ ? static_cast<int>(kSecondsToReferenceTime / time_per_frame)
+ : 0;
// DirectShow works at the moment only on integer frame_rate but the
// best capability matching class works on rational frame rates.
- capability.frame_rate_numerator = capability.frame_rate;
+ capability.frame_rate_numerator = capability.supported_format.frame_rate;
capability.frame_rate_denominator = 1;
// We can't switch MEDIATYPE :~(.
if (media_type->subtype == kMediaSubTypeI420) {
- capability.color = PIXEL_FORMAT_I420;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_I420;
} else if (media_type->subtype == MEDIASUBTYPE_IYUV) {
// This is identical to PIXEL_FORMAT_I420.
- capability.color = PIXEL_FORMAT_I420;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_I420;
} else if (media_type->subtype == MEDIASUBTYPE_RGB24) {
- capability.color = PIXEL_FORMAT_RGB24;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_RGB24;
} else if (media_type->subtype == MEDIASUBTYPE_YUY2) {
- capability.color = PIXEL_FORMAT_YUY2;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_YUY2;
} else if (media_type->subtype == MEDIASUBTYPE_MJPG) {
- capability.color = PIXEL_FORMAT_MJPEG;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_MJPEG;
} else if (media_type->subtype == MEDIASUBTYPE_UYVY) {
- capability.color = PIXEL_FORMAT_UYVY;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_UYVY;
} else if (media_type->subtype == MEDIASUBTYPE_ARGB32) {
- capability.color = PIXEL_FORMAT_ARGB;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_ARGB;
} else {
WCHAR guid_str[128];
StringFromGUID2(media_type->subtype, guid_str, arraysize(guid_str));
diff --git a/media/video/capture/win/video_capture_device_win.h b/media/video/capture/win/video_capture_device_win.h
index 7f55f99..164c01c 100644
--- a/media/video/capture/win/video_capture_device_win.h
+++ b/media/video/capture/win/video_capture_device_win.h
@@ -40,9 +40,9 @@ class VideoCaptureDeviceWin
bool Init();
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(
- const VideoCaptureCapability& capture_format,
- scoped_ptr<VideoCaptureDevice::Client> client) OVERRIDE;
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
+ scoped_ptr<VideoCaptureDevice::Client> client)
+ OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
static void GetDeviceNames(Names* device_names);
@@ -79,7 +79,7 @@ class VideoCaptureDeviceWin
// Map of all capabilities this device support.
CapabilityList capabilities_;
- VideoCaptureCapability current_setting_;
+ VideoCaptureFormat capture_format_;
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceWin);
};