author     sheu@chromium.org <sheu@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-11-23 02:22:44 +0000
committer  sheu@chromium.org <sheu@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-11-23 02:22:44 +0000
commit     1251c46bd12dbed396ec0e8c0d09761b536139de (patch)
tree       0fdaa7c4af306dba526acadb1445ed0e3ac57b1f /media
parent     9f54263155acdec4ad09830578a90cb3a2871c9c (diff)
Reorganize media::VideoCapture* types
The purpose of this CL is to clean up the distinction between VideoCaptureFormat (which describes the format of a captured frame), VideoCaptureParams (which describes the requested format of a capture), and VideoCaptureCapability (which describes the capture capabilities of a device). Notably:

* VideoCaptureFormat::frame_size_type -> VideoCaptureParams::allow_resolution_change, since variable-resolution capture is a per-session property, not a per-frame one.
* VideoCaptureCapability::color -> VideoCaptureFormat::pixel_format, since the frame color format is a per-frame property.
* Since VideoCaptureParams now holds a VideoCaptureFormat member, capture requests can also specify a particular capture color format.

BUG=269312
TEST=local build, run unittests, chrome on CrOS snow, desktop Linux

Review URL: https://codereview.chromium.org/68503005

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@236927 0039d316-1c4b-4281-b951-d872f2087c98
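Editor's note: for context, this is the shape of a capture request before and after this CL, following the call sites updated in the diff below. This is only a rough sketch; `device` and `client` are assumed to already exist (e.g. a VideoCaptureDevice from VideoCaptureDevice::Create() and a scoped_ptr to a VideoCaptureDevice::Client implementation).

    // Before: a single VideoCaptureCapability mixed per-frame and per-session
    // properties, and the color format could not be requested.
    //   media::VideoCaptureCapability capture_format(
    //       640, 480, 30, media::PIXEL_FORMAT_I420,
    //       media::ConstantResolutionVideoCaptureDevice);
    //   device->AllocateAndStart(capture_format, client.Pass());

    // After: the request is a VideoCaptureParams carrying a VideoCaptureFormat,
    // so the pixel format is part of the request and resolution-change
    // behavior is a per-session flag.
    media::VideoCaptureParams capture_params;
    capture_params.requested_format.frame_size.SetSize(640, 480);
    capture_params.requested_format.frame_rate = 30;
    capture_params.requested_format.pixel_format = media::PIXEL_FORMAT_I420;
    capture_params.allow_resolution_change = false;
    device->AllocateAndStart(capture_params, client.Pass());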
Diffstat (limited to 'media')
-rw-r--r--media/video/capture/android/video_capture_device_android.cc54
-rw-r--r--media/video/capture/android/video_capture_device_android.h7
-rw-r--r--media/video/capture/fake_video_capture_device.cc113
-rw-r--r--media/video/capture/fake_video_capture_device.h14
-rw-r--r--media/video/capture/linux/video_capture_device_linux.cc38
-rw-r--r--media/video/capture/linux/video_capture_device_linux.h4
-rw-r--r--media/video/capture/mac/video_capture_device_mac.h10
-rw-r--r--media/video/capture/mac/video_capture_device_mac.mm66
-rw-r--r--media/video/capture/mac/video_capture_device_qtkit_mac.mm11
-rw-r--r--media/video/capture/video_capture_device.h13
-rw-r--r--media/video/capture/video_capture_device_unittest.cc182
-rw-r--r--media/video/capture/video_capture_proxy.cc1
-rw-r--r--media/video/capture/video_capture_types.cc48
-rw-r--r--media/video/capture/video_capture_types.h44
-rw-r--r--media/video/capture/win/capability_list_win.cc10
-rw-r--r--media/video/capture/win/capability_list_win.h8
-rw-r--r--media/video/capture/win/sink_filter_win.cc9
-rw-r--r--media/video/capture/win/sink_filter_win.h7
-rw-r--r--media/video/capture/win/sink_input_pin_win.cc60
-rw-r--r--media/video/capture/win/sink_input_pin_win.h8
-rw-r--r--media/video/capture/win/video_capture_device_mf_win.cc30
-rw-r--r--media/video/capture/win/video_capture_device_mf_win.h8
-rw-r--r--media/video/capture/win/video_capture_device_win.cc66
-rw-r--r--media/video/capture/win/video_capture_device_win.h8
24 files changed, 382 insertions, 437 deletions
diff --git a/media/video/capture/android/video_capture_device_android.cc b/media/video/capture/android/video_capture_device_android.cc
index 06c4604..c4c7034 100644
--- a/media/video/capture/android/video_capture_device_android.cc
+++ b/media/video/capture/android/video_capture_device_android.cc
@@ -86,11 +86,7 @@ bool VideoCaptureDeviceAndroid::RegisterVideoCaptureDevice(JNIEnv* env) {
}
VideoCaptureDeviceAndroid::VideoCaptureDeviceAndroid(const Name& device_name)
- : state_(kIdle),
- got_first_frame_(false),
- device_name_(device_name),
- current_settings_() {
-}
+ : state_(kIdle), got_first_frame_(false), device_name_(device_name) {}
VideoCaptureDeviceAndroid::~VideoCaptureDeviceAndroid() {
StopAndDeAllocate();
@@ -111,7 +107,7 @@ bool VideoCaptureDeviceAndroid::Init() {
}
void VideoCaptureDeviceAndroid::AllocateAndStart(
- const VideoCaptureCapability& capture_format,
+ const VideoCaptureParams& params,
scoped_ptr<Client> client) {
DVLOG(1) << "VideoCaptureDeviceAndroid::AllocateAndStart";
{
@@ -124,40 +120,38 @@ void VideoCaptureDeviceAndroid::AllocateAndStart(
JNIEnv* env = AttachCurrentThread();
- jboolean ret = Java_VideoCapture_allocate(env,
- j_capture_.obj(),
- capture_format.width,
- capture_format.height,
- capture_format.frame_rate);
+ jboolean ret =
+ Java_VideoCapture_allocate(env,
+ j_capture_.obj(),
+ params.requested_format.frame_size.width(),
+ params.requested_format.frame_size.height(),
+ params.requested_format.frame_rate);
if (!ret) {
SetErrorState("failed to allocate");
return;
}
// Store current width and height.
- current_settings_.width =
- Java_VideoCapture_queryWidth(env, j_capture_.obj());
- current_settings_.height =
- Java_VideoCapture_queryHeight(env, j_capture_.obj());
- current_settings_.frame_rate =
+ capture_format_.frame_size.SetSize(
+ Java_VideoCapture_queryWidth(env, j_capture_.obj()),
+ Java_VideoCapture_queryHeight(env, j_capture_.obj()));
+ capture_format_.frame_rate =
Java_VideoCapture_queryFrameRate(env, j_capture_.obj());
- current_settings_.color = GetColorspace();
- DCHECK_NE(current_settings_.color, media::PIXEL_FORMAT_UNKNOWN);
- CHECK(current_settings_.width > 0 && !(current_settings_.width % 2));
- CHECK(current_settings_.height > 0 && !(current_settings_.height % 2));
+ capture_format_.pixel_format = GetColorspace();
+ DCHECK_NE(capture_format_.pixel_format, media::PIXEL_FORMAT_UNKNOWN);
+ CHECK(capture_format_.frame_size.GetArea() > 0);
+ CHECK(!(capture_format_.frame_size.width() % 2));
+ CHECK(!(capture_format_.frame_size.height() % 2));
- if (capture_format.frame_rate > 0) {
+ if (capture_format_.frame_rate > 0) {
frame_interval_ = base::TimeDelta::FromMicroseconds(
- (base::Time::kMicrosecondsPerSecond + capture_format.frame_rate - 1) /
- capture_format.frame_rate);
+ (base::Time::kMicrosecondsPerSecond + capture_format_.frame_rate - 1) /
+ capture_format_.frame_rate);
}
- DVLOG(1) << "VideoCaptureDeviceAndroid::Allocate: queried width="
- << current_settings_.width
- << ", height="
- << current_settings_.height
- << ", frame_rate="
- << current_settings_.frame_rate;
+ DVLOG(1) << "VideoCaptureDeviceAndroid::Allocate: queried frame_size="
+ << capture_format_.frame_size.ToString()
+ << ", frame_rate=" << capture_format_.frame_rate;
jint result = Java_VideoCapture_startCapture(env, j_capture_.obj());
if (result < 0) {
@@ -234,7 +228,7 @@ void VideoCaptureDeviceAndroid::OnFrameAvailable(
rotation,
flip_vert,
flip_horiz,
- current_settings_);
+ capture_format_);
}
env->ReleaseByteArrayElements(data, buffer, JNI_ABORT);
diff --git a/media/video/capture/android/video_capture_device_android.h b/media/video/capture/android/video_capture_device_android.h
index c9ab5d7..3039a79 100644
--- a/media/video/capture/android/video_capture_device_android.h
+++ b/media/video/capture/android/video_capture_device_android.h
@@ -29,9 +29,8 @@ class MEDIA_EXPORT VideoCaptureDeviceAndroid : public VideoCaptureDevice {
static bool RegisterVideoCaptureDevice(JNIEnv* env);
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(
- const VideoCaptureCapability& capture_format,
- scoped_ptr<Client> client) OVERRIDE;
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
+ scoped_ptr<Client> client) OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
// Implement org.chromium.media.VideoCapture.nativeOnFrameAvailable.
@@ -73,7 +72,7 @@ class MEDIA_EXPORT VideoCaptureDeviceAndroid : public VideoCaptureDevice {
scoped_ptr<VideoCaptureDevice::Client> client_;
Name device_name_;
- VideoCaptureCapability current_settings_;
+ VideoCaptureFormat capture_format_;
// Java VideoCaptureAndroid instance.
base::android::ScopedJavaGlobalRef<jobject> j_capture_;
diff --git a/media/video/capture/fake_video_capture_device.cc b/media/video/capture/fake_video_capture_device.cc
index c36670c..ef44d86 100644
--- a/media/video/capture/fake_video_capture_device.cc
+++ b/media/video/capture/fake_video_capture_device.cc
@@ -37,12 +37,13 @@ void FakeVideoCaptureDevice::GetDeviceNames(Names* const device_names) {
void FakeVideoCaptureDevice::GetDeviceSupportedFormats(
const Name& device,
VideoCaptureCapabilities* formats) {
- VideoCaptureCapability capture_format;
- capture_format.color = media::PIXEL_FORMAT_I420;
- capture_format.width = 640;
- capture_format.height = 480;
- capture_format.frame_rate = 1000 / kFakeCaptureTimeoutMs;
- formats->push_back(capture_format);
+ VideoCaptureCapability capture_format_640x480;
+ capture_format_640x480.supported_format.frame_size.SetSize(640, 480);
+ capture_format_640x480.supported_format.frame_rate =
+ 1000 / kFakeCaptureTimeoutMs;
+ capture_format_640x480.supported_format.pixel_format =
+ media::PIXEL_FORMAT_I420;
+ formats->push_back(capture_format_640x480);
}
VideoCaptureDevice* FakeVideoCaptureDevice::Create(const Name& device_name) {
@@ -67,8 +68,7 @@ FakeVideoCaptureDevice::FakeVideoCaptureDevice()
: state_(kIdle),
capture_thread_("CaptureThread"),
frame_count_(0),
- capabilities_roster_index_(0) {
-}
+ format_roster_index_(0) {}
FakeVideoCaptureDevice::~FakeVideoCaptureDevice() {
// Check if the thread is running.
@@ -77,31 +77,27 @@ FakeVideoCaptureDevice::~FakeVideoCaptureDevice() {
}
void FakeVideoCaptureDevice::AllocateAndStart(
- const VideoCaptureCapability& capture_format,
+ const VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
- capture_format_.frame_size_type = capture_format.frame_size_type;
- if (capture_format.frame_size_type == VariableResolutionVideoCaptureDevice)
- PopulateCapabilitiesRoster();
+ if (params.allow_resolution_change)
+ PopulateFormatRoster();
if (state_ != kIdle) {
return; // Wrong state.
}
client_ = client.Pass();
- capture_format_.color = PIXEL_FORMAT_I420;
- if (capture_format.width > 320) { // VGA
- capture_format_.width = 640;
- capture_format_.height = 480;
+ capture_format_.pixel_format = PIXEL_FORMAT_I420;
+ if (params.requested_format.frame_size.width() > 320) { // VGA
+ capture_format_.frame_size.SetSize(640, 480);
capture_format_.frame_rate = 30;
} else { // QVGA
- capture_format_.width = 320;
- capture_format_.height = 240;
+ capture_format_.frame_size.SetSize(320, 240);
capture_format_.frame_rate = 30;
}
- const size_t fake_frame_size = VideoFrame::AllocationSize(
- VideoFrame::I420,
- gfx::Size(capture_format_.width, capture_format_.height));
+ const size_t fake_frame_size =
+ VideoFrame::AllocationSize(VideoFrame::I420, capture_format_.frame_size);
fake_frame_.reset(new uint8[fake_frame_size]);
state_ = kCapturing;
@@ -114,15 +110,14 @@ void FakeVideoCaptureDevice::AllocateAndStart(
void FakeVideoCaptureDevice::Reallocate() {
DCHECK_EQ(state_, kCapturing);
- capture_format_ = capabilities_roster_.at(++capabilities_roster_index_ %
- capabilities_roster_.size());
- DCHECK_EQ(capture_format_.color, PIXEL_FORMAT_I420);
- DVLOG(3) << "Reallocating FakeVideoCaptureDevice, new capture resolution ("
- << capture_format_.width << "x" << capture_format_.height << ")";
-
- const size_t fake_frame_size = VideoFrame::AllocationSize(
- VideoFrame::I420,
- gfx::Size(capture_format_.width, capture_format_.height));
+ capture_format_ =
+ format_roster_.at(++format_roster_index_ % format_roster_.size());
+ DCHECK_EQ(capture_format_.pixel_format, PIXEL_FORMAT_I420);
+ DVLOG(3) << "Reallocating FakeVideoCaptureDevice, new capture resolution "
+ << capture_format_.frame_size.ToString();
+
+ const size_t fake_frame_size =
+ VideoFrame::AllocationSize(VideoFrame::I420, capture_format_.frame_size);
fake_frame_.reset(new uint8[fake_frame_size]);
}
@@ -139,25 +134,28 @@ void FakeVideoCaptureDevice::OnCaptureTask() {
return;
}
- const size_t frame_size = VideoFrame::AllocationSize(
- VideoFrame::I420,
- gfx::Size(capture_format_.width, capture_format_.height));
+ const size_t frame_size =
+ VideoFrame::AllocationSize(VideoFrame::I420, capture_format_.frame_size);
memset(fake_frame_.get(), 0, frame_size);
SkBitmap bitmap;
bitmap.setConfig(SkBitmap::kA8_Config,
- capture_format_.width,
- capture_format_.height,
- capture_format_.width);
- bitmap.setPixels(fake_frame_.get());
+ capture_format_.frame_size.width(),
+ capture_format_.frame_size.height(),
+ capture_format_.frame_size.width()),
+ bitmap.setPixels(fake_frame_.get());
SkCanvas canvas(bitmap);
// Draw a sweeping circle to show an animation.
- int radius = std::min(capture_format_.width, capture_format_.height) / 4;
- SkRect rect = SkRect::MakeXYWH(
- capture_format_.width / 2 - radius, capture_format_.height / 2 - radius,
- 2 * radius, 2 * radius);
+ int radius = std::min(capture_format_.frame_size.width(),
+ capture_format_.frame_size.height()) /
+ 4;
+ SkRect rect =
+ SkRect::MakeXYWH(capture_format_.frame_size.width() / 2 - radius,
+ capture_format_.frame_size.height() / 2 - radius,
+ 2 * radius,
+ 2 * radius);
SkPaint paint;
paint.setStyle(SkPaint::kFill_Style);
@@ -203,8 +201,7 @@ void FakeVideoCaptureDevice::OnCaptureTask() {
false,
capture_format_);
if (!(frame_count_ % kFakeCaptureCapabilityChangePeriod) &&
- (capture_format_.frame_size_type ==
- VariableResolutionVideoCaptureDevice)) {
+ format_roster_.size() > 0U) {
Reallocate();
}
// Reschedule next CaptureTask.
@@ -215,27 +212,15 @@ void FakeVideoCaptureDevice::OnCaptureTask() {
base::TimeDelta::FromMilliseconds(kFakeCaptureTimeoutMs));
}
-void FakeVideoCaptureDevice::PopulateCapabilitiesRoster() {
- capabilities_roster_.push_back(
- media::VideoCaptureCapability(320,
- 240,
- 30,
- PIXEL_FORMAT_I420,
- VariableResolutionVideoCaptureDevice));
- capabilities_roster_.push_back(
- media::VideoCaptureCapability(640,
- 480,
- 30,
- PIXEL_FORMAT_I420,
- VariableResolutionVideoCaptureDevice));
- capabilities_roster_.push_back(
- media::VideoCaptureCapability(800,
- 600,
- 30,
- PIXEL_FORMAT_I420,
- VariableResolutionVideoCaptureDevice));
-
- capabilities_roster_index_ = 0;
+void FakeVideoCaptureDevice::PopulateFormatRoster() {
+ format_roster_.push_back(
+ media::VideoCaptureFormat(gfx::Size(320, 240), 30, PIXEL_FORMAT_I420));
+ format_roster_.push_back(
+ media::VideoCaptureFormat(gfx::Size(640, 480), 30, PIXEL_FORMAT_I420));
+ format_roster_.push_back(
+ media::VideoCaptureFormat(gfx::Size(800, 600), 30, PIXEL_FORMAT_I420));
+
+ format_roster_index_ = 0;
}
} // namespace media
diff --git a/media/video/capture/fake_video_capture_device.h b/media/video/capture/fake_video_capture_device.h
index 174ba06a..7d5de57 100644
--- a/media/video/capture/fake_video_capture_device.h
+++ b/media/video/capture/fake_video_capture_device.h
@@ -29,9 +29,9 @@ class MEDIA_EXPORT FakeVideoCaptureDevice : public VideoCaptureDevice {
VideoCaptureCapabilities* formats);
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(
- const VideoCaptureCapability& capture_format,
- scoped_ptr<VideoCaptureDevice::Client> client) OVERRIDE;
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
+ scoped_ptr<VideoCaptureDevice::Client> client)
+ OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
private:
@@ -49,19 +49,19 @@ class MEDIA_EXPORT FakeVideoCaptureDevice : public VideoCaptureDevice {
// EXPERIMENTAL, similar to allocate, but changes resolution and calls
// client->OnFrameInfoChanged(VideoCaptureCapability&)
void Reallocate();
- void PopulateCapabilitiesRoster();
+ void PopulateFormatRoster();
scoped_ptr<VideoCaptureDevice::Client> client_;
InternalState state_;
base::Thread capture_thread_;
scoped_ptr<uint8[]> fake_frame_;
int frame_count_;
- VideoCaptureCapability capture_format_;
+ VideoCaptureFormat capture_format_;
// When the device is configured as mutating video captures, this vector
// holds the available ones which are used in sequence, restarting at the end.
- std::vector<VideoCaptureCapability> capabilities_roster_;
- int capabilities_roster_index_;
+ std::vector<VideoCaptureFormat> format_roster_;
+ int format_roster_index_;
static bool fail_next_create_;
diff --git a/media/video/capture/linux/video_capture_device_linux.cc b/media/video/capture/linux/video_capture_device_linux.cc
index 66aecff..1946961 100644
--- a/media/video/capture/linux/video_capture_device_linux.cc
+++ b/media/video/capture/linux/video_capture_device_linux.cc
@@ -159,22 +159,25 @@ void VideoCaptureDevice::GetDeviceSupportedFormats(
formats->clear();
- VideoCaptureCapability capture_format;
+ VideoCaptureCapability capture_capability;
// Retrieve the caps one by one, first get colorspace, then sizes, then
// framerates. See http://linuxtv.org/downloads/v4l-dvb-apis for reference.
v4l2_fmtdesc pixel_format = {};
pixel_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (ioctl(fd, VIDIOC_ENUM_FMT, &pixel_format) == 0) {
- capture_format.color =
+ capture_capability.supported_format.pixel_format =
V4l2ColorToVideoCaptureColorFormat((int32)pixel_format.pixelformat);
- if (capture_format.color == PIXEL_FORMAT_UNKNOWN) continue;
+ if (capture_capability.supported_format.pixel_format ==
+ PIXEL_FORMAT_UNKNOWN) {
+ continue;
+ }
v4l2_frmsizeenum frame_size = {};
frame_size.pixel_format = pixel_format.pixelformat;
while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frame_size) == 0) {
if (frame_size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
- capture_format.width = frame_size.discrete.width;
- capture_format.height = frame_size.discrete.height;
+ capture_capability.supported_format.frame_size.SetSize(
+ frame_size.discrete.width, frame_size.discrete.height);
} else if (frame_size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
// TODO(mcasas): see http://crbug.com/249953, support these devices.
NOTIMPLEMENTED();
@@ -189,11 +192,11 @@ void VideoCaptureDevice::GetDeviceSupportedFormats(
while (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frame_interval) == 0) {
if (frame_interval.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
if (frame_interval.discrete.numerator != 0) {
- capture_format.frame_rate =
+ capture_capability.supported_format.frame_rate =
static_cast<float>(frame_interval.discrete.denominator) /
static_cast<float>(frame_interval.discrete.numerator);
} else {
- capture_format.frame_rate = 0;
+ capture_capability.supported_format.frame_rate = 0;
}
} else if (frame_interval.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
// TODO(mcasas): see http://crbug.com/249953, support these devices.
@@ -204,7 +207,7 @@ void VideoCaptureDevice::GetDeviceSupportedFormats(
NOTIMPLEMENTED();
break;
}
- formats->push_back(capture_format);
+ formats->push_back(capture_capability);
++frame_interval.index;
}
++frame_size.index;
@@ -291,7 +294,7 @@ VideoCaptureDeviceLinux::~VideoCaptureDeviceLinux() {
}
void VideoCaptureDeviceLinux::AllocateAndStart(
- const VideoCaptureCapability& capture_format,
+ const VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
if (v4l2_thread_.IsRunning()) {
return; // Wrong state.
@@ -301,9 +304,9 @@ void VideoCaptureDeviceLinux::AllocateAndStart(
FROM_HERE,
base::Bind(&VideoCaptureDeviceLinux::OnAllocateAndStart,
base::Unretained(this),
- capture_format.width,
- capture_format.height,
- capture_format.frame_rate,
+ params.requested_format.frame_size.width(),
+ params.requested_format.frame_size.height(),
+ params.requested_format.frame_rate,
base::Passed(&client)));
}
@@ -409,12 +412,11 @@ void VideoCaptureDeviceLinux::OnAllocateAndStart(int width,
// framerate configuration, or the actual one is different from the desired?
// Store our current width and height.
- frame_info_.color =
+ capture_format_.frame_size.SetSize(video_fmt.fmt.pix.width,
+ video_fmt.fmt.pix.height);
+ capture_format_.frame_rate = frame_rate;
+ capture_format_.pixel_format =
V4l2ColorToVideoCaptureColorFormat(video_fmt.fmt.pix.pixelformat);
- frame_info_.width = video_fmt.fmt.pix.width;
- frame_info_.height = video_fmt.fmt.pix.height;
- frame_info_.frame_rate = frame_rate;
- frame_info_.frame_size_type = VariableResolutionVideoCaptureDevice;
// Start capturing.
if (!AllocateVideoBuffers()) {
@@ -519,7 +521,7 @@ void VideoCaptureDeviceLinux::OnCaptureTask() {
0,
false,
false,
- frame_info_);
+ capture_format_);
// Enqueue the buffer again.
if (ioctl(device_fd_, VIDIOC_QBUF, &buffer) == -1) {
diff --git a/media/video/capture/linux/video_capture_device_linux.h b/media/video/capture/linux/video_capture_device_linux.h
index ad93fc0..a5917b7 100644
--- a/media/video/capture/linux/video_capture_device_linux.h
+++ b/media/video/capture/linux/video_capture_device_linux.h
@@ -24,7 +24,7 @@ class VideoCaptureDeviceLinux : public VideoCaptureDevice {
virtual ~VideoCaptureDeviceLinux();
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(const VideoCaptureCapability& capture_format,
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
scoped_ptr<Client> client) OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
@@ -64,7 +64,7 @@ class VideoCaptureDeviceLinux : public VideoCaptureDevice {
Buffer* buffer_pool_;
int buffer_pool_size_; // Number of allocated buffers.
int timeout_count_;
- VideoCaptureCapability frame_info_;
+ VideoCaptureFormat capture_format_;
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceLinux);
};
diff --git a/media/video/capture/mac/video_capture_device_mac.h b/media/video/capture/mac/video_capture_device_mac.h
index adfd4c7..474e7e1 100644
--- a/media/video/capture/mac/video_capture_device_mac.h
+++ b/media/video/capture/mac/video_capture_device_mac.h
@@ -30,9 +30,9 @@ class VideoCaptureDeviceMac : public VideoCaptureDevice {
virtual ~VideoCaptureDeviceMac();
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(
- const VideoCaptureCapability& capture_format,
- scoped_ptr<VideoCaptureDevice::Client> client) OVERRIDE;
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
+ scoped_ptr<VideoCaptureDevice::Client> client)
+ OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
bool Init();
@@ -40,7 +40,7 @@ class VideoCaptureDeviceMac : public VideoCaptureDevice {
// Called to deliver captured video frames.
void ReceiveFrame(const uint8* video_frame,
int video_frame_length,
- const VideoCaptureCapability& frame_info,
+ const VideoCaptureFormat& frame_format,
int aspect_numerator,
int aspect_denominator);
@@ -61,7 +61,7 @@ class VideoCaptureDeviceMac : public VideoCaptureDevice {
Name device_name_;
scoped_ptr<VideoCaptureDevice::Client> client_;
- VideoCaptureCapability current_settings_;
+ VideoCaptureFormat capture_format_;
bool sent_frame_info_;
bool tried_to_square_pixels_;
diff --git a/media/video/capture/mac/video_capture_device_mac.mm b/media/video/capture/mac/video_capture_device_mac.mm
index b353b12..9ef29d2 100644
--- a/media/video/capture/mac/video_capture_device_mac.mm
+++ b/media/video/capture/mac/video_capture_device_mac.mm
@@ -134,15 +134,15 @@ VideoCaptureDeviceMac::~VideoCaptureDeviceMac() {
}
void VideoCaptureDeviceMac::AllocateAndStart(
- const VideoCaptureCapability& capture_format,
+ const VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
DCHECK_EQ(loop_proxy_, base::MessageLoopProxy::current());
if (state_ != kIdle) {
return;
}
- int width = capture_format.width;
- int height = capture_format.height;
- int frame_rate = capture_format.frame_rate;
+ int width = params.requested_format.frame_size.width();
+ int height = params.requested_format.frame_size.height();
+ int frame_rate = params.requested_format.frame_rate;
// The OS API can scale captured frame to any size requested, which would lead
// to undesired aspect ratio change. Try to open the camera with a natively
@@ -164,10 +164,9 @@ void VideoCaptureDeviceMac::AllocateAndStart(
else if (frame_rate > kMaxFrameRate)
frame_rate = kMaxFrameRate;
- current_settings_.color = PIXEL_FORMAT_UYVY;
- current_settings_.width = width;
- current_settings_.height = height;
- current_settings_.frame_rate = frame_rate;
+ capture_format_.frame_size.SetSize(width, height);
+ capture_format_.frame_rate = frame_rate;
+ capture_format_.pixel_format = PIXEL_FORMAT_UYVY;
if (width <= kVGA.width || height <= kVGA.height) {
// If the resolution is VGA or QVGA, set the capture resolution to the
@@ -226,7 +225,7 @@ bool VideoCaptureDeviceMac::Init() {
void VideoCaptureDeviceMac::ReceiveFrame(
const uint8* video_frame,
int video_frame_length,
- const VideoCaptureCapability& frame_info,
+ const VideoCaptureFormat& frame_format,
int aspect_numerator,
int aspect_denominator) {
// This method is safe to call from a device capture thread,
@@ -234,23 +233,24 @@ void VideoCaptureDeviceMac::ReceiveFrame(
if (!sent_frame_info_) {
// Final resolution has not yet been selected.
- if (current_settings_.width > kVGA.width ||
- current_settings_.height > kVGA.height) {
+ if (capture_format_.frame_size.width() > kVGA.width ||
+ capture_format_.frame_size.height() > kVGA.height) {
// We are requesting HD. Make sure that the picture is good, otherwise
// drop down to VGA.
bool change_to_vga = false;
- if (frame_info.width < current_settings_.width ||
- frame_info.height < current_settings_.height) {
+ if (frame_format.frame_size.width() <
+ capture_format_.frame_size.width() ||
+ frame_format.frame_size.height() <
+ capture_format_.frame_size.height()) {
// These are the default capture settings, not yet configured to match
- // |current_settings_|.
- DCHECK(frame_info.frame_rate == 0);
+ // |capture_format_|.
+ DCHECK(frame_format.frame_rate == 0);
DVLOG(1) << "Switching to VGA because the default resolution is " <<
- frame_info.width << "x" << frame_info.height;
+ frame_format.frame_size.ToString();
change_to_vga = true;
}
- if (frame_info.width == current_settings_.width &&
- frame_info.height == current_settings_.height &&
+ if (capture_format_.frame_size == frame_format.frame_size &&
aspect_numerator != aspect_denominator) {
DVLOG(1) << "Switching to VGA because HD has nonsquare pixel " <<
"aspect ratio " << aspect_numerator << ":" << aspect_denominator;
@@ -258,33 +258,29 @@ void VideoCaptureDeviceMac::ReceiveFrame(
}
if (change_to_vga) {
- current_settings_.width = kVGA.width;
- current_settings_.height = kVGA.height;
+ capture_format_.frame_size.SetSize(kVGA.width, kVGA.height);
}
}
- if (current_settings_.width == frame_info.width &&
- current_settings_.height == frame_info.height &&
+ if (capture_format_.frame_size == frame_format.frame_size &&
!tried_to_square_pixels_ &&
(aspect_numerator > kMaxPixelAspectRatio * aspect_denominator ||
aspect_denominator > kMaxPixelAspectRatio * aspect_numerator)) {
// The requested size results in non-square PAR.
// Shrink the frame to 1:1 PAR (assuming QTKit selects the same input
// mode, which is not guaranteed).
- int new_width = current_settings_.width;
- int new_height = current_settings_.height;
+ int new_width = capture_format_.frame_size.width();
+ int new_height = capture_format_.frame_size.height();
if (aspect_numerator < aspect_denominator) {
new_width = (new_width * aspect_numerator) / aspect_denominator;
} else {
new_height = (new_height * aspect_denominator) / aspect_numerator;
}
- current_settings_.width = new_width;
- current_settings_.height = new_height;
+ capture_format_.frame_size.SetSize(new_width, new_height);
tried_to_square_pixels_ = true;
}
- if (current_settings_.width == frame_info.width &&
- current_settings_.height == frame_info.height) {
+ if (capture_format_.frame_size == frame_format.frame_size) {
sent_frame_info_ = true;
} else {
UpdateCaptureResolution();
@@ -294,8 +290,10 @@ void VideoCaptureDeviceMac::ReceiveFrame(
}
}
- DCHECK(current_settings_.width == frame_info.width &&
- current_settings_.height == frame_info.height);
+ DCHECK_EQ(capture_format_.frame_size.width(),
+ frame_format.frame_size.width());
+ DCHECK_EQ(capture_format_.frame_size.height(),
+ frame_format.frame_size.height());
client_->OnIncomingCapturedFrame(video_frame,
video_frame_length,
@@ -303,7 +301,7 @@ void VideoCaptureDeviceMac::ReceiveFrame(
0,
false,
false,
- current_settings_);
+ capture_format_);
}
void VideoCaptureDeviceMac::ReceiveError(const std::string& reason) {
@@ -320,9 +318,9 @@ void VideoCaptureDeviceMac::SetErrorState(const std::string& reason) {
}
bool VideoCaptureDeviceMac::UpdateCaptureResolution() {
- if (![capture_device_ setCaptureHeight:current_settings_.height
- width:current_settings_.width
- frameRate:current_settings_.frame_rate]) {
+ if (![capture_device_ setCaptureHeight:capture_format_.frame_size.height()
+ width:capture_format_.frame_size.width()
+ frameRate:capture_format_.frame_rate]) {
ReceiveError("Could not configure capture device.");
return false;
}
diff --git a/media/video/capture/mac/video_capture_device_qtkit_mac.mm b/media/video/capture/mac/video_capture_device_qtkit_mac.mm
index 8a778a1..19ee7dc 100644
--- a/media/video/capture/mac/video_capture_device_qtkit_mac.mm
+++ b/media/video/capture/mac/video_capture_device_qtkit_mac.mm
@@ -281,11 +281,10 @@
addressToPass = adjustedAddress;
frameSize = frameHeight * expectedBytesPerRow;
}
- media::VideoCaptureCapability captureCapability;
- captureCapability.width = frameWidth;
- captureCapability.height = frameHeight;
- captureCapability.frame_rate = frameRate_;
- captureCapability.color = media::PIXEL_FORMAT_UYVY;
+
+ media::VideoCaptureFormat captureFormat(gfx::Size(frameWidth, frameHeight),
+ frameRate_,
+ media::PIXEL_FORMAT_UYVY);
// The aspect ratio dictionary is often missing, in which case we report
// a pixel aspect ratio of 0:0.
@@ -305,7 +304,7 @@
}
// Deliver the captured video frame.
- frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureCapability,
+ frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureFormat,
aspectNumerator, aspectDenominator);
CVPixelBufferUnlockBaseAddress(videoFrame, kLockFlags);
diff --git a/media/video/capture/video_capture_device.h b/media/video/capture/video_capture_device.h
index 74a8d27..c4930be 100644
--- a/media/video/capture/video_capture_device.h
+++ b/media/video/capture/video_capture_device.h
@@ -20,6 +20,7 @@
#include "base/memory/scoped_ptr.h"
#include "base/time/time.h"
#include "media/base/media_export.h"
+#include "media/base/video_frame.h"
#include "media/video/capture/video_capture_types.h"
namespace media {
@@ -172,7 +173,7 @@ class MEDIA_EXPORT VideoCaptureDevice {
int rotation, // Clockwise.
bool flip_vert,
bool flip_horiz,
- const VideoCaptureCapability& frame_info) = 0;
+ const VideoCaptureFormat& frame_format) = 0;
// Captured a new video frame, held in |buffer|.
//
@@ -208,12 +209,10 @@ class MEDIA_EXPORT VideoCaptureDevice {
VideoCaptureCapabilities* formats);
// Prepare the camera for use. After this function has been called no other
- // applications can use the camera. On completion Client::OnFrameInfo()
- // is called informing of the resulting resolution and frame rate.
- // StopAndDeAllocate() must be called before the object is deleted.
- virtual void AllocateAndStart(
- const VideoCaptureCapability& capture_format,
- scoped_ptr<Client> client) = 0;
+ // applications can use the camera. StopAndDeAllocate() must be called before
+ // the object is deleted.
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
+ scoped_ptr<Client> client) = 0;
// Deallocates the camera, possibly asynchronously.
//
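Editor's note: to make the contract documented in the comments above concrete, here is a minimal lifecycle sketch against the new interface. It assumes a hypothetical MyClient class implementing media::VideoCaptureDevice::Client and a device_name obtained from device enumeration; neither is part of this CL.

    // Hypothetical MyClient implements media::VideoCaptureDevice::Client;
    // captured frames are delivered to it tagged with a VideoCaptureFormat.
    scoped_ptr<media::VideoCaptureDevice> device(
        media::VideoCaptureDevice::Create(device_name));

    media::VideoCaptureParams params;
    params.requested_format = media::VideoCaptureFormat(
        gfx::Size(1280, 720), 30, media::PIXEL_FORMAT_I420);
    params.allow_resolution_change = false;

    scoped_ptr<media::VideoCaptureDevice::Client> client(new MyClient());
    // The camera is exclusively held from this point on.
    device->AllocateAndStart(params, client.Pass());
    // ... frames arrive on the client ...
    // Must be called before |device| is destroyed.
    device->StopAndDeAllocate();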
diff --git a/media/video/capture/video_capture_device_unittest.cc b/media/video/capture/video_capture_device_unittest.cc
index 3b470d1..705167d 100644
--- a/media/video/capture/video_capture_device_unittest.cc
+++ b/media/video/capture/video_capture_device_unittest.cc
@@ -58,7 +58,6 @@ using ::testing::_;
using ::testing::AnyNumber;
using ::testing::Return;
using ::testing::AtLeast;
-using ::testing::SaveArg;
namespace media {
@@ -69,8 +68,7 @@ class MockClient : public media::VideoCaptureDevice::Client {
const gfx::Size& dimensions));
MOCK_METHOD0(OnErr, void());
- explicit MockClient(
- base::Callback<void(const VideoCaptureCapability&)> frame_cb)
+ explicit MockClient(base::Callback<void(const VideoCaptureFormat&)> frame_cb)
: main_thread_(base::MessageLoopProxy::current()), frame_cb_(frame_cb) {}
virtual void OnError() OVERRIDE {
@@ -83,9 +81,9 @@ class MockClient : public media::VideoCaptureDevice::Client {
int rotation,
bool flip_vert,
bool flip_horiz,
- const VideoCaptureCapability& frame_info)
+ const VideoCaptureFormat& format)
OVERRIDE {
- main_thread_->PostTask(FROM_HERE, base::Bind(frame_cb_, frame_info));
+ main_thread_->PostTask(FROM_HERE, base::Bind(frame_cb_, format));
}
virtual void OnIncomingCapturedBuffer(const scoped_refptr<Buffer>& buffer,
@@ -98,7 +96,7 @@ class MockClient : public media::VideoCaptureDevice::Client {
private:
scoped_refptr<base::MessageLoopProxy> main_thread_;
- base::Callback<void(const VideoCaptureCapability&)> frame_cb_;
+ base::Callback<void(const VideoCaptureFormat&)> frame_cb_;
};
class VideoCaptureDeviceTest : public testing::Test {
@@ -123,8 +121,8 @@ class VideoCaptureDeviceTest : public testing::Test {
&VideoCaptureDeviceTest::OnFrameCaptured, base::Unretained(this))));
}
- void OnFrameCaptured(const VideoCaptureCapability& frame_info) {
- last_frame_info_ = frame_info;
+ void OnFrameCaptured(const VideoCaptureFormat& format) {
+ last_format_ = format;
run_loop_->QuitClosure().Run();
}
@@ -133,9 +131,7 @@ class VideoCaptureDeviceTest : public testing::Test {
run_loop_->Run();
}
- const VideoCaptureCapability& last_frame_info() const {
- return last_frame_info_;
- }
+ const VideoCaptureFormat& last_format() const { return last_format_; }
#if defined(OS_WIN)
base::win::ScopedCOMInitializer initialize_com_;
@@ -144,7 +140,7 @@ class VideoCaptureDeviceTest : public testing::Test {
scoped_ptr<base::MessageLoop> loop_;
scoped_ptr<base::RunLoop> run_loop_;
scoped_ptr<MockClient> client_;
- VideoCaptureCapability last_frame_info_;
+ VideoCaptureFormat last_format_;
};
TEST_F(VideoCaptureDeviceTest, OpenInvalidDevice) {
@@ -176,17 +172,16 @@ TEST_F(VideoCaptureDeviceTest, CaptureVGA) {
EXPECT_CALL(*client_, OnErr())
.Times(0);
- VideoCaptureCapability capture_format(640,
- 480,
- 30,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
- device->AllocateAndStart(capture_format,
- client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(640, 480);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
// Get captured video frames.
WaitForCapturedFrame();
- EXPECT_EQ(last_frame_info().width, 640);
- EXPECT_EQ(last_frame_info().height, 480);
+ EXPECT_EQ(last_format().frame_size.width(), 640);
+ EXPECT_EQ(last_format().frame_size.height(), 480);
device->StopAndDeAllocate();
}
@@ -204,13 +199,12 @@ TEST_F(VideoCaptureDeviceTest, Capture720p) {
EXPECT_CALL(*client_, OnErr())
.Times(0);
- VideoCaptureCapability capture_format(1280,
- 720,
- 30,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
- device->AllocateAndStart(capture_format,
- client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(1280, 720);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
// Get captured video frames.
WaitForCapturedFrame();
device->StopAndDeAllocate();
@@ -229,17 +223,16 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_AllocateBadSize) {
EXPECT_CALL(*client_, OnErr())
.Times(0);
- VideoCaptureCapability capture_format(637,
- 472,
- 35,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
- device->AllocateAndStart(capture_format,
- client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(637, 472);
+ capture_params.requested_format.frame_rate = 35;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
WaitForCapturedFrame();
device->StopAndDeAllocate();
- EXPECT_EQ(last_frame_info().width, 640);
- EXPECT_EQ(last_frame_info().height, 480);
+ EXPECT_EQ(last_format().frame_size.width(), 640);
+ EXPECT_EQ(last_format().frame_size.height(), 480);
}
TEST_F(VideoCaptureDeviceTest, ReAllocateCamera) {
@@ -260,36 +253,32 @@ TEST_F(VideoCaptureDeviceTest, ReAllocateCamera) {
} else {
resolution = gfx::Size(1280, 1024);
}
- VideoCaptureCapability requested_format(
- resolution.width(),
- resolution.height(),
- 30,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
-
- device->AllocateAndStart(requested_format, client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size = resolution;
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
device->StopAndDeAllocate();
}
// Finally, do a device start and wait for it to finish.
- gfx::Size resolution;
- VideoCaptureCapability requested_format(
- 320,
- 240,
- 30,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(320, 240);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
ResetWithNewClient();
scoped_ptr<VideoCaptureDevice> device(
VideoCaptureDevice::Create(names_.front()));
- device->AllocateAndStart(requested_format, client_.PassAs<Client>());
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
WaitForCapturedFrame();
device->StopAndDeAllocate();
device.reset();
- EXPECT_EQ(last_frame_info().width, 320);
- EXPECT_EQ(last_frame_info().height, 240);
+ EXPECT_EQ(last_format().frame_size.width(), 320);
+ EXPECT_EQ(last_format().frame_size.height(), 240);
}
TEST_F(VideoCaptureDeviceTest, DeAllocateCameraWhileRunning) {
@@ -305,17 +294,17 @@ TEST_F(VideoCaptureDeviceTest, DeAllocateCameraWhileRunning) {
EXPECT_CALL(*client_, OnErr())
.Times(0);
- VideoCaptureCapability capture_format(640,
- 480,
- 30,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
- device->AllocateAndStart(capture_format, client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(640, 480);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
// Get captured video frames.
WaitForCapturedFrame();
- EXPECT_EQ(last_frame_info().width, 640);
- EXPECT_EQ(last_frame_info().height, 480);
- EXPECT_EQ(last_frame_info().frame_rate, 30);
+ EXPECT_EQ(last_format().frame_size.width(), 640);
+ EXPECT_EQ(last_format().frame_size.height(), 480);
+ EXPECT_EQ(last_format().frame_rate, 30);
device->StopAndDeAllocate();
}
@@ -333,17 +322,16 @@ TEST_F(VideoCaptureDeviceTest, FakeCapture) {
EXPECT_CALL(*client_, OnErr())
.Times(0);
- VideoCaptureCapability capture_format(640,
- 480,
- 30,
- PIXEL_FORMAT_I420,
- ConstantResolutionVideoCaptureDevice);
- device->AllocateAndStart(capture_format,
- client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(640, 480);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
WaitForCapturedFrame();
- EXPECT_EQ(last_frame_info().width, 640);
- EXPECT_EQ(last_frame_info().height, 480);
- EXPECT_EQ(last_frame_info().frame_rate, 30);
+ EXPECT_EQ(last_format().frame_size.width(), 640);
+ EXPECT_EQ(last_format().frame_size.height(), 480);
+ EXPECT_EQ(last_format().frame_rate, 30);
device->StopAndDeAllocate();
}
@@ -361,17 +349,17 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_CaptureMjpeg) {
EXPECT_CALL(*client_, OnErr())
.Times(0);
- VideoCaptureCapability capture_format(1280,
- 720,
- 30,
- PIXEL_FORMAT_MJPEG,
- ConstantResolutionVideoCaptureDevice);
- device->AllocateAndStart(capture_format, client_.PassAs<Client>());
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(1280, 720);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_MJPEG;
+ capture_params.allow_resolution_change = false;
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
// Get captured video frames.
WaitForCapturedFrame();
// Verify we get MJPEG from the device. Not all devices can capture 1280x720
// @ 30 fps, so we don't care about the exact resolution we get.
- EXPECT_EQ(last_frame_info().color, PIXEL_FORMAT_MJPEG);
+ EXPECT_EQ(last_format().pixel_format, PIXEL_FORMAT_MJPEG);
device->StopAndDeAllocate();
}
@@ -381,12 +369,12 @@ TEST_F(VideoCaptureDeviceTest, GetDeviceSupportedFormats) {
DVLOG(1) << "No camera available. Exiting test.";
return;
}
- VideoCaptureCapabilities capture_formats;
+ VideoCaptureCapabilities capture_capabilities;
VideoCaptureDevice::Names::iterator names_iterator;
for (names_iterator = names_.begin(); names_iterator != names_.end();
++names_iterator) {
VideoCaptureDevice::GetDeviceSupportedFormats(*names_iterator,
- &capture_formats);
+ &capture_capabilities);
// Nothing to test here since we cannot forecast the hardware capabilities.
}
}
@@ -395,11 +383,11 @@ TEST_F(VideoCaptureDeviceTest, FakeCaptureVariableResolution) {
VideoCaptureDevice::Names names;
FakeVideoCaptureDevice::GetDeviceNames(&names);
- media::VideoCaptureCapability capture_format;
- capture_format.width = 640;
- capture_format.height = 480;
- capture_format.frame_rate = 30;
- capture_format.frame_size_type = media::VariableResolutionVideoCaptureDevice;
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(640, 480);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
+ capture_params.allow_resolution_change = true;
ASSERT_GT(static_cast<int>(names.size()), 0);
@@ -411,7 +399,7 @@ TEST_F(VideoCaptureDeviceTest, FakeCaptureVariableResolution) {
.Times(0);
int action_count = 200;
- device->AllocateAndStart(capture_format, client_.PassAs<Client>());
+ device->AllocateAndStart(capture_params, client_.PassAs<Client>());
// We set TimeWait to 200 action timeouts and this should be enough for at
// least action_count/kFakeCaptureCapabilityChangePeriod calls.
@@ -425,18 +413,20 @@ TEST_F(VideoCaptureDeviceTest, FakeGetDeviceSupportedFormats) {
VideoCaptureDevice::Names names;
FakeVideoCaptureDevice::GetDeviceNames(&names);
- VideoCaptureCapabilities capture_formats;
+ VideoCaptureCapabilities capture_capabilities;
VideoCaptureDevice::Names::iterator names_iterator;
for (names_iterator = names.begin(); names_iterator != names.end();
++names_iterator) {
FakeVideoCaptureDevice::GetDeviceSupportedFormats(*names_iterator,
- &capture_formats);
- EXPECT_GE(capture_formats.size(), 1u);
- EXPECT_EQ(capture_formats[0].width, 640);
- EXPECT_EQ(capture_formats[0].height, 480);
- EXPECT_EQ(capture_formats[0].color, media::PIXEL_FORMAT_I420);
- EXPECT_GE(capture_formats[0].frame_rate, 20);
+ &capture_capabilities);
+ EXPECT_GE(capture_capabilities.size(), 1u);
+ EXPECT_EQ(capture_capabilities[0].supported_format.frame_size.width(), 640);
+ EXPECT_EQ(capture_capabilities[0].supported_format.frame_size.height(),
+ 480);
+ EXPECT_EQ(capture_capabilities[0].supported_format.pixel_format,
+ media::PIXEL_FORMAT_I420);
+ EXPECT_GE(capture_capabilities[0].supported_format.frame_rate, 20);
}
}
diff --git a/media/video/capture/video_capture_proxy.cc b/media/video/capture/video_capture_proxy.cc
index bbbd610..d488c50 100644
--- a/media/video/capture/video_capture_proxy.cc
+++ b/media/video/capture/video_capture_proxy.cc
@@ -7,6 +7,7 @@
#include "base/bind.h"
#include "base/location.h"
#include "base/message_loop/message_loop_proxy.h"
+#include "media/base/video_frame.h"
namespace {
diff --git a/media/video/capture/video_capture_types.cc b/media/video/capture/video_capture_types.cc
index 48f03f1..96e5236 100644
--- a/media/video/capture/video_capture_types.cc
+++ b/media/video/capture/video_capture_types.cc
@@ -9,44 +9,28 @@
namespace media {
VideoCaptureFormat::VideoCaptureFormat()
- : width(0),
- height(0),
- frame_rate(0),
- frame_size_type(ConstantResolutionVideoCaptureDevice) {}
-
-VideoCaptureFormat::VideoCaptureFormat(
- int width,
- int height,
- int frame_rate,
- VideoCaptureResolutionType frame_size_type)
- : width(width),
- height(height),
+ : frame_rate(0), pixel_format(PIXEL_FORMAT_UNKNOWN) {}
+
+VideoCaptureFormat::VideoCaptureFormat(const gfx::Size& frame_size,
+ int frame_rate,
+ VideoPixelFormat pixel_format)
+ : frame_size(frame_size),
frame_rate(frame_rate),
- frame_size_type(frame_size_type) {}
+ pixel_format(pixel_format) {}
bool VideoCaptureFormat::IsValid() const {
- return (width > 0) && (height > 0) && (frame_rate > 0) &&
+ return (frame_size.width() < media::limits::kMaxDimension) &&
+ (frame_size.height() < media::limits::kMaxDimension) &&
+ (frame_size.GetArea() > 0) &&
+ (frame_size.GetArea() < media::limits::kMaxCanvas) &&
+ (frame_rate > 0) &&
(frame_rate < media::limits::kMaxFramesPerSecond) &&
- (width < media::limits::kMaxDimension) &&
- (height < media::limits::kMaxDimension) &&
- (width * height < media::limits::kMaxCanvas) &&
- (frame_size_type >= 0) &&
- (frame_size_type < media::MaxVideoCaptureResolutionType);
+ (pixel_format >= PIXEL_FORMAT_UNKNOWN) &&
+ (pixel_format < PIXEL_FORMAT_MAX);
}
-VideoCaptureParams::VideoCaptureParams()
- : session_id(0) {}
-
-VideoCaptureCapability::VideoCaptureCapability()
- : color(PIXEL_FORMAT_UNKNOWN) {}
+VideoCaptureParams::VideoCaptureParams() : allow_resolution_change(false) {}
-VideoCaptureCapability::VideoCaptureCapability(
- int width,
- int height,
- int frame_rate,
- VideoPixelFormat color,
- VideoCaptureResolutionType frame_size_type)
- : VideoCaptureFormat(width, height, frame_rate, frame_size_type),
- color(color) {}
+VideoCaptureCapability::VideoCaptureCapability() {}
} // namespace media
diff --git a/media/video/capture/video_capture_types.h b/media/video/capture/video_capture_types.h
index 489befe..d06c1b4 100644
--- a/media/video/capture/video_capture_types.h
+++ b/media/video/capture/video_capture_types.h
@@ -5,7 +5,10 @@
#ifndef MEDIA_VIDEO_CAPTURE_VIDEO_CAPTURE_TYPES_H_
#define MEDIA_VIDEO_CAPTURE_VIDEO_CAPTURE_TYPES_H_
-#include "media/base/video_frame.h"
+#include <vector>
+
+#include "media/base/media_export.h"
+#include "ui/gfx/size.h"
namespace media {
@@ -13,12 +16,6 @@ namespace media {
// shared with device manager.
typedef int VideoCaptureSessionId;
-enum VideoCaptureResolutionType {
- ConstantResolutionVideoCaptureDevice = 0,
- VariableResolutionVideoCaptureDevice,
- MaxVideoCaptureResolutionType, // Must be last.
-};
-
// Color formats from camera.
enum VideoPixelFormat {
PIXEL_FORMAT_UNKNOWN, // Color format not set.
@@ -30,49 +27,52 @@ enum VideoPixelFormat {
PIXEL_FORMAT_MJPEG,
PIXEL_FORMAT_NV21,
PIXEL_FORMAT_YV12,
+ PIXEL_FORMAT_MAX,
};
// Video capture format specification.
+// This class is used by the video capture device to specify the format of every
+// frame captured and returned to a client.
class MEDIA_EXPORT VideoCaptureFormat {
public:
VideoCaptureFormat();
- VideoCaptureFormat(int width,
- int height,
+ VideoCaptureFormat(const gfx::Size& frame_size,
int frame_rate,
- VideoCaptureResolutionType frame_size_type);
+ VideoPixelFormat pixel_format);
// Checks that all values are in the expected range. All limits are specified
// in media::Limits.
bool IsValid() const;
- int width;
- int height;
+ gfx::Size frame_size;
int frame_rate;
- VideoCaptureResolutionType frame_size_type;
+ VideoPixelFormat pixel_format;
};
// Parameters for starting video capture.
+// This class is used by the client of a video capture device to specify the
+// format of frames in which the client would like to have captured frames
+// returned.
class MEDIA_EXPORT VideoCaptureParams {
public:
VideoCaptureParams();
- // Identifies which device is to be started.
- VideoCaptureSessionId session_id;
// Requests a resolution and format at which the capture will occur.
VideoCaptureFormat requested_format;
+
+ // Allow mid-capture resolution change.
+ bool allow_resolution_change;
};
// Capabilities describe the format a camera captures video in.
-class MEDIA_EXPORT VideoCaptureCapability : public VideoCaptureFormat {
+// This class is used by the video capture device to report the formats in which
+// it is capable of capturing frames.
+class MEDIA_EXPORT VideoCaptureCapability {
public:
VideoCaptureCapability();
- VideoCaptureCapability(int width,
- int height,
- int frame_rate,
- VideoPixelFormat color,
- VideoCaptureResolutionType frame_size_type);
- VideoPixelFormat color; // Desired video type.
+ // Supported resolution and format.
+ VideoCaptureFormat supported_format;
};
typedef std::vector<VideoCaptureCapability> VideoCaptureCapabilities;
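Editor's note: a short illustrative sketch of how the three types divide responsibilities after this header change (the values are arbitrary). A device reports VideoCaptureCapability entries wrapping a supported VideoCaptureFormat, while a client fills a VideoCaptureParams with the format it wants.

    // Device side: advertise one capability per supported mode.
    media::VideoCaptureCapabilities capabilities;
    media::VideoCaptureCapability vga;
    vga.supported_format = media::VideoCaptureFormat(
        gfx::Size(640, 480), 30, media::PIXEL_FORMAT_I420);
    capabilities.push_back(vga);

    // Client side: request a format; IsValid() bounds-checks frame size,
    // frame rate and pixel format against media::limits.
    media::VideoCaptureParams params;
    params.requested_format = capabilities[0].supported_format;
    params.allow_resolution_change = true;  // Per-session, not per-frame.
    DCHECK(params.requested_format.IsValid());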
diff --git a/media/video/capture/win/capability_list_win.cc b/media/video/capture/win/capability_list_win.cc
index 18325bb..bfa58ed 100644
--- a/media/video/capture/win/capability_list_win.cc
+++ b/media/video/capture/win/capability_list_win.cc
@@ -33,7 +33,8 @@ bool CompareFrameRate(const ResolutionDiff& item1,
}
bool CompareColor(const ResolutionDiff& item1, const ResolutionDiff& item2) {
- return item1.capability->color < item2.capability->color;
+ return item1.capability->supported_format.pixel_format <
+ item2.capability->supported_format.pixel_format;
}
} // namespace.
@@ -50,7 +51,7 @@ void CapabilityList::Add(const VideoCaptureCapabilityWin& capability) {
capabilities_.push_back(capability);
}
-const VideoCaptureCapabilityWin& CapabilityList::GetBestMatchedCapability(
+const VideoCaptureCapabilityWin& CapabilityList::GetBestMatchedFormat(
int requested_width,
int requested_height,
int requested_frame_rate) const {
@@ -65,8 +66,9 @@ const VideoCaptureCapabilityWin& CapabilityList::GetBestMatchedCapability(
it != capabilities_.end(); ++it) {
ResolutionDiff diff;
diff.capability = &(*it);
- diff.diff_width = it->width - requested_width;
- diff.diff_height = it->height - requested_height;
+ diff.diff_width = it->supported_format.frame_size.width() - requested_width;
+ diff.diff_height =
+ it->supported_format.frame_size.height() - requested_height;
// The 1000 allows using integer arithmetic for f.i. 29.971 fps.
diff.diff_frame_rate =
1000 * ((static_cast<float>(it->frame_rate_numerator) /
diff --git a/media/video/capture/win/capability_list_win.h b/media/video/capture/win/capability_list_win.h
index c07b220..05ce8e8 100644
--- a/media/video/capture/win/capability_list_win.h
+++ b/media/video/capture/win/capability_list_win.h
@@ -16,7 +16,7 @@
namespace media {
-struct VideoCaptureCapabilityWin : public VideoCaptureCapability {
+struct VideoCaptureCapabilityWin {
explicit VideoCaptureCapabilityWin(int index)
: stream_index(index),
frame_rate_numerator(0),
@@ -26,6 +26,7 @@ struct VideoCaptureCapabilityWin : public VideoCaptureCapability {
// so framerates can be properly represented, f.i. 29.971fps= 30000/1001.
int frame_rate_numerator;
int frame_rate_denominator;
+ VideoCaptureFormat supported_format;
};
class CapabilityList : public base::NonThreadSafe {
@@ -41,8 +42,9 @@ class CapabilityList : public base::NonThreadSafe {
// Loops through the list of capabilities and returns an index of the best
// matching capability. The algorithm prioritizes height, width, frame rate
// and color format in that order.
- const VideoCaptureCapabilityWin& GetBestMatchedCapability(
- int requested_width, int requested_height,
+ const VideoCaptureCapabilityWin& GetBestMatchedFormat(
+ int requested_width,
+ int requested_height,
int requested_frame_rate) const;
private:
diff --git a/media/video/capture/win/sink_filter_win.cc b/media/video/capture/win/sink_filter_win.cc
index c3fc410..e3bb0a5 100644
--- a/media/video/capture/win/sink_filter_win.cc
+++ b/media/video/capture/win/sink_filter_win.cc
@@ -28,13 +28,12 @@ SinkFilter::~SinkFilter() {
input_pin_->SetOwner(NULL);
}
-void SinkFilter::SetRequestedMediaCapability(
- const VideoCaptureCapability& capability) {
- input_pin_->SetRequestedMediaCapability(capability);
+void SinkFilter::SetRequestedMediaFormat(const VideoCaptureFormat& format) {
+ input_pin_->SetRequestedMediaFormat(format);
}
-const VideoCaptureCapability& SinkFilter::ResultingCapability() {
- return input_pin_->ResultingCapability();
+const VideoCaptureFormat& SinkFilter::ResultingFormat() {
+ return input_pin_->ResultingFormat();
}
size_t SinkFilter::NoOfPins() {
diff --git a/media/video/capture/win/sink_filter_win.h b/media/video/capture/win/sink_filter_win.h
index 36bb124..e454f0b 100644
--- a/media/video/capture/win/sink_filter_win.h
+++ b/media/video/capture/win/sink_filter_win.h
@@ -32,11 +32,10 @@ class __declspec(uuid("88cdbbdc-a73b-4afa-acbf-15d5e2ce12c3"))
explicit SinkFilter(SinkFilterObserver* observer);
virtual ~SinkFilter();
- void SetRequestedMediaCapability(
- const VideoCaptureCapability& capability);
- // Returns the capability that is negotiated when this
+ void SetRequestedMediaFormat(const VideoCaptureFormat& format);
+ // Returns the format that is negotiated when this
// filter is connected to a media filter.
- const VideoCaptureCapability& ResultingCapability();
+ const VideoCaptureFormat& ResultingFormat();
// Implement FilterBase.
virtual size_t NoOfPins();
diff --git a/media/video/capture/win/sink_input_pin_win.cc b/media/video/capture/win/sink_input_pin_win.cc
index 9d97918..0126e13 100644
--- a/media/video/capture/win/sink_input_pin_win.cc
+++ b/media/video/capture/win/sink_input_pin_win.cc
@@ -20,8 +20,6 @@ SinkInputPin::SinkInputPin(IBaseFilter* filter,
SinkFilterObserver* observer)
: observer_(observer),
PinBase(filter) {
- memset(&requested_capability_, 0, sizeof(requested_capability_));
- memset(&resulting_capability_, 0, sizeof(resulting_capability_));
}
SinkInputPin::~SinkInputPin() {}
@@ -38,9 +36,9 @@ bool SinkInputPin::GetValidMediaType(int index, AM_MEDIA_TYPE* media_type) {
pvi->bmiHeader.biPlanes = 1;
pvi->bmiHeader.biClrImportant = 0;
pvi->bmiHeader.biClrUsed = 0;
- if (requested_capability_.frame_rate > 0) {
- pvi->AvgTimePerFrame = kSecondsToReferenceTime /
- requested_capability_.frame_rate;
+ if (requested_format_.frame_rate > 0) {
+ pvi->AvgTimePerFrame =
+ kSecondsToReferenceTime / requested_format_.frame_rate;
}
media_type->majortype = MEDIATYPE_Video;
@@ -51,30 +49,28 @@ bool SinkInputPin::GetValidMediaType(int index, AM_MEDIA_TYPE* media_type) {
case 0: {
pvi->bmiHeader.biCompression = MAKEFOURCC('I', '4', '2', '0');
pvi->bmiHeader.biBitCount = 12; // bit per pixel
- pvi->bmiHeader.biWidth = requested_capability_.width;
- pvi->bmiHeader.biHeight = requested_capability_.height;
- pvi->bmiHeader.biSizeImage = 3 * requested_capability_.height *
- requested_capability_.width / 2;
+ pvi->bmiHeader.biWidth = requested_format_.frame_size.width();
+ pvi->bmiHeader.biHeight = requested_format_.frame_size.height();
+ pvi->bmiHeader.biSizeImage =
+ requested_format_.frame_size.GetArea() * 3 / 2;
media_type->subtype = kMediaSubTypeI420;
break;
}
case 1: {
pvi->bmiHeader.biCompression = MAKEFOURCC('Y', 'U', 'Y', '2');
pvi->bmiHeader.biBitCount = 16;
- pvi->bmiHeader.biWidth = requested_capability_.width;
- pvi->bmiHeader.biHeight = requested_capability_.height;
- pvi->bmiHeader.biSizeImage = 2 * requested_capability_.width *
- requested_capability_.height;
+ pvi->bmiHeader.biWidth = requested_format_.frame_size.width();
+ pvi->bmiHeader.biHeight = requested_format_.frame_size.height();
+ pvi->bmiHeader.biSizeImage = requested_format_.frame_size.GetArea() * 2;
media_type->subtype = MEDIASUBTYPE_YUY2;
break;
}
case 2: {
pvi->bmiHeader.biCompression = BI_RGB;
pvi->bmiHeader.biBitCount = 24;
- pvi->bmiHeader.biWidth = requested_capability_.width;
- pvi->bmiHeader.biHeight = requested_capability_.height;
- pvi->bmiHeader.biSizeImage = 3 * requested_capability_.height *
- requested_capability_.width;
+ pvi->bmiHeader.biWidth = requested_format_.frame_size.width();
+ pvi->bmiHeader.biHeight = requested_format_.frame_size.height();
+ pvi->bmiHeader.biSizeImage = requested_format_.frame_size.GetArea() * 3;
media_type->subtype = MEDIASUBTYPE_RGB24;
break;
}
@@ -104,27 +100,27 @@ bool SinkInputPin::IsMediaTypeValid(const AM_MEDIA_TYPE* media_type) {
return false;
// Store the incoming width and height.
- resulting_capability_.width = pvi->bmiHeader.biWidth;
- resulting_capability_.height = abs(pvi->bmiHeader.biHeight);
+ resulting_format_.frame_size.SetSize(pvi->bmiHeader.biWidth,
+ abs(pvi->bmiHeader.biHeight));
if (pvi->AvgTimePerFrame > 0) {
- resulting_capability_.frame_rate =
+ resulting_format_.frame_rate =
static_cast<int>(kSecondsToReferenceTime / pvi->AvgTimePerFrame);
} else {
- resulting_capability_.frame_rate = requested_capability_.frame_rate;
+ resulting_format_.frame_rate = requested_format_.frame_rate;
}
if (sub_type == kMediaSubTypeI420 &&
pvi->bmiHeader.biCompression == MAKEFOURCC('I', '4', '2', '0')) {
- resulting_capability_.color = PIXEL_FORMAT_I420;
+ resulting_format_.pixel_format = PIXEL_FORMAT_I420;
return true; // This format is acceptable.
}
if (sub_type == MEDIASUBTYPE_YUY2 &&
pvi->bmiHeader.biCompression == MAKEFOURCC('Y', 'U', 'Y', '2')) {
- resulting_capability_.color = PIXEL_FORMAT_YUY2;
+ resulting_format_.pixel_format = PIXEL_FORMAT_YUY2;
return true; // This format is acceptable.
}
if (sub_type == MEDIASUBTYPE_RGB24 &&
pvi->bmiHeader.biCompression == BI_RGB) {
- resulting_capability_.color = PIXEL_FORMAT_RGB24;
+ resulting_format_.pixel_format = PIXEL_FORMAT_RGB24;
return true; // This format is acceptable.
}
return false;
@@ -140,17 +136,15 @@ HRESULT SinkInputPin::Receive(IMediaSample* sample) {
return S_OK;
}
-void SinkInputPin::SetRequestedMediaCapability(
- const VideoCaptureCapability& capability) {
- requested_capability_ = capability;
- resulting_capability_.width = 0;
- resulting_capability_.height = 0;
- resulting_capability_.frame_rate = 0;
- resulting_capability_.color = PIXEL_FORMAT_UNKNOWN;
+void SinkInputPin::SetRequestedMediaFormat(const VideoCaptureFormat& format) {
+ requested_format_ = format;
+ resulting_format_.frame_size.SetSize(0, 0);
+ resulting_format_.frame_rate = 0;
+ resulting_format_.pixel_format = PIXEL_FORMAT_UNKNOWN;
}
-const VideoCaptureCapability& SinkInputPin::ResultingCapability() {
- return resulting_capability_;
+const VideoCaptureFormat& SinkInputPin::ResultingFormat() {
+ return resulting_format_;
}
} // namespace media
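
GetValidMediaType() above now derives biSizeImage from requested_format_.frame_size.GetArea() instead of multiplying width and height inline; the per-format byte counts are unchanged (12 bits per pixel for I420, 16 for YUY2, 24 for RGB24). A quick standalone check of that arithmetic (plain C++, not Chromium code; GetArea() is mimicked here by width * height):

// buffer_size_sketch.cc -- illustration only, not Chromium source.
#include <cassert>
#include <cstdint>

enum SketchPixelFormat { kI420, kYUY2, kRGB24 };

// Bytes needed for one frame of each media type the pin offers.
std::uint32_t FrameBytes(int width, int height, SketchPixelFormat format) {
  const std::uint32_t area = static_cast<std::uint32_t>(width) * height;  // GetArea()
  switch (format) {
    case kI420:
      return area * 3 / 2;  // full-size Y plane plus quarter-size U and V
    case kYUY2:
      return area * 2;      // packed 4:2:2, two bytes per pixel
    case kRGB24:
      return area * 3;      // three bytes per pixel
  }
  return 0;
}

int main() {
  // 640x480 gives the same values the old width/height expressions produced.
  assert(FrameBytes(640, 480, kI420) == 640u * 480u * 3 / 2);
  assert(FrameBytes(640, 480, kYUY2) == 640u * 480u * 2);
  assert(FrameBytes(640, 480, kRGB24) == 640u * 480u * 3);
  return 0;
}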
diff --git a/media/video/capture/win/sink_input_pin_win.h b/media/video/capture/win/sink_input_pin_win.h
index 16168a3..f14ca33 100644
--- a/media/video/capture/win/sink_input_pin_win.h
+++ b/media/video/capture/win/sink_input_pin_win.h
@@ -24,10 +24,10 @@ class SinkInputPin : public PinBase {
SinkInputPin(IBaseFilter* filter, SinkFilterObserver* observer);
virtual ~SinkInputPin();
- void SetRequestedMediaCapability(const VideoCaptureCapability& capability);
+ void SetRequestedMediaFormat(const VideoCaptureFormat& format);
// Returns the capability that is negotiated when this
// pin is connected to a media filter.
- const VideoCaptureCapability& ResultingCapability();
+ const VideoCaptureFormat& ResultingFormat();
// Implement PinBase.
virtual bool IsMediaTypeValid(const AM_MEDIA_TYPE* media_type);
@@ -36,8 +36,8 @@ class SinkInputPin : public PinBase {
STDMETHOD(Receive)(IMediaSample* media_sample);
private:
- VideoCaptureCapability requested_capability_;
- VideoCaptureCapability resulting_capability_;
+ VideoCaptureFormat requested_format_;
+ VideoCaptureFormat resulting_format_;
SinkFilterObserver* observer_;
DISALLOW_IMPLICIT_CONSTRUCTORS(SinkInputPin);
diff --git a/media/video/capture/win/video_capture_device_mf_win.cc b/media/video/capture/win/video_capture_device_mf_win.cc
index 12694de..f9cfb0a 100644
--- a/media/video/capture/win/video_capture_device_mf_win.cc
+++ b/media/video/capture/win/video_capture_device_mf_win.cc
@@ -94,12 +94,11 @@ bool FormatFromGuid(const GUID& guid, VideoPixelFormat* format) {
return false;
}
-bool GetFrameSize(IMFMediaType* type, int* width, int* height) {
+bool GetFrameSize(IMFMediaType* type, gfx::Size* frame_size) {
UINT32 width32, height32;
if (FAILED(MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width32, &height32)))
return false;
- *width = width32;
- *height = height32;
+ frame_size->SetSize(width32, height32);
return true;
}
@@ -121,15 +120,15 @@ bool FillCapabilitiesFromType(IMFMediaType* type,
VideoCaptureCapabilityWin* capability) {
GUID type_guid;
if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &type_guid)) ||
- !FormatFromGuid(type_guid, &capability->color) ||
- !GetFrameSize(type, &capability->width, &capability->height) ||
+ !GetFrameSize(type, &capability->supported_format.frame_size) ||
!GetFrameRate(type,
&capability->frame_rate_numerator,
- &capability->frame_rate_denominator)) {
+ &capability->frame_rate_denominator) ||
+ !FormatFromGuid(type_guid, &capability->supported_format.pixel_format)) {
return false;
}
// Keep the integer version of the frame_rate for (potential) returns.
- capability->frame_rate =
+ capability->supported_format.frame_rate =
capability->frame_rate_numerator / capability->frame_rate_denominator;
return true;
@@ -337,7 +336,7 @@ bool VideoCaptureDeviceMFWin::Init() {
}
void VideoCaptureDeviceMFWin::AllocateAndStart(
- const VideoCaptureCapability& capture_format,
+ const VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
DCHECK(CalledOnValidThread());
@@ -354,13 +353,10 @@ void VideoCaptureDeviceMFWin::AllocateAndStart(
}
VideoCaptureCapabilityWin found_capability =
- capabilities.GetBestMatchedCapability(capture_format.width,
- capture_format.height,
- capture_format.frame_rate);
- DLOG(INFO) << "Chosen capture format= (" << found_capability.width << "x"
- << found_capability.height << ")@("
- << found_capability.frame_rate_numerator << "/"
- << found_capability.frame_rate_denominator << ")fps";
+ capabilities.GetBestMatchedFormat(
+ params.requested_format.frame_size.width(),
+ params.requested_format.frame_size.height(),
+ params.requested_format.frame_rate);
ScopedComPtr<IMFMediaType> type;
if (FAILED(hr = reader_->GetNativeMediaType(
@@ -377,7 +373,7 @@ void VideoCaptureDeviceMFWin::AllocateAndStart(
OnError(hr);
return;
}
- current_setting_ = found_capability;
+ capture_format_ = found_capability.supported_format;
capture_ = true;
}
@@ -424,7 +420,7 @@ void VideoCaptureDeviceMFWin::OnIncomingCapturedFrame(
rotation,
flip_vert,
flip_horiz,
- current_setting_);
+ capture_format_);
if (capture_) {
HRESULT hr = reader_->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0,
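
FillCapabilitiesFromType() above keeps the rational frame rate (frame_rate_numerator / frame_rate_denominator) on VideoCaptureCapabilityWin and derives the integer supported_format.frame_rate from it by integer division. The standalone sketch below (not Chromium source; RationalRate is a stand-in) shows why the rational form is worth keeping: a camera reporting a 30000/1001 rate, for example, truncates to 29 as an integer.

// frame_rate_sketch.cc -- illustration only, not Chromium source.
#include <cstdio>

struct RationalRate {
  int numerator;
  int denominator;
};

// Mirrors the integer derivation above: numerator / denominator with integer
// division, so the rational members are the ones that preserve precision.
int IntegerRate(const RationalRate& rate) {
  return rate.denominator > 0 ? rate.numerator / rate.denominator : 0;
}

int main() {
  RationalRate ntsc = {30000, 1001};  // an example camera-reported rate
  RationalRate pal = {25, 1};
  std::printf("ntsc: %d fps (exact %.2f)\n",
              IntegerRate(ntsc), 30000.0 / 1001.0);  // 29 (exact 29.97)
  std::printf("pal:  %d fps\n", IntegerRate(pal));   // 25
  return 0;
}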
diff --git a/media/video/capture/win/video_capture_device_mf_win.h b/media/video/capture/win/video_capture_device_mf_win.h
index a9191bd..c9da58c 100644
--- a/media/video/capture/win/video_capture_device_mf_win.h
+++ b/media/video/capture/win/video_capture_device_mf_win.h
@@ -38,9 +38,9 @@ class MEDIA_EXPORT VideoCaptureDeviceMFWin
bool Init();
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(
- const VideoCaptureCapability& capture_format,
- scoped_ptr<VideoCaptureDevice::Client> client) OVERRIDE;
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
+ scoped_ptr<VideoCaptureDevice::Client> client)
+ OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
// Returns true iff the current platform supports the Media Foundation API
@@ -71,7 +71,7 @@ class MEDIA_EXPORT VideoCaptureDeviceMFWin
base::Lock lock_; // Used to guard the below variables.
scoped_ptr<VideoCaptureDevice::Client> client_;
base::win::ScopedComPtr<IMFSourceReader> reader_;
- VideoCaptureCapability current_setting_;
+ VideoCaptureFormat capture_format_;
bool capture_;
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceMFWin);
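
With the AllocateAndStart() signature above, a caller expresses what it wants through params.requested_format (a VideoCaptureFormat: frame_size, frame_rate, pixel_format) rather than through a capability. The hypothetical caller below uses stand-in types defined on the spot (Size, Format, Params, Client, FakeDevice, and std::unique_ptr in place of scoped_ptr); it is a sketch of the calling convention, not the media/ API.

// caller_sketch.cc -- illustration only, not Chromium source.
#include <cstdio>
#include <memory>
#include <utility>

struct Size { int width; int height; };
enum PixelFormat { PIXEL_FORMAT_UNKNOWN, PIXEL_FORMAT_I420 };
struct Format {
  Size frame_size;
  int frame_rate;
  PixelFormat pixel_format;
};
struct Params { Format requested_format; };

struct Client {
  virtual ~Client() {}
};

class FakeDevice {
 public:
  // Shaped like the new AllocateAndStart(params, client) entry point.
  void AllocateAndStart(const Params& params, std::unique_ptr<Client> client) {
    client_ = std::move(client);
    std::printf("capturing %dx%d@%d\n",
                params.requested_format.frame_size.width,
                params.requested_format.frame_size.height,
                params.requested_format.frame_rate);
  }

 private:
  std::unique_ptr<Client> client_;
};

int main() {
  Params params = {};
  params.requested_format.frame_size.width = 1280;
  params.requested_format.frame_size.height = 720;
  params.requested_format.frame_rate = 30;
  params.requested_format.pixel_format = PIXEL_FORMAT_I420;

  FakeDevice device;
  device.AllocateAndStart(params, std::unique_ptr<Client>(new Client));
  return 0;
}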
diff --git a/media/video/capture/win/video_capture_device_win.cc b/media/video/capture/win/video_capture_device_win.cc
index a4f0b83..5b24d72 100644
--- a/media/video/capture/win/video_capture_device_win.cc
+++ b/media/video/capture/win/video_capture_device_win.cc
@@ -341,7 +341,7 @@ bool VideoCaptureDeviceWin::Init() {
}
void VideoCaptureDeviceWin::AllocateAndStart(
- const VideoCaptureCapability& capture_format,
+ const VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
DCHECK(CalledOnValidThread());
if (state_ != kIdle)
@@ -351,15 +351,16 @@ void VideoCaptureDeviceWin::AllocateAndStart(
// Get the camera capability that best match the requested resolution.
const VideoCaptureCapabilityWin& found_capability =
- capabilities_.GetBestMatchedCapability(capture_format.width,
- capture_format.height,
- capture_format.frame_rate);
- VideoCaptureCapability capability = found_capability;
+ capabilities_.GetBestMatchedFormat(
+ params.requested_format.frame_size.width(),
+ params.requested_format.frame_size.height(),
+ params.requested_format.frame_rate);
+ VideoCaptureFormat format = found_capability.supported_format;
// Reduce the frame rate if the requested frame rate is lower
// than the capability.
- if (capability.frame_rate > capture_format.frame_rate)
- capability.frame_rate = capture_format.frame_rate;
+ if (format.frame_rate > params.requested_format.frame_rate)
+ format.frame_rate = params.requested_format.frame_rate;
AM_MEDIA_TYPE* pmt = NULL;
VIDEO_STREAM_CONFIG_CAPS caps;
@@ -377,20 +378,19 @@ void VideoCaptureDeviceWin::AllocateAndStart(
if (SUCCEEDED(hr)) {
if (pmt->formattype == FORMAT_VideoInfo) {
VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
- if (capability.frame_rate > 0)
- h->AvgTimePerFrame = kSecondsToReferenceTime / capability.frame_rate;
+ if (format.frame_rate > 0)
+ h->AvgTimePerFrame = kSecondsToReferenceTime / format.frame_rate;
}
- // Set the sink filter to request this capability.
- sink_filter_->SetRequestedMediaCapability(capability);
- // Order the capture device to use this capability.
+ // Set the sink filter to request this format.
+ sink_filter_->SetRequestedMediaFormat(format);
+ // Order the capture device to use this format.
hr = stream_config->SetFormat(pmt);
}
if (FAILED(hr))
SetErrorState("Failed to set capture device output format");
- if (capability.color == PIXEL_FORMAT_MJPEG &&
- !mjpg_filter_.get()) {
+ if (format.pixel_format == PIXEL_FORMAT_MJPEG && !mjpg_filter_.get()) {
// Create MJPG filter if we need it.
hr = mjpg_filter_.CreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC);
@@ -408,8 +408,7 @@ void VideoCaptureDeviceWin::AllocateAndStart(
}
}
- if (capability.color == PIXEL_FORMAT_MJPEG &&
- mjpg_filter_.get()) {
+ if (format.pixel_format == PIXEL_FORMAT_MJPEG && mjpg_filter_.get()) {
// Connect the camera to the MJPEG decoder.
hr = graph_builder_->ConnectDirect(output_capture_pin_, input_mjpg_pin_,
NULL);
@@ -433,9 +432,9 @@ void VideoCaptureDeviceWin::AllocateAndStart(
return;
}
- // Get the capability back from the sink filter after the filter have been
+ // Get the format back from the sink filter after the filter has been
// connected.
- current_setting_ = sink_filter_->ResultingCapability();
+ capture_format_ = sink_filter_->ResultingFormat();
// Start capturing.
hr = media_control_->Run();
@@ -479,7 +478,7 @@ void VideoCaptureDeviceWin::StopAndDeAllocate() {
void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer,
int length) {
client_->OnIncomingCapturedFrame(
- buffer, length, base::Time::Now(), 0, false, false, current_setting_);
+ buffer, length, base::Time::Now(), 0, false, false, capture_format_);
}
bool VideoCaptureDeviceWin::CreateCapabilityMap() {
@@ -522,8 +521,8 @@ bool VideoCaptureDeviceWin::CreateCapabilityMap() {
VideoCaptureCapabilityWin capability(i);
VIDEOINFOHEADER* h =
reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
- capability.width = h->bmiHeader.biWidth;
- capability.height = h->bmiHeader.biHeight;
+ capability.supported_format.frame_size.SetSize(h->bmiHeader.biWidth,
+ h->bmiHeader.biHeight);
// Try to get a better |time_per_frame| from IAMVideoControl. If not, use
// the value from VIDEOINFOHEADER.
@@ -531,7 +530,8 @@ bool VideoCaptureDeviceWin::CreateCapabilityMap() {
if (video_control) {
ScopedCoMem<LONGLONG> max_fps;
LONG list_size = 0;
- SIZE size = { capability.width, capability.height };
+ SIZE size = {capability.supported_format.frame_size.width(),
+ capability.supported_format.frame_size.height()};
// GetFrameRateList doesn't return max frame rate always
// eg: Logitech Notebook. This may be due to a bug in that API
@@ -549,30 +549,32 @@ bool VideoCaptureDeviceWin::CreateCapabilityMap() {
}
}
- capability.frame_rate = (time_per_frame > 0) ?
- static_cast<int>(kSecondsToReferenceTime / time_per_frame) : 0;
+ capability.supported_format.frame_rate =
+ (time_per_frame > 0)
+ ? static_cast<int>(kSecondsToReferenceTime / time_per_frame)
+ : 0;
// DirectShow works at the moment only on integer frame_rate but the
// best capability matching class works on rational frame rates.
- capability.frame_rate_numerator = capability.frame_rate;
+ capability.frame_rate_numerator = capability.supported_format.frame_rate;
capability.frame_rate_denominator = 1;
// We can't switch MEDIATYPE :~(.
if (media_type->subtype == kMediaSubTypeI420) {
- capability.color = PIXEL_FORMAT_I420;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_I420;
} else if (media_type->subtype == MEDIASUBTYPE_IYUV) {
// This is identical to PIXEL_FORMAT_I420.
- capability.color = PIXEL_FORMAT_I420;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_I420;
} else if (media_type->subtype == MEDIASUBTYPE_RGB24) {
- capability.color = PIXEL_FORMAT_RGB24;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_RGB24;
} else if (media_type->subtype == MEDIASUBTYPE_YUY2) {
- capability.color = PIXEL_FORMAT_YUY2;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_YUY2;
} else if (media_type->subtype == MEDIASUBTYPE_MJPG) {
- capability.color = PIXEL_FORMAT_MJPEG;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_MJPEG;
} else if (media_type->subtype == MEDIASUBTYPE_UYVY) {
- capability.color = PIXEL_FORMAT_UYVY;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_UYVY;
} else if (media_type->subtype == MEDIASUBTYPE_ARGB32) {
- capability.color = PIXEL_FORMAT_ARGB;
+ capability.supported_format.pixel_format = PIXEL_FORMAT_ARGB;
} else {
WCHAR guid_str[128];
StringFromGUID2(media_type->subtype, guid_str, arraysize(guid_str));
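
AllocateAndStart() in video_capture_device_win.cc above asks the capability list for the best matched format and then clamps its frame rate down to the requested one. The real matching heuristic lives in capability_list_win.cc and is not shown in this section, so the chooser below is a hypothetical stand-in; only the final clamp mirrors the hunk above.

// best_match_sketch.cc -- illustration only, not Chromium source.
#include <cstdio>
#include <cstdlib>
#include <vector>

struct Format {
  int width;
  int height;
  int frame_rate;
};

// Hypothetical scoring: prefer the capability closest in pixel count and
// frame rate to the request. The shipped GetBestMatchedFormat() may differ.
Format BestMatch(const std::vector<Format>& capabilities,
                 const Format& requested) {
  Format best = capabilities.front();
  long best_score = -1;
  for (const Format& c : capabilities) {
    long score =
        std::labs(static_cast<long>(c.width) * c.height -
                  static_cast<long>(requested.width) * requested.height) +
        std::labs(static_cast<long>(c.frame_rate) - requested.frame_rate);
    if (best_score < 0 || score < best_score) {
      best_score = score;
      best = c;
    }
  }
  return best;
}

int main() {
  std::vector<Format> capabilities = {
      {640, 480, 30}, {1280, 720, 30}, {1920, 1080, 30}};
  Format requested = {1280, 720, 15};

  Format chosen = BestMatch(capabilities, requested);
  // Mirror of the hunk above: never run faster than the caller asked for.
  if (chosen.frame_rate > requested.frame_rate)
    chosen.frame_rate = requested.frame_rate;
  std::printf("%dx%d@%d\n", chosen.width, chosen.height, chosen.frame_rate);
  return 0;  // prints 1280x720@15
}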
diff --git a/media/video/capture/win/video_capture_device_win.h b/media/video/capture/win/video_capture_device_win.h
index 7f55f99..164c01c 100644
--- a/media/video/capture/win/video_capture_device_win.h
+++ b/media/video/capture/win/video_capture_device_win.h
@@ -40,9 +40,9 @@ class VideoCaptureDeviceWin
bool Init();
// VideoCaptureDevice implementation.
- virtual void AllocateAndStart(
- const VideoCaptureCapability& capture_format,
- scoped_ptr<VideoCaptureDevice::Client> client) OVERRIDE;
+ virtual void AllocateAndStart(const VideoCaptureParams& params,
+ scoped_ptr<VideoCaptureDevice::Client> client)
+ OVERRIDE;
virtual void StopAndDeAllocate() OVERRIDE;
static void GetDeviceNames(Names* device_names);
@@ -79,7 +79,7 @@ class VideoCaptureDeviceWin
// Map of all capabilities this device support.
CapabilityList capabilities_;
- VideoCaptureCapability current_setting_;
+ VideoCaptureFormat capture_format_;
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceWin);
};