author     wjia@chromium.org <wjia@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-02-28 22:01:04 +0000
committer  wjia@chromium.org <wjia@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-02-28 22:01:04 +0000
commit     1fa37522cade3c1bbd5a523f4aafc884348f8262 (patch)
tree       dbb65668ba5d228c41fa5d17a887e8d323ba972e
parent     2226daac070dd08be5f6c60cf464dba7fa4bd94c (diff)
Remove one video frame copy in video capture.

On Android, the captured frame might need to be rotated. Moving the rotation
code into VideoCaptureController removes one video frame copy. On a Nexus 4,
copying a frame from the intermediate buffer to shared memory takes about
0.1 ms per frame for VGA YV12.

Review URL: https://codereview.chromium.org/12378007

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@185339 0039d316-1c4b-4281-b951-d872f2087c98
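As a rough illustration of the idea behind this patch (a minimal standalone sketch, not the Chromium code): a packed YV12/I420 frame is three contiguous planes, and rotating each plane directly into the destination planes of the shared-memory buffer avoids the intermediate rotation buffer the Android capture path used before. RotatePlane90() and RotatePackedYV12By90() below are hypothetical stand-ins for media::RotatePlaneByPixels() and the new RotatePackedYV12Frame() helper; they handle only the 90-degree clockwise case and ignore strides.

#include <cstdint>
#include <vector>

// Rotate a single 8-bit plane by 90 degrees clockwise.
// |dst| must be height x width (dimensions swapped), stride == height.
static void RotatePlane90(const uint8_t* src, uint8_t* dst,
                          int width, int height) {
  for (int y = 0; y < height; ++y) {
    for (int x = 0; x < width; ++x) {
      // Source pixel (x, y) lands in row x, column (height - 1 - y).
      dst[x * height + (height - 1 - y)] = src[y * width + x];
    }
  }
}

// Rotate a packed YV12 frame (Y plane, then V, then U) by 90 degrees,
// writing each plane straight into caller-provided destination planes
// (e.g. the planes of a shared-memory buffer), so no intermediate
// full-frame copy is needed.
static void RotatePackedYV12By90(const uint8_t* src,
                                 uint8_t* dst_y, uint8_t* dst_v, uint8_t* dst_u,
                                 int width, int height) {
  const int y_size = width * height;
  RotatePlane90(src, dst_y, width, height);                               // Y
  RotatePlane90(src + y_size, dst_v, width / 2, height / 2);              // V
  RotatePlane90(src + y_size + y_size / 4, dst_u, width / 2, height / 2); // U
}

int main() {
  const int w = 4, h = 2;
  std::vector<uint8_t> frame(w * h * 3 / 2, 0);
  std::vector<uint8_t> y(w * h), v(w * h / 4), u(w * h / 4);
  RotatePackedYV12By90(frame.data(), y.data(), v.data(), u.data(), w, h);
  return 0;
}

Writing each rotated plane straight into the renderer-visible buffer is what removes the extra copy measured at roughly 0.1 ms per VGA frame above.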
-rw-r--r--  content/browser/renderer_host/media/video_capture_controller.cc                   92
-rw-r--r--  content/browser/renderer_host/media/video_capture_controller.h                     8
-rw-r--r--  content/browser/renderer_host/media/video_capture_manager_unittest.cc              5
-rw-r--r--  content/browser/renderer_host/media/web_contents_video_capture_device.cc          16
-rw-r--r--  content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc  9
-rw-r--r--  media/base/video_util.cc                                                            4
-rw-r--r--  media/base/video_util.h                                                             2
-rw-r--r--  media/video/capture/android/video_capture_device_android.cc                       46
-rw-r--r--  media/video/capture/android/video_capture_device_android.h                         2
-rw-r--r--  media/video/capture/fake_video_capture_device.cc                                    5
-rw-r--r--  media/video/capture/linux/video_capture_device_linux.cc                             2
-rw-r--r--  media/video/capture/mac/video_capture_device_mac.mm                                 4
-rw-r--r--  media/video/capture/screen/screen_capture_device.cc                                 5
-rw-r--r--  media/video/capture/screen/screen_capture_device_unittest.cc                       12
-rw-r--r--  media/video/capture/video_capture_device.h                                          5
-rw-r--r--  media/video/capture/video_capture_device_unittest.cc                                9
-rw-r--r--  media/video/capture/win/video_capture_device_mf_win.cc                             13
-rw-r--r--  media/video/capture/win/video_capture_device_mf_win.h                               9
-rw-r--r--  media/video/capture/win/video_capture_device_win.cc                                 3
19 files changed, 149 insertions(+), 102 deletions(-)
diff --git a/content/browser/renderer_host/media/video_capture_controller.cc b/content/browser/renderer_host/media/video_capture_controller.cc
index b503293..fc18af4 100644
--- a/content/browser/renderer_host/media/video_capture_controller.cc
+++ b/content/browser/renderer_host/media/video_capture_controller.cc
@@ -20,6 +20,39 @@
#include "third_party/libyuv/include/libyuv.h"
#endif
+namespace {
+
+void ResetBufferYV12(uint8* buffer, int width, int height) {
+ int y_size = width * height;
+ memset(buffer, 0, y_size);
+ buffer += y_size;
+ memset(buffer, 128, y_size / 2);
+}
+
+// TODO(wjia): Support stride.
+void RotatePackedYV12Frame(
+ const uint8* src,
+ uint8* dest_yplane,
+ uint8* dest_uplane,
+ uint8* dest_vplane,
+ int width,
+ int height,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz) {
+ media::RotatePlaneByPixels(
+ src, dest_yplane, width, height, rotation, flip_vert, flip_horiz);
+ int y_size = width * height;
+ src += y_size;
+ media::RotatePlaneByPixels(
+ src, dest_uplane, width/2, height/2, rotation, flip_vert, flip_horiz);
+ src += y_size/4;
+ media::RotatePlaneByPixels(
+ src, dest_vplane, width/2, height/2, rotation, flip_vert, flip_horiz);
+}
+
+} // namespace
+
namespace content {
// The number of DIBs VideoCaptureController allocates.
@@ -58,6 +91,7 @@ struct VideoCaptureController::ControllerClient {
struct VideoCaptureController::SharedDIB {
SharedDIB(base::SharedMemory* ptr)
: shared_memory(ptr),
+ rotation(0),
references(0) {
}
@@ -66,6 +100,8 @@ struct VideoCaptureController::SharedDIB {
// The memory created to be shared with renderer processes.
scoped_ptr<base::SharedMemory> shared_memory;
+ int rotation;
+
// Number of renderer processes which hold this shared memory.
// A renderer process is represented by a VideoCaptureHost.
int references;
@@ -250,7 +286,8 @@ void VideoCaptureController::ReturnBuffer(
bool VideoCaptureController::ReserveSharedMemory(int* buffer_id_out,
uint8** yplane,
uint8** uplane,
- uint8** vplane) {
+ uint8** vplane,
+ int rotation) {
int buffer_id = 0;
base::SharedMemory* dib = NULL;
{
@@ -263,6 +300,11 @@ bool VideoCaptureController::ReserveSharedMemory(int* buffer_id_out,
// renderer side.
dib_it->second->references = -1;
dib = dib_it->second->shared_memory.get();
+ if (rotation != dib_it->second->rotation) {
+ ResetBufferYV12(static_cast<uint8*>(dib->memory()),
+ frame_info_.width, frame_info_.height);
+ dib_it->second->rotation = rotation;
+ }
break;
}
}
@@ -284,42 +326,46 @@ bool VideoCaptureController::ReserveSharedMemory(int* buffer_id_out,
// Implements VideoCaptureDevice::EventHandler.
// OnIncomingCapturedFrame is called on the thread running the capture device,
// i.e. the DirectShow thread on Windows and v4l2_thread on Linux.
-void VideoCaptureController::OnIncomingCapturedFrame(const uint8* data,
- int length,
- base::Time timestamp) {
+void VideoCaptureController::OnIncomingCapturedFrame(
+ const uint8* data,
+ int length,
+ base::Time timestamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz) {
+ DCHECK (frame_info_.color == media::VideoCaptureCapability::kI420 ||
+ frame_info_.color == media::VideoCaptureCapability::kYV12 ||
+ (rotation == 0 && !flip_vert && !flip_horiz));
+
int buffer_id = 0;
uint8* yplane = NULL;
uint8* uplane = NULL;
uint8* vplane = NULL;
- if (!ReserveSharedMemory(&buffer_id, &yplane, &uplane, &vplane))
+ if (!ReserveSharedMemory(&buffer_id, &yplane, &uplane, &vplane, rotation))
return;
// Do color conversion from the camera format to I420.
switch (frame_info_.color) {
case media::VideoCaptureCapability::kColorUnknown: // Color format not set.
break;
- case media::VideoCaptureCapability::kI420: {
+ case media::VideoCaptureCapability::kI420:
DCHECK(!chopped_width_ && !chopped_height_);
- memcpy(yplane, data, (frame_info_.width * frame_info_.height * 3) / 2);
+ RotatePackedYV12Frame(
+ data, yplane, uplane, vplane, frame_info_.width, frame_info_.height,
+ rotation, flip_vert, flip_horiz);
break;
- }
- case media::VideoCaptureCapability::kYV12: {
+ case media::VideoCaptureCapability::kYV12:
DCHECK(!chopped_width_ && !chopped_height_);
- const uint8* ptr = data;
- memcpy(yplane, ptr, (frame_info_.width * frame_info_.height));
- ptr += frame_info_.width * frame_info_.height;
- memcpy(vplane, ptr, (frame_info_.width * frame_info_.height) >> 2);
- ptr += (frame_info_.width * frame_info_.height) >> 2;
- memcpy(uplane, ptr, (frame_info_.width * frame_info_.height) >> 2);
+ RotatePackedYV12Frame(
+ data, yplane, vplane, uplane, frame_info_.width, frame_info_.height,
+ rotation, flip_vert, flip_horiz);
break;
- }
- case media::VideoCaptureCapability::kNV21: {
+ case media::VideoCaptureCapability::kNV21:
DCHECK(!chopped_width_ && !chopped_height_);
media::ConvertNV21ToYUV(data, yplane, uplane, vplane, frame_info_.width,
frame_info_.height);
break;
- }
- case media::VideoCaptureCapability::kYUY2: {
+ case media::VideoCaptureCapability::kYUY2:
DCHECK(!chopped_width_ && !chopped_height_);
if (frame_info_.width * frame_info_.height * 2 != length) {
// If |length| of |data| does not match the expected width and height
@@ -330,7 +376,6 @@ void VideoCaptureController::OnIncomingCapturedFrame(const uint8* data,
media::ConvertYUY2ToYUV(data, yplane, uplane, vplane, frame_info_.width,
frame_info_.height);
break;
- }
case media::VideoCaptureCapability::kRGB24: {
int ystride = frame_info_.width;
int uvstride = frame_info_.width / 2;
@@ -347,13 +392,12 @@ void VideoCaptureController::OnIncomingCapturedFrame(const uint8* data,
rgb_stride, ystride, uvstride);
break;
}
- case media::VideoCaptureCapability::kARGB: {
+ case media::VideoCaptureCapability::kARGB:
media::ConvertRGB32ToYUV(data, yplane, uplane, vplane, frame_info_.width,
frame_info_.height,
(frame_info_.width + chopped_width_) * 4,
frame_info_.width, frame_info_.width / 2);
break;
- }
#if !defined(OS_IOS) && !defined(OS_ANDROID)
case media::VideoCaptureCapability::kMJPEG: {
int yplane_stride = frame_info_.width;
@@ -398,7 +442,7 @@ void VideoCaptureController::OnIncomingCapturedVideoFrame(
uint8* yplane = NULL;
uint8* uplane = NULL;
uint8* vplane = NULL;
- if (!ReserveSharedMemory(&buffer_id, &yplane, &uplane, &vplane))
+ if (!ReserveSharedMemory(&buffer_id, &yplane, &uplane, &vplane, 0))
return;
scoped_refptr<media::VideoFrame> target_as_frame(
@@ -553,6 +597,8 @@ void VideoCaptureController::DoFrameInfoOnIOThread() {
frames_created = false;
break;
}
+ ResetBufferYV12(static_cast<uint8*>(shared_memory->memory()),
+ frame_info_.width, frame_info_.height);
SharedDIB* dib = new SharedDIB(shared_memory.release());
owned_dibs_.insert(std::make_pair(i, dib));
}
diff --git a/content/browser/renderer_host/media/video_capture_controller.h b/content/browser/renderer_host/media/video_capture_controller.h
index 3f7594a..b220c16 100644
--- a/content/browser/renderer_host/media/video_capture_controller.h
+++ b/content/browser/renderer_host/media/video_capture_controller.h
@@ -67,7 +67,10 @@ class CONTENT_EXPORT VideoCaptureController
// Implement media::VideoCaptureDevice::EventHandler.
virtual void OnIncomingCapturedFrame(const uint8* data,
int length,
- base::Time timestamp) OVERRIDE;
+ base::Time timestamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz) OVERRIDE;
virtual void OnIncomingCapturedVideoFrame(media::VideoFrame* frame,
base::Time timestamp) OVERRIDE;
virtual void OnError() OVERRIDE;
@@ -119,7 +122,8 @@ class CONTENT_EXPORT VideoCaptureController
bool ReserveSharedMemory(int* buffer_id_out,
uint8** yplane,
uint8** uplane,
- uint8** vplane);
+ uint8** vplane,
+ int rotation);
// Lock to protect free_dibs_ and owned_dibs_.
base::Lock lock_;
diff --git a/content/browser/renderer_host/media/video_capture_manager_unittest.cc b/content/browser/renderer_host/media/video_capture_manager_unittest.cc
index 1901cfc..917df1e 100644
--- a/content/browser/renderer_host/media/video_capture_manager_unittest.cc
+++ b/content/browser/renderer_host/media/video_capture_manager_unittest.cc
@@ -49,7 +49,10 @@ class MockFrameObserver : public media::VideoCaptureDevice::EventHandler {
const media::VideoCaptureCapability& info) OVERRIDE {}
virtual void OnIncomingCapturedFrame(const uint8* data,
int length,
- base::Time timestamp) OVERRIDE {}
+ base::Time timestamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz) OVERRIDE {}
virtual void OnIncomingCapturedVideoFrame(media::VideoFrame* frame,
base::Time timestamp) OVERRIDE {}
};
diff --git a/content/browser/renderer_host/media/web_contents_video_capture_device.cc b/content/browser/renderer_host/media/web_contents_video_capture_device.cc
index e478c93..bddc311 100644
--- a/content/browser/renderer_host/media/web_contents_video_capture_device.cc
+++ b/content/browser/renderer_host/media/web_contents_video_capture_device.cc
@@ -252,8 +252,12 @@ class SynchronizedConsumer {
void OnFrameInfo(const media::VideoCaptureCapability& info);
void OnError();
- void OnIncomingCapturedFrame(const uint8* pixels, int size,
- const base::Time& timestamp);
+ void OnIncomingCapturedFrame(const uint8* pixels,
+ int size,
+ const base::Time& timestamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz);
void OnIncomingCapturedVideoFrame(
const scoped_refptr<media::VideoFrame>& video_frame,
const base::Time& timestamp);
@@ -524,10 +528,12 @@ void SynchronizedConsumer::OnError() {
}
void SynchronizedConsumer::OnIncomingCapturedFrame(
- const uint8* pixels, int size, const base::Time& timestamp) {
+ const uint8* pixels, int size, const base::Time& timestamp,
+ int rotation, bool flip_vert, bool flip_horiz) {
base::AutoLock guard(consumer_lock_);
if (wrapped_consumer_) {
- wrapped_consumer_->OnIncomingCapturedFrame(pixels, size, timestamp);
+ wrapped_consumer_->OnIncomingCapturedFrame(pixels, size, timestamp,
+ rotation, flip_vert, flip_horiz);
}
}
@@ -586,7 +592,7 @@ void VideoFrameDeliverer::DeliverOnDeliverThread(
consumer_->OnIncomingCapturedFrame(
static_cast<const uint8*>(frame_buffer.getPixels()),
frame_buffer.getSize(),
- frame_timestamp);
+ frame_timestamp, 0, false, false);
ChronicleFrameDelivery(frame_number);
diff --git a/content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc b/content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc
index 6d74199..2abc251 100644
--- a/content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc
+++ b/content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc
@@ -296,8 +296,13 @@ class StubConsumer : public media::VideoCaptureDevice::EventHandler {
}
}
- virtual void OnIncomingCapturedFrame(const uint8* data, int length,
- base::Time timestamp) OVERRIDE {
+ virtual void OnIncomingCapturedFrame(
+ const uint8* data,
+ int length,
+ base::Time timestamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz) OVERRIDE {
DCHECK(data);
static const int kNumPixels = kTestWidth * kTestHeight;
EXPECT_EQ(kNumPixels * kBytesPerPixel, length);
diff --git a/media/base/video_util.cc b/media/base/video_util.cc
index d7498ad..c40468f 100644
--- a/media/base/video_util.cc
+++ b/media/base/video_util.cc
@@ -86,7 +86,7 @@ void FillYUV(VideoFrame* frame, uint8 y, uint8 u, uint8 v) {
}
void RotatePlaneByPixels(
- uint8* src,
+ const uint8* src,
uint8* dest,
int width,
int height,
@@ -175,7 +175,7 @@ void RotatePlaneByPixels(
// Copy pixels.
for (int row = 0; row < num_rows; ++row) {
- uint8* src_ptr = src;
+ const uint8* src_ptr = src;
uint8* dest_ptr = dest;
for (int col = 0; col < num_cols; ++col) {
*dest_ptr = *src_ptr++;
diff --git a/media/base/video_util.h b/media/base/video_util.h
index 0be1f4c..066e7bc 100644
--- a/media/base/video_util.h
+++ b/media/base/video_util.h
@@ -49,7 +49,7 @@ MEDIA_EXPORT void FillYUV(VideoFrame* frame, uint8 y, uint8 u, uint8 v);
// ignored for both |src| and |dest|.
// The caller is responsible for blanking out the margin area.
MEDIA_EXPORT void RotatePlaneByPixels(
- uint8* src,
+ const uint8* src,
uint8* dest,
int width,
int height,
diff --git a/media/video/capture/android/video_capture_device_android.cc b/media/video/capture/android/video_capture_device_android.cc
index 26b80a8..4cd8e4e 100644
--- a/media/video/capture/android/video_capture_device_android.cc
+++ b/media/video/capture/android/video_capture_device_android.cc
@@ -23,14 +23,6 @@ using base::android::ScopedJavaLocalRef;
namespace {
-// TODO(wjia): add stride as part of buffer parameter.
-void ResetBufferI420(uint8* buffer, int width, int height) {
- int y_size = width * height;
- memset(buffer, 0, y_size);
- buffer += y_size;
- memset(buffer, 128, y_size / 2);
-}
-
int GetIntField(JNIEnv* env,
const JavaRef<jclass>& clazz,
const JavaRef<jobject>& instance,
@@ -123,8 +115,7 @@ VideoCaptureDeviceAndroid::VideoCaptureDeviceAndroid(const Name& device_name)
: state_(kIdle),
observer_(NULL),
device_name_(device_name),
- current_settings_(),
- rotation_(0) {
+ current_settings_() {
}
VideoCaptureDeviceAndroid::~VideoCaptureDeviceAndroid() {
@@ -193,12 +184,6 @@ void VideoCaptureDeviceAndroid::Allocate(
<< current_settings_.frame_rate;
// Report the frame size to the observer.
observer_->OnFrameInfo(current_settings_);
-
- int y_size = current_settings_.width * current_settings_.height;
- rotation_buffer_.reset(new uint8[y_size * 3 / 2]);
- ResetBufferI420(rotation_buffer_.get(),
- current_settings_.width,
- current_settings_.height);
}
void VideoCaptureDeviceAndroid::Start() {
@@ -285,32 +270,9 @@ void VideoCaptureDeviceAndroid::OnFrameAvailable(
return;
}
- // TODO(wjia): move rotation into VideoCaptureController to remove
- // one buffer copying.
- // Rotate the buffer when needed.
- int width = current_settings_.width;
- int height = current_settings_.height;
- if (rotation_ != rotation) {
- rotation_ = rotation;
- ResetBufferI420(rotation_buffer_.get(), width, height);
- }
-
- uint8* src = reinterpret_cast<uint8*>(buffer);
- uint8* dest = rotation_buffer_.get();
-
- RotatePlaneByPixels(src, dest, width, height, rotation, flip_vert,
- flip_horiz);
- int y_size = width * height;
- src += y_size;
- dest += y_size;
- RotatePlaneByPixels(src, dest, width/2, height/2, rotation, flip_vert,
- flip_horiz);
- src += y_size/4;
- dest += y_size/4;
- RotatePlaneByPixels(src, dest, width/2, height/2, rotation, flip_vert,
- flip_horiz);
- observer_->OnIncomingCapturedFrame(rotation_buffer_.get(), length,
- base::Time::Now());
+ observer_->OnIncomingCapturedFrame(
+ reinterpret_cast<uint8*>(buffer), length, base::Time::Now(),
+ rotation, flip_vert, flip_horiz);
env->ReleaseByteArrayElements(data, buffer, JNI_ABORT);
}
diff --git a/media/video/capture/android/video_capture_device_android.h b/media/video/capture/android/video_capture_device_android.h
index d2316f5..2034f1f 100644
--- a/media/video/capture/android/video_capture_device_android.h
+++ b/media/video/capture/android/video_capture_device_android.h
@@ -67,8 +67,6 @@ class MEDIA_EXPORT VideoCaptureDeviceAndroid : public VideoCaptureDevice {
Name device_name_;
VideoCaptureCapability current_settings_;
- scoped_ptr<uint8[]> rotation_buffer_;
- int rotation_;
// Java VideoCaptureAndroid instance.
base::android::ScopedJavaGlobalRef<jobject> j_capture_;
diff --git a/media/video/capture/fake_video_capture_device.cc b/media/video/capture/fake_video_capture_device.cc
index b137192..8ca6860 100644
--- a/media/video/capture/fake_video_capture_device.cc
+++ b/media/video/capture/fake_video_capture_device.cc
@@ -192,9 +192,8 @@ void FakeVideoCaptureDevice::OnCaptureTask() {
frame_count_++;
// Give the captured frame to the observer.
- observer_->OnIncomingCapturedFrame(fake_frame_.get(),
- frame_size_,
- base::Time::Now());
+ observer_->OnIncomingCapturedFrame(
+ fake_frame_.get(), frame_size_, base::Time::Now(), 0, false, false);
// Reschedule next CaptureTask.
capture_thread_.message_loop()->PostDelayedTask(
FROM_HERE,
diff --git a/media/video/capture/linux/video_capture_device_linux.cc b/media/video/capture/linux/video_capture_device_linux.cc
index 2049291..d4db951 100644
--- a/media/video/capture/linux/video_capture_device_linux.cc
+++ b/media/video/capture/linux/video_capture_device_linux.cc
@@ -414,7 +414,7 @@ void VideoCaptureDeviceLinux::OnCaptureTask() {
if (ioctl(device_fd_, VIDIOC_DQBUF, &buffer) == 0) {
observer_->OnIncomingCapturedFrame(
static_cast<uint8*> (buffer_pool_[buffer.index].start),
- buffer.bytesused, base::Time::Now());
+ buffer.bytesused, base::Time::Now(), 0, false, false);
// Enqueue the buffer again.
if (ioctl(device_fd_, VIDIOC_QBUF, &buffer) == -1) {
diff --git a/media/video/capture/mac/video_capture_device_mac.mm b/media/video/capture/mac/video_capture_device_mac.mm
index 0bb14a7..6e74d95 100644
--- a/media/video/capture/mac/video_capture_device_mac.mm
+++ b/media/video/capture/mac/video_capture_device_mac.mm
@@ -150,8 +150,8 @@ void VideoCaptureDeviceMac::ReceiveFrame(
const uint8* video_frame,
int video_frame_length,
const VideoCaptureCapability& frame_info) {
- observer_->OnIncomingCapturedFrame(video_frame, video_frame_length,
- base::Time::Now());
+ observer_->OnIncomingCapturedFrame(
+ video_frame, video_frame_length, base::Time::Now(), 0, false, false);
}
void VideoCaptureDeviceMac::SetErrorState(const std::string& reason) {
diff --git a/media/video/capture/screen/screen_capture_device.cc b/media/video/capture/screen/screen_capture_device.cc
index 4caf17b..6182f8d 100644
--- a/media/video/capture/screen/screen_capture_device.cc
+++ b/media/video/capture/screen/screen_capture_device.cc
@@ -193,7 +193,8 @@ void ScreenCaptureDevice::Core::OnCaptureCompleted(
base::AutoLock auto_lock(event_handler_lock_);
if (event_handler_) {
event_handler_->OnIncomingCapturedFrame(
- capture_data->data(), buffer_size, base::Time::Now());
+ capture_data->data(), buffer_size, base::Time::Now(),
+ 0, false, false);
}
return;
}
@@ -253,7 +254,7 @@ void ScreenCaptureDevice::Core::OnCaptureCompleted(
if (event_handler_) {
event_handler_->OnIncomingCapturedFrame(
reinterpret_cast<uint8*>(resized_bitmap_.getPixels()), buffer_size,
- base::Time::Now());
+ base::Time::Now(), 0, false, false);
}
}
diff --git a/media/video/capture/screen/screen_capture_device_unittest.cc b/media/video/capture/screen/screen_capture_device_unittest.cc
index e2cfb78..b39283b 100644
--- a/media/video/capture/screen/screen_capture_device_unittest.cc
+++ b/media/video/capture/screen/screen_capture_device_unittest.cc
@@ -35,8 +35,12 @@ class MockFrameObserver : public VideoCaptureDevice::EventHandler {
public:
MOCK_METHOD0(OnError, void());
MOCK_METHOD1(OnFrameInfo, void(const VideoCaptureCapability& info));
- MOCK_METHOD3(OnIncomingCapturedFrame, void(const uint8* data, int length,
- base::Time timestamp));
+ MOCK_METHOD6(OnIncomingCapturedFrame, void(const uint8* data,
+ int length,
+ base::Time timestamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz));
MOCK_METHOD2(OnIncomingCapturedVideoFrame, void(media::VideoFrame* frame,
base::Time timestamp));
};
@@ -105,7 +109,7 @@ TEST_F(ScreenCaptureDeviceTest, Capture) {
.WillOnce(SaveArg<0>(&caps));
EXPECT_CALL(frame_observer, OnError())
.Times(0);
- EXPECT_CALL(frame_observer, OnIncomingCapturedFrame(_, _, _))
+ EXPECT_CALL(frame_observer, OnIncomingCapturedFrame(_, _, _, _, _, _))
.WillRepeatedly(DoAll(
SaveArg<1>(&frame_size),
InvokeWithoutArgs(&done_event, &base::WaitableEvent::Signal)));
@@ -143,7 +147,7 @@ TEST_F(ScreenCaptureDeviceTest, ScreenResolutionChange) {
.WillOnce(SaveArg<0>(&caps));
EXPECT_CALL(frame_observer, OnError())
.Times(0);
- EXPECT_CALL(frame_observer, OnIncomingCapturedFrame(_, _, _))
+ EXPECT_CALL(frame_observer, OnIncomingCapturedFrame(_, _, _, _, _, _))
.WillRepeatedly(DoAll(
SaveArg<1>(&frame_size),
InvokeWithoutArgs(&done_event, &base::WaitableEvent::Signal)));
diff --git a/media/video/capture/video_capture_device.h b/media/video/capture/video_capture_device.h
index 6181b62..d879223 100644
--- a/media/video/capture/video_capture_device.h
+++ b/media/video/capture/video_capture_device.h
@@ -42,7 +42,10 @@ class MEDIA_EXPORT VideoCaptureDevice {
// color planes.
virtual void OnIncomingCapturedFrame(const uint8* data,
int length,
- base::Time timestamp) = 0;
+ base::Time timestamp,
+ int rotation, // Clockwise.
+ bool flip_vert,
+ bool flip_horiz) = 0;
// Captured a new video frame, held in a VideoFrame container. |frame| must
// be allocated as RGB32, YV12 or I420, and the size must match that
// specified by an earlier call to OnFrameInfo().
diff --git a/media/video/capture/video_capture_device_unittest.cc b/media/video/capture/video_capture_device_unittest.cc
index cc131e4..1b5745e 100644
--- a/media/video/capture/video_capture_device_unittest.cc
+++ b/media/video/capture/video_capture_device_unittest.cc
@@ -74,8 +74,13 @@ class MockFrameObserver : public media::VideoCaptureDevice::EventHandler {
OnFrameInfo(info.width, info.height, info.frame_rate, info.color);
}
- virtual void OnIncomingCapturedFrame(const uint8* data, int length,
- base::Time timestamp) OVERRIDE {
+ virtual void OnIncomingCapturedFrame(
+ const uint8* data,
+ int length,
+ base::Time timestamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz) OVERRIDE {
wait_event_->Signal();
}
diff --git a/media/video/capture/win/video_capture_device_mf_win.cc b/media/video/capture/win/video_capture_device_mf_win.cc
index e984bb1..b666429 100644
--- a/media/video/capture/win/video_capture_device_mf_win.cc
+++ b/media/video/capture/win/video_capture_device_mf_win.cc
@@ -197,7 +197,7 @@ class MFReaderCallback
DWORD stream_flags, LONGLONG time_stamp, IMFSample* sample) {
base::Time stamp(base::Time::Now());
if (!sample) {
- observer_->OnIncomingCapturedFrame(NULL, 0, stamp);
+ observer_->OnIncomingCapturedFrame(NULL, 0, stamp, 0, false, false);
return S_OK;
}
@@ -211,7 +211,8 @@ class MFReaderCallback
DWORD length = 0, max_length = 0;
BYTE* data = NULL;
buffer->Lock(&data, &max_length, &length);
- observer_->OnIncomingCapturedFrame(data, length, stamp);
+ observer_->OnIncomingCapturedFrame(data, length, stamp,
+ 0, false, false);
buffer->Unlock();
}
}
@@ -400,10 +401,14 @@ const VideoCaptureDevice::Name& VideoCaptureDeviceMFWin::device_name() {
void VideoCaptureDeviceMFWin::OnIncomingCapturedFrame(
const uint8* data,
int length,
- const base::Time& time_stamp) {
+ const base::Time& time_stamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz) {
base::AutoLock lock(lock_);
if (data && observer_)
- observer_->OnIncomingCapturedFrame(data, length, time_stamp);
+ observer_->OnIncomingCapturedFrame(data, length, time_stamp,
+ rotation, flip_vert, flip_horiz);
if (capture_) {
HRESULT hr = reader_->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0,
diff --git a/media/video/capture/win/video_capture_device_mf_win.h b/media/video/capture/win/video_capture_device_mf_win.h
index 773b1ed..bfe55f6 100644
--- a/media/video/capture/win/video_capture_device_mf_win.h
+++ b/media/video/capture/win/video_capture_device_mf_win.h
@@ -56,8 +56,13 @@ class VideoCaptureDeviceMFWin
static void GetDeviceNames(Names* device_names);
// Captured a new video frame.
- void OnIncomingCapturedFrame(const uint8* data, int length,
- const base::Time& time_stamp);
+ void OnIncomingCapturedFrame(
+ const uint8* data,
+ int length,
+ const base::Time& time_stamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz);
private:
void OnError(HRESULT hr);
diff --git a/media/video/capture/win/video_capture_device_win.cc b/media/video/capture/win/video_capture_device_win.cc
index 875ca00..2842b78 100644
--- a/media/video/capture/win/video_capture_device_win.cc
+++ b/media/video/capture/win/video_capture_device_win.cc
@@ -482,7 +482,8 @@ const VideoCaptureDevice::Name& VideoCaptureDeviceWin::device_name() {
// Implements SinkFilterObserver::SinkFilterObserver.
void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer,
int length) {
- observer_->OnIncomingCapturedFrame(buffer, length, base::Time::Now());
+ observer_->OnIncomingCapturedFrame(buffer, length, base::Time::Now(),
+ 0, false, false);
}
bool VideoCaptureDeviceWin::CreateCapabilityMap() {