-rw-r--r--content/content_renderer.gypi10
-rw-r--r--content/content_tests.gypi2
-rw-r--r--content/renderer/media/media_stream_dependency_factory.cc36
-rw-r--r--content/renderer/media/media_stream_dependency_factory.h10
-rw-r--r--content/renderer/media/media_stream_dependency_factory_unittest.cc4
-rw-r--r--content/renderer/media/media_stream_impl.cc34
-rw-r--r--content/renderer/media/media_stream_impl.h10
-rw-r--r--content/renderer/media/media_stream_impl_unittest.cc67
-rw-r--r--content/renderer/media/media_stream_video_capturer_source.cc201
-rw-r--r--content/renderer/media/media_stream_video_capturer_source.h117
-rw-r--r--content/renderer/media/media_stream_video_source.cc437
-rw-r--r--content/renderer/media/media_stream_video_source.h139
-rw-r--r--content/renderer/media/media_stream_video_source_unittest.cc299
-rw-r--r--content/renderer/media/mock_media_stream_dependency_factory.cc65
-rw-r--r--content/renderer/media/mock_media_stream_dependency_factory.h12
-rw-r--r--content/renderer/media/mock_media_stream_video_source.cc66
-rw-r--r--content/renderer/media/mock_media_stream_video_source.h61
-rw-r--r--content/renderer/media/rtc_peer_connection_handler_unittest.cc10
-rw-r--r--content/renderer/media/rtc_video_capture_delegate.cc126
-rw-r--r--content/renderer/media/rtc_video_capture_delegate.h89
-rw-r--r--content/renderer/media/rtc_video_capturer.cc159
-rw-r--r--content/renderer/media/rtc_video_capturer.h57
-rw-r--r--content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc121
-rw-r--r--content/renderer/media/webrtc/webrtc_video_capturer_adapter.h57
24 files changed, 1495 insertions, 694 deletions
diff --git a/content/content_renderer.gypi b/content/content_renderer.gypi
index 64d7a40..31dd286 100644
--- a/content/content_renderer.gypi
+++ b/content/content_renderer.gypi
@@ -616,8 +616,6 @@
'renderer/media/media_stream_audio_sink_owner.cc',
'renderer/media/media_stream_audio_sink_owner.h',
'renderer/media/media_stream_audio_track_sink.h',
- 'renderer/media/media_stream_video_track.cc',
- 'renderer/media/media_stream_video_track.h',
'renderer/media/media_stream_center.cc',
'renderer/media/media_stream_dependency_factory.cc',
'renderer/media/media_stream_dispatcher.cc',
@@ -631,6 +629,8 @@
'renderer/media/media_stream_video_capturer_source.h',
'renderer/media/media_stream_video_source.cc',
'renderer/media/media_stream_video_source.h',
+ 'renderer/media/media_stream_video_track.cc',
+ 'renderer/media/media_stream_video_track.h',
'renderer/media/native_handle_impl.cc',
'renderer/media/native_handle_impl.h',
'renderer/media/peer_connection_audio_sink_owner.cc',
@@ -651,10 +651,6 @@
'renderer/media/rtc_media_constraints.h',
'renderer/media/rtc_peer_connection_handler.cc',
'renderer/media/rtc_peer_connection_handler.h',
- 'renderer/media/rtc_video_capture_delegate.cc',
- 'renderer/media/rtc_video_capture_delegate.h',
- 'renderer/media/rtc_video_capturer.cc',
- 'renderer/media/rtc_video_capturer.h',
'renderer/media/rtc_video_decoder.cc',
'renderer/media/rtc_video_decoder.h',
'renderer/media/rtc_video_decoder_factory.cc',
@@ -697,6 +693,8 @@
'renderer/media/webrtc_logging.h',
'renderer/media/webrtc_uma_histograms.cc',
'renderer/media/webrtc_uma_histograms.h',
+ 'renderer/media/webrtc/webrtc_video_capturer_adapter.cc',
+ 'renderer/media/webrtc/webrtc_video_capturer_adapter.h',
'renderer/p2p/host_address_request.cc',
'renderer/p2p/host_address_request.h',
'renderer/p2p/ipc_network_manager.cc',
diff --git a/content/content_tests.gypi b/content/content_tests.gypi
index 2666e12..2316a4e 100644
--- a/content/content_tests.gypi
+++ b/content/content_tests.gypi
@@ -556,6 +556,8 @@
'renderer/media/cache_util_unittest.cc',
'renderer/media/crypto/key_systems_unittest.cc',
'renderer/media/media_stream_video_source_unittest.cc',
+ 'renderer/media/mock_media_stream_video_source.cc',
+ 'renderer/media/mock_media_stream_video_source.h',
'renderer/media/test_response_generator.cc',
'renderer/media/test_response_generator.h',
'renderer/media/video_capture_impl_manager_unittest.cc',
diff --git a/content/renderer/media/media_stream_dependency_factory.cc b/content/renderer/media/media_stream_dependency_factory.cc
index 3217f8b..b39d3c3 100644
--- a/content/renderer/media/media_stream_dependency_factory.cc
+++ b/content/renderer/media/media_stream_dependency_factory.cc
@@ -19,11 +19,11 @@
#include "content/renderer/media/peer_connection_identity_service.h"
#include "content/renderer/media/rtc_media_constraints.h"
#include "content/renderer/media/rtc_peer_connection_handler.h"
-#include "content/renderer/media/rtc_video_capturer.h"
#include "content/renderer/media/rtc_video_decoder_factory.h"
#include "content/renderer/media/rtc_video_encoder_factory.h"
#include "content/renderer/media/webaudio_capturer_source.h"
#include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
+#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "content/renderer/media/webrtc_local_audio_track.h"
#include "content/renderer/media/webrtc_uma_histograms.h"
@@ -232,12 +232,15 @@ bool MediaStreamDependencyFactory::InitializeMediaStreamAudioSource(
return true;
}
-cricket::VideoCapturer* MediaStreamDependencyFactory::CreateVideoCapturer(
- const StreamDeviceInfo& info) {
- bool is_screeencast =
- info.device.type == MEDIA_TAB_VIDEO_CAPTURE ||
- info.device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
- return new RtcVideoCapturer(info.session_id, is_screeencast);
+WebRtcVideoCapturerAdapter* MediaStreamDependencyFactory::CreateVideoCapturer(
+ bool is_screeencast) {
+ // We need to make sure the libjingle thread wrappers have been created
+ // before we can use an instance of a WebRtcVideoCapturerAdapter. This is
+ // because the base class of WebRtcVideoCapturerAdapter is a
+ // cricket::VideoCapturer, which uses the libjingle thread wrappers.
+ if (!GetPcFactory())
+ return NULL;
+ return new WebRtcVideoCapturerAdapter(is_screeencast);
}
scoped_refptr<webrtc::MediaStreamInterface>
@@ -367,11 +370,12 @@ bool MediaStreamDependencyFactory::RemoveNativeMediaStreamTrack(
}
scoped_refptr<webrtc::VideoSourceInterface>
- MediaStreamDependencyFactory::CreateVideoSource(
- cricket::VideoCapturer* capturer,
- const webrtc::MediaConstraintsInterface* constraints) {
+MediaStreamDependencyFactory::CreateVideoSource(
+ cricket::VideoCapturer* capturer,
+ const blink::WebMediaConstraints& constraints) {
+ RTCMediaConstraints webrtc_constraints(constraints);
scoped_refptr<webrtc::VideoSourceInterface> source =
- GetPcFactory()->CreateVideoSource(capturer, constraints).get();
+ GetPcFactory()->CreateVideoSource(capturer, &webrtc_constraints).get();
return source;
}
@@ -632,7 +636,7 @@ MediaStreamDependencyFactory::CreateLocalVideoTrack(
// Create video source from the |capturer|.
scoped_refptr<webrtc::VideoSourceInterface> source =
- CreateVideoSource(capturer, NULL);
+ GetPcFactory()->CreateVideoSource(capturer, NULL).get();
// Create native track from the source.
return GetPcFactory()->CreateVideoTrack(id, source.get()).get();
@@ -768,10 +772,10 @@ void MediaStreamDependencyFactory::OnDisableAecDump() {
void MediaStreamDependencyFactory::StartAecDump(
const base::PlatformFile& aec_dump_file) {
- // |pc_factory_| always takes ownership of |aec_dump_file|. If StartAecDump()
- // fails, |aec_dump_file| will be closed.
- if (!GetPcFactory()->StartAecDump(aec_dump_file))
- VLOG(1) << "Could not start AEC dump.";
+ // |pc_factory_| always takes ownership of |aec_dump_file|. If StartAecDump()
+ // fails, |aec_dump_file| will be closed.
+ if (!GetPcFactory()->StartAecDump(aec_dump_file))
+ VLOG(1) << "Could not start AEC dump.";
}
void MediaStreamDependencyFactory::EnsureWebRtcAudioDeviceImpl() {
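A rough sketch of how the reworked factory methods are meant to be chained by a caller such as MediaStreamVideoSource::InitAdapter() later in this change; the function and variable names below are illustrative, not part of the patch:

// Sketch: create the capturer adapter, then wrap it in a libjingle video
// source. CreateVideoCapturer() may return NULL if the PeerConnection
// factory (and thus the libjingle thread wrappers) is not available yet.
void CreateAdapterSketch(MediaStreamDependencyFactory* factory,
                         const blink::WebMediaConstraints& constraints,
                         bool is_screencast) {
  WebRtcVideoCapturerAdapter* capturer =
      factory->CreateVideoCapturer(is_screencast);
  if (!capturer)
    return;  // Libjingle is not initialized; nothing to wire up.
  // The video source takes ownership of |capturer| and applies the blink
  // constraints, which are converted to RTCMediaConstraints internally.
  scoped_refptr<webrtc::VideoSourceInterface> source =
      factory->CreateVideoSource(capturer, constraints);
}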
diff --git a/content/renderer/media/media_stream_dependency_factory.h b/content/renderer/media/media_stream_dependency_factory.h
index 2fea7c4..730ae2a 100644
--- a/content/renderer/media/media_stream_dependency_factory.h
+++ b/content/renderer/media/media_stream_dependency_factory.h
@@ -50,6 +50,7 @@ class WebRtcAudioDeviceImpl;
class WebRtcLocalAudioTrack;
class WebRtcLoggingHandlerImpl;
class WebRtcLoggingMessageFilter;
+class WebRtcVideoCapturerAdapter;
struct StreamDeviceInfo;
// Object factory for RTC MediaStreams and RTC PeerConnections.
@@ -78,8 +79,8 @@ class CONTENT_EXPORT MediaStreamDependencyFactory
// Creates an implementation of a cricket::VideoCapturer object that can be
// used when creating a libjingle webrtc::VideoSourceInterface object.
- virtual cricket::VideoCapturer* CreateVideoCapturer(
- const StreamDeviceInfo& info);
+ virtual WebRtcVideoCapturerAdapter* CreateVideoCapturer(
+ bool is_screen_capture);
// Creates a libjingle representation of a MediaStream.
scoped_refptr<webrtc::MediaStreamInterface> CreateNativeLocalMediaStream(
@@ -113,9 +114,8 @@ class CONTENT_EXPORT MediaStreamDependencyFactory
// Asks the PeerConnection factory to create a Video Source.
// The video source takes ownership of |capturer|.
virtual scoped_refptr<webrtc::VideoSourceInterface>
- CreateVideoSource(
- cricket::VideoCapturer* capturer,
- const webrtc::MediaConstraintsInterface* constraints);
+ CreateVideoSource(cricket::VideoCapturer* capturer,
+ const blink::WebMediaConstraints& constraints);
// Asks the libjingle PeerConnection factory to create a libjingle
// PeerConnection object.
diff --git a/content/renderer/media/media_stream_dependency_factory_unittest.cc b/content/renderer/media/media_stream_dependency_factory_unittest.cc
index 4245465..7ed5c21 100644
--- a/content/renderer/media/media_stream_dependency_factory_unittest.cc
+++ b/content/renderer/media/media_stream_dependency_factory_unittest.cc
@@ -9,6 +9,7 @@
#include "content/renderer/media/media_stream_video_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/mock_media_stream_dependency_factory.h"
+#include "content/renderer/media/mock_media_stream_video_source.h"
#include "content/renderer/media/mock_web_rtc_peer_connection_handler_client.h"
#include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -79,8 +80,9 @@ class MediaStreamDependencyFactoryTest : public ::testing::Test {
video_sources[0].initialize("video",
blink::WebMediaStreamSource::TypeVideo,
"video");
+
video_sources[0].setExtraData(
- new MediaStreamVideoSource(dependency_factory_.get()));
+ new MockMediaStreamVideoSource(dependency_factory_.get(), false));
video_sources_.assign(video_sources);
}
blink::WebMediaStream stream_desc;
diff --git a/content/renderer/media/media_stream_impl.cc b/content/renderer/media/media_stream_impl.cc
index d577437..0ba73fd 100644
--- a/content/renderer/media/media_stream_impl.cc
+++ b/content/renderer/media/media_stream_impl.cc
@@ -317,12 +317,18 @@ void MediaStreamImpl::OnStreamGenerated(
request_info->generated = true;
// WebUserMediaRequest don't have an implementation in unit tests.
- // Therefore we need to check for isNull here.
+ // Therefore we need to check for isNull here and initialize the
+ // constraints.
blink::WebUserMediaRequest* request = &(request_info->request);
- blink::WebMediaConstraints audio_constraints = request->isNull() ?
- blink::WebMediaConstraints() : request->audioConstraints();
- blink::WebMediaConstraints video_constraints = request->isNull() ?
- blink::WebMediaConstraints() : request->videoConstraints();
+ blink::WebMediaConstraints audio_constraints;
+ blink::WebMediaConstraints video_constraints;
+ if (request->isNull()) {
+ audio_constraints.initialize();
+ video_constraints.initialize();
+ } else {
+ audio_constraints = request->audioConstraints();
+ video_constraints = request->videoConstraints();
+ }
blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector(
audio_array.size());
@@ -437,12 +443,10 @@ void MediaStreamImpl::InitializeSourceObject(
<< ", name = " << webkit_source->name().utf8();
if (type == blink::WebMediaStreamSource::TypeVideo) {
- MediaStreamVideoCapturerSource* video_source(
- new content::MediaStreamVideoCapturerSource(
+ webkit_source->setExtraData(
+ CreateVideoSource(
device,
- base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr()),
- dependency_factory_));
- webkit_source->setExtraData(video_source);
+ base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr())));
} else {
DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type);
MediaStreamAudioSource* audio_source(
@@ -456,6 +460,16 @@ void MediaStreamImpl::InitializeSourceObject(
local_sources_.push_back(LocalStreamSource(frame, *webkit_source));
}
+MediaStreamVideoSource* MediaStreamImpl::CreateVideoSource(
+ const StreamDeviceInfo& device,
+ const MediaStreamSource::SourceStoppedCallback& stop_callback) {
+ return new content::MediaStreamVideoCapturerSource(
+ device,
+ stop_callback,
+ new VideoCapturerDelegate(device),
+ dependency_factory_);
+}
+
void MediaStreamImpl::CreateVideoTracks(
const StreamDeviceInfoArray& devices,
const blink::WebMediaConstraints& constraints,
diff --git a/content/renderer/media/media_stream_impl.h b/content/renderer/media/media_stream_impl.h
index 6911c8f..ef0de16 100644
--- a/content/renderer/media/media_stream_impl.h
+++ b/content/renderer/media/media_stream_impl.h
@@ -19,6 +19,7 @@
#include "content/public/renderer/render_view_observer.h"
#include "content/renderer/media/media_stream_client.h"
#include "content/renderer/media/media_stream_dispatcher_eventhandler.h"
+#include "content/renderer/media/media_stream_source.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
#include "third_party/WebKit/public/platform/WebVector.h"
@@ -30,7 +31,8 @@ namespace content {
class MediaStreamAudioRenderer;
class MediaStreamDependencyFactory;
class MediaStreamDispatcher;
-class MediaStreamSource;
+class MediaStreamVideoSource;
+class VideoCapturerDelegate;
class WebRtcAudioRenderer;
class WebRtcLocalAudioRenderer;
@@ -115,6 +117,12 @@ class CONTENT_EXPORT MediaStreamImpl
// This is virtual for test purposes.
virtual blink::WebMediaStream GetMediaStream(const GURL& url);
+ // Creates a MediaStreamVideoSource object.
+ // This is virtual for test purposes.
+ virtual MediaStreamVideoSource* CreateVideoSource(
+ const StreamDeviceInfo& device,
+ const MediaStreamSource::SourceStoppedCallback& stop_callback);
+
private:
// Class for storing information about a WebKit request to create a
// MediaStream.
diff --git a/content/renderer/media/media_stream_impl_unittest.cc b/content/renderer/media/media_stream_impl_unittest.cc
index cd32666..05ae9c0 100644
--- a/content/renderer/media/media_stream_impl_unittest.cc
+++ b/content/renderer/media/media_stream_impl_unittest.cc
@@ -8,7 +8,7 @@
#include "content/renderer/media/media_stream_impl.h"
#include "content/renderer/media/mock_media_stream_dependency_factory.h"
#include "content/renderer/media/mock_media_stream_dispatcher.h"
-#include "content/renderer/media/video_capture_impl_manager.h"
+#include "content/renderer/media/mock_media_stream_video_source.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
@@ -18,6 +18,18 @@
namespace content {
+class MockMediaStreamVideoCapturerSource : public MockMediaStreamVideoSource {
+ public:
+ MockMediaStreamVideoCapturerSource(
+ const StreamDeviceInfo& device,
+ const SourceStoppedCallback& stop_callback,
+ MediaStreamDependencyFactory* factory)
+ : MockMediaStreamVideoSource(factory, false) {
+ SetDeviceInfo(device);
+ SetStopCallback(stop_callback);
+ }
+};
+
class MediaStreamImplUnderTest : public MediaStreamImpl {
public:
enum RequestState {
@@ -30,7 +42,9 @@ class MediaStreamImplUnderTest : public MediaStreamImpl {
MediaStreamImplUnderTest(MediaStreamDispatcher* media_stream_dispatcher,
MediaStreamDependencyFactory* dependency_factory)
: MediaStreamImpl(NULL, media_stream_dispatcher, dependency_factory),
- state_(REQUEST_NOT_STARTED) {
+ state_(REQUEST_NOT_STARTED),
+ factory_(dependency_factory),
+ video_source_(NULL) {
}
void RequestUserMedia() {
@@ -52,6 +66,15 @@ class MediaStreamImplUnderTest : public MediaStreamImpl {
return last_generated_stream_;
}
+ virtual MediaStreamVideoSource* CreateVideoSource(
+ const StreamDeviceInfo& device,
+ const MediaStreamSource::SourceStoppedCallback& stop_callback) OVERRIDE {
+ video_source_ = new MockMediaStreamVideoCapturerSource(device,
+ stop_callback,
+ factory_);
+ return video_source_;
+ }
+
using MediaStreamImpl::OnLocalMediaStreamStop;
using MediaStreamImpl::OnLocalSourceStopped;
@@ -59,11 +82,17 @@ class MediaStreamImplUnderTest : public MediaStreamImpl {
return last_generated_stream_;
}
+ MockMediaStreamVideoCapturerSource* last_created_video_source() const {
+ return video_source_;
+ }
+
RequestState request_state() const { return state_; }
private:
blink::WebMediaStream last_generated_stream_;
RequestState state_;
+ MediaStreamDependencyFactory* factory_;
+ MockMediaStreamVideoCapturerSource* video_source_;
};
class MediaStreamImplTest : public ::testing::Test {
@@ -79,7 +108,7 @@ class MediaStreamImplTest : public ::testing::Test {
blink::WebMediaStream RequestLocalMediaStream() {
ms_impl_->RequestUserMedia();
FakeMediaStreamDispatcherComplete();
- ChangeVideoSourceStateToLive();
+ StartMockedVideoSource();
EXPECT_EQ(MediaStreamImplUnderTest::REQUEST_SUCCEEDED,
ms_impl_->request_state());
@@ -110,16 +139,18 @@ class MediaStreamImplTest : public ::testing::Test {
ms_dispatcher_->video_array());
}
- void ChangeVideoSourceStateToLive() {
- if (dependency_factory_->last_video_source() != NULL) {
- dependency_factory_->last_video_source()->SetLive();
- }
+ void StartMockedVideoSource() {
+ MockMediaStreamVideoCapturerSource* video_source =
+ ms_impl_->last_created_video_source();
+ if (video_source->SourceHasAttemptedToStart())
+ video_source->StartMockedSource();
}
- void ChangeVideoSourceStateToEnded() {
- if (dependency_factory_->last_video_source() != NULL) {
- dependency_factory_->last_video_source()->SetEnded();
- }
+ void FailToStartMockedVideoSource() {
+ MockMediaStreamVideoCapturerSource* video_source =
+ ms_impl_->last_created_video_source();
+ if (video_source->SourceHasAttemptedToStart())
+ video_source->FailToStartMockedSource();
}
protected:
@@ -256,7 +287,7 @@ TEST_F(MediaStreamImplTest, FrameWillClose) {
TEST_F(MediaStreamImplTest, MediaSourceFailToStart) {
ms_impl_->RequestUserMedia();
FakeMediaStreamDispatcherComplete();
- ChangeVideoSourceStateToEnded();
+ FailToStartMockedVideoSource();
EXPECT_EQ(MediaStreamImplUnderTest::REQUEST_FAILED,
ms_impl_->request_state());
EXPECT_EQ(1, ms_dispatcher_->request_stream_counter());
@@ -272,8 +303,14 @@ TEST_F(MediaStreamImplTest, MediaStreamImplShutDown) {
EXPECT_EQ(1, ms_dispatcher_->request_stream_counter());
EXPECT_EQ(MediaStreamImplUnderTest::REQUEST_NOT_COMPLETE,
ms_impl_->request_state());
+
+ MockMediaStreamVideoCapturerSource* video_source =
+ ms_impl_->last_created_video_source();
+ // Hold on to a blink reference to the source to guarantee that it is not
+ // deleted when MediaStreamImpl is deleted.
+ blink::WebMediaStreamSource blink_source = video_source->owner();
ms_impl_.reset();
- ChangeVideoSourceStateToLive();
+ video_source->StartMockedSource();
}
// This test what happens if the WebFrame is closed while the MediaStream is
@@ -284,13 +321,12 @@ TEST_F(MediaStreamImplTest, ReloadFrameWhileGeneratingStream) {
EXPECT_EQ(1, ms_dispatcher_->request_stream_counter());
EXPECT_EQ(0, ms_dispatcher_->stop_audio_device_counter());
EXPECT_EQ(0, ms_dispatcher_->stop_video_device_counter());
- ChangeVideoSourceStateToLive();
EXPECT_EQ(MediaStreamImplUnderTest::REQUEST_NOT_COMPLETE,
ms_impl_->request_state());
}
// This test what happens if the WebFrame is closed while the sources are being
-// started by MediaStreamDependencyFactory.
+// started.
TEST_F(MediaStreamImplTest, ReloadFrameWhileGeneratingSources) {
ms_impl_->RequestUserMedia();
FakeMediaStreamDispatcherComplete();
@@ -298,7 +334,6 @@ TEST_F(MediaStreamImplTest, ReloadFrameWhileGeneratingSources) {
ms_impl_->FrameWillClose(NULL);
EXPECT_EQ(1, ms_dispatcher_->stop_audio_device_counter());
EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter());
- ChangeVideoSourceStateToLive();
EXPECT_EQ(MediaStreamImplUnderTest::REQUEST_NOT_COMPLETE,
ms_impl_->request_state());
}
diff --git a/content/renderer/media/media_stream_video_capturer_source.cc b/content/renderer/media/media_stream_video_capturer_source.cc
index 4e736a85..722844b 100644
--- a/content/renderer/media/media_stream_video_capturer_source.cc
+++ b/content/renderer/media/media_stream_video_capturer_source.cc
@@ -5,17 +5,180 @@
#include "content/renderer/media/media_stream_video_capturer_source.h"
#include "base/bind.h"
-#include "base/debug/trace_event.h"
-#include "content/renderer/media/rtc_media_constraints.h"
-#include "content/renderer/media/rtc_video_capturer.h"
+#include "base/location.h"
+#include "content/renderer/media/video_capture_impl_manager.h"
+#include "content/renderer/render_thread_impl.h"
+#include "media/base/video_frame.h"
+
+namespace {
+
+struct SourceVideoFormat {
+ int width;
+ int height;
+ int frame_rate;
+};
+
+// List of formats used if the source doesn't support capability enumeration.
+const SourceVideoFormat kVideoFormats[] = {
+ {1920, 1080, 30},
+ {1280, 720, 30},
+ {960, 720, 30},
+ {640, 480, 30},
+ {640, 360, 30},
+ {320, 240, 30},
+ {320, 180, 30}
+};
+
+} // namespace
namespace content {
+VideoCapturerDelegate::VideoCapturerDelegate(
+ const StreamDeviceInfo& device_info)
+ : session_id_(device_info.session_id),
+ capture_engine_(
+ RenderThreadImpl::current()->video_capture_impl_manager()
+ ->UseDevice(device_info.session_id)),
+ is_screen_cast_(device_info.device.type == MEDIA_TAB_VIDEO_CAPTURE ||
+ device_info.device.type == MEDIA_DESKTOP_VIDEO_CAPTURE),
+ got_first_frame_(false) {
+ DVLOG(3) << "VideoCapturerDelegate::ctor";
+ DCHECK(capture_engine_);
+}
+
+VideoCapturerDelegate::~VideoCapturerDelegate() {
+ DVLOG(3) << "VideoCapturerDelegate::dtor";
+ DCHECK(new_frame_callback_.is_null());
+}
+
+void VideoCapturerDelegate::GetCurrentSupportedFormats(
+ int max_requested_width,
+ int max_requested_height,
+ const SupportedFormatsCallback& callback) {
+ DVLOG(3) << "GetCurrentSupportedFormats("
+ << " { max_requested_height = " << max_requested_height << "})"
+ << " { max_requested_width = " << max_requested_width << "})";
+
+ if (is_screen_cast_) {
+ media::VideoCaptureFormats formats;
+ const int width = max_requested_width ?
+ max_requested_width : MediaStreamVideoSource::kDefaultWidth;
+ const int height = max_requested_height ?
+ max_requested_height : MediaStreamVideoSource::kDefaultHeight;
+ formats.push_back(
+ media::VideoCaptureFormat(
+ gfx::Size(width, height),
+ MediaStreamVideoSource::kDefaultFrameRate,
+ media::PIXEL_FORMAT_I420));
+ callback.Run(formats);
+ return;
+ }
+
+ // This delegate implementation doesn't support capability enumeration.
+ // We need to guess what it supports.
+ media::VideoCaptureFormats formats;
+ for (size_t i = 0; i < arraysize(kVideoFormats); ++i) {
+ formats.push_back(
+ media::VideoCaptureFormat(
+ gfx::Size(kVideoFormats[i].width,
+ kVideoFormats[i].height),
+ kVideoFormats[i].frame_rate,
+ media::PIXEL_FORMAT_I420));
+ }
+ callback.Run(formats);
+}
+
+void VideoCapturerDelegate::StartDeliver(
+ const media::VideoCaptureParams& params,
+ const NewFrameCallback& new_frame_callback,
+ const StartedCallback& started_callback) {
+ DCHECK(params.requested_format.IsValid());
+ message_loop_proxy_ = base::MessageLoopProxy::current();
+ new_frame_callback_ = new_frame_callback;
+ started_callback_ = started_callback;
+ got_first_frame_ = false;
+
+ // Increase the reference count to ensure the object is not deleted until
+ // it is unregistered in VideoCapturerDelegate::OnRemoved.
+ AddRef();
+ capture_engine_->StartCapture(this, params);
+}
+
+void VideoCapturerDelegate::StopDeliver() {
+ // Immediately make sure we don't provide more frames.
+ DVLOG(3) << "VideoCapturerDelegate::StopCapture()";
+ DCHECK(message_loop_proxy_ == base::MessageLoopProxy::current());
+ capture_engine_->StopCapture(this);
+ new_frame_callback_.Reset();
+ started_callback_.Reset();
+}
+
+void VideoCapturerDelegate::OnStarted(media::VideoCapture* capture) {
+ DVLOG(3) << "VideoCapturerDelegate::OnStarted";
+}
+
+void VideoCapturerDelegate::OnStopped(media::VideoCapture* capture) {
+}
+
+void VideoCapturerDelegate::OnPaused(media::VideoCapture* capture) {
+}
+
+void VideoCapturerDelegate::OnError(media::VideoCapture* capture,
+ int error_code) {
+ DVLOG(3) << "VideoCapturerDelegate::OnError";
+ message_loop_proxy_->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoCapturerDelegate::OnErrorOnCaptureThread,
+ this, capture));
+}
+
+void VideoCapturerDelegate::OnRemoved(media::VideoCapture* capture) {
+ DVLOG(3) << " MediaStreamVideoCapturerSource::OnRemoved";
+
+ // Balance the AddRef in StartDeliver.
+ // This means we are no longer registered as an event handler and can safely
+ // be deleted.
+ Release();
+}
+
+void VideoCapturerDelegate::OnFrameReady(
+ media::VideoCapture* capture,
+ const scoped_refptr<media::VideoFrame>& frame) {
+ message_loop_proxy_->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoCapturerDelegate::OnFrameReadyOnCaptureThread,
+ this,
+ capture,
+ frame));
+}
+
+void VideoCapturerDelegate::OnFrameReadyOnCaptureThread(
+ media::VideoCapture* capture,
+ const scoped_refptr<media::VideoFrame>& frame) {
+ if (!got_first_frame_) {
+ got_first_frame_ = true;
+ if (!started_callback_.is_null())
+ started_callback_.Run(true);
+ }
+
+ if (!new_frame_callback_.is_null()) {
+ new_frame_callback_.Run(frame);
+ }
+}
+
+void VideoCapturerDelegate::OnErrorOnCaptureThread(
+ media::VideoCapture* capture) {
+ if (!started_callback_.is_null())
+ started_callback_.Run(false);
+}
+
MediaStreamVideoCapturerSource::MediaStreamVideoCapturerSource(
const StreamDeviceInfo& device_info,
const SourceStoppedCallback& stop_callback,
+ const scoped_refptr<VideoCapturerDelegate>& delegate,
MediaStreamDependencyFactory* factory)
- : MediaStreamVideoSource(factory) {
+ : MediaStreamVideoSource(factory),
+ delegate_(delegate) {
SetDeviceInfo(device_info);
SetStopCallback(stop_callback);
}
@@ -23,14 +186,28 @@ MediaStreamVideoCapturerSource::MediaStreamVideoCapturerSource(
MediaStreamVideoCapturerSource::~MediaStreamVideoCapturerSource() {
}
-void MediaStreamVideoCapturerSource::InitAdapter(
- const blink::WebMediaConstraints& constraints) {
- // Create the webrtc::VideoSource implementation.
- RTCMediaConstraints webrtc_constraints(constraints);
- cricket::VideoCapturer* capturer =
- factory()->CreateVideoCapturer(device_info());
- SetAdapter(factory()->CreateVideoSource(capturer,
- &webrtc_constraints));
+void MediaStreamVideoCapturerSource::GetCurrentSupportedFormats(
+ int max_requested_width,
+ int max_requested_height) {
+ delegate_->GetCurrentSupportedFormats(
+ max_requested_width,
+ max_requested_height,
+ base::Bind(&MediaStreamVideoCapturerSource::OnSupportedFormats,
+ base::Unretained(this)));
+}
+
+void MediaStreamVideoCapturerSource::StartSourceImpl(
+ const media::VideoCaptureParams& params) {
+ delegate_->StartDeliver(
+ params,
+ base::Bind(&MediaStreamVideoCapturerSource::DeliverVideoFrame,
+ base::Unretained(this)),
+ base::Bind(&MediaStreamVideoCapturerSource::OnStartDone,
+ base::Unretained(this)));
+}
+
+void MediaStreamVideoCapturerSource::StopSourceImpl() {
+ delegate_->StopDeliver();
}
} // namespace content
diff --git a/content/renderer/media/media_stream_video_capturer_source.h b/content/renderer/media/media_stream_video_capturer_source.h
index 2cd46bc..e4f8f53 100644
--- a/content/renderer/media/media_stream_video_capturer_source.h
+++ b/content/renderer/media/media_stream_video_capturer_source.h
@@ -5,33 +5,124 @@
#ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_VIDEO_CAPTURER_SOURCE_H_
#define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_VIDEO_CAPTURER_SOURCE_H_
-#include "base/compiler_specific.h"
-#include "content/common/content_export.h"
+#include "base/callback.h"
+#include "base/message_loop/message_loop_proxy.h"
+#include "content/common/media/video_capture.h"
#include "content/renderer/media/media_stream_video_source.h"
+#include "media/video/capture/video_capture.h"
namespace content {
-// MediaStreamVideoCapturerSource is an implementation of
-// MediaStreamVideoSource used for local video capture such as USB cameras
-// and tab capture.
-// TODO(perkj): Currently, this use RTCVideoCapturer and a libjingle
-// implementation of webrt::MediaStreamSourceInterface. Implement this class
-// without using cricket::VideoCapturer and webrtc::VideoSourceInterface as
-// part of project Piranha Plant.
-class CONTENT_EXPORT MediaStreamVideoCapturerSource
- : public MediaStreamVideoSource {
+class VideoCaptureHandle;
+
+// VideoCapturerDelegate is a delegate used by MediaStreamVideoCapturerSource
+// for local video capture. It uses VideoCaptureImplManager to start / stop
+// and receive I420 frames from Chrome's video capture implementation.
+class VideoCapturerDelegate
+ : public media::VideoCapture::EventHandler,
+ public base::RefCountedThreadSafe<VideoCapturerDelegate> {
+ public:
+ typedef base::Callback<void(const scoped_refptr<media::VideoFrame>&)>
+ NewFrameCallback;
+ typedef base::Callback<void(bool running)> StartedCallback;
+ typedef base::Callback<void(const media::VideoCaptureFormats& formats)>
+ SupportedFormatsCallback;
+
+ explicit VideoCapturerDelegate(
+ const StreamDeviceInfo& device_info);
+
+ // Collects the formats that can currently be used.
+ // |max_requested_height| and |max_requested_width| are used by tab and
+ // screen capture to decide what resolution to generate.
+ // |callback| is triggered when the formats have been collected.
+ virtual void GetCurrentSupportedFormats(
+ int max_requested_width,
+ int max_requested_height,
+ const SupportedFormatsCallback& callback);
+
+ // Starts delivering frames using the resolution in |params|.
+ // |new_frame_callback| is triggered when a new video frame is available.
+ // |started_callback| is triggered before the first video frame is received
+ // or if the underlying video capturer fails to start.
+ virtual void StartDeliver(
+ const media::VideoCaptureParams& params,
+ const NewFrameCallback& new_frame_callback,
+ const StartedCallback& started_callback);
+
+ // Stops delivering frames and clears all callbacks, including the
+ // SupportedFormatsCallback callback.
+ virtual void StopDeliver();
+
+ protected:
+ // media::VideoCapture::EventHandler implementation.
+ // These functions are called on the IO thread (same as where
+ // |capture_engine_| runs).
+ virtual void OnStarted(media::VideoCapture* capture) OVERRIDE;
+ virtual void OnStopped(media::VideoCapture* capture) OVERRIDE;
+ virtual void OnPaused(media::VideoCapture* capture) OVERRIDE;
+ virtual void OnError(media::VideoCapture* capture, int error_code) OVERRIDE;
+ virtual void OnRemoved(media::VideoCapture* capture) OVERRIDE;
+ virtual void OnFrameReady(
+ media::VideoCapture* capture,
+ const scoped_refptr<media::VideoFrame>& frame) OVERRIDE;
+
+ private:
+ friend class base::RefCountedThreadSafe<VideoCapturerDelegate>;
+
+ virtual ~VideoCapturerDelegate();
+
+ void OnFrameReadyOnCaptureThread(
+ media::VideoCapture* capture,
+ const scoped_refptr<media::VideoFrame>& frame);
+ void OnErrorOnCaptureThread(media::VideoCapture* capture);
+
+ // The id identifies which video capture device is used for this video
+ // capture session.
+ media::VideoCaptureSessionId session_id_;
+ scoped_ptr<VideoCaptureHandle> capture_engine_;
+
+ bool is_screen_cast_;
+
+ // Accessed on the thread where StartDeliver is called.
+ bool got_first_frame_;
+
+ // |new_frame_callback_| is provided to this class in StartDeliver and must be
+ // valid until StopDeliver is called.
+ NewFrameCallback new_frame_callback_;
+ // |started_callback_| is provided to this class in StartDeliver and must be
+ // valid until StopDeliver is called.
+ StartedCallback started_callback_;
+ // Message loop of the caller of StartDeliver.
+ scoped_refptr<base::MessageLoopProxy> message_loop_proxy_;
+
+ DISALLOW_COPY_AND_ASSIGN(VideoCapturerDelegate);
+};
+
+class MediaStreamVideoCapturerSource : public MediaStreamVideoSource {
public:
MediaStreamVideoCapturerSource(
const StreamDeviceInfo& device_info,
const SourceStoppedCallback& stop_callback,
+ const scoped_refptr<VideoCapturerDelegate>& delegate,
MediaStreamDependencyFactory* factory);
+
virtual ~MediaStreamVideoCapturerSource();
protected:
- virtual void InitAdapter(
- const blink::WebMediaConstraints& constraints) OVERRIDE;
+ // Implements MediaStreamVideoSource.
+ virtual void GetCurrentSupportedFormats(
+ int max_requested_width,
+ int max_requested_height) OVERRIDE;
+
+ virtual void StartSourceImpl(
+ const media::VideoCaptureParams& params) OVERRIDE;
+
+ virtual void StopSourceImpl() OVERRIDE;
private:
+ // The delegate that provides video frames.
+ scoped_refptr<VideoCapturerDelegate> delegate_;
+
DISALLOW_COPY_AND_ASSIGN(MediaStreamVideoCapturerSource);
};
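For context, a minimal sketch of how the new capturer source and its delegate are wired together; it mirrors MediaStreamImpl::CreateVideoSource() from this change, using a hypothetical helper name:

// Sketch (assumed usage): construct a VideoCapturerDelegate for the device
// and hand it to MediaStreamVideoCapturerSource, which drives it through
// GetCurrentSupportedFormats(), StartDeliver() and StopDeliver().
MediaStreamVideoSource* CreateCapturerSourceSketch(
    const StreamDeviceInfo& device,
    const MediaStreamSource::SourceStoppedCallback& stop_callback,
    MediaStreamDependencyFactory* factory) {
  scoped_refptr<VideoCapturerDelegate> delegate(
      new VideoCapturerDelegate(device));
  return new MediaStreamVideoCapturerSource(
      device, stop_callback, delegate, factory);
}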
diff --git a/content/renderer/media/media_stream_video_source.cc b/content/renderer/media/media_stream_video_source.cc
index 9834ef7..118d66e 100644
--- a/content/renderer/media/media_stream_video_source.cc
+++ b/content/renderer/media/media_stream_video_source.cc
@@ -4,55 +4,266 @@
#include "content/renderer/media/media_stream_video_source.h"
+#include <limits>
+#include <string>
+
#include "base/logging.h"
+#include "base/strings/string_number_conversions.h"
#include "content/renderer/media/media_stream_dependency_factory.h"
-#include "content/renderer/media/rtc_media_constraints.h"
-#include "media/base/video_frame.h"
-#include "third_party/libjingle/source/talk/app/webrtc/remotevideocapturer.h"
-#include "third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe.h"
+#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
namespace content {
+// Constraint keys. Specified by draft-alvestrand-constraints-resolution-00b
+const char MediaStreamVideoSource::kMinAspectRatio[] = "minAspectRatio";
+const char MediaStreamVideoSource::kMaxAspectRatio[] = "maxAspectRatio";
+const char MediaStreamVideoSource::kMaxWidth[] = "maxWidth";
+const char MediaStreamVideoSource::kMinWidth[] = "minWidth";
+const char MediaStreamVideoSource::kMaxHeight[] = "maxHeight";
+const char MediaStreamVideoSource::kMinHeight[] = "minHeight";
+const char MediaStreamVideoSource::kMaxFrameRate[] = "maxFrameRate";
+const char MediaStreamVideoSource::kMinFrameRate[] = "minFrameRate";
+
+const int MediaStreamVideoSource::kDefaultWidth = 640;
+const int MediaStreamVideoSource::kDefaultHeight = 480;
+const int MediaStreamVideoSource::kDefaultFrameRate = 30;
+
+namespace {
+// Constraints keys for http://dev.w3.org/2011/webrtc/editor/getusermedia.html
+const char kSourceId[] = "sourceId";
+
+// Google-specific key prefix. Constraints with this prefix are ignored if they
+// are unknown.
+const char kGooglePrefix[] = "goog";
+
+// Returns true if |constraint| is fulfilled. |format| can be changed by a
+// constraint, e.g. the frame rate can be changed by setting maxFrameRate.
+bool UpdateFormatForConstraint(
+ const blink::WebMediaConstraint& constraint,
+ bool mandatory,
+ media::VideoCaptureFormat* format) {
+ DCHECK(format != NULL);
+
+ if (!format->IsValid())
+ return false;
+
+ std::string constraint_name = constraint.m_name.utf8();
+ std::string constraint_value = constraint.m_value.utf8();
+
+ if (constraint_name.find(kGooglePrefix) == 0) {
+ // These are actually options, not constraints, so they can be satisfied
+ // regardless of the format.
+ return true;
+ }
+
+ if (constraint_name == kSourceId) {
+ // This is a constraint that doesn't affect the format.
+ return true;
+ }
+
+ // Ignore Chrome specific Tab capture constraints.
+ if (constraint_name == kMediaStreamSource ||
+ constraint_name == kMediaStreamSourceId)
+ return true;
+
+ if (constraint_name == MediaStreamVideoSource::kMinAspectRatio ||
+ constraint_name == MediaStreamVideoSource::kMaxAspectRatio) {
+ double double_value = 0;
+ base::StringToDouble(constraint_value, &double_value);
+
+ // The aspect ratio in |constraint.m_value| has been converted to a string
+ // and back to a double, so it may have a rounding error.
+ // E.g., if the value 1/3 is converted to a string, the string will not have
+ // infinite length.
+ // We add a margin of 0.0005 which is high enough to detect the same aspect
+ // ratio but small enough to avoid matching wrong aspect ratios.
+ const double kRoundingTruncation = 0.0005;
+ double ratio = static_cast<double>(format->frame_size.width()) /
+ format->frame_size.height();
+ if (constraint_name == MediaStreamVideoSource::kMinAspectRatio)
+ return (double_value <= ratio + kRoundingTruncation);
+ // Subtract 0.0005 to avoid rounding problems. Same as above.
+ return (double_value >= ratio - kRoundingTruncation);
+ }
+
+ int value;
+ if (!base::StringToInt(constraint_value, &value)) {
+ DLOG(WARNING) << "Can't parse MediaStream constraint. Name:"
+ << constraint_name << " Value:" << constraint_value;
+ return false;
+ }
+ if (constraint_name == MediaStreamVideoSource::kMinWidth) {
+ return (value <= format->frame_size.width());
+ } else if (constraint_name == MediaStreamVideoSource::kMaxWidth) {
+ return (value >= format->frame_size.width());
+ } else if (constraint_name == MediaStreamVideoSource::kMinHeight) {
+ return (value <= format->frame_size.height());
+ } else if (constraint_name == MediaStreamVideoSource::kMaxHeight) {
+ return (value >= format->frame_size.height());
+ } else if (constraint_name == MediaStreamVideoSource::kMinFrameRate) {
+ return (value <= format->frame_rate);
+ } else if (constraint_name == MediaStreamVideoSource::kMaxFrameRate) {
+ if (value == 0) {
+ // The frame rate is set by constraint.
+ // Don't allow 0 as frame rate if it is a mandatory constraint.
+ // Set the frame rate to 1 if it is not mandatory.
+ if (mandatory) {
+ return false;
+ } else {
+ value = 1;
+ }
+ }
+ format->frame_rate =
+ (format->frame_rate > value) ? value : format->frame_rate;
+ return true;
+ } else {
+ LOG(WARNING) << "Found unknown MediaStream constraint. Name:"
+ << constraint_name << " Value:" << constraint_value;
+ return false;
+ }
+}
+
+// Removes media::VideoCaptureFormats from |formats| that don't meet
+// |constraint|.
+void FilterFormatsByConstraint(
+ const blink::WebMediaConstraint& constraint,
+ bool mandatory,
+ media::VideoCaptureFormats* formats) {
+ DVLOG(3) << "FilterFormatsByConstraint("
+ << "{ constraint.m_name = " << constraint.m_name.utf8()
+ << " constraint.m_value = " << constraint.m_value.utf8()
+ << " mandatory = " << mandatory << "})";
+ media::VideoCaptureFormats::iterator format_it = formats->begin();
+ while (format_it != formats->end()) {
+ // Modify the format_it to fulfill the constraint if possible.
+ // Delete it otherwise.
+ if (!UpdateFormatForConstraint(constraint, mandatory, &(*format_it))) {
+ format_it = formats->erase(format_it);
+ } else {
+ ++format_it;
+ }
+ }
+}
+
+// Returns the media::VideoCaptureFormats that match |constraints|.
+media::VideoCaptureFormats FilterFormats(
+ const blink::WebMediaConstraints& constraints,
+ const media::VideoCaptureFormats& supported_formats) {
+ if (constraints.isNull()) {
+ return supported_formats;
+ }
+
+ blink::WebVector<blink::WebMediaConstraint> mandatory;
+ blink::WebVector<blink::WebMediaConstraint> optional;
+ constraints.getMandatoryConstraints(mandatory);
+ constraints.getOptionalConstraints(optional);
+
+ media::VideoCaptureFormats candidates = supported_formats;
+
+ for (size_t i = 0; i < mandatory.size(); ++i)
+ FilterFormatsByConstraint(mandatory[i], true, &candidates);
+
+ if (candidates.empty())
+ return candidates;
+
+ // OK - all mandatory constraints have been checked and we still have
+ // candidates. Let's try filtering using the optional constraints. The
+ // optional constraints must be filtered in the order they occur in
+ // |optional|. But if a constraint produces zero candidates, the constraint
+ // is ignored and the next constraint is tested.
+ // http://dev.w3.org/2011/webrtc/editor/getusermedia.html#idl-def-Constraints
+ for (size_t i = 0; i < optional.size(); ++i) {
+ media::VideoCaptureFormats current_candidates = candidates;
+ FilterFormatsByConstraint(optional[i], false, &current_candidates);
+ if (!current_candidates.empty()) {
+ candidates = current_candidates;
+ }
+ }
+
+ // We have done as well as we can to filter the supported resolutions.
+ return candidates;
+}
+
+// Find the format that best matches the default video size.
+// This algorithm is chosen since a resolution must be picked even if no
+// constraints are provided. We don't just select the maximum supported
+// resolution since higher resolutions cost more in terms of complexity and
+// many cameras perform worse at their maximum supported resolution.
+const media::VideoCaptureFormat& GetBestCaptureFormat(
+ const media::VideoCaptureFormats& formats) {
+ DCHECK(!formats.empty());
+
+ int default_area =
+ MediaStreamVideoSource::kDefaultWidth *
+ MediaStreamVideoSource::kDefaultHeight;
+
+ media::VideoCaptureFormats::const_iterator it = formats.begin();
+ media::VideoCaptureFormats::const_iterator best_it = formats.begin();
+ int best_diff = std::numeric_limits<int>::max();
+ for (; it != formats.end(); ++it) {
+ int diff = abs(default_area -
+ it->frame_size.width() * it->frame_size.height());
+ if (diff < best_diff) {
+ best_diff = diff;
+ best_it = it;
+ }
+ }
+ return *best_it;
+}
+
+} // anonymous namespace
+
MediaStreamVideoSource::MediaStreamVideoSource(
MediaStreamDependencyFactory* factory)
- : initializing_(false),
+ : state_(NEW),
factory_(factory),
- width_(0),
- height_(0),
- first_frame_timestamp_(media::kNoTimestamp()) {
+ capture_adapter_(NULL) {
DCHECK(factory_);
}
MediaStreamVideoSource::~MediaStreamVideoSource() {
- if (initializing_) {
- adapter_->UnregisterObserver(this);
- }
}
void MediaStreamVideoSource::AddTrack(
const blink::WebMediaStreamTrack& track,
const blink::WebMediaConstraints& constraints,
const ConstraintsCallback& callback) {
- if (!adapter_) {
- // Create the webrtc::MediaStreamVideoSourceInterface adapter.
- InitAdapter(constraints);
- DCHECK(adapter_);
+ DCHECK(CalledOnValidThread());
+ requested_constraints_.push_back(RequestedConstraints(constraints,
+ callback));
+ switch (state_) {
+ case NEW: {
+ // Tab capture and screen capture need the maximum requested height
+ // and width to decide on the resolution.
+ blink::WebString max_width;
+ int max_requested_width = 0;
+ if (constraints.getMandatoryConstraintValue(kMaxWidth, max_width))
+ base::StringToInt(max_width.utf8(), &max_requested_width);
- current_constraints_ = constraints;
- initializing_ = true;
- // Register to the adapter to get notified when it has been started
- // successfully.
- adapter_->RegisterObserver(this);
- }
+ int max_requested_height = 0;
+ blink::WebString max_height;
+ if (constraints.getMandatoryConstraintValue(kMaxHeight, max_height))
+ base::StringToInt(max_height.utf8(), &max_requested_height);
- // TODO(perkj): Currently, reconfiguring the source is not supported. For now
- // we ignore if |constraints| do not match the constraints that was used
- // when the source was started
+ state_ = RETRIEVING_CAPABILITIES;
+ GetCurrentSupportedFormats(max_requested_width,
+ max_requested_height);
- // There might be multiple tracks attaching to the source while it is being
- // configured.
- constraints_callbacks_.push_back(callback);
- TriggerConstraintsCallbackOnStateChange();
+ break;
+ }
+ case STARTING:
+ case RETRIEVING_CAPABILITIES: {
+ // The |callback| will be triggered once the delegate has started or
+ // the capabilities have been retrieved.
+ break;
+ }
+ case ENDED:
+ case STARTED: {
+ // Currently, reconfiguring the source is not supported.
+ FinalizeAddTrack();
+ }
+ }
}
void MediaStreamVideoSource::RemoveTrack(
@@ -60,77 +271,135 @@ void MediaStreamVideoSource::RemoveTrack(
// TODO(ronghuawu): What should be done here? Do we really need RemoveTrack?
}
-void MediaStreamVideoSource::InitAdapter(
- const blink::WebMediaConstraints& constraints) {
- DCHECK(!adapter_);
- RTCMediaConstraints webrtc_constraints(constraints);
- adapter_ = factory_->CreateVideoSource(new webrtc::RemoteVideoCapturer(),
- &webrtc_constraints);
+void MediaStreamVideoSource::InitAdapter() {
+ if (adapter_)
+ return;
+ // Create the webrtc::MediaStreamVideoSourceInterface adapter.
+ // It needs the constraints so that constraints used by a PeerConnection,
+ // such as constraints for CPU adaptation and tab capture, will be
+ // available.
+ bool is_screeencast =
+ device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE ||
+ device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
+ capture_adapter_ = factory_->CreateVideoCapturer(is_screeencast);
+ capture_adapter_->SetRequestedFormat(current_format_);
+ adapter_ = factory_->CreateVideoSource(capture_adapter_,
+ current_constraints_);
}
-void MediaStreamVideoSource::SetReadyState(
- blink::WebMediaStreamSource::ReadyState state) {
- // TODO(ronghuawu): Sets WebMediaStreamSource's ready state and notifies the
- // ready state to all registered tracks.
+webrtc::VideoSourceInterface* MediaStreamVideoSource::GetAdapter() {
+ if (!adapter_) {
+ InitAdapter();
+ }
+ return adapter_;
+}
+
+void MediaStreamVideoSource::DoStopSource() {
+ DVLOG(3) << "DoStopSource()";
+ StopSourceImpl();
+ state_ = ENDED;
}
void MediaStreamVideoSource::DeliverVideoFrame(
const scoped_refptr<media::VideoFrame>& frame) {
- if (first_frame_timestamp_ == media::kNoTimestamp()) {
- first_frame_timestamp_ = frame->GetTimestamp();
- }
-
- cricket::VideoRenderer* input = adapter_->FrameInput();
- if (width_ != frame->coded_size().width() ||
- height_ != frame->coded_size().height()) {
- width_ = frame->coded_size().width();
- height_ = frame->coded_size().height();
- const int reserved = 0;
- input->SetSize(width_, height_, reserved);
- }
-
- cricket::WebRtcVideoFrame cricket_frame;
- const int64 elapsed_time_ns =
- (frame->GetTimestamp() - first_frame_timestamp_).InMicroseconds() *
- base::Time::kNanosecondsPerMicrosecond;
- const int64 time_stamp_ns = frame->GetTimestamp().InMicroseconds() *
- base::Time::kNanosecondsPerMicrosecond;
- const size_t size =
- media::VideoFrame::AllocationSize(frame->format(), frame->coded_size());
- const size_t pixel_width = 1;
- const size_t pixel_height = 1;
- const int rotation = 0;
- cricket_frame.Alias(frame->data(0), size,
- width_, height_,
- pixel_width, pixel_height,
- elapsed_time_ns, time_stamp_ns,
- rotation);
- input->RenderFrame(&cricket_frame);
-}
-
-void MediaStreamVideoSource::OnChanged() {
- DCHECK(CalledOnValidThread());
- TriggerConstraintsCallbackOnStateChange();
+ if (capture_adapter_)
+ capture_adapter_->OnFrameCaptured(frame);
}
-void MediaStreamVideoSource::TriggerConstraintsCallbackOnStateChange() {
- if (adapter_->state() == webrtc::MediaSourceInterface::kInitializing)
+void MediaStreamVideoSource::OnSupportedFormats(
+ const media::VideoCaptureFormats& formats) {
+ DCHECK(CalledOnValidThread());
+ DCHECK_EQ(RETRIEVING_CAPABILITIES, state_);
+
+ supported_formats_ = formats;
+ if (!FindBestFormatWithConstraints(supported_formats_, &current_format_,
+ &current_constraints_)) {
+ FinalizeAddTrack();
+ SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded);
return;
+ }
+
+ state_ = STARTING;
+ DVLOG(3) << "Starting the capturer with"
+ << " width = " << current_format_.frame_size.width()
+ << " height = " << current_format_.frame_size.height()
+ << " frame rate = " << current_format_.frame_rate;
- if (initializing_) {
- adapter_->UnregisterObserver(this);
- initializing_ = false;
+ media::VideoCaptureParams params;
+ params.requested_format = current_format_;
+ StartSourceImpl(params);
+}
+
+bool MediaStreamVideoSource::FindBestFormatWithConstraints(
+ const media::VideoCaptureFormats& formats,
+ media::VideoCaptureFormat* best_format,
+ blink::WebMediaConstraints* resulting_constraints) {
+ // Find the first constraints that we can fulfill.
+ for (std::vector<RequestedConstraints>::iterator request_it =
+ requested_constraints_.begin();
+ request_it != requested_constraints_.end(); ++request_it) {
+ const blink::WebMediaConstraints& requested_constraints =
+ request_it->constraints;
+
+ media::VideoCaptureFormats filtered_formats =
+ FilterFormats(requested_constraints, formats);
+ if (filtered_formats.size() > 0) {
+ // A request with constraints that can be fulfilled.
+ *best_format = GetBestCaptureFormat(filtered_formats);
+ *resulting_constraints = requested_constraints;
+ return true;
+ }
+ }
+ return false;
+}
+
+void MediaStreamVideoSource::OnStartDone(bool success) {
+ DCHECK(CalledOnValidThread());
+ DVLOG(3) << "OnStartDone({success =" << success << "})";
+ if (success) {
+ DCHECK_EQ(STARTING, state_);
+ state_ = STARTED;
+ SetReadyState(blink::WebMediaStreamSource::ReadyStateLive);
+ } else {
+ state_ = ENDED;
+ SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded);
+ StopSourceImpl();
}
- std::vector<ConstraintsCallback> callbacks;
- callbacks.swap(constraints_callbacks_);
+ FinalizeAddTrack();
+}
- bool success = (adapter_->state() == webrtc::MediaSourceInterface::kLive);
- for (std::vector<ConstraintsCallback>::iterator it = callbacks.begin();
+void MediaStreamVideoSource::FinalizeAddTrack() {
+ media::VideoCaptureFormats formats;
+ formats.push_back(current_format_);
+
+ std::vector<RequestedConstraints> callbacks;
+ callbacks.swap(requested_constraints_);
+ for (std::vector<RequestedConstraints>::iterator it = callbacks.begin();
it != callbacks.end(); ++it) {
- if (!it->is_null())
- it->Run(this, success);
+ bool success = state_ == STARTED &&
+ !FilterFormats(it->constraints, formats).empty();
+ DVLOG(3) << "FinalizeAddTrack() success " << success;
+ if (!it->callback.is_null())
+ it->callback.Run(this, success);
}
}
+void MediaStreamVideoSource::SetReadyState(
+ blink::WebMediaStreamSource::ReadyState state) {
+ if (!owner().isNull()) {
+ owner().setReadyState(state);
+ }
+ // TODO(perkj): Notify all registered tracks.
+}
+
+MediaStreamVideoSource::RequestedConstraints::RequestedConstraints(
+ const blink::WebMediaConstraints& constraints,
+ const ConstraintsCallback& callback)
+ : constraints(constraints), callback(callback) {
+}
+
+MediaStreamVideoSource::RequestedConstraints::~RequestedConstraints() {
+}
+
} // namespace content
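The two-pass filtering above follows the getusermedia draft: every mandatory constraint must hold, while optional constraints are applied in the given order and skipped if they would eliminate all remaining formats. A standalone sketch of that policy, using plain structs and a hypothetical predicate type rather than Chromium types:

#include <vector>

struct FormatSketch { int width; int height; int frame_rate; };
// Stand-in for UpdateFormatForConstraint(): returns true if |format|
// satisfies (or can be adjusted to satisfy) one constraint.
typedef bool (*ConstraintPredicate)(const FormatSketch& format);

std::vector<FormatSketch> FilterSketch(
    std::vector<FormatSketch> candidates,
    const std::vector<ConstraintPredicate>& mandatory,
    const std::vector<ConstraintPredicate>& optional) {
  // Mandatory pass: a format failing any mandatory constraint is dropped.
  for (size_t i = 0; i < mandatory.size(); ++i) {
    std::vector<FormatSketch> kept;
    for (size_t j = 0; j < candidates.size(); ++j) {
      if (mandatory[i](candidates[j]))
        kept.push_back(candidates[j]);
    }
    candidates.swap(kept);
    if (candidates.empty())
      return candidates;  // No format satisfies the mandatory constraints.
  }
  // Optional pass: apply each constraint in order, but keep the previous
  // candidate set whenever a constraint would leave nothing.
  for (size_t i = 0; i < optional.size(); ++i) {
    std::vector<FormatSketch> kept;
    for (size_t j = 0; j < candidates.size(); ++j) {
      if (optional[i](candidates[j]))
        kept.push_back(candidates[j]);
    }
    if (!kept.empty())
      candidates.swap(kept);
  }
  return candidates;
}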
diff --git a/content/renderer/media/media_stream_video_source.h b/content/renderer/media/media_stream_video_source.h
index 063bf0c..6f4fc44 100644
--- a/content/renderer/media/media_stream_video_source.h
+++ b/content/renderer/media/media_stream_video_source.h
@@ -5,11 +5,14 @@
#ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_VIDEO_SOURCE_H_
#define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_VIDEO_SOURCE_H_
+#include <vector>
+
#include "base/compiler_specific.h"
-#include "base/memory/ref_counted.h"
#include "content/common/content_export.h"
#include "content/renderer/media/media_stream_dependency_factory.h"
#include "content/renderer/media/media_stream_source.h"
+#include "media/base/video_frame.h"
+#include "media/video/capture/video_capture_types.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
@@ -21,19 +24,29 @@ class VideoFrame;
namespace content {
class MediaStreamDependencyFactory;
+class WebRtcVideoCapturerAdapter;
// MediaStreamVideoSource is an interface used for sending video frames to a
// MediaStreamVideoTrack.
// http://dev.w3.org/2011/webrtc/editor/getusermedia.html
+// The purpose of this base class is to be able to implement different
+// MediaStreamVideoSources such as local video capture, video sources received
+// on a PeerConnection, or a source created in NaCl.
// All methods calls will be done from the main render thread.
+//
+// When the first track is added to the source by calling AddTrack,
+// the MediaStreamVideoSource implementation calls GetCurrentSupportedFormats
+// and the source implementation must respond by calling OnSupportedFormats.
+// MediaStreamVideoSource then matches the constraints provided in AddTrack
+// with the formats and calls StartSourceImpl. The source implementation must
+// call OnStartDone when the underlying source has been started or has failed
+// to start.
class CONTENT_EXPORT MediaStreamVideoSource
: public MediaStreamSource,
- NON_EXPORTED_BASE(public webrtc::ObserverInterface),
NON_EXPORTED_BASE(public base::NonThreadSafe) {
public:
- explicit MediaStreamVideoSource(
- MediaStreamDependencyFactory* factory);
-
+ explicit MediaStreamVideoSource(MediaStreamDependencyFactory* factory);
+ virtual ~MediaStreamVideoSource();
// Puts |track| in the registered tracks list.
virtual void AddTrack(const blink::WebMediaStreamTrack& track,
@@ -43,24 +56,29 @@ class CONTENT_EXPORT MediaStreamVideoSource
// TODO(ronghuawu): Remove webrtc::VideoSourceInterface from the public
// interface of this class.
- webrtc::VideoSourceInterface* GetAdapter() {
- return adapter_;
- }
+ // This creates a VideoSourceInterface implementation if it does not already
+ // exist.
+ webrtc::VideoSourceInterface* GetAdapter();
+
+ // Constraint keys used by a video source.
+ // Specified by draft-alvestrand-constraints-resolution-00b
+ static const char kMinAspectRatio[]; // minAspectRatio
+ static const char kMaxAspectRatio[]; // maxAspectRatio
+ static const char kMaxWidth[]; // maxWidth
+ static const char kMinWidth[]; // minWidth
+ static const char kMaxHeight[]; // maxHeight
+ static const char kMinHeight[]; // minHeight
+ static const char kMaxFrameRate[]; // maxFrameRate
+ static const char kMinFrameRate[]; // minFrameRate
+
+ // Default resolution. If no constraints are specified and the delegate
+ // supports it, this is the resolution that will be used.
+ static const int kDefaultWidth;
+ static const int kDefaultHeight;
+ static const int kDefaultFrameRate;
protected:
- virtual void DoStopSource() OVERRIDE {}
-
- // Called when the first track is added to this source.
- // It currently creates a webrtc::VideoSourceInterface.
- // If a derived class overrides this method, it must call SetAdapter.
- virtual void InitAdapter(const blink::WebMediaConstraints& constraints);
-
- // Set the webrtc::VideoSourceInterface adapter used by this class.
- // It must be called by a derived class that overrides the InitAdapter method.
- void SetAdapter(webrtc::VideoSourceInterface* adapter) {
- DCHECK(!adapter_);
- adapter_ = adapter;
- }
+ virtual void DoStopSource() OVERRIDE;
MediaStreamDependencyFactory* factory() { return factory_; }
@@ -72,26 +90,75 @@ class CONTENT_EXPORT MediaStreamVideoSource
// planes and I420.
virtual void DeliverVideoFrame(const scoped_refptr<media::VideoFrame>& frame);
- // Implements webrtc::Observer.
- virtual void OnChanged() OVERRIDE;
-
- virtual ~MediaStreamVideoSource();
+ // An implementation must fetch the formats that can currently be used by
+ // the source and call OnSupportedFormats when done.
+ // |max_requested_height| and |max_requested_width| are the max height and
+ // width set as mandatory constraints, if any, when calling
+ // MediaStreamVideoSource::AddTrack. If no max height and max width are set,
+ // |max_requested_height| and |max_requested_width| are 0.
+ virtual void GetCurrentSupportedFormats(int max_requested_width,
+ int max_requested_height) = 0;
+ void OnSupportedFormats(const media::VideoCaptureFormats& formats);
+
+ // An implementation must start capturing frames using the resolution in
+ // |params|. When the source has started, or has failed to start,
+ // OnStartDone must be called. An implementation must call
+ // DeliverVideoFrame with the captured frames.
+ virtual void StartSourceImpl(const media::VideoCaptureParams& params) = 0;
+ void OnStartDone(bool success);
+
+ // An implementation must immediately stop capturing video frames and must not
+ // call OnSupportedFormats after this method has been called. After this
+ // method has been called, MediaStreamVideoSource may be deleted.
+ virtual void StopSourceImpl() = 0;
private:
- // Checks if the underlying source state has changed from an initializing
- // state to a final state and in that case trigger all callbacks in
- // |constraints_callbacks_|.
- void TriggerConstraintsCallbackOnStateChange();
+ // Creates a webrtc::VideoSourceInterface used by libjingle.
+ void InitAdapter();
+
+ // Finds the first constraints in |requested_constraints_| that can be
+ // fulfilled. |best_format| is set to the video format that fulfills those
+ // constraints. |resulting_constraints| is set to the found constraints in
+ // |requested_constraints_|.
+ bool FindBestFormatWithConstraints(
+ const media::VideoCaptureFormats& formats,
+ media::VideoCaptureFormat* best_format,
+ blink::WebMediaConstraints* resulting_constraints);
+
+ // Triggers all cached callbacks from AddTrack. AddTrack is successful
+ // if the capture delegate has started and the constraints provided in
+ // AddTrack match the format that was used to start the device.
+ void FinalizeAddTrack();
+
+ enum State {
+ NEW,
+ RETRIEVING_CAPABILITIES,
+ STARTING,
+ STARTED,
+ ENDED
+ };
+ State state_;
+
+ media::VideoCaptureFormat current_format_;
+ blink::WebMediaConstraints current_constraints_;
+
+ struct RequestedConstraints {
+ RequestedConstraints(const blink::WebMediaConstraints& constraints,
+ const ConstraintsCallback& callback);
+ ~RequestedConstraints();
+
+ blink::WebMediaConstraints constraints;
+ ConstraintsCallback callback;
+ };
+ std::vector<RequestedConstraints> requested_constraints_;
- bool initializing_;
+ media::VideoCaptureFormats supported_formats_;
+
+ // TODO(perkj): The members below use webrtc/libjingle types. The goal is to
+ // get rid of them as far as possible.
MediaStreamDependencyFactory* factory_;
scoped_refptr<webrtc::VideoSourceInterface> adapter_;
- int width_;
- int height_;
- base::TimeDelta first_frame_timestamp_;
-
- blink::WebMediaConstraints current_constraints_;
- std::vector<ConstraintsCallback> constraints_callbacks_;
+ WebRtcVideoCapturerAdapter* capture_adapter_;
DISALLOW_COPY_AND_ASSIGN(MediaStreamVideoSource);
};
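
For orientation, the new MediaStreamVideoSource contract above boils down to: implement the three pure virtual methods and report back through OnSupportedFormats(), OnStartDone() and DeliverVideoFrame(). The sketch below illustrates that shape; it is not part of this change, and the class name SketchVideoSource is purely hypothetical.

// Illustrative sketch only (not part of this change): a hypothetical
// MediaStreamVideoSource subclass wiring the pure virtual methods above.
#include "content/renderer/media/media_stream_video_source.h"

namespace content {

class SketchVideoSource : public MediaStreamVideoSource {
 public:
  explicit SketchVideoSource(MediaStreamDependencyFactory* factory)
      : MediaStreamVideoSource(factory) {}

 protected:
  virtual void GetCurrentSupportedFormats(int max_requested_width,
                                          int max_requested_height) OVERRIDE {
    // A real source would enumerate device formats, bounded by the requested
    // maximums; here a single fixed format is reported.
    media::VideoCaptureFormats formats;
    formats.push_back(media::VideoCaptureFormat(
        gfx::Size(640, 480), 30, media::PIXEL_FORMAT_I420));
    OnSupportedFormats(formats);
  }

  virtual void StartSourceImpl(
      const media::VideoCaptureParams& params) OVERRIDE {
    // Start producing frames in |params.requested_format|, then report the
    // outcome. Each captured frame is forwarded with DeliverVideoFrame().
    OnStartDone(true);
  }

  virtual void StopSourceImpl() OVERRIDE {
    // Stop producing frames; OnSupportedFormats must not be called after this.
  }
};

}  // namespace content
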
diff --git a/content/renderer/media/media_stream_video_source_unittest.cc b/content/renderer/media/media_stream_video_source_unittest.cc
index 38950f7..1060b04 100644
--- a/content/renderer/media/media_stream_video_source_unittest.cc
+++ b/content/renderer/media/media_stream_video_source_unittest.cc
@@ -3,27 +3,52 @@
// found in the LICENSE file.
#include <string>
+#include <vector>
+#include "base/strings/string_number_conversions.h"
#include "base/strings/utf_string_conversions.h"
#include "content/renderer/media/media_stream_video_source.h"
#include "content/renderer/media/mock_media_stream_dependency_factory.h"
+#include "content/renderer/media/mock_media_stream_video_source.h"
#include "media/base/video_frame.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace content {
-class DummyMediaStreamVideoSource : public MediaStreamVideoSource {
+class ConstraintsFactory {
public:
- DummyMediaStreamVideoSource(MediaStreamDependencyFactory* factory)
- : MediaStreamVideoSource(factory) {
+ void AddMandatory(const std::string& key, int value) {
+ mandatory_.push_back(blink::WebMediaConstraint(base::UTF8ToUTF16(key),
+ base::IntToString16(value)));
+ }
+ void AddMandatory(const std::string& key, double value) {
+ mandatory_.push_back(blink::WebMediaConstraint(
+ base::UTF8ToUTF16(key),
+ base::UTF8ToUTF16(base::DoubleToString(value))));
+ }
+
+ void AddOptional(const std::string& key, int value) {
+ optional_.push_back(blink::WebMediaConstraint(base::UTF8ToUTF16(key),
+ base::IntToString16(value)));
}
- virtual ~DummyMediaStreamVideoSource() {
+ void AddOptional(const std::string& key, double value) {
+ optional_.push_back(blink::WebMediaConstraint(
+ base::UTF8ToUTF16(key),
+ base::UTF8ToUTF16(base::DoubleToString(value))));
}
- void OnNewFrame(const scoped_refptr<media::VideoFrame>& frame) {
- MediaStreamVideoSource::DeliverVideoFrame(frame);
+ blink::WebMediaConstraints CreateConstraints() {
+ blink::WebVector<blink::WebMediaConstraint> mandatory(mandatory_);
+ blink::WebVector<blink::WebMediaConstraint> optional(optional_);
+ blink::WebMediaConstraints constraints;
+ constraints.initialize(optional, mandatory);
+ return constraints;
}
+
+ private:
+ std::vector<blink::WebMediaConstraint> mandatory_;
+ std::vector<blink::WebMediaConstraint> optional_;
};
class MediaStreamVideoSourceTest
@@ -31,11 +56,24 @@ class MediaStreamVideoSourceTest
public:
MediaStreamVideoSourceTest()
: number_of_successful_constraints_applied_(0),
- number_of_failed_constraints_applied_(0) {
+ number_of_failed_constraints_applied_(0),
+ mock_source_(new MockMediaStreamVideoSource(&factory_, true)) {
+ media::VideoCaptureFormats formats;
+ formats.push_back(media::VideoCaptureFormat(
+ gfx::Size(1280, 720), 30, media::PIXEL_FORMAT_I420));
+ formats.push_back(media::VideoCaptureFormat(
+ gfx::Size(640, 480), 30, media::PIXEL_FORMAT_I420));
+ formats.push_back(media::VideoCaptureFormat(
+ gfx::Size(640, 400), 30, media::PIXEL_FORMAT_I420));
+ formats.push_back(media::VideoCaptureFormat(
+ gfx::Size(352, 288), 30, media::PIXEL_FORMAT_I420));
+ formats.push_back(media::VideoCaptureFormat(
+ gfx::Size(320, 240), 30, media::PIXEL_FORMAT_I420));
+ mock_source_->SetSupportedFormats(formats);
webkit_source_.initialize(base::UTF8ToUTF16("dummy_source_id"),
blink::WebMediaStreamSource::TypeVideo,
base::UTF8ToUTF16("dummy_source_name"));
- webkit_source_.setExtraData(new DummyMediaStreamVideoSource(&factory_));
+ webkit_source_.setExtraData(mock_source_);
}
protected:
@@ -46,8 +84,8 @@ class MediaStreamVideoSourceTest
blink::WebMediaStreamTrack track;
track.initialize(base::UTF8ToUTF16(id), webkit_source_);
- DummyMediaStreamVideoSource* source =
- static_cast<DummyMediaStreamVideoSource*>(track.source().extraData());
+ MediaStreamVideoSource* source =
+ static_cast<MediaStreamVideoSource*>(track.source().extraData());
source->AddTrack(track,
constraints,
@@ -57,24 +95,29 @@ class MediaStreamVideoSourceTest
return track;
}
- // Simulate that the underlying device start successfully.
- void StartSource() {
- factory_.last_video_source()->SetLive();
- }
+ blink::WebMediaStreamTrack CreateTrackAndStartSource(
+ const blink::WebMediaConstraints& constraints,
+ int expected_width,
+ int expected_height,
+ int expected_frame_rate) {
+ blink::WebMediaStreamTrack track = CreateTrack("123", constraints);
- // Simulate that the underlying device fail to start.
- void FailToStartSource() {
- factory_.last_video_source()->SetEnded();
- }
+ mock_source_->CompleteGetSupportedFormats();
+ const media::VideoCaptureParams& format = mock_source()->start_params();
+ EXPECT_EQ(expected_width, format.requested_format.frame_size.width());
+ EXPECT_EQ(expected_height, format.requested_format.frame_size.height());
+ EXPECT_EQ(expected_frame_rate, format.requested_format.frame_rate);
- void VerifyFrame(int width, int height, int num) {
- DummyMediaStreamVideoSource* source =
- static_cast<DummyMediaStreamVideoSource*>(webkit_source_.extraData());
- MockVideoSource* adapter =
- static_cast<MockVideoSource*>(source->GetAdapter());
- EXPECT_EQ(width, adapter->GetLastFrameWidth());
- EXPECT_EQ(height, adapter->GetLastFrameHeight());
- EXPECT_EQ(num, adapter->GetFrameNum());
+ MediaStreamVideoSource* source =
+ static_cast<MediaStreamVideoSource*>(track.source().extraData());
+ EXPECT_TRUE(source->GetAdapter() != NULL);
+
+ EXPECT_EQ(0, NumberOfSuccessConstraintsCallbacks());
+ mock_source_->StartMockedSource();
+ // Once the source has started successfully we expect that the
+ // ConstraintsCallback in MediaStreamSource::AddTrack completes.
+ EXPECT_EQ(1, NumberOfSuccessConstraintsCallbacks());
+ return track;
}
int NumberOfSuccessConstraintsCallbacks() const {
@@ -85,6 +128,8 @@ class MediaStreamVideoSourceTest
return number_of_failed_constraints_applied_;
}
+ MockMediaStreamVideoSource* mock_source() { return mock_source_; }
+
private:
void OnConstraintsApplied(MediaStreamSource* source, bool success) {
ASSERT_EQ(source, webkit_source_.extraData());
@@ -99,57 +144,213 @@ class MediaStreamVideoSourceTest
int number_of_failed_constraints_applied_;
MockMediaStreamDependencyFactory factory_;
blink::WebMediaStreamSource webkit_source_;
+ // |mock_source_| is owned by |webkit_source_|.
+ MockMediaStreamVideoSource* mock_source_;
};
-TEST_F(MediaStreamVideoSourceTest, AddTrackAndStartAdapter) {
+TEST_F(MediaStreamVideoSourceTest, AddTrackAndStartSource) {
blink::WebMediaConstraints constraints;
+ constraints.initialize();
blink::WebMediaStreamTrack track = CreateTrack("123", constraints);
- StartSource();
+ mock_source()->CompleteGetSupportedFormats();
+ mock_source()->StartMockedSource();
EXPECT_EQ(1, NumberOfSuccessConstraintsCallbacks());
}
-TEST_F(MediaStreamVideoSourceTest, AddTwoTracksBeforeAdapterStart) {
+TEST_F(MediaStreamVideoSourceTest, AddTwoTracksBeforeSourceStarts) {
blink::WebMediaConstraints constraints;
+ constraints.initialize();
blink::WebMediaStreamTrack track1 = CreateTrack("123", constraints);
+ mock_source()->CompleteGetSupportedFormats();
blink::WebMediaStreamTrack track2 = CreateTrack("123", constraints);
EXPECT_EQ(0, NumberOfSuccessConstraintsCallbacks());
- StartSource();
+ mock_source()->StartMockedSource();
EXPECT_EQ(2, NumberOfSuccessConstraintsCallbacks());
}
-TEST_F(MediaStreamVideoSourceTest, AddTrackAfterAdapterStart) {
+TEST_F(MediaStreamVideoSourceTest, AddTrackAfterSourceStarts) {
blink::WebMediaConstraints constraints;
+ constraints.initialize();
blink::WebMediaStreamTrack track1 = CreateTrack("123", constraints);
- StartSource();
+ mock_source()->CompleteGetSupportedFormats();
+ mock_source()->StartMockedSource();
EXPECT_EQ(1, NumberOfSuccessConstraintsCallbacks());
blink::WebMediaStreamTrack track2 = CreateTrack("123", constraints);
EXPECT_EQ(2, NumberOfSuccessConstraintsCallbacks());
}
-TEST_F(MediaStreamVideoSourceTest, AddTrackAndFailToStartAdapter) {
+TEST_F(MediaStreamVideoSourceTest, AddTrackAndFailToStartSource) {
blink::WebMediaConstraints constraints;
+ constraints.initialize();
blink::WebMediaStreamTrack track = CreateTrack("123", constraints);
- FailToStartSource();
+ mock_source()->CompleteGetSupportedFormats();
+ mock_source()->FailToStartMockedSource();
EXPECT_EQ(1, NumberOfFailedConstraintsCallbacks());
}
-TEST_F(MediaStreamVideoSourceTest, DeliverVideoFrame) {
+TEST_F(MediaStreamVideoSourceTest, AddTwoTracksBeforeGetSupportedFormats) {
blink::WebMediaConstraints constraints;
- blink::WebMediaStreamTrack track = CreateTrack("123", constraints);
- StartSource();
- DummyMediaStreamVideoSource* source =
- static_cast<DummyMediaStreamVideoSource*>(track.source().extraData());
- VerifyFrame(0, 0, 0);
- const int kWidth = 640;
- const int kHeight = 480;
+ constraints.initialize();
+ blink::WebMediaStreamTrack track1 = CreateTrack("123", constraints);
+ blink::WebMediaStreamTrack track2 = CreateTrack("123", constraints);
+ mock_source()->CompleteGetSupportedFormats();
+ mock_source()->StartMockedSource();
+ EXPECT_EQ(2, NumberOfSuccessConstraintsCallbacks());
+}
+
+// Test that the capture output is CIF if we set max constraints to CIF
+// and the capture device supports CIF.
+TEST_F(MediaStreamVideoSourceTest, MandatoryConstraintCif5Fps) {
+ ConstraintsFactory factory;
+ factory.AddMandatory(MediaStreamVideoSource::kMaxWidth, 352);
+ factory.AddMandatory(MediaStreamVideoSource::kMaxHeight, 288);
+ factory.AddMandatory(MediaStreamVideoSource::kMaxFrameRate, 5);
+
+ CreateTrackAndStartSource(factory.CreateConstraints(), 352, 288, 5);
+}
+
+// Test that the capture output is 720P if the camera supports it and the
+// optional constraint is set to 720P.
+TEST_F(MediaStreamVideoSourceTest, MandatoryMinVgaOptional720P) {
+ ConstraintsFactory factory;
+ factory.AddMandatory(MediaStreamVideoSource::kMinWidth, 640);
+ factory.AddMandatory(MediaStreamVideoSource::kMinHeight, 480);
+ factory.AddOptional(MediaStreamVideoSource::kMinWidth, 1280);
+ factory.AddOptional(MediaStreamVideoSource::kMinAspectRatio,
+ 1280.0 / 720);
+
+ CreateTrackAndStartSource(factory.CreateConstraints(), 1280, 720, 30);
+}
+
+// Test that the capture output has a 4:3 aspect ratio if a mandatory
+// constraint requires it, even if an optional constraint requests a higher
+// resolution that does not have this aspect ratio.
+TEST_F(MediaStreamVideoSourceTest, MandatoryAspectRatio4To3) {
+ ConstraintsFactory factory;
+ factory.AddMandatory(MediaStreamVideoSource::kMinWidth, 640);
+ factory.AddMandatory(MediaStreamVideoSource::kMinHeight, 480);
+ factory.AddMandatory(MediaStreamVideoSource::kMaxAspectRatio,
+ 640.0 / 480);
+ factory.AddOptional(MediaStreamVideoSource::kMinWidth, 1280);
+
+ CreateTrackAndStartSource(factory.CreateConstraints(), 640, 480, 30);
+}
+
+// Test that ApplyConstraints fails if the mandatory aspect ratio
+// is set higher than supported.
+TEST_F(MediaStreamVideoSourceTest, MandatoryAspectRatioTooHigh) {
+ ConstraintsFactory factory;
+ factory.AddMandatory(MediaStreamVideoSource::kMinAspectRatio, 2);
+ CreateTrack("123", factory.CreateConstraints());
+ mock_source()->CompleteGetSupportedFormats();
+ EXPECT_EQ(1, NumberOfFailedConstraintsCallbacks());
+}
+
+// Test that the source ignores an optional aspect ratio that is higher than
+// supported.
+TEST_F(MediaStreamVideoSourceTest, OptionalAspectRatioTooHigh) {
+ ConstraintsFactory factory;
+ factory.AddOptional(MediaStreamVideoSource::kMinAspectRatio, 2);
+ CreateTrack("123", factory.CreateConstraints());
+ mock_source()->CompleteGetSupportedFormats();
+
+ const media::VideoCaptureParams& params = mock_source()->start_params();
+ double aspect_ratio =
+ static_cast<double>(params.requested_format.frame_size.width()) /
+ params.requested_format.frame_size.height();
+ EXPECT_LT(aspect_ratio, 2);
+}
+
+// Test that the source starts video with the default resolution if that is
+// the only supported format.
+TEST_F(MediaStreamVideoSourceTest, DefaultCapability) {
+ media::VideoCaptureFormats formats;
+ formats.push_back(media::VideoCaptureFormat(
+ gfx::Size(MediaStreamVideoSource::kDefaultWidth,
+ MediaStreamVideoSource::kDefaultHeight),
+ MediaStreamVideoSource::kDefaultFrameRate,
+ media::PIXEL_FORMAT_I420));
+ mock_source()->SetSupportedFormats(formats);
+
+ blink::WebMediaConstraints constraints;
+ constraints.initialize();
+ CreateTrackAndStartSource(constraints,
+ MediaStreamVideoSource::kDefaultWidth,
+ MediaStreamVideoSource::kDefaultHeight,
+ 30);
+}
+
+TEST_F(MediaStreamVideoSourceTest, InvalidMandatoryConstraint) {
+ ConstraintsFactory factory;
+ factory.AddMandatory("weird key", 640);
+
+ CreateTrack("123", factory.CreateConstraints());
+ mock_source()->CompleteGetSupportedFormats();
+ EXPECT_EQ(1, NumberOfFailedConstraintsCallbacks());
+}
+
+// Test that the source ignores an unknown optional constraint.
+TEST_F(MediaStreamVideoSourceTest, InvalidOptionalConstraint) {
+ ConstraintsFactory factory;
+ factory.AddOptional("weird key", 640);
+
+ CreateTrackAndStartSource(factory.CreateConstraints(),
+ MediaStreamVideoSource::kDefaultWidth,
+ MediaStreamVideoSource::kDefaultHeight,
+ 30);
+}
+
+// Tests that the source starts video with the max width and height set by
+// constraints for screencast.
+TEST_F(MediaStreamVideoSourceTest, ScreencastResolutionWithConstraint) {
+ media::VideoCaptureFormats formats;
+ formats.push_back(media::VideoCaptureFormat(
+ gfx::Size(480, 270), 30, media::PIXEL_FORMAT_I420));
+ mock_source()->SetSupportedFormats(formats);
+ ConstraintsFactory factory;
+ factory.AddMandatory(MediaStreamVideoSource::kMaxWidth, 480);
+ factory.AddMandatory(MediaStreamVideoSource::kMaxHeight, 270);
+
+ CreateTrackAndStartSource(factory.CreateConstraints(), 480, 270, 30);
+ EXPECT_EQ(480, mock_source()->max_requested_width());
+ EXPECT_EQ(270, mock_source()->max_requested_height());
+}
+
+// Test that optional constraints are applied in order.
+TEST_F(MediaStreamVideoSourceTest, OptionalConstraints) {
+ ConstraintsFactory factory;
+ // Min width of 2056 pixels cannot be fulfilled.
+ factory.AddOptional(MediaStreamVideoSource::kMinWidth, 2056);
+ factory.AddOptional(MediaStreamVideoSource::kMinWidth, 641);
+ // Since min width is set to 641 pixels, max width 640 cannot be fulfilled.
+ factory.AddOptional(MediaStreamVideoSource::kMaxWidth, 640);
+ CreateTrackAndStartSource(factory.CreateConstraints(), 1280, 720, 30);
+}
+
+// Test that the webrtc video adapter can be created and that it receives
+// video frames when the source delivers video frames.
+TEST_F(MediaStreamVideoSourceTest, AdapterReceiveVideoFrame) {
+ ConstraintsFactory factory;
+ CreateTrackAndStartSource(factory.CreateConstraints(),
+ MediaStreamVideoSource::kDefaultWidth,
+ MediaStreamVideoSource::kDefaultHeight,
+ MediaStreamVideoSource::kDefaultFrameRate);
+ ASSERT_TRUE(mock_source()->GetAdapter());
+ MockVideoSource* adapter = static_cast<MockVideoSource*>(
+ mock_source()->GetAdapter());
+ EXPECT_EQ(0, adapter->GetFrameNum());
+
scoped_refptr<media::VideoFrame> frame =
- media::VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight));
- ASSERT_TRUE(frame.get());
- source->OnNewFrame(frame);
- VerifyFrame(640, 480, 1);
- source->OnNewFrame(frame);
- VerifyFrame(640, 480, 2);
- source->RemoveTrack(track);
+ media::VideoFrame::CreateBlackFrame(
+ gfx::Size(MediaStreamVideoSource::kDefaultWidth,
+ MediaStreamVideoSource::kDefaultHeight));
+ mock_source()->DeliverVideoFrame(frame);
+ EXPECT_EQ(1, adapter->GetFrameNum());
+ EXPECT_EQ(MediaStreamVideoSource::kDefaultWidth,
+ adapter->GetLastFrameWidth());
+ EXPECT_EQ(MediaStreamVideoSource::kDefaultHeight,
+ adapter->GetLastFrameHeight());
}
+
} // namespace content
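
The constraint tests above encode two rules: every mandatory constraint must be satisfiable by at least one supported format, otherwise the constraints callback fails, and optional constraints are applied one at a time, in order, with any unsatisfiable one silently skipped. The sketch below restates that selection rule with plain std types instead of blink::WebMediaConstraints; Format, Predicate and SelectFormat are illustrative names, not the FindBestFormatWithConstraints implementation.

#include <cstddef>
#include <vector>

// Illustrative sketch only (not part of this change). A constraint is
// modeled as a predicate over a candidate format.
struct Format { int width; int height; int frame_rate; };
typedef bool (*Predicate)(const Format&);

// Keeps only the formats that satisfy |predicate|.
static std::vector<Format> Filter(const std::vector<Format>& in,
                                  Predicate predicate) {
  std::vector<Format> out;
  for (size_t i = 0; i < in.size(); ++i) {
    if (predicate(in[i]))
      out.push_back(in[i]);
  }
  return out;
}

// Every mandatory constraint must leave at least one candidate; each optional
// constraint is applied in order and ignored if it would eliminate them all.
static bool SelectFormat(const std::vector<Format>& supported,
                         const std::vector<Predicate>& mandatory,
                         const std::vector<Predicate>& optional,
                         Format* best) {
  std::vector<Format> candidates = supported;
  for (size_t i = 0; i < mandatory.size(); ++i) {
    candidates = Filter(candidates, mandatory[i]);
    if (candidates.empty())
      return false;  // AddTrack reports a failed constraints callback.
  }
  for (size_t i = 0; i < optional.size(); ++i) {
    std::vector<Format> narrowed = Filter(candidates, optional[i]);
    if (!narrowed.empty())
      candidates = narrowed;  // Unsatisfiable optional constraints are skipped.
  }
  *best = candidates[0];
  return true;
}
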
diff --git a/content/renderer/media/mock_media_stream_dependency_factory.cc b/content/renderer/media/mock_media_stream_dependency_factory.cc
index 0e57d97..e0eaf03 100644
--- a/content/renderer/media/mock_media_stream_dependency_factory.cc
+++ b/content/renderer/media/mock_media_stream_dependency_factory.cc
@@ -9,6 +9,7 @@
#include "content/renderer/media/mock_peer_connection_impl.h"
#include "content/renderer/media/webaudio_capturer_source.h"
#include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
+#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
#include "content/renderer/media/webrtc_audio_capturer.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"
@@ -119,6 +120,44 @@ void MockMediaStream::UnregisterObserver(ObserverInterface* observer) {
MockMediaStream::~MockMediaStream() {}
+class MockRtcVideoCapturer : public WebRtcVideoCapturerAdapter {
+ public:
+ explicit MockRtcVideoCapturer(bool is_screencast)
+ : WebRtcVideoCapturerAdapter(is_screencast),
+ number_of_capturered_frames_(0),
+ width_(0),
+ height_(0) {
+ }
+
+ virtual void SetRequestedFormat(
+ const media::VideoCaptureFormat& format) OVERRIDE {
+ }
+
+ virtual void OnFrameCaptured(
+ const scoped_refptr<media::VideoFrame>& frame) OVERRIDE {
+ ++number_of_capturered_frames_;
+ width_ = frame->coded_size().width();
+ height_ = frame->coded_size().height();
+ }
+
+ int GetLastFrameWidth() const {
+ return width_;
+ }
+
+ int GetLastFrameHeight() const {
+ return height_;
+ }
+
+ int GetFrameNum() const {
+ return number_of_capturered_frames_;
+ }
+
+ private:
+ int number_of_capturered_frames_;
+ int width_;
+ int height_;
+};
+
MockVideoRenderer::MockVideoRenderer()
: width_(0),
height_(0),
@@ -230,6 +269,23 @@ const cricket::VideoOptions* MockVideoSource::options() const {
return NULL;
}
+int MockVideoSource::GetLastFrameWidth() const {
+ DCHECK(capturer_);
+ return
+ static_cast<MockRtcVideoCapturer*>(capturer_.get())->GetLastFrameWidth();
+}
+
+int MockVideoSource::GetLastFrameHeight() const {
+ DCHECK(capturer_);
+ return
+ static_cast<MockRtcVideoCapturer*>(capturer_.get())->GetLastFrameHeight();
+}
+
+int MockVideoSource::GetFrameNum() const {
+ DCHECK(capturer_);
+ return static_cast<MockRtcVideoCapturer*>(capturer_.get())->GetFrameNum();
+}
+
MockLocalVideoTrack::MockLocalVideoTrack(std::string id,
webrtc::VideoSourceInterface* source)
: enabled_(false),
@@ -395,15 +451,16 @@ MockMediaStreamDependencyFactory::CreateLocalAudioSource(
return last_audio_source_;
}
-cricket::VideoCapturer* MockMediaStreamDependencyFactory::CreateVideoCapturer(
- const StreamDeviceInfo& info) {
- return NULL;
+WebRtcVideoCapturerAdapter*
+MockMediaStreamDependencyFactory::CreateVideoCapturer(
+ bool is_screen_capture) {
+ return new MockRtcVideoCapturer(is_screen_capture);
}
scoped_refptr<webrtc::VideoSourceInterface>
MockMediaStreamDependencyFactory::CreateVideoSource(
cricket::VideoCapturer* capturer,
- const webrtc::MediaConstraintsInterface* constraints) {
+ const blink::WebMediaConstraints& constraints) {
last_video_source_ = new talk_base::RefCountedObject<MockVideoSource>();
last_video_source_->SetVideoCapturer(capturer);
return last_video_source_;
diff --git a/content/renderer/media/mock_media_stream_dependency_factory.h b/content/renderer/media/mock_media_stream_dependency_factory.h
index 2171cde..243920d 100644
--- a/content/renderer/media/mock_media_stream_dependency_factory.h
+++ b/content/renderer/media/mock_media_stream_dependency_factory.h
@@ -55,9 +55,9 @@ class MockVideoSource : public webrtc::VideoSourceInterface {
void SetVideoCapturer(cricket::VideoCapturer* capturer);
// Test helpers.
- int GetLastFrameWidth() const { return renderer_.width(); }
- int GetLastFrameHeight() const { return renderer_.height(); }
- int GetFrameNum() const { return renderer_.num(); }
+ int GetLastFrameWidth() const;
+ int GetLastFrameHeight() const;
+ int GetFrameNum() const;
protected:
virtual ~MockVideoSource();
@@ -175,12 +175,12 @@ class MockMediaStreamDependencyFactory : public MediaStreamDependencyFactory {
virtual scoped_refptr<webrtc::AudioSourceInterface>
CreateLocalAudioSource(
const webrtc::MediaConstraintsInterface* constraints) OVERRIDE;
- virtual cricket::VideoCapturer* CreateVideoCapturer(
- const StreamDeviceInfo& info) OVERRIDE;
+ virtual WebRtcVideoCapturerAdapter* CreateVideoCapturer(
+ bool is_screen_capture) OVERRIDE;
virtual scoped_refptr<webrtc::VideoSourceInterface>
CreateVideoSource(
cricket::VideoCapturer* capturer,
- const webrtc::MediaConstraintsInterface* constraints) OVERRIDE;
+ const blink::WebMediaConstraints& constraints) OVERRIDE;
virtual scoped_refptr<WebAudioCapturerSource> CreateWebAudioSource(
blink::WebMediaStreamSource* source) OVERRIDE;
virtual scoped_refptr<webrtc::MediaStreamInterface>
diff --git a/content/renderer/media/mock_media_stream_video_source.cc b/content/renderer/media/mock_media_stream_video_source.cc
new file mode 100644
index 0000000..f88473b
--- /dev/null
+++ b/content/renderer/media/mock_media_stream_video_source.cc
@@ -0,0 +1,66 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/renderer/media/mock_media_stream_video_source.h"
+
+namespace content {
+
+MockMediaStreamVideoSource::MockMediaStreamVideoSource(
+ MediaStreamDependencyFactory* factory,
+ bool manual_get_supported_formats)
+ : MediaStreamVideoSource(factory),
+ manual_get_supported_formats_(manual_get_supported_formats),
+ max_requested_height_(0),
+ max_requested_width_(0),
+ attempted_to_start_(false) {
+ supported_formats_.push_back(
+ media::VideoCaptureFormat(
+ gfx::Size(MediaStreamVideoSource::kDefaultWidth,
+ MediaStreamVideoSource::kDefaultHeight),
+ MediaStreamVideoSource::kDefaultFrameRate,
+ media::PIXEL_FORMAT_I420));
+}
+
+MockMediaStreamVideoSource::~MockMediaStreamVideoSource() {}
+
+void MockMediaStreamVideoSource::StartMockedSource() {
+ DCHECK(attempted_to_start_);
+ attempted_to_start_ = false;
+ OnStartDone(true);
+}
+
+void MockMediaStreamVideoSource::FailToStartMockedSource() {
+ DCHECK(attempted_to_start_);
+ attempted_to_start_ = false;
+ OnStartDone(false);
+}
+
+void MockMediaStreamVideoSource::CompleteGetSupportedFormats() {
+ OnSupportedFormats(supported_formats_);
+}
+
+void MockMediaStreamVideoSource::GetCurrentSupportedFormats(
+ int max_requested_width,
+ int max_requested_height) {
+ max_requested_width_ = max_requested_width;
+ max_requested_height_ = max_requested_height;
+
+ if (!manual_get_supported_formats_)
+ OnSupportedFormats(supported_formats_);
+}
+
+void MockMediaStreamVideoSource::StartSourceImpl(
+ const media::VideoCaptureParams& params) {
+ params_ = params;
+ attempted_to_start_ = true;
+}
+
+void MockMediaStreamVideoSource::StopSourceImpl() {
+}
+
+} // namespace content
+
+
+
+
diff --git a/content/renderer/media/mock_media_stream_video_source.h b/content/renderer/media/mock_media_stream_video_source.h
new file mode 100644
index 0000000..c1e5452
--- /dev/null
+++ b/content/renderer/media/mock_media_stream_video_source.h
@@ -0,0 +1,61 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_RENDERER_MEDIA_MOCK_MEDIA_STREAM_VIDEO_SOURCE_H_
+#define CONTENT_RENDERER_MEDIA_MOCK_MEDIA_STREAM_VIDEO_SOURCE_H_
+
+#include "content/renderer/media/media_stream_video_source.h"
+
+namespace content {
+
+class MockMediaStreamVideoSource
+ : public MediaStreamVideoSource {
+ public:
+ MockMediaStreamVideoSource(MediaStreamDependencyFactory* factory,
+ bool manual_get_supported_formats);
+ virtual ~MockMediaStreamVideoSource();
+
+ // Simulate that the underlying source starts successfully.
+ void StartMockedSource();
+
+ // Simulate that the underlying source fails to start.
+ void FailToStartMockedSource();
+
+ // Returns true if StartSourceImpl has been called and neither
+ // StartMockedSource nor FailToStartMockedSource has been called yet.
+ bool SourceHasAttemptedToStart() { return attempted_to_start_; }
+
+ void SetSupportedFormats(const media::VideoCaptureFormats& formats) {
+ supported_formats_ = formats;
+ }
+
+ void CompleteGetSupportedFormats();
+
+ const media::VideoCaptureParams& start_params() const { return params_; }
+ int max_requested_height() const { return max_requested_height_; }
+ int max_requested_width() const { return max_requested_width_; }
+
+ using MediaStreamVideoSource::DeliverVideoFrame;
+
+ protected:
+ // Implements MediaStreamVideoSource.
+ virtual void GetCurrentSupportedFormats(
+ int max_requested_width,
+ int max_requested_height) OVERRIDE;
+ virtual void StartSourceImpl(
+ const media::VideoCaptureParams& params) OVERRIDE;
+ virtual void StopSourceImpl() OVERRIDE;
+
+ private:
+ media::VideoCaptureParams params_;
+ media::VideoCaptureFormats supported_formats_;
+ bool manual_get_supported_formats_;
+ int max_requested_height_;
+ int max_requested_width_;
+ bool attempted_to_start_;
+};
+
+} // namespace content
+
+#endif // CONTENT_RENDERER_MEDIA_MOCK_MEDIA_STREAM_VIDEO_SOURCE_H_
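
In tests, the mock above is driven in three explicit steps. The helper below sketches the flow; DriveMockSource is a hypothetical name, while the mock's own methods are as declared above.

// Illustrative sketch only (not part of this change): the three steps a test
// takes to drive MockMediaStreamVideoSource once a track has been added.
#include "base/logging.h"
#include "content/renderer/media/mock_media_stream_video_source.h"

void DriveMockSource(content::MockMediaStreamVideoSource* source) {
  // 1. Answer the pending GetCurrentSupportedFormats() request.
  source->CompleteGetSupportedFormats();
  // 2. The base class has now picked a format and called StartSourceImpl();
  //    the mock only records the attempt instead of starting real capture.
  DCHECK(source->SourceHasAttemptedToStart());
  // 3. Report the outcome, which runs the callbacks cached by AddTrack.
  source->StartMockedSource();  // Or FailToStartMockedSource() for failure.
}
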
diff --git a/content/renderer/media/rtc_peer_connection_handler_unittest.cc b/content/renderer/media/rtc_peer_connection_handler_unittest.cc
index 41b0352..293a327 100644
--- a/content/renderer/media/rtc_peer_connection_handler_unittest.cc
+++ b/content/renderer/media/rtc_peer_connection_handler_unittest.cc
@@ -10,9 +10,10 @@
#include "base/values.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_audio_source.h"
-#include "content/renderer/media/media_stream_video_source.h"
+#include "content/renderer/media/media_stream_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/mock_media_stream_dependency_factory.h"
+#include "content/renderer/media/mock_media_stream_video_source.h"
#include "content/renderer/media/mock_peer_connection_impl.h"
#include "content/renderer/media/mock_web_rtc_peer_connection_handler_client.h"
#include "content/renderer/media/peer_connection_tracker.h"
@@ -230,7 +231,8 @@ class RTCPeerConnectionHandlerTest : public ::testing::Test {
blink::WebMediaStreamSource::TypeVideo,
blink::WebString::fromUTF8("video_track"));
video_source.setExtraData(
- new MediaStreamVideoSource(mock_dependency_factory_.get()));
+ new MockMediaStreamVideoSource(mock_dependency_factory_.get(),
+ false));
blink::WebVector<blink::WebMediaStreamTrack> audio_tracks(
static_cast<size_t>(1));
@@ -426,8 +428,8 @@ TEST_F(RTCPeerConnectionHandlerTest, addStreamWithStoppedAudioAndVideoTrack) {
blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
local_stream.audioTracks(audio_tracks);
- MediaStreamVideoSource* native_audio_source =
- static_cast<MediaStreamVideoSource*>(
+ MediaStreamAudioSource* native_audio_source =
+ static_cast<MediaStreamAudioSource*>(
audio_tracks[0].source().extraData());
native_audio_source->StopSource();
diff --git a/content/renderer/media/rtc_video_capture_delegate.cc b/content/renderer/media/rtc_video_capture_delegate.cc
deleted file mode 100644
index 13d66abd..0000000
--- a/content/renderer/media/rtc_video_capture_delegate.cc
+++ /dev/null
@@ -1,126 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "content/renderer/media/rtc_video_capture_delegate.h"
-
-#include "base/bind.h"
-#include "base/location.h"
-#include "content/renderer/media/video_capture_impl_manager.h"
-#include "content/renderer/render_thread_impl.h"
-#include "media/base/video_frame.h"
-
-namespace content {
-
-RtcVideoCaptureDelegate::RtcVideoCaptureDelegate(
- const media::VideoCaptureSessionId id)
- : session_id_(id),
- got_first_frame_(false),
- error_occured_(false) {
- DVLOG(3) << " RtcVideoCaptureDelegate::ctor";
- capture_engine_ =
- RenderThreadImpl::current()->video_capture_impl_manager()
- ->UseDevice(session_id_);
-}
-
-RtcVideoCaptureDelegate::~RtcVideoCaptureDelegate() {
- DVLOG(3) << " RtcVideoCaptureDelegate::dtor";
- StopCapture();
-}
-
-void RtcVideoCaptureDelegate::StartCapture(
- const media::VideoCaptureParams& params,
- const FrameCapturedCallback& captured_callback,
- const StateChangeCallback& state_callback) {
- DVLOG(3) << " RtcVideoCaptureDelegate::StartCapture ";
- message_loop_proxy_ = base::MessageLoopProxy::current();
- captured_callback_ = captured_callback;
- state_callback_ = state_callback;
- got_first_frame_ = false;
- error_occured_ = false;
-
- // Increase the reference count to ensure we are not deleted until
- // The we are unregistered in RtcVideoCaptureDelegate::OnRemoved.
- AddRef();
- capture_engine_->StartCapture(this, params);
-}
-
-void RtcVideoCaptureDelegate::StopCapture() {
- // Immediately make sure we don't provide more frames.
- captured_callback_.Reset();
- state_callback_.Reset();
- capture_engine_->StopCapture(this);
-}
-
-void RtcVideoCaptureDelegate::OnStarted(media::VideoCapture* capture) {
- DVLOG(3) << " RtcVideoCaptureDelegate::OnStarted";
-}
-
-void RtcVideoCaptureDelegate::OnStopped(media::VideoCapture* capture) {
-}
-
-void RtcVideoCaptureDelegate::OnPaused(media::VideoCapture* capture) {
-}
-
-void RtcVideoCaptureDelegate::OnError(media::VideoCapture* capture,
- int error_code) {
- DVLOG(3) << " RtcVideoCaptureDelegate::OnError";
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&RtcVideoCaptureDelegate::OnErrorOnCaptureThread,
- this, capture));
-}
-
-void RtcVideoCaptureDelegate::OnRemoved(media::VideoCapture* capture) {
- DVLOG(3) << " RtcVideoCaptureDelegate::OnRemoved";
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&RtcVideoCaptureDelegate::OnRemovedOnCaptureThread,
- this, capture));
-
- // Balance the AddRef in StartCapture.
- // This means we are no longer registered as an event handler and can safely
- // be deleted.
- Release();
-}
-
-void RtcVideoCaptureDelegate::OnFrameReady(
- media::VideoCapture* capture,
- const scoped_refptr<media::VideoFrame>& frame) {
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&RtcVideoCaptureDelegate::OnFrameReadyOnCaptureThread,
- this,
- capture,
- frame));
-}
-
-void RtcVideoCaptureDelegate::OnFrameReadyOnCaptureThread(
- media::VideoCapture* capture,
- const scoped_refptr<media::VideoFrame>& frame) {
- if (!captured_callback_.is_null()) {
- if (!got_first_frame_) {
- got_first_frame_ = true;
- if (!state_callback_.is_null())
- state_callback_.Run(CAPTURE_RUNNING);
- }
-
- captured_callback_.Run(frame);
- }
-}
-
-void RtcVideoCaptureDelegate::OnErrorOnCaptureThread(
- media::VideoCapture* capture) {
- error_occured_ = true;
- if (!state_callback_.is_null())
- state_callback_.Run(CAPTURE_FAILED);
-}
-
-
-void RtcVideoCaptureDelegate::OnRemovedOnCaptureThread(
- media::VideoCapture* capture) {
- if (!error_occured_ && !state_callback_.is_null())
- state_callback_.Run(CAPTURE_STOPPED);
-}
-
-} // namespace content
diff --git a/content/renderer/media/rtc_video_capture_delegate.h b/content/renderer/media/rtc_video_capture_delegate.h
deleted file mode 100644
index 3a69acc..0000000
--- a/content/renderer/media/rtc_video_capture_delegate.h
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CONTENT_RENDERER_MEDIA_RTC_VIDEO_CAPTURE_DELEGATE_H_
-#define CONTENT_RENDERER_MEDIA_RTC_VIDEO_CAPTURE_DELEGATE_H_
-
-#include "base/callback.h"
-#include "base/message_loop/message_loop_proxy.h"
-#include "content/common/media/video_capture.h"
-#include "media/video/capture/video_capture.h"
-
-namespace content {
-
-class VideoCaptureHandle;
-
-// Implements a simple reference counted video capturer that guarantees that
-// methods in RtcVideoCaptureDelegateEventHandler is only called from when
-// StartCapture have been called until after StopCapture have been called.
-// It uses VideoCaptureImplManager to start / stop and receive I420 frames
-// from Chrome's video capture implementation.
-class RtcVideoCaptureDelegate
- : public base::RefCountedThreadSafe<RtcVideoCaptureDelegate>,
- public media::VideoCapture::EventHandler {
- public:
- enum CaptureState {
- CAPTURE_STOPPED, // The capturer has been stopped or hasn't started yet.
- CAPTURE_RUNNING, // The capturer has been started successfully and is now
- // capturing.
- CAPTURE_FAILED, // The capturer failed to start.
- };
-
- typedef base::Callback<void(const scoped_refptr<media::VideoFrame>&)>
- FrameCapturedCallback;
- typedef base::Callback<void(CaptureState)> StateChangeCallback;
-
- RtcVideoCaptureDelegate(const media::VideoCaptureSessionId id);
-
- void StartCapture(const media::VideoCaptureParams& params,
- const FrameCapturedCallback& captured_callback,
- const StateChangeCallback& state_callback);
- void StopCapture();
-
- protected:
- // media::VideoCapture::EventHandler implementation.
- // These functions are called on the IO thread (same as where
- // |capture_engine_| runs).
- virtual void OnStarted(media::VideoCapture* capture) OVERRIDE;
- virtual void OnStopped(media::VideoCapture* capture) OVERRIDE;
- virtual void OnPaused(media::VideoCapture* capture) OVERRIDE;
- virtual void OnError(media::VideoCapture* capture, int error_code) OVERRIDE;
- virtual void OnRemoved(media::VideoCapture* capture) OVERRIDE;
- virtual void OnFrameReady(
- media::VideoCapture* capture,
- const scoped_refptr<media::VideoFrame>& frame) OVERRIDE;
-
- private:
- friend class base::RefCountedThreadSafe<RtcVideoCaptureDelegate>;
-
- virtual ~RtcVideoCaptureDelegate();
-
- void OnFrameReadyOnCaptureThread(
- media::VideoCapture* capture,
- const scoped_refptr<media::VideoFrame>& frame);
- void OnErrorOnCaptureThread(media::VideoCapture* capture);
- void OnRemovedOnCaptureThread(media::VideoCapture* capture);
-
- // The id identifies which video capture device is used for this video
- // capture session.
- media::VideoCaptureSessionId session_id_;
- scoped_ptr<VideoCaptureHandle> capture_engine_;
-
- // Accessed on the thread where StartCapture is called.
- bool got_first_frame_;
- bool error_occured_;
-
- // |captured_callback_| is provided to this class in StartCapture and must be
- // valid until StopCapture is called.
- FrameCapturedCallback captured_callback_;
- // |state_callback_| is provided to this class in StartCapture and must be
- // valid until StopCapture is called.
- StateChangeCallback state_callback_;
- // Message loop of the caller of StartCapture.
- scoped_refptr<base::MessageLoopProxy> message_loop_proxy_;
-};
-
-} // namespace content
-
-#endif // CONTENT_RENDERER_MEDIA_RTC_VIDEO_CAPTURE_DELEGATE_H_
diff --git a/content/renderer/media/rtc_video_capturer.cc b/content/renderer/media/rtc_video_capturer.cc
deleted file mode 100644
index 58881c8..0000000
--- a/content/renderer/media/rtc_video_capturer.cc
+++ /dev/null
@@ -1,159 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "content/renderer/media/rtc_video_capturer.h"
-
-#include "base/bind.h"
-#include "base/debug/trace_event.h"
-#include "media/base/video_frame.h"
-
-namespace content {
-
-RtcVideoCapturer::RtcVideoCapturer(const media::VideoCaptureSessionId id,
- bool is_screencast)
- : is_screencast_(is_screencast),
- delegate_(new RtcVideoCaptureDelegate(id)),
- state_(VIDEO_CAPTURE_STATE_STOPPED) {}
-
-RtcVideoCapturer::~RtcVideoCapturer() {
- DCHECK_EQ(state_, VIDEO_CAPTURE_STATE_STOPPED);
- DVLOG(3) << " RtcVideoCapturer::dtor";
-}
-
-cricket::CaptureState RtcVideoCapturer::Start(
- const cricket::VideoFormat& capture_format) {
- DVLOG(3) << " RtcVideoCapturer::Start ";
- if (state_ == VIDEO_CAPTURE_STATE_STARTED) {
- DVLOG(1) << "Got a StartCapture when already started!!! ";
- return cricket::CS_FAILED;
- }
-
- media::VideoCaptureParams request;
- request.allow_resolution_change = is_screencast_;
- request.requested_format = media::VideoCaptureFormat(
- gfx::Size(capture_format.width, capture_format.height),
- capture_format.framerate(),
- media::PIXEL_FORMAT_I420);
-
- SetCaptureFormat(&capture_format);
-
- state_ = VIDEO_CAPTURE_STATE_STARTED;
- first_frame_timestamp_ = media::kNoTimestamp();
- delegate_->StartCapture(
- request,
- base::Bind(&RtcVideoCapturer::OnFrameCaptured, base::Unretained(this)),
- base::Bind(&RtcVideoCapturer::OnStateChange, base::Unretained(this)));
- // Update the desired aspect ratio so that later the video frame can be
- // cropped to meet the requirement if the camera returns a different
- // resolution than the |request|.
- UpdateAspectRatio(capture_format.width, capture_format.height);
- return cricket::CS_STARTING;
-}
-
-void RtcVideoCapturer::Stop() {
- DVLOG(3) << " RtcVideoCapturer::Stop ";
- if (state_ == VIDEO_CAPTURE_STATE_STOPPED) {
- DVLOG(1) << "Got a StopCapture while not started.";
- return;
- }
-
- SetCaptureFormat(NULL);
- state_ = VIDEO_CAPTURE_STATE_STOPPED;
- delegate_->StopCapture();
- SignalStateChange(this, cricket::CS_STOPPED);
-}
-
-bool RtcVideoCapturer::IsRunning() {
- return state_ == VIDEO_CAPTURE_STATE_STARTED;
-}
-
-bool RtcVideoCapturer::GetPreferredFourccs(std::vector<uint32>* fourccs) {
- if (!fourccs)
- return false;
- fourccs->push_back(cricket::FOURCC_I420);
- return true;
-}
-
-bool RtcVideoCapturer::IsScreencast() const {
- return is_screencast_;
-}
-
-bool RtcVideoCapturer::GetBestCaptureFormat(const cricket::VideoFormat& desired,
- cricket::VideoFormat* best_format) {
- if (!best_format) {
- return false;
- }
-
- // Chrome does not support capability enumeration.
- // Use the desired format as the best format.
- best_format->width = desired.width;
- best_format->height = desired.height;
- best_format->fourcc = cricket::FOURCC_I420;
- best_format->interval = desired.interval;
- return true;
-}
-
-void RtcVideoCapturer::OnFrameCaptured(
- const scoped_refptr<media::VideoFrame>& frame) {
- if (first_frame_timestamp_ == media::kNoTimestamp())
- first_frame_timestamp_ = frame->GetTimestamp();
-
- // Currently, |fourcc| is always I420.
- cricket::CapturedFrame captured_frame;
- captured_frame.width = frame->coded_size().width();
- captured_frame.height = frame->coded_size().height();
- captured_frame.fourcc = cricket::FOURCC_I420;
- // cricket::CapturedFrame time is in nanoseconds.
- captured_frame.elapsed_time =
- (frame->GetTimestamp() - first_frame_timestamp_).InMicroseconds() *
- base::Time::kNanosecondsPerMicrosecond;
- captured_frame.time_stamp = frame->GetTimestamp().InMicroseconds() *
- base::Time::kNanosecondsPerMicrosecond;
- // TODO(sheu): we assume contiguous layout of image planes.
- captured_frame.data = frame->data(0);
- captured_frame.data_size =
- media::VideoFrame::AllocationSize(frame->format(), frame->coded_size());
- captured_frame.pixel_height = 1;
- captured_frame.pixel_width = 1;
-
- TRACE_EVENT_INSTANT2(
- "rtc_video_capturer",
- "OnFrameCaptured",
- TRACE_EVENT_SCOPE_THREAD,
- "elapsed time",
- captured_frame.elapsed_time,
- "timestamp_ms",
- captured_frame.time_stamp / talk_base::kNumNanosecsPerMillisec);
-
- // This signals to libJingle that a new VideoFrame is available.
- // libJingle have no assumptions on what thread this signal come from.
- SignalFrameCaptured(this, &captured_frame);
-}
-
-void RtcVideoCapturer::OnStateChange(
- RtcVideoCaptureDelegate::CaptureState state) {
- cricket::CaptureState converted_state = cricket::CS_FAILED;
- DVLOG(3) << " RtcVideoCapturer::OnStateChange " << state;
- switch (state) {
- case RtcVideoCaptureDelegate::CAPTURE_STOPPED:
- converted_state = cricket::CS_STOPPED;
- break;
- case RtcVideoCaptureDelegate::CAPTURE_RUNNING:
- converted_state = cricket::CS_RUNNING;
- break;
- case RtcVideoCaptureDelegate::CAPTURE_FAILED:
- // TODO(perkj): Update the comments in the the definition of
- // cricket::CS_FAILED. According to the comments, cricket::CS_FAILED
- // means that the capturer failed to start. But here and in libjingle it
- // is also used if an error occur during capturing.
- converted_state = cricket::CS_FAILED;
- break;
- default:
- NOTREACHED();
- break;
- }
- SignalStateChange(this, converted_state);
-}
-
-} // namespace content
diff --git a/content/renderer/media/rtc_video_capturer.h b/content/renderer/media/rtc_video_capturer.h
deleted file mode 100644
index 20b01a6..0000000
--- a/content/renderer/media/rtc_video_capturer.h
+++ /dev/null
@@ -1,57 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CONTENT_RENDERER_MEDIA_RTC_VIDEO_CAPTURER_H_
-#define CONTENT_RENDERER_MEDIA_RTC_VIDEO_CAPTURER_H_
-
-#include <vector>
-
-#include "base/compiler_specific.h"
-#include "content/renderer/media/rtc_video_capture_delegate.h"
-#include "third_party/libjingle/source/talk/media/base/videocapturer.h"
-
-namespace content {
-
-// RtcVideoCapturer implements a simple cricket::VideoCapturer that is used for
-// VideoCapturing in libJingle and especially in PeerConnections.
-// The class is created and destroyed on the main render thread.
-// PeerConnection access cricket::VideoCapturer from a libJingle worker thread.
-// The video frames are delivered in OnFrameCaptured on a thread owned by
-// Chrome's video capture implementation.
-class RtcVideoCapturer
- : public cricket::VideoCapturer {
- public:
- RtcVideoCapturer(const media::VideoCaptureSessionId id,
- bool is_screencast);
- virtual ~RtcVideoCapturer();
-
- // cricket::VideoCapturer implementation.
- // These methods are accessed from a libJingle worker thread.
- virtual cricket::CaptureState Start(
- const cricket::VideoFormat& capture_format) OVERRIDE;
- virtual void Stop() OVERRIDE;
- virtual bool IsRunning() OVERRIDE;
- virtual bool GetPreferredFourccs(std::vector<uint32>* fourccs) OVERRIDE;
- virtual bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
- cricket::VideoFormat* best_format) OVERRIDE;
- virtual bool IsScreencast() const OVERRIDE;
-
- private:
- // Frame captured callback method.
- virtual void OnFrameCaptured(const scoped_refptr<media::VideoFrame>& frame);
-
- // State change callback, must be called on same thread as Start is called.
- void OnStateChange(RtcVideoCaptureDelegate::CaptureState state);
-
- const bool is_screencast_;
- scoped_refptr<RtcVideoCaptureDelegate> delegate_;
- VideoCaptureState state_;
- base::TimeDelta first_frame_timestamp_;
-
- DISALLOW_COPY_AND_ASSIGN(RtcVideoCapturer);
-};
-
-} // namespace content
-
-#endif // CONTENT_RENDERER_MEDIA_RTC_VIDEO_CAPTURER_H_
diff --git a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc
new file mode 100644
index 0000000..bf7f8dd
--- /dev/null
+++ b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc
@@ -0,0 +1,121 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
+
+#include "base/bind.h"
+#include "base/debug/trace_event.h"
+#include "media/base/video_frame.h"
+
+namespace content {
+
+WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast)
+ : is_screencast_(is_screencast),
+ running_(false) {
+}
+
+WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() {
+ DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor";
+}
+
+void WebRtcVideoCapturerAdapter::SetRequestedFormat(
+ const media::VideoCaptureFormat& format) {
+ DCHECK_EQ(media::PIXEL_FORMAT_I420, format.pixel_format);
+ DVLOG(3) << "WebRtcVideoCapturerAdapter::SetRequestedFormat"
+ << " w = " << format.frame_size.width()
+ << " h = " << format.frame_size.height();
+ cricket::VideoFormat supported_format(format.frame_size.width(),
+ format.frame_size.height(),
+ cricket::VideoFormat::FpsToInterval(
+ format.frame_rate),
+ cricket::FOURCC_I420);
+ SetCaptureFormat(&supported_format);
+
+ // Update the desired aspect ratio so that later the video frame can be
+ // cropped to meet the requirement if the camera returns a different
+ // resolution than the requested |format|.
+ UpdateAspectRatio(format.frame_size.width(), format.frame_size.height());
+}
+
+cricket::CaptureState WebRtcVideoCapturerAdapter::Start(
+ const cricket::VideoFormat& capture_format) {
+ DCHECK(!running_);
+ DVLOG(3) << " WebRtcVideoCapturerAdapter::Start w = " << capture_format.width
+ << " h = " << capture_format.height;
+
+ running_ = true;
+ return cricket::CS_RUNNING;
+}
+
+void WebRtcVideoCapturerAdapter::Stop() {
+ DVLOG(3) << " WebRtcVideoCapturerAdapter::Stop ";
+ DCHECK(running_);
+ running_ = false;
+ SetCaptureFormat(NULL);
+ SignalStateChange(this, cricket::CS_STOPPED);
+}
+
+bool WebRtcVideoCapturerAdapter::IsRunning() {
+ return running_;
+}
+
+bool WebRtcVideoCapturerAdapter::GetPreferredFourccs(
+ std::vector<uint32>* fourccs) {
+ if (!fourccs)
+ return false;
+ fourccs->push_back(cricket::FOURCC_I420);
+ return true;
+}
+
+bool WebRtcVideoCapturerAdapter::IsScreencast() const {
+ return is_screencast_;
+}
+
+bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat(
+ const cricket::VideoFormat& desired,
+ cricket::VideoFormat* best_format) {
+ DVLOG(3) << " GetBestCaptureFormat:: "
+ << " w = " << desired.width
+ << " h = " << desired.height;
+
+ // Capability enumeration is done in MediaStreamVideoSource. The adapter can
+ // just use what is provided.
+ // Use the desired format as the best format.
+ best_format->width = desired.width;
+ best_format->height = desired.height;
+ best_format->fourcc = cricket::FOURCC_I420;
+ best_format->interval = desired.interval;
+ return true;
+}
+
+void WebRtcVideoCapturerAdapter::OnFrameCaptured(
+ const scoped_refptr<media::VideoFrame>& frame) {
+ DCHECK_EQ(media::VideoFrame::I420, frame->format());
+ if (first_frame_timestamp_ == media::kNoTimestamp())
+ first_frame_timestamp_ = frame->GetTimestamp();
+
+ // Currently, |fourcc| is always I420.
+ cricket::CapturedFrame captured_frame;
+ captured_frame.width = frame->coded_size().width();
+ captured_frame.height = frame->coded_size().height();
+ captured_frame.fourcc = cricket::FOURCC_I420;
+ // cricket::CapturedFrame time is in nanoseconds.
+ captured_frame.elapsed_time =
+ (frame->GetTimestamp() - first_frame_timestamp_).InMicroseconds() *
+ base::Time::kNanosecondsPerMicrosecond;
+ captured_frame.time_stamp = frame->GetTimestamp().InMicroseconds() *
+ base::Time::kNanosecondsPerMicrosecond;
+ // TODO(sheu): we assume contiguous layout of image planes.
+ captured_frame.data = frame->data(0);
+ captured_frame.data_size =
+ media::VideoFrame::AllocationSize(frame->format(), frame->coded_size());
+ captured_frame.pixel_height = 1;
+ captured_frame.pixel_width = 1;
+
+ // This signals to libJingle that a new VideoFrame is available.
+ // libJingle has no assumptions about which thread this signal comes from.
+ SignalFrameCaptured(this, &captured_frame);
+}
+
+} // namespace content
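
OnFrameCaptured above translates media::VideoFrame timestamps (base::TimeDelta) into the nanosecond fields of cricket::CapturedFrame by going through microseconds. The arithmetic, isolated with a worked example; ElapsedTimeNs is a hypothetical helper.

// Illustrative sketch only (not part of this change).
// cricket::CapturedFrame carries nanoseconds; VideoFrame timestamps are
// base::TimeDelta, so the value is converted via microseconds.
#include "base/basictypes.h"
#include "base/time/time.h"

int64 ElapsedTimeNs(base::TimeDelta first_frame_timestamp,
                    base::TimeDelta frame_timestamp) {
  return (frame_timestamp - first_frame_timestamp).InMicroseconds() *
         base::Time::kNanosecondsPerMicrosecond;
}
// Example: first frame at 1000 ms, current frame at 1033 ms (~30 fps)
// gives 33,000 us * 1000 = 33,000,000 ns.
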
diff --git a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.h b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.h
new file mode 100644
index 0000000..6ef2b36
--- /dev/null
+++ b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.h
@@ -0,0 +1,57 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_RENDERER_MEDIA_WEBRTC_WEBRTC_VIDEO_CAPTURER_ADAPTER_H_
+#define CONTENT_RENDERER_MEDIA_WEBRTC_WEBRTC_VIDEO_CAPTURER_ADAPTER_H_
+
+#include "base/compiler_specific.h"
+#include "content/common/content_export.h"
+#include "media/base/video_frame.h"
+#include "media/video/capture/video_capture_types.h"
+#include "third_party/libjingle/source/talk/media/base/videocapturer.h"
+
+namespace content {
+
+// WebRtcVideoCapturerAdapter implements a simple cricket::VideoCapturer that is
+// used for VideoCapturing in libJingle and especially in PeerConnections.
+// The class is created and destroyed on the main render thread.
+// PeerConnections access cricket::VideoCapturer from a libJingle worker thread.
+class CONTENT_EXPORT WebRtcVideoCapturerAdapter
+ : NON_EXPORTED_BASE(public cricket::VideoCapturer) {
+ public:
+ explicit WebRtcVideoCapturerAdapter(bool is_screencast);
+ virtual ~WebRtcVideoCapturerAdapter();
+
+ // Sets the requested format. cricket::VideoCapturer may try to scale or
+ // crop to this format if the frame delivered in OnFrameCaptured is not in
+ // this format.
+ // This method is virtual for testing purposes.
+ virtual void SetRequestedFormat(const media::VideoCaptureFormat& format);
+
+ // This method is virtual for testing purposes.
+ virtual void OnFrameCaptured(const scoped_refptr<media::VideoFrame>& frame);
+
+ private:
+ // cricket::VideoCapturer implementation.
+ // These methods are accessed from a libJingle worker thread.
+ virtual cricket::CaptureState Start(
+ const cricket::VideoFormat& capture_format) OVERRIDE;
+ virtual void Stop() OVERRIDE;
+ virtual bool IsRunning() OVERRIDE;
+ virtual bool GetPreferredFourccs(std::vector<uint32>* fourccs) OVERRIDE;
+ virtual bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
+ cricket::VideoFormat* best_format) OVERRIDE;
+ virtual bool IsScreencast() const OVERRIDE;
+
+ private:
+ const bool is_screencast_;
+ bool running_;
+ base::TimeDelta first_frame_timestamp_;
+
+ DISALLOW_COPY_AND_ASSIGN(WebRtcVideoCapturerAdapter);
+};
+
+} // namespace content
+
+#endif // CONTENT_RENDERER_MEDIA_WEBRTC_WEBRTC_VIDEO_CAPTURER_ADAPTER_H_
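
Putting the pieces together, the factory signatures changed in this CL (CreateVideoCapturer(bool) returning a WebRtcVideoCapturerAdapter* and CreateVideoSource(cricket::VideoCapturer*, const blink::WebMediaConstraints&)) suggest roughly the following wiring of the adapter into a webrtc::VideoSourceInterface. This is a sketch inferred from the visible interfaces, not the actual MediaStreamVideoSource::InitAdapter implementation, and InitAdapterSketch is a hypothetical name.

// Illustrative sketch only (not part of this change): plausible wiring of
// WebRtcVideoCapturerAdapter into a webrtc::VideoSourceInterface, based on
// the MediaStreamDependencyFactory signatures shown above.
#include "content/renderer/media/media_stream_dependency_factory.h"
#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"

void InitAdapterSketch(
    content::MediaStreamDependencyFactory* factory,
    bool is_screencast,
    const media::VideoCaptureFormat& format,
    const blink::WebMediaConstraints& constraints,
    content::WebRtcVideoCapturerAdapter** capture_adapter,
    scoped_refptr<webrtc::VideoSourceInterface>* adapter) {
  // The factory creates the capturer so tests can substitute a mock
  // (see MockRtcVideoCapturer in mock_media_stream_dependency_factory.cc).
  *capture_adapter = factory->CreateVideoCapturer(is_screencast);
  // Tell the adapter which format the Chrome capture pipeline will deliver.
  (*capture_adapter)->SetRequestedFormat(format);
  // Wrap the capturer in a libjingle video source that applies |constraints|.
  *adapter = factory->CreateVideoSource(*capture_adapter, constraints);
}
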