author    perkj@chromium.org <perkj@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2012-10-24 18:11:37 +0000
committer perkj@chromium.org <perkj@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2012-10-24 18:11:37 +0000
commit    948d8155a1a31d8dfa534cfdc4ba51b2036c3827 (patch)
tree      5993731043ff52914f36c08684a9e571d659531c
parent    06eaeb44ab3115e27b90f1fdec5572885c841b85 (diff)
Hook up constraints to getUserMedia.
Ensure that Chrome waits for the video source to start. Switch Chrome to use libjingle for local rendering.

BUG= webrtc issue tracker
     http://code.google.com/p/webrtc/issues/detail?id=377
     http://code.google.com/p/webrtc/issues/detail?id=862
TEST= Use https://webrtc-demos.appspot.com/html/constraints-and-stats.html to
      see that you can control the video frame size. Inspect the video element
      and see that the video element size can be VGA or larger.
      Note that at the moment the minimum constraints are not enforced; e.g. if
      the camera outputs something smaller than the minimum constraint,
      GetUserMedia still succeeds.

Review URL: https://chromiumcodereview.appspot.com/11188035

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@163869 0039d316-1c4b-4281-b951-d872f2087c98
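At a glance, the new renderer-side flow added by this patch is: create the libjingle sources under the requested constraints, wait for them to start, and only then build the native MediaStream. The minimal sketch below strings together the two factory methods this CL introduces; the free functions, variable names and surrounding wiring are illustrative only (in the real code this logic lives in MediaStreamImpl::OnStreamGenerated and OnCreateNativeSourcesComplete).

#include "base/bind.h"
#include "content/renderer/media/media_stream_dependency_factory.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaConstraints.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaStreamDescriptor.h"

namespace {

// Runs once every source has either become live or failed. (Hypothetical free
// function; the real counterpart is MediaStreamImpl::OnCreateNativeSourcesComplete.)
void OnSourcesCreated(content::MediaStreamDependencyFactory* factory,
                      WebKit::WebMediaStreamDescriptor* description,
                      bool live) {
  if (!live)
    return;  // A source failed to start, so the getUserMedia request fails.
  // Only now is it valid to build the native stream; the factory DCHECKs that
  // the PeerConnection factory exists and skips sources without a native
  // video source.
  factory->CreateNativeLocalMediaStream(description);
}

// Hypothetical entry point; the real caller is MediaStreamImpl::OnStreamGenerated.
void CreateStreamWhenSourcesAreLive(
    content::MediaStreamDependencyFactory* factory,
    const WebKit::WebMediaConstraints& audio_constraints,
    const WebKit::WebMediaConstraints& video_constraints,
    WebKit::WebMediaStreamDescriptor* description) {
  // Step 1: create the libjingle sources under the given constraints; the
  // callback fires once all of them have left the initializing state.
  factory->CreateNativeMediaSources(
      audio_constraints, video_constraints, description,
      base::Bind(&OnSourcesCreated, factory));
}

}  // namespace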
-rw-r--r--content/content_renderer.gypi4
-rw-r--r--content/content_tests.gypi1
-rw-r--r--content/renderer/media/capture_video_decoder.cc274
-rw-r--r--content/renderer/media/capture_video_decoder.h112
-rw-r--r--content/renderer/media/capture_video_decoder_unittest.cc233
-rw-r--r--content/renderer/media/media_stream_dependency_factory.cc175
-rw-r--r--content/renderer/media/media_stream_dependency_factory.h37
-rw-r--r--content/renderer/media/media_stream_dependency_factory_unittest.cc57
-rw-r--r--content/renderer/media/media_stream_impl.cc277
-rw-r--r--content/renderer/media/media_stream_impl.h62
-rw-r--r--content/renderer/media/media_stream_impl_unittest.cc54
-rw-r--r--content/renderer/media/media_stream_source_extra_data.h13
-rw-r--r--content/renderer/media/mock_media_stream_dependency_factory.cc73
-rw-r--r--content/renderer/media/mock_media_stream_dependency_factory.h49
-rw-r--r--content/renderer/media/peer_connection_handler_jsep_unittest.cc4
-rw-r--r--content/renderer/media/rtc_media_constraints.cc62
-rw-r--r--content/renderer/media/rtc_media_constraints.h36
-rw-r--r--content/renderer/media/rtc_peer_connection_handler.cc41
-rw-r--r--content/renderer/media/rtc_peer_connection_handler_unittest.cc4
19 files changed, 666 insertions, 902 deletions
diff --git a/content/content_renderer.gypi b/content/content_renderer.gypi
index 5b64195..35e6fae 100644
--- a/content/content_renderer.gypi
+++ b/content/content_renderer.gypi
@@ -120,8 +120,6 @@
'renderer/media/audio_message_filter.h',
'renderer/media/audio_renderer_mixer_manager.cc',
'renderer/media/audio_renderer_mixer_manager.h',
- 'renderer/media/capture_video_decoder.cc',
- 'renderer/media/capture_video_decoder.h',
'renderer/media/local_video_capture.cc',
'renderer/media/local_video_capture.h',
'renderer/media/media_stream_center.h',
@@ -139,6 +137,8 @@
'renderer/media/renderer_gpu_video_decoder_factories.h',
'renderer/media/renderer_webaudiodevice_impl.cc',
'renderer/media/renderer_webaudiodevice_impl.h',
+ 'renderer/media/rtc_media_constraints.cc',
+ 'renderer/media/rtc_media_constraints.h',
'renderer/media/rtc_video_decoder.cc',
'renderer/media/rtc_video_decoder.h',
'renderer/media/rtc_video_renderer.cc',
diff --git a/content/content_tests.gypi b/content/content_tests.gypi
index 8bba0ce..93fa630 100644
--- a/content/content_tests.gypi
+++ b/content/content_tests.gypi
@@ -355,7 +355,6 @@
'renderer/hyphenator/hyphenator_unittest.cc',
'renderer/media/audio_message_filter_unittest.cc',
'renderer/media/audio_renderer_mixer_manager_unittest.cc',
- 'renderer/media/capture_video_decoder_unittest.cc',
'renderer/media/video_capture_impl_unittest.cc',
'renderer/media/video_capture_message_filter_unittest.cc',
'renderer/paint_aggregator_unittest.cc',
diff --git a/content/renderer/media/capture_video_decoder.cc b/content/renderer/media/capture_video_decoder.cc
deleted file mode 100644
index 23dcc4a..0000000
--- a/content/renderer/media/capture_video_decoder.cc
+++ /dev/null
@@ -1,274 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "content/renderer/media/capture_video_decoder.h"
-
-#include "base/bind.h"
-#include "base/callback_helpers.h"
-#include "content/renderer/media/video_capture_impl_manager.h"
-#include "media/base/demuxer_stream.h"
-#include "media/base/limits.h"
-#include "media/base/video_util.h"
-
-using media::CopyYPlane;
-using media::CopyUPlane;
-using media::CopyVPlane;
-
-namespace content {
-
-CaptureVideoDecoder::CaptureVideoDecoder(
- base::MessageLoopProxy* message_loop_proxy,
- media::VideoCaptureSessionId video_stream_id,
- VideoCaptureImplManager* vc_manager,
- const media::VideoCaptureCapability& capability)
- : message_loop_proxy_(message_loop_proxy),
- vc_manager_(vc_manager),
- capability_(capability),
- natural_size_(capability.width, capability.height),
- state_(kUnInitialized),
- got_first_frame_(false),
- video_stream_id_(video_stream_id),
- capture_engine_(NULL) {
- DCHECK(vc_manager);
-}
-
-void CaptureVideoDecoder::Initialize(
- const scoped_refptr<media::DemuxerStream>& stream,
- const media::PipelineStatusCB& status_cb,
- const media::StatisticsCB& statistics_cb) {
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&CaptureVideoDecoder::InitializeOnDecoderThread,
- this, stream, status_cb, statistics_cb));
-}
-
-void CaptureVideoDecoder::Read(const ReadCB& read_cb) {
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&CaptureVideoDecoder::ReadOnDecoderThread,
- this, read_cb));
-}
-
-void CaptureVideoDecoder::Reset(const base::Closure& closure) {
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&CaptureVideoDecoder::ResetOnDecoderThread, this, closure));
-}
-
-void CaptureVideoDecoder::Stop(const base::Closure& closure) {
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&CaptureVideoDecoder::StopOnDecoderThread, this, closure));
-}
-
-void CaptureVideoDecoder::OnStarted(media::VideoCapture* capture) {
- NOTIMPLEMENTED();
-}
-
-void CaptureVideoDecoder::OnStopped(media::VideoCapture* capture) {
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&CaptureVideoDecoder::OnStoppedOnDecoderThread,
- this, capture));
-}
-
-void CaptureVideoDecoder::OnPaused(media::VideoCapture* capture) {
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&CaptureVideoDecoder::OnPausedOnDecoderThread,
- this, capture));
-}
-
-void CaptureVideoDecoder::OnError(media::VideoCapture* capture,
- int error_code) {
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&CaptureVideoDecoder::OnPausedOnDecoderThread,
- this, capture));
-}
-
-void CaptureVideoDecoder::OnRemoved(media::VideoCapture* capture) {
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&CaptureVideoDecoder::OnRemovedOnDecoderThread,
- this, capture));
-}
-
-void CaptureVideoDecoder::OnBufferReady(
- media::VideoCapture* capture,
- scoped_refptr<media::VideoCapture::VideoFrameBuffer> buf) {
- DCHECK(buf);
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&CaptureVideoDecoder::OnBufferReadyOnDecoderThread,
- this, capture, buf));
-}
-
-void CaptureVideoDecoder::OnDeviceInfoReceived(
- media::VideoCapture* capture,
- const media::VideoCaptureParams& device_info) {
- message_loop_proxy_->PostTask(
- FROM_HERE,
- base::Bind(&CaptureVideoDecoder::OnDeviceInfoReceivedOnDecoderThread,
- this, capture, device_info));
-}
-
-CaptureVideoDecoder::~CaptureVideoDecoder() {}
-
-void CaptureVideoDecoder::InitializeOnDecoderThread(
- const scoped_refptr<media::DemuxerStream>& /* stream */,
- const media::PipelineStatusCB& status_cb,
- const media::StatisticsCB& statistics_cb) {
- DVLOG(1) << "InitializeOnDecoderThread";
- DCHECK(message_loop_proxy_->BelongsToCurrentThread());
-
- capture_engine_ = vc_manager_->AddDevice(video_stream_id_, this);
-
- statistics_cb_ = statistics_cb;
- status_cb.Run(media::PIPELINE_OK);
- state_ = kNormal;
- capture_engine_->StartCapture(this, capability_);
- AddRef(); // Will be balanced in OnRemoved().
-}
-
-void CaptureVideoDecoder::ReadOnDecoderThread(const ReadCB& read_cb) {
- DCHECK_NE(state_, kUnInitialized);
- DCHECK(message_loop_proxy_->BelongsToCurrentThread());
- CHECK(read_cb_.is_null());
- read_cb_ = read_cb;
- if (state_ == kPaused || state_ == kStopped) {
- DeliverFrame(media::VideoFrame::CreateEmptyFrame());
- }
-}
-
-void CaptureVideoDecoder::ResetOnDecoderThread(const base::Closure& closure) {
- DVLOG(1) << "ResetOnDecoderThread";
- DCHECK(message_loop_proxy_->BelongsToCurrentThread());
- if (!read_cb_.is_null()) {
- scoped_refptr<media::VideoFrame> video_frame =
- media::VideoFrame::CreateBlackFrame(natural_size_);
- DeliverFrame(video_frame);
- }
- closure.Run();
-}
-
-void CaptureVideoDecoder::StopOnDecoderThread(const base::Closure& closure) {
- DVLOG(1) << "StopOnDecoderThread";
- DCHECK(message_loop_proxy_->BelongsToCurrentThread());
-
- if (!read_cb_.is_null())
- DeliverFrame(media::VideoFrame::CreateEmptyFrame());
-
- if (!closure.is_null())
- closure.Run();
-
- if (state_ != kNormal) {
- // Do nothing when this decoder is already stopped or in error state.
- return;
- }
-
- state_ = kStopped;
-
- capture_engine_->StopCapture(this);
-}
-
-void CaptureVideoDecoder::OnStoppedOnDecoderThread(
- media::VideoCapture* capture) {
- DVLOG(1) << "OnStoppedOnDecoderThread";
- DCHECK(message_loop_proxy_->BelongsToCurrentThread());
-}
-
-void CaptureVideoDecoder::OnRemovedOnDecoderThread(
- media::VideoCapture* capture) {
- DVLOG(1) << "OnRemovedOnDecoderThread";
- DCHECK(message_loop_proxy_->BelongsToCurrentThread());
- vc_manager_->RemoveDevice(video_stream_id_, this);
- Release(); // Balance the AddRef() in InitializeOnDecoderThread().
-}
-
-void CaptureVideoDecoder::OnPausedOnDecoderThread(
- media::VideoCapture* capture) {
- DVLOG(1) << "OnPausedOnDecoderThread";
- DCHECK(message_loop_proxy_->BelongsToCurrentThread());
- state_ = kPaused;
- if (!read_cb_.is_null()) {
- DeliverFrame(media::VideoFrame::CreateEmptyFrame());
- }
-}
-
-void CaptureVideoDecoder::OnDeviceInfoReceivedOnDecoderThread(
- media::VideoCapture* capture,
- const media::VideoCaptureParams& device_info) {
- DVLOG(1) << "OnDeviceInfoReceivedOnDecoderThread";
- DCHECK(message_loop_proxy_->BelongsToCurrentThread());
- if (device_info.width != natural_size_.width() ||
- device_info.height != natural_size_.height()) {
- natural_size_.SetSize(device_info.width, device_info.height);
- }
-}
-
-void CaptureVideoDecoder::OnBufferReadyOnDecoderThread(
- media::VideoCapture* capture,
- scoped_refptr<media::VideoCapture::VideoFrameBuffer> buf) {
- DCHECK(message_loop_proxy_->BelongsToCurrentThread());
-
- if (read_cb_.is_null() || kNormal != state_) {
- // TODO(wjia): revisit TS adjustment when crbug.com/111672 is resolved.
- if (got_first_frame_) {
- start_time_ += buf->timestamp - last_frame_timestamp_;
- }
- last_frame_timestamp_ = buf->timestamp;
- capture->FeedBuffer(buf);
- return;
- }
-
- // TODO(wjia): should we always expect device to send device info before
- // any buffer, and buffers should have dimension stated in device info?
- // Or should we be flexible as in following code?
- if (buf->width != natural_size_.width() ||
- buf->height != natural_size_.height()) {
- natural_size_.SetSize(buf->width, buf->height);
- }
-
- // Need to rebase timestamp with zero as starting point.
- if (!got_first_frame_) {
- start_time_ = buf->timestamp;
- got_first_frame_ = true;
- }
-
- // Always allocate a new frame.
- //
- // TODO(scherkus): migrate this to proper buffer recycling.
- scoped_refptr<media::VideoFrame> video_frame =
- media::VideoFrame::CreateFrame(media::VideoFrame::YV12,
- natural_size_, natural_size_,
- buf->timestamp - start_time_);
-
- last_frame_timestamp_ = buf->timestamp;
- uint8* buffer = buf->memory_pointer;
-
- // Assume YV12 format. Note that camera gives YUV and media pipeline video
- // renderer asks for YVU. The following code did the conversion.
- DCHECK_EQ(capability_.color, media::VideoCaptureCapability::kI420);
- int y_width = buf->width;
- int y_height = buf->height;
- int uv_width = buf->width / 2;
- int uv_height = buf->height / 2; // YV12 format.
- CopyYPlane(buffer, y_width, y_height, video_frame);
- buffer += y_width * y_height;
- CopyUPlane(buffer, uv_width, uv_height, video_frame);
- buffer += uv_width * uv_height;
- CopyVPlane(buffer, uv_width, uv_height, video_frame);
-
- DeliverFrame(video_frame);
- capture->FeedBuffer(buf);
-}
-
-void CaptureVideoDecoder::DeliverFrame(
- const scoped_refptr<media::VideoFrame>& video_frame) {
- // Reset the callback before running to protect against reentrancy.
- base::ResetAndReturn(&read_cb_).Run(kOk, video_frame);
-}
-
-} // namespace content
diff --git a/content/renderer/media/capture_video_decoder.h b/content/renderer/media/capture_video_decoder.h
deleted file mode 100644
index 1a47fe0..0000000
--- a/content/renderer/media/capture_video_decoder.h
+++ /dev/null
@@ -1,112 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CONTENT_RENDERER_MEDIA_CAPTURE_VIDEO_DECODER_H_
-#define CONTENT_RENDERER_MEDIA_CAPTURE_VIDEO_DECODER_H_
-
-#include "base/time.h"
-#include "content/common/content_export.h"
-#include "media/base/pipeline_status.h"
-#include "media/base/video_decoder.h"
-#include "media/video/capture/video_capture.h"
-#include "media/video/capture/video_capture_types.h"
-
-namespace base {
-class MessageLoopProxy;
-}
-namespace media {
-class VideoFrame;
-}
-
-namespace content {
-class VideoCaptureImplManager;
-
-// A filter takes raw frames from video capture engine and passes them to media
-// engine as a video decoder filter.
-class CONTENT_EXPORT CaptureVideoDecoder
- : public media::VideoDecoder,
- public media::VideoCapture::EventHandler {
- public:
- CaptureVideoDecoder(
- base::MessageLoopProxy* message_loop_proxy,
- media::VideoCaptureSessionId video_stream_id,
- VideoCaptureImplManager* vc_manager,
- const media::VideoCaptureCapability& capability);
-
- // media::VideoDecoder implementation.
- virtual void Initialize(const scoped_refptr<media::DemuxerStream>& stream,
- const media::PipelineStatusCB& status_cb,
- const media::StatisticsCB& statistics_cb) OVERRIDE;
- virtual void Read(const ReadCB& read_cb) OVERRIDE;
- virtual void Reset(const base::Closure& closure) OVERRIDE;
- virtual void Stop(const base::Closure& closure) OVERRIDE;
-
- // VideoCapture::EventHandler implementation.
- virtual void OnStarted(media::VideoCapture* capture) OVERRIDE;
- virtual void OnStopped(media::VideoCapture* capture) OVERRIDE;
- virtual void OnPaused(media::VideoCapture* capture) OVERRIDE;
- virtual void OnError(media::VideoCapture* capture, int error_code) OVERRIDE;
- virtual void OnRemoved(media::VideoCapture* capture) OVERRIDE;
- virtual void OnBufferReady(
- media::VideoCapture* capture,
- scoped_refptr<media::VideoCapture::VideoFrameBuffer> buf) OVERRIDE;
- virtual void OnDeviceInfoReceived(
- media::VideoCapture* capture,
- const media::VideoCaptureParams& device_info) OVERRIDE;
-
- protected:
- virtual ~CaptureVideoDecoder();
-
- private:
- friend class CaptureVideoDecoderTest;
-
- enum DecoderState {
- kUnInitialized,
- kNormal,
- kStopped,
- kPaused
- };
-
- void InitializeOnDecoderThread(
- const scoped_refptr<media::DemuxerStream>& stream,
- const media::PipelineStatusCB& status_cb,
- const media::StatisticsCB& statistics_cb);
- void ReadOnDecoderThread(const ReadCB& read_cb);
- void ResetOnDecoderThread(const base::Closure& closure);
- void StopOnDecoderThread(const base::Closure& closure);
- void PrepareForShutdownHackOnDecoderThread();
-
- void OnStoppedOnDecoderThread(media::VideoCapture* capture);
- void OnRemovedOnDecoderThread(media::VideoCapture* capture);
- void OnPausedOnDecoderThread(media::VideoCapture* capture);
- void OnBufferReadyOnDecoderThread(
- media::VideoCapture* capture,
- scoped_refptr<media::VideoCapture::VideoFrameBuffer> buf);
- void OnDeviceInfoReceivedOnDecoderThread(
- media::VideoCapture* capture,
- const media::VideoCaptureParams& device_info);
-
- // Delivers the frame to |read_cb_| and resets the callback.
- void DeliverFrame(const scoped_refptr<media::VideoFrame>& video_frame);
-
- scoped_refptr<base::MessageLoopProxy> message_loop_proxy_;
- scoped_refptr<VideoCaptureImplManager> vc_manager_;
- media::VideoCaptureCapability capability_;
- gfx::Size natural_size_;
- DecoderState state_;
- bool got_first_frame_;
- ReadCB read_cb_;
- media::StatisticsCB statistics_cb_;
-
- media::VideoCaptureSessionId video_stream_id_;
- media::VideoCapture* capture_engine_;
- base::Time last_frame_timestamp_;
- base::Time start_time_;
-
- DISALLOW_COPY_AND_ASSIGN(CaptureVideoDecoder);
-};
-
-} // namespace content
-
-#endif // CONTENT_RENDERER_MEDIA_CAPTURE_VIDEO_DECODER_H_
diff --git a/content/renderer/media/capture_video_decoder_unittest.cc b/content/renderer/media/capture_video_decoder_unittest.cc
deleted file mode 100644
index 512a06d..0000000
--- a/content/renderer/media/capture_video_decoder_unittest.cc
+++ /dev/null
@@ -1,233 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "base/bind.h"
-#include "content/common/child_process.h"
-#include "content/renderer/media/capture_video_decoder.h"
-#include "content/renderer/media/video_capture_impl.h"
-#include "content/renderer/media/video_capture_impl_manager.h"
-#include "media/base/limits.h"
-#include "media/base/mock_callback.h"
-#include "media/base/mock_filters.h"
-#include "media/base/pipeline_status.h"
-#include "media/video/capture/video_capture_types.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-using ::testing::_;
-using ::testing::AnyNumber;
-using ::testing::Return;
-using ::testing::StrictMock;
-
-static const int kWidth = 176;
-static const int kHeight = 144;
-static const int kFPS = 30;
-static const media::VideoCaptureSessionId kVideoStreamId = 1;
-
-namespace content {
-
-ACTION(DeleteDataBuffer) {
- delete[] arg0->memory_pointer;
-}
-
-ACTION_P2(CaptureStopped, decoder, vc_impl) {
- decoder->OnStopped(vc_impl);
- decoder->OnRemoved(vc_impl);
-}
-
-MATCHER_P2(HasSize, width, height, "") {
- EXPECT_EQ(arg->data_size().width(), width);
- EXPECT_EQ(arg->data_size().height(), height);
- EXPECT_EQ(arg->natural_size().width(), width);
- EXPECT_EQ(arg->natural_size().height(), height);
- return (arg->data_size().width() == width) &&
- (arg->data_size().height() == height) &&
- (arg->natural_size().width() == width) &&
- (arg->natural_size().height() == height);
-}
-
-class MockVideoCaptureImpl : public VideoCaptureImpl {
- public:
- MockVideoCaptureImpl(const media::VideoCaptureSessionId id,
- scoped_refptr<base::MessageLoopProxy> ml_proxy,
- VideoCaptureMessageFilter* filter)
- : VideoCaptureImpl(id, ml_proxy, filter) {
- }
-
- MOCK_METHOD2(StartCapture,
- void(media::VideoCapture::EventHandler* handler,
- const media::VideoCaptureCapability& capability));
- MOCK_METHOD1(StopCapture, void(media::VideoCapture::EventHandler* handler));
- MOCK_METHOD1(FeedBuffer, void(scoped_refptr<VideoFrameBuffer> buffer));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockVideoCaptureImpl);
-};
-
-class MockVideoCaptureImplManager : public VideoCaptureImplManager {
- public:
- MockVideoCaptureImplManager() {}
-
- MOCK_METHOD2(AddDevice,
- media::VideoCapture*(media::VideoCaptureSessionId id,
- media::VideoCapture::EventHandler* handler));
- MOCK_METHOD2(RemoveDevice,
- void(media::VideoCaptureSessionId id,
- media::VideoCapture::EventHandler* handler));
-
- protected:
- virtual ~MockVideoCaptureImplManager() {}
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockVideoCaptureImplManager);
-};
-
-class CaptureVideoDecoderTest : public ::testing::Test {
- protected:
- CaptureVideoDecoderTest() {
- message_loop_.reset(new MessageLoop(MessageLoop::TYPE_IO));
- message_loop_proxy_ =
- base::MessageLoopProxy::current().get();
- vc_manager_ = new MockVideoCaptureImplManager();
- media::VideoCaptureCapability capability;
- capability.width = kWidth;
- capability.height = kHeight;
- capability.frame_rate = kFPS;
- capability.expected_capture_delay = 0;
- capability.color = media::VideoCaptureCapability::kI420;
- capability.interlaced = false;
-
- decoder_ = new CaptureVideoDecoder(message_loop_proxy_,
- kVideoStreamId, vc_manager_, capability);
- EXPECT_CALL(statistics_cb_object_, OnStatistics(_))
- .Times(AnyNumber());
-
- read_cb_ = base::Bind(&CaptureVideoDecoderTest::FrameReady,
- base::Unretained(this));
-
- child_process_.reset(new ChildProcess());
- vc_impl_.reset(new MockVideoCaptureImpl(
- kVideoStreamId, message_loop_proxy_, new VideoCaptureMessageFilter()));
- }
-
- virtual ~CaptureVideoDecoderTest() {
- message_loop_->RunAllPending();
- }
-
- media::StatisticsCB NewStatisticsCB() {
- return base::Bind(&media::MockStatisticsCB::OnStatistics,
- base::Unretained(&statistics_cb_object_));
- }
-
- void Initialize() {
- EXPECT_CALL(*vc_manager_, AddDevice(_, _))
- .WillOnce(Return(vc_impl_.get()));
- EXPECT_CALL(*vc_impl_, StartCapture(capture_client(), _));
- decoder_->Initialize(NULL,
- media::NewExpectedStatusCB(media::PIPELINE_OK),
- NewStatisticsCB());
-
- EXPECT_CALL(*this, FrameReady(media::VideoDecoder::kOk,
- HasSize(kWidth, kHeight)));
- decoder_->Read(read_cb_);
- SendBufferToDecoder(gfx::Size(kWidth, kHeight));
- message_loop_->RunAllPending();
- }
-
- void Stop() {
- EXPECT_CALL(*vc_impl_, StopCapture(capture_client()))
- .Times(1)
- .WillOnce(CaptureStopped(capture_client(), vc_impl_.get()));
- EXPECT_CALL(*vc_manager_, RemoveDevice(_, _))
- .WillOnce(Return());
- decoder_->Stop(media::NewExpectedClosure());
- message_loop_->RunAllPending();
- }
-
- media::VideoCapture::EventHandler* capture_client() {
- return static_cast<media::VideoCapture::EventHandler*>(decoder_);
- }
-
- void SendBufferToDecoder(const gfx::Size& size) {
- scoped_refptr<media::VideoCapture::VideoFrameBuffer> buffer =
- new media::VideoCapture::VideoFrameBuffer();
- buffer->width = size.width();
- buffer->height = size.height();
- int length = buffer->width * buffer->height * 3 / 2;
- buffer->memory_pointer = new uint8[length];
- buffer->buffer_size = length;
-
- EXPECT_CALL(*vc_impl_, FeedBuffer(_))
- .WillOnce(DeleteDataBuffer());
- decoder_->OnBufferReady(vc_impl_.get(), buffer);
- }
-
- MOCK_METHOD2(FrameReady, void(media::VideoDecoder::Status status,
- const scoped_refptr<media::VideoFrame>&));
-
- // Fixture members.
- scoped_refptr<CaptureVideoDecoder> decoder_;
- scoped_refptr<MockVideoCaptureImplManager> vc_manager_;
- scoped_ptr<ChildProcess> child_process_;
- scoped_ptr<MockVideoCaptureImpl> vc_impl_;
- media::MockStatisticsCB statistics_cb_object_;
- scoped_ptr<MessageLoop> message_loop_;
- scoped_refptr<base::MessageLoopProxy> message_loop_proxy_;
- media::VideoDecoder::ReadCB read_cb_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CaptureVideoDecoderTest);
-};
-
-TEST_F(CaptureVideoDecoderTest, ReadAndReset) {
- // Test basic initialize and teardown sequence.
- Initialize();
- EXPECT_CALL(*this, FrameReady(media::VideoDecoder::kOk,
- HasSize(kWidth, kHeight)));
- decoder_->Read(read_cb_);
- decoder_->Reset(media::NewExpectedClosure());
- message_loop_->RunAllPending();
-
- Stop();
-}
-
-TEST_F(CaptureVideoDecoderTest, OnDeviceInfoReceived) {
- // Test that natural size gets updated as device information is sent.
- Initialize();
-
- gfx::Size expected_size(kWidth * 2, kHeight * 2);
-
- media::VideoCaptureParams params;
- params.width = expected_size.width();
- params.height = expected_size.height();
- params.frame_per_second = kFPS;
- params.session_id = kVideoStreamId;
-
- decoder_->OnDeviceInfoReceived(vc_impl_.get(), params);
-
- EXPECT_CALL(*this, FrameReady(media::VideoDecoder::kOk,
- HasSize(expected_size.width(),
- expected_size.height())));
- decoder_->Read(read_cb_);
- SendBufferToDecoder(expected_size);
- message_loop_->RunAllPending();
-
- Stop();
-}
-
-TEST_F(CaptureVideoDecoderTest, ReadAndShutdown) {
- // Test all the Read requests can be fullfilled (which is needed in order to
- // teardown the pipeline) even when there's no input frame.
- Initialize();
-
- EXPECT_CALL(*this, FrameReady(media::VideoDecoder::kOk, HasSize(0, 0)));
- decoder_->Read(read_cb_);
- Stop();
-
- // Any read after stopping should be immediately satisfied.
- EXPECT_CALL(*this, FrameReady(media::VideoDecoder::kOk, HasSize(0, 0)));
- decoder_->Read(read_cb_);
- message_loop_->RunAllPending();
-}
-
-} // namespace content
diff --git a/content/renderer/media/media_stream_dependency_factory.cc b/content/renderer/media/media_stream_dependency_factory.cc
index 29c1e02..6c96146 100644
--- a/content/renderer/media/media_stream_dependency_factory.cc
+++ b/content/renderer/media/media_stream_dependency_factory.cc
@@ -10,6 +10,7 @@
#include "base/utf_string_conversions.h"
#include "content/renderer/media/media_stream_source_extra_data.h"
#include "content/renderer/media/peer_connection_handler_jsep.h"
+#include "content/renderer/media/rtc_media_constraints.h"
#include "content/renderer/media/rtc_peer_connection_handler.h"
#include "content/renderer/media/rtc_video_capturer.h"
#include "content/renderer/media/video_capture_impl_manager.h"
@@ -28,7 +29,7 @@
#include "net/socket/nss_ssl_util.h"
#endif
-namespace content{
+namespace content {
class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
public:
@@ -79,6 +80,82 @@ class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
WebKit::WebFrame* web_frame_;
};
+// SourceStateObserver is a helper class used for observing the startup state
+// transition of webrtc media sources such as a camera or microphone.
+// An instance of the object deletes itself after use.
+// Usage:
+// 1. Create an instance of the object with the WebKit::WebMediaStreamDescriptor
+// the observed sources belong to and a callback.
+// 2. Add the sources to the observer using AddSource.
+// 3. Call StartObserving()
+// 4. The callback will be triggered when all sources have transitioned from
+// webrtc::MediaSourceInterface::kInitializing.
+class SourceStateObserver : public webrtc::ObserverInterface,
+ public base::NonThreadSafe {
+ public:
+ SourceStateObserver(
+ WebKit::WebMediaStreamDescriptor* description,
+ const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback)
+ : description_(description),
+ ready_callback_(callback),
+ live_(true) {
+ }
+
+ void AddSource(webrtc::MediaSourceInterface* source) {
+ DCHECK(CalledOnValidThread());
+ switch (source->state()) {
+ case webrtc::MediaSourceInterface::kInitializing:
+ sources_.push_back(source);
+ source->RegisterObserver(this);
+ break;
+ case webrtc::MediaSourceInterface::kLive:
+ // The source is already live so we don't need to wait for it.
+ break;
+ case webrtc::MediaSourceInterface::kEnded:
+ // The source has already failed.
+ live_ = false;
+ break;
+ default:
+ NOTREACHED();
+ }
+ }
+
+ void StartObservering() {
+ DCHECK(CalledOnValidThread());
+ CheckIfSourcesAreLive();
+ }
+
+ virtual void OnChanged() {
+ DCHECK(CalledOnValidThread());
+ CheckIfSourcesAreLive();
+ }
+
+ private:
+ void CheckIfSourcesAreLive() {
+ ObservedSources::iterator it = sources_.begin();
+ while (it != sources_.end()) {
+ if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) {
+ live_ &= (*it)->state() == webrtc::MediaSourceInterface::kLive;
+ (*it)->UnregisterObserver(this);
+ it = sources_.erase(it);
+ } else {
+ ++it;
+ }
+ }
+ if (sources_.empty()) {
+ ready_callback_.Run(description_, live_);
+ delete this;
+ }
+ }
+
+ WebKit::WebMediaStreamDescriptor* description_;
+ MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_;
+ bool live_;
+ typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> >
+ ObservedSources;
+ ObservedSources sources_;
+};
+
MediaStreamDependencyFactory::MediaStreamDependencyFactory(
VideoCaptureImplManager* vc_manager,
P2PSocketDispatcher* p2p_socket_dispatcher)
@@ -122,16 +199,50 @@ MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler(
return new RTCPeerConnectionHandler(client, this);
}
-bool MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
+void MediaStreamDependencyFactory::CreateNativeMediaSources(
+ const WebKit::WebMediaConstraints& audio_constraints,
+ const WebKit::WebMediaConstraints& video_constraints,
+ WebKit::WebMediaStreamDescriptor* description,
+ const MediaSourcesCreatedCallback& sources_created) {
+ if (!EnsurePeerConnectionFactory()) {
+ sources_created.Run(description, false);
+ return;
+ }
+
+ // |source_observer| cleans itself up when it has completed
+ // source_observer->StartObservering.
+ SourceStateObserver* source_observer =
+ new SourceStateObserver(description, sources_created);
+
+ // TODO(perkj): Implement local audio sources.
+
+ // Create local video sources.
+ RTCMediaConstraints native_video_constraints(video_constraints);
+ WebKit::WebVector<WebKit::WebMediaStreamComponent> video_components;
+ description->videoSources(video_components);
+ for (size_t i = 0; i < video_components.size(); ++i) {
+ const WebKit::WebMediaStreamSource& source = video_components[i].source();
+ MediaStreamSourceExtraData* source_data =
+ static_cast<MediaStreamSourceExtraData*>(source.extraData());
+ if (!source_data) {
+ // TODO(perkj): Implement support for sources from remote MediaStreams.
+ NOTIMPLEMENTED();
+ continue;
+ }
+ const bool is_screencast = (source_data->device_info().stream_type ==
+ content::MEDIA_TAB_VIDEO_CAPTURE);
+ source_data->SetVideoSource(
+ CreateVideoSource(source_data->device_info().session_id,
+ is_screencast,
+ &native_video_constraints));
+ source_observer->AddSource(source_data->video_source());
+ }
+ source_observer->StartObservering();
+}
+
+void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
WebKit::WebMediaStreamDescriptor* description) {
- // Creating the peer connection factory can fail if for example the audio
- // (input or output) or video device cannot be opened. Handling such cases
- // better is a higher level design discussion which involves the media
- // manager, webrtc and libjingle. We cannot create any native
- // track objects however, so we'll just have to skip that. Furthermore,
- // creating a peer connection later on will fail if we don't have a factory.
- if (!EnsurePeerConnectionFactory())
- return false;
+ DCHECK(PeerConnectionFactoryCreated());
std::string label = UTF16ToUTF8(description->label());
scoped_refptr<webrtc::LocalMediaStreamInterface> native_stream =
@@ -169,35 +280,32 @@ bool MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
const WebKit::WebMediaStreamSource& source = video_components[i].source();
MediaStreamSourceExtraData* source_data =
static_cast<MediaStreamSourceExtraData*>(source.extraData());
- if (!source_data) {
+ if (!source_data || !source_data->video_source()) {
// TODO(perkj): Implement support for sources from remote MediaStreams.
NOTIMPLEMENTED();
continue;
}
- const bool is_screencast = (source_data->device_info().stream_type ==
- MEDIA_TAB_VIDEO_CAPTURE);
- scoped_refptr<webrtc::LocalVideoTrackInterface> video_track(
+
+ scoped_refptr<webrtc::VideoTrackInterface> video_track(
CreateLocalVideoTrack(UTF16ToUTF8(source.id()),
- source_data->device_info().session_id,
- is_screencast));
+ source_data->video_source()));
+
native_stream->AddTrack(video_track);
video_track->set_enabled(video_components[i].isEnabled());
}
- description->setExtraData(new MediaStreamExtraData(native_stream));
- return true;
+ MediaStreamExtraData* extra_data = new MediaStreamExtraData(native_stream);
+ description->setExtraData(extra_data);
}
-bool MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
+void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
WebKit::WebMediaStreamDescriptor* description,
const MediaStreamExtraData::StreamStopCallback& stream_stop) {
- if (!CreateNativeLocalMediaStream(description))
- return false;
+ CreateNativeLocalMediaStream(description);
MediaStreamExtraData* extra_data =
- static_cast<MediaStreamExtraData*>(description->extraData());
+ static_cast<MediaStreamExtraData*>(description->extraData());
extra_data->SetLocalStreamStopCallback(stream_stop);
- return true;
}
bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
@@ -255,16 +363,25 @@ MediaStreamDependencyFactory::CreateLocalMediaStream(
return pc_factory_->CreateLocalMediaStream(label).get();
}
-scoped_refptr<webrtc::LocalVideoTrackInterface>
-MediaStreamDependencyFactory::CreateLocalVideoTrack(
- const std::string& label,
+scoped_refptr<webrtc::VideoSourceInterface>
+MediaStreamDependencyFactory::CreateVideoSource(
int video_session_id,
- bool is_screencast) {
+ bool is_screencast,
+ const webrtc::MediaConstraintsInterface* constraints) {
RtcVideoCapturer* capturer = new RtcVideoCapturer(
video_session_id, vc_manager_.get(), is_screencast);
- // The video track takes ownership of |capturer|.
- return pc_factory_->CreateLocalVideoTrack(label, capturer).get();
+ // The video source takes ownership of |capturer|.
+ scoped_refptr<webrtc::VideoSourceInterface> source =
+ pc_factory_->CreateVideoSource(capturer, constraints).get();
+ return source;
+}
+
+scoped_refptr<webrtc::VideoTrackInterface>
+MediaStreamDependencyFactory::CreateLocalVideoTrack(
+ const std::string& label,
+ webrtc::VideoSourceInterface* source) {
+ return pc_factory_->CreateVideoTrack(label, source).get();
}
scoped_refptr<webrtc::LocalAudioTrackInterface>
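The SourceStateObserver introduced above is what makes Chrome wait for the video source to start: it watches each source until it leaves kInitializing, records whether all of them became live, runs the callback once, and then deletes itself. Below is a self-contained analogue of that pattern in plain C++, offered only to make the lifetime and aggregation logic easier to follow; the names and types are illustrative and are not part of this patch.

#include <algorithm>
#include <functional>
#include <vector>

enum class SourceState { kInitializing, kLive, kEnded };

// Waits until every observed source has left kInitializing, then reports
// whether all of them became live and deletes itself (mirroring
// SourceStateObserver's self-deleting behaviour).
class AllSourcesLiveWaiter {
 public:
  explicit AllSourcesLiveWaiter(std::function<void(bool all_live)> done)
      : done_(std::move(done)), live_(true) {}

  void AddSource(const SourceState* source) {
    if (*source == SourceState::kInitializing)
      pending_.push_back(source);      // Still starting: keep watching it.
    else if (*source == SourceState::kEnded)
      live_ = false;                   // This source has already failed.
  }

  // Call once after adding all sources, and again whenever a source changes
  // state (the real class is notified via webrtc::ObserverInterface::OnChanged).
  void CheckIfSourcesAreLive() {
    pending_.erase(
        std::remove_if(pending_.begin(), pending_.end(),
                       [this](const SourceState* s) {
                         if (*s == SourceState::kInitializing)
                           return false;  // Still waiting on this one.
                         live_ &= (*s == SourceState::kLive);
                         return true;     // Done observing this source.
                       }),
        pending_.end());
    if (pending_.empty()) {
      done_(live_);
      delete this;  // Nothing left to observe.
    }
  }

 private:
  std::function<void(bool)> done_;
  bool live_;
  std::vector<const SourceState*> pending_;
};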
diff --git a/content/renderer/media/media_stream_dependency_factory.h b/content/renderer/media/media_stream_dependency_factory.h
index 8b359da..8972e25 100644
--- a/content/renderer/media/media_stream_dependency_factory.h
+++ b/content/renderer/media/media_stream_dependency_factory.h
@@ -14,6 +14,7 @@
#include "content/renderer/media/media_stream_extra_data.h"
#include "content/renderer/p2p/socket_dispatcher.h"
#include "third_party/libjingle/source/talk/app/webrtc/peerconnectioninterface.h"
+#include "third_party/libjingle/source/talk/app/webrtc/videosourceinterface.h"
namespace base {
class WaitableEvent;
@@ -31,6 +32,7 @@ class PeerConnection;
namespace WebKit {
class WebFrame;
+class WebMediaConstraints;
class WebMediaStreamDescriptor;
class WebPeerConnection00Handler;
class WebPeerConnection00HandlerClient;
@@ -49,6 +51,9 @@ class WebRtcAudioDeviceImpl;
class CONTENT_EXPORT MediaStreamDependencyFactory
: NON_EXPORTED_BASE(public base::NonThreadSafe) {
public:
+ // MediaSourcesCreatedCallback is used in CreateNativeMediaSources.
+ typedef base::Callback<void(WebKit::WebMediaStreamDescriptor* description,
+ bool live)> MediaSourcesCreatedCallback;
MediaStreamDependencyFactory(
VideoCaptureImplManager* vc_manager,
P2PSocketDispatcher* p2p_socket_dispatcher);
@@ -64,16 +69,29 @@ class CONTENT_EXPORT MediaStreamDependencyFactory
WebKit::WebRTCPeerConnectionHandler* CreateRTCPeerConnectionHandler(
WebKit::WebRTCPeerConnectionHandlerClient* client);
+ // CreateNativeMediaSources creates libjingle representations of
+ // the underlying sources of the tracks in |description|.
+ // |sources_created| is invoked when the sources have either been created and
+ // transitioned to a live state or failed.
+ // The libjingle sources are stored in the extra data field of
+ // WebMediaStreamSource.
+ // |audio_constraints| and |video_constraints| set parameters for the sources.
+ void CreateNativeMediaSources(
+ const WebKit::WebMediaConstraints& audio_constraints,
+ const WebKit::WebMediaConstraints& video_constraints,
+ WebKit::WebMediaStreamDescriptor* description,
+ const MediaSourcesCreatedCallback& sources_created);
+
// Creates a libjingle representation of a MediaStream and stores
// it in the extra data field of |description|.
- bool CreateNativeLocalMediaStream(
+ void CreateNativeLocalMediaStream(
WebKit::WebMediaStreamDescriptor* description);
// Creates a libjingle representation of a MediaStream and stores
// it in the extra data field of |description|.
- // |stream_stopped| a is callback that is run when a MediaStream have been
+ // |stream_stopped| is a callback that is run when a MediaStream has been
// stopped.
- bool CreateNativeLocalMediaStream(
+ void CreateNativeLocalMediaStream(
WebKit::WebMediaStreamDescriptor* description,
const MediaStreamExtraData::StreamStopCallback& stream_stop);
@@ -112,11 +130,16 @@ class CONTENT_EXPORT MediaStreamDependencyFactory
virtual scoped_refptr<webrtc::LocalMediaStreamInterface>
CreateLocalMediaStream(const std::string& label);
+ // Asks the PeerConnection factory to create a Local Video Source.
+ virtual scoped_refptr<webrtc::VideoSourceInterface>
+ CreateVideoSource(int video_session_id,
+ bool is_screen_cast,
+ const webrtc::MediaConstraintsInterface* constraints);
+
// Asks the PeerConnection factory to create a Local VideoTrack object.
- virtual scoped_refptr<webrtc::LocalVideoTrackInterface>
+ virtual scoped_refptr<webrtc::VideoTrackInterface>
CreateLocalVideoTrack(const std::string& label,
- int video_session_id,
- bool is_screencast);
+ webrtc::VideoSourceInterface* source);
// Asks the PeerConnection factory to create a Local AudioTrack object.
virtual scoped_refptr<webrtc::LocalAudioTrackInterface>
@@ -124,13 +147,13 @@ class CONTENT_EXPORT MediaStreamDependencyFactory
webrtc::AudioDeviceModule* audio_device);
virtual bool EnsurePeerConnectionFactory();
+ virtual bool PeerConnectionFactoryCreated();
virtual void SetAudioDeviceSessionId(int session_id);
private:
// Creates and deletes |pc_factory_|, which in turn is used for
// creating PeerConnection objects.
bool CreatePeerConnectionFactory();
- bool PeerConnectionFactoryCreated();
void InitializeWorkerThread(talk_base::Thread** thread,
base::WaitableEvent* event);
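One structural change worth calling out in this header: local video is now produced in two steps, a constrained webrtc::VideoSourceInterface created from the capture session, and a webrtc::VideoTrackInterface created from that source instead of directly from a session id. The fragment below merely restates the sequence of calls made by CreateNativeMediaSources and CreateNativeLocalMediaStream above; the variable names are illustrative.

// Step 1 (in CreateNativeMediaSources): build a libjingle video source for the
// capture session, applying the width/height/frame-rate constraints.
scoped_refptr<webrtc::VideoSourceInterface> source =
    CreateVideoSource(video_session_id, is_screencast, &native_video_constraints);

// Step 2 (in CreateNativeLocalMediaStream, once the source is live): create the
// track from the source rather than from a session id.
scoped_refptr<webrtc::VideoTrackInterface> track =
    CreateLocalVideoTrack(track_id, source.get());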
diff --git a/content/renderer/media/media_stream_dependency_factory_unittest.cc b/content/renderer/media/media_stream_dependency_factory_unittest.cc
index 9eda682..36da571 100644
--- a/content/renderer/media/media_stream_dependency_factory_unittest.cc
+++ b/content/renderer/media/media_stream_dependency_factory_unittest.cc
@@ -10,6 +10,8 @@
#include "content/renderer/media/mock_web_peer_connection_00_handler_client.h"
#include "content/renderer/media/mock_web_rtc_peer_connection_handler_client.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/libjingle/source/talk/app/webrtc/videosourceinterface.h"
+#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaConstraints.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaStreamComponent.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaStreamDescriptor.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaStreamSource.h"
@@ -20,6 +22,30 @@
namespace content {
+class MediaSourceCreatedObserver {
+ public:
+ MediaSourceCreatedObserver()
+ : result_(false),
+ description_(NULL) {
+ }
+
+ void OnCreateNativeSourcesComplete(
+ WebKit::WebMediaStreamDescriptor* description,
+ bool request_succeeded) {
+ result_ = request_succeeded;
+ description_ = description;
+ }
+
+ WebKit::WebMediaStreamDescriptor* description() const {
+ return description_;
+ }
+ bool result() const { return result_; }
+
+ private:
+ bool result_;
+ WebKit::WebMediaStreamDescriptor* description_;
+};
+
class MediaStreamDependencyFactoryTest : public ::testing::Test {
public:
void SetUp() {
@@ -61,6 +87,26 @@ class MediaStreamDependencyFactoryTest : public ::testing::Test {
return stream_desc;
}
+ void CreateNativeSources(WebKit::WebMediaStreamDescriptor* descriptor) {
+ MediaSourceCreatedObserver observer;
+ dependency_factory_->CreateNativeMediaSources(
+ WebKit::WebMediaConstraints(),
+ WebKit::WebMediaConstraints(),
+ descriptor,
+ base::Bind(
+ &MediaSourceCreatedObserver::OnCreateNativeSourcesComplete,
+ base::Unretained(&observer)));
+
+ EXPECT_FALSE(observer.result());
+ // Change the state of the created source to live. This should trigger
+ // MediaSourceCreatedObserver::OnCreateNativeSourcesComplete
+ if (dependency_factory_->last_video_source()) {
+ dependency_factory_->last_video_source()->SetLive();
+ }
+ EXPECT_TRUE(observer.result());
+ EXPECT_TRUE(observer.description() == descriptor);
+ }
+
protected:
scoped_ptr<MockMediaStreamDependencyFactory> dependency_factory_;
};
@@ -82,7 +128,9 @@ TEST_F(MediaStreamDependencyFactoryTest, CreateRTCPeerConnectionHandler) {
TEST_F(MediaStreamDependencyFactoryTest, CreateNativeMediaStream) {
WebKit::WebMediaStreamDescriptor stream_desc = CreateWebKitMediaStream(true,
true);
- EXPECT_TRUE(dependency_factory_->CreateNativeLocalMediaStream(&stream_desc));
+ CreateNativeSources(&stream_desc);
+
+ dependency_factory_->CreateNativeLocalMediaStream(&stream_desc);
content::MediaStreamExtraData* extra_data =
static_cast<content::MediaStreamExtraData*>(stream_desc.extraData());
@@ -109,9 +157,10 @@ TEST_F(MediaStreamDependencyFactoryTest, CreateNativeMediaStreamWithoutSource) {
"something");
stream_desc.initialize("new stream", audio_sources, video_sources);
- EXPECT_TRUE(dependency_factory_->CreateNativeLocalMediaStream(&stream_desc));
- content::MediaStreamExtraData* extra_data =
- static_cast<content::MediaStreamExtraData*>(stream_desc.extraData());
+ EXPECT_TRUE(dependency_factory_->EnsurePeerConnectionFactory());
+ dependency_factory_->CreateNativeLocalMediaStream(&stream_desc);
+ MediaStreamExtraData* extra_data = static_cast<MediaStreamExtraData*>(
+ stream_desc.extraData());
ASSERT_TRUE(extra_data && extra_data->local_stream());
EXPECT_EQ(0u, extra_data->local_stream()->video_tracks()->count());
EXPECT_EQ(0u, extra_data->local_stream()->audio_tracks()->count());
diff --git a/content/renderer/media/media_stream_impl.cc b/content/renderer/media/media_stream_impl.cc
index 95ad1a5..89524b3 100644
--- a/content/renderer/media/media_stream_impl.cc
+++ b/content/renderer/media/media_stream_impl.cc
@@ -10,8 +10,6 @@
#include "base/string_number_conversions.h"
#include "base/stringprintf.h"
#include "base/utf_string_conversions.h"
-#include "content/renderer/media/capture_video_decoder.h"
-#include "content/renderer/media/local_video_capture.h"
#include "content/renderer/media/media_stream_extra_data.h"
#include "content/renderer/media/media_stream_source_extra_data.h"
#include "content/renderer/media/media_stream_dependency_factory.h"
@@ -27,17 +25,11 @@
#include "third_party/WebKit/Source/WebKit/chromium/public/WebMediaStreamRegistry.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/WebSecurityOrigin.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebMediaStreamComponent.h"
-#include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebMediaStreamDescriptor.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebMediaStreamSource.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebVector.h"
-namespace content {
namespace {
-const int kVideoCaptureWidth = 640;
-const int kVideoCaptureHeight = 480;
-const int kVideoCaptureFramePerSecond = 30;
-
std::string GetMandatoryStreamConstraint(
const WebKit::WebMediaConstraints& constraints, const std::string& key) {
if (constraints.isNull())
@@ -51,34 +43,32 @@ std::string GetMandatoryStreamConstraint(
void UpdateOptionsIfTabMediaRequest(
const WebKit::WebUserMediaRequest& user_media_request,
media_stream::StreamOptions* options) {
- if (options->audio_type != MEDIA_NO_SERVICE &&
+ if (options->audio_type != content::MEDIA_NO_SERVICE &&
GetMandatoryStreamConstraint(user_media_request.audioConstraints(),
media_stream::kMediaStreamSource) ==
media_stream::kMediaStreamSourceTab) {
- options->audio_type = MEDIA_TAB_AUDIO_CAPTURE;
+ options->audio_type = content::MEDIA_TAB_AUDIO_CAPTURE;
options->audio_device_id = GetMandatoryStreamConstraint(
user_media_request.audioConstraints(),
media_stream::kMediaStreamSourceId);
}
- if (options->video_type != MEDIA_NO_SERVICE &&
+ if (options->video_type != content::MEDIA_NO_SERVICE &&
GetMandatoryStreamConstraint(user_media_request.videoConstraints(),
media_stream::kMediaStreamSource) ==
media_stream::kMediaStreamSourceTab) {
- options->video_type = MEDIA_TAB_VIDEO_CAPTURE;
+ options->video_type = content::MEDIA_TAB_VIDEO_CAPTURE;
options->video_device_id = GetMandatoryStreamConstraint(
user_media_request.videoConstraints(),
media_stream::kMediaStreamSourceId);
}
}
-} // namespace
-
static int g_next_request_id = 0;
// Creates a WebKit representation of a stream sources based on
// |devices| from the MediaStreamDispatcher.
-static void CreateWebKitSourceVector(
+void CreateWebKitSourceVector(
const std::string& label,
const media_stream::StreamDeviceInfoArray& devices,
WebKit::WebMediaStreamSource::Type type,
@@ -92,10 +82,26 @@ static void CreateWebKitSourceVector(
type,
UTF8ToUTF16(devices[i].name));
webkit_sources[i].setExtraData(
- new MediaStreamSourceExtraData(devices[i]));
+ new content::MediaStreamSourceExtraData(devices[i]));
}
}
+webrtc::MediaStreamInterface* GetNativeMediaStream(
+ const WebKit::WebMediaStreamDescriptor& descriptor) {
+ content::MediaStreamExtraData* extra_data =
+ static_cast<content::MediaStreamExtraData*>(descriptor.extraData());
+ if (!extra_data)
+ return NULL;
+ webrtc::MediaStreamInterface* stream = extra_data->local_stream();
+ if (!stream)
+ stream = extra_data->remote_stream();
+ return stream;
+}
+
+} // namespace
+
+namespace content {
+
MediaStreamImpl::MediaStreamImpl(
RenderView* render_view,
MediaStreamDispatcher* media_stream_dispatcher,
@@ -155,13 +161,13 @@ void MediaStreamImpl::requestUserMedia(
UpdateOptionsIfTabMediaRequest(user_media_request, &options);
}
- DVLOG(1) << "MediaStreamImpl::generateStream(" << request_id << ", [ "
+ DVLOG(1) << "MediaStreamImpl::requestUserMedia(" << request_id << ", [ "
<< "audio=" << (options.audio_type)
<< ", video=" << (options.video_type) << " ], "
<< security_origin.spec() << ")";
- user_media_requests_[request_id] =
- UserMediaRequestInfo(frame, user_media_request);
+ user_media_requests_.push_back(
+ new UserMediaRequestInfo(request_id, frame, user_media_request));
media_stream_dispatcher_->GenerateStream(
request_id,
@@ -173,13 +179,15 @@ void MediaStreamImpl::requestUserMedia(
void MediaStreamImpl::cancelUserMediaRequest(
const WebKit::WebUserMediaRequest& user_media_request) {
DCHECK(CalledOnValidThread());
- MediaRequestMap::iterator it = user_media_requests_.begin();
+ UserMediaRequests::iterator it = user_media_requests_.begin();
for (; it != user_media_requests_.end(); ++it) {
- if (it->second.request_ == user_media_request)
+ if ((*it)->request == user_media_request)
break;
}
if (it != user_media_requests_.end()) {
- media_stream_dispatcher_->CancelGenerateStream(it->first);
+ // We can't abort the stream generation process.
+ // Instead, erase the request. Once the stream is generated we will stop the
+ // stream if the request does not exist.
user_media_requests_.erase(it);
}
}
@@ -201,12 +209,7 @@ bool MediaStreamImpl::CheckMediaStream(const GURL& url) {
if (descriptor.isNull() || !descriptor.extraData())
return false; // This is not a valid stream.
- MediaStreamExtraData* extra_data =
- static_cast<MediaStreamExtraData*>(descriptor.extraData());
- webrtc::MediaStreamInterface* stream = extra_data->local_stream();
- if (stream && stream->video_tracks() && stream->video_tracks()->count() > 0)
- return true;
- stream = extra_data->remote_stream();
+ webrtc::MediaStreamInterface* stream = GetNativeMediaStream(descriptor);
if (stream && stream->video_tracks() && stream->video_tracks()->count() > 0)
return true;
return false;
@@ -226,14 +229,9 @@ MediaStreamImpl::GetVideoFrameProvider(
DVLOG(1) << "MediaStreamImpl::GetVideoFrameProvider stream:"
<< UTF16ToUTF8(descriptor.label());
- MediaStreamExtraData* extra_data =
- static_cast<MediaStreamExtraData*>(descriptor.extraData());
- if (extra_data->local_stream())
- return CreateLocalVideoFrameProvider(extra_data->local_stream(),
- error_cb, repaint_cb);
- if (extra_data->remote_stream())
- return CreateRemoteVideoFrameProvider(extra_data->remote_stream(),
- error_cb, repaint_cb);
+ webrtc::MediaStreamInterface* stream = GetNativeMediaStream(descriptor);
+ if (stream)
+ return CreateVideoFrameProvider(stream, error_cb, repaint_cb);
NOTREACHED();
return NULL;
}
@@ -250,14 +248,9 @@ scoped_refptr<media::VideoDecoder> MediaStreamImpl::GetVideoDecoder(
DVLOG(1) << "MediaStreamImpl::GetVideoDecoder stream:"
<< UTF16ToUTF8(descriptor.label());
- MediaStreamExtraData* extra_data =
- static_cast<MediaStreamExtraData*>(descriptor.extraData());
- if (extra_data->local_stream())
- return CreateLocalVideoDecoder(extra_data->local_stream(),
- message_loop_factory);
- if (extra_data->remote_stream())
- return CreateRemoteVideoDecoder(extra_data->remote_stream(),
- message_loop_factory);
+ webrtc::MediaStreamInterface* stream = GetNativeMediaStream(descriptor);
+ if (stream)
+ return CreateVideoDecoder(stream, message_loop_factory);
NOTREACHED();
return NULL;
}
@@ -282,51 +275,73 @@ void MediaStreamImpl::OnStreamGenerated(
WebKit::WebMediaStreamSource::TypeVideo,
video_source_vector);
- MediaRequestMap::iterator it = user_media_requests_.find(request_id);
- if (it == user_media_requests_.end()) {
+ UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
+ if (!request_info) {
DVLOG(1) << "Request ID not found";
media_stream_dispatcher_->StopStream(label);
return;
}
+ WebKit::WebUserMediaRequest* request = &(request_info->request);
WebKit::WebString webkit_label = UTF8ToUTF16(label);
- WebKit::WebMediaStreamDescriptor description;
- description.initialize(webkit_label, audio_source_vector,
- video_source_vector);
-
- if (!dependency_factory_->CreateNativeLocalMediaStream(
- &description, base::Bind(
- &MediaStreamImpl::OnLocalMediaStreamStop, base::Unretained(this)))) {
- DVLOG(1) << "Failed to create native stream in OnStreamGenerated.";
- media_stream_dispatcher_->StopStream(label);
- it->second.request_.requestFailed();
- user_media_requests_.erase(it);
+ WebKit::WebMediaStreamDescriptor* description = &(request_info->descriptor);
+
+ description->initialize(webkit_label, audio_source_vector,
+ video_source_vector);
+
+ // Store the frame that requested the stream so it is stopped if the frame is
+ // reloaded.
+ local_media_streams_[label] = request_info->frame;
+
+ // WebUserMediaRequest doesn't have an implementation in unit tests.
+ // Therefore we need to check for isNull here.
+ WebKit::WebMediaConstraints audio_constraints = request->isNull() ?
+ WebKit::WebMediaConstraints() : request->audioConstraints();
+ WebKit::WebMediaConstraints video_constraints = request->isNull() ?
+ WebKit::WebMediaConstraints() : request->videoConstraints();
+
+ dependency_factory_->CreateNativeMediaSources(
+ audio_constraints, video_constraints, description,
+ base::Bind(&MediaStreamImpl::OnCreateNativeSourcesComplete, AsWeakPtr()));
+}
+
+void MediaStreamImpl::OnCreateNativeSourcesComplete(
+ WebKit::WebMediaStreamDescriptor* description,
+ bool request_succeeded) {
+ UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(description);
+ if (!request_info) {
+ OnLocalMediaStreamStop(UTF16ToUTF8(description->label()));
return;
}
- local_media_streams_[label] = it->second.frame_;
- CompleteGetUserMediaRequest(description, &it->second.request_);
- user_media_requests_.erase(it);
-}
-void MediaStreamImpl::CompleteGetUserMediaRequest(
- const WebKit::WebMediaStreamDescriptor& stream,
- WebKit::WebUserMediaRequest* request) {
- request->requestSucceeded(stream);
+ // Create a native representation of the stream.
+ if (request_succeeded) {
+ dependency_factory_->CreateNativeLocalMediaStream(
+ description,
+ base::Bind(&MediaStreamImpl::OnLocalMediaStreamStop, AsWeakPtr()));
+ } else {
+ OnLocalMediaStreamStop(UTF16ToUTF8(description->label()));
+ }
+
+ CompleteGetUserMediaRequest(request_info->descriptor,
+ &request_info->request,
+ request_succeeded);
+ DeleteUserMediaRequestInfo(request_info);
}
void MediaStreamImpl::OnStreamGenerationFailed(int request_id) {
DCHECK(CalledOnValidThread());
DVLOG(1) << "MediaStreamImpl::OnStreamGenerationFailed("
<< request_id << ")";
- MediaRequestMap::iterator it = user_media_requests_.find(request_id);
- if (it == user_media_requests_.end()) {
+ UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
+ if (!request_info) {
DVLOG(1) << "Request ID not found";
return;
}
- WebKit::WebUserMediaRequest user_media_request(it->second.request_);
- user_media_requests_.erase(it);
-
- user_media_request.requestFailed();
+ CompleteGetUserMediaRequest(request_info->descriptor,
+ &request_info->request,
+ false);
+ DeleteUserMediaRequestInfo(request_info);
}
void MediaStreamImpl::OnDevicesEnumerated(
@@ -358,14 +373,57 @@ void MediaStreamImpl::OnDeviceOpenFailed(int request_id) {
NOTIMPLEMENTED();
}
+void MediaStreamImpl::CompleteGetUserMediaRequest(
+ const WebKit::WebMediaStreamDescriptor& stream,
+ WebKit::WebUserMediaRequest* request_info,
+ bool request_succeeded) {
+ if (request_succeeded) {
+ request_info->requestSucceeded(stream);
+ } else {
+ request_info->requestFailed();
+ }
+}
+
+MediaStreamImpl::UserMediaRequestInfo*
+MediaStreamImpl::FindUserMediaRequestInfo(int request_id) {
+ UserMediaRequests::iterator it = user_media_requests_.begin();
+ for (; it != user_media_requests_.end(); ++it) {
+ if ((*it)->request_id == request_id)
+ return (*it);
+ }
+ return NULL;
+}
+
+MediaStreamImpl::UserMediaRequestInfo*
+MediaStreamImpl::FindUserMediaRequestInfo(
+ WebKit::WebMediaStreamDescriptor* descriptor) {
+ UserMediaRequests::iterator it = user_media_requests_.begin();
+ for (; it != user_media_requests_.end(); ++it) {
+ if (&((*it)->descriptor) == descriptor)
+ return (*it);
+ }
+ return NULL;
+}
+
+void MediaStreamImpl::DeleteUserMediaRequestInfo(
+ UserMediaRequestInfo* request) {
+ UserMediaRequests::iterator it = user_media_requests_.begin();
+ for (; it != user_media_requests_.end(); ++it) {
+ if ((*it) == request) {
+ user_media_requests_.erase(it);
+ return;
+ }
+ }
+ NOTREACHED();
+}
+
void MediaStreamImpl::FrameWillClose(WebKit::WebFrame* frame) {
- MediaRequestMap::iterator request_it = user_media_requests_.begin();
+ UserMediaRequests::iterator request_it = user_media_requests_.begin();
while (request_it != user_media_requests_.end()) {
- if (request_it->second.frame_ == frame) {
+ if ((*request_it)->frame == frame) {
DVLOG(1) << "MediaStreamImpl::FrameWillClose: "
- << "Cancel user media request " << request_it->first;
- cancelUserMediaRequest(request_it->second.request_);
- request_it = user_media_requests_.begin();
+ << "Cancel user media request " << (*request_it)->request_id;
+ request_it = user_media_requests_.erase(request_it);
} else {
++request_it;
}
@@ -385,36 +443,7 @@ void MediaStreamImpl::FrameWillClose(WebKit::WebFrame* frame) {
}
scoped_refptr<webkit_media::VideoFrameProvider>
-MediaStreamImpl::CreateLocalVideoFrameProvider(
- webrtc::MediaStreamInterface* stream,
- const base::Closure& error_cb,
- const webkit_media::VideoFrameProvider::RepaintCB& repaint_cb) {
- if (!stream->video_tracks() || stream->video_tracks()->count() == 0)
- return NULL;
-
- int video_session_id =
- media_stream_dispatcher_->video_session_id(stream->label(), 0);
- media::VideoCaptureCapability capability;
- capability.width = kVideoCaptureWidth;
- capability.height = kVideoCaptureHeight;
- capability.frame_rate = kVideoCaptureFramePerSecond;
- capability.color = media::VideoCaptureCapability::kI420;
- capability.expected_capture_delay = 0;
- capability.interlaced = false;
-
- DVLOG(1) << "MediaStreamImpl::CreateLocalVideoFrameProvider video_session_id:"
- << video_session_id;
-
- return new LocalVideoCapture(
- video_session_id,
- vc_manager_.get(),
- capability,
- error_cb,
- repaint_cb);
-}
-
-scoped_refptr<webkit_media::VideoFrameProvider>
-MediaStreamImpl::CreateRemoteVideoFrameProvider(
+MediaStreamImpl::CreateVideoFrameProvider(
webrtc::MediaStreamInterface* stream,
const base::Closure& error_cb,
const webkit_media::VideoFrameProvider::RepaintCB& repaint_cb) {
@@ -430,34 +459,7 @@ MediaStreamImpl::CreateRemoteVideoFrameProvider(
repaint_cb);
}
-scoped_refptr<media::VideoDecoder> MediaStreamImpl::CreateLocalVideoDecoder(
- webrtc::MediaStreamInterface* stream,
- media::MessageLoopFactory* message_loop_factory) {
- if (!stream->video_tracks() || stream->video_tracks()->count() == 0)
- return NULL;
-
- int video_session_id =
- media_stream_dispatcher_->video_session_id(stream->label(), 0);
- media::VideoCaptureCapability capability;
- capability.width = kVideoCaptureWidth;
- capability.height = kVideoCaptureHeight;
- capability.frame_rate = kVideoCaptureFramePerSecond;
- capability.color = media::VideoCaptureCapability::kI420;
- capability.expected_capture_delay = 0;
- capability.interlaced = false;
-
- DVLOG(1) << "MediaStreamImpl::CreateLocalVideoDecoder video_session_id:"
- << video_session_id;
-
- return new CaptureVideoDecoder(
- message_loop_factory->GetMessageLoop(
- media::MessageLoopFactory::kDecoder),
- video_session_id,
- vc_manager_.get(),
- capability);
-}
-
-scoped_refptr<media::VideoDecoder> MediaStreamImpl::CreateRemoteVideoDecoder(
+scoped_refptr<media::VideoDecoder> MediaStreamImpl::CreateVideoDecoder(
webrtc::MediaStreamInterface* stream,
media::MessageLoopFactory* message_loop_factory) {
if (!stream->video_tracks() || stream->video_tracks()->count() == 0)
@@ -472,6 +474,13 @@ scoped_refptr<media::VideoDecoder> MediaStreamImpl::CreateRemoteVideoDecoder(
stream->video_tracks()->at(0));
}
+MediaStreamSourceExtraData::MediaStreamSourceExtraData(
+ const media_stream::StreamDeviceInfo& device_info)
+ : device_info_(device_info) {
+}
+
+MediaStreamSourceExtraData::~MediaStreamSourceExtraData() {}
+
MediaStreamExtraData::MediaStreamExtraData(
webrtc::MediaStreamInterface* remote_stream)
: remote_stream_(remote_stream) {
diff --git a/content/renderer/media/media_stream_impl.h b/content/renderer/media/media_stream_impl.h
index 110e23a..ea0dbe4 100644
--- a/content/renderer/media/media_stream_impl.h
+++ b/content/renderer/media/media_stream_impl.h
@@ -5,27 +5,25 @@
#ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_IMPL_H_
#define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_IMPL_H_
-#include <map>
#include <string>
+#include <vector>
#include "base/basictypes.h"
#include "base/compiler_specific.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
+#include "base/memory/scoped_vector.h"
#include "base/memory/weak_ptr.h"
#include "base/threading/non_thread_safe.h"
#include "content/common/content_export.h"
#include "content/public/renderer/render_view_observer.h"
#include "content/renderer/media/media_stream_dispatcher_eventhandler.h"
-#include "third_party/libjingle/source/talk/app/webrtc/mediastream.h"
+#include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/WebUserMediaClient.h"
+#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaStreamDescriptor.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/WebUserMediaRequest.h"
#include "webkit/media/media_stream_client.h"
-namespace WebKit {
-class WebMediaStreamDescriptor;
-}
-
namespace content {
class MediaStreamDependencyFactory;
class MediaStreamDispatcher;
@@ -101,48 +99,60 @@ class CONTENT_EXPORT MediaStreamImpl
// Stops a local MediaStream by notifying the MediaStreamDispatcher that the
// stream no longer may be used.
void OnLocalMediaStreamStop(const std::string& label);
+
+ // Callback function triggered when all native (libjingle) versions of the
+ // underlying media sources have been created and started.
+  // |description| is a raw pointer to the descriptor in
+  // UserMediaRequestInfo::descriptor for which the underlying sources have
+  // been created.
+ void OnCreateNativeSourcesComplete(
+ WebKit::WebMediaStreamDescriptor* description,
+ bool request_succeeded);
+
// This function is virtual for test purposes. A test can override this to
// test requesting local media streams. The function notifies WebKit that the
   // |request| has completed and generated the MediaStream |stream|.
virtual void CompleteGetUserMediaRequest(
const WebKit::WebMediaStreamDescriptor& stream,
- WebKit::WebUserMediaRequest* request);
- // This function is virtual for test purposes.
+ WebKit::WebUserMediaRequest* request_info,
+ bool request_succeeded);
+
   // Returns the WebKit representation of a MediaStream given a URL.
+ // This is virtual for test purposes.
virtual WebKit::WebMediaStreamDescriptor GetMediaStream(const GURL& url);
private:
// Structure for storing information about a WebKit request to create a
// MediaStream.
struct UserMediaRequestInfo {
- UserMediaRequestInfo() : frame_(NULL), request_() {}
- UserMediaRequestInfo(WebKit::WebFrame* frame,
+ UserMediaRequestInfo() : request_id(0), frame(NULL), request() {}
+ UserMediaRequestInfo(int request_id,
+ WebKit::WebFrame* frame,
const WebKit::WebUserMediaRequest& request)
- : frame_(frame), request_(request) {}
- WebKit::WebFrame* frame_; // WebFrame that requested the MediaStream.
- WebKit::WebUserMediaRequest request_;
+ : request_id(request_id), frame(frame), request(request) {}
+ int request_id;
+ WebKit::WebFrame* frame; // WebFrame that requested the MediaStream.
+ WebKit::WebMediaStreamDescriptor descriptor;
+ WebKit::WebUserMediaRequest request;
};
- typedef std::map<int, UserMediaRequestInfo> MediaRequestMap;
+ typedef ScopedVector<UserMediaRequestInfo> UserMediaRequests;
// We keep a list of the label and WebFrame of generated local media streams,
// so that we can stop them when needed.
typedef std::map<std::string, WebKit::WebFrame*> LocalNativeStreamMap;
- typedef scoped_refptr<webrtc::LocalMediaStreamInterface> LocalNativeStreamPtr;
+ typedef scoped_refptr<webrtc::MediaStreamInterface> LocalNativeStreamPtr;
+
+ UserMediaRequestInfo* FindUserMediaRequestInfo(int request_id);
+ UserMediaRequestInfo* FindUserMediaRequestInfo(
+ WebKit::WebMediaStreamDescriptor* descriptor);
+ void DeleteUserMediaRequestInfo(UserMediaRequestInfo* request);
scoped_refptr<webkit_media::VideoFrameProvider>
- CreateLocalVideoFrameProvider(
- webrtc::MediaStreamInterface* stream,
- const base::Closure& error_cb,
- const webkit_media::VideoFrameProvider::RepaintCB& repaint_cb);
- scoped_refptr<webkit_media::VideoFrameProvider>
- CreateRemoteVideoFrameProvider(
+ CreateVideoFrameProvider(
webrtc::MediaStreamInterface* stream,
const base::Closure& error_cb,
const webkit_media::VideoFrameProvider::RepaintCB& repaint_cb);
- scoped_refptr<media::VideoDecoder> CreateLocalVideoDecoder(
- webrtc::MediaStreamInterface* stream,
- media::MessageLoopFactory* message_loop_factory);
- scoped_refptr<media::VideoDecoder> CreateRemoteVideoDecoder(
+ scoped_refptr<media::VideoDecoder> CreateVideoDecoder(
webrtc::MediaStreamInterface* stream,
media::MessageLoopFactory* message_loop_factory);
@@ -156,7 +166,7 @@ class CONTENT_EXPORT MediaStreamImpl
scoped_refptr<VideoCaptureImplManager> vc_manager_;
- MediaRequestMap user_media_requests_;
+ UserMediaRequests user_media_requests_;
LocalNativeStreamMap local_media_streams_;
DISALLOW_COPY_AND_ASSIGN(MediaStreamImpl);
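The request bookkeeping above replaces the old request_id-keyed map with a ScopedVector plus lookup helpers, and getUserMedia now completes asynchronously once every native source has started. A minimal sketch of how these declarations fit together; the body below is inferred from the declarations, not the patch's actual implementation:

    // Sketch only: called once all native sources backing |description| have
    // either started or failed to start.
    void MediaStreamImpl::OnCreateNativeSourcesComplete(
        WebKit::WebMediaStreamDescriptor* description,
        bool request_succeeded) {
      UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(description);
      if (!request_info)
        return;  // The request may already have been dropped by FrameWillClose.

      // Hand the result back to WebKit and drop the bookkeeping entry.
      CompleteGetUserMediaRequest(request_info->descriptor,
                                  &request_info->request,
                                  request_succeeded);
      DeleteUserMediaRequestInfo(request_info);
    }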
diff --git a/content/renderer/media/media_stream_impl_unittest.cc b/content/renderer/media/media_stream_impl_unittest.cc
index f80f2ed..f65ee22 100644
--- a/content/renderer/media/media_stream_impl_unittest.cc
+++ b/content/renderer/media/media_stream_impl_unittest.cc
@@ -31,16 +31,19 @@ class MediaStreamImplUnderTest : public MediaStreamImpl {
}
virtual void CompleteGetUserMediaRequest(
- const WebKit::WebMediaStreamDescriptor& stream,
- WebKit::WebUserMediaRequest* request) {
+ const WebKit::WebMediaStreamDescriptor& stream,
+ WebKit::WebUserMediaRequest* request_info,
+ bool request_succeeded) OVERRIDE {
last_generated_stream_ = stream;
+ EXPECT_TRUE(request_succeeded);
}
- virtual WebKit::WebMediaStreamDescriptor GetMediaStream(const GURL& url) {
+ const WebKit::WebMediaStreamDescriptor& last_generated_stream() {
return last_generated_stream_;
}
- const WebKit::WebMediaStreamDescriptor& last_generated_stream() {
+ virtual WebKit::WebMediaStreamDescriptor GetMediaStream(
+ const GURL& url) OVERRIDE {
return last_generated_stream_;
}
@@ -65,18 +68,8 @@ class MediaStreamImplTest : public ::testing::Test {
WebKit::WebMediaStreamDescriptor RequestLocalMediaStream(bool audio,
bool video) {
- WebKit::WebUserMediaRequest user_media_request;
- WebKit::WebVector<WebKit::WebMediaStreamSource> audio_sources(
- audio ? static_cast<size_t>(1) : 0);
- WebKit::WebVector<WebKit::WebMediaStreamSource> video_sources(
- video ? static_cast<size_t>(1) : 0);
- ms_impl_->requestUserMedia(user_media_request, audio_sources,
- video_sources);
-
- ms_impl_->OnStreamGenerated(ms_dispatcher_->request_id(),
- ms_dispatcher_->stream_label(),
- ms_dispatcher_->audio_array(),
- ms_dispatcher_->video_array());
+ GenerateSources(audio, video);
+ ChangeSourceStateToLive();
WebKit::WebMediaStreamDescriptor desc = ms_impl_->last_generated_stream();
content::MediaStreamExtraData* extra_data =
@@ -97,6 +90,27 @@ class MediaStreamImplTest : public ::testing::Test {
return desc;
}
+ void GenerateSources(bool audio, bool video) {
+ WebKit::WebUserMediaRequest user_media_request;
+ WebKit::WebVector<WebKit::WebMediaStreamSource> audio_sources(
+ audio ? static_cast<size_t>(1) : 0);
+ WebKit::WebVector<WebKit::WebMediaStreamSource> video_sources(
+ video ? static_cast<size_t>(1) : 0);
+ ms_impl_->requestUserMedia(user_media_request, audio_sources,
+ video_sources);
+
+ ms_impl_->OnStreamGenerated(ms_dispatcher_->request_id(),
+ ms_dispatcher_->stream_label(),
+ ms_dispatcher_->audio_array(),
+ ms_dispatcher_->video_array());
+ }
+
+ void ChangeSourceStateToLive() {
+    if (dependency_factory_->last_video_source() != NULL) {
+ dependency_factory_->last_video_source()->SetLive();
+ }
+ }
+
protected:
scoped_ptr<MockMediaStreamDispatcher> ms_dispatcher_;
scoped_ptr<MediaStreamImplUnderTest> ms_impl_;
@@ -140,4 +154,12 @@ TEST_F(MediaStreamImplTest, LocalMediaStream) {
EXPECT_EQ(3, ms_dispatcher_->stop_stream_counter());
}
+// This tests what happens if MediaStreamImpl is deleted while the sources of a
+// MediaStream are being started. It only verifies that no crash occurs.
+TEST_F(MediaStreamImplTest, DependencyFactoryShutDown) {
+ GenerateSources(true, true);
+ ms_impl_.reset();
+ ChangeSourceStateToLive();
+}
+
} // namespace content
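The helper split above mirrors the new two-phase flow: OnStreamGenerated() only creates the native sources, and the WebKit request completes once every source reports kLive. A minimal sketch of the resulting test pattern, using the helpers defined above:

    // Sketch only: the two phases a test now drives explicitly.
    GenerateSources(true, true);   // MediaStreamDispatcher has granted devices.
    ChangeSourceStateToLive();     // Native sources report kLive; request completes.
    WebKit::WebMediaStreamDescriptor desc = ms_impl_->last_generated_stream();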
diff --git a/content/renderer/media/media_stream_source_extra_data.h b/content/renderer/media/media_stream_source_extra_data.h
index 5ae26dc..cdd8304 100644
--- a/content/renderer/media/media_stream_source_extra_data.h
+++ b/content/renderer/media/media_stream_source_extra_data.h
@@ -8,6 +8,7 @@
#include "base/compiler_specific.h"
#include "content/common/content_export.h"
#include "content/common/media/media_stream_options.h"
+#include "third_party/libjingle/source/talk/app/webrtc/videosourceinterface.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaStreamSource.h"
namespace content {
@@ -16,17 +17,23 @@ class CONTENT_EXPORT MediaStreamSourceExtraData
: NON_EXPORTED_BASE(public WebKit::WebMediaStreamSource::ExtraData) {
public:
explicit MediaStreamSourceExtraData(
- const media_stream::StreamDeviceInfo& device_info)
- : device_info_(device_info) {
- }
+ const media_stream::StreamDeviceInfo& device_info);
+ virtual ~MediaStreamSourceExtraData();
// Return device information about the camera or microphone.
const media_stream::StreamDeviceInfo& device_info() const {
return device_info_;
}
+ void SetVideoSource(webrtc::VideoSourceInterface* source) {
+ video_source_ = source;
+ }
+
+ webrtc::VideoSourceInterface* video_source() { return video_source_; }
+
private:
media_stream::StreamDeviceInfo device_info_;
+ scoped_refptr<webrtc::VideoSourceInterface> video_source_;
DISALLOW_COPY_AND_ASSIGN(MediaStreamSourceExtraData);
};
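The new SetVideoSource()/video_source() accessors let the renderer cache the native source created by the dependency factory on the WebKit source object and reuse it when the corresponding track is built. A minimal sketch, assuming the usual WebMediaStreamSource::setExtraData() ownership transfer and the CreateVideoSource()/CreateLocalVideoTrack() signatures introduced in this change; |web_source|, |device_info|, |video_session_id|, |constraints| and |label| are placeholder names:

    // Sketch only: attach the native webrtc video source to a WebKit source.
    MediaStreamSourceExtraData* extra_data =
        new MediaStreamSourceExtraData(device_info);
    web_source.setExtraData(extra_data);  // WebKit takes ownership.

    scoped_refptr<webrtc::VideoSourceInterface> native_source(
        dependency_factory->CreateVideoSource(video_session_id,
                                              false,  // is_screencast
                                              &constraints));
    extra_data->SetVideoSource(native_source);

    // Later, when the video track is created, the cached source is reused.
    scoped_refptr<webrtc::VideoTrackInterface> track(
        dependency_factory->CreateLocalVideoTrack(label,
                                                  extra_data->video_source()));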
diff --git a/content/renderer/media/mock_media_stream_dependency_factory.cc b/content/renderer/media/mock_media_stream_dependency_factory.cc
index ffbabc3..5b33802 100644
--- a/content/renderer/media/mock_media_stream_dependency_factory.cc
+++ b/content/renderer/media/mock_media_stream_dependency_factory.cc
@@ -89,6 +89,53 @@ class MockLocalMediaStream : public webrtc::LocalMediaStreamInterface {
scoped_refptr<MockVideoTracks> video_tracks_;
};
+MockVideoSource::MockVideoSource()
+ : observer_(NULL),
+ state_(MediaSourceInterface::kInitializing) {
+}
+
+MockVideoSource::~MockVideoSource() {}
+
+cricket::VideoCapturer* MockVideoSource::GetVideoCapturer() {
+ NOTIMPLEMENTED();
+ return NULL;
+}
+
+void MockVideoSource::AddSink(cricket::VideoRenderer* output) {
+ NOTIMPLEMENTED();
+}
+void MockVideoSource::RemoveSink(cricket::VideoRenderer* output) {
+ NOTIMPLEMENTED();
+}
+
+void MockVideoSource::RegisterObserver(webrtc::ObserverInterface* observer) {
+ observer_ = observer;
+}
+
+void MockVideoSource::UnregisterObserver(webrtc::ObserverInterface* observer) {
+ DCHECK(observer_ == observer);
+ observer_ = NULL;
+}
+
+void MockVideoSource::SetLive() {
+ state_ = MediaSourceInterface::kLive;
+ if (observer_)
+ observer_->OnChanged();
+}
+
+webrtc::MediaSourceInterface::SourceState MockVideoSource::state() const {
+ return state_;
+}
+
+MockLocalVideoTrack::MockLocalVideoTrack(std::string label,
+ webrtc::VideoSourceInterface* source)
+ : enabled_(false),
+ label_(label),
+ source_(source) {
+}
+
+MockLocalVideoTrack::~MockLocalVideoTrack() {}
+
void MockLocalVideoTrack::AddRenderer(VideoRendererInterface* renderer) {
NOTIMPLEMENTED();
}
@@ -135,8 +182,7 @@ void MockLocalVideoTrack::UnregisterObserver(ObserverInterface* observer) {
}
VideoSourceInterface* MockLocalVideoTrack::GetSource() const {
- NOTIMPLEMENTED();
- return NULL;
+ return source_;
}
std::string MockLocalAudioTrack::kind() const {
@@ -273,6 +319,10 @@ bool MockMediaStreamDependencyFactory::EnsurePeerConnectionFactory() {
return true;
}
+bool MockMediaStreamDependencyFactory::PeerConnectionFactoryCreated() {
+ return mock_pc_factory_created_;
+}
+
scoped_refptr<webrtc::PeerConnectionInterface>
MockMediaStreamDependencyFactory::CreatePeerConnection(
const std::string& config,
@@ -291,6 +341,15 @@ MockMediaStreamDependencyFactory::CreatePeerConnection(
return new talk_base::RefCountedObject<MockPeerConnectionImpl>(this);
}
+scoped_refptr<webrtc::VideoSourceInterface>
+MockMediaStreamDependencyFactory::CreateVideoSource(
+ int video_session_id,
+ bool is_screencast,
+ const webrtc::MediaConstraintsInterface* constraints) {
+ last_video_source_ = new talk_base::RefCountedObject<MockVideoSource>();
+ return last_video_source_;
+}
+
scoped_refptr<webrtc::LocalMediaStreamInterface>
MockMediaStreamDependencyFactory::CreateLocalMediaStream(
const std::string& label) {
@@ -298,14 +357,14 @@ MockMediaStreamDependencyFactory::CreateLocalMediaStream(
return new talk_base::RefCountedObject<MockLocalMediaStream>(label);
}
-scoped_refptr<webrtc::LocalVideoTrackInterface>
+scoped_refptr<webrtc::VideoTrackInterface>
MockMediaStreamDependencyFactory::CreateLocalVideoTrack(
const std::string& label,
- int video_session_id,
- bool is_screencast) {
+ webrtc::VideoSourceInterface* source) {
DCHECK(mock_pc_factory_created_);
- scoped_refptr<webrtc::LocalVideoTrackInterface> track(
- new talk_base::RefCountedObject<MockLocalVideoTrack>(label));
+ scoped_refptr<webrtc::VideoTrackInterface> track(
+ new talk_base::RefCountedObject<MockLocalVideoTrack>(
+ label, source));
return track;
}
diff --git a/content/renderer/media/mock_media_stream_dependency_factory.h b/content/renderer/media/mock_media_stream_dependency_factory.h
index 2fb07d9..dc23ccb 100644
--- a/content/renderer/media/mock_media_stream_dependency_factory.h
+++ b/content/renderer/media/mock_media_stream_dependency_factory.h
@@ -13,12 +13,32 @@
namespace content {
-class MockLocalVideoTrack : public webrtc::LocalVideoTrackInterface {
+class MockVideoSource : public webrtc::VideoSourceInterface {
public:
- explicit MockLocalVideoTrack(std::string label)
- : enabled_(false),
- label_(label) {
- }
+ MockVideoSource();
+
+ virtual void RegisterObserver(webrtc::ObserverInterface* observer) OVERRIDE;
+ virtual void UnregisterObserver(webrtc::ObserverInterface* observer) OVERRIDE;
+ virtual MediaSourceInterface::SourceState state() const OVERRIDE;
+ virtual cricket::VideoCapturer* GetVideoCapturer() OVERRIDE;
+ virtual void AddSink(cricket::VideoRenderer* output) OVERRIDE;
+ virtual void RemoveSink(cricket::VideoRenderer* output) OVERRIDE;
+
+  // Changes the state of the source to live and notifies the observer.
+ void SetLive();
+
+ protected:
+ virtual ~MockVideoSource();
+
+ private:
+ webrtc::ObserverInterface* observer_;
+ MediaSourceInterface::SourceState state_;
+};
+
+class MockLocalVideoTrack : public webrtc::VideoTrackInterface {
+ public:
+ MockLocalVideoTrack(std::string label,
+ webrtc::VideoSourceInterface* source);
virtual void AddRenderer(webrtc::VideoRendererInterface* renderer) OVERRIDE;
virtual void RemoveRenderer(
webrtc::VideoRendererInterface* renderer) OVERRIDE;
@@ -34,14 +54,15 @@ class MockLocalVideoTrack : public webrtc::LocalVideoTrackInterface {
virtual webrtc::VideoSourceInterface* GetSource() const OVERRIDE;
protected:
- virtual ~MockLocalVideoTrack() {}
+ virtual ~MockLocalVideoTrack();
private:
bool enabled_;
std::string label_;
+ scoped_refptr<webrtc::VideoSourceInterface> source_;
};
-class MockLocalAudioTrack : public webrtc::LocalAudioTrackInterface {
+class MockLocalAudioTrack : public webrtc::AudioTrackInterface {
public:
explicit MockLocalAudioTrack(const std::string& label)
: enabled_(false),
@@ -80,12 +101,16 @@ class MockMediaStreamDependencyFactory : public MediaStreamDependencyFactory {
const webrtc::MediaConstraintsInterface* constraints,
WebKit::WebFrame* frame,
webrtc::PeerConnectionObserver* observer) OVERRIDE;
+ virtual scoped_refptr<webrtc::VideoSourceInterface>
+ CreateVideoSource(
+ int video_session_id,
+ bool is_screencast,
+ const webrtc::MediaConstraintsInterface* constraints) OVERRIDE;
virtual scoped_refptr<webrtc::LocalMediaStreamInterface>
CreateLocalMediaStream(const std::string& label) OVERRIDE;
- virtual scoped_refptr<webrtc::LocalVideoTrackInterface>
+ virtual scoped_refptr<webrtc::VideoTrackInterface>
CreateLocalVideoTrack(const std::string& label,
- int video_session_id,
- bool is_screencast) OVERRIDE;
+ webrtc::VideoSourceInterface* source) OVERRIDE;
virtual scoped_refptr<webrtc::LocalAudioTrackInterface>
CreateLocalAudioTrack(const std::string& label,
webrtc::AudioDeviceModule* audio_device) OVERRIDE;
@@ -100,10 +125,14 @@ class MockMediaStreamDependencyFactory : public MediaStreamDependencyFactory {
const std::string& sdp) OVERRIDE;
virtual bool EnsurePeerConnectionFactory() OVERRIDE;
+ virtual bool PeerConnectionFactoryCreated() OVERRIDE;
virtual void SetAudioDeviceSessionId(int session_id) OVERRIDE;
+ MockVideoSource* last_video_source() { return last_video_source_; }
+
private:
bool mock_pc_factory_created_;
+  scoped_refptr<MockVideoSource> last_video_source_;
DISALLOW_COPY_AND_ASSIGN(MockMediaStreamDependencyFactory);
};
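MockVideoSource lets a test simulate the asynchronous start of a capture device: it is created in the kInitializing state and SetLive() flips it to kLive while notifying the registered observer. A minimal usage sketch mirroring ChangeSourceStateToLive() in media_stream_impl_unittest.cc; |mock_dependency_factory| is a placeholder for the factory owned by the test fixture:

    // Sketch only: drive the mock source the way the unit tests do.
    scoped_refptr<webrtc::VideoSourceInterface> source(
        mock_dependency_factory->CreateVideoSource(0,       // video_session_id
                                                   false,   // is_screencast
                                                   NULL));  // constraints
    // Production code registers itself as an observer and waits for kLive.
    MockVideoSource* mock_source = mock_dependency_factory->last_video_source();
    if (mock_source)
      mock_source->SetLive();  // state() becomes kLive; OnChanged() fires.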
diff --git a/content/renderer/media/peer_connection_handler_jsep_unittest.cc b/content/renderer/media/peer_connection_handler_jsep_unittest.cc
index 256c4f0..483d031 100644
--- a/content/renderer/media/peer_connection_handler_jsep_unittest.cc
+++ b/content/renderer/media/peer_connection_handler_jsep_unittest.cc
@@ -73,7 +73,7 @@ class PeerConnectionHandlerJsepTest : public ::testing::Test {
native_stream->AddTrack(audio_track);
talk_base::scoped_refptr<webrtc::LocalVideoTrackInterface> video_track(
mock_dependency_factory_->CreateLocalVideoTrack(
- video_track_label, 0, false));
+ video_track_label, 0));
native_stream->AddTrack(video_track);
WebKit::WebVector<WebKit::WebMediaStreamSource> audio_sources(
@@ -106,7 +106,7 @@ class PeerConnectionHandlerJsepTest : public ::testing::Test {
if (!video_track_label.empty()) {
talk_base::scoped_refptr<webrtc::LocalVideoTrackInterface> video_track(
mock_dependency_factory_->CreateLocalVideoTrack(
- video_track_label, 0, false));
+ video_track_label, 0));
stream->AddTrack(video_track);
}
if (!audio_track_label.empty()) {
diff --git a/content/renderer/media/rtc_media_constraints.cc b/content/renderer/media/rtc_media_constraints.cc
new file mode 100644
index 0000000..cc5a741
--- /dev/null
+++ b/content/renderer/media/rtc_media_constraints.cc
@@ -0,0 +1,62 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+#include "content/renderer/media/rtc_media_constraints.h"
+
+#include "base/logging.h"
+
+#include "content/common/media/media_stream_options.h"
+#include "third_party/WebKit/Source/Platform/chromium/public/WebCString.h"
+#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaConstraints.h"
+#include "third_party/WebKit/Source/Platform/chromium/public/WebString.h"
+
+namespace {
+
+void GetNativeMediaConstraints(
+ const WebKit::WebVector<WebKit::WebMediaConstraint>& constraints,
+ webrtc::MediaConstraintsInterface::Constraints* native_constraints) {
+ DCHECK(native_constraints);
+ for (size_t i = 0; i < constraints.size(); ++i) {
+ webrtc::MediaConstraintsInterface::Constraint new_constraint;
+ new_constraint.key = constraints[i].m_name.utf8();
+ new_constraint.value = constraints[i].m_value.utf8();
+
+    // Ignore Chrome-specific Tab capture constraints.
+ if (new_constraint.key == media_stream::kMediaStreamSource ||
+ new_constraint.key == media_stream::kMediaStreamSourceId)
+ continue;
+ DVLOG(3) << "MediaStreamConstraints:" << new_constraint.key
+ << " : " << new_constraint.value;
+ native_constraints->push_back(new_constraint);
+ }
+}
+
+} // namespace
+
+namespace content {
+
+RTCMediaConstraints::RTCMediaConstraints(
+ const WebKit::WebMediaConstraints& constraints) {
+ if (constraints.isNull())
+ return; // Will happen in unit tests.
+ WebKit::WebVector<WebKit::WebMediaConstraint> mandatory;
+ constraints.getMandatoryConstraints(mandatory);
+ GetNativeMediaConstraints(mandatory, &mandatory_);
+ WebKit::WebVector<WebKit::WebMediaConstraint> optional;
+ constraints.getOptionalConstraints(optional);
+ GetNativeMediaConstraints(optional, &optional_);
+}
+
+RTCMediaConstraints::~RTCMediaConstraints() {}
+
+const webrtc::MediaConstraintsInterface::Constraints&
+RTCMediaConstraints::GetMandatory() const {
+ return mandatory_;
+}
+
+const webrtc::MediaConstraintsInterface::Constraints&
+RTCMediaConstraints::GetOptional() const {
+ return optional_;
+}
+
+} // namespace content
diff --git a/content/renderer/media/rtc_media_constraints.h b/content/renderer/media/rtc_media_constraints.h
new file mode 100644
index 0000000..3100571
--- /dev/null
+++ b/content/renderer/media/rtc_media_constraints.h
@@ -0,0 +1,36 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_RENDERER_MEDIA_RTC_MEDIA_CONSTRAINTS_H_
+#define CONTENT_RENDERER_MEDIA_RTC_MEDIA_CONSTRAINTS_H_
+
+#include "base/compiler_specific.h"
+#include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"
+
+namespace WebKit {
+class WebMediaConstraints;
+}
+
+namespace content {
+
+// RTCMediaConstraints acts as a glue layer between WebKit's MediaConstraints and
+// libjingle webrtc::MediaConstraintsInterface.
+// Constraints are used by PeerConnection and getUserMedia API calls.
+class RTCMediaConstraints : public webrtc::MediaConstraintsInterface {
+ public:
+ explicit RTCMediaConstraints(
+ const WebKit::WebMediaConstraints& constraints);
+ virtual ~RTCMediaConstraints();
+ virtual const Constraints& GetMandatory() const OVERRIDE;
+ virtual const Constraints& GetOptional() const OVERRIDE;
+
+ private:
+ Constraints mandatory_;
+ Constraints optional_;
+};
+
+} // namespace content
+
+
+#endif // CONTENT_RENDERER_MEDIA_RTC_MEDIA_CONSTRAINTS_H_
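RTCMediaConstraints simply snapshots the mandatory and optional WebKit constraints into the libjingle representation, so a caller can construct it on the stack and pass a pointer to the factory. A minimal sketch, assuming a WebKit::WebMediaConstraints |web_constraints| taken from the getUserMedia request and the CreateVideoSource() signature added in this change:

    // Sketch only: translate WebKit constraints and hand them to libjingle.
    content::RTCMediaConstraints native_constraints(web_constraints);
    scoped_refptr<webrtc::VideoSourceInterface> source(
        dependency_factory->CreateVideoSource(video_session_id,
                                              false,  // is_screencast
                                              &native_constraints));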
diff --git a/content/renderer/media/rtc_peer_connection_handler.cc b/content/renderer/media/rtc_peer_connection_handler.cc
index c2092cf..33bb7c4a 100644
--- a/content/renderer/media/rtc_peer_connection_handler.cc
+++ b/content/renderer/media/rtc_peer_connection_handler.cc
@@ -11,6 +11,7 @@
#include "base/memory/scoped_ptr.h"
#include "base/utf_string_conversions.h"
#include "content/renderer/media/media_stream_dependency_factory.h"
+#include "content/renderer/media/rtc_media_constraints.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaConstraints.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebRTCConfiguration.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebRTCICECandidate.h"
@@ -105,20 +106,6 @@ static void GetNativeIceServers(
}
}
-static void GetNativeMediaConstraints(
- const WebKit::WebVector<WebKit::WebMediaConstraint>& constraints,
- webrtc::MediaConstraintsInterface::Constraints* native_constraints) {
- DCHECK(native_constraints);
- for (size_t i = 0; i < constraints.size(); ++i) {
- webrtc::MediaConstraintsInterface::Constraint new_constraint;
- new_constraint.key = constraints[i].m_name.utf8();
- new_constraint.value = constraints[i].m_value.utf8();
- DVLOG(3) << "MediaStreamConstraints:" << new_constraint.key
- << " : " << new_constraint.value;
- native_constraints->push_back(new_constraint);
- }
-}
-
// Class mapping responses from calls to libjingle CreateOffer/Answer and
// the WebKit::WebRTCSessionDescriptionRequest.
class CreateSessionDescriptionRequest
@@ -165,32 +152,6 @@ class SetSessionDescriptionRequest
WebKit::WebRTCVoidRequest webkit_request_;
};
-class RTCMediaConstraints : public webrtc::MediaConstraintsInterface {
- public:
- explicit RTCMediaConstraints(
- const WebKit::WebMediaConstraints& constraints) {
- if (constraints.isNull())
- return; // Will happen in unit tests.
- WebKit::WebVector<WebKit::WebMediaConstraint> mandatory;
- constraints.getMandatoryConstraints(mandatory);
- GetNativeMediaConstraints(mandatory, &mandatory_);
- WebKit::WebVector<WebKit::WebMediaConstraint> optional;
- constraints.getOptionalConstraints(optional);
- GetNativeMediaConstraints(optional, &optional_);
- }
- virtual const Constraints& GetMandatory() const OVERRIDE {
- return mandatory_;
- }
- virtual const Constraints& GetOptional() const OVERRIDE {
- return optional_;
- }
- ~RTCMediaConstraints() {}
-
- private:
- Constraints mandatory_;
- Constraints optional_;
-};
-
RTCPeerConnectionHandler::RTCPeerConnectionHandler(
WebKit::WebRTCPeerConnectionHandlerClient* client,
MediaStreamDependencyFactory* dependency_factory)
diff --git a/content/renderer/media/rtc_peer_connection_handler_unittest.cc b/content/renderer/media/rtc_peer_connection_handler_unittest.cc
index a0a1b7b..71c6a3b 100644
--- a/content/renderer/media/rtc_peer_connection_handler_unittest.cc
+++ b/content/renderer/media/rtc_peer_connection_handler_unittest.cc
@@ -77,7 +77,7 @@ class RTCPeerConnectionHandlerTest : public ::testing::Test {
native_stream->AddTrack(audio_track);
scoped_refptr<webrtc::LocalVideoTrackInterface> video_track(
mock_dependency_factory_->CreateLocalVideoTrack(
- video_track_label, 0, false));
+ video_track_label, 0));
native_stream->AddTrack(video_track);
WebKit::WebVector<WebKit::WebMediaStreamSource> audio_sources(
@@ -110,7 +110,7 @@ class RTCPeerConnectionHandlerTest : public ::testing::Test {
if (!video_track_label.empty()) {
scoped_refptr<webrtc::LocalVideoTrackInterface> video_track(
mock_dependency_factory_->CreateLocalVideoTrack(
- video_track_label, 0, false));
+ video_track_label, 0));
stream->AddTrack(video_track);
}
if (!audio_track_label.empty()) {