author     perkj@chromium.org <perkj@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2014-04-16 09:31:03 +0000
committer  perkj@chromium.org <perkj@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2014-04-16 09:31:03 +0000
commit     21d97ecb3a8b877f977db43e45ce3122b9f2ec9d (patch)
tree       5e1e78334a57d4ee1471b5895e66ee8444251a77 /content/renderer/media
parent     44b3bdf53dc8fcdad34a8bd400e71fb92f001d07 (diff)
Update MediaStreamTrack::Stop to latest draft.
MediaStreamTrack::Stop now only stops the source if there are no other tracks using the same source.

BUG=357503
Review URL: https://codereview.chromium.org/218763007

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@264155 0039d316-1c4b-4281-b951-d872f2087c98
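To illustrate the new per-track semantics, a minimal, self-contained C++ sketch (illustrative names only, not the Chromium classes): stopping a track detaches it from its source, and the source shuts down only once its last track is gone.

#include <algorithm>
#include <cassert>
#include <vector>

class Track;

class Source {
 public:
  void AddTrack(Track* track) { tracks_.push_back(track); }
  void RemoveTrack(Track* track) {
    tracks_.erase(std::find(tracks_.begin(), tracks_.end(), track));
    if (tracks_.empty())
      stopped_ = true;  // Last consumer is gone: release the capture device.
  }
  bool stopped() const { return stopped_; }

 private:
  std::vector<Track*> tracks_;
  bool stopped_ = false;
};

class Track {
 public:
  explicit Track(Source* source) : source_(source) { source_->AddTrack(this); }
  // Stop() detaches from the source; the source decides whether it can stop.
  void Stop() {
    if (!source_)
      return;
    source_->RemoveTrack(this);
    source_ = nullptr;
  }

 private:
  Source* source_;
};

int main() {
  Source source;
  Track a(&source);
  Track b(&source);
  a.Stop();
  assert(!source.stopped());  // Another track still uses the same source.
  b.Stop();
  assert(source.stopped());   // Stopping the last track stops the source.
  return 0;
}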
Diffstat (limited to 'content/renderer/media')
-rw-r--r-- | content/renderer/media/media_stream.cc | 9
-rw-r--r-- | content/renderer/media/media_stream.h | 8
-rw-r--r-- | content/renderer/media/media_stream_audio_source.cc | 25
-rw-r--r-- | content/renderer/media/media_stream_center.cc | 24
-rw-r--r-- | content/renderer/media/media_stream_dependency_factory.cc | 9
-rw-r--r-- | content/renderer/media/media_stream_dependency_factory.h | 3
-rw-r--r-- | content/renderer/media/media_stream_dependency_factory_unittest.cc | 2
-rw-r--r-- | content/renderer/media/media_stream_impl.cc | 106
-rw-r--r-- | content/renderer/media/media_stream_impl.h | 30
-rw-r--r-- | content/renderer/media/media_stream_impl_unittest.cc | 142
-rw-r--r-- | content/renderer/media/media_stream_track.cc | 9
-rw-r--r-- | content/renderer/media/media_stream_track.h | 4
-rw-r--r-- | content/renderer/media/media_stream_video_capture_source_unittest.cc | 15
-rw-r--r-- | content/renderer/media/media_stream_video_capturer_source.cc | 2
-rw-r--r-- | content/renderer/media/media_stream_video_source.cc | 20
-rw-r--r-- | content/renderer/media/media_stream_video_source_unittest.cc | 38
-rw-r--r-- | content/renderer/media/media_stream_video_track.cc | 11
-rw-r--r-- | content/renderer/media/media_stream_video_track.h | 1
-rw-r--r-- | content/renderer/media/media_stream_video_track_unittest.cc | 50
-rw-r--r-- | content/renderer/media/mock_media_stream_dependency_factory.cc | 18
-rw-r--r-- | content/renderer/media/mock_media_stream_dependency_factory.h | 9
-rw-r--r-- | content/renderer/media/mock_media_stream_registry.cc | 1
-rw-r--r-- | content/renderer/media/rtc_peer_connection_handler_unittest.cc | 1
-rw-r--r-- | content/renderer/media/webrtc/media_stream_remote_video_source.cc | 6
-rw-r--r-- | content/renderer/media/webrtc/webrtc_local_audio_track_adapter_unittest.cc | 2
-rw-r--r-- | content/renderer/media/webrtc_audio_capturer.cc | 52
-rw-r--r-- | content/renderer/media/webrtc_audio_capturer.h | 22
-rw-r--r-- | content/renderer/media/webrtc_audio_capturer_unittest.cc | 6
-rw-r--r-- | content/renderer/media/webrtc_local_audio_source_provider_unittest.cc | 2
-rw-r--r-- | content/renderer/media/webrtc_local_audio_track.h | 2
-rw-r--r-- | content/renderer/media/webrtc_local_audio_track_unittest.cc | 77
31 files changed, 388 insertions, 318 deletions
diff --git a/content/renderer/media/media_stream.cc b/content/renderer/media/media_stream.cc
index 9a8f143..bb8a953 100644
--- a/content/renderer/media/media_stream.cc
+++ b/content/renderer/media/media_stream.cc
@@ -26,10 +26,8 @@ webrtc::MediaStreamInterface* MediaStream::GetAdapter(
}
MediaStream::MediaStream(MediaStreamDependencyFactory* factory,
- StreamStopCallback stream_stop,
const blink::WebMediaStream& stream)
- : stream_stop_callback_(stream_stop),
- stream_adapter_(NULL),
+ : stream_adapter_(NULL),
is_local_(true),
label_(stream.id().utf8()),
factory_(factory) {
@@ -46,11 +44,6 @@ MediaStream::MediaStream(webrtc::MediaStreamInterface* stream)
MediaStream::~MediaStream() {
}
-void MediaStream::OnStreamStopped() {
- if (!stream_stop_callback_.is_null())
- stream_stop_callback_.Run(label_);
-}
-
webrtc::MediaStreamInterface* MediaStream::GetWebRtcAdapter(
const blink::WebMediaStream& stream) {
if (!stream_adapter_) {
diff --git a/content/renderer/media/media_stream.h b/content/renderer/media/media_stream.h
index 2281043..ac7faf4 100644
--- a/content/renderer/media/media_stream.h
+++ b/content/renderer/media/media_stream.h
@@ -26,11 +26,8 @@ class MediaStreamDependencyFactory;
class CONTENT_EXPORT MediaStream
: NON_EXPORTED_BASE(public blink::WebMediaStream::ExtraData) {
public:
- typedef base::Callback<void(const std::string& label)> StreamStopCallback;
-
// Constructor for local MediaStreams.
MediaStream(MediaStreamDependencyFactory* factory,
- StreamStopCallback stream_stop,
const blink::WebMediaStream& stream);
// Constructor for remote MediaStreams.
explicit MediaStream(webrtc::MediaStreamInterface* stream);
@@ -51,10 +48,6 @@ class CONTENT_EXPORT MediaStream
// way regardless if they are local or remote.
bool is_local() const { return is_local_; }
- // Called by MediaStreamCenter when a stream has been stopped
- // from JavaScript. Triggers |stream_stop_callback_|.
- void OnStreamStopped();
-
// Called by MediaStreamCenter when a track has been added to a stream.
// If a libjingle representation of |stream| exists, the track is added to
// the libjingle MediaStream.
@@ -72,7 +65,6 @@ class CONTENT_EXPORT MediaStream
const blink::WebMediaStream& stream);
private:
- StreamStopCallback stream_stop_callback_;
scoped_refptr<webrtc::MediaStreamInterface> stream_adapter_;
const bool is_local_;
const std::string label_;
diff --git a/content/renderer/media/media_stream_audio_source.cc b/content/renderer/media/media_stream_audio_source.cc
index 34f3087..a8aa7d6 100644
--- a/content/renderer/media/media_stream_audio_source.cc
+++ b/content/renderer/media/media_stream_audio_source.cc
@@ -34,18 +34,23 @@ void MediaStreamAudioSource::AddTrack(
const blink::WebMediaConstraints& constraints,
const ConstraintsCallback& callback) {
// TODO(xians): Properly implement for audio sources.
- if (!factory_)
- callback.Run(this, false);
-
- bool result = true;
if (!local_audio_source_) {
- result = factory_->InitializeMediaStreamAudioSource(render_view_id_,
- constraints,
- this);
+ if (!factory_->InitializeMediaStreamAudioSource(render_view_id_,
+ constraints,
+ this)) {
+ // The source failed to start.
+      // MediaStreamImpl relies on |stop_callback| being triggered when the
+      // last track is removed from the source. But in this case, the source
+      // is not even started, so we need to both fail adding the track and
+      // trigger |stop_callback|.
+ callback.Run(this, false);
+ StopSource();
+ return;
+ }
}
- if (result)
- factory_->CreateLocalAudioTrack(track);
- callback.Run(this, result);
+
+ factory_->CreateLocalAudioTrack(track);
+ callback.Run(this, true);
}
} // namespace content
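The comment added above describes a two-sided failure contract: if the capturer cannot be created, the track-level callback must report failure and the source must also announce that it has stopped, since the owner only releases its per-source bookkeeping on the stop notification. A rough sketch of that contract, with hypothetical names (not the Chromium API):

#include <functional>
#include <iostream>

// Hypothetical owner standing in for MediaStreamImpl: it hears about track
// results through the constraints callback and about source teardown through
// a separate stop notification.
struct Owner {
  void OnTrackResult(bool ok) { std::cout << "track started: " << ok << "\n"; }
  void OnSourceStopped() { std::cout << "source stopped\n"; }
};

struct AudioSource {
  Owner* owner = nullptr;
  bool initialized = false;

  bool Initialize() { return false; }  // Simulate a capturer that fails.

  void AddTrack(const std::function<void(bool)>& callback) {
    if (!initialized && !Initialize()) {
      // Fail the track *and* report the source as stopped, because the owner
      // only cleans up its source bookkeeping on the stop notification.
      callback(false);
      owner->OnSourceStopped();
      return;
    }
    initialized = true;
    callback(true);
  }
};

int main() {
  Owner owner;
  AudioSource source;
  source.owner = &owner;
  source.AddTrack([&owner](bool ok) { owner.OnTrackResult(ok); });
  return 0;
}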
diff --git a/content/renderer/media/media_stream_center.cc b/content/renderer/media/media_stream_center.cc
index e568567..159afb7 100644
--- a/content/renderer/media/media_stream_center.cc
+++ b/content/renderer/media/media_stream_center.cc
@@ -121,15 +121,8 @@ void MediaStreamCenter::didDisableMediaStreamTrack(
bool MediaStreamCenter::didStopMediaStreamTrack(
const blink::WebMediaStreamTrack& track) {
DVLOG(1) << "MediaStreamCenter::didStopMediaStreamTrack";
- blink::WebMediaStreamSource source = track.source();
- MediaStreamSource* extra_data =
- static_cast<MediaStreamSource*>(source.extraData());
- if (!extra_data) {
- DVLOG(1) << "didStopMediaStreamTrack called on a remote track.";
- return false;
- }
-
- extra_data->StopSource();
+ MediaStreamTrack* native_track = MediaStreamTrack::GetTrack(track);
+ native_track->Stop();
return true;
}
@@ -163,22 +156,16 @@ void MediaStreamCenter::didStopLocalMediaStream(
}
// TODO(perkj): MediaStream::Stop is being deprecated. But for the moment we
- // need to support the old behavior and the new. Since we only create one
- // source object per actual device- we need to fake stopping a
- // MediaStreamTrack by disabling it if the same device is used as source by
- // multiple tracks. Note that disabling a track here, don't affect the
- // enabled property in JS.
+ // need to support both MediaStream::Stop and MediaStreamTrack::Stop.
blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
stream.audioTracks(audio_tracks);
for (size_t i = 0; i < audio_tracks.size(); ++i)
- didDisableMediaStreamTrack(audio_tracks[i]);
+ didStopMediaStreamTrack(audio_tracks[i]);
blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
stream.videoTracks(video_tracks);
for (size_t i = 0; i < video_tracks.size(); ++i)
- didDisableMediaStreamTrack(video_tracks[i]);
-
- native_stream->OnStreamStopped();
+ didStopMediaStreamTrack(video_tracks[i]);
}
void MediaStreamCenter::didCreateMediaStream(blink::WebMediaStream& stream) {
@@ -186,7 +173,6 @@ void MediaStreamCenter::didCreateMediaStream(blink::WebMediaStream& stream) {
blink::WebMediaStream writable_stream(stream);
MediaStream* native_stream(
new MediaStream(rtc_factory_,
- MediaStream::StreamStopCallback(),
stream));
writable_stream.setExtraData(native_stream);
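The change above reduces the deprecated MediaStream.stop() to per-track stops. In sketch form (illustrative types, not the Blink API), the compatibility shim is simply iteration over both track lists:

#include <vector>

struct Track {
  bool stopped = false;
  void Stop() { stopped = true; }  // Stands in for MediaStreamTrack::Stop.
};

struct Stream {
  std::vector<Track*> audio_tracks;
  std::vector<Track*> video_tracks;
};

// Deprecated stream-level stop expressed in terms of track-level stop, as
// didStopLocalMediaStream now does for the audio and video track lists.
void StopStream(Stream& stream) {
  for (Track* track : stream.audio_tracks)
    track->Stop();
  for (Track* track : stream.video_tracks)
    track->Stop();
}

int main() {
  Track audio;
  Track video;
  Stream stream{{&audio}, {&video}};
  StopStream(stream);
  return audio.stopped && video.stopped ? 0 : 1;
}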
diff --git a/content/renderer/media/media_stream_dependency_factory.cc b/content/renderer/media/media_stream_dependency_factory.cc
index 70f3d06..2e41cd81 100644
--- a/content/renderer/media/media_stream_dependency_factory.cc
+++ b/content/renderer/media/media_stream_dependency_factory.cc
@@ -205,7 +205,8 @@ bool MediaStreamDependencyFactory::InitializeMediaStreamAudioSource(
&device_info.device.input.effects);
scoped_refptr<WebRtcAudioCapturer> capturer(
- CreateAudioCapturer(render_view_id, device_info, audio_constraints));
+ CreateAudioCapturer(render_view_id, device_info, audio_constraints,
+ source_data));
if (!capturer.get()) {
DLOG(WARNING) << "Failed to create the capturer for device "
<< device_info.device.id;
@@ -666,7 +667,8 @@ scoped_refptr<WebRtcAudioCapturer>
MediaStreamDependencyFactory::CreateAudioCapturer(
int render_view_id,
const StreamDeviceInfo& device_info,
- const blink::WebMediaConstraints& constraints) {
+ const blink::WebMediaConstraints& constraints,
+ MediaStreamAudioSource* audio_source) {
// TODO(xians): Handle the cases when gUM is called without a proper render
// view, for example, by an extension.
DCHECK_GE(render_view_id, 0);
@@ -675,7 +677,8 @@ MediaStreamDependencyFactory::CreateAudioCapturer(
DCHECK(GetWebRtcAudioDevice());
return WebRtcAudioCapturer::CreateCapturer(render_view_id, device_info,
constraints,
- GetWebRtcAudioDevice());
+ GetWebRtcAudioDevice(),
+ audio_source);
}
void MediaStreamDependencyFactory::AddNativeAudioTrackToBlinkTrack(
diff --git a/content/renderer/media/media_stream_dependency_factory.h b/content/renderer/media/media_stream_dependency_factory.h
index 5986abb..5ce97b8 100644
--- a/content/renderer/media/media_stream_dependency_factory.h
+++ b/content/renderer/media/media_stream_dependency_factory.h
@@ -173,7 +173,8 @@ class CONTENT_EXPORT MediaStreamDependencyFactory
// it reuses existing capture if any; otherwise it creates a new capturer.
virtual scoped_refptr<WebRtcAudioCapturer> CreateAudioCapturer(
int render_view_id, const StreamDeviceInfo& device_info,
- const blink::WebMediaConstraints& constraints);
+ const blink::WebMediaConstraints& constraints,
+ MediaStreamAudioSource* audio_source);
// Adds the audio device as a sink to the audio track and starts the local
// audio track. This is virtual for test purposes since no real audio device
diff --git a/content/renderer/media/media_stream_dependency_factory_unittest.cc b/content/renderer/media/media_stream_dependency_factory_unittest.cc
index 9236c23..6263518 100644
--- a/content/renderer/media/media_stream_dependency_factory_unittest.cc
+++ b/content/renderer/media/media_stream_dependency_factory_unittest.cc
@@ -117,7 +117,6 @@ class MediaStreamDependencyFactoryTest : public ::testing::Test {
video_track_vector);
stream_desc.setExtraData(
new content::MediaStream(dependency_factory_.get(),
- content::MediaStream::StreamStopCallback(),
stream_desc));
return stream_desc;
}
@@ -181,7 +180,6 @@ TEST_F(MediaStreamDependencyFactoryTest, CreateNativeMediaStreamWithoutSource) {
stream_desc.initialize("new stream", audio_tracks, video_tracks);
stream_desc.setExtraData(
new content::MediaStream(dependency_factory_.get(),
- content::MediaStream::StreamStopCallback(),
stream_desc));
VerifyMediaStream(stream_desc, 0, 0);
diff --git a/content/renderer/media/media_stream_impl.cc b/content/renderer/media/media_stream_impl.cc
index 9c6d33b..a0b3cf2 100644
--- a/content/renderer/media/media_stream_impl.cc
+++ b/content/renderer/media/media_stream_impl.cc
@@ -342,7 +342,6 @@ void MediaStreamImpl::OnStreamGenerated(
web_stream->setExtraData(
new MediaStream(
dependency_factory_,
- base::Bind(&MediaStreamImpl::OnLocalMediaStreamStop, AsWeakPtr()),
*web_stream));
// Wait for the tracks to be started successfully or to fail.
@@ -365,9 +364,8 @@ void MediaStreamImpl::OnStreamGenerationFailed(
DVLOG(1) << "Request ID not found";
return;
}
- CompleteGetUserMediaRequest(request_info->web_stream,
- &request_info->request,
- result);
+
+ GetUserMediaRequestFailed(&request_info->request, result);
DeleteUserMediaRequestInfo(request_info);
}
@@ -399,19 +397,6 @@ void MediaStreamImpl::OnDeviceStopped(
break;
}
}
-
- // Remove the reference to this source from all |user_media_requests_|.
- // TODO(perkj): The below is not necessary once we don't need to support
- // MediaStream::Stop().
- UserMediaRequests::iterator it = user_media_requests_.begin();
- while (it != user_media_requests_.end()) {
- (*it)->RemoveSource(source);
- if ((*it)->AreAllSourcesRemoved()) {
- it = user_media_requests_.erase(it);
- } else {
- ++it;
- }
- }
}
void MediaStreamImpl::InitializeSourceObject(
@@ -533,14 +518,12 @@ void MediaStreamImpl::OnCreateNativeTracksCompleted(
DVLOG(1) << "MediaStreamImpl::OnCreateNativeTracksComplete("
<< "{request_id = " << request->request_id << "} "
<< "{result = " << result << "})";
- CompleteGetUserMediaRequest(request->web_stream, &request->request,
- result);
- if (result != MEDIA_DEVICE_OK) {
- // TODO(perkj): Once we don't support MediaStream::Stop the |request_info|
- // can be deleted even if the request succeeds.
- DeleteUserMediaRequestInfo(request);
- StopUnreferencedSources(true);
- }
+ if (result == content::MEDIA_DEVICE_OK)
+ GetUserMediaRequestSucceeded(request->web_stream, &request->request);
+ else
+ GetUserMediaRequestFailed(&request->request, result);
+
+ DeleteUserMediaRequestInfo(request);
}
void MediaStreamImpl::OnDevicesEnumerated(
@@ -566,17 +549,19 @@ void MediaStreamImpl::OnDeviceOpenFailed(int request_id) {
NOTIMPLEMENTED();
}
-void MediaStreamImpl::CompleteGetUserMediaRequest(
+void MediaStreamImpl::GetUserMediaRequestSucceeded(
const blink::WebMediaStream& stream,
+ blink::WebUserMediaRequest* request_info) {
+ DVLOG(1) << "MediaStreamImpl::GetUserMediaRequestSucceeded";
+ request_info->requestSucceeded(stream);
+}
+
+void MediaStreamImpl::GetUserMediaRequestFailed(
blink::WebUserMediaRequest* request_info,
content::MediaStreamRequestResult result) {
-
- DVLOG(1) << "MediaStreamImpl::CompleteGetUserMediaRequest("
- << "result=" << result;
-
switch (result) {
case MEDIA_DEVICE_OK:
- request_info->requestSucceeded(stream);
+ NOTREACHED();
break;
case MEDIA_DEVICE_PERMISSION_DENIED:
request_info->requestDenied();
@@ -627,16 +612,6 @@ const blink::WebMediaStreamSource* MediaStreamImpl::FindLocalSource(
return NULL;
}
-bool MediaStreamImpl::IsSourceInRequests(
- const blink::WebMediaStreamSource& source) const {
- for (UserMediaRequests::const_iterator req_it = user_media_requests_.begin();
- req_it != user_media_requests_.end(); ++req_it) {
- if ((*req_it)->IsSourceUsed(source))
- return true;
- }
- return false;
-}
-
MediaStreamImpl::UserMediaRequestInfo*
MediaStreamImpl::FindUserMediaRequestInfo(int request_id) {
UserMediaRequests::iterator it = user_media_requests_.begin();
@@ -658,16 +633,6 @@ MediaStreamImpl::FindUserMediaRequestInfo(
return NULL;
}
-MediaStreamImpl::UserMediaRequestInfo*
-MediaStreamImpl::FindUserMediaRequestInfo(const std::string& label) {
- UserMediaRequests::iterator it = user_media_requests_.begin();
- for (; it != user_media_requests_.end(); ++it) {
- if ((*it)->generated && (*it)->web_stream.id() == base::UTF8ToUTF16(label))
- return (*it);
- }
- return NULL;
-}
-
void MediaStreamImpl::DeleteUserMediaRequestInfo(
UserMediaRequestInfo* request) {
UserMediaRequests::iterator it = user_media_requests_.begin();
@@ -720,20 +685,10 @@ void MediaStreamImpl::FrameWillClose(blink::WebFrame* frame) {
}
}
-void MediaStreamImpl::OnLocalMediaStreamStop(
- const std::string& label) {
- DVLOG(1) << "MediaStreamImpl::OnLocalMediaStreamStop(" << label << ")";
-
- UserMediaRequestInfo* user_media_request = FindUserMediaRequestInfo(label);
- if (user_media_request) {
- DeleteUserMediaRequestInfo(user_media_request);
- }
- StopUnreferencedSources(true);
-}
-
void MediaStreamImpl::OnLocalSourceStopped(
const blink::WebMediaStreamSource& source) {
DCHECK(CalledOnValidThread());
+ DVLOG(1) << "MediaStreamImpl::OnLocalSourceStopped";
bool device_found = false;
for (LocalStreamSources::iterator device_it = local_sources_.begin();
@@ -746,19 +701,6 @@ void MediaStreamImpl::OnLocalSourceStopped(
}
CHECK(device_found);
- // Remove the reference to this source from all |user_media_requests_|.
- // TODO(perkj): The below is not necessary once we don't need to support
- // MediaStream::Stop().
- UserMediaRequests::iterator it = user_media_requests_.begin();
- while (it != user_media_requests_.end()) {
- (*it)->RemoveSource(source);
- if ((*it)->AreAllSourcesRemoved()) {
- it = user_media_requests_.erase(it);
- } else {
- ++it;
- }
- }
-
MediaStreamSource* source_impl =
static_cast<MediaStreamSource*> (source.extraData());
media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());
@@ -779,18 +721,6 @@ void MediaStreamImpl::StopLocalSource(
source_impl->StopSource();
}
-void MediaStreamImpl::StopUnreferencedSources(bool notify_dispatcher) {
- LocalStreamSources::iterator source_it = local_sources_.begin();
- while (source_it != local_sources_.end()) {
- if (!IsSourceInRequests(source_it->source)) {
- StopLocalSource(source_it->source, notify_dispatcher);
- source_it = local_sources_.erase(source_it);
- } else {
- ++source_it;
- }
- }
-}
-
scoped_refptr<WebRtcAudioRenderer> MediaStreamImpl::CreateRemoteAudioRenderer(
webrtc::MediaStreamInterface* stream,
int render_frame_id) {
@@ -912,7 +842,7 @@ void MediaStreamImpl::UserMediaRequestInfo::CallbackOnTracksStarted(
void MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted(
MediaStreamSource* source, bool success) {
- DVLOG(1) << "OnTrackStarted";
+ DVLOG(1) << "OnTrackStarted result " << success;
std::vector<MediaStreamSource*>::iterator it =
std::find(sources_waiting_for_callback_.begin(),
sources_waiting_for_callback_.end(),
diff --git a/content/renderer/media/media_stream_impl.h b/content/renderer/media/media_stream_impl.h
index 12de8def..af9e135 100644
--- a/content/renderer/media/media_stream_impl.h
+++ b/content/renderer/media/media_stream_impl.h
@@ -99,22 +99,17 @@ class CONTENT_EXPORT MediaStreamImpl
// Called when |source| has been stopped from JavaScript.
void OnLocalSourceStopped(const blink::WebMediaStreamSource& source);
- // Called when a MediaStream with label |label| has been ordered to stop from
- // JavaScript. The implementation must stop all sources that are not used by
- // other MediaStreams.
- // TODO(perkj): MediaStream::Stop has been deprecated from the spec and all
- // applications should move to use MediaStreamTrack::Stop instead and this
- // method be removed.
- void OnLocalMediaStreamStop(const std::string& label);
-
- // This function is virtual for test purposes. A test can override this to
+ // These methods are virtual for test purposes. A test can override them to
// test requesting local media streams. The function notifies WebKit that the
- // |request| have completed and generated the MediaStream |stream|.
- virtual void CompleteGetUserMediaRequest(
- const blink::WebMediaStream& stream,
+  // |request| has completed.
+ virtual void GetUserMediaRequestSucceeded(
+ const blink::WebMediaStream& stream,
+ blink::WebUserMediaRequest* request_info);
+ virtual void GetUserMediaRequestFailed(
blink::WebUserMediaRequest* request_info,
content::MediaStreamRequestResult result);
+
// Returns the WebKit representation of a MediaStream given an URL.
// This is virtual for test purposes.
virtual blink::WebMediaStream GetMediaStream(const GURL& url);
@@ -220,7 +215,6 @@ class CONTENT_EXPORT MediaStreamImpl
UserMediaRequestInfo* FindUserMediaRequestInfo(int request_id);
UserMediaRequestInfo* FindUserMediaRequestInfo(
const blink::WebUserMediaRequest& request);
- UserMediaRequestInfo* FindUserMediaRequestInfo(const std::string& label);
void DeleteUserMediaRequestInfo(UserMediaRequestInfo* request);
// Returns the source that use a device with |device.session_id|
@@ -228,14 +222,8 @@ class CONTENT_EXPORT MediaStreamImpl
const blink::WebMediaStreamSource* FindLocalSource(
const StreamDeviceInfo& device) const;
- // Returns true if |source| exists in |user_media_requests_|
- bool IsSourceInRequests(const blink::WebMediaStreamSource& source) const;
-
void StopLocalSource(const blink::WebMediaStreamSource& source,
bool notify_dispatcher);
- // Stops all local sources that don't exist in exist in
- // |user_media_requests_|.
- void StopUnreferencedSources(bool notify_dispatcher);
scoped_refptr<WebRtcAudioRenderer> CreateRemoteAudioRenderer(
webrtc::MediaStreamInterface* stream, int render_frame_id);
@@ -261,10 +249,10 @@ class CONTENT_EXPORT MediaStreamImpl
// valid for the lifetime of RenderView.
MediaStreamDispatcher* media_stream_dispatcher_;
- UserMediaRequests user_media_requests_;
-
LocalStreamSources local_sources_;
+ UserMediaRequests user_media_requests_;
+
DISALLOW_COPY_AND_ASSIGN(MediaStreamImpl);
};
diff --git a/content/renderer/media/media_stream_impl_unittest.cc b/content/renderer/media/media_stream_impl_unittest.cc
index 16b464b..f5c7ca5 100644
--- a/content/renderer/media/media_stream_impl_unittest.cc
+++ b/content/renderer/media/media_stream_impl_unittest.cc
@@ -6,6 +6,7 @@
#include "base/strings/utf_string_conversions.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_impl.h"
+#include "content/renderer/media/media_stream_track.h"
#include "content/renderer/media/mock_media_stream_dependency_factory.h"
#include "content/renderer/media/mock_media_stream_dispatcher.h"
#include "content/renderer/media/mock_media_stream_video_source.h"
@@ -43,6 +44,7 @@ class MediaStreamImplUnderTest : public MediaStreamImpl {
MediaStreamDependencyFactory* dependency_factory)
: MediaStreamImpl(NULL, media_stream_dispatcher, dependency_factory),
state_(REQUEST_NOT_STARTED),
+ result_(NUM_MEDIA_REQUEST_RESULTS),
factory_(dependency_factory),
video_source_(NULL) {
}
@@ -53,18 +55,19 @@ class MediaStreamImplUnderTest : public MediaStreamImpl {
requestUserMedia(user_media_request);
}
- virtual void CompleteGetUserMediaRequest(
+ virtual void GetUserMediaRequestSucceeded(
const blink::WebMediaStream& stream,
- blink::WebUserMediaRequest* request_info,
- content::MediaStreamRequestResult result) OVERRIDE {
+ blink::WebUserMediaRequest* request_info) OVERRIDE {
last_generated_stream_ = stream;
- result_ = result;
- state_ = (result == MEDIA_DEVICE_OK ? REQUEST_SUCCEEDED : REQUEST_FAILED);
+ state_ = REQUEST_SUCCEEDED;
}
- virtual blink::WebMediaStream GetMediaStream(
- const GURL& url) OVERRIDE {
- return last_generated_stream_;
+ virtual void GetUserMediaRequestFailed(
+ blink::WebUserMediaRequest* request_info,
+ content::MediaStreamRequestResult result) OVERRIDE {
+ last_generated_stream_.reset();
+ state_ = REQUEST_FAILED;
+ result_ = result;
}
virtual MediaStreamVideoSource* CreateVideoSource(
@@ -76,13 +79,14 @@ class MediaStreamImplUnderTest : public MediaStreamImpl {
return video_source_;
}
- using MediaStreamImpl::OnLocalMediaStreamStop;
- using MediaStreamImpl::OnLocalSourceStopped;
-
const blink::WebMediaStream& last_generated_stream() {
return last_generated_stream_;
}
+ void ClearLastGeneratedStream() {
+ last_generated_stream_.reset();
+ }
+
MockMediaStreamVideoCapturerSource* last_created_video_source() const {
return video_source_;
}
@@ -156,6 +160,10 @@ class MediaStreamImplTest : public ::testing::Test {
video_source->FailToStartMockedSource();
}
+ void FailToCreateNextAudioCapturer() {
+ dependency_factory_->FailToCreateNextAudioCapturer();
+ }
+
protected:
scoped_ptr<MockMediaStreamDispatcher> ms_dispatcher_;
scoped_ptr<MediaStreamImplUnderTest> ms_impl_;
@@ -224,51 +232,65 @@ TEST_F(MediaStreamImplTest, GenerateTwoMediaStreamsWithDifferentSources) {
desc2_audio_tracks[0].source().extraData());
}
-TEST_F(MediaStreamImplTest, StopLocalMediaStream) {
+TEST_F(MediaStreamImplTest, StopLocalTracks) {
// Generate a stream with both audio and video.
blink::WebMediaStream mixed_desc = RequestLocalMediaStream();
- // Stop generated local streams.
- ms_impl_->OnLocalMediaStreamStop(mixed_desc.id().utf8());
+ blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
+ mixed_desc.audioTracks(audio_tracks);
+ MediaStreamTrack* audio_track = MediaStreamTrack::GetTrack(audio_tracks[0]);
+ audio_track->Stop();
EXPECT_EQ(1, ms_dispatcher_->stop_audio_device_counter());
+
+ blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
+ mixed_desc.videoTracks(video_tracks);
+ MediaStreamTrack* video_track = MediaStreamTrack::GetTrack(video_tracks[0]);
+ video_track->Stop();
EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter());
}
-// This test that a source is not stopped even if the MediaStream is stopped if
-// there are two MediaStreams using the same device. The source is stopped
-// if there are no more MediaStreams using the device.
-TEST_F(MediaStreamImplTest, StopLocalMediaStreamWhenTwoStreamUseSameDevices) {
+// This tests that a source is not stopped even if the tracks in a
+// MediaStream are stopped when there are two MediaStreams with tracks using
+// the same device. The source is stopped only when there are no more
+// MediaStream tracks using the device.
+TEST_F(MediaStreamImplTest, StopLocalTracksWhenTwoStreamUseSameDevices) {
// Generate a stream with both audio and video.
blink::WebMediaStream desc1 = RequestLocalMediaStream();
blink::WebMediaStream desc2 = RequestLocalMediaStream();
- ms_impl_->OnLocalMediaStreamStop(desc2.id().utf8());
+ blink::WebVector<blink::WebMediaStreamTrack> audio_tracks1;
+ desc1.audioTracks(audio_tracks1);
+ MediaStreamTrack* audio_track1 = MediaStreamTrack::GetTrack(audio_tracks1[0]);
+ audio_track1->Stop();
EXPECT_EQ(0, ms_dispatcher_->stop_audio_device_counter());
- EXPECT_EQ(0, ms_dispatcher_->stop_video_device_counter());
- ms_impl_->OnLocalMediaStreamStop(desc1.id().utf8());
+ blink::WebVector<blink::WebMediaStreamTrack> audio_tracks2;
+ desc2.audioTracks(audio_tracks2);
+ MediaStreamTrack* audio_track2 = MediaStreamTrack::GetTrack(audio_tracks2[0]);
+ audio_track2->Stop();
EXPECT_EQ(1, ms_dispatcher_->stop_audio_device_counter());
+
+ blink::WebVector<blink::WebMediaStreamTrack> video_tracks1;
+ desc1.videoTracks(video_tracks1);
+ MediaStreamTrack* video_track1 = MediaStreamTrack::GetTrack(video_tracks1[0]);
+ video_track1->Stop();
+ EXPECT_EQ(0, ms_dispatcher_->stop_video_device_counter());
+
+ blink::WebVector<blink::WebMediaStreamTrack> video_tracks2;
+ desc2.videoTracks(video_tracks2);
+ MediaStreamTrack* video_track2 = MediaStreamTrack::GetTrack(video_tracks2[0]);
+ video_track2->Stop();
EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter());
}
-// Test that the source is stopped even if there are two MediaStreams using
-// the same source.
-TEST_F(MediaStreamImplTest, StopSource) {
+TEST_F(MediaStreamImplTest, StopSourceWhenMediaStreamGoesOutOfScope) {
// Generate a stream with both audio and video.
- blink::WebMediaStream desc1 = RequestLocalMediaStream();
- blink::WebMediaStream desc2 = RequestLocalMediaStream();
-
- // Stop the video source.
- blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
- desc1.videoTracks(video_tracks);
- ms_impl_->OnLocalSourceStopped(video_tracks[0].source());
- EXPECT_EQ(0, ms_dispatcher_->stop_audio_device_counter());
- EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter());
+ RequestLocalMediaStream();
+  // Make sure the test itself doesn't hold a reference to the created
+  // MediaStream.
+ ms_impl_->ClearLastGeneratedStream();
- // Stop the audio source.
- blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
- desc1.audioTracks(audio_tracks);
- ms_impl_->OnLocalSourceStopped(audio_tracks[0].source());
+ // Expect the sources to be stopped when the MediaStream goes out of scope.
EXPECT_EQ(1, ms_dispatcher_->stop_audio_device_counter());
EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter());
}
@@ -278,6 +300,7 @@ TEST_F(MediaStreamImplTest, StopSource) {
TEST_F(MediaStreamImplTest, FrameWillClose) {
// Test a stream with both audio and video.
blink::WebMediaStream mixed_desc = RequestLocalMediaStream();
+ blink::WebMediaStream desc2 = RequestLocalMediaStream();
// Test that the MediaStreams are deleted if the owning WebFrame is deleted.
// In the unit test the owning frame is NULL.
@@ -286,8 +309,8 @@ TEST_F(MediaStreamImplTest, FrameWillClose) {
EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter());
}
-// This test what happens if a source to a MediaSteam fails to start.
-TEST_F(MediaStreamImplTest, MediaSourceFailToStart) {
+// This tests what happens if a video source to a MediaStream fails to start.
+TEST_F(MediaStreamImplTest, MediaVideoSourceFailToStart) {
ms_impl_->RequestUserMedia();
FakeMediaStreamDispatcherComplete();
FailToStartMockedVideoSource();
@@ -300,22 +323,30 @@ TEST_F(MediaStreamImplTest, MediaSourceFailToStart) {
EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter());
}
-// This test what happens if MediaStreamImpl is deleted while the sources of a
-// MediaStream is being started.
+// This tests what happens if an audio source fails to initialize.
+TEST_F(MediaStreamImplTest, MediaAudioSourceFailToInitialize) {
+ FailToCreateNextAudioCapturer();
+ ms_impl_->RequestUserMedia();
+ FakeMediaStreamDispatcherComplete();
+ StartMockedVideoSource();
+ EXPECT_EQ(MediaStreamImplUnderTest::REQUEST_FAILED,
+ ms_impl_->request_state());
+ EXPECT_EQ(MEDIA_DEVICE_TRACK_START_FAILURE,
+ ms_impl_->error_reason());
+ EXPECT_EQ(1, ms_dispatcher_->request_stream_counter());
+ EXPECT_EQ(1, ms_dispatcher_->stop_audio_device_counter());
+ EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter());
+}
+
+// This tests what happens if MediaStreamImpl is deleted before a source has
+// started.
TEST_F(MediaStreamImplTest, MediaStreamImplShutDown) {
ms_impl_->RequestUserMedia();
FakeMediaStreamDispatcherComplete();
EXPECT_EQ(1, ms_dispatcher_->request_stream_counter());
EXPECT_EQ(MediaStreamImplUnderTest::REQUEST_NOT_COMPLETE,
ms_impl_->request_state());
-
- MockMediaStreamVideoCapturerSource* video_source =
- ms_impl_->last_created_video_source();
- // Hold on to a blink reference to the source to guarantee that its not
- // deleted when MediaStreamImpl is deleted.
- blink::WebMediaStreamSource blink_source = video_source->owner();
ms_impl_.reset();
- video_source->StartMockedSource();
}
// This test what happens if the WebFrame is closed while the MediaStream is
@@ -343,16 +374,25 @@ TEST_F(MediaStreamImplTest, ReloadFrameWhileGeneratingSources) {
ms_impl_->request_state());
}
-// This test what happens if stop is called on a stream after the frame has
+// This tests what happens if stop is called on a track after the frame has
// been reloaded.
-TEST_F(MediaStreamImplTest, StopStreamAfterReload) {
+TEST_F(MediaStreamImplTest, StopTrackAfterReload) {
blink::WebMediaStream mixed_desc = RequestLocalMediaStream();
EXPECT_EQ(1, ms_dispatcher_->request_stream_counter());
ms_impl_->FrameWillClose(NULL);
EXPECT_EQ(1, ms_dispatcher_->stop_audio_device_counter());
EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter());
- ms_impl_->OnLocalMediaStreamStop(mixed_desc.id().utf8());
+
+ blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
+ mixed_desc.audioTracks(audio_tracks);
+ MediaStreamTrack* audio_track = MediaStreamTrack::GetTrack(audio_tracks[0]);
+ audio_track->Stop();
EXPECT_EQ(1, ms_dispatcher_->stop_audio_device_counter());
+
+ blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
+ mixed_desc.videoTracks(video_tracks);
+ MediaStreamTrack* video_track = MediaStreamTrack::GetTrack(video_tracks[0]);
+ video_track->Stop();
EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter());
}
diff --git a/content/renderer/media/media_stream_track.cc b/content/renderer/media/media_stream_track.cc
index 1846b17..b5b2695 100644
--- a/content/renderer/media/media_stream_track.cc
+++ b/content/renderer/media/media_stream_track.cc
@@ -31,6 +31,15 @@ void MediaStreamTrack::SetEnabled(bool enabled) {
track_->set_enabled(enabled);
}
+void MediaStreamTrack::Stop() {
+ // Stop means that a track should be stopped permanently. But
+ // since there is no proper way of doing that on a remote track, we can
+ // at least disable the track. Blink will not call down to the content layer
+ // after a track has been stopped.
+ if (track_)
+ track_->set_enabled(false);
+}
+
webrtc::AudioTrackInterface* MediaStreamTrack::GetAudioAdapter() {
return static_cast<webrtc::AudioTrackInterface*>(track_.get());
}
diff --git a/content/renderer/media/media_stream_track.h b/content/renderer/media/media_stream_track.h
index e1e2e4f..5dfa41b 100644
--- a/content/renderer/media/media_stream_track.h
+++ b/content/renderer/media/media_stream_track.h
@@ -35,6 +35,10 @@ class CONTENT_EXPORT MediaStreamTrack
// If a subclass overrides this method it has to call the base class.
virtual void SetEnabled(bool enabled);
+ // TODO(xians): Make this pure virtual when Stop[Track] has been
+ // implemented for remote audio tracks.
+ virtual void Stop();
+
virtual webrtc::AudioTrackInterface* GetAudioAdapter();
virtual webrtc::VideoTrackInterface* GetVideoAdapter();
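Per the TODO above, the base-class Stop() can only disable a remote track, while local track types override it to actually detach from their source (as MediaStreamVideoTrack does later in this patch). A minimal sketch of that split, using hypothetical class names:

#include <cassert>

// Base behaviour: a remote track has no way to be stopped at the far end, so
// the best the base class can do is disable it permanently.
class TrackBase {
 public:
  virtual ~TrackBase() = default;
  virtual void Stop() { enabled_ = false; }
  bool enabled() const { return enabled_; }

 private:
  bool enabled_ = true;
};

// A local track knows its source and overrides Stop() to detach from it, so
// the source can shut down once its last track is gone.
class LocalTrack : public TrackBase {
 public:
  void Stop() override {
    detached_ = true;  // Stand-in for source_->RemoveTrack(this).
    TrackBase::Stop();
  }
  bool detached() const { return detached_; }

 private:
  bool detached_ = false;
};

int main() {
  TrackBase remote;
  remote.Stop();
  assert(!remote.enabled());

  LocalTrack local;
  local.Stop();
  assert(local.detached() && !local.enabled());
  return 0;
}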
diff --git a/content/renderer/media/media_stream_video_capture_source_unittest.cc b/content/renderer/media/media_stream_video_capture_source_unittest.cc
index 0a29e76..cfb5ff0 100644
--- a/content/renderer/media/media_stream_video_capture_source_unittest.cc
+++ b/content/renderer/media/media_stream_video_capture_source_unittest.cc
@@ -21,6 +21,7 @@ class MockVideoCapturerDelegate : public VideoCapturerDelegate {
void(const media::VideoCaptureParams& params,
const NewFrameCallback& new_frame_callback,
const StartedCallback& started_callback));
+ MOCK_METHOD0(StopDeliver,void());
private:
virtual ~MockVideoCapturerDelegate() {}
@@ -42,12 +43,12 @@ class MediaStreamVideoCapturerSourceTest : public testing::Test {
webkit_source_.setExtraData(source_);
}
- void StartSource() {
+ blink::WebMediaStreamTrack StartSource() {
MockMediaConstraintFactory factory;
bool enabled = true;
MediaStreamDependencyFactory* dependency_factory = NULL;
- // CreateVideoTrack will trigger OnSupportedFormats.
- MediaStreamVideoTrack::CreateVideoTrack(
+ // CreateVideoTrack will trigger OnConstraintsApplied.
+ return MediaStreamVideoTrack::CreateVideoTrack(
source_, factory.CreateWebMediaConstraints(),
base::Bind(
&MediaStreamVideoCapturerSourceTest::OnConstraintsApplied,
@@ -74,7 +75,9 @@ TEST_F(MediaStreamVideoCapturerSourceTest, TabCaptureAllowResolutionChange) {
testing::Field(&media::VideoCaptureParams::allow_resolution_change, true),
testing::_,
testing::_)).Times(1);
- StartSource();
+ blink::WebMediaStreamTrack track = StartSource();
+ // When the track goes out of scope, the source will be stopped.
+ EXPECT_CALL(*delegate_, StopDeliver());
}
TEST_F(MediaStreamVideoCapturerSourceTest,
@@ -87,7 +90,9 @@ TEST_F(MediaStreamVideoCapturerSourceTest,
testing::Field(&media::VideoCaptureParams::allow_resolution_change, true),
testing::_,
testing::_)).Times(1);
- StartSource();
+ blink::WebMediaStreamTrack track = StartSource();
+ // When the track goes out of scope, the source will be stopped.
+ EXPECT_CALL(*delegate_, StopDeliver());
}
} // namespace content
diff --git a/content/renderer/media/media_stream_video_capturer_source.cc b/content/renderer/media/media_stream_video_capturer_source.cc
index ccb1e4c..6359c45 100644
--- a/content/renderer/media/media_stream_video_capturer_source.cc
+++ b/content/renderer/media/media_stream_video_capturer_source.cc
@@ -99,7 +99,7 @@ void VideoCapturerDelegate::StartDeliver(
void VideoCapturerDelegate::StopDeliver() {
// Immediately make sure we don't provide more frames.
- DVLOG(3) << "VideoCapturerDelegate::StopCapture()";
+ DVLOG(3) << "VideoCapturerDelegate::StopDeliver()";
DCHECK(message_loop_proxy_->BelongsToCurrentThread());
capture_engine_->StopCapture(this);
new_frame_callback_.Reset();
diff --git a/content/renderer/media/media_stream_video_source.cc b/content/renderer/media/media_stream_video_source.cc
index 28d47d2..7a0ec71 100644
--- a/content/renderer/media/media_stream_video_source.cc
+++ b/content/renderer/media/media_stream_video_source.cc
@@ -27,14 +27,14 @@ const char MediaStreamVideoSource::kMaxFrameRate[] = "maxFrameRate";
const char MediaStreamVideoSource::kMinFrameRate[] = "minFrameRate";
const char* kSupportedConstraints[] = {
- MediaStreamVideoSource::kMaxAspectRatio,
- MediaStreamVideoSource::kMinAspectRatio,
- MediaStreamVideoSource::kMaxWidth,
- MediaStreamVideoSource::kMinWidth,
- MediaStreamVideoSource::kMaxHeight,
- MediaStreamVideoSource::kMinHeight,
- MediaStreamVideoSource::kMaxFrameRate,
- MediaStreamVideoSource::kMinFrameRate,
+ MediaStreamVideoSource::kMaxAspectRatio,
+ MediaStreamVideoSource::kMinAspectRatio,
+ MediaStreamVideoSource::kMaxWidth,
+ MediaStreamVideoSource::kMinWidth,
+ MediaStreamVideoSource::kMaxHeight,
+ MediaStreamVideoSource::kMinHeight,
+ MediaStreamVideoSource::kMaxFrameRate,
+ MediaStreamVideoSource::kMinFrameRate,
};
const int MediaStreamVideoSource::kDefaultWidth = 640;
@@ -353,10 +353,13 @@ void MediaStreamVideoSource::AddTrack(
}
void MediaStreamVideoSource::RemoveTrack(MediaStreamVideoTrack* video_track) {
+ DCHECK(CalledOnValidThread());
std::vector<MediaStreamVideoTrack*>::iterator it =
std::find(tracks_.begin(), tracks_.end(), video_track);
DCHECK(it != tracks_.end());
tracks_.erase(it);
+ if (tracks_.empty())
+ StopSource();
}
void MediaStreamVideoSource::InitAdapter() {
@@ -382,6 +385,7 @@ webrtc::VideoSourceInterface* MediaStreamVideoSource::GetAdapter() {
}
void MediaStreamVideoSource::DoStopSource() {
+ DCHECK(CalledOnValidThread());
DVLOG(3) << "DoStopSource()";
StopSourceImpl();
state_ = ENDED;
diff --git a/content/renderer/media/media_stream_video_source_unittest.cc b/content/renderer/media/media_stream_video_source_unittest.cc
index 2dae490..1b79ec1 100644
--- a/content/renderer/media/media_stream_video_source_unittest.cc
+++ b/content/renderer/media/media_stream_video_source_unittest.cc
@@ -101,7 +101,8 @@ class MediaStreamVideoSourceTest
int expected_height,
int expected_width) {
// Expect the source to start capture with the supported resolution.
- CreateTrackAndStartSource(constraints, capture_width, capture_height , 30);
+ blink::WebMediaStreamTrack track = CreateTrackAndStartSource(
+ constraints, capture_width, capture_height , 30);
ASSERT_TRUE(mock_source()->GetAdapter());
MockVideoSource* adapter = static_cast<MockVideoSource*>(
@@ -140,7 +141,7 @@ class MediaStreamVideoSourceTest
TEST_F(MediaStreamVideoSourceTest, AddTrackAndStartSource) {
blink::WebMediaConstraints constraints;
constraints.initialize();
- CreateTrack("123", constraints);
+ blink::WebMediaStreamTrack track = CreateTrack("123", constraints);
mock_source()->CompleteGetSupportedFormats();
mock_source()->StartMockedSource();
EXPECT_EQ(1, NumberOfSuccessConstraintsCallbacks());
@@ -160,18 +161,18 @@ TEST_F(MediaStreamVideoSourceTest, AddTwoTracksBeforeSourceStarts) {
TEST_F(MediaStreamVideoSourceTest, AddTrackAfterSourceStarts) {
blink::WebMediaConstraints constraints;
constraints.initialize();
- CreateTrack("123", constraints);
+ blink::WebMediaStreamTrack track1 = CreateTrack("123", constraints);
mock_source()->CompleteGetSupportedFormats();
mock_source()->StartMockedSource();
EXPECT_EQ(1, NumberOfSuccessConstraintsCallbacks());
- CreateTrack("123", constraints);
+ blink::WebMediaStreamTrack track2 = CreateTrack("123", constraints);
EXPECT_EQ(2, NumberOfSuccessConstraintsCallbacks());
}
TEST_F(MediaStreamVideoSourceTest, AddTrackAndFailToStartSource) {
blink::WebMediaConstraints constraints;
constraints.initialize();
- CreateTrack("123", constraints);
+ blink::WebMediaStreamTrack track = CreateTrack("123", constraints);
mock_source()->CompleteGetSupportedFormats();
mock_source()->FailToStartMockedSource();
EXPECT_EQ(1, NumberOfFailedConstraintsCallbacks());
@@ -180,8 +181,8 @@ TEST_F(MediaStreamVideoSourceTest, AddTrackAndFailToStartSource) {
TEST_F(MediaStreamVideoSourceTest, AddTwoTracksBeforeGetSupportedFormats) {
blink::WebMediaConstraints constraints;
constraints.initialize();
- CreateTrack("123", constraints);
- CreateTrack("123", constraints);
+ blink::WebMediaStreamTrack track1 = CreateTrack("123", constraints);
+ blink::WebMediaStreamTrack track2 = CreateTrack("123", constraints);
mock_source()->CompleteGetSupportedFormats();
mock_source()->StartMockedSource();
EXPECT_EQ(2, NumberOfSuccessConstraintsCallbacks());
@@ -230,7 +231,8 @@ TEST_F(MediaStreamVideoSourceTest, MandatoryAspectRatio4To3) {
TEST_F(MediaStreamVideoSourceTest, MandatoryAspectRatioTooHigh) {
MockMediaConstraintFactory factory;
factory.AddMandatory(MediaStreamVideoSource::kMinAspectRatio, 2);
- CreateTrack("123", factory.CreateWebMediaConstraints());
+ blink::WebMediaStreamTrack track = CreateTrack(
+ "123", factory.CreateWebMediaConstraints());
mock_source()->CompleteGetSupportedFormats();
EXPECT_EQ(1, NumberOfFailedConstraintsCallbacks());
}
@@ -240,7 +242,8 @@ TEST_F(MediaStreamVideoSourceTest, MandatoryAspectRatioTooHigh) {
TEST_F(MediaStreamVideoSourceTest, OptionalAspectRatioTooHigh) {
MockMediaConstraintFactory factory;
factory.AddOptional(MediaStreamVideoSource::kMinAspectRatio, 2);
- CreateTrack("123", factory.CreateWebMediaConstraints());
+ blink::WebMediaStreamTrack track = CreateTrack(
+ "123", factory.CreateWebMediaConstraints());
mock_source()->CompleteGetSupportedFormats();
const media::VideoCaptureParams& params = mock_source()->start_params();
@@ -272,7 +275,8 @@ TEST_F(MediaStreamVideoSourceTest, DefaultCapability) {
TEST_F(MediaStreamVideoSourceTest, InvalidMandatoryConstraint) {
MockMediaConstraintFactory factory;
factory.AddMandatory("weird key", 640);
- CreateTrack("123", factory.CreateWebMediaConstraints());
+ blink::WebMediaStreamTrack track = CreateTrack(
+ "123", factory.CreateWebMediaConstraints());
mock_source()->CompleteGetSupportedFormats();
EXPECT_EQ(1, NumberOfFailedConstraintsCallbacks());
}
@@ -299,7 +303,8 @@ TEST_F(MediaStreamVideoSourceTest, ScreencastResolutionWithConstraint) {
factory.AddMandatory(MediaStreamVideoSource::kMaxWidth, 480);
factory.AddMandatory(MediaStreamVideoSource::kMaxHeight, 270);
- CreateTrackAndStartSource(factory.CreateWebMediaConstraints(), 480, 270, 30);
+ blink::WebMediaStreamTrack track = CreateTrackAndStartSource(
+ factory.CreateWebMediaConstraints(), 480, 270, 30);
EXPECT_EQ(480, mock_source()->max_requested_height());
EXPECT_EQ(270, mock_source()->max_requested_width());
}
@@ -319,10 +324,11 @@ TEST_F(MediaStreamVideoSourceTest, OptionalConstraints) {
// video frames if the source deliver video frames.
TEST_F(MediaStreamVideoSourceTest, AdapterReceiveVideoFrame) {
MockMediaConstraintFactory factory;
- CreateTrackAndStartSource(factory.CreateWebMediaConstraints(),
- MediaStreamVideoSource::kDefaultWidth,
- MediaStreamVideoSource::kDefaultHeight,
- MediaStreamVideoSource::kDefaultFrameRate);
+ blink::WebMediaStreamTrack track = CreateTrackAndStartSource(
+ factory.CreateWebMediaConstraints(),
+ MediaStreamVideoSource::kDefaultWidth,
+ MediaStreamVideoSource::kDefaultHeight,
+ MediaStreamVideoSource::kDefaultFrameRate);
ASSERT_TRUE(mock_source()->GetAdapter());
MockVideoSource* adapter = static_cast<MockVideoSource*>(
mock_source()->GetAdapter());
@@ -455,7 +461,7 @@ TEST_F(MediaStreamVideoSourceTest, IsConstraintSupported) {
MediaStreamVideoSource::kMinAspectRatio));
EXPECT_FALSE(MediaStreamVideoSource::IsConstraintSupported(
- "googCpuAdaptation"));
+ "something unsupported"));
}
} // namespace content
diff --git a/content/renderer/media/media_stream_video_track.cc b/content/renderer/media/media_stream_video_track.cc
index 8d4b83d..3935e7b 100644
--- a/content/renderer/media/media_stream_video_track.cc
+++ b/content/renderer/media/media_stream_video_track.cc
@@ -46,7 +46,7 @@ MediaStreamVideoTrack::MediaStreamVideoTrack(
MediaStreamVideoTrack::~MediaStreamVideoTrack() {
DCHECK(sinks_.empty());
- source_->RemoveTrack(this);
+ Stop();
}
void MediaStreamVideoTrack::AddSink(MediaStreamVideoSink* sink) {
@@ -88,6 +88,15 @@ void MediaStreamVideoTrack::SetEnabled(bool enabled) {
}
}
+void MediaStreamVideoTrack::Stop() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ if (source_) {
+ source_->RemoveTrack(this);
+ source_ = NULL;
+ }
+ OnReadyStateChanged(blink::WebMediaStreamSource::ReadyStateEnded);
+}
+
void MediaStreamVideoTrack::OnVideoFrame(
const scoped_refptr<media::VideoFrame>& frame) {
DCHECK(thread_checker_.CalledOnValidThread());
diff --git a/content/renderer/media/media_stream_video_track.h b/content/renderer/media/media_stream_video_track.h
index 07307ae..9e5ef38 100644
--- a/content/renderer/media/media_stream_video_track.h
+++ b/content/renderer/media/media_stream_video_track.h
@@ -62,6 +62,7 @@ class CONTENT_EXPORT MediaStreamVideoTrack : public MediaStreamTrack {
// where the track is added to a RTCPeerConnection. crbug/323223.
virtual webrtc::VideoTrackInterface* GetVideoAdapter() OVERRIDE;
virtual void SetEnabled(bool enabled) OVERRIDE;
+ virtual void Stop() OVERRIDE;
void OnVideoFrame(const scoped_refptr<media::VideoFrame>& frame);
void OnReadyStateChanged(blink::WebMediaStreamSource::ReadyState state);
diff --git a/content/renderer/media/media_stream_video_track_unittest.cc b/content/renderer/media/media_stream_video_track_unittest.cc
index d2e8482..c2307c6 100644
--- a/content/renderer/media/media_stream_video_track_unittest.cc
+++ b/content/renderer/media/media_stream_video_track_unittest.cc
@@ -15,11 +15,12 @@ namespace content {
class MediaStreamVideoTrackTest : public ::testing::Test {
public:
MediaStreamVideoTrackTest()
- : mock_source_(new MockMediaStreamVideoSource(&factory_, false)) {
- webkit_source_.initialize(base::UTF8ToUTF16("dummy_source_id"),
+ : mock_source_(new MockMediaStreamVideoSource(&factory_, false)),
+ source_started_(false) {
+ blink_source_.initialize(base::UTF8ToUTF16("dummy_source_id"),
blink::WebMediaStreamSource::TypeVideo,
base::UTF8ToUTF16("dummy_source_name"));
- webkit_source_.setExtraData(mock_source_);
+ blink_source_.setExtraData(mock_source_);
}
protected:
@@ -32,18 +33,24 @@ class MediaStreamVideoTrackTest : public ::testing::Test {
MediaStreamVideoTrack::CreateVideoTrack(
mock_source_, constraints,
MediaStreamSource::ConstraintsCallback(), enabled, &factory_);
- mock_source_->StartMockedSource();
-
+ if (!source_started_) {
+ mock_source_->StartMockedSource();
+ source_started_ = true;
+ }
return track;
}
MockMediaStreamVideoSource* mock_source() { return mock_source_; }
+ const blink::WebMediaStreamSource& blink_source() const {
+ return blink_source_;
+ }
private:
MockMediaStreamDependencyFactory factory_;
- blink::WebMediaStreamSource webkit_source_;
+ blink::WebMediaStreamSource blink_source_;
// |mock_source_| is owned by |webkit_source_|.
MockMediaStreamVideoSource* mock_source_;
+ bool source_started_;
};
TEST_F(MediaStreamVideoTrackTest, GetAdapter) {
@@ -111,4 +118,35 @@ TEST_F(MediaStreamVideoTrackTest, SourceStopped) {
MediaStreamVideoSink::RemoveFromVideoTrack(&sink, track);
}
+TEST_F(MediaStreamVideoTrackTest, StopLastTrack) {
+ MockMediaStreamVideoSink sink1;
+ blink::WebMediaStreamTrack track1 = CreateTrack();
+ MediaStreamVideoSink::AddToVideoTrack(&sink1, track1);
+ EXPECT_EQ(blink::WebMediaStreamSource::ReadyStateLive, sink1.state());
+
+ EXPECT_EQ(blink::WebMediaStreamSource::ReadyStateLive,
+ blink_source().readyState());
+
+ MockMediaStreamVideoSink sink2;
+ blink::WebMediaStreamTrack track2 = CreateTrack();
+ MediaStreamVideoSink::AddToVideoTrack(&sink2, track2);
+ EXPECT_EQ(blink::WebMediaStreamSource::ReadyStateLive, sink2.state());
+
+ MediaStreamVideoTrack* native_track1 =
+ MediaStreamVideoTrack::GetVideoTrack(track1);
+ native_track1->Stop();
+ EXPECT_EQ(blink::WebMediaStreamSource::ReadyStateEnded, sink1.state());
+ EXPECT_EQ(blink::WebMediaStreamSource::ReadyStateLive,
+ blink_source().readyState());
+ MediaStreamVideoSink::RemoveFromVideoTrack(&sink1, track1);
+
+ MediaStreamVideoTrack* native_track2 =
+ MediaStreamVideoTrack::GetVideoTrack(track2);
+ native_track2->Stop();
+ EXPECT_EQ(blink::WebMediaStreamSource::ReadyStateEnded, sink2.state());
+ EXPECT_EQ(blink::WebMediaStreamSource::ReadyStateEnded,
+ blink_source().readyState());
+ MediaStreamVideoSink::RemoveFromVideoTrack(&sink2, track2);
+}
+
} // namespace content
diff --git a/content/renderer/media/mock_media_stream_dependency_factory.cc b/content/renderer/media/mock_media_stream_dependency_factory.cc
index d07cb3a..9816113 100644
--- a/content/renderer/media/mock_media_stream_dependency_factory.cc
+++ b/content/renderer/media/mock_media_stream_dependency_factory.cc
@@ -11,6 +11,7 @@
#include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
#include "content/renderer/media/webrtc_audio_capturer.h"
+#include "content/renderer/media/webrtc_local_audio_track.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"
#include "third_party/libjingle/source/talk/base/scoped_ref_ptr.h"
@@ -284,7 +285,7 @@ int MockVideoSource::GetFrameNum() const {
}
MockWebRtcVideoTrack::MockWebRtcVideoTrack(
- std::string id,
+ const std::string& id,
webrtc::VideoSourceInterface* source)
: enabled_(false),
id_(id),
@@ -430,7 +431,8 @@ class MockIceCandidate : public IceCandidateInterface {
};
MockMediaStreamDependencyFactory::MockMediaStreamDependencyFactory()
- : MediaStreamDependencyFactory(NULL) {
+ : MediaStreamDependencyFactory(NULL),
+ fail_to_create_next_audio_capturer_(false) {
}
MockMediaStreamDependencyFactory::~MockMediaStreamDependencyFactory() {}
@@ -520,14 +522,20 @@ MockMediaStreamDependencyFactory::CreateIceCandidate(
scoped_refptr<WebRtcAudioCapturer>
MockMediaStreamDependencyFactory::CreateAudioCapturer(
int render_view_id, const StreamDeviceInfo& device_info,
- const blink::WebMediaConstraints& constraints) {
+ const blink::WebMediaConstraints& constraints,
+ MediaStreamAudioSource* audio_source) {
+ if (fail_to_create_next_audio_capturer_) {
+ fail_to_create_next_audio_capturer_ = false;
+ return NULL;
+ }
+ DCHECK(audio_source);
return WebRtcAudioCapturer::CreateCapturer(-1, device_info,
- constraints, NULL);
+ constraints, NULL, audio_source);
}
void MockMediaStreamDependencyFactory::StartLocalAudioTrack(
WebRtcLocalAudioTrack* audio_track) {
- return;
+ audio_track->Start();
}
} // namespace content
diff --git a/content/renderer/media/mock_media_stream_dependency_factory.h b/content/renderer/media/mock_media_stream_dependency_factory.h
index a2cce87..4bbad2a 100644
--- a/content/renderer/media/mock_media_stream_dependency_factory.h
+++ b/content/renderer/media/mock_media_stream_dependency_factory.h
@@ -106,7 +106,7 @@ class MockAudioSource : public webrtc::AudioSourceInterface {
class MockWebRtcVideoTrack : public webrtc::VideoTrackInterface {
public:
- MockWebRtcVideoTrack(std::string id,
+ MockWebRtcVideoTrack(const std::string& id,
webrtc::VideoSourceInterface* source);
virtual void AddRenderer(webrtc::VideoRendererInterface* renderer) OVERRIDE;
virtual void RemoveRenderer(
@@ -207,7 +207,11 @@ class MockMediaStreamDependencyFactory : public MediaStreamDependencyFactory {
virtual scoped_refptr<WebRtcAudioCapturer> CreateAudioCapturer(
int render_view_id, const StreamDeviceInfo& device_info,
- const blink::WebMediaConstraints& constraints) OVERRIDE;
+ const blink::WebMediaConstraints& constraints,
+ MediaStreamAudioSource* audio_source) OVERRIDE;
+ void FailToCreateNextAudioCapturer() {
+ fail_to_create_next_audio_capturer_ = true;
+ }
virtual void StartLocalAudioTrack(
WebRtcLocalAudioTrack* audio_track) OVERRIDE;
@@ -216,6 +220,7 @@ class MockMediaStreamDependencyFactory : public MediaStreamDependencyFactory {
MockVideoSource* last_video_source() { return last_video_source_.get(); }
private:
+ bool fail_to_create_next_audio_capturer_;
scoped_refptr <MockAudioSource> last_audio_source_;
scoped_refptr <MockVideoSource> last_video_source_;
diff --git a/content/renderer/media/mock_media_stream_registry.cc b/content/renderer/media/mock_media_stream_registry.cc
index 6b990bc..0cc392c 100644
--- a/content/renderer/media/mock_media_stream_registry.cc
+++ b/content/renderer/media/mock_media_stream_registry.cc
@@ -29,7 +29,6 @@ void MockMediaStreamRegistry::Init(const std::string& stream_url) {
blink::WebString label(kTestStreamLabel);
test_stream_.initialize(label, webkit_audio_tracks, webkit_video_tracks);
test_stream_.setExtraData(new MediaStream(&dependency_factory_,
- MediaStream::StreamStopCallback(),
test_stream_));
}
diff --git a/content/renderer/media/rtc_peer_connection_handler_unittest.cc b/content/renderer/media/rtc_peer_connection_handler_unittest.cc
index 17cf4e8..405b691 100644
--- a/content/renderer/media/rtc_peer_connection_handler_unittest.cc
+++ b/content/renderer/media/rtc_peer_connection_handler_unittest.cc
@@ -258,7 +258,6 @@ class RTCPeerConnectionHandlerTest : public ::testing::Test {
video_tracks);
local_stream.setExtraData(
new MediaStream(mock_dependency_factory_.get(),
- MediaStream::StreamStopCallback(),
local_stream));
return local_stream;
}
diff --git a/content/renderer/media/webrtc/media_stream_remote_video_source.cc b/content/renderer/media/webrtc/media_stream_remote_video_source.cc
index 70446e4..d7c00be 100644
--- a/content/renderer/media/webrtc/media_stream_remote_video_source.cc
+++ b/content/renderer/media/webrtc/media_stream_remote_video_source.cc
@@ -47,8 +47,10 @@ void MediaStreamRemoteVideoSource::StartSourceImpl(
void MediaStreamRemoteVideoSource::StopSourceImpl() {
DCHECK(message_loop_proxy_->BelongsToCurrentThread());
- remote_track_->RemoveRenderer(this);
- remote_track_->UnregisterObserver(this);
+ if (state() != MediaStreamVideoSource::ENDED) {
+ remote_track_->RemoveRenderer(this);
+ remote_track_->UnregisterObserver(this);
+ }
}
webrtc::VideoSourceInterface* MediaStreamRemoteVideoSource::GetAdapter() {
diff --git a/content/renderer/media/webrtc/webrtc_local_audio_track_adapter_unittest.cc b/content/renderer/media/webrtc/webrtc_local_audio_track_adapter_unittest.cc
index a937d6d..4fe1eaf 100644
--- a/content/renderer/media/webrtc/webrtc_local_audio_track_adapter_unittest.cc
+++ b/content/renderer/media/webrtc/webrtc_local_audio_track_adapter_unittest.cc
@@ -36,7 +36,7 @@ class WebRtcLocalAudioTrackAdapterTest : public ::testing::Test {
adapter_(WebRtcLocalAudioTrackAdapter::Create(std::string(), NULL)),
capturer_(WebRtcAudioCapturer::CreateCapturer(
-1, StreamDeviceInfo(MEDIA_DEVICE_AUDIO_CAPTURE, "", ""),
- blink::WebMediaConstraints(), NULL)),
+ blink::WebMediaConstraints(), NULL, NULL)),
track_(new WebRtcLocalAudioTrack(adapter_, capturer_, NULL)) {}
protected:
diff --git a/content/renderer/media/webrtc_audio_capturer.cc b/content/renderer/media/webrtc_audio_capturer.cc
index 67882a1..5169c27 100644
--- a/content/renderer/media/webrtc_audio_capturer.cc
+++ b/content/renderer/media/webrtc_audio_capturer.cc
@@ -13,6 +13,7 @@
#include "content/renderer/media/audio_device_factory.h"
#include "content/renderer/media/media_stream_audio_processor.h"
#include "content/renderer/media/media_stream_audio_processor_options.h"
+#include "content/renderer/media/media_stream_audio_source.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "content/renderer/media/webrtc_local_audio_track.h"
#include "content/renderer/media/webrtc_logging.h"
@@ -131,9 +132,10 @@ class WebRtcAudioCapturer::TrackOwner
scoped_refptr<WebRtcAudioCapturer> WebRtcAudioCapturer::CreateCapturer(
int render_view_id, const StreamDeviceInfo& device_info,
const blink::WebMediaConstraints& constraints,
- WebRtcAudioDeviceImpl* audio_device) {
+ WebRtcAudioDeviceImpl* audio_device,
+ MediaStreamAudioSource* audio_source) {
scoped_refptr<WebRtcAudioCapturer> capturer = new WebRtcAudioCapturer(
- render_view_id, device_info, constraints, audio_device);
+ render_view_id, device_info, constraints, audio_device, audio_source);
if (capturer->Initialize())
return capturer;
@@ -216,7 +218,8 @@ WebRtcAudioCapturer::WebRtcAudioCapturer(
int render_view_id,
const StreamDeviceInfo& device_info,
const blink::WebMediaConstraints& constraints,
- WebRtcAudioDeviceImpl* audio_device)
+ WebRtcAudioDeviceImpl* audio_device,
+ MediaStreamAudioSource* audio_source)
: constraints_(constraints),
audio_processor_(
new talk_base::RefCountedObject<MediaStreamAudioProcessor>(
@@ -230,6 +233,7 @@ WebRtcAudioCapturer::WebRtcAudioCapturer(
key_pressed_(false),
need_audio_processing_(false),
audio_device_(audio_device),
+ audio_source_(audio_source),
audio_power_monitor_(
device_info_.device.input.sample_rate,
base::TimeDelta::FromMilliseconds(kPowerMonitorTimeConstantMs)) {
@@ -239,8 +243,8 @@ WebRtcAudioCapturer::WebRtcAudioCapturer(
WebRtcAudioCapturer::~WebRtcAudioCapturer() {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(tracks_.IsEmpty());
- DCHECK(!running_);
DVLOG(1) << "WebRtcAudioCapturer::~WebRtcAudioCapturer()";
+ Stop();
}
void WebRtcAudioCapturer::AddTrack(WebRtcLocalAudioTrack* track) {
@@ -257,25 +261,34 @@ void WebRtcAudioCapturer::AddTrack(WebRtcLocalAudioTrack* track) {
scoped_refptr<TrackOwner> track_owner(new TrackOwner(track));
tracks_.AddAndTag(track_owner);
}
-
- // Start the source if the first audio track is connected to the capturer.
- // Start() will do nothing if the capturer has already been started.
- Start();
-
}
void WebRtcAudioCapturer::RemoveTrack(WebRtcLocalAudioTrack* track) {
DCHECK(thread_checker_.CalledOnValidThread());
- base::AutoLock auto_lock(lock_);
+ DVLOG(1) << "WebRtcAudioCapturer::RemoveTrack()";
+ bool stop_source = false;
+ {
+ base::AutoLock auto_lock(lock_);
- scoped_refptr<TrackOwner> removed_item =
- tracks_.Remove(TrackOwner::TrackWrapper(track));
+ scoped_refptr<TrackOwner> removed_item =
+ tracks_.Remove(TrackOwner::TrackWrapper(track));
- // Clear the delegate to ensure that no more capture callbacks will
- // be sent to this sink. Also avoids a possible crash which can happen
- // if this method is called while capturing is active.
- if (removed_item.get())
- removed_item->Reset();
+ // Clear the delegate to ensure that no more capture callbacks will
+ // be sent to this sink. Also avoids a possible crash which can happen
+ // if this method is called while capturing is active.
+ if (removed_item.get()) {
+ removed_item->Reset();
+ stop_source = tracks_.IsEmpty();
+ }
+ }
+ if (stop_source) {
+ // Since WebRtcAudioCapturer does not inherit MediaStreamAudioSource,
+ // and instead MediaStreamAudioSource is composed of a WebRtcAudioCapturer,
+ // we have to call StopSource on the MediaStreamSource. This will call
+ // MediaStreamAudioSource::DoStopSource, which in turn calls
+ // WebRtcAudioCapturer::Stop().
+ audio_source_->StopSource();
+ }
}
void WebRtcAudioCapturer::SetCapturerSource(
@@ -286,7 +299,6 @@ void WebRtcAudioCapturer::SetCapturerSource(
DVLOG(1) << "SetCapturerSource(channel_layout=" << channel_layout << ","
<< "sample_rate=" << sample_rate << ")";
scoped_refptr<media::AudioCapturerSource> old_source;
- bool restart_source = false;
{
base::AutoLock auto_lock(lock_);
if (source_.get() == source.get())
@@ -296,7 +308,6 @@ void WebRtcAudioCapturer::SetCapturerSource(
source_ = source;
// Reset the flag to allow starting the new source.
- restart_source = running_;
running_ = false;
}
@@ -329,8 +340,7 @@ void WebRtcAudioCapturer::SetCapturerSource(
if (source.get())
source->Initialize(params, this, session_id());
- if (restart_source)
- Start();
+ Start();
}
void WebRtcAudioCapturer::EnablePeerConnectionMode() {
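The RemoveTrack() comment above describes the new shutdown path: when the last track is removed, the capturer no longer stops itself directly; it asks the MediaStreamAudioSource that holds it to stop, and that source calls back into WebRtcAudioCapturer::Stop(). The standalone sketch below (not Chromium code; every class and name in it is an illustrative stand-in) models that last-track-stops-the-source behaviour. Note that the real RemoveTrack() decides whether to stop under |lock_| but calls StopSource() only after releasing the lock, since StopSource() re-enters the capturer.

  #include <algorithm>
  #include <iostream>
  #include <vector>

  class AudioCapturer;  // Stand-in for WebRtcAudioCapturer.

  class AudioSource {   // Stand-in for MediaStreamAudioSource.
   public:
    void SetCapturer(AudioCapturer* capturer) { capturer_ = capturer; }
    void StopSource();  // Stand-in for MediaStreamSource::StopSource().
   private:
    AudioCapturer* capturer_ = nullptr;
  };

  class AudioCapturer {
   public:
    explicit AudioCapturer(AudioSource* source) : source_(source) {}
    void AddTrack(int id) { tracks_.push_back(id); }
    void RemoveTrack(int id) {
      tracks_.erase(std::remove(tracks_.begin(), tracks_.end(), id),
                    tracks_.end());
      if (tracks_.empty())
        source_->StopSource();  // Last track removed: stop via the source.
    }
    void Stop() { std::cout << "capturer stopped" << std::endl; }
   private:
    AudioSource* source_;
    std::vector<int> tracks_;
  };

  void AudioSource::StopSource() { capturer_->Stop(); }

  int main() {
    AudioSource source;
    AudioCapturer capturer(&source);
    source.SetCapturer(&capturer);
    capturer.AddTrack(1);
    capturer.AddTrack(2);
    capturer.RemoveTrack(1);  // One track still attached; nothing stops.
    capturer.RemoveTrack(2);  // Prints "capturer stopped".
    return 0;
  }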
diff --git a/content/renderer/media/webrtc_audio_capturer.h b/content/renderer/media/webrtc_audio_capturer.h
index 63c846f..6ac89d5 100644
--- a/content/renderer/media/webrtc_audio_capturer.h
+++ b/content/renderer/media/webrtc_audio_capturer.h
@@ -28,6 +28,7 @@ class AudioBus;
namespace content {
class MediaStreamAudioProcessor;
+class MediaStreamAudioSource;
class WebRtcAudioDeviceImpl;
class WebRtcLocalAudioRenderer;
class WebRtcLocalAudioTrack;
@@ -57,7 +58,8 @@ class CONTENT_EXPORT WebRtcAudioCapturer
int render_view_id,
const StreamDeviceInfo& device_info,
const blink::WebMediaConstraints& constraints,
- WebRtcAudioDeviceImpl* audio_device);
+ WebRtcAudioDeviceImpl* audio_device,
+ MediaStreamAudioSource* audio_source);
// Add an audio track to the sinks of the capturer.
@@ -100,8 +102,8 @@ class CONTENT_EXPORT WebRtcAudioCapturer
// Stops recording audio. This method will empty its track lists since
// stopping the capturer will implicitly invalidate all its tracks.
- // This method is exposed to the public because the media stream track can
- // call Stop() on its source.
+ // This method is exposed to the public because the MediaStreamAudioSource
+ // can call Stop() on this capturer.
void Stop();
// Called by the WebAudioCapturerSource to get the audio processing params.
@@ -129,7 +131,8 @@ class CONTENT_EXPORT WebRtcAudioCapturer
WebRtcAudioCapturer(int render_view_id,
const StreamDeviceInfo& device_info,
const blink::WebMediaConstraints& constraints,
- WebRtcAudioDeviceImpl* audio_device);
+ WebRtcAudioDeviceImpl* audio_device,
+ MediaStreamAudioSource* audio_source);
// AudioCapturerSource::CaptureCallback implementation.
// Called on the AudioInputDevice audio thread.
@@ -208,7 +211,16 @@ class CONTENT_EXPORT WebRtcAudioCapturer
// of RenderThread.
WebRtcAudioDeviceImpl* audio_device_;
- // Audio power monitor for logging audio power level.
+ // Raw pointer to the MediaStreamAudioSource object that holds a reference
+ // to this WebRtcAudioCapturer.
+ // Since |audio_source_| is owned by a blink::WebMediaStreamSource object and
+ // blink guarantees that the blink::WebMediaStreamSource outlives any
+ // blink::WebMediaStreamTrack connected to the source, |audio_source_| is
+ // guaranteed to exist as long as a WebRtcLocalAudioTrack is connected to this
+ // WebRtcAudioCapturer.
+ MediaStreamAudioSource* const audio_source_;
+
+ // Audio power monitor for logging audio power level.
media::AudioPowerMonitor audio_power_monitor_;
// Records the last time the audio power level was logged.
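With the new MediaStreamAudioSource* parameter, a caller that creates a capturer is expected to pass in the MediaStreamAudioSource that will hold it and then hand the capturer back to that source, which is the wiring the updated unit tests below use. A minimal sketch of such a call site, assuming |audio_source| is the MediaStreamAudioSource attached to the blink::WebMediaStreamSource and the remaining arguments are placeholders:

  // Sketch of a call site; all arguments here are placeholders.
  scoped_refptr<WebRtcAudioCapturer> capturer =
      WebRtcAudioCapturer::CreateCapturer(render_view_id, device_info,
                                          constraints, audio_device,
                                          audio_source);
  // CreateCapturer() only returns the capturer if Initialize() succeeded.
  if (capturer.get())
    audio_source->SetAudioCapturer(capturer);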
diff --git a/content/renderer/media/webrtc_audio_capturer_unittest.cc b/content/renderer/media/webrtc_audio_capturer_unittest.cc
index d0bcf29..8b67aea 100644
--- a/content/renderer/media/webrtc_audio_capturer_unittest.cc
+++ b/content/renderer/media/webrtc_audio_capturer_unittest.cc
@@ -96,13 +96,13 @@ class WebRtcAudioCapturerTest : public testing::Test {
"", "", params_.sample_rate(),
params_.channel_layout(),
params_.frames_per_buffer()),
- constraints, NULL);
+ constraints, NULL, NULL);
capturer_source_ = new MockCapturerSource();
EXPECT_CALL(*capturer_source_.get(), Initialize(_, capturer_.get(), -1));
- capturer_->SetCapturerSourceForTesting(capturer_source_, params_);
-
EXPECT_CALL(*capturer_source_.get(), SetAutomaticGainControl(true));
EXPECT_CALL(*capturer_source_.get(), Start());
+ capturer_->SetCapturerSourceForTesting(capturer_source_, params_);
+
scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
WebRtcLocalAudioTrackAdapter::Create(std::string(), NULL));
track_.reset(new WebRtcLocalAudioTrack(adapter, capturer_, NULL));
diff --git a/content/renderer/media/webrtc_local_audio_source_provider_unittest.cc b/content/renderer/media/webrtc_local_audio_source_provider_unittest.cc
index 8b0fd6b..4a85903 100644
--- a/content/renderer/media/webrtc_local_audio_source_provider_unittest.cc
+++ b/content/renderer/media/webrtc_local_audio_source_provider_unittest.cc
@@ -32,7 +32,7 @@ class WebRtcLocalAudioSourceProviderTest : public testing::Test {
blink::WebMediaConstraints constraints;
scoped_refptr<WebRtcAudioCapturer> capturer(
WebRtcAudioCapturer::CreateCapturer(-1, StreamDeviceInfo(),
- constraints, NULL));
+ constraints, NULL, NULL));
scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
WebRtcLocalAudioTrackAdapter::Create(std::string(), NULL));
scoped_ptr<WebRtcLocalAudioTrack> native_track(
diff --git a/content/renderer/media/webrtc_local_audio_track.h b/content/renderer/media/webrtc_local_audio_track.h
index c6ba35c..c2cb81b 100644
--- a/content/renderer/media/webrtc_local_audio_track.h
+++ b/content/renderer/media/webrtc_local_audio_track.h
@@ -62,7 +62,7 @@ class CONTENT_EXPORT WebRtcLocalAudioTrack
// Stops the local audio track. Called on the main render thread and
// should be called only once when the audio track is going away.
- void Stop();
+ virtual void Stop() OVERRIDE;
// Method called by the capturer to deliver the capture data.
// Called on the capture audio thread.
diff --git a/content/renderer/media/webrtc_local_audio_track_unittest.cc b/content/renderer/media/webrtc_local_audio_track_unittest.cc
index a30786b..9bc9ff4c 100644
--- a/content/renderer/media/webrtc_local_audio_track_unittest.cc
+++ b/content/renderer/media/webrtc_local_audio_track_unittest.cc
@@ -4,6 +4,7 @@
#include "base/synchronization/waitable_event.h"
#include "base/test/test_timeouts.h"
+#include "content/renderer/media/media_stream_audio_source.h"
#include "content/renderer/media/mock_media_constraint_factory.h"
#include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
#include "content/renderer/media/webrtc_audio_capturer.h"
@@ -34,12 +35,12 @@ ACTION_P(SignalEvent, event) {
// the |WebRtcAudioCapturer|.
class FakeAudioThread : public base::PlatformThread::Delegate {
public:
- FakeAudioThread(const scoped_refptr<WebRtcAudioCapturer>& capturer,
+ FakeAudioThread(WebRtcAudioCapturer* capturer,
const media::AudioParameters& params)
: capturer_(capturer),
thread_(),
closure_(false, false) {
- DCHECK(capturer.get());
+ DCHECK(capturer);
audio_bus_ = media::AudioBus::Create(params);
}
@@ -53,7 +54,7 @@ class FakeAudioThread : public base::PlatformThread::Delegate {
media::AudioCapturerSource::CaptureCallback* callback =
static_cast<media::AudioCapturerSource::CaptureCallback*>(
- capturer_.get());
+ capturer_);
audio_bus_->Zero();
callback->Capture(audio_bus_.get(), 0, 0, false);
@@ -76,7 +77,7 @@ class FakeAudioThread : public base::PlatformThread::Delegate {
private:
scoped_ptr<media::AudioBus> audio_bus_;
- scoped_refptr<WebRtcAudioCapturer> capturer_;
+ WebRtcAudioCapturer* capturer_;
base::PlatformThreadHandle thread_;
base::WaitableEvent closure_;
DISALLOW_COPY_AND_ASSIGN(FakeAudioThread);
@@ -170,17 +171,27 @@ class WebRtcLocalAudioTrackTest : public ::testing::Test {
params_.Reset(media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
media::CHANNEL_LAYOUT_STEREO, 2, 0, 48000, 16, 480);
blink::WebMediaConstraints constraints;
+ blink_source_.initialize("dummy", blink::WebMediaStreamSource::TypeAudio,
+ "dummy");
+ MediaStreamAudioSource* audio_source = new MediaStreamAudioSource();
+ blink_source_.setExtraData(audio_source);
+
StreamDeviceInfo device(MEDIA_DEVICE_AUDIO_CAPTURE,
std::string(), std::string());
capturer_ = WebRtcAudioCapturer::CreateCapturer(-1, device,
- constraints, NULL);
+ constraints, NULL,
+ audio_source);
+ audio_source->SetAudioCapturer(capturer_);
capturer_source_ = new MockCapturerSource(capturer_.get());
EXPECT_CALL(*capturer_source_.get(), OnInitialize(_, capturer_.get(), -1))
.WillOnce(Return());
+ EXPECT_CALL(*capturer_source_.get(), SetAutomaticGainControl(true));
+ EXPECT_CALL(*capturer_source_.get(), OnStart());
capturer_->SetCapturerSourceForTesting(capturer_source_, params_);
}
media::AudioParameters params_;
+ blink::WebMediaStreamSource blink_source_;
scoped_refptr<MockCapturerSource> capturer_source_;
scoped_refptr<WebRtcAudioCapturer> capturer_;
};
@@ -190,8 +201,6 @@ class WebRtcLocalAudioTrackTest : public ::testing::Test {
// get data callback when the track is connected to the capturer but not when
// the track is disconnected from the capturer.
TEST_F(WebRtcLocalAudioTrackTest, ConnectAndDisconnectOneSink) {
- EXPECT_CALL(*capturer_source_.get(), SetAutomaticGainControl(true));
- EXPECT_CALL(*capturer_source_.get(), OnStart());
scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
WebRtcLocalAudioTrackAdapter::Create(std::string(), NULL));
scoped_ptr<WebRtcLocalAudioTrack> track(
@@ -269,8 +278,6 @@ TEST_F(WebRtcLocalAudioTrackTest, DISABLED_DisableEnableAudioTrack) {
// callbacks appear/disappear.
// Flaky due to a data race, see http://crbug.com/295418
TEST_F(WebRtcLocalAudioTrackTest, DISABLED_MultipleAudioTracks) {
- EXPECT_CALL(*capturer_source_.get(), SetAutomaticGainControl(true));
- EXPECT_CALL(*capturer_source_.get(), OnStart());
scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter_1(
WebRtcLocalAudioTrackAdapter::Create(std::string(), NULL));
scoped_ptr<WebRtcLocalAudioTrack> track_1(
@@ -326,16 +333,12 @@ TEST_F(WebRtcLocalAudioTrackTest, DISABLED_MultipleAudioTracks) {
track_2->RemoveSink(sink_2.get());
track_2->Stop();
track_2.reset();
-
- capturer_->Stop();
}
// Start one track and verify the capturer is correctly starting its source.
// And it should be fine not to call Stop() explicitly.
TEST_F(WebRtcLocalAudioTrackTest, StartOneAudioTrack) {
- EXPECT_CALL(*capturer_source_.get(), SetAutomaticGainControl(true));
- EXPECT_CALL(*capturer_source_.get(), OnStart());
scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
WebRtcLocalAudioTrackAdapter::Create(std::string(), NULL));
scoped_ptr<WebRtcLocalAudioTrack> track(
@@ -345,17 +348,36 @@ TEST_F(WebRtcLocalAudioTrackTest, StartOneAudioTrack) {
// When the track goes away, it will automatically stop the
// |capturer_source_|.
EXPECT_CALL(*capturer_source_.get(), OnStop());
- capturer_->Stop();
track.reset();
}
+// Start two tracks and verify the capturer is correctly starting its source.
+// When the last track connected to the capturer is stopped, the source is
+// stopped.
+TEST_F(WebRtcLocalAudioTrackTest, StartTwoAudioTracks) {
+ scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter1(
+ WebRtcLocalAudioTrackAdapter::Create(std::string(), NULL));
+ scoped_ptr<WebRtcLocalAudioTrack> track1(
+ new WebRtcLocalAudioTrack(adapter1, capturer_, NULL));
+ track1->Start();
+
+ scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter2(
+ WebRtcLocalAudioTrackAdapter::Create(std::string(), NULL));
+ scoped_ptr<WebRtcLocalAudioTrack> track2(
+ new WebRtcLocalAudioTrack(adapter2, capturer_, NULL));
+ track2->Start();
+
+ track1->Stop();
+ // When the last track is stopped, it will automatically stop the
+ // |capturer_source_|.
+ EXPECT_CALL(*capturer_source_.get(), OnStop());
+ track2->Stop();
+}
+
// Start/Stop tracks and verify the capturer is correctly starting/stopping
// its source.
TEST_F(WebRtcLocalAudioTrackTest, StartAndStopAudioTracks) {
- // Starting the first audio track will start the |capturer_source_|.
base::WaitableEvent event(false, false);
- EXPECT_CALL(*capturer_source_.get(), SetAutomaticGainControl(true));
- EXPECT_CALL(*capturer_source_.get(), OnStart()).WillOnce(SignalEvent(&event));
scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter_1(
WebRtcLocalAudioTrackAdapter::Create(std::string(), NULL));
scoped_ptr<WebRtcLocalAudioTrack> track_1(
@@ -363,7 +385,6 @@ TEST_F(WebRtcLocalAudioTrackTest, StartAndStopAudioTracks) {
static_cast<webrtc::AudioTrackInterface*>(
adapter_1.get())->GetRenderer()->AddChannel(0);
track_1->Start();
- EXPECT_TRUE(event.TimedWait(TestTimeouts::tiny_timeout()));
// Verify the data flow by connecting the sink to |track_1|.
scoped_ptr<MockMediaStreamAudioSink> sink(new MockMediaStreamAudioSink());
@@ -403,8 +424,6 @@ TEST_F(WebRtcLocalAudioTrackTest, StartAndStopAudioTracks) {
// Create a new capturer with new source, connect it to a new audio track.
TEST_F(WebRtcLocalAudioTrackTest, ConnectTracksToDifferentCapturers) {
// Setup the first audio track and start it.
- EXPECT_CALL(*capturer_source_.get(), SetAutomaticGainControl(true));
- EXPECT_CALL(*capturer_source_.get(), OnStart());
scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter_1(
WebRtcLocalAudioTrackAdapter::Create(std::string(), NULL));
scoped_ptr<WebRtcLocalAudioTrack> track_1(
@@ -431,18 +450,19 @@ TEST_F(WebRtcLocalAudioTrackTest, ConnectTracksToDifferentCapturers) {
StreamDeviceInfo device(MEDIA_DEVICE_AUDIO_CAPTURE,
std::string(), std::string());
scoped_refptr<WebRtcAudioCapturer> new_capturer(
- WebRtcAudioCapturer::CreateCapturer(-1, device, constraints, NULL));
+ WebRtcAudioCapturer::CreateCapturer(-1, device, constraints, NULL, NULL));
scoped_refptr<MockCapturerSource> new_source(
new MockCapturerSource(new_capturer.get()));
EXPECT_CALL(*new_source.get(), OnInitialize(_, new_capturer.get(), -1));
+ EXPECT_CALL(*new_source.get(), SetAutomaticGainControl(true));
+ EXPECT_CALL(*new_source.get(), OnStart());
+
media::AudioParameters new_param(
media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
media::CHANNEL_LAYOUT_MONO, 44100, 16, 441);
new_capturer->SetCapturerSourceForTesting(new_source, new_param);
// Setup the second audio track, connect it to the new capturer and start it.
- EXPECT_CALL(*new_source.get(), SetAutomaticGainControl(true));
- EXPECT_CALL(*new_source.get(), OnStart());
scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter_2(
WebRtcLocalAudioTrackAdapter::Create(std::string(), NULL));
scoped_ptr<WebRtcLocalAudioTrack> track_2(
@@ -477,7 +497,6 @@ TEST_F(WebRtcLocalAudioTrackTest, ConnectTracksToDifferentCapturers) {
capturer_->Stop();
}
-
// Make sure an audio track can deliver packets with a buffer size smaller than
// 10ms when it is not connected with a peer connection.
TEST_F(WebRtcLocalAudioTrackTest, TrackWorkWithSmallBufferSize) {
@@ -496,15 +515,15 @@ TEST_F(WebRtcLocalAudioTrackTest, TrackWorkWithSmallBufferSize) {
params.channel_layout(),
params.frames_per_buffer()),
factory.CreateWebMediaConstraints(),
- NULL));
+ NULL, NULL));
scoped_refptr<MockCapturerSource> source(
new MockCapturerSource(capturer.get()));
EXPECT_CALL(*source.get(), OnInitialize(_, capturer.get(), -1));
+ EXPECT_CALL(*source.get(), SetAutomaticGainControl(true));
+ EXPECT_CALL(*source.get(), OnStart());
capturer->SetCapturerSourceForTesting(source, params);
// Set up an audio track, connect it to the capturer and start it.
- EXPECT_CALL(*source.get(), SetAutomaticGainControl(true));
- EXPECT_CALL(*source.get(), OnStart());
scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
WebRtcLocalAudioTrackAdapter::Create(std::string(), NULL));
scoped_ptr<WebRtcLocalAudioTrack> track(
@@ -531,6 +550,10 @@ TEST_F(WebRtcLocalAudioTrackTest, TrackWorkWithSmallBufferSize) {
// Stopping the new source will stop the second track.
EXPECT_CALL(*source, OnStop()).Times(1);
capturer->Stop();
+
+ // Even though this test doesn't use |capturer_source_|, it will be stopped
+ // during teardown of the test harness.
+ EXPECT_CALL(*capturer_source_.get(), OnStop());
}
} // namespace content