author | miu@chromium.org <miu@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-07-17 08:04:32 +0000
committer | miu@chromium.org <miu@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-07-17 08:04:32 +0000
commit | 977db4a4225870c35d80393d2312cb1931a4dba4 (patch)
tree | 468d02055ff28dbb207ccad8b19c3f170f01569c /content/renderer/media
parent | 6ee8a1e328f53fd64c014484e3fdd0f765c64996 (diff)
[Cross-Site Isolation] Migrate entire MediaStream verticals to be per-RenderFrame.
Much of this change is a search-and-replace of RenderView-->RenderFrame and
render_view_id-->render_frame_id. However, the following significant
changes have also been made, mainly to simplify, clarify, and clean up the
same lines of code that would have had to be changed anyway:
1. Desktop and Tab capture APIs now "register" a WebContents instance,
rather than a RenderViewHost. This is a more appropriate choice than
registering a RenderFrameHost, and it simplifies a lot of code.
2. Removed the special-case "prepending" and "stripping" of the first part
of the tab capture device ID; it is no longer necessary.
3. TabCaptureRegistry now uses WebContentsObserver to detect all fullscreen
changes, rather than the deprecated NotificationObserver scheme (a sketch
of this approach appears after this list).
4. Fixes for buggy MediaStreamImpl tear-down: MediaStreamDispatcher is
now explicitly owned by MediaStreamImpl, to ensure it is destroyed
only after it is no longer needed. Also, weak pointers to
MediaStreamImpl are invalidated before the rest of its data members are
destroyed (during testing, outstanding callbacks were observed firing
*after* those data members had been destroyed). A second sketch after
this list shows the resulting ownership and tear-down ordering.
Testing:
1. All relevant content_unittests and browser_tests run.
2. Manually engaged tab capture and desktop capture using Google Cast
extension and "Desktop Capture Example"
(https://developer.chrome.com/extensions/samples) to test screen
picker UI.
3. Ran WebRTC demo: http://apprtc.appspot.com
4. Confirmed Flash webcam and microphone input.
5. Checked tab capture/recording/audio indicators.
6. Confirmed HTML5 and Flash fullscreen-within-tab functionality during
tab capture.
BUG=304341,323223,320200,163100,338445
TEST=See "Testing" above.
TBR=kenrb@chromium.org
Review URL: https://codereview.chromium.org/364123002
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@283702 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'content/renderer/media')
-rw-r--r-- | content/renderer/media/media_stream_audio_source.cc | 18
-rw-r--r-- | content/renderer/media/media_stream_audio_source.h | 8
-rw-r--r-- | content/renderer/media/media_stream_dispatcher.cc | 52
-rw-r--r-- | content/renderer/media/media_stream_dispatcher.h | 16
-rw-r--r-- | content/renderer/media/media_stream_dispatcher_unittest.cc | 2
-rw-r--r-- | content/renderer/media/media_stream_impl.cc | 153
-rw-r--r-- | content/renderer/media/media_stream_impl.h | 51
-rw-r--r-- | content/renderer/media/media_stream_impl_unittest.cc | 30
-rw-r--r-- | content/renderer/media/webrtc_audio_renderer.cc | 14
-rw-r--r-- | content/renderer/media/webrtc_local_audio_renderer.cc | 12
10 files changed, 170 insertions, 186 deletions
diff --git a/content/renderer/media/media_stream_audio_source.cc b/content/renderer/media/media_stream_audio_source.cc index 069f4e3..d988552 100644 --- a/content/renderer/media/media_stream_audio_source.cc +++ b/content/renderer/media/media_stream_audio_source.cc @@ -4,14 +4,28 @@ #include "content/renderer/media/media_stream_audio_source.h" +#include "content/renderer/render_frame_impl.h" +#include "content/renderer/render_view_impl.h" + namespace content { +namespace { +// TODO(miu): This is a temporary hack until the Chrome audio vertical is +// migrated for the Cross-Site Isolation project. http://crbug.com/392596 +int ToRenderViewId(int render_frame_id) { + RenderFrameImpl* const frame = + RenderFrameImpl::FromRoutingID(render_frame_id); + RenderViewImpl* const view = frame ? frame->render_view() : NULL; + return view ? view->GetRoutingID() : -1; +} +} // namespace + MediaStreamAudioSource::MediaStreamAudioSource( - int render_view_id, + int render_frame_id, const StreamDeviceInfo& device_info, const SourceStoppedCallback& stop_callback, PeerConnectionDependencyFactory* factory) - : render_view_id_(render_view_id), + : render_view_id_(ToRenderViewId(render_frame_id)), factory_(factory) { SetDeviceInfo(device_info); SetStopCallback(stop_callback); diff --git a/content/renderer/media/media_stream_audio_source.h b/content/renderer/media/media_stream_audio_source.h index 29f1d4c..7d49a43 100644 --- a/content/renderer/media/media_stream_audio_source.h +++ b/content/renderer/media/media_stream_audio_source.h @@ -17,7 +17,7 @@ namespace content { class CONTENT_EXPORT MediaStreamAudioSource : NON_EXPORTED_BASE(public MediaStreamSource) { public: - MediaStreamAudioSource(int render_view_id, + MediaStreamAudioSource(int render_frame_id, const StreamDeviceInfo& device_info, const SourceStoppedCallback& stop_callback, PeerConnectionDependencyFactory* factory); @@ -49,15 +49,15 @@ class CONTENT_EXPORT MediaStreamAudioSource virtual void DoStopSource() OVERRIDE; private: - int render_view_id_; // Render view ID that created this source. + const int render_view_id_; // Render view ID that created this source. + PeerConnectionDependencyFactory* const factory_; + // This member holds an instance of webrtc::LocalAudioSource. This is used // as a container for audio options. 
scoped_refptr<webrtc::AudioSourceInterface> local_audio_source_; scoped_refptr<WebRtcAudioCapturer> audio_capturer_; - PeerConnectionDependencyFactory* factory_; - DISALLOW_COPY_AND_ASSIGN(MediaStreamAudioSource); }; diff --git a/content/renderer/media/media_stream_dispatcher.cc b/content/renderer/media/media_stream_dispatcher.cc index e2675a8..9903bdf 100644 --- a/content/renderer/media/media_stream_dispatcher.cc +++ b/content/renderer/media/media_stream_dispatcher.cc @@ -5,11 +5,9 @@ #include "content/renderer/media/media_stream_dispatcher.h" #include "base/logging.h" -#include "base/message_loop/message_loop_proxy.h" #include "content/common/media/media_stream_messages.h" #include "content/renderer/media/media_stream_dispatcher_eventhandler.h" #include "content/renderer/render_thread_impl.h" -#include "content/renderer/render_view_impl.h" #include "media/audio/audio_parameters.h" #include "third_party/WebKit/public/web/WebUserGestureIndicator.h" #include "url/gurl.h" @@ -63,9 +61,8 @@ struct MediaStreamDispatcher::Stream { StreamDeviceInfoArray video_array; }; -MediaStreamDispatcher::MediaStreamDispatcher(RenderViewImpl* render_view) - : RenderViewObserver(render_view), - main_loop_(base::MessageLoopProxy::current()), +MediaStreamDispatcher::MediaStreamDispatcher(RenderFrame* render_frame) + : RenderFrameObserver(render_frame), next_ipc_id_(0) { } @@ -76,7 +73,7 @@ void MediaStreamDispatcher::GenerateStream( const base::WeakPtr<MediaStreamDispatcherEventHandler>& event_handler, const StreamOptions& components, const GURL& security_origin) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); DVLOG(1) << "MediaStreamDispatcher::GenerateStream(" << request_id << ")"; requests_.push_back(Request(event_handler, request_id, next_ipc_id_)); @@ -88,7 +85,7 @@ void MediaStreamDispatcher::GenerateStream( void MediaStreamDispatcher::CancelGenerateStream( int request_id, const base::WeakPtr<MediaStreamDispatcherEventHandler>& event_handler) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); DVLOG(1) << "MediaStreamDispatcher::CancelGenerateStream" << ", {request_id = " << request_id << "}"; @@ -106,7 +103,7 @@ void MediaStreamDispatcher::CancelGenerateStream( void MediaStreamDispatcher::StopStreamDevice( const StreamDeviceInfo& device_info) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); DVLOG(1) << "MediaStreamDispatcher::StopStreamDevice" << ", {device_id = " << device_info.device.id << "}"; // Remove |device_info| from all streams in |label_stream_map_|. @@ -128,9 +125,10 @@ void MediaStreamDispatcher::StopStreamDevice( } if (!device_found) { - // TODO(perkj): This can currently happen since there is one - // MediaStreamDispatcher per RenderView but there is one MediaStreamImpl - // per RenderFrame. http://crbug/368030. + // TODO(perkj): Revisit this. It used to be true (but isn't anymore) that + // there was one MediaStreamDispatcher per RenderView, but one + // MediaStreamImpl per RenderFrame. Now both MediaStreamDispatcher and + // MediaStreamImpl are 1:1 per RenderFrame. http://crbug/368030. 
return; } @@ -144,7 +142,7 @@ void MediaStreamDispatcher::EnumerateDevices( MediaStreamType type, const GURL& security_origin, bool hide_labels_if_no_access) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); DCHECK(type == MEDIA_DEVICE_AUDIO_CAPTURE || type == MEDIA_DEVICE_VIDEO_CAPTURE || type == MEDIA_DEVICE_AUDIO_OUTPUT); @@ -167,7 +165,7 @@ void MediaStreamDispatcher::EnumerateDevices( void MediaStreamDispatcher::StopEnumerateDevices( int request_id, const base::WeakPtr<MediaStreamDispatcherEventHandler>& event_handler) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); DVLOG(1) << "MediaStreamDispatcher::StopEnumerateDevices(" << request_id << ")"; for (RequestList::iterator it = requests_.begin(); it != requests_.end(); @@ -187,7 +185,7 @@ void MediaStreamDispatcher::OpenDevice( const std::string& device_id, MediaStreamType type, const GURL& security_origin) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); DVLOG(1) << "MediaStreamDispatcher::OpenDevice(" << request_id << ")"; requests_.push_back(Request(event_handler, request_id, next_ipc_id_)); @@ -205,7 +203,7 @@ void MediaStreamDispatcher::CancelOpenDevice( } void MediaStreamDispatcher::CloseDevice(const std::string& label) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); DCHECK(!label.empty()); DVLOG(1) << "MediaStreamDispatcher::CloseDevice" << ", {label = " << label << "}"; @@ -218,6 +216,10 @@ void MediaStreamDispatcher::CloseDevice(const std::string& label) { Send(new MediaStreamHostMsg_CloseDevice(routing_id(), label)); } +void MediaStreamDispatcher::OnDestruct() { + // Do not self-destruct. MediaStreamImpl owns |this|. 
+} + bool MediaStreamDispatcher::Send(IPC::Message* message) { if (!RenderThread::Get()) { delete message; @@ -252,7 +254,7 @@ void MediaStreamDispatcher::OnStreamGenerated( const std::string& label, const StreamDeviceInfoArray& audio_array, const StreamDeviceInfoArray& video_array) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); for (RequestList::iterator it = requests_.begin(); it != requests_.end(); ++it) { @@ -278,7 +280,7 @@ void MediaStreamDispatcher::OnStreamGenerated( void MediaStreamDispatcher::OnStreamGenerationFailed( int request_id, content::MediaStreamRequestResult result) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); for (RequestList::iterator it = requests_.begin(); it != requests_.end(); ++it) { Request& request = *it; @@ -297,7 +299,7 @@ void MediaStreamDispatcher::OnStreamGenerationFailed( void MediaStreamDispatcher::OnDeviceStopped( const std::string& label, const StreamDeviceInfo& device_info) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); DVLOG(1) << "MediaStreamDispatcher::OnDeviceStopped(" << "{label = " << label << "})" << ", {device_id = " << device_info.device.id << "})"; @@ -324,7 +326,7 @@ void MediaStreamDispatcher::OnDeviceStopped( void MediaStreamDispatcher::OnDevicesEnumerated( int request_id, const StreamDeviceInfoArray& device_array) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); DCHECK_GE(request_id, 0); for (RequestList::iterator it = requests_.begin(); it != requests_.end(); @@ -340,7 +342,7 @@ void MediaStreamDispatcher::OnDeviceOpened( int request_id, const std::string& label, const StreamDeviceInfo& device_info) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); for (RequestList::iterator it = requests_.begin(); it != requests_.end(); ++it) { Request& request = *it; @@ -367,7 +369,7 @@ void MediaStreamDispatcher::OnDeviceOpened( } void MediaStreamDispatcher::OnDeviceOpenFailed(int request_id) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); for (RequestList::iterator it = requests_.begin(); it != requests_.end(); ++it) { Request& request = *it; @@ -385,7 +387,7 @@ void MediaStreamDispatcher::OnDeviceOpenFailed(int request_id) { int MediaStreamDispatcher::audio_session_id(const std::string& label, int index) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); LabelStreamMap::iterator it = label_stream_map_.find(label); if (it == label_stream_map_.end() || it->second.audio_array.size() <= static_cast<size_t>(index)) { @@ -395,13 +397,13 @@ int MediaStreamDispatcher::audio_session_id(const std::string& label, } bool MediaStreamDispatcher::IsStream(const std::string& label) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); return label_stream_map_.find(label) != label_stream_map_.end(); } int MediaStreamDispatcher::video_session_id(const std::string& label, int index) { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); LabelStreamMap::iterator it = label_stream_map_.find(label); if (it == label_stream_map_.end() || it->second.video_array.size() <= static_cast<size_t>(index)) { @@ -411,7 +413,7 @@ int MediaStreamDispatcher::video_session_id(const std::string& label, } bool 
MediaStreamDispatcher::IsAudioDuckingActive() const { - DCHECK(main_loop_->BelongsToCurrentThread()); + DCHECK(thread_checker_.CalledOnValidThread()); LabelStreamMap::const_iterator stream_it = label_stream_map_.begin(); while (stream_it != label_stream_map_.end()) { const StreamDeviceInfoArray& audio_array = stream_it->second.audio_array; diff --git a/content/renderer/media/media_stream_dispatcher.h b/content/renderer/media/media_stream_dispatcher.h index 59363a3..c4e5136 100644 --- a/content/renderer/media/media_stream_dispatcher.h +++ b/content/renderer/media/media_stream_dispatcher.h @@ -13,9 +13,10 @@ #include "base/gtest_prod_util.h" #include "base/memory/scoped_ptr.h" #include "base/memory/weak_ptr.h" +#include "base/threading/thread_checker.h" #include "content/common/content_export.h" #include "content/common/media/media_stream_options.h" -#include "content/public/renderer/render_view_observer.h" +#include "content/public/renderer/render_frame_observer.h" #include "content/renderer/media/media_stream_dispatcher_eventhandler.h" namespace base { @@ -24,18 +25,16 @@ class MessageLoopProxy; namespace content { -class RenderViewImpl; - // MediaStreamDispatcher is a delegate for the Media Stream API messages. // MediaStreams are used by WebKit to open media devices such as Video Capture // and Audio input devices. // It's the complement of MediaStreamDispatcherHost (owned by // BrowserRenderProcessHost). class CONTENT_EXPORT MediaStreamDispatcher - : public RenderViewObserver, + : public RenderFrameObserver, public base::SupportsWeakPtr<MediaStreamDispatcher> { public: - explicit MediaStreamDispatcher(RenderViewImpl* render_view); + explicit MediaStreamDispatcher(RenderFrame* render_frame); virtual ~MediaStreamDispatcher(); // Request a new media stream to be created. @@ -115,11 +114,12 @@ class CONTENT_EXPORT MediaStreamDispatcher // opened it. struct Stream; - // RenderViewObserver OVERRIDE. + // RenderFrameObserver OVERRIDE. + virtual void OnDestruct() OVERRIDE; virtual bool Send(IPC::Message* message) OVERRIDE; + virtual bool OnMessageReceived(const IPC::Message& message) OVERRIDE; // Messages from the browser. - virtual bool OnMessageReceived(const IPC::Message& message) OVERRIDE; void OnStreamGenerated( int request_id, const std::string& label, @@ -140,7 +140,7 @@ class CONTENT_EXPORT MediaStreamDispatcher void OnDeviceOpenFailed(int request_id); // Used for DCHECKs so methods calls won't execute in the wrong thread. 
- scoped_refptr<base::MessageLoopProxy> main_loop_; + base::ThreadChecker thread_checker_; int next_ipc_id_; typedef std::map<std::string, Stream> LabelStreamMap; diff --git a/content/renderer/media/media_stream_dispatcher_unittest.cc b/content/renderer/media/media_stream_dispatcher_unittest.cc index 62a6f70..6e80f98 100644 --- a/content/renderer/media/media_stream_dispatcher_unittest.cc +++ b/content/renderer/media/media_stream_dispatcher_unittest.cc @@ -109,7 +109,7 @@ class MediaStreamDispatcherUnderTest : public MediaStreamDispatcher { MediaStreamDispatcherUnderTest() : MediaStreamDispatcher(NULL) {} using MediaStreamDispatcher::GetNextIpcIdForTest; - using RenderViewObserver::OnMessageReceived; + using RenderFrameObserver::OnMessageReceived; }; class MediaStreamDispatcherTest : public ::testing::Test { diff --git a/content/renderer/media/media_stream_impl.cc b/content/renderer/media/media_stream_impl.cc index 79f86a8..887b593 100644 --- a/content/renderer/media/media_stream_impl.cc +++ b/content/renderer/media/media_stream_impl.cc @@ -12,6 +12,7 @@ #include "base/strings/string_util.h" #include "base/strings/stringprintf.h" #include "base/strings/utf_string_conversions.h" +#include "content/public/renderer/render_frame.h" #include "content/renderer/media/media_stream.h" #include "content/renderer/media/media_stream_audio_source.h" #include "content/renderer/media/media_stream_dispatcher.h" @@ -82,15 +83,22 @@ struct MediaStreamImpl::MediaDevicesRequestInfo { }; MediaStreamImpl::MediaStreamImpl( - RenderView* render_view, - MediaStreamDispatcher* media_stream_dispatcher, - PeerConnectionDependencyFactory* dependency_factory) - : RenderViewObserver(render_view), + RenderFrame* render_frame, + PeerConnectionDependencyFactory* dependency_factory, + scoped_ptr<MediaStreamDispatcher> media_stream_dispatcher) + : RenderFrameObserver(render_frame), dependency_factory_(dependency_factory), - media_stream_dispatcher_(media_stream_dispatcher) { + media_stream_dispatcher_(media_stream_dispatcher.Pass()), + weak_factory_(this) { + DCHECK(dependency_factory_); + DCHECK(media_stream_dispatcher_.get()); } MediaStreamImpl::~MediaStreamImpl() { + // Force-close all outstanding user media requests and local sources here, + // before the outstanding WeakPtrs are invalidated, to ensure a clean + // shutdown. + FrameWillClose(); } void MediaStreamImpl::requestUserMedia( @@ -108,7 +116,6 @@ void MediaStreamImpl::requestUserMedia( int request_id = g_next_request_id++; StreamOptions options; - blink::WebLocalFrame* frame = NULL; GURL security_origin; bool enable_automatic_output_device_selection = false; @@ -142,11 +149,9 @@ void MediaStreamImpl::requestUserMedia( } security_origin = GURL(user_media_request.securityOrigin().toString()); - // Get the WebFrame that requested a MediaStream. - // The frame is needed to tell the MediaStreamDispatcher when a stream goes - // out of scope. - frame = user_media_request.ownerDocument().frame(); - DCHECK(frame); + DCHECK(render_frame()->GetWebFrame() == + static_cast<blink::WebFrame*>( + user_media_request.ownerDocument().frame())); } DVLOG(1) << "MediaStreamImpl::requestUserMedia(" << request_id << ", [ " @@ -176,12 +181,12 @@ void MediaStreamImpl::requestUserMedia( mandatory_video ? 
"true":"false")); user_media_requests_.push_back( - new UserMediaRequestInfo(request_id, frame, user_media_request, - enable_automatic_output_device_selection)); + new UserMediaRequestInfo(request_id, user_media_request, + enable_automatic_output_device_selection)); media_stream_dispatcher_->GenerateStream( request_id, - AsWeakPtr(), + weak_factory_.GetWeakPtr(), options, security_origin); } @@ -226,21 +231,21 @@ void MediaStreamImpl::requestMediaDevices( media_stream_dispatcher_->EnumerateDevices( audio_input_request_id, - AsWeakPtr(), + weak_factory_.GetWeakPtr(), MEDIA_DEVICE_AUDIO_CAPTURE, security_origin, true); media_stream_dispatcher_->EnumerateDevices( video_input_request_id, - AsWeakPtr(), + weak_factory_.GetWeakPtr(), MEDIA_DEVICE_VIDEO_CAPTURE, security_origin, true); media_stream_dispatcher_->EnumerateDevices( audio_output_request_id, - AsWeakPtr(), + weak_factory_.GetWeakPtr(), MEDIA_DEVICE_AUDIO_OUTPUT, security_origin, true); @@ -253,18 +258,7 @@ void MediaStreamImpl::cancelMediaDevicesRequest( FindMediaDevicesRequestInfo(media_devices_request); if (!request) return; - - // Cancel device enumeration. - media_stream_dispatcher_->StopEnumerateDevices( - request->audio_input_request_id, - AsWeakPtr()); - media_stream_dispatcher_->StopEnumerateDevices( - request->video_input_request_id, - AsWeakPtr()); - media_stream_dispatcher_->StopEnumerateDevices( - request->audio_output_request_id, - AsWeakPtr()); - DeleteMediaDevicesRequestInfo(request); + CancelAndDeleteMediaDevicesRequest(request); } // Callback from MediaStreamDispatcher. @@ -334,7 +328,8 @@ void MediaStreamImpl::OnStreamGenerated( // Wait for the tracks to be started successfully or to fail. request_info->CallbackOnTracksStarted( - base::Bind(&MediaStreamImpl::OnCreateNativeTracksCompleted, AsWeakPtr())); + base::Bind(&MediaStreamImpl::OnCreateNativeTracksCompleted, + weak_factory_.GetWeakPtr())); } // Callback from MediaStreamDispatcher. 
@@ -380,7 +375,7 @@ void MediaStreamImpl::OnDeviceStopped( for (LocalStreamSources::iterator device_it = local_sources_.begin(); device_it != local_sources_.end(); ++device_it) { - if (device_it->source.id() == source.id()) { + if (device_it->id() == source.id()) { local_sources_.erase(device_it); break; } @@ -391,7 +386,6 @@ void MediaStreamImpl::InitializeSourceObject( const StreamDeviceInfo& device, blink::WebMediaStreamSource::Type type, const blink::WebMediaConstraints& constraints, - blink::WebFrame* frame, blink::WebMediaStreamSource* webkit_source) { const blink::WebMediaStreamSource* existing_source = FindLocalSource(device); @@ -415,18 +409,20 @@ void MediaStreamImpl::InitializeSourceObject( webkit_source->setExtraData( CreateVideoSource( device, - base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr()))); + base::Bind(&MediaStreamImpl::OnLocalSourceStopped, + weak_factory_.GetWeakPtr()))); } else { DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type); MediaStreamAudioSource* audio_source( new MediaStreamAudioSource( - RenderViewObserver::routing_id(), + RenderFrameObserver::routing_id(), device, - base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr()), + base::Bind(&MediaStreamImpl::OnLocalSourceStopped, + weak_factory_.GetWeakPtr()), dependency_factory_)); webkit_source->setExtraData(audio_source); } - local_sources_.push_back(LocalStreamSource(frame, *webkit_source)); + local_sources_.push_back(*webkit_source); } MediaStreamVideoSource* MediaStreamImpl::CreateVideoSource( @@ -450,7 +446,6 @@ void MediaStreamImpl::CreateVideoTracks( InitializeSourceObject(devices[i], blink::WebMediaStreamSource::TypeVideo, constraints, - request->frame, &webkit_source); (*webkit_tracks)[i] = request->CreateAndStartVideoTrack(webkit_source, constraints); @@ -491,7 +486,6 @@ void MediaStreamImpl::CreateAudioTracks( InitializeSourceObject(overridden_audio_array[i], blink::WebMediaStreamSource::TypeAudio, constraints, - request->frame, &webkit_source); (*webkit_tracks)[i].initialize(webkit_source); request->StartAudioTrack((*webkit_tracks)[i], constraints); @@ -585,19 +579,7 @@ void MediaStreamImpl::OnDevicesEnumerated( } EnumerateDevicesSucceded(&request->request, devices); - - // Cancel device enumeration. 
- media_stream_dispatcher_->StopEnumerateDevices( - request->audio_input_request_id, - AsWeakPtr()); - media_stream_dispatcher_->StopEnumerateDevices( - request->video_input_request_id, - AsWeakPtr()); - media_stream_dispatcher_->StopEnumerateDevices( - request->audio_output_request_id, - AsWeakPtr()); - - DeleteMediaDevicesRequestInfo(request); + CancelAndDeleteMediaDevicesRequest(request); } void MediaStreamImpl::OnDeviceOpened( @@ -672,13 +654,13 @@ const blink::WebMediaStreamSource* MediaStreamImpl::FindLocalSource( const StreamDeviceInfo& device) const { for (LocalStreamSources::const_iterator it = local_sources_.begin(); it != local_sources_.end(); ++it) { - MediaStreamSource* source = - static_cast<MediaStreamSource*>(it->source.extraData()); + MediaStreamSource* const source = + static_cast<MediaStreamSource*>(it->extraData()); const StreamDeviceInfo& active_device = source->device_info(); if (active_device.device.id == device.device.id && active_device.device.type == device.device.type && active_device.session_id == device.session_id) { - return &it->source; + return &(*it); } } return NULL; @@ -742,11 +724,19 @@ MediaStreamImpl::FindMediaDevicesRequestInfo( return NULL; } -void MediaStreamImpl::DeleteMediaDevicesRequestInfo( +void MediaStreamImpl::CancelAndDeleteMediaDevicesRequest( MediaDevicesRequestInfo* request) { MediaDevicesRequests::iterator it = media_devices_requests_.begin(); for (; it != media_devices_requests_.end(); ++it) { if ((*it) == request) { + // Cancel device enumeration. + media_stream_dispatcher_->StopEnumerateDevices( + request->audio_input_request_id, weak_factory_.GetWeakPtr()); + media_stream_dispatcher_->StopEnumerateDevices( + request->video_input_request_id, weak_factory_.GetWeakPtr()); + media_stream_dispatcher_->StopEnumerateDevices( + request->audio_output_request_id, weak_factory_.GetWeakPtr()); + media_devices_requests_.erase(it); return; } @@ -754,43 +744,28 @@ void MediaStreamImpl::DeleteMediaDevicesRequestInfo( NOTREACHED(); } -void MediaStreamImpl::FrameDetached(blink::WebFrame* frame) { - // Do same thing as FrameWillClose. - FrameWillClose(frame); -} - -void MediaStreamImpl::FrameWillClose(blink::WebFrame* frame) { - // Loop through all UserMediaRequests and find the requests that belong to the - // frame that is being closed. +void MediaStreamImpl::FrameWillClose() { + // Cancel all outstanding UserMediaRequests. UserMediaRequests::iterator request_it = user_media_requests_.begin(); while (request_it != user_media_requests_.end()) { - if ((*request_it)->frame == frame) { - DVLOG(1) << "MediaStreamImpl::FrameWillClose: " - << "Cancel user media request " << (*request_it)->request_id; - // If the request is not generated, it means that a request - // has been sent to the MediaStreamDispatcher to generate a stream - // but MediaStreamDispatcher has not yet responded and we need to cancel - // the request. - if (!(*request_it)->generated) { - media_stream_dispatcher_->CancelGenerateStream( - (*request_it)->request_id, AsWeakPtr()); - } - request_it = user_media_requests_.erase(request_it); - } else { - ++request_it; + DVLOG(1) << "MediaStreamImpl@" << this << "::FrameWillClose: " + << "Cancel user media request " << (*request_it)->request_id; + // If the request is not generated, it means that a request + // has been sent to the MediaStreamDispatcher to generate a stream + // but MediaStreamDispatcher has not yet responded and we need to cancel + // the request. 
+ if (!(*request_it)->generated) { + media_stream_dispatcher_->CancelGenerateStream( + (*request_it)->request_id, weak_factory_.GetWeakPtr()); } + request_it = user_media_requests_.erase(request_it); } - // Loop through all current local sources and stop the sources that were - // created by the frame that will be closed. + // Loop through all current local sources and stop the sources. LocalStreamSources::iterator sources_it = local_sources_.begin(); while (sources_it != local_sources_.end()) { - if (sources_it->frame == frame) { - StopLocalSource(sources_it->source, true); - sources_it = local_sources_.erase(sources_it); - } else { - ++sources_it; - } + StopLocalSource(*sources_it, true); + sources_it = local_sources_.erase(sources_it); } } @@ -802,7 +777,7 @@ void MediaStreamImpl::OnLocalSourceStopped( bool device_found = false; for (LocalStreamSources::iterator device_it = local_sources_.begin(); device_it != local_sources_.end(); ++device_it) { - if (device_it->source.id() == source.id()) { + if (device_it->id() == source.id()) { device_found = true; local_sources_.erase(device_it); break; @@ -811,7 +786,7 @@ void MediaStreamImpl::OnLocalSourceStopped( CHECK(device_found); MediaStreamSource* source_impl = - static_cast<MediaStreamSource*> (source.extraData()); + static_cast<MediaStreamSource*>(source.extraData()); media_stream_dispatcher_->StopStreamDevice(source_impl->device_info()); } @@ -819,7 +794,7 @@ void MediaStreamImpl::StopLocalSource( const blink::WebMediaStreamSource& source, bool notify_dispatcher) { MediaStreamSource* source_impl = - static_cast<MediaStreamSource*> (source.extraData()); + static_cast<MediaStreamSource*>(source.extraData()); DVLOG(1) << "MediaStreamImpl::StopLocalSource(" << "{device_id = " << source_impl->device_info().device.id << "})"; @@ -832,14 +807,12 @@ void MediaStreamImpl::StopLocalSource( MediaStreamImpl::UserMediaRequestInfo::UserMediaRequestInfo( int request_id, - blink::WebFrame* frame, const blink::WebUserMediaRequest& request, bool enable_automatic_output_device_selection) : request_id(request_id), generated(false), enable_automatic_output_device_selection( enable_automatic_output_device_selection), - frame(frame), request(request), request_failed_(false) { } diff --git a/content/renderer/media/media_stream_impl.h b/content/renderer/media/media_stream_impl.h index ad96020..0effc2a 100644 --- a/content/renderer/media/media_stream_impl.h +++ b/content/renderer/media/media_stream_impl.h @@ -16,7 +16,7 @@ #include "base/memory/weak_ptr.h" #include "base/threading/non_thread_safe.h" #include "content/common/content_export.h" -#include "content/public/renderer/render_view_observer.h" +#include "content/public/renderer/render_frame_observer.h" #include "content/renderer/media/media_stream_dispatcher_eventhandler.h" #include "content/renderer/media/media_stream_source.h" #include "third_party/WebKit/public/platform/WebMediaStream.h" @@ -38,20 +38,23 @@ class VideoCapturerDelegate; // (via MediaStreamDispatcher and MediaStreamDispatcherHost) // in the browser process. It must be created, called and destroyed on the // render thread. -// MediaStreamImpl have weak pointers to a MediaStreamDispatcher. 
class CONTENT_EXPORT MediaStreamImpl - : public RenderViewObserver, + : public RenderFrameObserver, NON_EXPORTED_BASE(public blink::WebUserMediaClient), public MediaStreamDispatcherEventHandler, - public base::SupportsWeakPtr<MediaStreamImpl>, NON_EXPORTED_BASE(public base::NonThreadSafe) { public: + // |render_frame| and |dependency_factory| must outlive this instance. MediaStreamImpl( - RenderView* render_view, - MediaStreamDispatcher* media_stream_dispatcher, - PeerConnectionDependencyFactory* dependency_factory); + RenderFrame* render_frame, + PeerConnectionDependencyFactory* dependency_factory, + scoped_ptr<MediaStreamDispatcher> media_stream_dispatcher); virtual ~MediaStreamImpl(); + MediaStreamDispatcher* media_stream_dispatcher() const { + return media_stream_dispatcher_.get(); + } + // blink::WebUserMediaClient implementation virtual void requestUserMedia( const blink::WebUserMediaRequest& user_media_request); @@ -82,9 +85,8 @@ class CONTENT_EXPORT MediaStreamImpl const StreamDeviceInfo& device_info) OVERRIDE; virtual void OnDeviceOpenFailed(int request_id) OVERRIDE; - // RenderViewObserver OVERRIDE - virtual void FrameDetached(blink::WebFrame* frame) OVERRIDE; - virtual void FrameWillClose(blink::WebFrame* frame) OVERRIDE; + // RenderFrameObserver OVERRIDE + virtual void FrameWillClose() OVERRIDE; protected: // Called when |source| has been stopped from JavaScript. @@ -119,7 +121,6 @@ class CONTENT_EXPORT MediaStreamImpl ResourcesReady; UserMediaRequestInfo(int request_id, - blink::WebFrame* frame, const blink::WebUserMediaRequest& request, bool enable_automatic_output_device_selection); ~UserMediaRequestInfo(); @@ -128,7 +129,6 @@ class CONTENT_EXPORT MediaStreamImpl // OnStreamGenerated. bool generated; const bool enable_automatic_output_device_selection; - blink::WebFrame* frame; // WebFrame that requested the MediaStream. blink::WebMediaStream web_stream; blink::WebUserMediaRequest request; @@ -160,17 +160,7 @@ class CONTENT_EXPORT MediaStreamImpl }; typedef ScopedVector<UserMediaRequestInfo> UserMediaRequests; - struct LocalStreamSource { - LocalStreamSource(blink::WebFrame* frame, - const blink::WebMediaStreamSource& source) - : frame(frame), source(source) { - } - // |frame| is the WebFrame that requested |source|. NULL in unit tests. - // TODO(perkj): Change so that |frame| is not NULL in unit tests. - blink::WebFrame* frame; - blink::WebMediaStreamSource source; - }; - typedef std::vector<LocalStreamSource> LocalStreamSources; + typedef std::vector<blink::WebMediaStreamSource> LocalStreamSources; struct MediaDevicesRequestInfo; typedef ScopedVector<MediaDevicesRequestInfo> MediaDevicesRequests; @@ -181,7 +171,6 @@ class CONTENT_EXPORT MediaStreamImpl const StreamDeviceInfo& device, blink::WebMediaStreamSource::Type type, const blink::WebMediaConstraints& constraints, - blink::WebFrame* frame, blink::WebMediaStreamSource* webkit_source); void CreateVideoTracks( @@ -210,7 +199,7 @@ class CONTENT_EXPORT MediaStreamImpl MediaDevicesRequestInfo* FindMediaDevicesRequestInfo(int request_id); MediaDevicesRequestInfo* FindMediaDevicesRequestInfo( const blink::WebMediaDevicesRequest& request); - void DeleteMediaDevicesRequestInfo(MediaDevicesRequestInfo* request); + void CancelAndDeleteMediaDevicesRequest(MediaDevicesRequestInfo* request); // Returns the source that use a device with |device.session_id| // and |device.device.id|. NULL if such source doesn't exist. @@ -224,11 +213,11 @@ class CONTENT_EXPORT MediaStreamImpl // It's valid for the lifetime of RenderThread. 
// TODO(xians): Remove this dependency once audio do not need it for local // audio. - PeerConnectionDependencyFactory* dependency_factory_; + PeerConnectionDependencyFactory* const dependency_factory_; - // media_stream_dispatcher_ is a weak reference, owned by RenderView. It's - // valid for the lifetime of RenderView. - MediaStreamDispatcher* media_stream_dispatcher_; + // MediaStreamImpl owns MediaStreamDispatcher instead of RenderFrameImpl + // (or RenderFrameObserver) to ensure tear-down occurs in the right order. + const scoped_ptr<MediaStreamDispatcher> media_stream_dispatcher_; LocalStreamSources local_sources_; @@ -237,6 +226,10 @@ class CONTENT_EXPORT MediaStreamImpl // Requests to enumerate media devices. MediaDevicesRequests media_devices_requests_; + // Note: This member must be the last to ensure all outstanding weak pointers + // are invalidated first. + base::WeakPtrFactory<MediaStreamImpl> weak_factory_; + DISALLOW_COPY_AND_ASSIGN(MediaStreamImpl); }; diff --git a/content/renderer/media/media_stream_impl_unittest.cc b/content/renderer/media/media_stream_impl_unittest.cc index cddf119..997340a 100644 --- a/content/renderer/media/media_stream_impl_unittest.cc +++ b/content/renderer/media/media_stream_impl_unittest.cc @@ -43,9 +43,11 @@ class MediaStreamImplUnderTest : public MediaStreamImpl { REQUEST_FAILED, }; - MediaStreamImplUnderTest(MediaStreamDispatcher* media_stream_dispatcher, - PeerConnectionDependencyFactory* dependency_factory) - : MediaStreamImpl(NULL, media_stream_dispatcher, dependency_factory), + MediaStreamImplUnderTest( + PeerConnectionDependencyFactory* dependency_factory, + scoped_ptr<MediaStreamDispatcher> media_stream_dispatcher) + : MediaStreamImpl( + NULL, dependency_factory, media_stream_dispatcher.Pass()), state_(REQUEST_NOT_STARTED), result_(NUM_MEDIA_REQUEST_RESULTS), factory_(dependency_factory), @@ -128,10 +130,11 @@ class MediaStreamImplTest : public ::testing::Test { virtual void SetUp() { // Create our test object. child_process_.reset(new ChildProcess()); - ms_dispatcher_.reset(new MockMediaStreamDispatcher()); dependency_factory_.reset(new MockPeerConnectionDependencyFactory()); - ms_impl_.reset(new MediaStreamImplUnderTest(ms_dispatcher_.get(), - dependency_factory_.get())); + ms_dispatcher_ = new MockMediaStreamDispatcher(); + ms_impl_.reset(new MediaStreamImplUnderTest( + dependency_factory_.get(), + scoped_ptr<MediaStreamDispatcher>(ms_dispatcher_).Pass())); } blink::WebMediaStream RequestLocalMediaStream() { @@ -199,7 +202,7 @@ class MediaStreamImplTest : public ::testing::Test { protected: base::MessageLoop message_loop_; scoped_ptr<ChildProcess> child_process_; - scoped_ptr<MockMediaStreamDispatcher> ms_dispatcher_; + MockMediaStreamDispatcher* ms_dispatcher_; // Owned my |ms_impl_|. scoped_ptr<MediaStreamImplUnderTest> ms_impl_; scoped_ptr<MockPeerConnectionDependencyFactory> dependency_factory_; }; @@ -329,16 +332,13 @@ TEST_F(MediaStreamImplTest, StopSourceWhenMediaStreamGoesOutOfScope) { EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter()); } -// Test that the MediaStreams are deleted if the owning WebFrame is deleted. +// Test that the MediaStreams are deleted if the owning WebFrame is closing. // In the unit test the owning frame is NULL. TEST_F(MediaStreamImplTest, FrameWillClose) { // Test a stream with both audio and video. blink::WebMediaStream mixed_desc = RequestLocalMediaStream(); blink::WebMediaStream desc2 = RequestLocalMediaStream(); - - // Test that the MediaStreams are deleted if the owning WebFrame is deleted. 
- // In the unit test the owning frame is NULL. - ms_impl_->FrameWillClose(NULL); + ms_impl_->FrameWillClose(); EXPECT_EQ(1, ms_dispatcher_->stop_audio_device_counter()); EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter()); } @@ -387,7 +387,7 @@ TEST_F(MediaStreamImplTest, MediaStreamImplShutDown) { // being generated by the MediaStreamDispatcher. TEST_F(MediaStreamImplTest, ReloadFrameWhileGeneratingStream) { ms_impl_->RequestUserMedia(); - ms_impl_->FrameWillClose(NULL); + ms_impl_->FrameWillClose(); EXPECT_EQ(1, ms_dispatcher_->request_stream_counter()); EXPECT_EQ(0, ms_dispatcher_->stop_audio_device_counter()); EXPECT_EQ(0, ms_dispatcher_->stop_video_device_counter()); @@ -401,7 +401,7 @@ TEST_F(MediaStreamImplTest, ReloadFrameWhileGeneratingSources) { ms_impl_->RequestUserMedia(); FakeMediaStreamDispatcherRequestUserMediaComplete(); EXPECT_EQ(1, ms_dispatcher_->request_stream_counter()); - ms_impl_->FrameWillClose(NULL); + ms_impl_->FrameWillClose(); EXPECT_EQ(1, ms_dispatcher_->stop_audio_device_counter()); EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter()); EXPECT_EQ(MediaStreamImplUnderTest::REQUEST_NOT_COMPLETE, @@ -413,7 +413,7 @@ TEST_F(MediaStreamImplTest, ReloadFrameWhileGeneratingSources) { TEST_F(MediaStreamImplTest, StopTrackAfterReload) { blink::WebMediaStream mixed_desc = RequestLocalMediaStream(); EXPECT_EQ(1, ms_dispatcher_->request_stream_counter()); - ms_impl_->FrameWillClose(NULL); + ms_impl_->FrameWillClose(); EXPECT_EQ(1, ms_dispatcher_->stop_audio_device_counter()); EXPECT_EQ(1, ms_dispatcher_->stop_video_device_counter()); diff --git a/content/renderer/media/webrtc_audio_renderer.cc b/content/renderer/media/webrtc_audio_renderer.cc index 3193dba..006f12f 100644 --- a/content/renderer/media/webrtc_audio_renderer.cc +++ b/content/renderer/media/webrtc_audio_renderer.cc @@ -12,7 +12,7 @@ #include "content/renderer/media/media_stream_dispatcher.h" #include "content/renderer/media/webrtc_audio_device_impl.h" #include "content/renderer/media/webrtc_logging.h" -#include "content/renderer/render_view_impl.h" +#include "content/renderer/render_frame_impl.h" #include "media/audio/audio_output_device.h" #include "media/audio/audio_parameters.h" #include "media/audio/sample_rates.h" @@ -186,10 +186,12 @@ class SharedAudioRenderer : public MediaStreamAudioRenderer { // Returns either AudioParameters::NO_EFFECTS or AudioParameters::DUCKING // depending on whether or not an input element is currently open with // ducking enabled. -int GetCurrentDuckingFlag(int render_view_id) { - RenderViewImpl* render_view = RenderViewImpl::FromRoutingID(render_view_id); - if (render_view && render_view->media_stream_dispatcher() && - render_view->media_stream_dispatcher()->IsAudioDuckingActive()) { +int GetCurrentDuckingFlag(int render_frame_id) { + RenderFrameImpl* const frame = + RenderFrameImpl::FromRoutingID(render_frame_id); + MediaStreamDispatcher* const dispatcher = frame ? + frame->GetMediaStreamDispatcher() : NULL; + if (dispatcher && dispatcher->IsAudioDuckingActive()) { return media::AudioParameters::DUCKING; } @@ -218,7 +220,7 @@ WebRtcAudioRenderer::WebRtcAudioRenderer( sink_params_(media::AudioParameters::AUDIO_PCM_LOW_LATENCY, media::CHANNEL_LAYOUT_STEREO, 0, sample_rate, 16, frames_per_buffer, - GetCurrentDuckingFlag(source_render_view_id)) { + GetCurrentDuckingFlag(source_render_frame_id)) { WebRtcLogMessage(base::StringPrintf( "WAR::WAR. 
source_render_view_id=%d" ", session_id=%d, sample_rate=%d, frames_per_buffer=%d, effects=%i", diff --git a/content/renderer/media/webrtc_local_audio_renderer.cc b/content/renderer/media/webrtc_local_audio_renderer.cc index ed98b28..f13e7b0 100644 --- a/content/renderer/media/webrtc_local_audio_renderer.cc +++ b/content/renderer/media/webrtc_local_audio_renderer.cc @@ -12,7 +12,7 @@ #include "content/renderer/media/audio_device_factory.h" #include "content/renderer/media/media_stream_dispatcher.h" #include "content/renderer/media/webrtc_audio_capturer.h" -#include "content/renderer/render_view_impl.h" +#include "content/renderer/render_frame_impl.h" #include "media/audio/audio_output_device.h" #include "media/base/audio_bus.h" #include "media/base/audio_fifo.h" @@ -266,11 +266,11 @@ void WebRtcLocalAudioRenderer::ReconfigureSink( DVLOG(1) << "WebRtcLocalAudioRenderer::ReconfigureSink()"; int implicit_ducking_effect = 0; - RenderViewImpl* render_view = - RenderViewImpl::FromRoutingID(source_render_view_id_); - if (render_view && - render_view->media_stream_dispatcher() && - render_view->media_stream_dispatcher()->IsAudioDuckingActive()) { + RenderFrameImpl* const frame = + RenderFrameImpl::FromRoutingID(source_render_frame_id_); + MediaStreamDispatcher* const dispatcher = frame ? + frame->GetMediaStreamDispatcher() : NULL; + if (dispatcher && dispatcher->IsAudioDuckingActive()) { DVLOG(1) << "Forcing DUCKING to be ON for output"; implicit_ducking_effect = media::AudioParameters::DUCKING; } else { |