diff options
author | henrika@chromium.org <henrika@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2012-12-14 16:08:35 +0000 |
---|---|---|
committer | henrika@chromium.org <henrika@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2012-12-14 16:08:35 +0000 |
commit | f47d99eb44515152996a4386d728d538705769cf (patch) | |
tree | f74dfd15a65ce03378b17bc172954353ac3c20b3 /content/renderer/media | |
parent | 77e939c5e65a3976ac26eb465cf13c3aa60ecc15 (diff) | |
download | chromium_src-f47d99eb44515152996a4386d728d538705769cf.zip chromium_src-f47d99eb44515152996a4386d728d538705769cf.tar.gz chromium_src-f47d99eb44515152996a4386d728d538705769cf.tar.bz2 |
Adds support of rendering a local media stream for audio using HTML5 audio tag.
Overview:
=========
This patch ensures that a user can add a local media stream to an audio/video element and render the captured audio locally (in loopback).
Details:
========
Our current architecture is a bit hairy but I've done my best to add the new code in a structured way. I have an issue in crbug assigned to myself to refactor the code in this area since we really must improve and make it less complex to work with.
One more client now implements the webkit_media::MediaStreamAudioRenderer and this client is
called WebRtcLocalAudioRenderer (WLAR).
The WLAR is created by the WebMediaPlayer when a local media stream is generated and this will ensure that the controls for a media element become visible.
The main action takes place in WebRtcLocalAudioRenderer::Start() where I have gathered all the main stuff. This method is the best starting point for understanding the new data flow.
A reference to an existing WebRtcAudioCapturer (WAC) (owned by the WebRTC ADM) is given to the WLAR at construction. Calling Start =>
- WLAR connects itself to the WAC using the WAC pointer from construction
- render audio parameters are copied from the capture side (since output does resampling etc.)
- creates and inits a new AudioOutputDevice (AOD)
- starts the capturer and the new AOD
Media flow:
-----------
Data is recorded and fed to the WAC which knows that it is in "loopback mode". The WAC then stores recorded data in a FIFO. The WLAR consumes audio from the FIFO when the AOD needs data to render. The WLAR reads data from the FIFO using a callback.
Testing procedure:
==================
Main testing was done using a new WebRTC demo at https://www.corp.google.com/~henrika/webrtc/gum4.html.
I also tried all other demos at https://webrtc-demos.appspot.com/ and the http://apprtc.appspot.com demo.
For all cases, debug filters were used to track things like calling sequences etc.
BUG=157142
Review URL: https://codereview.chromium.org/11450029
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@173164 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'content/renderer/media')
-rw-r--r-- | content/renderer/media/media_stream_impl.cc | 64 | ||||
-rw-r--r-- | content/renderer/media/media_stream_impl.h | 5 | ||||
-rw-r--r-- | content/renderer/media/webrtc_audio_capturer.cc | 126 | ||||
-rw-r--r-- | content/renderer/media/webrtc_audio_capturer.h | 89 | ||||
-rw-r--r-- | content/renderer/media/webrtc_audio_device_impl.h | 4 | ||||
-rw-r--r-- | content/renderer/media/webrtc_audio_renderer.cc | 13 | ||||
-rw-r--r-- | content/renderer/media/webrtc_audio_renderer.h | 3 | ||||
-rw-r--r-- | content/renderer/media/webrtc_local_audio_renderer.cc | 270 | ||||
-rw-r--r-- | content/renderer/media/webrtc_local_audio_renderer.h | 104 |
9 files changed, 661 insertions, 17 deletions
diff --git a/content/renderer/media/media_stream_impl.cc b/content/renderer/media/media_stream_impl.cc index f08f190..d100f5f 100644 --- a/content/renderer/media/media_stream_impl.cc +++ b/content/renderer/media/media_stream_impl.cc @@ -17,7 +17,9 @@ #include "content/renderer/media/rtc_video_decoder.h" #include "content/renderer/media/rtc_video_renderer.h" #include "content/renderer/media/video_capture_impl_manager.h" +#include "content/renderer/media/webrtc_audio_capturer.h" #include "content/renderer/media/webrtc_audio_renderer.h" +#include "content/renderer/media/webrtc_local_audio_renderer.h" #include "content/renderer/media/webrtc_uma_histograms.h" #include "third_party/WebKit/Source/Platform/chromium/public/WebMediaConstraints.h" #include "third_party/WebKit/Source/WebKit/chromium/public/WebDocument.h" @@ -66,6 +68,36 @@ void UpdateOptionsIfTabMediaRequest( } } +// Get session ID for the selected microphone to ensure that we start +// capturing audio using the correct input device. +static int GetSessionId(const WebKit::WebMediaStreamDescriptor& descriptor) { + WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components; + descriptor.audioSources(audio_components); + if (audio_components.size() != 1) { + // TODO(henrika): add support for more than one audio track. + NOTIMPLEMENTED(); + return -1; + } + + if (!audio_components[0].isEnabled()) { + DVLOG(1) << "audio track is disabled"; + return -1; + } + + const WebKit::WebMediaStreamSource& source = audio_components[0].source(); + MediaStreamSourceExtraData* source_data = + static_cast<MediaStreamSourceExtraData*>(source.extraData()); + if (!source_data) { + // TODO(henrika): Implement support for sources from remote MediaStreams. 
+ NOTIMPLEMENTED(); + return -1; + } + DVLOG(1) << "local audio track source name: " + << source_data->device_info().name; + + return source_data->device_info().session_id; +} + static int g_next_request_id = 0; // Creates a WebKit representation of a stream sources based on @@ -274,6 +306,7 @@ MediaStreamImpl::GetAudioRenderer(const GURL& url) { MediaStreamExtraData* extra_data = static_cast<MediaStreamExtraData*>(descriptor.extraData()); + if (extra_data->remote_stream()) { scoped_refptr<WebRtcAudioRenderer> renderer = CreateRemoteAudioRenderer(extra_data->remote_stream()); @@ -285,11 +318,19 @@ MediaStreamImpl::GetAudioRenderer(const GURL& url) { // WebRtcAudioDeviceImpl can only support one renderer. return NULL; - } - - if (extra_data->local_stream()) { - // TODO(xians): Implement a WebRtcAudioFIFO to handle the local loopback. - return NULL; + } else if (extra_data->local_stream()) { + DVLOG(1) << "creating local audio renderer for stream:" + << extra_data->local_stream()->label(); + + // Get session ID for the local media stream. + int session_id = GetSessionId(descriptor); + if (session_id == -1) + return NULL; + + // Create the local audio renderer using the specified session ID. + scoped_refptr<WebRtcLocalAudioRenderer> local_renderer = + CreateLocalAudioRenderer(session_id); + return local_renderer; } NOTREACHED(); @@ -555,6 +596,19 @@ scoped_refptr<WebRtcAudioRenderer> MediaStreamImpl::CreateRemoteAudioRenderer( return new WebRtcAudioRenderer(RenderViewObserver::routing_id()); } +scoped_refptr<WebRtcLocalAudioRenderer> +MediaStreamImpl::CreateLocalAudioRenderer(int session_id) { + DCHECK_NE(session_id, -1); + // Ensure that the existing capturer reads data from the selected microphone. 
+ scoped_refptr<WebRtcAudioCapturer> source = + dependency_factory_->GetWebRtcAudioDevice()->capturer(); + source->SetDevice(session_id); + + // Create a new WebRtcLocalAudioRenderer instance and connect it to the + // existing WebRtcAudioCapturer so that the renderer can use it as source. + return new WebRtcLocalAudioRenderer(source, RenderViewObserver::routing_id()); +} + MediaStreamSourceExtraData::MediaStreamSourceExtraData( const StreamDeviceInfo& device_info) : device_info_(device_info) { diff --git a/content/renderer/media/media_stream_impl.h b/content/renderer/media/media_stream_impl.h index 268b06c..3760bcb 100644 --- a/content/renderer/media/media_stream_impl.h +++ b/content/renderer/media/media_stream_impl.h @@ -37,6 +37,7 @@ class MediaStreamDependencyFactory; class MediaStreamDispatcher; class VideoCaptureImplManager; class WebRtcAudioRenderer; +class WebRtcLocalAudioRenderer; // MediaStreamImpl is a delegate for the Media Stream API messages used by // WebKit. It ties together WebKit, native PeerConnection in libjingle and @@ -172,7 +173,9 @@ class CONTENT_EXPORT MediaStreamImpl webrtc::MediaStreamInterface* stream, const scoped_refptr<base::MessageLoopProxy>& message_loop); scoped_refptr<WebRtcAudioRenderer> CreateRemoteAudioRenderer( - webrtc::MediaStreamInterface* stream); + webrtc::MediaStreamInterface* stream); + scoped_refptr<WebRtcLocalAudioRenderer> CreateLocalAudioRenderer( + int session_id); // Weak ref to a MediaStreamDependencyFactory, owned by the RenderThread. // It's valid for the lifetime of RenderThread. 
diff --git a/content/renderer/media/webrtc_audio_capturer.cc b/content/renderer/media/webrtc_audio_capturer.cc index ce714ba..7486e08 100644 --- a/content/renderer/media/webrtc_audio_capturer.cc +++ b/content/renderer/media/webrtc_audio_capturer.cc @@ -4,12 +4,15 @@ #include "content/renderer/media/webrtc_audio_capturer.h" +#include "base/bind.h" #include "base/logging.h" #include "base/metrics/histogram.h" #include "base/string_util.h" +#include "content/common/child_process.h" #include "content/renderer/media/audio_device_factory.h" #include "content/renderer/media/audio_hardware.h" #include "content/renderer/media/webrtc_audio_device_impl.h" +#include "content/renderer/media/webrtc_local_audio_renderer.h" #include "media/audio/audio_util.h" #include "media/audio/sample_rates.h" @@ -66,11 +69,13 @@ scoped_refptr<WebRtcAudioCapturer> WebRtcAudioCapturer::CreateCapturer() { WebRtcAudioCapturer::WebRtcAudioCapturer() : source_(NULL), - running_(false) { + running_(false), + buffering_(false) { } WebRtcAudioCapturer::~WebRtcAudioCapturer() { DCHECK(sinks_.empty()); + DCHECK(!loopback_fifo_); } void WebRtcAudioCapturer::AddCapturerSink(WebRtcAudioCapturerSink* sink) { @@ -115,7 +120,59 @@ void WebRtcAudioCapturer::SetCapturerSource( source->Initialize(params_, this, this); } +void WebRtcAudioCapturer::SetStopCallback( + const base::Closure& on_device_stopped_cb) { + DVLOG(1) << "WebRtcAudioCapturer::SetStopCallback()"; + base::AutoLock auto_lock(lock_); + on_device_stopped_cb_ = on_device_stopped_cb; +} + +void WebRtcAudioCapturer::PrepareLoopback() { + DVLOG(1) << "WebRtcAudioCapturer::PrepareLoopback()"; + base::AutoLock auto_lock(lock_); + DCHECK(!loopback_fifo_); + + // TODO(henrika): we could add a more dynamic solution here but I prefer + // a fixed size combined with bad audio at overflow. The alternative is + // that we start to build up latency and that can be more difficult to + // detect. 
Tests have shown that the FIFO never contains more than 2 or 3 + // audio frames but I have selected a max size of ten buffers just + // in case since these tests were performed on a 16 core, 64GB Win 7 + // machine. We could also add some sort of error notifier in this area if + // the FIFO overflows. + loopback_fifo_.reset(new media::AudioFifo(params_.channels(), + 10 * params_.frames_per_buffer())); + buffering_ = true; +} + +void WebRtcAudioCapturer::CancelLoopback() { + DVLOG(1) << "WebRtcAudioCapturer::CancelLoopback()"; + base::AutoLock auto_lock(lock_); + buffering_ = false; + if (loopback_fifo_.get() != NULL) { + loopback_fifo_->Clear(); + loopback_fifo_.reset(); + } +} + +void WebRtcAudioCapturer::PauseBuffering() { + DVLOG(1) << "WebRtcAudioCapturer::PauseBuffering()"; + base::AutoLock auto_lock(lock_); + buffering_ = false; +} + +void WebRtcAudioCapturer::ResumeBuffering() { + DVLOG(1) << "WebRtcAudioCapturer::ResumeBuffering()"; + base::AutoLock auto_lock(lock_); + if (buffering_) + return; + if (loopback_fifo_.get() != NULL) + loopback_fifo_->Clear(); + buffering_ = true; +} + bool WebRtcAudioCapturer::Initialize() { + DVLOG(1) << "WebRtcAudioCapturer::Initialize()"; // Ask the browser for the default audio input hardware sample-rate. // This request is based on a synchronous IPC message. // TODO(xians): we should ask for the native sample rate of a specific device. @@ -163,7 +220,29 @@ bool WebRtcAudioCapturer::Initialize() { return true; } +void WebRtcAudioCapturer::ProvideInput(media::AudioBus* dest) { + base::AutoLock auto_lock(lock_); + DCHECK(loopback_fifo_.get() != NULL); + + if (!running_) { + dest->Zero(); + return; + } + + // Provide data by reading from the FIFO if the FIFO contains enough + // to fulfill the request. + if (loopback_fifo_->frames() >= dest->frames()) { + loopback_fifo_->Consume(dest, 0, dest->frames()); + } else { + dest->Zero(); + // This warning is perfectly safe if it happens for the first audio + // frames. 
It should not happen in a steady-state mode. + DLOG(WARNING) << "WARNING: loopback FIFO is empty."; + } +} + void WebRtcAudioCapturer::Start() { + DVLOG(1) << "WebRtcAudioCapturer::Start()"; base::AutoLock auto_lock(lock_); if (running_) return; @@ -177,12 +256,20 @@ void WebRtcAudioCapturer::Start() { } void WebRtcAudioCapturer::Stop() { + DVLOG(1) << "WebRtcAudioCapturer::Stop()"; scoped_refptr<media::AudioCapturerSource> source; { base::AutoLock auto_lock(lock_); if (!running_) return; + // Ignore the Stop() request if we need to continue running for the + // local capturer. + if (loopback_fifo_) { + loopback_fifo_->Clear(); + return; + } + source = source_; running_ = false; } @@ -192,6 +279,7 @@ void WebRtcAudioCapturer::Stop() { } void WebRtcAudioCapturer::SetVolume(double volume) { + DVLOG(1) << "WebRtcAudioCapturer::SetVolume()"; base::AutoLock auto_lock(lock_); if (source_) @@ -199,6 +287,7 @@ void WebRtcAudioCapturer::SetVolume(double volume) { } void WebRtcAudioCapturer::SetDevice(int session_id) { + DVLOG(1) << "WebRtcAudioCapturer::SetDevice(" << session_id << ")"; base::AutoLock auto_lock(lock_); if (source_) source_->SetDevice(session_id); @@ -210,6 +299,11 @@ void WebRtcAudioCapturer::SetAutomaticGainControl(bool enable) { source_->SetAutomaticGainControl(enable); } +bool WebRtcAudioCapturer::IsInLoopbackMode() { + base::AutoLock auto_lock(lock_); + return (loopback_fifo_ != NULL); +} + void WebRtcAudioCapturer::Capture(media::AudioBus* audio_source, int audio_delay_milliseconds, double volume) { @@ -224,6 +318,17 @@ void WebRtcAudioCapturer::Capture(media::AudioBus* audio_source, // Copy the sink list to a local variable. sinks = sinks_; + + // Push captured audio to FIFO so it can be read by a local sink. + // Buffering is only enabled if we are rendering a local media stream. 
+ if (loopback_fifo_ && buffering_) { + if (loopback_fifo_->frames() + audio_source->frames() <= + loopback_fifo_->max_frames()) { + loopback_fifo_->Push(audio_source); + } else { + DLOG(WARNING) << "FIFO is full"; + } + } } // Interleave, scale, and clip input to int and store result in @@ -251,7 +356,24 @@ void WebRtcAudioCapturer::OnDeviceStarted(const std::string& device_id) { } void WebRtcAudioCapturer::OnDeviceStopped() { - NOTIMPLEMENTED(); + DCHECK_EQ(MessageLoop::current(), ChildProcess::current()->io_message_loop()); + DVLOG(1) << "WebRtcAudioCapturer::OnDeviceStopped()"; + { + base::AutoLock auto_lock(lock_); + running_ = false; + buffering_ = false; + if (loopback_fifo_) { + loopback_fifo_->Clear(); + } + } + + // Inform the local renderer about the stopped device. + // The renderer can then save resources by not asking for more data from + // the stopped source. We are on the IO thread but the callback task will + // be posted on the message loop of the main render thread thanks to + // usage of BindToLoop() when the callback was initialized. 
+ if (!on_device_stopped_cb_.is_null()) + on_device_stopped_cb_.Run(); } } // namespace content diff --git a/content/renderer/media/webrtc_audio_capturer.h b/content/renderer/media/webrtc_audio_capturer.h index c584a53..a3a22b1 100644 --- a/content/renderer/media/webrtc_audio_capturer.h +++ b/content/renderer/media/webrtc_audio_capturer.h @@ -8,41 +8,83 @@ #include <list> #include <string> +#include "base/callback.h" #include "base/memory/ref_counted.h" #include "base/synchronization/lock.h" #include "content/renderer/media/webrtc_audio_device_impl.h" +#include "content/renderer/media/webrtc_local_audio_renderer.h" #include "media/audio/audio_input_device.h" #include "media/base/audio_capturer_source.h" +#include "media/base/audio_fifo.h" + +namespace media { +class AudioBus; +} namespace content { class WebRtcAudioCapturerSink; +class WebRtcLocalAudioRenderer; // This class manages the capture data flow by getting data from its // |source_|, and passing it to its |sink_|. // It allows clients to inject their own capture data source by calling -// SetCapturerSource(). +// SetCapturerSource(). It is also possible to enable a local sink and +// register a callback which the sink can call when it wants to read captured +// data cached in a FIFO for local loopback rendering. +// The threading model for this class is rather messy since it will be +// created on a Libjingle thread, captured data is provided on a dedicated +// AudioInputDevice thread, and methods can be called either on the Libjingle +// thread or on the main render thread but also other client threads +// if an alternative AudioCapturerSource has been set. In addition, the +// AudioCapturerSource::CaptureEventHandler methods are called on the IO thread +// and requests for data to render is done on the AudioOutputDevice thread. 
class WebRtcAudioCapturer : public base::RefCountedThreadSafe<WebRtcAudioCapturer>, public media::AudioCapturerSource::CaptureCallback, - public media::AudioCapturerSource::CaptureEventHandler { + public media::AudioCapturerSource::CaptureEventHandler, + public content::WebRtcLocalAudioRenderer::LocalRenderCallback { public: // Use to construct the audio capturer. static scoped_refptr<WebRtcAudioCapturer> CreateCapturer(); - // Called by the client on the sink side. Return false if the capturer has - // not been initialized successfully. + // Called by the client on the sink side to add a sink. void AddCapturerSink(WebRtcAudioCapturerSink* sink); - // Called by the client on the sink side to remove + // Called by the client on the sink side to remove a sink. void RemoveCapturerSink(WebRtcAudioCapturerSink* sink); - // SetCapturerSource() is called if client on the source side desires to + // SetCapturerSource() is called if the client on the source side desires to // provide their own captured audio data. Client is responsible for calling // Start() on its own source to have the ball rolling. void SetCapturerSource( const scoped_refptr<media::AudioCapturerSource>& source); + // The |on_device_stopped_cb| callback will be called in OnDeviceStopped(). + void SetStopCallback(const base::Closure& on_device_stopped_cb); + + // Informs this class that a local sink shall be used in addition to the + // registered WebRtcAudioCapturerSink sink(s). The capturer will enter a + // buffering mode and store all incoming audio frames in a local FIFO. + // The renderer will read data from this buffer using the ProvideInput() + // method. Called on the main render thread. + void PrepareLoopback(); + + // Cancels loopback mode and stops buffering local copies of captured + // data in the FIFO. + // Called on the main render thread. + void CancelLoopback(); + + // Pauses buffering of captured data. Does only have an effect if a local + // sink is used. 
+ // Called on the main render thread. + void PauseBuffering(); + + // Resumes buffering of captured data. Does only have an effect if a local + // sink is used. + // Called on the main render thread. + void ResumeBuffering(); + // Starts recording audio. void Start(); @@ -60,16 +102,33 @@ class WebRtcAudioCapturer bool is_recording() const { return running_; } + // Returns true if a local renderer has called PrepareLoopback() and it can + // be utilized to prevent more than one local renderer. + bool IsInLoopbackMode(); + + // Audio parameters utilized by the audio capturer. Can be utilized by + // a local renderer to set up a renderer using identical parameters as the + // capturer. + const media::AudioParameters& audio_parameter() const { return params_; } + // AudioCapturerSource::CaptureCallback implementation. + // Called on the AudioInputDevice audio thread. virtual void Capture(media::AudioBus* audio_source, int audio_delay_milliseconds, double volume) OVERRIDE; virtual void OnCaptureError() OVERRIDE; // AudioCapturerSource::CaptureEventHandler implementation. + // Called on the IO thread. virtual void OnDeviceStarted(const std::string& device_id) OVERRIDE; virtual void OnDeviceStopped() OVERRIDE; + // WebRtcLocalAudioRenderer::LocalRenderCallback implementation. + // Reads stored captured data from a local FIFO. This method is used in + // combination with a local sink to render captured audio in loopback. + // This method is called on the AudioOutputDevice worker thread. + virtual void ProvideInput(media::AudioBus* dest) OVERRIDE; + protected: friend class base::RefCountedThreadSafe<WebRtcAudioCapturer>; virtual ~WebRtcAudioCapturer(); @@ -80,9 +139,13 @@ class WebRtcAudioCapturer WebRtcAudioCapturer(); // Initializes the capturer, called right after the object is created. - // Returns false if the initializetion fails. + // Returns false if the initialization fails. 
bool Initialize(); + // Protects |source_|, |sinks_|, |running_|, |on_device_stopped_cb_|, + // |loopback_fifo_| and |buffering_|. + base::Lock lock_; + // A list of sinks that the audio data is fed to. SinkList sinks_; @@ -98,8 +161,16 @@ class WebRtcAudioCapturer std::string device_id_; bool running_; - // Protect access to |source_|, |sinks_|, |running_|. - base::Lock lock_; + // Callback object which is called during OnDeviceStopped(). + // Informs a local sink that it should stop asking for data. + base::Closure on_device_stopped_cb_; + + // Contains copies of captured audio frames. Only utilized in loopback + // mode when a local sink has been set. + scoped_ptr<media::AudioFifo> loopback_fifo_; + + // True when FIFO is utilized, false otherwise. + bool buffering_; DISALLOW_COPY_AND_ASSIGN(WebRtcAudioCapturer); }; diff --git a/content/renderer/media/webrtc_audio_device_impl.h b/content/renderer/media/webrtc_audio_device_impl.h index 61f2ad5..701a7c2 100644 --- a/content/renderer/media/webrtc_audio_device_impl.h +++ b/content/renderer/media/webrtc_audio_device_impl.h @@ -375,6 +375,10 @@ class CONTENT_EXPORT WebRtcAudioDeviceImpl // Sets the |renderer_|, returns false if |renderer_| has already existed. bool SetRenderer(WebRtcAudioRenderer* renderer); + const scoped_refptr<WebRtcAudioCapturer>& capturer() const { + return capturer_; + } + // Accessors. int input_buffer_size() const { return input_audio_parameters_.frames_per_buffer(); diff --git a/content/renderer/media/webrtc_audio_renderer.cc b/content/renderer/media/webrtc_audio_renderer.cc index 2012d23..182fb5b 100644 --- a/content/renderer/media/webrtc_audio_renderer.cc +++ b/content/renderer/media/webrtc_audio_renderer.cc @@ -207,6 +207,11 @@ bool WebRtcAudioRenderer::Initialize(WebRtcAudioRendererSource* source) { return true; } +void WebRtcAudioRenderer::Start() { + // TODO(xians): refactor to make usage of Start/Stop more symmetric. 
+ NOTIMPLEMENTED(); +} + void WebRtcAudioRenderer::Play() { base::AutoLock auto_lock(lock_); if (state_ == UNINITIALIZED) @@ -242,6 +247,14 @@ void WebRtcAudioRenderer::SetVolume(float volume) { sink_->SetVolume(volume); } +base::TimeDelta WebRtcAudioRenderer::GetCurrentRenderTime() const { + return base::TimeDelta(); +} + +bool WebRtcAudioRenderer::IsLocalRenderer() const { + return false; +} + int WebRtcAudioRenderer::Render(media::AudioBus* audio_bus, int audio_delay_milliseconds) { { diff --git a/content/renderer/media/webrtc_audio_renderer.h b/content/renderer/media/webrtc_audio_renderer.h index 28d5f42..44e58a7 100644 --- a/content/renderer/media/webrtc_audio_renderer.h +++ b/content/renderer/media/webrtc_audio_renderer.h @@ -32,10 +32,13 @@ class CONTENT_EXPORT WebRtcAudioRenderer // Methods called by WebMediaPlayerMS and WebRtcAudioDeviceImpl. // MediaStreamAudioRenderer implementation. + virtual void Start() OVERRIDE; virtual void Play() OVERRIDE; virtual void Pause() OVERRIDE; virtual void Stop() OVERRIDE; virtual void SetVolume(float volume) OVERRIDE; + virtual base::TimeDelta GetCurrentRenderTime() const OVERRIDE; + virtual bool IsLocalRenderer() const OVERRIDE; protected: virtual ~WebRtcAudioRenderer(); diff --git a/content/renderer/media/webrtc_local_audio_renderer.cc b/content/renderer/media/webrtc_local_audio_renderer.cc new file mode 100644 index 0000000..d6efff6 --- /dev/null +++ b/content/renderer/media/webrtc_local_audio_renderer.cc @@ -0,0 +1,270 @@ +// Copyright (c) 2012 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#include "content/renderer/media/webrtc_local_audio_renderer.h" + +#include "base/atomicops.h" +#include "base/bind.h" +#include "base/bind_helpers.h" +#include "base/command_line.h" +#include "base/debug/trace_event.h" +#include "base/logging.h" +#include "base/message_loop_proxy.h" +#include "base/synchronization/lock.h" +#include "content/renderer/media/audio_device_factory.h" +#include "content/renderer/media/renderer_audio_output_device.h" +#include "content/renderer/media/webrtc_audio_capturer.h" +#include "media/base/bind_to_loop.h" +#include "media/base/media_switches.h" + +namespace content { + +// WebRtcLocalAudioRenderer::AudioCallback wraps the AudioOutputDevice thread, +// receives callbacks on that thread and proxies these requests to the capture +// sink. +class WebRtcLocalAudioRenderer::AudioCallback + : public media::AudioRendererSink::RenderCallback { + public: + explicit AudioCallback(LocalRenderCallback* source); + virtual ~AudioCallback(); + + // media::AudioRendererSink::RenderCallback implementation. + // Render() is called on the AudioOutputDevice thread and OnRenderError() + // on the IO thread. + virtual int Render(media::AudioBus* audio_bus, + int audio_delay_milliseconds) OVERRIDE; + virtual void OnRenderError() OVERRIDE; + + void Start(); + void Play(); + void Pause(); + bool Playing() const; + + base::TimeDelta total_render_time() const { return total_render_time_; } + + private: + // Get data from this source on each render request. + LocalRenderCallback* source_; + + // Set when playing, cleared when paused. + base::subtle::Atomic32 playing_; + + // Stores last time a render callback was received. The time difference + // between a new time stamp and this value can be used to derive the + // total render time. + base::Time last_render_time_; + + // Keeps track of total time audio has been rendered. 
+ base::TimeDelta total_render_time_; + + // Protects |total_render_time_|. + base::Lock lock_; + + DISALLOW_COPY_AND_ASSIGN(AudioCallback); +}; + +// WebRtcLocalAudioRenderer::AudioCallback implementation. +WebRtcLocalAudioRenderer::AudioCallback::AudioCallback( + LocalRenderCallback* source) + : source_(source), + playing_(false) { +} + +WebRtcLocalAudioRenderer::AudioCallback::~AudioCallback() {} + +void WebRtcLocalAudioRenderer::AudioCallback::Start() { + last_render_time_ = base::Time::Now(); + base::subtle::Release_Store(&playing_, 0); +} + +void WebRtcLocalAudioRenderer::AudioCallback::Play() { + base::subtle::Release_Store(&playing_, 1); + last_render_time_ = base::Time::Now(); +} + +void WebRtcLocalAudioRenderer::AudioCallback::Pause() { + base::subtle::Release_Store(&playing_, 0); +} + +bool WebRtcLocalAudioRenderer::AudioCallback::Playing() const { + return (base::subtle::Acquire_Load(&playing_) != false); +} + +// media::AudioRendererSink::RenderCallback implementation +int WebRtcLocalAudioRenderer::AudioCallback::Render( + media::AudioBus* audio_bus, int audio_delay_milliseconds) { + + if (!Playing()) + return 0; + + base::Time now = base::Time::Now(); + { + base::AutoLock auto_lock(lock_); + total_render_time_ += now - last_render_time_; + } + last_render_time_ = now; + + TRACE_EVENT0("audio", "WebRtcLocalAudioRenderer::AudioCallback::Render"); + + // Acquire audio samples from the FIFO owned by the capturing source. + source_->ProvideInput(audio_bus); + return audio_bus->frames(); +} + +void WebRtcLocalAudioRenderer::AudioCallback::OnRenderError() { + NOTIMPLEMENTED(); +} + +// WebRtcLocalAudioRenderer::WebRtcLocalAudioRenderer implementation. 
+WebRtcLocalAudioRenderer::WebRtcLocalAudioRenderer( + const scoped_refptr<WebRtcAudioCapturer>& source, + int source_render_view_id) + : source_(source), + source_render_view_id_(source_render_view_id) { + DVLOG(1) << "WebRtcLocalAudioRenderer::WebRtcLocalAudioRenderer()"; +} + +WebRtcLocalAudioRenderer::~WebRtcLocalAudioRenderer() { + DCHECK(thread_checker_.CalledOnValidThread()); + DCHECK(!Started()); + DVLOG(1) << "WebRtcLocalAudioRenderer::~WebRtcLocalAudioRenderer()"; +} + +void WebRtcLocalAudioRenderer::Start() { + DCHECK(thread_checker_.CalledOnValidThread()); + DCHECK(!Started()); + DVLOG(1) << "WebRtcLocalAudioRenderer::Start()"; + + if (source_->IsInLoopbackMode()) { + DLOG(ERROR) << "Only one local renderer is supported"; + return; + } + + // Create the audio callback instance and attach the capture source to ensure + // that the renderer can read data from the loopback buffer in the capture + // source. + callback_.reset(new WebRtcLocalAudioRenderer::AudioCallback(source_)); + + // Register the local renderer with its source. We are a sink seen from the + // source perspective. + const base::Closure& on_device_stopped_cb = media::BindToCurrentLoop( + base::Bind( + &WebRtcLocalAudioRenderer::OnSourceCaptureDeviceStopped, this)); + source_->PrepareLoopback(); + source_->SetStopCallback(on_device_stopped_cb); + + // Use the capturing source audio parameters when opening the output audio + // device. Any mismatch will be compensated for by the audio output back-end. + // Note that the buffer size is modified to make the full-duplex scheme less + // resource intensive. By doubling the buffer size (compared to the capture + // side), the callback frequency of browser side callbacks will be lower and + // tests have shown that it resolves issues with audio glitches for some + // cases where resampling is needed on the output side. 
+ // TODO(henrika): verify this scheme on as many different devices and + // combinations of sample rates as possible. + media::AudioParameters source_params = source_->audio_parameter(); + media::AudioParameters sink_params(source_params.format(), + source_params.channel_layout(), + source_params.sample_rate(), + source_params.bits_per_sample(), + 2 * source_params.frames_per_buffer()); + sink_ = AudioDeviceFactory::NewOutputDevice(); + if (CommandLine::ForCurrentProcess()->HasSwitch( + switches::kEnableWebAudioInput)) { + // TODO(henrika): we could utilize the unified audio here instead and do + // sink_->InitializeIO(sink_params, 2, callback_.get()); + // It would then be possible to avoid using the WebRtcAudioCapturer. + DVLOG(1) << "enable-webaudio-input command-line flag is enabled"; + } + sink_->Initialize(sink_params, callback_.get()); + sink_->SetSourceRenderView(source_render_view_id_); + + // Start local rendering and the capturer. Note that the capturer is owned + // by the WebRTC ADM and might already be running. + source_->Start(); + sink_->Start(); + callback_->Start(); +} + +void WebRtcLocalAudioRenderer::Stop() { + DCHECK(thread_checker_.CalledOnValidThread()); + DVLOG(1) << "WebRtcLocalAudioRenderer::Stop()"; + + if (!Started()) + return; + + // Stop the output audio stream, i.e., stop asking for data to render. + sink_->Stop(); + sink_ = NULL; + + // Delete the audio callback before deleting the source since the callback + // is using the source. + callback_.reset(); + + // Unregister this class as a sink to the capturing source and invalidate the + // source pointer. This call clears the local FIFO in the capturer, and at + // the same time, ensures that recorded data is no longer added to the FIFO. + // Note that we do not stop the capturer here since it may still be used by + // the WebRTC ADM. 
+ source_->CancelLoopback(); + source_ = NULL; +} + +void WebRtcLocalAudioRenderer::Play() { + DCHECK(thread_checker_.CalledOnValidThread()); + DVLOG(1) << "WebRtcLocalAudioRenderer::Play()"; + + if (!Started()) + return; + + // Resumes rendering by ensuring that WebRtcLocalAudioRenderer::Render() + // now reads data from the local FIFO in the capturing source. + callback_->Play(); + source_->ResumeBuffering(); +} + +void WebRtcLocalAudioRenderer::Pause() { + DCHECK(thread_checker_.CalledOnValidThread()); + DVLOG(1) << "WebRtcLocalAudioRenderer::Pause()"; + + if (!Started()) + return; + + // Temporarily suspends rendering audio. + // WebRtcLocalAudioRenderer::Render() will return early during this state + // and no data will be provided to the active sink. + callback_->Pause(); + source_->PauseBuffering(); +} + +void WebRtcLocalAudioRenderer::SetVolume(float volume) { + DCHECK(thread_checker_.CalledOnValidThread()); + DVLOG(1) << "WebRtcLocalAudioRenderer::SetVolume(" << volume << ")"; + if (sink_) + sink_->SetVolume(volume); +} + +base::TimeDelta WebRtcLocalAudioRenderer::GetCurrentRenderTime() const { + DCHECK(thread_checker_.CalledOnValidThread()); + if (!Started()) + return base::TimeDelta(); + return callback_->total_render_time(); +} + +bool WebRtcLocalAudioRenderer::IsLocalRenderer() const { + return true; +} + +void WebRtcLocalAudioRenderer::OnSourceCaptureDeviceStopped() { + DCHECK(thread_checker_.CalledOnValidThread()); + DVLOG(1) << "WebRtcLocalAudioRenderer::OnSourceCaptureDeviceStopped()"; + if (!Started()) + return; + + // The capture device has stopped and we should therefore stop all activity + // as well to save resources. 
+ Stop(); +} + +} // namespace content diff --git a/content/renderer/media/webrtc_local_audio_renderer.h b/content/renderer/media/webrtc_local_audio_renderer.h new file mode 100644 index 0000000..26215fd --- /dev/null +++ b/content/renderer/media/webrtc_local_audio_renderer.h @@ -0,0 +1,104 @@ +// Copyright (c) 2012 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef CONTENT_RENDERER_MEDIA_WEBRTC_LOCAL_AUDIO_RENDERER_H_ +#define CONTENT_RENDERER_MEDIA_WEBRTC_LOCAL_AUDIO_RENDERER_H_ + +#include "base/callback.h" +#include "base/memory/ref_counted.h" +#include "base/threading/thread_checker.h" +#include "content/common/content_export.h" +#include "webkit/media/media_stream_audio_renderer.h" + +namespace media { +class AudioBus; +class AudioParameters; +} + +namespace content { + +class RendererAudioOutputDevice; +class WebRtcAudioCapturer; + +// WebRtcLocalAudioRenderer is a webkit_media::MediaStreamAudioRenderer +// designed for rendering local audio media stream tracks, +// http://dev.w3.org/2011/webrtc/editor/getusermedia.html#mediastreamtrack +// It also implements media::AudioRendererSink::RenderCallback to render audio +// data provided from a WebRtcAudioCapturer source which is set at construction. +// When the audio layer in the browser process asks for data to render, this +// class provides the data by reading from the source using the registered +// WebRtcAudioCapturer source. +// TODO(henrika): improve by using similar principles as in RTCVideoRenderer +// which register itself to the video track when the provider is started and +// deregisters itself when it is stopped. +// Tracking this at http://crbug.com/164813. +class CONTENT_EXPORT WebRtcLocalAudioRenderer + : NON_EXPORTED_BASE(public webkit_media::MediaStreamAudioRenderer) { + public: + + class LocalRenderCallback { + public: + // Audio data is provided to the caller using this callback. 
+ virtual void ProvideInput(media::AudioBus* dest) = 0; + + protected: + virtual ~LocalRenderCallback() {} + }; + + // Creates a local renderer and registers a capturing |source| object. + // The |source| is owned by the WebRtcAudioDeviceImpl. + // Called on the main thread. + WebRtcLocalAudioRenderer(const scoped_refptr<WebRtcAudioCapturer>& source, + int source_render_view_id); + + // webkit_media::MediaStreamAudioRenderer implementation. + // Called on the main thread. + virtual void Start() OVERRIDE; + virtual void Stop() OVERRIDE; + virtual void Play() OVERRIDE; + virtual void Pause() OVERRIDE; + virtual void SetVolume(float volume) OVERRIDE; + virtual base::TimeDelta GetCurrentRenderTime() const OVERRIDE; + virtual bool IsLocalRenderer() const OVERRIDE; + + bool Started() const { return (callback_ != NULL); } + + protected: + virtual ~WebRtcLocalAudioRenderer(); + + private: + // Called by the WebRtcAudioCapturer when the capture device has stopped. + void OnSourceCaptureDeviceStopped(); + + // Private class which implements AudioRendererSink::RenderCallback + // and also wraps members which can be accessed both on the main render + // thread and the AudioOutputDevice media thread. + class AudioCallback; + + // The actual WebRtcLocalAudioRenderer::AudioCallback instance is created + // in Start() and released in Stop(). + scoped_ptr<WebRtcLocalAudioRenderer::AudioCallback> callback_; + + // The source of data to render. Given that this class implements local + // loopback, the source is a capture instance reading data from the + // selected microphone. The recorded data is stored in a FIFO and consumed + // by this class when the sink asks for new data. + // The WebRtcAudioCapturer is today created by WebRtcAudioDeviceImpl. + scoped_refptr<WebRtcAudioCapturer> source_; + + // The render view in which the audio is rendered into |sink_|. + const int source_render_view_id_; + + // The sink (destination) for rendered audio. 
+ scoped_refptr<RendererAudioOutputDevice> sink_; + + // Used to DCHECK that we are called on the correct thread. + base::ThreadChecker thread_checker_; + + DISALLOW_COPY_AND_ASSIGN(WebRtcLocalAudioRenderer); +}; + +} // namespace content + +#endif // CONTENT_RENDERER_MEDIA_WEBRTC_LOCAL_AUDIO_RENDERER_H_ |