-rw-r--r--  chrome/renderer/media/audio_renderer_impl.cc     | 150
-rw-r--r--  chrome/renderer/media/audio_renderer_impl.h      | 163
-rw-r--r--  chrome/renderer/render_view.cc                   |  15
-rw-r--r--  chrome/renderer/webmediaplayer_delegate_impl.cc  |  17
-rw-r--r--  chrome/renderer/webmediaplayer_delegate_impl.h   |  14
-rw-r--r--  media/base/pipeline.h                            |   2
-rw-r--r--  media/filters/audio_renderer_base.h              |   2
7 files changed, 315 insertions(+), 48 deletions(-)
diff --git a/chrome/renderer/media/audio_renderer_impl.cc b/chrome/renderer/media/audio_renderer_impl.cc
index 9dbd392..5df76ba 100644
--- a/chrome/renderer/media/audio_renderer_impl.cc
+++ b/chrome/renderer/media/audio_renderer_impl.cc
@@ -3,50 +3,160 @@
// LICENSE file.
#include "chrome/renderer/media/audio_renderer_impl.h"
+#include "chrome/renderer/render_view.h"
+#include "chrome/renderer/render_thread.h"
+#include "chrome/renderer/webmediaplayer_delegate_impl.h"
+#include "media/base/filter_host.h"
+
+// We'll try to fill 4096 samples per buffer, which is roughly 92ms of audio
+// data for a 44.1kHz audio source.
+static const size_t kSamplesPerBuffer = 4096;
AudioRendererImpl::AudioRendererImpl(WebMediaPlayerDelegateImpl* delegate)
- : delegate_(delegate) {
+ : AudioRendererBase(kDefaultMaxQueueSize),
+ delegate_(delegate),
+ stream_id_(0),
+ shared_memory_(NULL),
+ shared_memory_size_(0),
+ packet_requested_(false),
+ render_loop_(RenderThread::current()->message_loop()),
+ resource_release_event_(true, false) {
+ // TODO(hclam): do we need to move this method call to render thread?
+ delegate_->SetAudioRenderer(this);
}
AudioRendererImpl::~AudioRendererImpl() {
}
-void AudioRendererImpl::Stop() {
- // TODO(scherkus): implement Stop.
- NOTIMPLEMENTED();
+bool AudioRendererImpl::IsMediaFormatSupported(
+ const media::MediaFormat* media_format) {
+ int channels;
+ int sample_rate;
+ int sample_bits;
+ return ParseMediaFormat(media_format, &channels, &sample_rate, &sample_bits);
}
-bool AudioRendererImpl::Initialize(media::AudioDecoder* decoder) {
- // TODO(scherkus): implement Initialize.
- NOTIMPLEMENTED();
- return false;
+bool AudioRendererImpl::OnInitialize(const media::MediaFormat* media_format) {
+ // Parse integer values in MediaFormat.
+ int channels;
+ int sample_rate;
+ int sample_bits;
+ if (!ParseMediaFormat(media_format, &channels, &sample_rate, &sample_bits)) {
+ return false;
+ }
+
+ // Create the audio output stream in browser process.
+ size_t packet_size = kSamplesPerBuffer * channels * sample_bits / 8;
+ render_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this, &AudioRendererImpl::OnCreateAudioStream,
+ AudioManager::AUDIO_PCM_LINEAR, channels, sample_rate, sample_bits,
+ packet_size));
+ return true;
}
-void AudioRendererImpl::SetVolume(float volume) {
- // TODO(scherkus): implement SetVolume.
- NOTIMPLEMENTED();
+void AudioRendererImpl::OnStop() {
+ if (!resource_release_event_.IsSignaled()) {
+ render_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this, &AudioRendererImpl::ReleaseRendererResources));
+ resource_release_event_.Wait();
+ }
}
-bool AudioRendererImpl::IsMediaFormatSupported(
- const media::MediaFormat* format) {
- // TODO(hclam): check the format correct.
- return true;
+void AudioRendererImpl::OnAssignment(media::Buffer* buffer_in) {
+ // Use the base class to queue the buffer.
+ AudioRendererBase::OnAssignment(buffer_in);
+ // Post a task to the render thread to signal that a packet has arrived.
+ render_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this, &AudioRendererImpl::OnNotifyAudioPacketReady));
}
-void AudioRendererImpl::OnRequestPacket() {
- // TODO(hclam): implement this.
+void AudioRendererImpl::SetPlaybackRate(float rate) {
+ // TODO(hclam): handle playback rates not equal to 1.0.
+ if (rate == 1.0f) {
+ // TODO(hclam): what should I do here? OnCreated has fired StartAudioStream
+ // in the browser process, it seems there's nothing to do here.
+ } else {
+ NOTIMPLEMENTED();
+ }
+}
+
+void AudioRendererImpl::SetVolume(float volume) {
+ // TODO(hclam): change this to multichannel if possible.
+ render_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(
+ this, &AudioRendererImpl::OnSetAudioVolume, volume, volume));
}
void AudioRendererImpl::OnCreated(base::SharedMemoryHandle handle,
size_t length) {
- // TODO(hclam): implement this.
+ shared_memory_.reset(new base::SharedMemory(handle, false));
+ shared_memory_size_ = length;
+ // TODO(hclam): is there any better place to do this?
+ OnStartAudioStream();
+}
+
+void AudioRendererImpl::OnRequestPacket() {
+ packet_requested_ = true;
}
void AudioRendererImpl::OnStateChanged(AudioOutputStream::State state,
int info) {
- // TODO(hclam): implement this.
+ switch (state) {
+ case AudioOutputStream::STATE_ERROR:
+ host_->Error(media::PIPELINE_ERROR_AUDIO_HARDWARE);
+ break;
+ // TODO(hclam): handle these events.
+ case AudioOutputStream::STATE_STARTED:
+ case AudioOutputStream::STATE_PAUSED:
+ break;
+ default:
+ NOTREACHED();
+ break;
+ }
}
void AudioRendererImpl::OnVolume(double left, double right) {
- // TODO(hclam): implement this.
+ // TODO(hclam): decide whether we need to report the current volume to
+ // pipeline.
+}
+
+void AudioRendererImpl::ReleaseRendererResources() {
+ OnCloseAudioStream();
+ resource_release_event_.Signal();
+}
+
+void AudioRendererImpl::OnCreateAudioStream(
+ AudioManager::Format format, int channels, int sample_rate,
+ int bits_per_sample, size_t packet_size) {
+ stream_id_ = delegate_->view()->CreateAudioStream(
+ this, format, channels, sample_rate, bits_per_sample, packet_size);
+}
+
+void AudioRendererImpl::OnStartAudioStream() {
+ delegate_->view()->StartAudioStream(stream_id_);
+}
+
+void AudioRendererImpl::OnCloseAudioStream() {
+ // Unregister ourselves from RenderView; we will not be called anymore.
+ delegate_->view()->CloseAudioStream(stream_id_);
+}
+
+void AudioRendererImpl::OnSetAudioVolume(double left, double right) {
+ delegate_->view()->SetAudioVolume(stream_id_, left, right);
+}
+
+void AudioRendererImpl::OnNotifyAudioPacketReady() {
+ if (packet_requested_) {
+ DCHECK(shared_memory_.get());
+ // Fill the shared memory with audio data.
+ size_t filled = FillBuffer(static_cast<uint8*>(shared_memory_->memory()),
+ shared_memory_size_);
+ packet_requested_ = false;
+ // Then tell the browser process we are done filling the buffer.
+ delegate_->view()->NotifyAudioPacketReady(stream_id_, filled);
+ }
+}
+
+const media::MediaFormat* AudioRendererImpl::GetMediaFormat() {
+ return &media_format_;
}
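
A quick aside on the arithmetic behind kSamplesPerBuffer and the packet_size computed in OnInitialize() above: packet_size = samples * channels * bits / 8 bytes, and samples / sample_rate is the playback time one packet covers. The standalone sketch below works this out for an assumed 44.1 kHz, stereo, 16-bit source; it is illustration only, not part of the patch.

// Standalone sketch of the packet sizing used in AudioRendererImpl::OnInitialize().
// Not part of the patch; the source parameters below are assumed for illustration.
#include <cstddef>
#include <cstdio>

int main() {
  const std::size_t kSamplesPerBuffer = 4096;  // Matches the constant in the patch.
  const int channels = 2;                      // Assumed: stereo.
  const int sample_rate = 44100;               // Assumed: 44.1 kHz.
  const int sample_bits = 16;                  // Assumed: 16-bit PCM.

  // Bytes requested from the browser process for one packet.
  const std::size_t packet_size = kSamplesPerBuffer * channels * sample_bits / 8;

  // Playback time covered by one packet.
  const double duration_ms = 1000.0 * kSamplesPerBuffer / sample_rate;

  // Prints: packet_size = 16384 bytes, duration ~= 92.9 ms
  std::printf("packet_size = %zu bytes, duration ~= %.1f ms\n",
              packet_size, duration_ms);
  return 0;
}
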
diff --git a/chrome/renderer/media/audio_renderer_impl.h b/chrome/renderer/media/audio_renderer_impl.h
index 9d6b0b4..c0a8257 100644
--- a/chrome/renderer/media/audio_renderer_impl.h
+++ b/chrome/renderer/media/audio_renderer_impl.h
@@ -1,49 +1,184 @@
// Copyright (c) 2009 The Chromium Authors. All rights reserved. Use of this
// source code is governed by a BSD-style license that can be found in the
// LICENSE file.
+//
+// Audio rendering unit utilizing audio output stream provided by browser
+// process through IPC.
+//
+// Relationship of classes.
+//
+//   AudioRendererHost             AudioRendererImpl
+//           ^                             ^
+//           |                             |
+//           v              IPC            v
+//   ResourceMessageFilter <---------> RenderView
+//
+// The implementation of the interface with the audio device lives in
+// AudioRendererHost, which provides services and entry points in
+// ResourceMessageFilter, allowing IPC calls to interact with the audio
+// device. RenderView acts as a portal for these IPC calls and does no more
+// than delegation.
+//
+// Audio buffers are transported using shared memory. After
+// OnCreateAudioStream is executed and the audio output stream is successfully
+// created in the browser process, OnCreated is called along with a
+// SharedMemoryHandle. The same piece of shared memory is used during the
+// lifetime of this unit.
+//
+// This class lives on three threads during its lifetime, namely:
+// 1. Render thread
+// The thread in which this class is constructed and destroyed; interfacing
+// with RenderView should only happen here.
+// 2. Pipeline thread
+// Initialization and proper stopping of the filter happen here. Properties
+// of this filter are also set on this thread.
+// 3. Audio decoder thread (if there is one)
+// Responsible for decoding audio data and handing raw PCM data to this
+// object.
+//
+// Methods are categorized according to the thread(s) they run on.
+//
+// Render thread
+// +-- CreateFactory()
+// | Helper method for constructing this class.
+// |-- IsMediaFormatSupported()
+// | Helper method to identify media formats accepted by this class for
+// | construction.
+// |-- OnCreateAudioStream()
+// | Calls RenderView::CreateAudioStream().
+// |-- OnStartAudioStream()
+// | Calls RenderView::StartAudioStream().
+// |-- OnCloseAudioStream()
+// | Calls RenderView::CloseAudioStream().
+// |-- OnSetAudioVolume()
+// | Calls RenderView::SetAudioVolume().
+// |-- OnNotifyAudioPacketReady()
+// | Calls RenderView::NotifyAudioPacketReady().
+// |-- OnRequestPacket()
+// | Called from RenderView when an audio packet request is received
+// | from the browser process.
+// |-- OnStateChanged()
+// | Called from RenderView upon a state change of the audio output stream
+// | in the browser process. Errors of the output stream are reported here.
+// |-- OnCreated()
+// | Called from RenderView upon successful creation of the audio output
+// | stream in the browser process, along with a SharedMemoryHandle.
+// |-- OnVolume()
+// | Called from RenderView with the volume of the audio output stream.
+// \-- ReleaseRendererResources()
+// Releases resources that live on the render thread.
+//
+// Pipeline thread
+// +-- AudioRendererImpl()
+// | Constructor method.
+// |-- ~AudioRendererImpl()
+// | Destructor method.
+// |-- SetPlaybackRate()
+// | Receives the playback rate.
+// |-- GetMediaFormat()
+// | Obtains the current media format of this unit.
+// |-- SetVolume()
+// | Receives the volume.
+// |-- OnInitialize()
+// | Called from AudioRendererBase for the initialization event.
+// \-- OnStop()
+// Called from AudioRendererBase for the stop event.
+//
+// Audio decoder thread (if there is one)
+// \-- OnAssignment()
+// A raw PCM audio packet buffer is received here; this method is called
+// from the pipeline thread if there is no audio decoder thread.
#ifndef CHROME_RENDERER_MEDIA_AUDIO_RENDERER_IMPL_H_
#define CHROME_RENDERER_MEDIA_AUDIO_RENDERER_IMPL_H_
#include "base/shared_memory.h"
+#include "base/waitable_event.h"
#include "media/audio/audio_output.h"
#include "media/base/factory.h"
#include "media/base/filters.h"
+#include "media/filters/audio_renderer_base.h"
class WebMediaPlayerDelegateImpl;
-class AudioRendererImpl : public media::AudioRenderer {
+class AudioRendererImpl : public media::AudioRendererBase {
public:
- AudioRendererImpl(WebMediaPlayerDelegateImpl* delegate);
-
- // media::MediaFilter implementation.
- virtual void Stop();
-
- // media::AudioRenderer implementation.
- virtual bool Initialize(media::AudioDecoder* decoder);
- virtual void SetVolume(float volume);
-
- // Static method for creating factory for this object.
+ // Methods called on render thread ------------------------------------------
+ // Methods called during construction.
static media::FilterFactory* CreateFactory(
WebMediaPlayerDelegateImpl* delegate) {
return new media::FilterFactoryImpl1<AudioRendererImpl,
WebMediaPlayerDelegateImpl*>(delegate);
}
-
- // Answers question from the factory to see if we accept |format|.
static bool IsMediaFormatSupported(const media::MediaFormat* format);
+ // Methods called from RenderView when audio-related IPC messages are
+ // received from the browser process.
void OnRequestPacket();
void OnStateChanged(AudioOutputStream::State state, int info);
void OnCreated(base::SharedMemoryHandle handle, size_t length);
void OnVolume(double left, double right);
+ // Releases resources that live on the render thread, i.e. audio output streams.
+ void ReleaseRendererResources();
+
+ // Methods called on pipeline thread ----------------------------------------
+ // media::MediaFilter implementation.
+ virtual void SetPlaybackRate(float rate);
+ const media::MediaFormat* GetMediaFormat();
+
+ // media::AudioRenderer implementation.
+ virtual void SetVolume(float volume);
+
+ // AssignableBuffer<AudioRendererBase, BufferInterface> implementation.
+ virtual void OnAssignment(media::Buffer* buffer_in);
+
protected:
- virtual ~AudioRendererImpl();
+ // Methods called on audio renderer thread ----------------------------------
+ // These methods are called from AudioRendererBase.
+ virtual bool OnInitialize(const media::MediaFormat* media_format);
+ virtual void OnStop();
private:
+ friend class media::FilterFactoryImpl1<AudioRendererImpl,
+ WebMediaPlayerDelegateImpl*>;
+
+ explicit AudioRendererImpl(WebMediaPlayerDelegateImpl* delegate);
+ virtual ~AudioRendererImpl();
+
+ // Methods called on render thread -------------------------------------------
+ // The following methods are tasks posted on the render thread that need to
+ // be executed on that thread. They interact with WebMediaPlayerDelegateImpl
+ // and the containing RenderView, because method calls to RenderView must be
+ // executed on the render thread.
+ void OnCreateAudioStream(AudioManager::Format format, int channels,
+ int sample_rate, int bits_per_sample,
+ size_t packet_size);
+ void OnStartAudioStream();
+ void OnCloseAudioStream();
+ void OnSetAudioVolume(double left, double right);
+ void OnNotifyAudioPacketReady();
+
WebMediaPlayerDelegateImpl* delegate_;
+ // A map of media format information.
+ media::MediaFormat media_format_;
+
+ // ID of the stream created in the browser process.
+ int32 stream_id_;
+
+ // Memory shared by the browser process for audio buffer.
+ scoped_ptr<base::SharedMemory> shared_memory_;
+ size_t shared_memory_size_;
+
+ // Flag that tells whether we have any unfulfilled packet request.
+ bool packet_requested_;
+
+ // Message loop for the render thread; it is the message loop on which this
+ // class is constructed.
+ MessageLoop* render_loop_;
+
+ // Event for releasing resources that live in render thread.
+ base::WaitableEvent resource_release_event_;
+
DISALLOW_COPY_AND_ASSIGN(AudioRendererImpl);
};
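
The OnStop() path can be hard to follow from the diff alone: the pipeline thread posts ReleaseRendererResources() to the render thread's message loop and then blocks on resource_release_event_ (declared above) until the cleanup has run. The sketch below is a minimal standalone analogue of that handshake using only the standard library; TaskRunner and StopBlocking are invented names standing in for Chromium's MessageLoop and base::WaitableEvent, and none of this is code from the patch.

// Minimal standalone analogue of the OnStop()/ReleaseRendererResources()
// handshake: one thread posts cleanup work to another thread's loop and
// blocks until that work has run. Uses std::thread/std::promise in place of
// MessageLoop/base::WaitableEvent; not Chromium code.
#include <condition_variable>
#include <functional>
#include <future>
#include <mutex>
#include <queue>
#include <thread>

class TaskRunner {  // Hypothetical stand-in for MessageLoop.
 public:
  TaskRunner() : worker_([this] { Run(); }) {}
  ~TaskRunner() {
    PostTask([this] { done_ = true; });  // Quit task, runs last.
    worker_.join();
  }
  void PostTask(std::function<void()> task) {
    std::lock_guard<std::mutex> lock(mutex_);
    tasks_.push(std::move(task));
    cv_.notify_one();
  }

 private:
  void Run() {
    while (true) {
      std::function<void()> task;
      {
        std::unique_lock<std::mutex> lock(mutex_);
        cv_.wait(lock, [this] { return !tasks_.empty(); });
        task = std::move(tasks_.front());
        tasks_.pop();
      }
      task();
      if (done_) return;
    }
  }
  std::mutex mutex_;
  std::condition_variable cv_;
  std::queue<std::function<void()>> tasks_;
  bool done_ = false;
  std::thread worker_;  // Declared last so the other members exist first.
};

// Pipeline-thread side: mirrors OnStop() posting ReleaseRendererResources()
// to the render thread and waiting on resource_release_event_.
void StopBlocking(TaskRunner* render_loop) {
  std::promise<void> released;              // Plays the role of WaitableEvent.
  render_loop->PostTask([&released] {
    // ... close the audio stream here, as ReleaseRendererResources() does ...
    released.set_value();                   // Signal(): resources are gone.
  });
  released.get_future().wait();             // Wait(): block until signaled.
}

int main() {
  TaskRunner render_loop;
  StopBlocking(&render_loop);
  return 0;
}
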
diff --git a/chrome/renderer/render_view.cc b/chrome/renderer/render_view.cc
index bf010f3..e098199 100644
--- a/chrome/renderer/render_view.cc
+++ b/chrome/renderer/render_view.cc
@@ -2925,7 +2925,7 @@ int32 RenderView::CreateAudioStream(AudioRendererImpl* audio_renderer,
AudioManager::Format format, int channels,
int sample_rate, int bits_per_sample,
size_t packet_size) {
- // TODO(hclam): make sure this method is called on render thread.
+ DCHECK(RenderThread::current()->message_loop() == MessageLoop::current());
// Loop through the map and make sure there's no renderer already in the map.
for (IDMap<AudioRendererImpl>::const_iterator iter = audio_renderers_.begin();
iter != audio_renderers_.end(); ++iter) {
@@ -2945,31 +2945,34 @@ int32 RenderView::CreateAudioStream(AudioRendererImpl* audio_renderer,
}
void RenderView::StartAudioStream(int stream_id) {
- // TODO(hclam): make sure this method is called on render thread.
+ DCHECK(RenderThread::current()->message_loop() == MessageLoop::current());
DCHECK(audio_renderers_.Lookup(stream_id) != NULL);
Send(new ViewHostMsg_StartAudioStream(routing_id_, stream_id));
}
void RenderView::CloseAudioStream(int stream_id) {
- // TODO(hclam): make sure this method is called on render thread.
+ DCHECK(RenderThread::current()->message_loop() == MessageLoop::current());
DCHECK(audio_renderers_.Lookup(stream_id) != NULL);
+ // Remove the entry from the map and send a close message to the browser
+ // process; we won't hear back from the browser even if there's an error.
+ audio_renderers_.Remove(stream_id);
Send(new ViewHostMsg_CloseAudioStream(routing_id_, stream_id));
}
void RenderView::NotifyAudioPacketReady(int stream_id, size_t size) {
- // TODO(hclam): make sure this method is called on render thread.
+ DCHECK(RenderThread::current()->message_loop() == MessageLoop::current());
DCHECK(audio_renderers_.Lookup(stream_id) != NULL);
Send(new ViewHostMsg_NotifyAudioPacketReady(routing_id_, stream_id, size));
}
void RenderView::GetAudioVolume(int stream_id) {
- // TODO(hclam): make sure this method is called on render thread.
+ DCHECK(RenderThread::current()->message_loop() == MessageLoop::current());
DCHECK(audio_renderers_.Lookup(stream_id) != NULL);
Send(new ViewHostMsg_GetAudioVolume(routing_id_, stream_id));
}
void RenderView::SetAudioVolume(int stream_id, double left, double right) {
- // TODO(hclam): make sure this method is called on render thread.
+ DCHECK(RenderThread::current()->message_loop() == MessageLoop::current());
DCHECK(audio_renderers_.Lookup(stream_id) != NULL);
Send(new ViewHostMsg_SetAudioVolume(routing_id_, stream_id, left, right));
}
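
All of the DCHECKs added above assert the same thing: that the RenderView audio methods run on the render thread, i.e. the thread whose message loop is RenderThread::current()->message_loop(). The standalone sketch below captures the idea with an invented ThreadAffinityChecker that records the owning thread at construction and compares against it later; it illustrates the pattern only and is not Chromium's DCHECK machinery.

// Standalone sketch of a thread-affinity check: record which thread owns the
// object, then assert that later calls happen on that same thread.
// Hypothetical class, not part of the patch.
#include <cassert>
#include <thread>

class ThreadAffinityChecker {
 public:
  ThreadAffinityChecker() : owning_thread_(std::this_thread::get_id()) {}

  // Equivalent in spirit to
  // DCHECK(RenderThread::current()->message_loop() == MessageLoop::current()).
  bool CalledOnOwningThread() const {
    return std::this_thread::get_id() == owning_thread_;
  }

 private:
  const std::thread::id owning_thread_;
};

int main() {
  ThreadAffinityChecker checker;           // Constructed on the "render" thread.
  assert(checker.CalledOnOwningThread());  // Same thread: passes.

  std::thread other([&checker] {
    // A call from another thread would trip the assertion in a debug build.
    assert(!checker.CalledOnOwningThread());
  });
  other.join();
  return 0;
}
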
diff --git a/chrome/renderer/webmediaplayer_delegate_impl.cc b/chrome/renderer/webmediaplayer_delegate_impl.cc
index cb054e5..11c2391 100644
--- a/chrome/renderer/webmediaplayer_delegate_impl.cc
+++ b/chrome/renderer/webmediaplayer_delegate_impl.cc
@@ -14,9 +14,10 @@
class NotifyWebMediaPlayerTask : public CancelableTask {
public:
- NotifyWebMediaPlayerTask(webkit_glue::WebMediaPlayer* web_media_player_,
+ NotifyWebMediaPlayerTask(webkit_glue::WebMediaPlayer* web_media_player,
WebMediaPlayerMethod method)
- : method_(method) {}
+ : web_media_player_(web_media_player),
+ method_(method) {}
virtual void Run() {
if (web_media_player_) {
@@ -43,6 +44,8 @@ WebMediaPlayerDelegateImpl::WebMediaPlayerDelegateImpl(RenderView* view)
ready_state_(webkit_glue::WebMediaPlayer::DATA_UNAVAILABLE),
main_loop_(NULL),
filter_factory_(new media::FilterFactoryCollection()),
+ audio_renderer_(NULL),
+ video_renderer_(NULL),
web_media_player_(NULL),
view_(view),
tasks_(kLastTaskIndex) {
@@ -92,8 +95,6 @@ void WebMediaPlayerDelegateImpl::Load(const GURL& url) {
// Initialize the pipeline
pipeline_.Start(filter_factory_.get(), url.spec(),
NewCallback(this, &WebMediaPlayerDelegateImpl::DidInitializePipeline));
-
- // TODO(hclam): Calls to render_view_ to kick start a resource load.
}
void WebMediaPlayerDelegateImpl::CancelLoad() {
@@ -272,6 +273,8 @@ void WebMediaPlayerDelegateImpl::Paint(skia::PlatformCanvas *canvas,
}
void WebMediaPlayerDelegateImpl::WillDestroyCurrentMessageLoop() {
+ if (audio_renderer_)
+ audio_renderer_->ReleaseRendererResources();
// Stop the pipeline when the main thread is being destroyed so we won't be
// posting any more messages onto it. We just let this object and the
// associated WebMediaPlayer leak.
@@ -297,6 +300,12 @@ void WebMediaPlayerDelegateImpl::DidInitializePipeline(bool successful) {
&webkit_glue::WebMediaPlayer::NotifyReadyStateChange);
}
+void WebMediaPlayerDelegateImpl::SetAudioRenderer(
+ AudioRendererImpl* audio_renderer) {
+ DCHECK(!audio_renderer_);
+ audio_renderer_ = audio_renderer;
+}
+
void WebMediaPlayerDelegateImpl::SetVideoRenderer(
VideoRendererImpl* video_renderer) {
DCHECK(!video_renderer_);
diff --git a/chrome/renderer/webmediaplayer_delegate_impl.h b/chrome/renderer/webmediaplayer_delegate_impl.h
index 3a75b2f..4f8df38 100644
--- a/chrome/renderer/webmediaplayer_delegate_impl.h
+++ b/chrome/renderer/webmediaplayer_delegate_impl.h
@@ -16,6 +16,9 @@
// exist in the main thread.
//
// Methods that are accessed in media threads:
+// SetAudioRenderer()
+// ^--- Called during the initialization of the pipeline, essentially from
+// the pipeline thread.
// SetVideoRenderer()
// ^--- Called during the initialization of the pipeline, essentially from
// the pipeline thread.
@@ -48,6 +51,7 @@
#include "media/base/pipeline_impl.h"
#include "webkit/glue/webmediaplayer_delegate.h"
+class AudioRendererImpl;
class RenderView;
class VideoRendererImpl;
@@ -134,7 +138,9 @@ class WebMediaPlayerDelegateImpl : public webkit_glue::WebMediaPlayerDelegate,
// reference of them.
void DidTask(CancelableTask* task);
- // Public methods to be called from renderers and data source.
+ // Public methods to be called from renderers and data source so that
+ // WebMediaPlayerDelegateImpl has references to them.
+ void SetAudioRenderer(AudioRendererImpl* audio_renderer);
void SetVideoRenderer(VideoRendererImpl* video_renderer);
// Called from VideoRenderer to fire a repaint task to main_loop_.
@@ -175,9 +181,13 @@ class WebMediaPlayerDelegateImpl : public webkit_glue::WebMediaPlayerDelegate,
// the same lifetime as the pipeline.
media::PipelineImpl pipeline_;
+ // Holds a pointer to the audio renderer so we can tell it to stop during
+ // render thread destruction.
+ scoped_refptr<AudioRendererImpl> audio_renderer_;
+
// We have the interface to VideoRenderer to delegate paint messages to it
// from WebKit.
- VideoRendererImpl* video_renderer_;
+ scoped_refptr<VideoRendererImpl> video_renderer_;
webkit_glue::WebMediaPlayer* web_media_player_;
RenderView* view_;
diff --git a/media/base/pipeline.h b/media/base/pipeline.h
index 58883cc..2f20d65 100644
--- a/media/base/pipeline.h
+++ b/media/base/pipeline.h
@@ -32,7 +32,7 @@ enum PipelineError {
PIPELINE_ERROR_OUT_OF_MEMORY,
PIPELINE_ERROR_COULD_NOT_RENDER,
PIPELINE_ERROR_READ,
-
+ PIPELINE_ERROR_AUDIO_HARDWARE,
// Demuxer related errors.
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_PARSE,
diff --git a/media/filters/audio_renderer_base.h b/media/filters/audio_renderer_base.h
index 7d8d571..fd371e6 100644
--- a/media/filters/audio_renderer_base.h
+++ b/media/filters/audio_renderer_base.h
@@ -36,7 +36,7 @@ class AudioRendererBase : public AudioRenderer {
virtual bool Initialize(AudioDecoder* decoder);
// AssignableBuffer<AudioRendererBase, BufferInterface> implementation.
- void OnAssignment(Buffer* buffer_in);
+ virtual void OnAssignment(Buffer* buffer_in);
protected:
// The default maximum size of the queue.