author     crogers@google.com <crogers@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2011-01-15 00:18:39 +0000
committer  crogers@google.com <crogers@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2011-01-15 00:18:39 +0000
commit     ec8962ca7de01eb5685b32a4361dd7be8f7e6293 (patch)
tree       f26945b2e47fb967427b831ec3d4b1b962be3964
parent     521b248f7ce021f99932c9150461744b3edf3067 (diff)
Implement renderer AudioDevice API for low-latency audio output
BUG=none
TEST=none (I tested locally for Mac OS X, but we'll need further testing for Windows and Linux as we add real-time threads, etc.)

Review URL: http://codereview.chromium.org/6002005

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@71521 0039d316-1c4b-4281-b951-d872f2087c98
-rw-r--r--  chrome/browser/renderer_host/render_message_filter.cc    6
-rw-r--r--  chrome/browser/renderer_host/render_message_filter.h     1
-rw-r--r--  chrome/chrome_renderer.gypi                               4
-rw-r--r--  chrome/common/render_messages_internal.h                  4
-rw-r--r--  chrome/renderer/audio_device.cc                         187
-rw-r--r--  chrome/renderer/audio_device.h                           85
-rw-r--r--  chrome/renderer/renderer_webaudiodevice_impl.cc          41
-rw-r--r--  chrome/renderer/renderer_webaudiodevice_impl.h           40
-rw-r--r--  chrome/renderer/renderer_webkitclient_impl.cc            14
-rw-r--r--  chrome/renderer/renderer_webkitclient_impl.h              3
-rw-r--r--  media/audio/audio_util.cc                                19
-rw-r--r--  media/audio/audio_util.h                                 12
12 files changed, 416 insertions(+), 0 deletions(-)
diff --git a/chrome/browser/renderer_host/render_message_filter.cc b/chrome/browser/renderer_host/render_message_filter.cc
index 3e23ae8..d00235c 100644
--- a/chrome/browser/renderer_host/render_message_filter.cc
+++ b/chrome/browser/renderer_host/render_message_filter.cc
@@ -284,6 +284,8 @@ bool RenderMessageFilter::OnMessageReceived(const IPC::Message& message,
OnGetRootWindowRect)
#endif
+ IPC_MESSAGE_HANDLER(ViewHostMsg_GenerateRoutingID, OnGenerateRoutingID)
+
IPC_MESSAGE_HANDLER(ViewHostMsg_CreateWindow, OnMsgCreateWindow)
IPC_MESSAGE_HANDLER(ViewHostMsg_CreateWidget, OnMsgCreateWidget)
IPC_MESSAGE_HANDLER(ViewHostMsg_CreateFullscreenWidget,
@@ -728,6 +730,10 @@ void RenderMessageFilter::OnLaunchNaCl(
host->Launch(this, channel_descriptor, reply_msg);
}
+void RenderMessageFilter::OnGenerateRoutingID(int* route_id) {
+ *route_id = render_widget_helper_->GetNextRoutingID();
+}
+
void RenderMessageFilter::OnDownloadUrl(const IPC::Message& message,
const GURL& url,
const GURL& referrer) {
diff --git a/chrome/browser/renderer_host/render_message_filter.h b/chrome/browser/renderer_host/render_message_filter.h
index f502625..6274a9f 100644
--- a/chrome/browser/renderer_host/render_message_filter.h
+++ b/chrome/browser/renderer_host/render_message_filter.h
@@ -164,6 +164,7 @@ class RenderMessageFilter : public BrowserMessageFilter,
void OnLaunchNaCl(const std::wstring& url,
int channel_descriptor,
IPC::Message* reply_msg);
+ void OnGenerateRoutingID(int* route_id);
void OnDownloadUrl(const IPC::Message& message,
const GURL& url,
const GURL& referrer);
diff --git a/chrome/chrome_renderer.gypi b/chrome/chrome_renderer.gypi
index f1b33f6..8cd44fb 100644
--- a/chrome/chrome_renderer.gypi
+++ b/chrome/chrome_renderer.gypi
@@ -90,6 +90,8 @@
'renderer/resources/renderer_extension_bindings.js',
'renderer/about_handler.cc',
'renderer/about_handler.h',
+ 'renderer/audio_device.cc',
+ 'renderer/audio_device.h',
'renderer/audio_message_filter.cc',
'renderer/audio_message_filter.h',
'renderer/blocked_plugin.cc',
@@ -184,6 +186,8 @@
'renderer/renderer_sandbox_support_linux.h',
'renderer/renderer_webapplicationcachehost_impl.cc',
'renderer/renderer_webapplicationcachehost_impl.h',
+ 'renderer/renderer_webaudiodevice_impl.cc',
+ 'renderer/renderer_webaudiodevice_impl.h',
'renderer/renderer_webcookiejar_impl.cc',
'renderer/renderer_webcookiejar_impl.h',
'renderer/renderer_webidbcursor_impl.cc',
diff --git a/chrome/common/render_messages_internal.h b/chrome/common/render_messages_internal.h
index f726e0b..f86a18f 100644
--- a/chrome/common/render_messages_internal.h
+++ b/chrome/common/render_messages_internal.h
@@ -94,6 +94,10 @@ IPC_MESSAGE_CONTROL1(ViewMsg_SetNextPageID,
IPC_MESSAGE_ROUTED1(ViewMsg_SetCSSColors,
std::vector<CSSColors::CSSColorMapping>)
+// Asks the browser for a unique routing ID.
+IPC_SYNC_MESSAGE_CONTROL0_1(ViewHostMsg_GenerateRoutingID,
+ int /* routing_id */)
+
// Tells the renderer to create a new view.
// This message is slightly different, the view it takes (via
// ViewMsg_New_Params) is the view to create, the message itself is sent as a
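
For reference, this is a blocking synchronous control message; a minimal renderer-side caller (mirroring what AudioMessageFilterCreator in audio_device.cc below does) would look roughly like this sketch:

  int routing_id = 0;
  // Send() blocks until RenderMessageFilter::OnGenerateRoutingID fills in the
  // reply parameter on the browser side.
  RenderThread::current()->Send(new ViewHostMsg_GenerateRoutingID(&routing_id));
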
diff --git a/chrome/renderer/audio_device.cc b/chrome/renderer/audio_device.cc
new file mode 100644
index 0000000..04ffc64
--- /dev/null
+++ b/chrome/renderer/audio_device.cc
@@ -0,0 +1,187 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "chrome/renderer/audio_device.h"
+
+#include "base/singleton.h"
+#include "chrome/common/render_messages_params.h"
+#include "chrome/renderer/render_thread.h"
+#include "media/audio/audio_util.h"
+
+scoped_refptr<AudioMessageFilter> AudioDevice::filter_;
+
+namespace {
+
+// AudioMessageFilterCreator is intended to be used as a singleton so we can
+// get access to a shared AudioMessageFilter.
+// Example usage:
+// AudioMessageFilter* filter = AudioMessageFilterCreator::SharedFilter();
+
+class AudioMessageFilterCreator {
+ public:
+ AudioMessageFilterCreator() {
+ int routing_id;
+ RenderThread::current()->Send(
+ new ViewHostMsg_GenerateRoutingID(&routing_id));
+ filter_ = new AudioMessageFilter(routing_id);
+ RenderThread::current()->AddFilter(filter_);
+ }
+
+ static AudioMessageFilter* SharedFilter() {
+ return GetInstance()->filter_.get();
+ }
+
+ static AudioMessageFilterCreator* GetInstance() {
+ return Singleton<AudioMessageFilterCreator>::get();
+ }
+
+ private:
+ scoped_refptr<AudioMessageFilter> filter_;
+};
+
+}  // namespace
+
+AudioDevice::AudioDevice(size_t buffer_size,
+ int channels,
+ double sample_rate,
+ RenderCallback* callback)
+ : buffer_size_(buffer_size),
+ channels_(channels),
+ sample_rate_(sample_rate),
+ callback_(callback),
+ stream_id_(0) {
+ audio_data_.reserve(channels);
+ for (int i = 0; i < channels; ++i) {
+ float* channel_data = new float[buffer_size];
+ audio_data_.push_back(channel_data);
+ }
+}
+
+AudioDevice::~AudioDevice() {
+ Stop();
+ for (int i = 0; i < channels_; ++i)
+ delete [] audio_data_[i];
+}
+
+bool AudioDevice::Start() {
+ // Make sure we don't call Start() more than once.
+ DCHECK_EQ(0, stream_id_);
+ if (stream_id_)
+ return false;
+
+ // Lazily create the message filter and share across AudioDevice instances.
+ filter_ = AudioMessageFilterCreator::SharedFilter();
+
+ stream_id_ = filter_->AddDelegate(this);
+
+ ViewHostMsg_Audio_CreateStream_Params params;
+ params.params.format = AudioParameters::AUDIO_PCM_LINEAR;
+ params.params.channels = channels_;
+ params.params.sample_rate = static_cast<int>(sample_rate_);
+ params.params.bits_per_sample = 16;
+ params.params.samples_per_packet = buffer_size_;
+
+ filter_->Send(
+ new ViewHostMsg_CreateAudioStream(0, stream_id_, params, true));
+
+ return true;
+}
+
+bool AudioDevice::Stop() {
+ if (stream_id_) {
+ OnDestroy();
+ return true;
+ }
+ return false;
+}
+
+void AudioDevice::OnDestroy() {
+ // Make sure we don't call destroy more than once.
+ DCHECK_NE(0, stream_id_);
+ if (!stream_id_)
+ return;
+
+ filter_->RemoveDelegate(stream_id_);
+ filter_->Send(new ViewHostMsg_CloseAudioStream(0, stream_id_));
+ stream_id_ = 0;
+ if (audio_thread_.get()) {
+ socket_->Close();
+ audio_thread_->Join();
+ }
+}
+
+void AudioDevice::OnRequestPacket(AudioBuffersState buffers_state) {
+ // This method does not apply to the low-latency system.
+ NOTIMPLEMENTED();
+}
+
+void AudioDevice::OnStateChanged(
+ const ViewMsg_AudioStreamState_Params& state) {
+ // Not needed in this simple implementation.
+ NOTIMPLEMENTED();
+}
+
+void AudioDevice::OnCreated(
+ base::SharedMemoryHandle handle, uint32 length) {
+ // Not needed in this simple implementation.
+ NOTIMPLEMENTED();
+}
+
+void AudioDevice::OnLowLatencyCreated(
+ base::SharedMemoryHandle handle,
+ base::SyncSocket::Handle socket_handle,
+ uint32 length) {
+
+#if defined(OS_WIN)
+ DCHECK(handle);
+ DCHECK(socket_handle);
+#else
+ DCHECK_GE(handle.fd, 0);
+ DCHECK_GE(socket_handle, 0);
+#endif
+ DCHECK(length);
+ DCHECK(!audio_thread_.get());
+
+ // TODO(crogers): check that |length| is big enough for |buffer_size_|.
+
+ shared_memory_.reset(new base::SharedMemory(handle, false));
+ shared_memory_->Map(length);
+
+ socket_.reset(new base::SyncSocket(socket_handle));
+ // Allow the client to pre-populate the buffer.
+ FireRenderCallback();
+
+ // TODO(crogers): we could optionally set the thread to high-priority
+ audio_thread_.reset(
+ new base::DelegateSimpleThread(this, "renderer_audio_thread"));
+ audio_thread_->Start();
+
+ filter_->Send(new ViewHostMsg_PlayAudioStream(0, stream_id_));
+}
+
+void AudioDevice::OnVolume(double volume) {
+ // Not needed in this simple implementation.
+ NOTIMPLEMENTED();
+}
+
+// Our audio thread runs here.
+void AudioDevice::Run() {
+ int pending_data;
+ while (sizeof(pending_data) == socket_->Receive(&pending_data,
+ sizeof(pending_data)) &&
+ pending_data >= 0) {
+ FireRenderCallback();
+ }
+}
+
+void AudioDevice::FireRenderCallback() {
+ if (callback_) {
+ // Ask client to render audio.
+ callback_->Render(audio_data_, buffer_size_);
+
+ // Interleave, scale, and clip to int16.
+ int16* output_buffer16 = static_cast<int16*>(shared_memory_data());
+ media::InterleaveFloatToInt16(audio_data_, output_buffer16, buffer_size_);
+ }
+}
diff --git a/chrome/renderer/audio_device.h b/chrome/renderer/audio_device.h
new file mode 100644
index 0000000..292de14
--- /dev/null
+++ b/chrome/renderer/audio_device.h
@@ -0,0 +1,85 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CHROME_RENDERER_AUDIO_DEVICE_H_
+#define CHROME_RENDERER_AUDIO_DEVICE_H_
+#pragma once
+
+#include <vector>
+
+#include "base/basictypes.h"
+#include "base/scoped_ptr.h"
+#include "base/shared_memory.h"
+#include "base/threading/simple_thread.h"
+#include "chrome/common/render_messages.h"
+#include "chrome/renderer/audio_message_filter.h"
+
+// Each instance of AudioDevice corresponds to one host stream.
+// This class is not thread-safe, so its methods must be called from
+// the same thread.
+class AudioDevice : public AudioMessageFilter::Delegate,
+ public base::DelegateSimpleThread::Delegate {
+ public:
+ class RenderCallback {
+ public:
+ virtual void Render(const std::vector<float*>& audio_data,
+ size_t number_of_frames) = 0;
+ protected:
+ virtual ~RenderCallback() {}
+ };
+
+ // |buffer_size| is the number of sample-frames.
+ AudioDevice(size_t buffer_size,
+ int channels,
+ double sample_rate,
+ RenderCallback* callback);
+ virtual ~AudioDevice();
+
+ // Returns |true| on success.
+ bool Start();
+ bool Stop();
+
+ private:
+ // AudioMessageFilter::Delegate implementation.
+ virtual void OnRequestPacket(AudioBuffersState buffers_state);
+ virtual void OnStateChanged(const ViewMsg_AudioStreamState_Params& state);
+ virtual void OnCreated(base::SharedMemoryHandle handle, uint32 length);
+ virtual void OnLowLatencyCreated(base::SharedMemoryHandle handle,
+ base::SyncSocket::Handle socket_handle,
+ uint32 length);
+ virtual void OnVolume(double volume);
+ virtual void OnDestroy();
+
+ // DelegateSimpleThread::Delegate implementation.
+ virtual void Run();
+
+ // Format
+ size_t buffer_size_; // in sample-frames
+ int channels_;
+ double sample_rate_;
+
+ // Calls the client's callback for rendering audio.
+ void FireRenderCallback();
+ RenderCallback* callback_;
+
+ // The client callback renders audio into here.
+ std::vector<float*> audio_data_;
+
+ // Callbacks for rendering audio occur on this thread.
+ scoped_ptr<base::DelegateSimpleThread> audio_thread_;
+
+ // IPC transport: message filter, stream ID, shared memory, and sync socket.
+ base::SharedMemory* shared_memory() { return shared_memory_.get(); }
+ base::SyncSocket* socket() { return socket_.get(); }
+ void* shared_memory_data() { return shared_memory()->memory(); }
+
+ static scoped_refptr<AudioMessageFilter> filter_;
+ int32 stream_id_;
+ scoped_ptr<base::SharedMemory> shared_memory_;
+ scoped_ptr<base::SyncSocket> socket_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioDevice);
+};
+
+#endif // CHROME_RENDERER_AUDIO_DEVICE_H_
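
A minimal usage sketch of the interface above (not part of this change; ToneCallback and the parameter values are illustrative): implement RenderCallback to fill the planar float buffers, then construct the device and call Start()/Stop() from a single thread.

  #include <cmath>
  #include <vector>

  #include "chrome/renderer/audio_device.h"

  class ToneCallback : public AudioDevice::RenderCallback {
   public:
    explicit ToneCallback(double sample_rate)
        : phase_(0.0),
          phase_increment_(2.0 * 3.141592653589793 * 440.0 / sample_rate) {}

    // Writes a 440 Hz tone into every channel, keeping samples in the
    // canonical -1.0 -> +1.0 range that AudioDevice expects.
    virtual void Render(const std::vector<float*>& audio_data,
                        size_t number_of_frames) {
      for (size_t frame = 0; frame < number_of_frames; ++frame) {
        float sample = static_cast<float>(std::sin(phase_));
        phase_ += phase_increment_;
        for (size_t channel = 0; channel < audio_data.size(); ++channel)
          audio_data[channel][frame] = sample;
      }
    }

   private:
    double phase_;
    double phase_increment_;
  };

  // Caller side (single thread): 1024 sample-frames per callback, stereo, 44.1 kHz.
  //   ToneCallback callback(44100.0);
  //   AudioDevice device(1024, 2, 44100.0, &callback);
  //   device.Start();
  //   ...
  //   device.Stop();  // Also called by ~AudioDevice().
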
diff --git a/chrome/renderer/renderer_webaudiodevice_impl.cc b/chrome/renderer/renderer_webaudiodevice_impl.cc
new file mode 100644
index 0000000..1030da2
--- /dev/null
+++ b/chrome/renderer/renderer_webaudiodevice_impl.cc
@@ -0,0 +1,41 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "chrome/renderer/renderer_webaudiodevice_impl.h"
+
+using WebKit::WebAudioDevice;
+using WebKit::WebVector;
+
+RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl(size_t buffer_size,
+ int channels, double sample_rate, WebAudioDevice::RenderCallback* callback)
+ : client_callback_(callback) {
+ audio_device_.reset(
+ new AudioDevice(buffer_size, channels, sample_rate, this));
+}
+
+RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() {
+ stop();
+}
+
+void RendererWebAudioDeviceImpl::start() {
+ audio_device_->Start();
+}
+
+void RendererWebAudioDeviceImpl::stop() {
+ audio_device_->Stop();
+}
+
+void RendererWebAudioDeviceImpl::Render(const std::vector<float*>& audio_data,
+ size_t number_of_frames) {
+ // Invoke the client's callback to get the rendered audio.
+ DCHECK(client_callback_);
+ if (client_callback_) {
+ // Wrap the pointers using WebVector.
+ WebVector<float*> web_audio_data(audio_data.size());
+ for (size_t i = 0; i < audio_data.size(); ++i)
+ web_audio_data[i] = audio_data[i];
+
+ client_callback_->render(web_audio_data, number_of_frames);
+ }
+}
diff --git a/chrome/renderer/renderer_webaudiodevice_impl.h b/chrome/renderer/renderer_webaudiodevice_impl.h
new file mode 100644
index 0000000..6aed7b8
--- /dev/null
+++ b/chrome/renderer/renderer_webaudiodevice_impl.h
@@ -0,0 +1,40 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CHROME_RENDERER_RENDERER_WEBAUDIODEVICE_IMPL_H_
+#define CHROME_RENDERER_RENDERER_WEBAUDIODEVICE_IMPL_H_
+
+#include <vector>
+
+#include "base/scoped_ptr.h"
+#include "chrome/renderer/audio_device.h"
+#include "third_party/WebKit/WebKit/chromium/public/WebAudioDevice.h"
+#include "third_party/WebKit/WebKit/chromium/public/WebVector.h"
+
+class RendererWebAudioDeviceImpl : public WebKit::WebAudioDevice,
+ public AudioDevice::RenderCallback {
+ public:
+ RendererWebAudioDeviceImpl(size_t buffer_size,
+ int channels,
+ double sample_rate,
+ WebKit::WebAudioDevice::RenderCallback* callback);
+ virtual ~RendererWebAudioDeviceImpl();
+
+ // WebKit::WebAudioDevice implementation.
+ virtual void start();
+ virtual void stop();
+
+ // AudioDevice::RenderCallback implementation.
+ void Render(const std::vector<float*>& audio_data, size_t number_of_frames);
+
+ private:
+ scoped_ptr<AudioDevice> audio_device_;
+
+ // Weak reference to the callback into WebKit code.
+ WebKit::WebAudioDevice::RenderCallback* client_callback_;
+
+ DISALLOW_COPY_AND_ASSIGN(RendererWebAudioDeviceImpl);
+};
+
+#endif // CHROME_RENDERER_RENDERER_WEBAUDIODEVICE_IMPL_H_
diff --git a/chrome/renderer/renderer_webkitclient_impl.cc b/chrome/renderer/renderer_webkitclient_impl.cc
index c68625b..860c6ee 100644
--- a/chrome/renderer/renderer_webkitclient_impl.cc
+++ b/chrome/renderer/renderer_webkitclient_impl.cc
@@ -21,6 +21,7 @@
#include "chrome/renderer/net/renderer_net_predictor.h"
#include "chrome/renderer/render_thread.h"
#include "chrome/renderer/render_view.h"
+#include "chrome/renderer/renderer_webaudiodevice_impl.h"
#include "chrome/renderer/renderer_webidbfactory_impl.h"
#include "chrome/renderer/renderer_webstoragenamespace_impl.h"
#include "chrome/renderer/visitedlink_slave.h"
@@ -67,6 +68,7 @@
#include "base/file_descriptor_posix.h"
#endif
+using WebKit::WebAudioDevice;
using WebKit::WebBlobRegistry;
using WebKit::WebFileSystem;
using WebKit::WebFrame;
@@ -519,6 +521,18 @@ RendererWebKitClientImpl::createGraphicsContext3D() {
}
}
+WebAudioDevice*
+RendererWebKitClientImpl::createAudioDevice(
+ size_t buffer_size,
+ unsigned channels,
+ double sample_rate,
+ WebAudioDevice::RenderCallback* callback) {
+ return new RendererWebAudioDeviceImpl(buffer_size,
+ channels,
+ sample_rate,
+ callback);
+}
+
//------------------------------------------------------------------------------
WebKit::WebString RendererWebKitClientImpl::signedPublicKeyAndChallengeString(
diff --git a/chrome/renderer/renderer_webkitclient_impl.h b/chrome/renderer/renderer_webkitclient_impl.h
index 9448fe9..60010fc 100644
--- a/chrome/renderer/renderer_webkitclient_impl.h
+++ b/chrome/renderer/renderer_webkitclient_impl.h
@@ -70,6 +70,9 @@ class RendererWebKitClientImpl : public webkit_glue::WebKitClientImpl {
virtual WebKit::WebSharedWorkerRepository* sharedWorkerRepository();
virtual WebKit::WebGraphicsContext3D* createGraphicsContext3D();
+ virtual WebKit::WebAudioDevice* createAudioDevice(
+ size_t buffer_size, unsigned channels, double sample_rate,
+ WebKit::WebAudioDevice::RenderCallback* callback);
virtual WebKit::WebBlobRegistry* blobRegistry();
diff --git a/media/audio/audio_util.cc b/media/audio/audio_util.cc
index 6d80c87..ed84bc1 100644
--- a/media/audio/audio_util.cc
+++ b/media/audio/audio_util.cc
@@ -206,4 +206,23 @@ bool DeinterleaveAudioChannel(void* source,
return false;
}
+void InterleaveFloatToInt16(const std::vector<float*>& source,
+ int16* destination,
+ size_t number_of_frames) {
+ const float kScale = 32768.0f;
+ int channels = source.size();
+ for (int i = 0; i < channels; ++i) {
+ float* channel_data = source[i];
+ for (size_t j = 0; j < number_of_frames; ++j) {
+ float sample = kScale * channel_data[j];
+ if (sample < -32768.0)
+ sample = -32768.0;
+ else if (sample > 32767.0)
+ sample = 32767.0;
+
+ destination[j * channels + i] = static_cast<int16>(sample);
+ }
+ }
+}
+
} // namespace media
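
As a quick, illustrative sanity check of the layout InterleaveFloatToInt16 produces (not part of this change): with two planar channels the output is frame-interleaved as L0 R0 L1 R1 ..., full-scale +1.0 clips to 32767, and -1.0 maps to -32768.

  #include <vector>

  #include "media/audio/audio_util.h"

  void InterleaveExample() {
    float left[]  = { 0.0f,  0.5f,  1.0f };   //  1.0f * 32768 clips to 32767.
    float right[] = { 0.0f, -0.5f, -1.0f };   // -1.0f * 32768 stays at -32768.
    std::vector<float*> planar;
    planar.push_back(left);
    planar.push_back(right);

    int16 interleaved[6];
    media::InterleaveFloatToInt16(planar, interleaved, 3);
    // interleaved == { 0, 0, 16384, -16384, 32767, -32768 }
    //                  L0 R0   L1     R1      L2      R2
  }
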
diff --git a/media/audio/audio_util.h b/media/audio/audio_util.h
index 669b4fa..a6ba75f 100644
--- a/media/audio/audio_util.h
+++ b/media/audio/audio_util.h
@@ -5,6 +5,8 @@
#ifndef MEDIA_AUDIO_AUDIO_UTIL_H_
#define MEDIA_AUDIO_AUDIO_UTIL_H_
+#include <vector>
+
#include "base/basictypes.h"
namespace media {
@@ -62,6 +64,16 @@ bool DeinterleaveAudioChannel(void* source,
int bytes_per_sample,
size_t number_of_frames);
+// InterleaveFloatToInt16 scales, clips, and interleaves the planar
+// floating-point audio contained in |source| to the int16 |destination|.
+// The floating-point data is in a canonical range of -1.0 -> +1.0.
+// The size of the |source| vector determines the number of channels.
+// The |destination| buffer is assumed to be large enough to hold the
+// result, so it must contain at least number_of_frames * source.size() elements.
+void InterleaveFloatToInt16(const std::vector<float*>& source,
+ int16* destination,
+ size_t number_of_frames);
+
} // namespace media
#endif // MEDIA_AUDIO_AUDIO_UTIL_H_