author     zea@chromium.org <zea@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2011-07-15 22:09:28 +0000
committer  zea@chromium.org <zea@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2011-07-15 22:09:28 +0000
commit     d1bbf6fde2b8709c7760a78c667542cfcb21fe2c (patch)
tree       85aa20e75c2665a555060eac9b6b022a4cbba985 /content/renderer
parent     620db17658bac3bd4a608fff6a0bcb2578baf0eb (diff)
[Reverted due to check_deps failure]
Revert 92749 - Fix problem when 'ended' event was fired before stream really ended.
That gave the impression that rewind does not work. With that change, the
following small JS program
var a = new Audio("file:///home/enal/temp/click2/click2.wav");
var num_played = 0;
a.addEventListener('canplaythrough', function() {
  a.play();
});
a.addEventListener('ended', function() {
  num_played++;
  if (num_played < 10) {
    a.currentTime = 0;
    a.play();
  }
});
works correctly: you hear 10 clicks one after another, and it takes
~1.5 seconds to play all 10 sounds (one click is 146 ms). Current
Chrome plays only the beginnings of the first 9 clicks and then the entire
10th click -- the 'ended' event fires too early, so the rewind stops audio
playback for every click but the last one.
With that fix you can easily create a pool of audio objects -- on the 'ended'
event, just add the audio object back to the pool.
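For illustration only, here is a minimal sketch of such a pool (the pool size,
sound URL, and helper name are made up for this example and are not part of
the change):

// Illustrative sketch: recycle Audio objects once 'ended' is reliable.
var POOL_SIZE = 4;               // made-up pool size
var SOUND_URL = 'click2.wav';    // made-up URL
var pool = [];
for (var i = 0; i < POOL_SIZE; i++) {
  var a = new Audio(SOUND_URL);
  a.addEventListener('ended', function() {
    this.currentTime = 0;  // rewind before reuse
    pool.push(this);       // safe now: playback has really finished
  });
  pool.push(a);
}
function playClick() {
  var a = pool.pop();      // take a free Audio object, if any
  if (a)
    a.play();
}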
The fix consists of 3 parts:
1) For the low-latency code path, pass the entire "audio state" object to the
renderer process. That allows the renderer to take into account the number of
pending bytes in the buffer.
2) When using the low-latency code path, the renderer not only fills the buffer
with data, but also writes the length of that data into the first word of the
buffer. That allows the host process to pass correct byte counts to the
renderer.
3) The renderer now keeps track of the earliest time playback can end, based on
the number of rendered bytes, and will not call the 'ended' callback until that
time (see the sketch after this list).
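As a rough illustration of part 3, the bookkeeping amounts to the following
(JavaScript-style sketch with made-up names; the playback-rate adjustment is
omitted here, and the actual implementation is the C++ UpdateEarliestEndTime()
helper that this revert removes, visible in the diff below):

// Illustration only: track the earliest moment the queued audio can finish.
var state = {
  bytesPerSecond: 176400,  // e.g. 44.1 kHz * 2 channels * 2 bytes per sample
  earliestEndTimeMs: 0
};
function updateEarliestEndTime(bytesFilled, requestDelayMs, nowMs) {
  if (bytesFilled === 0)
    return;
  // How long the newly queued bytes will take to play, in milliseconds.
  var predictedPlayTimeMs = bytesFilled / state.bytesPerSecond * 1000;
  // The estimate may only move forward, never backward.
  state.earliestEndTimeMs = Math.max(
      state.earliestEndTimeMs, nowMs + requestDelayMs + predictedPlayTimeMs);
}
// 'ended' is then reported only once the buffer is empty AND
// now >= state.earliestEndTimeMs.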
BUG=http://code.google.com/p/chromium/issues/detail?id=78992
Review URL: http://codereview.chromium.org/7328030
TBR=enal@chromium.org
Review URL: http://codereview.chromium.org/7395014
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@92753 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'content/renderer')
-rw-r--r--  content/renderer/media/audio_device.cc                   28
-rw-r--r--  content/renderer/media/audio_input_device.cc             28
-rw-r--r--  content/renderer/media/audio_renderer_impl.cc            62
-rw-r--r--  content/renderer/media/audio_renderer_impl.h             34
-rw-r--r--  content/renderer/media/audio_renderer_impl_unittest.cc   12
5 files changed, 34 insertions, 130 deletions
diff --git a/content/renderer/media/audio_device.cc b/content/renderer/media/audio_device.cc
index d812c83..e00b370 100644
--- a/content/renderer/media/audio_device.cc
+++ b/content/renderer/media/audio_device.cc
@@ -9,7 +9,6 @@
 #include "content/common/media/audio_messages.h"
 #include "content/common/view_messages.h"
 #include "content/renderer/render_thread.h"
-#include "media/audio/audio_buffers_state.h"
 #include "media/audio/audio_util.h"
 
 AudioDevice::AudioDevice(size_t buffer_size,
@@ -162,8 +161,6 @@ void AudioDevice::OnLowLatencyCreated(
   shared_memory_.reset(new base::SharedMemory(handle, false));
   shared_memory_->Map(length);
 
-  DCHECK_GE(length, buffer_size_ * sizeof(int16) + sizeof(uint32));
-
   socket_.reset(new base::SyncSocket(socket_handle));
   // Allow the client to pre-populate the buffer.
   FireRenderCallback();
@@ -189,17 +186,16 @@ void AudioDevice::Send(IPC::Message* message) {
 
 void AudioDevice::Run() {
   audio_thread_->SetThreadPriority(base::kThreadPriority_RealtimeAudio);
 
-  AudioBuffersState buffer_state;
+  int pending_data;
   const int samples_per_ms = static_cast<int>(sample_rate_) / 1000;
   const int bytes_per_ms = channels_ * (bits_per_sample_ / 8) * samples_per_ms;
-  while (buffer_state.Receive(socket_.get()) &&
-         (buffer_state.total_bytes() >= 0)) {
-    {
-      // Convert the number of pending bytes in the render buffer
-      // into milliseconds.
-      audio_delay_milliseconds_ = buffer_state.total_bytes() / bytes_per_ms;
-    }
+  while (sizeof(pending_data) == socket_->Receive(&pending_data,
+                                                  sizeof(pending_data)) &&
+         pending_data >= 0) {
+    // Convert the number of pending bytes in the render buffer
+    // into milliseconds.
+    audio_delay_milliseconds_ = pending_data / bytes_per_ms;
     FireRenderCallback();
   }
 
@@ -211,13 +207,7 @@ void AudioDevice::FireRenderCallback() {
     callback_->Render(audio_data_, buffer_size_, audio_delay_milliseconds_);
 
     // Interleave, scale, and clip to int16.
-    media::InterleaveFloatToInt16(
-        audio_data_,
-        static_cast<int16*>(media::GetDataPointer(shared_memory())),
-        buffer_size_);
-
-    // Consumer should know how much data was written.
-    media::SetActualDataSizeInBytes(shared_memory(),
-                                    buffer_size_ * sizeof(int16));
+    int16* output_buffer16 = static_cast<int16*>(shared_memory_data());
+    media::InterleaveFloatToInt16(audio_data_, output_buffer16, buffer_size_);
   }
 }
diff --git a/content/renderer/media/audio_input_device.cc b/content/renderer/media/audio_input_device.cc
index de1bfb0..5bfecea 100644
--- a/content/renderer/media/audio_input_device.cc
+++ b/content/renderer/media/audio_input_device.cc
@@ -9,7 +9,6 @@
 #include "content/common/media/audio_messages.h"
 #include "content/common/view_messages.h"
 #include "content/renderer/render_thread.h"
-#include "media/audio/audio_buffers_state.h"
 #include "media/audio/audio_util.h"
 
 AudioInputDevice::AudioInputDevice(size_t buffer_size,
@@ -55,8 +54,8 @@ bool AudioInputDevice::Start() {
   params.samples_per_packet = buffer_size_;
 
   ChildProcess::current()->io_message_loop()->PostTask(
-      FROM_HERE,
-      NewRunnableMethod(this, &AudioInputDevice::InitializeOnIOThread, params));
+    FROM_HERE,
+    NewRunnableMethod(this, &AudioInputDevice::InitializeOnIOThread, params));
 
   return true;
 }
@@ -129,8 +128,6 @@ void AudioInputDevice::OnLowLatencyCreated(
   shared_memory_.reset(new base::SharedMemory(handle, false));
   shared_memory_->Map(length);
 
-  DCHECK_GE(length, buffer_size_ + sizeof(uint32));
-
   socket_.reset(new base::SyncSocket(socket_handle));
 
   audio_thread_.reset(
@@ -155,18 +152,19 @@ void AudioInputDevice::Send(IPC::Message* message) {
 
 void AudioInputDevice::Run() {
   audio_thread_->SetThreadPriority(base::kThreadPriority_RealtimeAudio);
 
-  AudioBuffersState buffer_state;
+  int pending_data;
   const int samples_per_ms = static_cast<int>(sample_rate_) / 1000;
   const int bytes_per_ms = channels_ * (bits_per_sample_ / 8) * samples_per_ms;
-  while (buffer_state.Receive(socket_.get()) &&
-         (buffer_state.total_bytes() >= 0)) {
-    // TODO(henrika): investigate the provided |buffer_state.total_bytes()|
-    // value and ensure that it is actually an accurate delay estimation.
+  while (sizeof(pending_data) == socket_->Receive(&pending_data,
+                                                  sizeof(pending_data)) &&
+         pending_data >= 0) {
+    // TODO(henrika): investigate the provided |pending_data| value
+    // and ensure that it is actually an accurate delay estimation.
 
     // Convert the number of pending bytes in the capture buffer
     // into milliseconds.
-    audio_delay_milliseconds_ = buffer_state.total_bytes() / bytes_per_ms;
+    audio_delay_milliseconds_ = pending_data / bytes_per_ms;
 
     FireCaptureCallback();
   }
@@ -176,14 +174,10 @@ void AudioInputDevice::FireCaptureCallback() {
   if (!callback_)
     return;
 
-  uint32 actual_buffer_size = media::GetActualDataSizeInBytes(shared_memory()) /
-      sizeof(int16);
-  DCHECK_LE(actual_buffer_size, buffer_size_);
-  const size_t number_of_frames = actual_buffer_size;
+  const size_t number_of_frames = buffer_size_;
 
   // Read 16-bit samples from shared memory (browser writes to it).
-  int16* input_audio = static_cast<int16*>(
-      media::GetDataPointer(shared_memory()));
+  int16* input_audio = static_cast<int16*>(shared_memory_data());
   const int bytes_per_sample = sizeof(input_audio[0]);
 
   // Deinterleave each channel and convert to 32-bit floating-point
diff --git a/content/renderer/media/audio_renderer_impl.cc b/content/renderer/media/audio_renderer_impl.cc
index b3ad114..480da56 100644
--- a/content/renderer/media/audio_renderer_impl.cc
+++ b/content/renderer/media/audio_renderer_impl.cc
@@ -6,17 +6,13 @@
 #include <math.h>
 
-#include <algorithm>
-
-#include "base/command_line.h"
 #include "content/common/child_process.h"
+#include "base/command_line.h"
 #include "content/common/content_switches.h"
 #include "content/common/media/audio_messages.h"
 #include "content/renderer/render_thread.h"
 #include "content/renderer/render_view.h"
 
-#include "media/audio/audio_buffers_state.h"
 #include "media/audio/audio_output_controller.h"
-#include "media/audio/audio_util.h"
 #include "media/base/filter_host.h"
 
 // Static variable that says what code path we are using -- low or high
@@ -66,23 +62,6 @@ base::TimeDelta AudioRendererImpl::ConvertToDuration(int bytes) {
   return base::TimeDelta();
 }
 
-void AudioRendererImpl::UpdateEarliestEndTime(int bytes_filled,
-                                              base::TimeDelta request_delay,
-                                              base::Time time_now) {
-  if (bytes_filled != 0) {
-    base::TimeDelta predicted_play_time = ConvertToDuration(bytes_filled);
-    float playback_rate = GetPlaybackRate();
-    if (playback_rate != 1.0f) {
-      predicted_play_time = base::TimeDelta::FromMicroseconds(
-          static_cast<int64>(ceil(predicted_play_time.InMicroseconds() *
-                                  playback_rate)));
-    }
-    earliest_end_time_ =
-        std::max(earliest_end_time_,
-                 time_now + request_delay + predicted_play_time);
-  }
-}
-
 bool AudioRendererImpl::OnInitialize(const media::AudioDecoderConfig& config) {
   AudioParameters params(config);
   params.format = AudioParameters::AUDIO_PCM_LINEAR;
@@ -341,7 +320,6 @@ void AudioRendererImpl::CreateStreamTask(const AudioParameters& audio_params) {
 
 void AudioRendererImpl::PlayTask() {
   DCHECK(MessageLoop::current() == ChildProcess::current()->io_message_loop());
-  earliest_end_time_ = base::Time::Now();
   Send(new AudioHostMsg_PlayStream(stream_id_));
 }
 
@@ -354,7 +332,6 @@ void AudioRendererImpl::PauseTask() {
 
 void AudioRendererImpl::SeekTask() {
   DCHECK(MessageLoop::current() == ChildProcess::current()->io_message_loop());
-  earliest_end_time_ = base::Time::Now();
   // We have to pause the audio stream before we can flush.
   Send(new AudioHostMsg_PauseStream(stream_id_));
   Send(new AudioHostMsg_FlushStream(stream_id_));
@@ -416,18 +393,10 @@ void AudioRendererImpl::NotifyPacketReadyTask() {
                                            GetPlaybackRate())));
     }
-    bool buffer_empty = (request_buffers_state_.pending_bytes == 0) &&
-                        (current_time >= earliest_end_time_);
-
-    // For high latency mode we don't write length into shared memory,
-    // it is explicit part of AudioHostMsg_NotifyPacketReady() message,
-    // so no need to reserve first word of buffer for length.
     uint32 filled = FillBuffer(static_cast<uint8*>(shared_memory_->memory()),
                                shared_memory_size_,
                                request_delay,
-                               buffer_empty);
-    UpdateEarliestEndTime(filled, request_delay, current_time);
+                               request_buffers_state_.pending_bytes == 0);
     pending_request_ = false;
-    // Then tell browser process we are done filling into the buffer.
     Send(new AudioHostMsg_NotifyPacketReady(stream_id_, filled));
   }
 }
@@ -450,11 +419,12 @@ void AudioRendererImpl::WillDestroyCurrentMessageLoop() {
 
 void AudioRendererImpl::Run() {
   audio_thread_->SetThreadPriority(base::kThreadPriority_RealtimeAudio);
 
-  AudioBuffersState buffer_state;
-  while (buffer_state.Receive(socket_.get())) {
-    if (buffer_state.total_bytes() == media::AudioOutputController::kPauseMark)
+  int bytes;
+  while (sizeof(bytes) == socket_->Receive(&bytes, sizeof(bytes))) {
+    LOG(ERROR) << "+++ bytes: " << bytes;
+    if (bytes == media::AudioOutputController::kPauseMark)
       continue;
-    else if (buffer_state.total_bytes() < 0)
+    else if (bytes < 0)
       break;
     base::AutoLock auto_lock(lock_);
     if (stopped_)
@@ -463,25 +433,17 @@ void AudioRendererImpl::Run() {
     if (playback_rate <= 0.0f)
       continue;
     DCHECK(shared_memory_.get());
-    base::TimeDelta request_delay =
-        ConvertToDuration(buffer_state.total_bytes());
-
+    base::TimeDelta request_delay = ConvertToDuration(bytes);
     // We need to adjust the delay according to playback rate.
     if (playback_rate != 1.0f) {
      request_delay = base::TimeDelta::FromMicroseconds(
          static_cast<int64>(ceil(request_delay.InMicroseconds() *
                                  playback_rate)));
     }
-    base::Time time_now = base::Time::Now();
-    bool buffer_empty = (buffer_state.pending_bytes == 0) &&
-                        (time_now >= earliest_end_time_);
-    void *data_buffer = media::GetDataPointer(shared_memory_.get());
-    uint32 size = FillBuffer(static_cast<uint8*>(data_buffer),
-                             media::GetMaxDataSizeInBytes(shared_memory_size_),
-                             request_delay,
-                             buffer_empty);
-    media::SetActualDataSizeInBytes(shared_memory_.get(), size);
-    UpdateEarliestEndTime(size, request_delay, time_now);
+    FillBuffer(static_cast<uint8*>(shared_memory_->memory()),
+               shared_memory_size_,
+               request_delay,
+               true /* buffers empty */);
   }
 }
diff --git a/content/renderer/media/audio_renderer_impl.h b/content/renderer/media/audio_renderer_impl.h
index d9c73d0..5ba9bc5 100644
--- a/content/renderer/media/audio_renderer_impl.h
+++ b/content/renderer/media/audio_renderer_impl.h
@@ -58,7 +58,7 @@ class AudioRendererImpl : public media::AudioRendererBase,
                           public MessageLoop::DestructionObserver {
  public:
   // Methods called on Render thread ------------------------------------------
-  AudioRendererImpl();
+  explicit AudioRendererImpl();
   virtual ~AudioRendererImpl();
 
   // Methods called on IO thread ----------------------------------------------
@@ -105,7 +105,6 @@ class AudioRendererImpl : public media::AudioRendererBase,
   FRIEND_TEST_ALL_PREFIXES(AudioRendererImplTest, Stop);
   FRIEND_TEST_ALL_PREFIXES(AudioRendererImplTest,
                            DestroyedMessageLoop_ConsumeAudioSamples);
-  FRIEND_TEST_ALL_PREFIXES(AudioRendererImplTest, UpdateEarliestEndTime);
   // Helper methods.
   // Convert number of bytes to duration of time using information about the
   // number of channels, sample rate and sample bits.
@@ -139,29 +138,16 @@ class AudioRendererImpl : public media::AudioRendererBase,
   virtual void CreateAudioThread();
 
   // Accessors used by tests.
-  LatencyType latency_type() const {
+  LatencyType latency_type() {
     return latency_type_;
   }
 
-  base::Time earliest_end_time() const {
-    return earliest_end_time_;
-  }
-
-  uint32 bytes_per_second() const {
-    return bytes_per_second_;
-  }
-
   // Should be called before any class instance is created.
   static void set_latency_type(LatencyType latency_type);
 
   // Helper method for IPC send calls.
   void Send(IPC::Message* message);
 
-  // Estimate earliest time when current buffer can stop playing.
-  void UpdateEarliestEndTime(int bytes_filled,
-                             base::TimeDelta request_delay,
-                             base::Time time_now);
-
   // Used to calculate audio delay given bytes.
   uint32 bytes_per_second_;
 
@@ -202,22 +188,6 @@ class AudioRendererImpl : public media::AudioRendererBase,
   // Remaining bytes for prerolling to complete.
   uint32 preroll_bytes_;
 
-  // We're supposed to know amount of audio data OS or hardware buffered, but
-  // that is not always so -- on my Linux box
-  // AudioBuffersState::hardware_delay_bytes never reaches 0.
-  //
-  // As a result we cannot use it to find when stream ends. If we just ignore
-  // buffered data we will notify host that stream ended before it is actually
-  // did so, I've seen it done ~140ms too early when playing ~150ms file.
-  //
-  // Instead of trying to invent OS-specific solution for each and every OS we
-  // are supporting, use simple workaround: every time we fill the buffer we
-  // remember when it should stop playing, and do not assume that buffer is
-  // empty till that time. Workaround is not bulletproof, as we don't exactly
-  // know when that particular data would start playing, but it is much better
-  // than nothing.
-  base::Time earliest_end_time_;
-
   DISALLOW_COPY_AND_ASSIGN(AudioRendererImpl);
 };
 
diff --git a/content/renderer/media/audio_renderer_impl_unittest.cc b/content/renderer/media/audio_renderer_impl_unittest.cc
index 1a6b070..478eb63 100644
--- a/content/renderer/media/audio_renderer_impl_unittest.cc
+++ b/content/renderer/media/audio_renderer_impl_unittest.cc
@@ -325,15 +325,3 @@ TEST_F(AudioRendererImplTest, DestroyedMessageLoop_ConsumeAudioSamples) {
   renderer_->ConsumeAudioSamples(buffer);
   renderer_->Stop(media::NewExpectedCallback());
 }
-
-TEST_F(AudioRendererImplTest, UpdateEarliestEndTime) {
-  renderer_->SetPlaybackRate(1.0f);
-  base::Time time_now = base::Time();  // Null time by default.
-  renderer_->UpdateEarliestEndTime(renderer_->bytes_per_second(),
-                                   base::TimeDelta::FromMilliseconds(100),
-                                   time_now);
-  int time_delta = (renderer_->earliest_end_time() - time_now).InMilliseconds();
-  EXPECT_EQ(1100, time_delta);
-  renderer_->Stop(media::NewExpectedCallback());
-}
-