// Copyright (c) 2009 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/renderer/media/audio_renderer_impl.h"
#include "chrome/renderer/render_view.h"
#include "chrome/renderer/render_thread.h"
#include "chrome/renderer/webmediaplayer_delegate_impl.h"
#include "media/base/filter_host.h"
// We'll try to fill 4096 samples per buffer, which is roughly 92ms of audio
// data for a 44.1kHz audio source.
static const size_t kSamplesPerBuffer = 4096;

AudioRendererImpl::AudioRendererImpl(WebMediaPlayerDelegateImpl* delegate)
    : AudioRendererBase(kDefaultMaxQueueSize),
      delegate_(delegate),
      stream_id_(0),
      shared_memory_(NULL),
      shared_memory_size_(0),
      packet_requested_(false),
      render_loop_(RenderThread::current()->message_loop()),
      resource_release_event_(true, false) {
  // TODO(hclam): do we need to move this method call to render thread?
  delegate_->SetAudioRenderer(this);
}

AudioRendererImpl::~AudioRendererImpl() {
}

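// A media format is supported if the channel count, sample rate and sample
// size can all be parsed out of it.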
bool AudioRendererImpl::IsMediaFormatSupported(
    const media::MediaFormat& media_format) {
  int channels;
  int sample_rate;
  int sample_bits;
  return ParseMediaFormat(media_format, &channels, &sample_rate, &sample_bits);
}

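// Computes the packet size from the parsed format and posts a task to the
// render thread to create a matching audio output stream in the browser
// process.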
bool AudioRendererImpl::OnInitialize(const media::MediaFormat& media_format) {
  // Parse integer values in MediaFormat.
  int channels;
  int sample_rate;
  int sample_bits;
  if (!ParseMediaFormat(media_format, &channels, &sample_rate, &sample_bits)) {
    return false;
  }

  // Create the audio output stream in the browser process.
  size_t packet_size = kSamplesPerBuffer * channels * sample_bits / 8;
  render_loop_->PostTask(FROM_HERE,
      NewRunnableMethod(this, &AudioRendererImpl::OnCreateAudioStream,
                        AudioManager::AUDIO_PCM_LINEAR, channels, sample_rate,
                        sample_bits, packet_size));
  return true;
}

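// Release of the browser-side audio stream is posted to the render thread;
// block here until that work has signaled completion.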
void AudioRendererImpl::OnStop() {
  if (!resource_release_event_.IsSignaled()) {
    render_loop_->PostTask(FROM_HERE,
        NewRunnableMethod(this, &AudioRendererImpl::ReleaseRendererResources));
    resource_release_event_.Wait();
  }
}

void AudioRendererImpl::OnAssignment(media::Buffer* buffer_in) {
  // Use the base class to queue the buffer.
  AudioRendererBase::OnAssignment(buffer_in);

  // Post a task to the render thread to notify it that a packet has arrived.
  render_loop_->PostTask(FROM_HERE,
      NewRunnableMethod(this, &AudioRendererImpl::OnNotifyAudioPacketReady));
}

void AudioRendererImpl::SetPlaybackRate(float rate) {
  // TODO(hclam): handle playback rates not equal to 1.0.
  if (rate == 1.0f) {
    // TODO(hclam): what should I do here? OnCreated has already fired
    // StartAudioStream in the browser process, so it seems there's nothing
    // to do here.
  } else {
    NOTIMPLEMENTED();
  }
}

void AudioRendererImpl::SetVolume(float volume) {
  // TODO(hclam): change this to multichannel if possible.
  render_loop_->PostTask(FROM_HERE,
      NewRunnableMethod(
          this, &AudioRendererImpl::OnSetAudioVolume, volume, volume));
}

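// Called when the browser process has created the audio stream and the
// shared memory that audio packets are written into. Wrap the handle and
// start the stream.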
void AudioRendererImpl::OnCreated(base::SharedMemoryHandle handle,
                                  size_t length) {
  shared_memory_.reset(new base::SharedMemory(handle, false));
  shared_memory_size_ = length;

  // TODO(hclam): is there any better place to do this?
  OnStartAudioStream();
}

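// The browser process is asking for another packet of audio data. Just
// record the request; it is fulfilled in OnNotifyAudioPacketReady() once
// decoded buffers arrive.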
void AudioRendererImpl::OnRequestPacket() {
  packet_requested_ = true;
}

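// State change notifications for the audio output stream. A hardware error
// is reported to the pipeline; started/paused notifications are ignored for
// now.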
void AudioRendererImpl::OnStateChanged(AudioOutputStream::State state,
                                       int info) {
  switch (state) {
    case AudioOutputStream::STATE_ERROR:
      host_->Error(media::PIPELINE_ERROR_AUDIO_HARDWARE);
      break;
    // TODO(hclam): handle these events.
    case AudioOutputStream::STATE_STARTED:
    case AudioOutputStream::STATE_PAUSED:
      break;
    default:
      NOTREACHED();
      break;
  }
}

void AudioRendererImpl::OnVolume(double left, double right) {
  // TODO(hclam): decide whether we need to report the current volume to
  // the pipeline.
}

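// Runs on the render thread: close the browser-side stream and wake up the
// thread blocked in OnStop().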
void AudioRendererImpl::ReleaseRendererResources() {
  OnCloseAudioStream();
  resource_release_event_.Signal();
}

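// The following helpers run on the render thread and forward each request to
// the RenderView, which communicates with the browser process.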
void AudioRendererImpl::OnCreateAudioStream(
    AudioManager::Format format, int channels, int sample_rate,
    int bits_per_sample, size_t packet_size) {
  stream_id_ = delegate_->view()->CreateAudioStream(
      this, format, channels, sample_rate, bits_per_sample, packet_size);
}

void AudioRendererImpl::OnStartAudioStream() {
  delegate_->view()->StartAudioStream(stream_id_);
}

void AudioRendererImpl::OnCloseAudioStream() {
  // Unregister ourselves from the RenderView; we will not be called anymore.
  delegate_->view()->CloseAudioStream(stream_id_);
}

void AudioRendererImpl::OnSetAudioVolume(double left, double right) {
  delegate_->view()->SetAudioVolume(stream_id_, left, right);
}

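// If the browser process has an outstanding packet request, fill the shared
// memory with decoded audio from the base class and notify it.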
void AudioRendererImpl::OnNotifyAudioPacketReady() {
  if (packet_requested_) {
    DCHECK(shared_memory_.get());

    // Fill the shared memory with decoded audio data.
    size_t filled = FillBuffer(static_cast<uint8*>(shared_memory_->memory()),
                               shared_memory_size_);
    packet_requested_ = false;

    // Then tell the browser process we are done filling the buffer.
    delegate_->view()->NotifyAudioPacketReady(stream_id_, filled);
  }
}