Diffstat (limited to 'media/audio/win/audio_low_latency_output_win.cc')
-rw-r--r--  media/audio/win/audio_low_latency_output_win.cc | 107
1 file changed, 96 insertions(+), 11 deletions(-)
diff --git a/media/audio/win/audio_low_latency_output_win.cc b/media/audio/win/audio_low_latency_output_win.cc
index f7b31a3..a9d637c 100644
--- a/media/audio/win/audio_low_latency_output_win.cc
+++ b/media/audio/win/audio_low_latency_output_win.cc
@@ -249,6 +249,11 @@ void WASAPIAudioOutputStream::Start(AudioSourceCallback* callback) {
}
num_written_frames_ = endpoint_buffer_size_frames_;
+ if (!MarshalComPointers()) {
+ callback->OnError(this);
+ return;
+ }
+
// Create and start the thread that will drive the rendering by waiting for
// render events.
render_thread_.reset(
@@ -333,7 +338,7 @@ void WASAPIAudioOutputStream::GetVolume(double* volume) {
}
void WASAPIAudioOutputStream::Run() {
- ScopedCOMInitializer com_init(ScopedCOMInitializer::kMTA);
+ ScopedCOMInitializer com_init;
// Increase the thread priority.
render_thread_->SetThreadPriority(base::ThreadPriority::REALTIME_AUDIO);
@@ -352,6 +357,12 @@ void WASAPIAudioOutputStream::Run() {
LOG(WARNING) << "Failed to enable MMCSS (error code=" << err << ").";
}
+ // Retrieve COM pointers from the main thread.
+ ScopedComPtr<IAudioClient> audio_client;
+ ScopedComPtr<IAudioRenderClient> audio_render_client;
+ ScopedComPtr<IAudioClock> audio_clock;
+ UnmarshalComPointers(&audio_client, &audio_render_client, &audio_clock);
+
HRESULT hr = S_FALSE;
bool playing = true;
@@ -362,7 +373,7 @@ void WASAPIAudioOutputStream::Run() {
// The device frequency is the frequency generated by the hardware clock in
// the audio device. The GetFrequency() method reports a constant frequency.
- hr = audio_clock_->GetFrequency(&device_frequency);
+ hr = audio_clock->GetFrequency(&device_frequency);
error = FAILED(hr);
PLOG_IF(ERROR, error) << "Failed to acquire IAudioClock interface: "
<< std::hex << hr;
@@ -383,7 +394,9 @@ void WASAPIAudioOutputStream::Run() {
break;
case WAIT_OBJECT_0 + 1:
// |audio_samples_render_event_| has been set.
- error = !RenderAudioFromSource(device_frequency);
+ error = !RenderAudioFromSource(device_frequency, audio_client.get(),
+ audio_render_client.get(),
+ audio_clock.get());
break;
default:
error = true;
@@ -391,11 +404,11 @@ void WASAPIAudioOutputStream::Run() {
}
}
- if (playing && error) {
+ if (playing && error && audio_client) {
// Stop audio rendering since something has gone wrong in our main thread
// loop. Note that, we are still in a "started" state, hence a Stop() call
// is required to join the thread properly.
- audio_client_->Stop();
+ audio_client->Stop();
PLOG(ERROR) << "WASAPI rendering failed.";
}
@@ -405,7 +418,11 @@ void WASAPIAudioOutputStream::Run() {
}
}
-bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
+bool WASAPIAudioOutputStream::RenderAudioFromSource(
+ UINT64 device_frequency,
+ IAudioClient* audio_client,
+ IAudioRenderClient* audio_render_client,
+ IAudioClock* audio_clock) {
TRACE_EVENT0("audio", "RenderAudioFromSource");
HRESULT hr = S_FALSE;
@@ -420,7 +437,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
if (share_mode_ == AUDCLNT_SHAREMODE_SHARED) {
// Get the padding value which represents the amount of rendering
// data that is queued up to play in the endpoint buffer.
- hr = audio_client_->GetCurrentPadding(&num_queued_frames);
+ hr = audio_client->GetCurrentPadding(&num_queued_frames);
num_available_frames =
endpoint_buffer_size_frames_ - num_queued_frames;
if (FAILED(hr)) {
@@ -462,8 +479,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
for (size_t n = 0; n < num_packets; ++n) {
// Grab all available space in the rendering endpoint buffer
// into which the client can write a data packet.
- hr = audio_render_client_->GetBuffer(packet_size_frames_,
- &audio_data);
+ hr = audio_render_client->GetBuffer(packet_size_frames_, &audio_data);
if (FAILED(hr)) {
DLOG(ERROR) << "Failed to use rendering audio buffer: "
<< std::hex << hr;
@@ -477,7 +493,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
// unit at the render side.
UINT64 position = 0;
uint32 audio_delay_bytes = 0;
- hr = audio_clock_->GetPosition(&position, NULL);
+ hr = audio_clock->GetPosition(&position, NULL);
if (SUCCEEDED(hr)) {
// Stream position of the sample that is currently playing
// through the speaker.
@@ -517,7 +533,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
// Render silence if we were not able to fill up the buffer totally.
DWORD flags = (num_filled_bytes < packet_size_bytes_) ?
AUDCLNT_BUFFERFLAGS_SILENT : 0;
- audio_render_client_->ReleaseBuffer(packet_size_frames_, flags);
+ audio_render_client->ReleaseBuffer(packet_size_frames_, flags);
num_written_frames_ += packet_size_frames_;
}
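For reference, the shared-mode render pass exercised by RenderAudioFromSource() above reduces to a GetCurrentPadding()/GetBuffer()/ReleaseBuffer() cycle against the endpoint buffer. The following is a minimal standalone sketch of that cycle, not part of this patch; the function and parameter names are illustrative, and the packet is simply zero-filled where the real code copies data from the audio source.

#include <windows.h>
#include <audioclient.h>
#include <string.h>

// One shared-mode render pass against the WASAPI endpoint buffer:
// query the padding, grab a packet-sized chunk, fill it, release it.
bool RenderOnePacket(IAudioClient* audio_client,
                     IAudioRenderClient* render_client,
                     UINT32 buffer_frames,   // from IAudioClient::GetBufferSize()
                     UINT32 packet_frames,
                     UINT32 bytes_per_frame) {
  // Padding = frames already queued in the endpoint buffer but not yet played.
  UINT32 padding = 0;
  if (FAILED(audio_client->GetCurrentPadding(&padding)))
    return false;

  if (buffer_frames - padding < packet_frames)
    return true;  // Not enough free space yet; wait for the next render event.

  BYTE* data = NULL;
  if (FAILED(render_client->GetBuffer(packet_frames, &data)))
    return false;

  // A real source would copy PCM into |data| here; a packet that is not
  // completely filled is released with AUDCLNT_BUFFERFLAGS_SILENT, as in the
  // patch above.  This sketch only writes silence, so it always sets the flag.
  memset(data, 0, packet_frames * bytes_per_frame);

  return SUCCEEDED(
      render_client->ReleaseBuffer(packet_frames, AUDCLNT_BUFFERFLAGS_SILENT));
}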
@@ -622,4 +638,73 @@ void WASAPIAudioOutputStream::StopThread() {
source_ = NULL;
}
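+// Marshals the audio COM interfaces (IAudioClient, IAudioRenderClient and
+// IAudioClock) into |com_stream_| on the thread that created them, so that
+// the render thread can later unmarshal proxies that are valid in its own
+// COM apartment.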
+bool WASAPIAudioOutputStream::MarshalComPointers() {
+ DCHECK_EQ(creating_thread_id_, base::PlatformThread::CurrentId());
+ DCHECK(!com_stream_);
+
+ ScopedComPtr<IStream> com_stream;
+ HRESULT hr = CreateStreamOnHGlobal(NULL, TRUE, com_stream.Receive());
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to create stream for marshaling COM pointers.";
+ return false;
+ }
+
+ hr = CoMarshalInterface(com_stream.get(), __uuidof(IAudioClient),
+ audio_client_.get(), MSHCTX_INPROC, NULL,
+ MSHLFLAGS_NORMAL);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Marshal failed for IAudioClient: " << std::hex << hr;
+ return false;
+ }
+
+ hr = CoMarshalInterface(com_stream.get(), __uuidof(IAudioRenderClient),
+ audio_render_client_.get(), MSHCTX_INPROC, NULL,
+ MSHLFLAGS_NORMAL);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Marshal failed for IAudioRenderClient: " << std::hex << hr;
+ return false;
+ }
+
+ hr = CoMarshalInterface(com_stream.get(), __uuidof(IAudioClock),
+ audio_clock_.get(), MSHCTX_INPROC, NULL,
+ MSHLFLAGS_NORMAL);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Marshal failed for IAudioClock: " << std::hex << hr;
+ return false;
+ }
+
+ LARGE_INTEGER pos = {0};
+ hr = com_stream->Seek(pos, STREAM_SEEK_SET, NULL);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to seek IStream for marshaling: " << std::hex << hr;
+ return false;
+ }
+
+ com_stream_ = com_stream.Pass();
+ return true;
+}
+
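+// Consumes |com_stream_| on the render thread and produces interface
+// proxies that are safe to use from that thread's COM apartment.  The
+// stream is released, so this can only be called once per Start().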
+void WASAPIAudioOutputStream::UnmarshalComPointers(
+ ScopedComPtr<IAudioClient>* audio_client,
+ ScopedComPtr<IAudioRenderClient>* audio_render_client,
+ ScopedComPtr<IAudioClock>* audio_clock) {
+ DCHECK_EQ(render_thread_->tid(), base::PlatformThread::CurrentId());
+
+ DCHECK(com_stream_);
+ ScopedComPtr<IStream> com_stream;
+ com_stream = com_stream_.Pass();
+
+ HRESULT hr = CoUnmarshalInterface(com_stream.get(), __uuidof(IAudioClient),
+ audio_client->ReceiveVoid());
+ CHECK(SUCCEEDED(hr));
+
+ hr = CoUnmarshalInterface(com_stream.get(), __uuidof(IAudioRenderClient),
+ audio_render_client->ReceiveVoid());
+ CHECK(SUCCEEDED(hr));
+
+ hr = CoUnmarshalInterface(com_stream.get(), __uuidof(IAudioClock),
+ audio_clock->ReceiveVoid());
+ CHECK(SUCCEEDED(hr));
+}
+
} // namespace media
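The MarshalComPointers()/UnmarshalComPointers() pair added above serializes each interface into an IStream with CoMarshalInterface() on the creating thread and reconstructs apartment-safe proxies with CoUnmarshalInterface() on the render thread. When only a single interface has to cross threads, COM offers convenience wrappers for the same stream-based handshake; a minimal sketch with illustrative function names follows (not part of this patch).

#include <audioclient.h>
#include <objbase.h>

// Creating thread: serialize the interface pointer into a stream.  For one
// interface, CoMarshalInterThreadInterfaceInStream() wraps the
// CreateStreamOnHGlobal()/CoMarshalInterface() sequence used above.
HRESULT MarshalClientToStream(IAudioClient* audio_client, IStream** stream) {
  return CoMarshalInterThreadInterfaceInStream(__uuidof(IAudioClient),
                                               audio_client, stream);
}

// Render thread (after initializing COM for its own apartment): unmarshal a
// proxy that is safe to call from this thread.  The helper also releases the
// stream as part of the call.
HRESULT UnmarshalClientFromStream(IStream* stream, IAudioClient** client) {
  return CoGetInterfaceAndReleaseStream(stream, __uuidof(IAudioClient),
                                        reinterpret_cast<void**>(client));
}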