author     dalecurtis@google.com <dalecurtis@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2014-05-31 22:24:48 +0000
committer  dalecurtis@google.com <dalecurtis@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2014-05-31 22:24:48 +0000
commit     28a10808e26f47156c2e592c7f5c4d97119971e1 (patch)
tree       f4c5818d12671f4ece3e3d65a0b781241d2930ef /media
parent     f4b492e99cb4ebf162ace826b55a62a4602ed252 (diff)
Always use the source channel layout with AudioBufferConverter.
No web sites currently use this feature, but it causes all multichannel users
to waste resources unnecessarily upmixing, generating, and transferring empty
channel data. It also breaks OSX multichannel users with non-standard channel
layouts, mostly because we don't want to handle the plethora of ways OSX maps
channels ourselves. Instead we can require that MSE users who want to change
channel counts specify the maximum number of channels in the first
initialization segment. This also fixes use cases where clients routed
multichannel streams into WebAudio for processing instead of actual playout.

BUG=266674,379288
TEST=Only 2 channels are created for stereo sources when a 7.1 device is connected.
R=rileya@chromium.org, wolenetz@chromium.org

Review URL: https://codereview.chromium.org/304233006

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@274068 0039d316-1c4b-4281-b951-d872f2087c98
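To put the resource claim in perspective, here is a rough, hypothetical back-of-the-envelope sketch (not part of this change) of what the old upmix-to-hardware behaviour cost for a stereo source on a 7.1 device. The 48 kHz sample rate and 32-bit float sample size are illustrative assumptions; only ChannelLayoutToChannelCount() and the CHANNEL_LAYOUT_* enums come from Chromium's media/base/channel_layout.h.

// Hypothetical illustration only; assumes 48 kHz playback and 32-bit float
// samples. Only ChannelLayoutToChannelCount() and the CHANNEL_LAYOUT_* enums
// are real Chromium APIs (media/base/channel_layout.h).
#include <cstdio>
#include "media/base/channel_layout.h"

int main() {
  const int source_channels =
      media::ChannelLayoutToChannelCount(media::CHANNEL_LAYOUT_STEREO);  // 2
  const int hw_channels =
      media::ChannelLayoutToChannelCount(media::CHANNEL_LAYOUT_7_1);     // 8
  const int sample_rate = 48000;   // assumed
  const int bytes_per_sample = 4;  // assumed (32-bit float)

  // Before this change the renderer sized its buffers for the hardware
  // layout, so the extra channels carried nothing but silence.
  const int wasted_bytes_per_second =
      (hw_channels - source_channels) * sample_rate * bytes_per_sample;
  std::printf("~%d bytes/s of silent channel data avoided\n",
              wasted_bytes_per_second);  // ~1.15 MB/s for this example
  return 0;
}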
Diffstat (limited to 'media')
-rw-r--r--  media/filters/audio_renderer_impl.cc        20
-rw-r--r--  media/filters/pipeline_integration_test.cc    5
2 files changed, 15 insertions(+), 10 deletions(-)
diff --git a/media/filters/audio_renderer_impl.cc b/media/filters/audio_renderer_impl.cc
index 87c4a86..d07826a 100644
--- a/media/filters/audio_renderer_impl.cc
+++ b/media/filters/audio_renderer_impl.cc
@@ -283,13 +283,19 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
   } else {
     // TODO(rileya): Support hardware config changes
     const AudioParameters& hw_params = hardware_config_->GetOutputConfig();
-    audio_parameters_.Reset(hw_params.format(),
-                            hw_params.channel_layout(),
-                            hw_params.channels(),
-                            hw_params.input_channels(),
-                            hw_params.sample_rate(),
-                            hw_params.bits_per_sample(),
-                            hardware_config_->GetHighLatencyBufferSize());
+    audio_parameters_.Reset(
+        hw_params.format(),
+        // Always use the source's channel layout and channel count to avoid
+        // premature downmixing (http://crbug.com/379288), platform specific
+        // issues around channel layouts (http://crbug.com/266674), and
+        // unnecessary upmixing overhead.
+        stream->audio_decoder_config().channel_layout(),
+        ChannelLayoutToChannelCount(
+            stream->audio_decoder_config().channel_layout()),
+        hw_params.input_channels(),
+        hw_params.sample_rate(),
+        hw_params.bits_per_sample(),
+        hardware_config_->GetHighLatencyBufferSize());
   }
 
   audio_clock_.reset(new AudioClock(audio_parameters_.sample_rate()));
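For reference, ChannelLayoutToChannelCount() (declared in media/base/channel_layout.h) is the helper the new code uses to derive the channel count from the source's layout. The small stand-alone sketch below just spells out that mapping for a few common layouts; it is illustrative and not part of the patch.

// Illustrative only: the layout-to-channel-count mapping used by the new code.
#include <cassert>
#include "media/base/channel_layout.h"

int main() {
  assert(media::ChannelLayoutToChannelCount(media::CHANNEL_LAYOUT_MONO) == 1);
  assert(media::ChannelLayoutToChannelCount(media::CHANNEL_LAYOUT_STEREO) == 2);
  assert(media::ChannelLayoutToChannelCount(media::CHANNEL_LAYOUT_5_1) == 6);
  assert(media::ChannelLayoutToChannelCount(media::CHANNEL_LAYOUT_7_1) == 8);
  return 0;
}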
diff --git a/media/filters/pipeline_integration_test.cc b/media/filters/pipeline_integration_test.cc
index 8e42b8c..a0f505d 100644
--- a/media/filters/pipeline_integration_test.cc
+++ b/media/filters/pipeline_integration_test.cc
@@ -963,9 +963,8 @@ TEST_P(PipelineIntegrationTest, MediaSource_MP3) {
 
   EXPECT_TRUE(WaitUntilOnEnded());
 
-  // Verify that codec delay was stripped, if it wasn't the hash would be:
-  // "5.16,1.25,7.78,4.29,8.98,2.76,"
-  EXPECT_EQ("5.81,2.71,8.97,4.32,7.83,1.12,", GetAudioHash());
+  // Verify that codec delay was stripped.
+  EXPECT_EQ("1.01,2.71,4.18,4.32,3.04,1.12,", GetAudioHash());
 }
 
 TEST_P(PipelineIntegrationTest, MediaSource_MP3_TimestampOffset) {