author     kmackay <kmackay@chromium.org>        2016-02-08 10:31:06 -0800
committer  Commit bot <commit-bot@chromium.org>  2016-02-08 18:32:03 +0000
commit     2eb96d8e6ae23d1f1e2e510112389ee77faa9c9d
tree       f7f56fb6232dc2fbdc1c2ab6d2ea3ff93b42932c
parent     a5cd6e72a46a07437c3429b864bcda7ab7560741
Update cast_media_unittests to test media shlib conformance
This updates audio_video_pipeline_device_unittest.cc to cover the new CMA
backend API more completely, including tests for the new state management
as well as new tests for additional media codecs.
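
The state-management coverage centers on the new
AudioVideoPipelineDeviceTest::TestBackendStates(), which drives a backend
through the legal Initialize/Start/Pause/Stop transitions and repeats the
config and volume checks in both the stopped and playing states. Stripped of
the message-loop and BufferFeeder plumbing, the shape of that test is roughly
the sketch below (the two Run*Checks() helpers are assumed here, mirroring the
fixture methods added in the diff):

#include <stdint.h>

#include "chromecast/public/media/media_pipeline_backend.h"
#include "testing/gtest/include/gtest/gtest.h"

using chromecast::media::MediaPipelineBackend;

// Assumed helpers mirroring the fixture's RunStoppedChecks()/RunPlaybackChecks():
// config, volume and playback-rate calls that must succeed in that state.
void RunStoppedChecks(MediaPipelineBackend* backend);
void RunPlaybackChecks(MediaPipelineBackend* backend);

void TestBackendStateTransitions(MediaPipelineBackend* backend) {
  ASSERT_TRUE(backend->Initialize());
  RunStoppedChecks(backend);          // SetConfig()/SetVolume() while stopped.

  const int64_t kSomeStartPts = 222;  // Arbitrary start PTS, as in the test.
  ASSERT_TRUE(backend->Start(kSomeStartPts));
  RunPlaybackChecks(backend);         // Must also succeed while playing...

  ASSERT_TRUE(backend->Pause());
  RunPlaybackChecks(backend);         // ...and while paused.

  ASSERT_TRUE(backend->Stop());
  RunStoppedChecks(backend);
}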
In the process, some bugs in the ALSA and default CMA backend
implementations were uncovered and fixed.
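
One of the default-backend fixes is that Delegate::OnEndOfStream() is no
longer invoked synchronously from inside PushBuffer(): AudioDecoderDefault and
VideoDecoderDefault now post it back to the media thread through a WeakPtr, so
the delegate is never re-entered from its own PushBuffer() call. Condensed
from the audio_decoder_default.cc hunk below (includes omitted; they appear in
the hunk itself):

// Condensed from the AudioDecoderDefault::PushBuffer() hunk in this change;
// the video decoder gets the matching fix.
MediaPipelineBackend::BufferStatus AudioDecoderDefault::PushBuffer(
    CastDecoderBuffer* buffer) {
  DCHECK(delegate_);
  DCHECK(buffer);
  if (buffer->end_of_stream()) {
    // Defer the EOS notification so the delegate is never re-entered from
    // inside its own PushBuffer() call; the WeakPtr drops the task safely
    // if the decoder is destroyed first.
    base::ThreadTaskRunnerHandle::Get()->PostTask(
        FROM_HERE, base::Bind(&AudioDecoderDefault::OnEndOfStream,
                              weak_factory_.GetWeakPtr()));
  } else {
    last_push_pts_ = buffer->timestamp();  // Used to clamp GetCurrentPts().
  }
  return MediaPipelineBackend::kBufferSuccess;
}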
BUG= internal b/26496802
Review URL: https://codereview.chromium.org/1649933004
Cr-Commit-Position: refs/heads/master@{#374143}
14 files changed, 776 insertions, 73 deletions
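
Two conformance rules recur in the updated test below. Playback is started at
kStartPts (1 second) while the streams' buffers start near PTS 0, so a backend
that blindly interpolates from the start PTS fails: the reported PTS must
never run more than about 100 ms past the last buffer actually pushed. The
reported PTS is also allowed to move backwards only once (when those first
buffers arrive) and must be non-decreasing afterwards. A sketch of the
per-iteration check in MonitorLoop(), using a hypothetical helper name but the
fixture members and thresholds from the diff:

// Sketch of the PTS checks done on each MonitorLoop() pass; CheckPtsProgress
// is a hypothetical helper name, but the members and limits match the diff.
void AudioVideoPipelineDeviceTest::CheckPtsProgress() {
  const int64_t kInvalidPts = std::numeric_limits<int64_t>::min();
  int64_t pts = backend_->GetCurrentPts();

  // Current PTS may not run more than ~100 ms past the last pushed buffer.
  if (audio_feeder_ && audio_feeder_->last_pushed_pts() != kInvalidPts)
    EXPECT_LE(pts, audio_feeder_->last_pushed_pts() + 100 * 1000);
  if (video_feeder_ && video_feeder_->last_pushed_pts() != kInvalidPts)
    EXPECT_LE(pts, video_feeder_->last_pushed_pts() + 100 * 1000);

  // One backwards jump is tolerated (start PTS of 1s vs. buffers starting at
  // 0); after that the reported PTS must be monotonically non-decreasing.
  if (!backwards_pts_change_) {
    if (pts < last_pts_)
      backwards_pts_change_ = true;
  } else {
    EXPECT_GE(pts, last_pts_);
  }
  last_pts_ = pts;
}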
diff --git a/chromecast/media/base/BUILD.gn b/chromecast/media/base/BUILD.gn index 5371726..85cfcf3 100644 --- a/chromecast/media/base/BUILD.gn +++ b/chromecast/media/base/BUILD.gn @@ -100,7 +100,9 @@ source_set("libcast_media_1.0_default_core") { ] deps = [ + "//base", "//build/config/sanitizers:deps", + "//chromecast/base", "//chromecast/media/cma/backend", ] } @@ -121,7 +123,9 @@ shared_library("libcast_media_1.0_default") { ] deps = [ + "//base", "//build/config/sanitizers:deps", + "//chromecast/base", "//chromecast/media/cma/backend", ] } diff --git a/chromecast/media/base/cast_media_default.cc b/chromecast/media/base/cast_media_default.cc index 33f9590..dbf4867 100644 --- a/chromecast/media/base/cast_media_default.cc +++ b/chromecast/media/base/cast_media_default.cc @@ -2,10 +2,15 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +#include "base/memory/scoped_ptr.h" #include "build/build_config.h" +#include "base/single_thread_task_runner.h" +#include "base/thread_task_runner_handle.h" +#include "chromecast/base/task_runner_impl.h" #include "chromecast/media/cma/backend/media_pipeline_backend_default.h" #include "chromecast/public/cast_media_shlib.h" #include "chromecast/public/graphics_types.h" +#include "chromecast/public/media/media_pipeline_device_params.h" #include "chromecast/public/media_codec_support_shlib.h" #include "chromecast/public/video_plane.h" @@ -22,6 +27,7 @@ class DefaultVideoPlane : public VideoPlane { }; DefaultVideoPlane* g_video_plane = nullptr; +base::ThreadTaskRunnerHandle* g_thread_task_runner_handle = nullptr; } // namespace @@ -32,6 +38,8 @@ void CastMediaShlib::Initialize(const std::vector<std::string>& argv) { void CastMediaShlib::Finalize() { delete g_video_plane; g_video_plane = nullptr; + delete g_thread_task_runner_handle; + g_thread_task_runner_handle = nullptr; } VideoPlane* CastMediaShlib::GetVideoPlane() { @@ -40,6 +48,17 @@ VideoPlane* CastMediaShlib::GetVideoPlane() { MediaPipelineBackend* CastMediaShlib::CreateMediaPipelineBackend( const MediaPipelineDeviceParams& params) { + // Set up the static reference in base::ThreadTaskRunnerHandle::Get + // for the media thread in this shared library. We can extract the + // SingleThreadTaskRunner passed in from cast_shell for this. + if (!base::ThreadTaskRunnerHandle::IsSet()) { + DCHECK(!g_thread_task_runner_handle); + const scoped_refptr<base::SingleThreadTaskRunner> task_runner = + static_cast<TaskRunnerImpl*>(params.task_runner)->runner(); + DCHECK(task_runner->BelongsToCurrentThread()); + g_thread_task_runner_handle = new base::ThreadTaskRunnerHandle(task_runner); + } + return new MediaPipelineBackendDefault(); } diff --git a/chromecast/media/cma/backend/alsa/audio_decoder_alsa.cc b/chromecast/media/cma/backend/alsa/audio_decoder_alsa.cc index 0a0ab98..eddff02 100644 --- a/chromecast/media/cma/backend/alsa/audio_decoder_alsa.cc +++ b/chromecast/media/cma/backend/alsa/audio_decoder_alsa.cc @@ -75,6 +75,7 @@ bool AudioDecoderAlsa::Initialize() { DCHECK(delegate_); stats_ = Statistics(); is_eos_ = false; + last_buffer_pts_ = std::numeric_limits<int64_t>::min(); struct timespec now; if (clock_gettime(CLOCK_MONOTONIC, &now) == 0) { @@ -135,8 +136,10 @@ AudioDecoderAlsa::BufferStatus AudioDecoderAlsa::PushBuffer( uint64_t input_bytes = buffer->end_of_stream() ? 
0 : buffer->data_size(); scoped_refptr<DecoderBufferBase> buffer_base( static_cast<DecoderBufferBase*>(buffer)); - if (!buffer->end_of_stream()) + if (!buffer->end_of_stream()) { last_buffer_pts_ = buffer->timestamp(); + current_pts_ = std::min(current_pts_, last_buffer_pts_); + } // If the buffer is already decoded, do not attempt to decode. Call // OnBufferDecoded asynchronously on the main thread. @@ -278,7 +281,7 @@ void AudioDecoderAlsa::OnWritePcmCompletion(BufferStatus status, const RenderingDelay& delay) { TRACE_FUNCTION_ENTRY0(); DCHECK(task_runner_->BelongsToCurrentThread()); - if (status == MediaPipelineBackendAlsa::kBufferSuccess) + if (status == MediaPipelineBackendAlsa::kBufferSuccess && !is_eos_) current_pts_ = last_buffer_pts_; if (delay.timestamp_microseconds != kInvalidDelayTimestamp) last_known_delay_ = delay; diff --git a/chromecast/media/cma/backend/alsa/stream_mixer_alsa.cc b/chromecast/media/cma/backend/alsa/stream_mixer_alsa.cc index 3082892..bf25888 100644 --- a/chromecast/media/cma/backend/alsa/stream_mixer_alsa.cc +++ b/chromecast/media/cma/backend/alsa/stream_mixer_alsa.cc @@ -421,12 +421,16 @@ StreamMixerAlsa::~StreamMixerAlsa() { void StreamMixerAlsa::FinalizeOnMixerThread() { RUN_ON_MIXER_THREAD(&StreamMixerAlsa::FinalizeOnMixerThread); - retry_write_frames_timer_.reset(); - check_close_timer_.reset(); - Stop(); ClosePcm(); + // Post a task to allow any pending input deletions to run. + POST_TASK_TO_MIXER_THREAD(&StreamMixerAlsa::FinishFinalize); +} + +void StreamMixerAlsa::FinishFinalize() { + retry_write_frames_timer_.reset(); + check_close_timer_.reset(); inputs_.clear(); ignored_inputs_.clear(); } @@ -632,6 +636,8 @@ void StreamMixerAlsa::CheckClose() { } void StreamMixerAlsa::OnFramesQueued() { + if (state_ != kStateNormalPlayback) + return; if (retry_write_frames_timer_->IsRunning()) return; retry_write_frames_timer_->Start( diff --git a/chromecast/media/cma/backend/alsa/stream_mixer_alsa.h b/chromecast/media/cma/backend/alsa/stream_mixer_alsa.h index 0e8ba35..ba88802 100644 --- a/chromecast/media/cma/backend/alsa/stream_mixer_alsa.h +++ b/chromecast/media/cma/backend/alsa/stream_mixer_alsa.h @@ -146,6 +146,7 @@ class StreamMixerAlsa { private: void ResetTaskRunnerForTest(); void FinalizeOnMixerThread(); + void FinishFinalize(); // Reads the buffer size, period size, start threshold, and avail min value // from the provided command line flags or uses default values if no flags are diff --git a/chromecast/media/cma/backend/audio_decoder_default.cc b/chromecast/media/cma/backend/audio_decoder_default.cc index 28887ba..9a399bb 100644 --- a/chromecast/media/cma/backend/audio_decoder_default.cc +++ b/chromecast/media/cma/backend/audio_decoder_default.cc @@ -4,13 +4,21 @@ #include "chromecast/media/cma/backend/audio_decoder_default.h" +#include <limits> + +#include "base/bind.h" +#include "base/location.h" #include "base/logging.h" +#include "base/thread_task_runner_handle.h" #include "chromecast/public/media/cast_decoder_buffer.h" namespace chromecast { namespace media { -AudioDecoderDefault::AudioDecoderDefault() : delegate_(nullptr) {} +AudioDecoderDefault::AudioDecoderDefault() + : delegate_(nullptr), + last_push_pts_(std::numeric_limits<int64_t>::min()), + weak_factory_(this) {} AudioDecoderDefault::~AudioDecoderDefault() {} @@ -23,8 +31,14 @@ MediaPipelineBackend::BufferStatus AudioDecoderDefault::PushBuffer( CastDecoderBuffer* buffer) { DCHECK(delegate_); DCHECK(buffer); - if (buffer->end_of_stream()) - delegate_->OnEndOfStream(); + + if 
(buffer->end_of_stream()) { + base::ThreadTaskRunnerHandle::Get()->PostTask( + FROM_HERE, base::Bind(&AudioDecoderDefault::OnEndOfStream, + weak_factory_.GetWeakPtr())); + } else { + last_push_pts_ = buffer->timestamp(); + } return MediaPipelineBackend::kBufferSuccess; } @@ -43,5 +57,9 @@ AudioDecoderDefault::RenderingDelay AudioDecoderDefault::GetRenderingDelay() { return RenderingDelay(); } +void AudioDecoderDefault::OnEndOfStream() { + delegate_->OnEndOfStream(); +} + } // namespace media } // namespace chromecast diff --git a/chromecast/media/cma/backend/audio_decoder_default.h b/chromecast/media/cma/backend/audio_decoder_default.h index 75e0dec..ea712da 100644 --- a/chromecast/media/cma/backend/audio_decoder_default.h +++ b/chromecast/media/cma/backend/audio_decoder_default.h @@ -5,7 +5,10 @@ #ifndef CHROMECAST_MEDIA_CMA_BACKEND_AUDIO_DECODER_DEFAULT_H_ #define CHROMECAST_MEDIA_CMA_BACKEND_AUDIO_DECODER_DEFAULT_H_ +#include <stdint.h> + #include "base/macros.h" +#include "base/memory/weak_ptr.h" #include "chromecast/public/media/media_pipeline_backend.h" namespace chromecast { @@ -16,6 +19,8 @@ class AudioDecoderDefault : public MediaPipelineBackend::AudioDecoder { AudioDecoderDefault(); ~AudioDecoderDefault() override; + int64_t last_push_pts() const { return last_push_pts_; } + // MediaPipelineBackend::AudioDecoder implementation: void SetDelegate(Delegate* delegate) override; MediaPipelineBackend::BufferStatus PushBuffer( @@ -26,7 +31,11 @@ class AudioDecoderDefault : public MediaPipelineBackend::AudioDecoder { RenderingDelay GetRenderingDelay() override; private: + void OnEndOfStream(); + Delegate* delegate_; + int64_t last_push_pts_; + base::WeakPtrFactory<AudioDecoderDefault> weak_factory_; DISALLOW_COPY_AND_ASSIGN(AudioDecoderDefault); }; diff --git a/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc b/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc index 72d4d4b..3cb80b4 100644 --- a/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc +++ b/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc @@ -4,6 +4,7 @@ #include <stdint.h> +#include <limits> #include <vector> #include "base/bind.h" @@ -20,6 +21,7 @@ #include "base/single_thread_task_runner.h" #include "base/thread_task_runner_handle.h" #include "base/threading/thread.h" +#include "base/threading/thread_checker.h" #include "base/time/time.h" #include "chromecast/base/task_runner_impl.h" #include "chromecast/media/cma/base/decoder_buffer_adapter.h" @@ -31,6 +33,7 @@ #include "chromecast/public/media/media_pipeline_backend.h" #include "chromecast/public/media/media_pipeline_device_params.h" #include "media/base/audio_decoder_config.h" +#include "media/base/audio_timestamp_helper.h" #include "media/base/decoder_buffer.h" #include "media/base/video_decoder_config.h" #include "testing/gtest/include/gtest/gtest.h" @@ -43,6 +46,31 @@ class AudioVideoPipelineDeviceTest; namespace { const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20); +// Call Start() with an initial PTS of 1 second, to test the behaviour if +// we push buffers with a PTS before the start PTS. In this case the backend +// should report the PTS as no later than the last pushed buffers. 
+const int64_t kStartPts = 1000 * 1000; + +void IgnoreEos() {} + +AudioConfig DefaultAudioConfig() { + AudioConfig default_config; + default_config.codec = kCodecPCM; + default_config.sample_format = kSampleFormatS16; + default_config.channel_number = 2; + default_config.bytes_per_channel = 2; + default_config.samples_per_second = 48000; + return default_config; +} + +VideoConfig DefaultVideoConfig() { + VideoConfig default_config; + default_config.codec = kCodecH264; + default_config.profile = kH264Main; + default_config.additional_config = nullptr; + default_config.is_encrypted = false; + return default_config; +} base::FilePath GetTestDataFilePath(const std::string& name) { base::FilePath file_path; @@ -56,6 +84,7 @@ base::FilePath GetTestDataFilePath(const std::string& name) { class BufferFeeder : public MediaPipelineBackend::Decoder::Delegate { public: + explicit BufferFeeder(const base::Closure& eos_cb); ~BufferFeeder() override {} static scoped_ptr<BufferFeeder> LoadAudio(MediaPipelineBackend* backend, @@ -67,32 +96,58 @@ class BufferFeeder : public MediaPipelineBackend::Decoder::Delegate { const base::Closure& eos_cb); bool eos() const { return eos_; } + MediaPipelineBackend::Decoder* decoder() const { return decoder_; } + int64_t last_pushed_pts() const { return last_pushed_pts_; } + + void SetAudioConfig(const AudioConfig& config) { audio_config_ = config; } + void SetVideoConfig(const VideoConfig& config) { video_config_ = config; } - void Initialize(MediaPipelineBackend::Decoder* decoder, + void FeedContinuousPcm(); + void PauseBeforeEos(); + void Initialize(MediaPipelineBackend* backend, + MediaPipelineBackend::Decoder* decoder, const BufferList& buffers); void Start(); + void Stop(); + + void ScheduleConfigTest(); + + void TestAudioConfigs(); + void TestAudioVolume(); + void TestVideoConfigs(); // MediaPipelineBackend::Decoder::Delegate implementation: void OnPushBufferComplete(MediaPipelineBackend::BufferStatus status) override; void OnEndOfStream() override; - void OnDecoderError() override { ASSERT_TRUE(false); } + void OnDecoderError() override; void OnKeyStatusChanged(const std::string& key_id, CastKeyStatus key_status, - uint32_t system_code) override { - ASSERT_TRUE(false); - } - void OnVideoResolutionChanged(const Size& size) override {} + uint32_t system_code) override; + void OnVideoResolutionChanged(const Size& size) override; private: - explicit BufferFeeder(const base::Closure& eos_cb); void FeedBuffer(); + void FeedPcm(); + void TestConfigs(); base::Closure eos_cb_; + bool within_push_buffer_call_; + bool expecting_buffer_complete_; bool feeding_completed_; bool eos_; + bool pause_before_eos_; + bool test_config_after_next_push_; + bool feed_continuous_pcm_; + MediaPipelineBackend* backend_; MediaPipelineBackend::Decoder* decoder_; BufferList buffers_; + BufferList buffers_copy_; scoped_refptr<DecoderBufferBase> pending_buffer_; + base::ThreadChecker thread_checker_; + AudioConfig audio_config_; + VideoConfig video_config_; + int64_t last_pushed_pts_; + scoped_ptr<::media::AudioTimestampHelper> timestamp_helper_; DISALLOW_COPY_AND_ASSIGN(BufferFeeder); }; @@ -113,6 +168,14 @@ class AudioVideoPipelineDeviceTest : public testing::Test { AudioVideoPipelineDeviceTest(); ~AudioVideoPipelineDeviceTest() override; + MediaPipelineBackend* backend() const { return backend_.get(); } + void set_sync_type(MediaPipelineDeviceParams::MediaSyncType sync_type) { + sync_type_ = sync_type; + } + void set_audio_type(MediaPipelineDeviceParams::AudioStreamType audio_type) { + 
audio_type_ = audio_type; + } + void SetUp() override { CastMediaShlib::Initialize( base::CommandLine::ForCurrentProcess()->argv()); @@ -135,22 +198,43 @@ class AudioVideoPipelineDeviceTest : public testing::Test { // Adds a pause to the end of pause pattern void AddPause(base::TimeDelta delay, base::TimeDelta length); + void PauseBeforeEos(); + void AddEffectsStreams(); + void Initialize(); void Start(); void OnEndOfStream(); - private: - void Initialize(); + void SetAudioFeeder(scoped_ptr<BufferFeeder> audio_feeder) { + audio_feeder_ = std::move(audio_feeder); + } + void SetVideoFeeder(scoped_ptr<BufferFeeder> video_feeder) { + video_feeder_ = std::move(video_feeder); + } + void RunStoppedChecks(); + void RunPlaybackChecks(); + void TestBackendStates(); + void StartImmediateEosTest(); + void EndImmediateEosTest(); + + private: void MonitorLoop(); void OnPauseCompleted(); + MediaPipelineDeviceParams::MediaSyncType sync_type_; + MediaPipelineDeviceParams::AudioStreamType audio_type_; scoped_ptr<TaskRunnerImpl> task_runner_; scoped_ptr<MediaPipelineBackend> backend_; + std::vector<scoped_ptr<MediaPipelineBackend>> effects_backends_; + std::vector<scoped_ptr<BufferFeeder>> effects_feeders_; scoped_ptr<BufferFeeder> audio_feeder_; scoped_ptr<BufferFeeder> video_feeder_; bool stopped_; + bool ran_playing_playback_checks_; + bool backwards_pts_change_; + int64_t last_pts_; // Current media time. base::TimeDelta pause_time_; @@ -165,13 +249,32 @@ class AudioVideoPipelineDeviceTest : public testing::Test { namespace { BufferFeeder::BufferFeeder(const base::Closure& eos_cb) - : eos_cb_(eos_cb), feeding_completed_(false), eos_(false) { + : eos_cb_(eos_cb), + within_push_buffer_call_(false), + expecting_buffer_complete_(false), + feeding_completed_(false), + eos_(false), + pause_before_eos_(false), + test_config_after_next_push_(false), + feed_continuous_pcm_(false), + backend_(nullptr) { CHECK(!eos_cb_.is_null()); } -void BufferFeeder::Initialize(MediaPipelineBackend::Decoder* decoder, +void BufferFeeder::FeedContinuousPcm() { + feed_continuous_pcm_ = true; +} + +void BufferFeeder::PauseBeforeEos() { + pause_before_eos_ = true; +} + +void BufferFeeder::Initialize(MediaPipelineBackend* backend, + MediaPipelineBackend::Decoder* decoder, const BufferList& buffers) { + CHECK(backend); CHECK(decoder); + backend_ = backend; decoder_ = decoder; decoder_->SetDelegate(this); buffers_ = buffers; @@ -180,28 +283,65 @@ void BufferFeeder::Initialize(MediaPipelineBackend::Decoder* decoder, } void BufferFeeder::Start() { + if (feed_continuous_pcm_) { + timestamp_helper_.reset( + new ::media::AudioTimestampHelper(audio_config_.samples_per_second)); + timestamp_helper_->SetBaseTimestamp(base::TimeDelta()); + } + last_pushed_pts_ = std::numeric_limits<int64_t>::min(); + buffers_copy_ = buffers_; base::ThreadTaskRunnerHandle::Get()->PostTask( FROM_HERE, base::Bind(&BufferFeeder::FeedBuffer, base::Unretained(this))); } +void BufferFeeder::Stop() { + feeding_completed_ = true; +} + +void BufferFeeder::ScheduleConfigTest() { + if (expecting_buffer_complete_) { + test_config_after_next_push_ = true; + } else { + TestConfigs(); + } +} + void BufferFeeder::FeedBuffer() { - // Possibly feed one buffer. - CHECK(!buffers_.empty()); + CHECK(decoder_); if (feeding_completed_) return; + if (feed_continuous_pcm_) { + FeedPcm(); + return; + } + // Possibly feed one buffer. 
+ CHECK(!buffers_.empty()); pending_buffer_ = buffers_.front(); + if (pending_buffer_->end_of_stream()) { + if (pause_before_eos_) + ASSERT_TRUE(backend_->Pause()); + } else { + last_pushed_pts_ = pending_buffer_->timestamp(); + } + expecting_buffer_complete_ = true; + within_push_buffer_call_ = true; BufferStatus status = decoder_->PushBuffer(pending_buffer_.get()); + within_push_buffer_call_ = false; EXPECT_NE(status, MediaPipelineBackend::kBufferFailed); buffers_.pop_front(); + if (pending_buffer_->end_of_stream() && pause_before_eos_) + ASSERT_TRUE(backend_->Resume()); + // Feeding is done, just wait for the end of stream callback. if (pending_buffer_->end_of_stream() || buffers_.empty()) { if (buffers_.empty() && !pending_buffer_->end_of_stream()) LOG(WARNING) << "Stream emptied without feeding EOS frame"; + if (!buffers_.empty()) + LOG(WARNING) << "Stream has more buffers after EOS frame"; feeding_completed_ = true; - return; } if (status == MediaPipelineBackend::kBufferPending) @@ -210,13 +350,50 @@ void BufferFeeder::FeedBuffer() { OnPushBufferComplete(MediaPipelineBackend::kBufferSuccess); } +void BufferFeeder::FeedPcm() { + const int num_frames = 512; + scoped_refptr<::media::DecoderBuffer> silence_buffer( + new ::media::DecoderBuffer(num_frames * audio_config_.channel_number * + audio_config_.bytes_per_channel)); + memset(silence_buffer->writable_data(), 0, silence_buffer->data_size()); + pending_buffer_ = new media::DecoderBufferAdapter(silence_buffer); + pending_buffer_->set_timestamp(timestamp_helper_->GetTimestamp()); + timestamp_helper_->AddFrames(num_frames); + + expecting_buffer_complete_ = true; + within_push_buffer_call_ = true; + BufferStatus status = decoder_->PushBuffer(pending_buffer_.get()); + within_push_buffer_call_ = false; + ASSERT_NE(status, MediaPipelineBackend::kBufferFailed); + if (status == MediaPipelineBackend::kBufferPending) + return; + OnPushBufferComplete(MediaPipelineBackend::kBufferSuccess); +} + void BufferFeeder::OnEndOfStream() { + DCHECK(thread_checker_.CalledOnValidThread()); + EXPECT_FALSE(expecting_buffer_complete_) + << "Got OnEndOfStream() before the EOS buffer completed"; eos_ = true; eos_cb_.Run(); } void BufferFeeder::OnPushBufferComplete(BufferStatus status) { - EXPECT_NE(status, MediaPipelineBackend::kBufferFailed); + DCHECK(thread_checker_.CalledOnValidThread()); + pending_buffer_ = nullptr; + EXPECT_FALSE(within_push_buffer_call_) + << "OnPushBufferComplete() called during a call to PushBuffer()"; + EXPECT_TRUE(expecting_buffer_complete_) + << "OnPushBufferComplete() called unexpectedly"; + expecting_buffer_complete_ = false; + ASSERT_NE(status, MediaPipelineBackend::kBufferFailed); + EXPECT_FALSE(eos_) << "Got OnPushBufferComplete() after OnEndOfStream()"; + + if (test_config_after_next_push_) { + test_config_after_next_push_ = false; + TestConfigs(); + } + if (feeding_completed_) return; @@ -224,6 +401,131 @@ void BufferFeeder::OnPushBufferComplete(BufferStatus status) { FROM_HERE, base::Bind(&BufferFeeder::FeedBuffer, base::Unretained(this))); } +void BufferFeeder::OnDecoderError() { + DCHECK(thread_checker_.CalledOnValidThread()); + if (feed_continuous_pcm_) { + feeding_completed_ = true; + } else { + ASSERT_TRUE(false); + } +} + +void BufferFeeder::OnKeyStatusChanged(const std::string& key_id, + CastKeyStatus key_status, + uint32_t system_code) { + DCHECK(thread_checker_.CalledOnValidThread()); + ASSERT_TRUE(false); +} + +void BufferFeeder::OnVideoResolutionChanged(const Size& size) { + 
DCHECK(thread_checker_.CalledOnValidThread()); +} + +void BufferFeeder::TestConfigs() { + if (IsValidConfig(audio_config_)) + TestAudioConfigs(); + if (IsValidConfig(video_config_)) + TestVideoConfigs(); +} + +void BufferFeeder::TestAudioConfigs() { + MediaPipelineBackend::AudioDecoder* audio_decoder = + static_cast<MediaPipelineBackend::AudioDecoder*>(decoder_); + AudioConfig config; + // First, make sure that kAudioCodecUnknown is not accepted. + config.codec = kAudioCodecUnknown; + config.sample_format = kSampleFormatS16; + config.channel_number = 2; + config.bytes_per_channel = 2; + config.samples_per_second = 48000; + // Set invalid config first, to test that the decoder still accepts valid + // config after an invalid config. + audio_decoder->SetConfig(config); + + // Next, test required sample formats. + config.codec = kCodecPCM_S16BE; + EXPECT_TRUE(audio_decoder->SetConfig(config)) + << "Audio decoder does not accept kCodecPCM_S16BE"; + + config.codec = kCodecPCM; + EXPECT_TRUE(audio_decoder->SetConfig(config)) + << "Audio decoder does not accept kCodecPCM"; + + config.sample_format = kSampleFormatPlanarF32; + config.bytes_per_channel = 4; + EXPECT_TRUE(audio_decoder->SetConfig(config)) + << "Audio decoder does not accept kCodecPCM with " + << "planar float (required for multiroom audio)"; + + config.codec = kCodecAAC; + // TODO(kmackay) Determine required sample formats/channel numbers. + config.sample_format = kSampleFormatS16; + config.bytes_per_channel = 2; + config.codec = kCodecAAC; + EXPECT_TRUE(audio_decoder->SetConfig(config)) + << "Audio decoder does not accept kCodecAAC"; + config.codec = kCodecMP3; + EXPECT_TRUE(audio_decoder->SetConfig(config)) + << "Audio decoder does not accept kCodecMP3"; + + // Test optional codecs. + // TODO(kmackay) Make sure other parts of config are correct for each codec. + config.codec = kCodecOpus; + if (!audio_decoder->SetConfig(config)) + LOG(INFO) << "Audio decoder does not accept kCodecOpus"; + config.codec = kCodecEAC3; + if (!audio_decoder->SetConfig(config)) + LOG(INFO) << "Audio decoder does not accept kCodecEAC3"; + config.codec = kCodecAC3; + if (!audio_decoder->SetConfig(config)) + LOG(INFO) << "Audio decoder does not accept kCodecAC3"; + config.codec = kCodecDTS; + if (!audio_decoder->SetConfig(config)) + LOG(INFO) << "Audio decoder does not accept kCodecDTS"; + config.codec = kCodecFLAC; + if (!audio_decoder->SetConfig(config)) + LOG(INFO) << "Audio decoder does not accept kCodecFLAC"; + + // Test supported sample rates. 
+ const int kRequiredSampleRates[] = {8000, 11025, 12000, 16000, 22050, + 24000, 32000, 44100, 48000}; + const int kHiResSampleRates[] = {64000, 88200, 96000}; + config.codec = kCodecPCM; + for (int rate : kRequiredSampleRates) { + config.samples_per_second = rate; + EXPECT_TRUE(audio_decoder->SetConfig(config)) + << "Audio decoder does not accept sample rate " << rate; + } + for (int rate : kHiResSampleRates) { + config.samples_per_second = rate; + if (!audio_decoder->SetConfig(config)) + LOG(INFO) << "Audio decoder does not accept hi-res sample rate " << rate; + } + EXPECT_TRUE(audio_decoder->SetConfig(audio_config_)); +} + +void BufferFeeder::TestAudioVolume() { + MediaPipelineBackend::AudioDecoder* audio_decoder = + static_cast<MediaPipelineBackend::AudioDecoder*>(decoder_); + EXPECT_TRUE(audio_decoder->SetVolume(1.0)) + << "Failed to set audio volume to 1.0"; + EXPECT_TRUE(audio_decoder->SetVolume(0.0)) + << "Failed to set audio volume to 0.0"; + EXPECT_TRUE(audio_decoder->SetVolume(0.2)) + << "Failed to set audio volume to 0.2"; +} + +void BufferFeeder::TestVideoConfigs() { + MediaPipelineBackend::VideoDecoder* video_decoder = + static_cast<MediaPipelineBackend::VideoDecoder*>(decoder_); + VideoConfig config; + config.codec = kVideoCodecUnknown; + // Set invalid config first, to test that the decoder still accepts valid + // config after an invalid config. + video_decoder->SetConfig(config); + EXPECT_TRUE(video_decoder->SetConfig(video_config_)); +} + // static scoped_ptr<BufferFeeder> BufferFeeder::LoadAudio(MediaPipelineBackend* backend, const std::string& filename, @@ -234,14 +536,15 @@ scoped_ptr<BufferFeeder> BufferFeeder::LoadAudio(MediaPipelineBackend* backend, MediaPipelineBackend::AudioDecoder* decoder = backend->CreateAudioDecoder(); CHECK(decoder); - - bool success = decoder->SetConfig(DecoderConfigAdapter::ToCastAudioConfig( - kPrimary, demux_result.audio_config)); + AudioConfig config = DecoderConfigAdapter::ToCastAudioConfig( + kPrimary, demux_result.audio_config); + bool success = decoder->SetConfig(config); CHECK(success); VLOG(2) << "Got " << demux_result.frames.size() << " audio input frames"; scoped_ptr<BufferFeeder> feeder(new BufferFeeder(eos_cb)); - feeder->Initialize(decoder, demux_result.frames); + feeder->audio_config_ = config; + feeder->Initialize(backend, decoder, demux_result.frames); return feeder; } @@ -264,7 +567,7 @@ scoped_ptr<BufferFeeder> BufferFeeder::LoadVideo(MediaPipelineBackend* backend, // TODO(erickung): Either pull data from stream or make caller specify value video_config.codec = kCodecH264; video_config.profile = kH264Main; - video_config.additional_config = NULL; + video_config.additional_config = nullptr; video_config.is_encrypted = false; } else { base::FilePath file_path = GetTestDataFilePath(filename); @@ -282,22 +585,68 @@ scoped_ptr<BufferFeeder> BufferFeeder::LoadVideo(MediaPipelineBackend* backend, VLOG(2) << "Got " << buffers.size() << " video input frames"; scoped_ptr<BufferFeeder> feeder(new BufferFeeder(eos_cb)); - feeder->Initialize(decoder, buffers); + feeder->video_config_ = video_config; + feeder->Initialize(backend, decoder, buffers); return feeder; } } // namespace AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest() - : stopped_(false), pause_pattern_() {} + : sync_type_(MediaPipelineDeviceParams::kModeSyncPts), + audio_type_(MediaPipelineDeviceParams::kAudioStreamNormal), + stopped_(false), + ran_playing_playback_checks_(false), + backwards_pts_change_(false), + pause_pattern_() {} 
AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() {} +void AudioVideoPipelineDeviceTest::Initialize() { + // Create the media device. + task_runner_.reset(new TaskRunnerImpl()); + MediaPipelineDeviceParams params(sync_type_, audio_type_, task_runner_.get()); + backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params)); + CHECK(backend_); +} + void AudioVideoPipelineDeviceTest::AddPause(base::TimeDelta delay, base::TimeDelta length) { pause_pattern_.push_back(PauseInfo(delay, length)); } +void AudioVideoPipelineDeviceTest::PauseBeforeEos() { + if (audio_feeder_) + audio_feeder_->PauseBeforeEos(); + if (video_feeder_) + video_feeder_->PauseBeforeEos(); +} + +void AudioVideoPipelineDeviceTest::AddEffectsStreams() { + const int kNumEffectsStreams = 3; + for (int i = 0; i < kNumEffectsStreams; ++i) { + MediaPipelineDeviceParams params( + MediaPipelineDeviceParams::kModeIgnorePts, + MediaPipelineDeviceParams::kAudioStreamSoundEffects, + task_runner_.get()); + MediaPipelineBackend* effects_backend = + CastMediaShlib::CreateMediaPipelineBackend(params); + CHECK(effects_backend); + effects_backends_.push_back(make_scoped_ptr(effects_backend)); + + MediaPipelineBackend::AudioDecoder* audio_decoder = + effects_backend->CreateAudioDecoder(); + audio_decoder->SetConfig(DefaultAudioConfig()); + + scoped_ptr<BufferFeeder> feeder(new BufferFeeder(base::Bind(&IgnoreEos))); + feeder->FeedContinuousPcm(); + feeder->Initialize(effects_backend, audio_decoder, BufferList()); + feeder->SetAudioConfig(DefaultAudioConfig()); + effects_feeders_.push_back(std::move(feeder)); + ASSERT_TRUE(effects_backend->Initialize()); + } +} + void AudioVideoPipelineDeviceTest::SetPausePattern( const std::vector<PauseInfo> pattern) { pause_pattern_ = pattern; @@ -310,8 +659,7 @@ void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly( backend_.get(), filename, base::Bind(&AudioVideoPipelineDeviceTest::OnEndOfStream, base::Unretained(this))); - bool success = backend_->Initialize(); - ASSERT_TRUE(success); + ASSERT_TRUE(backend_->Initialize()); } void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly( @@ -322,8 +670,7 @@ void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly( backend_.get(), filename, raw_h264, base::Bind(&AudioVideoPipelineDeviceTest::OnEndOfStream, base::Unretained(this))); - bool success = backend_->Initialize(); - ASSERT_TRUE(success); + ASSERT_TRUE(backend_->Initialize()); } void AudioVideoPipelineDeviceTest::ConfigureForFile( @@ -334,33 +681,72 @@ void AudioVideoPipelineDeviceTest::ConfigureForFile( video_feeder_ = BufferFeeder::LoadVideo(backend_.get(), filename, false /* raw_h264 */, eos_cb); audio_feeder_ = BufferFeeder::LoadAudio(backend_.get(), filename, eos_cb); - bool success = backend_->Initialize(); - ASSERT_TRUE(success); + ASSERT_TRUE(backend_->Initialize()); } void AudioVideoPipelineDeviceTest::Start() { pause_time_ = base::TimeDelta(); pause_pattern_idx_ = 0; stopped_ = false; + ran_playing_playback_checks_ = false; + last_pts_ = std::numeric_limits<int64_t>::min(); if (audio_feeder_) audio_feeder_->Start(); if (video_feeder_) video_feeder_->Start(); - backend_->Start(0); + for (auto& feeder : effects_feeders_) + feeder->Start(); + for (auto& backend : effects_backends_) + backend->Start(kStartPts); + + RunStoppedChecks(); + + backend_->Start(kStartPts); + int64_t current_pts = backend()->GetCurrentPts(); + EXPECT_TRUE(kStartPts || current_pts == std::numeric_limits<int64_t>::min()); + last_pts_ = current_pts; base::ThreadTaskRunnerHandle::Get()->PostTask( FROM_HERE, 
base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop, base::Unretained(this))); } +void AudioVideoPipelineDeviceTest::RunStoppedChecks() { + if (audio_feeder_) { + audio_feeder_->ScheduleConfigTest(); + audio_feeder_->TestAudioVolume(); + } + if (video_feeder_) + video_feeder_->ScheduleConfigTest(); +} + +void AudioVideoPipelineDeviceTest::RunPlaybackChecks() { + RunStoppedChecks(); + + EXPECT_TRUE(backend_->SetPlaybackRate(1.0f)); + if (!backend_->SetPlaybackRate(0.1f)) + LOG(INFO) << "Playback rate 0.1 not supported"; + if (!backend_->SetPlaybackRate(0.5f)) + LOG(INFO) << "Playback rate 0.5 not supported"; + if (!backend_->SetPlaybackRate(1.5f)) + LOG(INFO) << "Playback rate 1.5 not supported"; + EXPECT_TRUE(backend_->SetPlaybackRate(1.0f)); +} + void AudioVideoPipelineDeviceTest::OnEndOfStream() { if ((!audio_feeder_ || audio_feeder_->eos()) && (!video_feeder_ || video_feeder_->eos())) { + RunPlaybackChecks(); bool success = backend_->Stop(); stopped_ = true; ASSERT_TRUE(success); + RunStoppedChecks(); + + for (auto& feeder : effects_feeders_) + feeder->Stop(); + base::MessageLoop::current()->QuitWhenIdle(); } } @@ -370,8 +756,33 @@ void AudioVideoPipelineDeviceTest::MonitorLoop() { if (stopped_) return; - base::TimeDelta media_time = - base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts()); + // Run checks while playing (once). + if (!ran_playing_playback_checks_) { + RunPlaybackChecks(); + ran_playing_playback_checks_ = true; + } + + int64_t pts = backend_->GetCurrentPts(); + base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(pts); + + // Check that the current PTS is no more than 100ms past the last pushed PTS. + if (audio_feeder_ && + audio_feeder_->last_pushed_pts() != std::numeric_limits<int64_t>::min()) { + EXPECT_LE(pts, audio_feeder_->last_pushed_pts() + 100 * 1000); + } + if (video_feeder_ && + video_feeder_->last_pushed_pts() != std::numeric_limits<int64_t>::min()) { + EXPECT_LE(pts, video_feeder_->last_pushed_pts() + 100 * 1000); + } + // PTS is allowed to move backwards once to allow for updates when the first + // buffers are pushed. + if (!backwards_pts_change_) { + if (pts < last_pts_) + backwards_pts_change_ = true; + } else { + EXPECT_GE(pts, last_pts_); + } + last_pts_ = pts; if (!pause_pattern_.empty() && pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() && @@ -379,6 +790,7 @@ void AudioVideoPipelineDeviceTest::MonitorLoop() { // Do Pause backend_->Pause(); pause_time_ = base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts()); + RunPlaybackChecks(); VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " << pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms"; @@ -403,38 +815,90 @@ void AudioVideoPipelineDeviceTest::OnPauseCompleted() { base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts()); - // TODO(damienv): - // Should be: - // EXPECT_EQ(media_time, media_time_); - // However, some backends, when rendering the first frame while in paused - // mode moves the time forward. - // This behaviour is not intended. - EXPECT_GE(media_time, pause_time_); - EXPECT_LE(media_time, pause_time_ + base::TimeDelta::FromMilliseconds(50)); + // Make sure that the PTS did not advance while paused. + EXPECT_EQ(pause_time_, media_time); pause_time_ = media_time; pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size(); VLOG(2) << "Pause complete, restarting media clock"; + RunPlaybackChecks(); // Resume playback and frame feeding. 
backend_->Resume(); + RunPlaybackChecks(); MonitorLoop(); } -void AudioVideoPipelineDeviceTest::Initialize() { - // Create the media device. - task_runner_.reset(new TaskRunnerImpl()); - MediaPipelineDeviceParams params(task_runner_.get()); - backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params)); - CHECK(backend_); +void AudioVideoPipelineDeviceTest::TestBackendStates() { + ASSERT_TRUE(backend()->Initialize()); + base::MessageLoop::current()->RunUntilIdle(); + + RunStoppedChecks(); + base::MessageLoop::current()->RunUntilIdle(); + + const int64_t start_pts = 222; + ASSERT_TRUE(backend()->Start(start_pts)); + base::MessageLoop::current()->RunUntilIdle(); + RunPlaybackChecks(); + + ASSERT_TRUE(backend()->Pause()); + base::MessageLoop::current()->RunUntilIdle(); + RunPlaybackChecks(); + + ASSERT_TRUE(backend()->Stop()); + base::MessageLoop::current()->RunUntilIdle(); + + RunStoppedChecks(); + base::MessageLoop::current()->RunUntilIdle(); +} + +void AudioVideoPipelineDeviceTest::StartImmediateEosTest() { + RunStoppedChecks(); + + ASSERT_TRUE(backend()->Initialize()); + base::MessageLoop::current()->RunUntilIdle(); + + Start(); +} + +void AudioVideoPipelineDeviceTest::EndImmediateEosTest() { + EXPECT_EQ(kStartPts, backend_->GetCurrentPts()); + RunPlaybackChecks(); + + ASSERT_TRUE(backend_->Pause()); + base::MessageLoop::current()->RunUntilIdle(); + + EXPECT_EQ(kStartPts, backend_->GetCurrentPts()); + RunPlaybackChecks(); + + ASSERT_TRUE(backend_->Stop()); + base::MessageLoop::current()->RunUntilIdle(); + + RunStoppedChecks(); + + base::MessageLoop::current()->QuitWhenIdle(); } TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) { scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + set_sync_type(MediaPipelineDeviceParams::kModeSyncPts); ConfigureForAudioOnly("sfx.mp3"); + AddEffectsStreams(); + PauseBeforeEos(); + Start(); + message_loop->Run(); +} + +TEST_F(AudioVideoPipelineDeviceTest, AacPlayback) { + scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + + set_sync_type(MediaPipelineDeviceParams::kModeSyncPts); + ConfigureForAudioOnly("sfx.m4a"); + AddEffectsStreams(); + PauseBeforeEos(); Start(); message_loop->Run(); } @@ -442,7 +906,44 @@ TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) { TEST_F(AudioVideoPipelineDeviceTest, VorbisPlayback) { scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + set_sync_type(MediaPipelineDeviceParams::kModeIgnorePts); ConfigureForAudioOnly("sfx.ogg"); + AddEffectsStreams(); + Start(); + message_loop->Run(); +} + +// TODO(kmackay) FFmpegDemuxForTest can't handle AC3 or EAC3. 
+ +TEST_F(AudioVideoPipelineDeviceTest, OpusPlayback_Optional) { + scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + + set_sync_type(MediaPipelineDeviceParams::kModeSyncPts); + ConfigureForAudioOnly("bear-opus.ogg"); + AddEffectsStreams(); + PauseBeforeEos(); + Start(); + message_loop->Run(); +} + +TEST_F(AudioVideoPipelineDeviceTest, DtsPlayback_Optional) { + scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + + set_sync_type(MediaPipelineDeviceParams::kModeSyncPts); + ConfigureForAudioOnly("bear.adts"); + AddEffectsStreams(); + PauseBeforeEos(); + Start(); + message_loop->Run(); +} + +TEST_F(AudioVideoPipelineDeviceTest, FlacPlayback_Optional) { + scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + + set_sync_type(MediaPipelineDeviceParams::kModeSyncPts); + ConfigureForAudioOnly("bear.flac"); + AddEffectsStreams(); + PauseBeforeEos(); Start(); message_loop->Run(); } @@ -450,7 +951,10 @@ TEST_F(AudioVideoPipelineDeviceTest, VorbisPlayback) { TEST_F(AudioVideoPipelineDeviceTest, H264Playback) { scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + set_sync_type(MediaPipelineDeviceParams::kModeIgnorePtsAndVSync); ConfigureForVideoOnly("bear.h264", true /* raw_h264 */); + PauseBeforeEos(); + AddEffectsStreams(); Start(); message_loop->Run(); } @@ -458,11 +962,13 @@ TEST_F(AudioVideoPipelineDeviceTest, H264Playback) { TEST_F(AudioVideoPipelineDeviceTest, WebmPlaybackWithPause) { scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + set_sync_type(MediaPipelineDeviceParams::kModeIgnorePts); // Setup to pause for 100ms every 500ms AddPause(base::TimeDelta::FromMilliseconds(500), base::TimeDelta::FromMilliseconds(100)); ConfigureForVideoOnly("bear-640x360.webm", false /* raw_h264 */); + AddEffectsStreams(); Start(); message_loop->Run(); } @@ -470,7 +976,9 @@ TEST_F(AudioVideoPipelineDeviceTest, WebmPlaybackWithPause) { TEST_F(AudioVideoPipelineDeviceTest, Vp8Playback) { scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + set_sync_type(MediaPipelineDeviceParams::kModeSyncPts); ConfigureForVideoOnly("bear-vp8a.webm", false /* raw_h264 */); + AddEffectsStreams(); Start(); message_loop->Run(); } @@ -478,10 +986,95 @@ TEST_F(AudioVideoPipelineDeviceTest, Vp8Playback) { TEST_F(AudioVideoPipelineDeviceTest, WebmPlayback) { scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + set_sync_type(MediaPipelineDeviceParams::kModeIgnorePtsAndVSync); ConfigureForFile("bear-640x360.webm"); + PauseBeforeEos(); + AddEffectsStreams(); Start(); message_loop->Run(); } +// TODO(kmackay) FFmpegDemuxForTest can't handle HEVC or VP9. + +TEST_F(AudioVideoPipelineDeviceTest, AudioBackendStates) { + scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + Initialize(); + MediaPipelineBackend::AudioDecoder* audio_decoder = + backend()->CreateAudioDecoder(); + + // Test setting config before Initialize(). 
+ scoped_ptr<BufferFeeder> feeder(new BufferFeeder(base::Bind(&IgnoreEos))); + feeder->Initialize(backend(), audio_decoder, BufferList()); + feeder->SetAudioConfig(DefaultAudioConfig()); + feeder->TestAudioConfigs(); + + SetAudioFeeder(std::move(feeder)); + TestBackendStates(); +} + +TEST_F(AudioVideoPipelineDeviceTest, AudioEffectsBackendStates) { + scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + set_audio_type(MediaPipelineDeviceParams::kAudioStreamSoundEffects); + Initialize(); + MediaPipelineBackend::AudioDecoder* audio_decoder = + backend()->CreateAudioDecoder(); + + // Test setting config before Initialize(). + scoped_ptr<BufferFeeder> feeder(new BufferFeeder(base::Bind(&IgnoreEos))); + feeder->Initialize(backend(), audio_decoder, BufferList()); + feeder->SetAudioConfig(DefaultAudioConfig()); + + SetAudioFeeder(std::move(feeder)); + TestBackendStates(); +} + +TEST_F(AudioVideoPipelineDeviceTest, VideoBackendStates) { + scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + Initialize(); + MediaPipelineBackend::VideoDecoder* video_decoder = + backend()->CreateVideoDecoder(); + + // Test setting config before Initialize(). + scoped_ptr<BufferFeeder> feeder(new BufferFeeder(base::Bind(&IgnoreEos))); + feeder->Initialize(backend(), video_decoder, BufferList()); + feeder->SetVideoConfig(DefaultVideoConfig()); + feeder->TestVideoConfigs(); + + SetVideoFeeder(std::move(feeder)); + TestBackendStates(); +} + +TEST_F(AudioVideoPipelineDeviceTest, AudioImmediateEos) { + scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + Initialize(); + MediaPipelineBackend::AudioDecoder* audio_decoder = + backend()->CreateAudioDecoder(); + + scoped_ptr<BufferFeeder> feeder(new BufferFeeder( + base::Bind(&AudioVideoPipelineDeviceTest::EndImmediateEosTest, + base::Unretained(this)))); + feeder->Initialize(backend(), audio_decoder, BufferList()); + feeder->SetAudioConfig(DefaultAudioConfig()); + SetAudioFeeder(std::move(feeder)); + + StartImmediateEosTest(); +} + +TEST_F(AudioVideoPipelineDeviceTest, VideoImmediateEos) { + scoped_ptr<base::MessageLoop> message_loop(new base::MessageLoop()); + Initialize(); + MediaPipelineBackend::VideoDecoder* video_decoder = + backend()->CreateVideoDecoder(); + + scoped_ptr<BufferFeeder> feeder(new BufferFeeder( + base::Bind(&AudioVideoPipelineDeviceTest::EndImmediateEosTest, + base::Unretained(this)))); + feeder->Initialize(backend(), video_decoder, BufferList()); + feeder->SetVideoConfig(DefaultVideoConfig()); + SetVideoFeeder(std::move(feeder)); + + StartImmediateEosTest(); +} + } // namespace media } // namespace chromecast diff --git a/chromecast/media/cma/backend/media_pipeline_backend_default.cc b/chromecast/media/cma/backend/media_pipeline_backend_default.cc index 5ad1260..e23a1b1 100644 --- a/chromecast/media/cma/backend/media_pipeline_backend_default.cc +++ b/chromecast/media/cma/backend/media_pipeline_backend_default.cc @@ -4,6 +4,9 @@ #include "chromecast/media/cma/backend/media_pipeline_backend_default.h" +#include <algorithm> +#include <limits> + #include "chromecast/media/cma/backend/audio_decoder_default.h" #include "chromecast/media/cma/backend/video_decoder_default.h" #include "chromecast/public/media/cast_decoder_buffer.h" @@ -12,8 +15,9 @@ namespace chromecast { namespace media { MediaPipelineBackendDefault::MediaPipelineBackendDefault() - : running_(false), rate_(1.0f) { -} + : start_pts_(std::numeric_limits<int64_t>::min()), + running_(false), + rate_(1.0f) {} 
MediaPipelineBackendDefault::~MediaPipelineBackendDefault() { } @@ -38,21 +42,21 @@ bool MediaPipelineBackendDefault::Initialize() { bool MediaPipelineBackendDefault::Start(int64_t start_pts) { DCHECK(!running_); - start_pts_ = base::TimeDelta::FromMicroseconds(start_pts); + start_pts_ = start_pts; start_clock_ = base::TimeTicks::Now(); running_ = true; return true; } bool MediaPipelineBackendDefault::Stop() { - start_pts_ = base::TimeDelta::FromMicroseconds(GetCurrentPts()); + start_pts_ = GetCurrentPts(); running_ = false; return true; } bool MediaPipelineBackendDefault::Pause() { DCHECK(running_); - start_pts_ = base::TimeDelta::FromMicroseconds(GetCurrentPts()); + start_pts_ = GetCurrentPts(); running_ = false; return true; } @@ -66,17 +70,28 @@ bool MediaPipelineBackendDefault::Resume() { int64_t MediaPipelineBackendDefault::GetCurrentPts() { if (!running_) - return start_pts_.InMicroseconds(); + return start_pts_; + + if (audio_decoder_ && + audio_decoder_->last_push_pts() != std::numeric_limits<int64_t>::min()) { + start_pts_ = std::min(start_pts_, audio_decoder_->last_push_pts()); + } + if (video_decoder_ && + video_decoder_->last_push_pts() != std::numeric_limits<int64_t>::min()) { + start_pts_ = std::min(start_pts_, video_decoder_->last_push_pts()); + } base::TimeTicks now = base::TimeTicks::Now(); base::TimeDelta interpolated_media_time = - start_pts_ + (now - start_clock_) * rate_; + base::TimeDelta::FromMicroseconds(start_pts_) + + (now - start_clock_) * rate_; + return interpolated_media_time.InMicroseconds(); } bool MediaPipelineBackendDefault::SetPlaybackRate(float rate) { DCHECK_GT(rate, 0.0f); - start_pts_ = base::TimeDelta::FromMicroseconds(GetCurrentPts()); + start_pts_ = GetCurrentPts(); start_clock_ = base::TimeTicks::Now(); rate_ = rate; return true; diff --git a/chromecast/media/cma/backend/media_pipeline_backend_default.h b/chromecast/media/cma/backend/media_pipeline_backend_default.h index 4f9f4d0..82a90e1 100644 --- a/chromecast/media/cma/backend/media_pipeline_backend_default.h +++ b/chromecast/media/cma/backend/media_pipeline_backend_default.h @@ -35,7 +35,7 @@ class MediaPipelineBackendDefault : public MediaPipelineBackend { bool SetPlaybackRate(float rate) override; private: - base::TimeDelta start_pts_; + int64_t start_pts_; base::TimeTicks start_clock_; bool running_; float rate_; diff --git a/chromecast/media/cma/backend/video_decoder_default.cc b/chromecast/media/cma/backend/video_decoder_default.cc index 257332e..0d057aa 100644 --- a/chromecast/media/cma/backend/video_decoder_default.cc +++ b/chromecast/media/cma/backend/video_decoder_default.cc @@ -4,13 +4,21 @@ #include "chromecast/media/cma/backend/video_decoder_default.h" +#include <limits> + +#include "base/bind.h" +#include "base/location.h" #include "base/logging.h" +#include "base/thread_task_runner_handle.h" #include "chromecast/public/media/cast_decoder_buffer.h" namespace chromecast { namespace media { -VideoDecoderDefault::VideoDecoderDefault() : delegate_(nullptr) {} +VideoDecoderDefault::VideoDecoderDefault() + : delegate_(nullptr), + last_push_pts_(std::numeric_limits<int64_t>::min()), + weak_factory_(this) {} VideoDecoderDefault::~VideoDecoderDefault() {} @@ -23,8 +31,13 @@ MediaPipelineBackend::BufferStatus VideoDecoderDefault::PushBuffer( CastDecoderBuffer* buffer) { DCHECK(delegate_); DCHECK(buffer); - if (buffer->end_of_stream()) - delegate_->OnEndOfStream(); + if (buffer->end_of_stream()) { + base::ThreadTaskRunnerHandle::Get()->PostTask( + FROM_HERE, 
base::Bind(&VideoDecoderDefault::OnEndOfStream, + weak_factory_.GetWeakPtr())); + } else { + last_push_pts_ = buffer->timestamp(); + } return MediaPipelineBackend::kBufferSuccess; } @@ -35,5 +48,9 @@ bool VideoDecoderDefault::SetConfig(const VideoConfig& config) { return true; } +void VideoDecoderDefault::OnEndOfStream() { + delegate_->OnEndOfStream(); +} + } // namespace media } // namespace chromecast diff --git a/chromecast/media/cma/backend/video_decoder_default.h b/chromecast/media/cma/backend/video_decoder_default.h index 5b71edd..b56a485 100644 --- a/chromecast/media/cma/backend/video_decoder_default.h +++ b/chromecast/media/cma/backend/video_decoder_default.h @@ -5,7 +5,10 @@ #ifndef CHROMECAST_MEDIA_CMA_BACKEND_VIDEO_DECODER_DEFAULT_H_ #define CHROMECAST_MEDIA_CMA_BACKEND_VIDEO_DECODER_DEFAULT_H_ +#include <stdint.h> + #include "base/macros.h" +#include "base/memory/weak_ptr.h" #include "chromecast/public/media/media_pipeline_backend.h" namespace chromecast { @@ -16,6 +19,8 @@ class VideoDecoderDefault : public MediaPipelineBackend::VideoDecoder { VideoDecoderDefault(); ~VideoDecoderDefault() override; + int64_t last_push_pts() const { return last_push_pts_; } + // MediaPipelineBackend::VideoDecoder implementation: void SetDelegate(Delegate* delegate) override; MediaPipelineBackend::BufferStatus PushBuffer( @@ -24,7 +29,11 @@ class VideoDecoderDefault : public MediaPipelineBackend::VideoDecoder { bool SetConfig(const VideoConfig& config) override; private: + void OnEndOfStream(); + Delegate* delegate_; + int64_t last_push_pts_; + base::WeakPtrFactory<VideoDecoderDefault> weak_factory_; DISALLOW_COPY_AND_ASSIGN(VideoDecoderDefault); }; diff --git a/chromecast/media/cma/decoder/cast_audio_decoder_linux.cc b/chromecast/media/cma/decoder/cast_audio_decoder_linux.cc index b5b0396..360d945 100644 --- a/chromecast/media/cma/decoder/cast_audio_decoder_linux.cc +++ b/chromecast/media/cma/decoder/cast_audio_decoder_linux.cc @@ -170,7 +170,10 @@ class CastAudioDecoderImpl : public CastAudioDecoder { } decoded_chunks_.clear(); decoded->set_timestamp(buffer_timestamp); + base::WeakPtr<CastAudioDecoderImpl> self = weak_factory_.GetWeakPtr(); decode_callback.Run(result_status, decoded); + if (!self.get()) + return; // Return immediately if the decode callback deleted this. // Do not reset decode_pending_ to false until after the callback has // finished running because the callback may call Decode(). diff --git a/chromecast/media/media.gyp b/chromecast/media/media.gyp index 71074a2..359e9ae 100644 --- a/chromecast/media/media.gyp +++ b/chromecast/media/media.gyp @@ -16,6 +16,18 @@ '../..', # Root of Chromium checkout '../public/', # Public APIs ], + 'target_conditions': [ + ['_type=="executable"', { + 'ldflags': [ + # Allow OEMs to override default libraries that are shipped with + # cast receiver package by installed OEM-specific libraries in + # /oem_cast_shlib. + '-Wl,-rpath=/oem_cast_shlib', + # Some shlibs are built in same directory of executables. + '-Wl,-rpath=\$$ORIGIN', + ], + }], + ], }, 'targets': [ { @@ -291,14 +303,6 @@ 'cma/test/mock_frame_provider.h', 'cma/test/run_all_unittests.cc', ], - 'ldflags': [ - # Allow OEMs to override default libraries that are shipped with - # cast receiver package by installed OEM-specific libraries in - # /oem_cast_shlib. - '-Wl,-rpath=/oem_cast_shlib', - # Some shlibs are built in same directory of executables. 
- '-Wl,-rpath=\$$ORIGIN', - ], 'conditions': [ ['chromecast_branding=="public"', { 'dependencies': [ @@ -326,6 +330,8 @@ 'target_name': 'libcast_media_1.0_default_core', 'type': '<(component)', 'dependencies': [ + '<(DEPTH)/base/base.gyp:base', + '<(DEPTH)/chromecast/chromecast.gyp:cast_base', '../../chromecast/chromecast.gyp:cast_public_api', 'default_cma_backend' ], |
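
A second, more general fix in this change is in cast_audio_decoder_linux.cc
above: CastAudioDecoderImpl now takes a WeakPtr to itself before running the
decode callback and returns immediately if the callback deleted the decoder,
rather than writing to decode_pending_ on a destroyed object. A minimal
sketch of that pattern with hypothetical class and member names (only the
WeakPtr guard itself is taken from the diff):

#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"

// Hypothetical class illustrating the "callback may delete |this|" guard.
class Worker {
 public:
  typedef base::Callback<void(int)> DoneCallback;

  Worker() : pending_(true), weak_factory_(this) {}

  void Finish(int result, const DoneCallback& callback) {
    base::WeakPtr<Worker> self = weak_factory_.GetWeakPtr();
    callback.Run(result);  // The callback may delete |this|.
    if (!self.get())
      return;              // |this| is gone; touch no members.
    pending_ = false;      // Safe: |this| is still alive.
  }

 private:
  bool pending_;
  base::WeakPtrFactory<Worker> weak_factory_;  // Keep as the last member.

  DISALLOW_COPY_AND_ASSIGN(Worker);
};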