author     xhwang <xhwang@chromium.org>          2014-08-23 14:44:55 -0700
committer  Commit bot <commit-bot@chromium.org>  2014-08-23 21:56:39 +0000
commit     be9da705e341863169faeff532c24c568fad2852 (patch)
tree       fde1d66476cd0cac3db7ee39cb2328738f869794 /media
parent     ce6c5546caf24780e28720e82b9b2de7ea49da48 (diff)
media: Introduce Renderer interface and RendererImpl.
Add a Renderer interface to manage all audio/video (and, in the future, text)
rendering. With Renderer, Pipeline only needs to manage a Demuxer and a
Renderer, which helps move a lot of complicated logic out of Pipeline.

On desktop Chrome we use RendererImpl, which manages AudioRendererImpl and
VideoRendererImpl. On other platforms we could add different Renderer
implementations; for example, we could support browser-side
decoding/rendering.

BUG=392259

Review URL: https://codereview.chromium.org/418143005

Cr-Commit-Position: refs/heads/master@{#291592}
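For orientation, below is a minimal sketch of what the new media::Renderer
interface declares, reconstructed from the MockRenderer declarations added to
media/base/mock_filters.h in this patch (the real header is
media/base/renderer.h, whose full body is not shown in this excerpt). The
method names and signatures match the mock; the TimeDeltaCB typedef, the
include guard, and the comments are assumptions, not the verbatim header.

// Sketch only -- reconstructed from the MockRenderer added in this patch,
// not the verbatim contents of media/base/renderer.h.
#ifndef MEDIA_BASE_RENDERER_SKETCH_H_
#define MEDIA_BASE_RENDERER_SKETCH_H_

#include "base/callback.h"
#include "base/time/time.h"
#include "media/base/buffering_state.h"
#include "media/base/media_export.h"
#include "media/base/pipeline_status.h"

namespace media {

class MediaKeys;

class MEDIA_EXPORT Renderer {
 public:
  // Assumed definition; the mock only shows that a TimeDeltaCB type exists.
  typedef base::Callback<base::TimeDelta()> TimeDeltaCB;

  virtual ~Renderer() {}

  // Initializes the renderer. |init_cb| reports the result; the remaining
  // callbacks surface statistics, end-of-stream, errors, buffering-state
  // changes, and the media duration back to the Pipeline.
  virtual void Initialize(const PipelineStatusCB& init_cb,
                          const StatisticsCB& statistics_cb,
                          const base::Closure& ended_cb,
                          const PipelineStatusCB& error_cb,
                          const BufferingStateCB& buffering_state_cb,
                          const TimeDeltaCB& get_duration_cb) = 0;

  // Discards buffered data and runs |flush_cb| when done.
  virtual void Flush(const base::Closure& flush_cb) = 0;

  // Starts rendering from |time|; Pipeline calls this once playback starts
  // (see Pipeline::StateTransitionTask in the diff below).
  virtual void StartPlayingFrom(base::TimeDelta time) = 0;

  virtual void SetPlaybackRate(float playback_rate) = 0;
  virtual void SetVolume(float volume) = 0;

  // Returns the current media time; Pipeline::GetMediaTime() delegates here.
  virtual base::TimeDelta GetMediaTime() = 0;

  virtual bool HasAudio() = 0;
  virtual bool HasVideo() = 0;

  // Attaches a CDM for decrypting protected streams.
  virtual void SetCdm(MediaKeys* cdm) = 0;
};

}  // namespace media

#endif  // MEDIA_BASE_RENDERER_SKETCH_H_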
Diffstat (limited to 'media')
-rw-r--r--  media/BUILD.gn                                   |    3
-rw-r--r--  media/base/BUILD.gn                              |    2
-rw-r--r--  media/base/filter_collection.cc                  |   24
-rw-r--r--  media/base/filter_collection.h                   |   21
-rw-r--r--  media/base/media_log.cc                          |    6
-rw-r--r--  media/base/media_log_event.h                     |    7
-rw-r--r--  media/base/mock_filters.cc                       |    4
-rw-r--r--  media/base/mock_filters.h                        |   26
-rw-r--r--  media/base/pipeline.cc                           |  358
-rw-r--r--  media/base/pipeline.h                            |   94
-rw-r--r--  media/base/pipeline_unittest.cc                  |  453
-rw-r--r--  media/base/renderer.cc                           |   13
-rw-r--r--  media/base/renderer.h                            |   75
-rw-r--r--  media/filters/audio_renderer_impl.cc             |    6
-rw-r--r--  media/filters/pipeline_integration_test_base.cc  |   35
-rw-r--r--  media/filters/renderer_impl.cc                   |  572
-rw-r--r--  media/filters/renderer_impl.h                    |  201
-rw-r--r--  media/filters/renderer_impl_unittest.cc          |  554
-rw-r--r--  media/filters/video_renderer_impl.cc             |    9
-rw-r--r--  media/filters/video_renderer_impl_unittest.cc    |    3
-rw-r--r--  media/media.gyp                                  |    5
-rw-r--r--  media/tools/player_x11/player_x11.cc             |   31
22 files changed, 1699 insertions(+), 803 deletions(-)
diff --git a/media/BUILD.gn b/media/BUILD.gn
index 8a5a9c5..8912f4b 100644
--- a/media/BUILD.gn
+++ b/media/BUILD.gn
@@ -107,6 +107,8 @@ component("media") {
"filters/h264_bit_reader.h",
"filters/h264_parser.cc",
"filters/h264_parser.h",
+ "filters/renderer_impl.cc",
+ "filters/renderer_impl.h",
"filters/skcanvas_video_renderer.cc",
"filters/skcanvas_video_renderer.h",
"filters/source_buffer_platform.cc",
@@ -449,6 +451,7 @@ test("media_unittests") {
"filters/frame_processor_unittest.cc",
"filters/h264_bit_reader_unittest.cc",
"filters/h264_parser_unittest.cc",
+ "filters/renderer_impl_unittest.cc",
"filters/skcanvas_video_renderer_unittest.cc",
"filters/source_buffer_stream_unittest.cc",
"filters/video_decoder_selector_unittest.cc",
diff --git a/media/base/BUILD.gn b/media/base/BUILD.gn
index 954b9a3..37455d1 100644
--- a/media/base/BUILD.gn
+++ b/media/base/BUILD.gn
@@ -99,6 +99,8 @@ source_set("base") {
"player_tracker.h",
"ranges.cc",
"ranges.h",
+ "renderer.cc",
+ "renderer.h",
"sample_format.cc",
"sample_format.h",
"scoped_histogram_timer.h",
diff --git a/media/base/filter_collection.cc b/media/base/filter_collection.cc
index da5042f..4535a2f 100644
--- a/media/base/filter_collection.cc
+++ b/media/base/filter_collection.cc
@@ -4,10 +4,9 @@
#include "media/base/filter_collection.h"
-#include "media/base/audio_renderer.h"
#include "media/base/demuxer.h"
+#include "media/base/renderer.h"
#include "media/base/text_renderer.h"
-#include "media/base/video_renderer.h"
namespace media {
@@ -23,26 +22,15 @@ Demuxer* FilterCollection::GetDemuxer() {
return demuxer_;
}
-void FilterCollection::SetAudioRenderer(
- scoped_ptr<AudioRenderer> audio_renderer) {
- audio_renderer_ = audio_renderer.Pass();
+void FilterCollection::SetRenderer(scoped_ptr<Renderer> renderer) {
+ renderer_ = renderer.Pass();
}
-scoped_ptr<AudioRenderer> FilterCollection::GetAudioRenderer() {
- return audio_renderer_.Pass();
+scoped_ptr<Renderer> FilterCollection::GetRenderer() {
+ return renderer_.Pass();
}
-void FilterCollection::SetVideoRenderer(
- scoped_ptr<VideoRenderer> video_renderer) {
- video_renderer_ = video_renderer.Pass();
-}
-
-scoped_ptr<VideoRenderer> FilterCollection::GetVideoRenderer() {
- return video_renderer_.Pass();
-}
-
-void FilterCollection::SetTextRenderer(
- scoped_ptr<TextRenderer> text_renderer) {
+void FilterCollection::SetTextRenderer(scoped_ptr<TextRenderer> text_renderer) {
text_renderer_ = text_renderer.Pass();
}
diff --git a/media/base/filter_collection.h b/media/base/filter_collection.h
index a0aee76..33c53ba 100644
--- a/media/base/filter_collection.h
+++ b/media/base/filter_collection.h
@@ -10,16 +10,15 @@
namespace media {
-class AudioRenderer;
class Demuxer;
+class Renderer;
class TextRenderer;
-class VideoRenderer;
-// Represents a set of uninitialized demuxer and audio/video decoders and
-// renderers. Used to start a Pipeline object for media playback.
+// Represents a set of uninitialized demuxer and renderers. Used to start a
+// Pipeline object for media playback.
//
-// TODO(scherkus): Replace FilterCollection with something sensible, see
-// http://crbug.com/110800
+// TODO(xhwang): Create TextRenderer in Pipeline, pass Demuxer and Renderer to
+// Pipeline, and remove FilterCollection, see http://crbug.com/110800
class MEDIA_EXPORT FilterCollection {
public:
FilterCollection();
@@ -28,19 +27,15 @@ class MEDIA_EXPORT FilterCollection {
void SetDemuxer(Demuxer* demuxer);
Demuxer* GetDemuxer();
- void SetAudioRenderer(scoped_ptr<AudioRenderer> audio_renderer);
- scoped_ptr<AudioRenderer> GetAudioRenderer();
-
- void SetVideoRenderer(scoped_ptr<VideoRenderer> video_renderer);
- scoped_ptr<VideoRenderer> GetVideoRenderer();
+ void SetRenderer(scoped_ptr<Renderer> renderer);
+ scoped_ptr<Renderer> GetRenderer();
void SetTextRenderer(scoped_ptr<TextRenderer> text_renderer);
scoped_ptr<TextRenderer> GetTextRenderer();
private:
Demuxer* demuxer_;
- scoped_ptr<AudioRenderer> audio_renderer_;
- scoped_ptr<VideoRenderer> video_renderer_;
+ scoped_ptr<Renderer> renderer_;
scoped_ptr<TextRenderer> text_renderer_;
DISALLOW_COPY_AND_ASSIGN(FilterCollection);
diff --git a/media/base/media_log.cc b/media/base/media_log.cc
index 5e1ed76..b172bfb 100644
--- a/media/base/media_log.cc
+++ b/media/base/media_log.cc
@@ -46,10 +46,8 @@ const char* MediaLog::EventTypeToString(MediaLogEvent::Type type) {
return "TOTAL_BYTES_SET";
case MediaLogEvent::NETWORK_ACTIVITY_SET:
return "NETWORK_ACTIVITY_SET";
- case MediaLogEvent::AUDIO_ENDED:
- return "AUDIO_ENDED";
- case MediaLogEvent::VIDEO_ENDED:
- return "VIDEO_ENDED";
+ case MediaLogEvent::ENDED:
+ return "ENDED";
case MediaLogEvent::TEXT_ENDED:
return "TEXT_ENDED";
case MediaLogEvent::BUFFERED_EXTENTS_CHANGED:
diff --git a/media/base/media_log_event.h b/media/base/media_log_event.h
index c93c8a4..ee21d00 100644
--- a/media/base/media_log_event.h
+++ b/media/base/media_log_event.h
@@ -70,9 +70,10 @@ struct MediaLogEvent {
TOTAL_BYTES_SET,
NETWORK_ACTIVITY_SET,
- // Audio/Video/Text stream playback has ended.
- AUDIO_ENDED,
- VIDEO_ENDED,
+ // Audio/Video stream playback has ended.
+ ENDED,
+
+ // Text stream playback has ended.
TEXT_ENDED,
// The extents of the sliding buffer have changed.
diff --git a/media/base/mock_filters.cc b/media/base/mock_filters.cc
index b608ecb..4bb03e4 100644
--- a/media/base/mock_filters.cc
+++ b/media/base/mock_filters.cc
@@ -70,6 +70,10 @@ MockAudioRenderer::MockAudioRenderer() {}
MockAudioRenderer::~MockAudioRenderer() {}
+MockRenderer::MockRenderer() {}
+
+MockRenderer::~MockRenderer() {}
+
MockTimeSource::MockTimeSource() {}
MockTimeSource::~MockTimeSource() {}
diff --git a/media/base/mock_filters.h b/media/base/mock_filters.h
index 0755e46..1950dcf 100644
--- a/media/base/mock_filters.h
+++ b/media/base/mock_filters.h
@@ -16,6 +16,7 @@
#include "media/base/demuxer.h"
#include "media/base/filter_collection.h"
#include "media/base/pipeline_status.h"
+#include "media/base/renderer.h"
#include "media/base/text_track.h"
#include "media/base/time_source.h"
#include "media/base/video_decoder.h"
@@ -155,6 +156,31 @@ class MockAudioRenderer : public AudioRenderer {
DISALLOW_COPY_AND_ASSIGN(MockAudioRenderer);
};
+class MockRenderer : public Renderer {
+ public:
+ MockRenderer();
+ virtual ~MockRenderer();
+
+ // Renderer implementation.
+ MOCK_METHOD6(Initialize, void(const PipelineStatusCB& init_cb,
+ const StatisticsCB& statistics_cb,
+ const base::Closure& ended_cb,
+ const PipelineStatusCB& error_cb,
+ const BufferingStateCB& buffering_state_cb,
+ const TimeDeltaCB& get_duration_cb));
+ MOCK_METHOD1(Flush, void(const base::Closure& flush_cb));
+ MOCK_METHOD1(StartPlayingFrom, void(base::TimeDelta timestamp));
+ MOCK_METHOD1(SetPlaybackRate, void(float playback_rate));
+ MOCK_METHOD1(SetVolume, void(float volume));
+ MOCK_METHOD0(GetMediaTime, base::TimeDelta());
+ MOCK_METHOD0(HasAudio, bool());
+ MOCK_METHOD0(HasVideo, bool());
+ MOCK_METHOD1(SetCdm, void(MediaKeys* cdm));
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MockRenderer);
+};
+
class MockTimeSource : public TimeSource {
public:
MockTimeSource();
diff --git a/media/base/pipeline.cc b/media/base/pipeline.cc
index bc55981..41eeb98 100644
--- a/media/base/pipeline.cc
+++ b/media/base/pipeline.cc
@@ -17,17 +17,12 @@
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/synchronization/condition_variable.h"
-#include "media/base/audio_decoder.h"
-#include "media/base/audio_renderer.h"
#include "media/base/filter_collection.h"
#include "media/base/media_log.h"
+#include "media/base/renderer.h"
#include "media/base/text_renderer.h"
#include "media/base/text_track_config.h"
-#include "media/base/time_delta_interpolator.h"
-#include "media/base/time_source.h"
-#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
-#include "media/base/video_renderer.h"
using base::TimeDelta;
@@ -42,23 +37,16 @@ Pipeline::Pipeline(
did_loading_progress_(false),
volume_(1.0f),
playback_rate_(0.0f),
- interpolator_(new TimeDeltaInterpolator(&default_tick_clock_)),
- interpolation_state_(INTERPOLATION_STOPPED),
status_(PIPELINE_OK),
+ is_initialized_(false),
state_(kCreated),
- audio_ended_(false),
- video_ended_(false),
- text_ended_(false),
- audio_buffering_state_(BUFFERING_HAVE_NOTHING),
- video_buffering_state_(BUFFERING_HAVE_NOTHING),
+ renderer_ended_(false),
+ text_renderer_ended_(false),
demuxer_(NULL),
- time_source_(NULL),
- underflow_disabled_for_testing_(false),
weak_factory_(this) {
media_log_->AddEvent(media_log_->CreatePipelineStateChangedEvent(kCreated));
media_log_->AddEvent(
media_log_->CreateEvent(MediaLogEvent::PIPELINE_CREATED));
- interpolator_->SetBounds(base::TimeDelta(), base::TimeDelta());
}
Pipeline::~Pipeline() {
@@ -166,7 +154,8 @@ void Pipeline::SetVolume(float volume) {
TimeDelta Pipeline::GetMediaTime() const {
base::AutoLock auto_lock(lock_);
- return std::min(interpolator_->GetInterpolatedTime(), duration_);
+ return renderer_ ? std::min(renderer_->GetMediaTime(), duration_)
+ : TimeDelta();
}
Ranges<TimeDelta> Pipeline::GetBufferedTimeRanges() const {
@@ -191,11 +180,6 @@ PipelineStatistics Pipeline::GetStatistics() const {
return statistics_;
}
-void Pipeline::SetTimeDeltaInterpolatorForTesting(
- TimeDeltaInterpolator* interpolator) {
- interpolator_.reset(interpolator);
-}
-
void Pipeline::SetErrorForTesting(PipelineStatus status) {
OnError(status);
}
@@ -218,8 +202,7 @@ const char* Pipeline::GetStateString(State state) {
switch (state) {
RETURN_STRING(kCreated);
RETURN_STRING(kInitDemuxer);
- RETURN_STRING(kInitAudioRenderer);
- RETURN_STRING(kInitVideoRenderer);
+ RETURN_STRING(kInitRenderer);
RETURN_STRING(kSeeking);
RETURN_STRING(kPlaying);
RETURN_STRING(kStopping);
@@ -243,20 +226,13 @@ Pipeline::State Pipeline::GetNextState() const {
return kInitDemuxer;
case kInitDemuxer:
- if (demuxer_->GetStream(DemuxerStream::AUDIO))
- return kInitAudioRenderer;
- if (demuxer_->GetStream(DemuxerStream::VIDEO))
- return kInitVideoRenderer;
- return kPlaying;
-
- case kInitAudioRenderer:
- if (demuxer_->GetStream(DemuxerStream::VIDEO))
- return kInitVideoRenderer;
- return kPlaying;
-
- case kInitVideoRenderer:
+ if (demuxer_->GetStream(DemuxerStream::AUDIO) ||
+ demuxer_->GetStream(DemuxerStream::VIDEO)) {
+ return kInitRenderer;
+ }
return kPlaying;
+ case kInitRenderer:
case kSeeking:
return kPlaying;
@@ -302,37 +278,6 @@ void Pipeline::OnError(PipelineStatus error) {
&Pipeline::ErrorChangedTask, weak_factory_.GetWeakPtr(), error));
}
-void Pipeline::OnAudioTimeUpdate(TimeDelta time, TimeDelta max_time) {
- DCHECK(task_runner_->BelongsToCurrentThread());
- DCHECK_LE(time.InMicroseconds(), max_time.InMicroseconds());
- base::AutoLock auto_lock(lock_);
-
- if (interpolation_state_ == INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE &&
- time < interpolator_->GetInterpolatedTime()) {
- return;
- }
-
- if (state_ == kSeeking)
- return;
-
- interpolator_->SetBounds(time, max_time);
- StartClockIfWaitingForTimeUpdate_Locked();
-}
-
-void Pipeline::OnVideoTimeUpdate(TimeDelta max_time) {
- DCHECK(task_runner_->BelongsToCurrentThread());
-
- if (audio_renderer_)
- return;
-
- if (state_ == kSeeking)
- return;
-
- base::AutoLock auto_lock(lock_);
- DCHECK_NE(interpolation_state_, INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE);
- interpolator_->SetUpperBound(max_time);
-}
-
void Pipeline::SetDuration(TimeDelta duration) {
DCHECK(IsRunning());
media_log_->AddEvent(
@@ -386,29 +331,24 @@ void Pipeline::StateTransitionTask(PipelineStatus status) {
case kInitDemuxer:
return InitializeDemuxer(done_cb);
- case kInitAudioRenderer:
- return InitializeAudioRenderer(done_cb);
-
- case kInitVideoRenderer:
- return InitializeVideoRenderer(done_cb);
+ case kInitRenderer:
+ return InitializeRenderer(done_cb);
case kPlaying:
// Finish initial start sequence the first time we enter the playing
// state.
- if (filter_collection_) {
- filter_collection_.reset();
- if (!audio_renderer_ && !video_renderer_) {
+ if (!is_initialized_) {
+ if (!renderer_) {
ErrorChangedTask(PIPELINE_ERROR_COULD_NOT_RENDER);
return;
}
- if (audio_renderer_)
- time_source_ = audio_renderer_->GetTimeSource();
+ is_initialized_ = true;
{
PipelineMetadata metadata;
- metadata.has_audio = audio_renderer_;
- metadata.has_video = video_renderer_;
+ metadata.has_audio = renderer_->HasAudio();
+ metadata.has_video = renderer_->HasVideo();
metadata.timeline_offset = demuxer_->GetTimelineOffset();
DemuxerStream* stream = demuxer_->GetStream(DemuxerStream::VIDEO);
if (stream) {
@@ -422,17 +362,8 @@ void Pipeline::StateTransitionTask(PipelineStatus status) {
base::ResetAndReturn(&seek_cb_).Run(PIPELINE_OK);
- {
- base::AutoLock auto_lock(lock_);
- interpolator_->SetBounds(start_timestamp_, start_timestamp_);
- }
+ renderer_->StartPlayingFrom(start_timestamp_);
- if (time_source_)
- time_source_->SetMediaTime(start_timestamp_);
- if (audio_renderer_)
- audio_renderer_->StartPlaying();
- if (video_renderer_)
- video_renderer_->StartPlaying();
if (text_renderer_)
text_renderer_->StartPlaying();
@@ -449,31 +380,17 @@ void Pipeline::StateTransitionTask(PipelineStatus status) {
}
}
-// Note that the usage of base::Unretained() with the audio/video renderers
-// in the following DoXXX() functions is considered safe as they are owned by
-// |pending_callbacks_| and share the same lifetime.
+// Note that the usage of base::Unretained() with the renderers is considered
+// safe as they are owned by |pending_callbacks_| and share the same lifetime.
//
// That being said, deleting the renderers while keeping |pending_callbacks_|
// running on the media thread would result in crashes.
-
-#if DCHECK_IS_ON
-static void VerifyBufferingStates(BufferingState* audio_buffering_state,
- BufferingState* video_buffering_state) {
- DCHECK_EQ(*audio_buffering_state, BUFFERING_HAVE_NOTHING);
- DCHECK_EQ(*video_buffering_state, BUFFERING_HAVE_NOTHING);
-}
-#endif
-
-void Pipeline::DoSeek(
- base::TimeDelta seek_timestamp,
- const PipelineStatusCB& done_cb) {
+void Pipeline::DoSeek(TimeDelta seek_timestamp,
+ const PipelineStatusCB& done_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(!pending_callbacks_.get());
+ DCHECK_EQ(state_, kSeeking);
SerialRunner::Queue bound_fns;
- {
- base::AutoLock auto_lock(lock_);
- PauseClockAndStopTicking_Locked();
- }
// Pause.
if (text_renderer_) {
@@ -482,22 +399,9 @@ void Pipeline::DoSeek(
}
// Flush.
- if (audio_renderer_) {
- bound_fns.Push(base::Bind(
- &AudioRenderer::Flush, base::Unretained(audio_renderer_.get())));
- }
-
- if (video_renderer_) {
- bound_fns.Push(base::Bind(
- &VideoRenderer::Flush, base::Unretained(video_renderer_.get())));
- }
-
-#if DCHECK_IS_ON
- // Verify renderers reset their buffering states.
- bound_fns.Push(base::Bind(&VerifyBufferingStates,
- &audio_buffering_state_,
- &video_buffering_state_));
-#endif
+ DCHECK(renderer_);
+ bound_fns.Push(
+ base::Bind(&Renderer::Flush, base::Unretained(renderer_.get())));
if (text_renderer_) {
bound_fns.Push(base::Bind(
@@ -516,8 +420,7 @@ void Pipeline::DoStop(const PipelineStatusCB& done_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(!pending_callbacks_.get());
- audio_renderer_.reset();
- video_renderer_.reset();
+ renderer_.reset();
text_renderer_.reset();
if (demuxer_) {
@@ -532,9 +435,9 @@ void Pipeline::OnStopCompleted(PipelineStatus status) {
DVLOG(2) << __FUNCTION__;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, kStopping);
- DCHECK(!audio_renderer_);
- DCHECK(!video_renderer_);
+ DCHECK(!renderer_);
DCHECK(!text_renderer_);
+
{
base::AutoLock l(lock_);
running_ = false;
@@ -569,8 +472,7 @@ void Pipeline::OnStopCompleted(PipelineStatus status) {
}
}
-void Pipeline::AddBufferedTimeRange(base::TimeDelta start,
- base::TimeDelta end) {
+void Pipeline::AddBufferedTimeRange(TimeDelta start, TimeDelta end) {
DCHECK(IsRunning());
base::AutoLock auto_lock(lock_);
buffered_time_ranges_.Add(start, end);
@@ -652,13 +554,7 @@ void Pipeline::PlaybackRateChangedTask(float playback_rate) {
if (state_ != kPlaying)
return;
- {
- base::AutoLock auto_lock(lock_);
- interpolator_->SetPlaybackRate(playback_rate);
- }
-
- if (time_source_)
- time_source_->SetPlaybackRate(playback_rate_);
+ renderer_->SetPlaybackRate(playback_rate_);
}
void Pipeline::VolumeChangedTask(float volume) {
@@ -668,8 +564,7 @@ void Pipeline::VolumeChangedTask(float volume) {
if (state_ != kPlaying)
return;
- if (audio_renderer_)
- audio_renderer_->SetVolume(volume);
+ renderer_->SetVolume(volume);
}
void Pipeline::SeekTask(TimeDelta time, const PipelineStatusCB& seek_cb) {
@@ -692,44 +587,23 @@ void Pipeline::SeekTask(TimeDelta time, const PipelineStatusCB& seek_cb) {
SetState(kSeeking);
seek_cb_ = seek_cb;
- audio_ended_ = false;
- video_ended_ = false;
- text_ended_ = false;
+ renderer_ended_ = false;
+ text_renderer_ended_ = false;
start_timestamp_ = time;
DoSeek(time,
base::Bind(&Pipeline::OnStateTransition, weak_factory_.GetWeakPtr()));
}
-void Pipeline::OnAudioRendererEnded() {
- DCHECK(task_runner_->BelongsToCurrentThread());
- media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::AUDIO_ENDED));
-
- if (state_ != kPlaying)
- return;
-
- DCHECK(!audio_ended_);
- audio_ended_ = true;
-
- // Start clock since there is no more audio to trigger clock updates.
- {
- base::AutoLock auto_lock(lock_);
- interpolator_->SetUpperBound(duration_);
- StartClockIfWaitingForTimeUpdate_Locked();
- }
-
- RunEndedCallbackIfNeeded();
-}
-
-void Pipeline::OnVideoRendererEnded() {
+void Pipeline::OnRendererEnded() {
DCHECK(task_runner_->BelongsToCurrentThread());
- media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::VIDEO_ENDED));
+ media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::ENDED));
if (state_ != kPlaying)
return;
- DCHECK(!video_ended_);
- video_ended_ = true;
+ DCHECK(!renderer_ended_);
+ renderer_ended_ = true;
RunEndedCallbackIfNeeded();
}
@@ -741,8 +615,8 @@ void Pipeline::OnTextRendererEnded() {
if (state_ != kPlaying)
return;
- DCHECK(!text_ended_);
- text_ended_ = true;
+ DCHECK(!text_renderer_ended_);
+ text_renderer_ended_ = true;
RunEndedCallbackIfNeeded();
}
@@ -750,21 +624,12 @@ void Pipeline::OnTextRendererEnded() {
void Pipeline::RunEndedCallbackIfNeeded() {
DCHECK(task_runner_->BelongsToCurrentThread());
- if (audio_renderer_ && !audio_ended_)
+ if (renderer_ && !renderer_ended_)
return;
- if (video_renderer_ && !video_ended_)
+ if (text_renderer_ && text_renderer_->HasTracks() && !text_renderer_ended_)
return;
- if (text_renderer_ && text_renderer_->HasTracks() && !text_ended_)
- return;
-
- {
- base::AutoLock auto_lock(lock_);
- PauseClockAndStopTicking_Locked();
- interpolator_->SetBounds(duration_, duration_);
- }
-
DCHECK_EQ(status_, PIPELINE_OK);
ended_cb_.Run();
}
@@ -789,144 +654,25 @@ void Pipeline::InitializeDemuxer(const PipelineStatusCB& done_cb) {
demuxer_->Initialize(this, done_cb, text_renderer_);
}
-void Pipeline::InitializeAudioRenderer(const PipelineStatusCB& done_cb) {
+void Pipeline::InitializeRenderer(const PipelineStatusCB& done_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
- audio_renderer_ = filter_collection_->GetAudioRenderer();
- base::WeakPtr<Pipeline> weak_this = weak_factory_.GetWeakPtr();
- audio_renderer_->Initialize(
- demuxer_->GetStream(DemuxerStream::AUDIO),
- done_cb,
- base::Bind(&Pipeline::OnUpdateStatistics, weak_this),
- base::Bind(&Pipeline::OnAudioTimeUpdate, weak_this),
- base::Bind(&Pipeline::BufferingStateChanged, weak_this,
- &audio_buffering_state_),
- base::Bind(&Pipeline::OnAudioRendererEnded, weak_this),
- base::Bind(&Pipeline::OnError, weak_this));
-}
-
-void Pipeline::InitializeVideoRenderer(const PipelineStatusCB& done_cb) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ renderer_ = filter_collection_->GetRenderer();
- video_renderer_ = filter_collection_->GetVideoRenderer();
base::WeakPtr<Pipeline> weak_this = weak_factory_.GetWeakPtr();
- video_renderer_->Initialize(
- demuxer_->GetStream(DemuxerStream::VIDEO),
- demuxer_->GetLiveness() == Demuxer::LIVENESS_LIVE,
+ renderer_->Initialize(
done_cb,
base::Bind(&Pipeline::OnUpdateStatistics, weak_this),
- base::Bind(&Pipeline::OnVideoTimeUpdate, weak_this),
- base::Bind(&Pipeline::BufferingStateChanged, weak_this,
- &video_buffering_state_),
- base::Bind(&Pipeline::OnVideoRendererEnded, weak_this),
+ base::Bind(&Pipeline::OnRendererEnded, weak_this),
base::Bind(&Pipeline::OnError, weak_this),
- base::Bind(&Pipeline::GetMediaTime, base::Unretained(this)),
+ base::Bind(&Pipeline::BufferingStateChanged, weak_this),
base::Bind(&Pipeline::GetMediaDuration, base::Unretained(this)));
}
-void Pipeline::BufferingStateChanged(BufferingState* buffering_state,
- BufferingState new_buffering_state) {
- DVLOG(1) << __FUNCTION__ << "(" << *buffering_state << ", "
- << " " << new_buffering_state << ") "
- << (buffering_state == &audio_buffering_state_ ? "audio" : "video");
+void Pipeline::BufferingStateChanged(BufferingState new_buffering_state) {
+ DVLOG(1) << __FUNCTION__ << "(" << new_buffering_state << ") ";
DCHECK(task_runner_->BelongsToCurrentThread());
- bool was_waiting_for_enough_data = WaitingForEnoughData();
-
- *buffering_state = new_buffering_state;
-
- // Disable underflow by ignoring updates that renderers have ran out of data
- // after we have started the clock.
- if (state_ == kPlaying && underflow_disabled_for_testing_ &&
- interpolation_state_ != INTERPOLATION_STOPPED) {
- return;
- }
-
- // Renderer underflowed.
- if (!was_waiting_for_enough_data && WaitingForEnoughData()) {
- PausePlayback();
-
- // TODO(scherkus): Fire BUFFERING_HAVE_NOTHING callback to alert clients of
- // underflow state http://crbug.com/144683
- return;
- }
-
- // Renderer prerolled.
- if (was_waiting_for_enough_data && !WaitingForEnoughData()) {
- StartPlayback();
- buffering_state_cb_.Run(BUFFERING_HAVE_ENOUGH);
- return;
- }
-}
-
-bool Pipeline::WaitingForEnoughData() const {
- DCHECK(task_runner_->BelongsToCurrentThread());
- if (state_ != kPlaying)
- return false;
- if (audio_renderer_ && audio_buffering_state_ != BUFFERING_HAVE_ENOUGH)
- return true;
- if (video_renderer_ && video_buffering_state_ != BUFFERING_HAVE_ENOUGH)
- return true;
- return false;
-}
-
-void Pipeline::PausePlayback() {
- DVLOG(1) << __FUNCTION__;
- DCHECK_EQ(state_, kPlaying);
- DCHECK(WaitingForEnoughData());
- DCHECK(task_runner_->BelongsToCurrentThread());
-
- base::AutoLock auto_lock(lock_);
- PauseClockAndStopTicking_Locked();
-}
-
-void Pipeline::StartPlayback() {
- DVLOG(1) << __FUNCTION__;
- DCHECK_EQ(state_, kPlaying);
- DCHECK_EQ(interpolation_state_, INTERPOLATION_STOPPED);
- DCHECK(!WaitingForEnoughData());
- DCHECK(task_runner_->BelongsToCurrentThread());
-
- if (time_source_) {
- // We use audio stream to update the clock. So if there is such a
- // stream, we pause the clock until we receive a valid timestamp.
- base::AutoLock auto_lock(lock_);
- interpolation_state_ = INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE;
- time_source_->StartTicking();
- } else {
- base::AutoLock auto_lock(lock_);
- interpolation_state_ = INTERPOLATION_STARTED;
- interpolator_->SetUpperBound(duration_);
- interpolator_->StartInterpolating();
- }
-}
-
-void Pipeline::PauseClockAndStopTicking_Locked() {
- lock_.AssertAcquired();
- switch (interpolation_state_) {
- case INTERPOLATION_STOPPED:
- return;
-
- case INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE:
- time_source_->StopTicking();
- break;
-
- case INTERPOLATION_STARTED:
- if (time_source_)
- time_source_->StopTicking();
- interpolator_->StopInterpolating();
- break;
- }
-
- interpolation_state_ = INTERPOLATION_STOPPED;
-}
-
-void Pipeline::StartClockIfWaitingForTimeUpdate_Locked() {
- lock_.AssertAcquired();
- if (interpolation_state_ != INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE)
- return;
-
- interpolation_state_ = INTERPOLATION_STARTED;
- interpolator_->StartInterpolating();
+ buffering_state_cb_.Run(new_buffering_state);
}
} // namespace media
diff --git a/media/base/pipeline.h b/media/base/pipeline.h
index fe5962a..fed95a3 100644
--- a/media/base/pipeline.h
+++ b/media/base/pipeline.h
@@ -5,15 +5,11 @@
#ifndef MEDIA_BASE_PIPELINE_H_
#define MEDIA_BASE_PIPELINE_H_
-#include <string>
-
#include "base/gtest_prod_util.h"
#include "base/memory/weak_ptr.h"
-#include "base/synchronization/condition_variable.h"
#include "base/synchronization/lock.h"
#include "base/threading/thread_checker.h"
#include "base/time/default_tick_clock.h"
-#include "media/base/audio_renderer.h"
#include "media/base/buffering_state.h"
#include "media/base/demuxer.h"
#include "media/base/media_export.h"
@@ -32,11 +28,10 @@ namespace media {
class FilterCollection;
class MediaLog;
+class Renderer;
class TextRenderer;
class TextTrackConfig;
class TimeDeltaInterpolator;
-class TimeSource;
-class VideoRenderer;
// Metadata describing a pipeline once it has been initialized.
struct PipelineMetadata {
@@ -178,10 +173,6 @@ class MEDIA_EXPORT Pipeline : public DemuxerHost {
// Gets the current pipeline statistics.
PipelineStatistics GetStatistics() const;
- void set_underflow_disabled_for_testing(bool disabled) {
- underflow_disabled_for_testing_ = disabled;
- }
- void SetTimeDeltaInterpolatorForTesting(TimeDeltaInterpolator* interpolator);
void SetErrorForTesting(PipelineStatus status);
bool HasWeakPtrsForTesting() const;
@@ -195,8 +186,7 @@ class MEDIA_EXPORT Pipeline : public DemuxerHost {
enum State {
kCreated,
kInitDemuxer,
- kInitAudioRenderer,
- kInitVideoRenderer,
+ kInitRenderer,
kSeeking,
kPlaying,
kStopping,
@@ -229,12 +219,6 @@ class MEDIA_EXPORT Pipeline : public DemuxerHost {
// Callback executed by filters to update statistics.
void OnUpdateStatistics(const PipelineStatistics& stats);
- // Callback executed by audio renderer to update clock time.
- void OnAudioTimeUpdate(base::TimeDelta time, base::TimeDelta max_time);
-
- // Callback executed by video renderer to update clock time.
- void OnVideoTimeUpdate(base::TimeDelta max_time);
-
// The following "task" methods correspond to the public methods, but these
// methods are run as the result of posting a task to the Pipeline's
// task runner.
@@ -257,8 +241,7 @@ class MEDIA_EXPORT Pipeline : public DemuxerHost {
void SeekTask(base::TimeDelta time, const PipelineStatusCB& seek_cb);
// Callbacks executed when a renderer has ended.
- void OnAudioRendererEnded();
- void OnVideoRendererEnded();
+ void OnRendererEnded();
void OnTextRendererEnded();
void RunEndedCallbackIfNeeded();
@@ -272,20 +255,7 @@ class MEDIA_EXPORT Pipeline : public DemuxerHost {
// Kicks off initialization for each media object, executing |done_cb| with
// the result when completed.
void InitializeDemuxer(const PipelineStatusCB& done_cb);
- void InitializeAudioRenderer(const PipelineStatusCB& done_cb);
- void InitializeVideoRenderer(const PipelineStatusCB& done_cb);
-
- // Kicks off destroying filters. Called by StopTask() and ErrorChangedTask().
- // When we start to tear down the pipeline, we will consider two cases:
- // 1. when pipeline has not been initialized, we will transit to stopping
- // state first.
- // 2. when pipeline has been initialized, we will first transit to pausing
- // => flushing => stopping => stopped state.
- // This will remove the race condition during stop between filters.
- void TearDownPipeline();
-
- // Compute the time corresponding to a byte offset.
- base::TimeDelta TimeForByteOffset_Locked(int64 byte_offset) const;
+ void InitializeRenderer(const PipelineStatusCB& done_cb);
void OnStateTransition(PipelineStatus status);
void StateTransitionTask(PipelineStatus status);
@@ -299,23 +269,7 @@ class MEDIA_EXPORT Pipeline : public DemuxerHost {
void DoStop(const PipelineStatusCB& done_cb);
void OnStopCompleted(PipelineStatus status);
- // Collection of callback methods and helpers for tracking changes in
- // buffering state and transition from paused/underflow states and playing
- // states.
- //
- // While in the kPlaying state:
- // - A waiting to non-waiting transition indicates preroll has completed
- // and StartPlayback() should be called
- // - A non-waiting to waiting transition indicates underflow has occurred
- // and PausePlayback() should be called
- void BufferingStateChanged(BufferingState* buffering_state,
- BufferingState new_buffering_state);
- bool WaitingForEnoughData() const;
- void PausePlayback();
- void StartPlayback();
-
- void PauseClockAndStopTicking_Locked();
- void StartClockIfWaitingForTimeUpdate_Locked();
+ void BufferingStateChanged(BufferingState new_buffering_state);
// Task runner used to execute pipeline tasks.
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
@@ -349,26 +303,6 @@ class MEDIA_EXPORT Pipeline : public DemuxerHost {
// Current duration as reported by |demuxer_|.
base::TimeDelta duration_;
- // base::TickClock used by |interpolator_|.
- base::DefaultTickClock default_tick_clock_;
-
- // Tracks the most recent media time update and provides interpolated values
- // as playback progresses.
- scoped_ptr<TimeDeltaInterpolator> interpolator_;
-
- enum InterpolationState {
- // Audio (if present) is not rendering. Time isn't being interpolated.
- INTERPOLATION_STOPPED,
-
- // Audio (if present) is rendering. Time isn't being interpolated.
- INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE,
-
- // Audio (if present) is rendering. Time is being interpolated.
- INTERPOLATION_STARTED,
- };
-
- InterpolationState interpolation_state_;
-
// Status of the pipeline. Initialized to PIPELINE_OK which indicates that
// the pipeline is operating correctly. Any other value indicates that the
// pipeline is stopped or is stopping. Clients can call the Stop() method to
@@ -378,6 +312,8 @@ class MEDIA_EXPORT Pipeline : public DemuxerHost {
// The following data members are only accessed by tasks posted to
// |task_runner_|.
+ bool is_initialized_;
+
// Member that tracks the current state.
State state_;
@@ -385,12 +321,8 @@ class MEDIA_EXPORT Pipeline : public DemuxerHost {
base::TimeDelta start_timestamp_;
// Whether we've received the audio/video/text ended events.
- bool audio_ended_;
- bool video_ended_;
- bool text_ended_;
-
- BufferingState audio_buffering_state_;
- BufferingState video_buffering_state_;
+ bool renderer_ended_;
+ bool text_renderer_ended_;
// Temporary callback used for Start() and Seek().
PipelineStatusCB seek_cb_;
@@ -413,19 +345,13 @@ class MEDIA_EXPORT Pipeline : public DemuxerHost {
// Holds the initialized renderers. Used for setting the volume,
// playback rate, and determining when playback has finished.
- scoped_ptr<AudioRenderer> audio_renderer_;
- scoped_ptr<VideoRenderer> video_renderer_;
+ scoped_ptr<Renderer> renderer_;
scoped_ptr<TextRenderer> text_renderer_;
- // Renderer-provided time source used to control playback.
- TimeSource* time_source_;
-
PipelineStatistics statistics_;
scoped_ptr<SerialRunner> pending_callbacks_;
- bool underflow_disabled_for_testing_;
-
base::ThreadChecker thread_checker_;
// NOTE: Weak pointers must be invalidated before all other member variables.
diff --git a/media/base/pipeline_unittest.cc b/media/base/pipeline_unittest.cc
index 4eb84f6..c064df4 100644
--- a/media/base/pipeline_unittest.cc
+++ b/media/base/pipeline_unittest.cc
@@ -55,26 +55,6 @@ ACTION_P2(SetBufferingState, cb, buffering_state) {
cb->Run(buffering_state);
}
-// Used for setting expectations on pipeline callbacks. Using a StrictMock
-// also lets us test for missing callbacks.
-class CallbackHelper {
- public:
- CallbackHelper() {}
- virtual ~CallbackHelper() {}
-
- MOCK_METHOD1(OnStart, void(PipelineStatus));
- MOCK_METHOD1(OnSeek, void(PipelineStatus));
- MOCK_METHOD0(OnStop, void());
- MOCK_METHOD0(OnEnded, void());
- MOCK_METHOD1(OnError, void(PipelineStatus));
- MOCK_METHOD1(OnMetadata, void(PipelineMetadata));
- MOCK_METHOD1(OnBufferingStateChange, void(BufferingState));
- MOCK_METHOD0(OnDurationChange, void());
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CallbackHelper);
-};
-
// TODO(scherkus): even though some filters are initialized on separate
// threads these test aren't flaky... why? It's because filters' Initialize()
// is executed on |message_loop_| and the mock filters instantly call
@@ -83,6 +63,26 @@ class CallbackHelper {
// initialization is moved to a separate thread this test will become flaky.
class PipelineTest : public ::testing::Test {
public:
+ // Used for setting expectations on pipeline callbacks. Using a StrictMock
+ // also lets us test for missing callbacks.
+ class CallbackHelper {
+ public:
+ CallbackHelper() {}
+ virtual ~CallbackHelper() {}
+
+ MOCK_METHOD1(OnStart, void(PipelineStatus));
+ MOCK_METHOD1(OnSeek, void(PipelineStatus));
+ MOCK_METHOD0(OnStop, void());
+ MOCK_METHOD0(OnEnded, void());
+ MOCK_METHOD1(OnError, void(PipelineStatus));
+ MOCK_METHOD1(OnMetadata, void(PipelineMetadata));
+ MOCK_METHOD1(OnBufferingStateChange, void(BufferingState));
+ MOCK_METHOD0(OnDurationChange, void());
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(CallbackHelper);
+ };
+
PipelineTest()
: pipeline_(new Pipeline(message_loop_.message_loop_proxy(),
new MediaLog())),
@@ -90,13 +90,9 @@ class PipelineTest : public ::testing::Test {
demuxer_(new StrictMock<MockDemuxer>()) {
filter_collection_->SetDemuxer(demuxer_.get());
- video_renderer_ = new StrictMock<MockVideoRenderer>();
- scoped_ptr<VideoRenderer> video_renderer(video_renderer_);
- filter_collection_->SetVideoRenderer(video_renderer.Pass());
-
- audio_renderer_ = new StrictMock<MockAudioRenderer>();
- scoped_ptr<AudioRenderer> audio_renderer(audio_renderer_);
- filter_collection_->SetAudioRenderer(audio_renderer.Pass());
+ renderer_ = new StrictMock<MockRenderer>();
+ scoped_ptr<Renderer> renderer(renderer_);
+ filter_collection_->SetRenderer(renderer.Pass());
text_renderer_ = new TextRenderer(
message_loop_.message_loop_proxy(),
@@ -116,6 +112,9 @@ class PipelineTest : public ::testing::Test {
EXPECT_CALL(*demuxer_, GetLiveness())
.WillRepeatedly(Return(Demuxer::LIVENESS_UNKNOWN));
+
+ EXPECT_CALL(*renderer_, GetMediaTime())
+ .WillRepeatedly(Return(base::TimeDelta()));
}
virtual ~PipelineTest() {
@@ -175,20 +174,13 @@ class PipelineTest : public ::testing::Test {
}
// Sets up expectations to allow the video renderer to initialize.
- void SetVideoRendererExpectations(DemuxerStream* stream) {
- EXPECT_CALL(*video_renderer_, Initialize(stream, _, _, _, _, _, _, _, _, _))
- .WillOnce(DoAll(SaveArg<5>(&video_buffering_state_cb_),
- SaveArg<6>(&video_ended_cb_),
- RunCallback<2>(PIPELINE_OK)));
- }
-
- // Sets up expectations to allow the audio renderer to initialize.
- void SetAudioRendererExpectations(DemuxerStream* stream) {
- EXPECT_CALL(*audio_renderer_, Initialize(stream, _, _, _, _, _, _))
- .WillOnce(DoAll(SaveArg<3>(&audio_time_cb_),
- SaveArg<4>(&audio_buffering_state_cb_),
- SaveArg<5>(&audio_ended_cb_),
- RunCallback<1>(PIPELINE_OK)));
+ void SetRendererExpectations() {
+ EXPECT_CALL(*renderer_, Initialize(_, _, _, _, _, _))
+ .WillOnce(DoAll(SaveArg<2>(&ended_cb_),
+ SaveArg<4>(&buffering_state_cb_),
+ RunCallback<0>(PIPELINE_OK)));
+ EXPECT_CALL(*renderer_, HasAudio()).WillRepeatedly(Return(audio_stream()));
+ EXPECT_CALL(*renderer_, HasVideo()).WillRepeatedly(Return(video_stream()));
}
void AddTextStream() {
@@ -196,6 +188,7 @@ class PipelineTest : public ::testing::Test {
.WillOnce(Invoke(this, &PipelineTest::DoOnAddTextTrack));
static_cast<DemuxerHost*>(pipeline_.get())->AddTextStream(text_stream(),
TextTrackConfig(kTextSubtitles, "", "", ""));
+ message_loop_.RunUntilIdle();
}
// Sets up expectations on the callback and initializes the pipeline. Called
@@ -205,25 +198,11 @@ class PipelineTest : public ::testing::Test {
if (start_status == PIPELINE_OK) {
EXPECT_CALL(callbacks_, OnMetadata(_)).WillOnce(SaveArg<0>(&metadata_));
-
- if (audio_stream_) {
- EXPECT_CALL(*audio_renderer_, GetTimeSource())
- .WillOnce(Return(&time_source_));
- EXPECT_CALL(time_source_, SetPlaybackRate(0.0f));
- EXPECT_CALL(time_source_, SetMediaTime(base::TimeDelta()));
- EXPECT_CALL(time_source_, StartTicking());
- EXPECT_CALL(*audio_renderer_, SetVolume(1.0f));
- EXPECT_CALL(*audio_renderer_, StartPlaying())
- .WillOnce(SetBufferingState(&audio_buffering_state_cb_,
- BUFFERING_HAVE_ENOUGH));
- }
-
- if (video_stream_) {
- EXPECT_CALL(*video_renderer_, StartPlaying())
- .WillOnce(SetBufferingState(&video_buffering_state_cb_,
- BUFFERING_HAVE_ENOUGH));
- }
-
+ EXPECT_CALL(*renderer_, SetPlaybackRate(0.0f));
+ EXPECT_CALL(*renderer_, SetVolume(1.0f));
+ EXPECT_CALL(*renderer_, StartPlayingFrom(base::TimeDelta()))
+ .WillOnce(SetBufferingState(&buffering_state_cb_,
+ BUFFERING_HAVE_ENOUGH));
EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH));
}
@@ -268,35 +247,19 @@ class PipelineTest : public ::testing::Test {
}
void ExpectSeek(const base::TimeDelta& seek_time, bool underflowed) {
- // Every filter should receive a call to Seek().
EXPECT_CALL(*demuxer_, Seek(seek_time, _))
.WillOnce(RunCallback<1>(PIPELINE_OK));
- if (audio_stream_) {
- if (!underflowed)
- EXPECT_CALL(time_source_, StopTicking());
- EXPECT_CALL(*audio_renderer_, Flush(_))
- .WillOnce(DoAll(SetBufferingState(&audio_buffering_state_cb_,
- BUFFERING_HAVE_NOTHING),
- RunClosure<0>()));
- EXPECT_CALL(time_source_, SetMediaTime(seek_time));
- EXPECT_CALL(time_source_, SetPlaybackRate(_));
- EXPECT_CALL(time_source_, StartTicking());
- EXPECT_CALL(*audio_renderer_, StartPlaying())
- .WillOnce(SetBufferingState(&audio_buffering_state_cb_,
- BUFFERING_HAVE_ENOUGH));
- EXPECT_CALL(*audio_renderer_, SetVolume(_));
- }
-
- if (video_stream_) {
- EXPECT_CALL(*video_renderer_, Flush(_))
- .WillOnce(DoAll(SetBufferingState(&video_buffering_state_cb_,
- BUFFERING_HAVE_NOTHING),
- RunClosure<0>()));
- EXPECT_CALL(*video_renderer_, StartPlaying())
- .WillOnce(SetBufferingState(&video_buffering_state_cb_,
- BUFFERING_HAVE_ENOUGH));
- }
+ EXPECT_CALL(*renderer_, Flush(_))
+ .WillOnce(DoAll(SetBufferingState(&buffering_state_cb_,
+ BUFFERING_HAVE_NOTHING),
+ RunClosure<0>()));
+ EXPECT_CALL(*renderer_, SetPlaybackRate(_));
+ EXPECT_CALL(*renderer_, SetVolume(_));
+ EXPECT_CALL(*renderer_, StartPlayingFrom(seek_time))
+ .WillOnce(SetBufferingState(&buffering_state_cb_,
+ BUFFERING_HAVE_ENOUGH));
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_NOTHING));
// We expect a successful seek callback followed by a buffering update.
EXPECT_CALL(callbacks_, OnSeek(PIPELINE_OK));
@@ -307,11 +270,7 @@ class PipelineTest : public ::testing::Test {
pipeline_->Seek(seek_time,
base::Bind(&CallbackHelper::OnSeek,
base::Unretained(&callbacks_)));
-
- // We expect the time to be updated only after the seek has completed.
- EXPECT_NE(seek_time, pipeline_->GetMediaTime());
message_loop_.RunUntilIdle();
- EXPECT_EQ(seek_time, pipeline_->GetMediaTime());
}
void DestroyPipeline() {
@@ -350,19 +309,14 @@ class PipelineTest : public ::testing::Test {
scoped_ptr<FilterCollection> filter_collection_;
scoped_ptr<StrictMock<MockDemuxer> > demuxer_;
- StrictMock<MockVideoRenderer>* video_renderer_;
- StrictMock<MockAudioRenderer>* audio_renderer_;
- StrictMock<MockTimeSource> time_source_;
+ StrictMock<MockRenderer>* renderer_;
StrictMock<CallbackHelper> text_renderer_callbacks_;
TextRenderer* text_renderer_;
scoped_ptr<StrictMock<MockDemuxerStream> > audio_stream_;
scoped_ptr<StrictMock<MockDemuxerStream> > video_stream_;
scoped_ptr<FakeTextTrackStream> text_stream_;
- AudioRenderer::TimeCB audio_time_cb_;
- BufferingStateCB audio_buffering_state_cb_;
- BufferingStateCB video_buffering_state_cb_;
- base::Closure audio_ended_cb_;
- base::Closure video_ended_cb_;
+ BufferingStateCB buffering_state_cb_;
+ base::Closure ended_cb_;
VideoDecoderConfig video_decoder_config_;
PipelineMetadata metadata_;
@@ -461,7 +415,7 @@ TEST_F(PipelineTest, DemuxerErrorDuringStop) {
streams.push_back(audio_stream());
SetDemuxerExpectations(&streams);
- SetAudioRendererExpectations(audio_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
@@ -499,7 +453,7 @@ TEST_F(PipelineTest, AudioStream) {
streams.push_back(audio_stream());
SetDemuxerExpectations(&streams);
- SetAudioRendererExpectations(audio_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
EXPECT_TRUE(metadata_.has_audio);
@@ -512,7 +466,7 @@ TEST_F(PipelineTest, VideoStream) {
streams.push_back(video_stream());
SetDemuxerExpectations(&streams);
- SetVideoRendererExpectations(video_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
EXPECT_FALSE(metadata_.has_audio);
@@ -527,8 +481,7 @@ TEST_F(PipelineTest, AudioVideoStream) {
streams.push_back(video_stream());
SetDemuxerExpectations(&streams);
- SetAudioRendererExpectations(audio_stream());
- SetVideoRendererExpectations(video_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
EXPECT_TRUE(metadata_.has_audio);
@@ -542,14 +495,13 @@ TEST_F(PipelineTest, VideoTextStream) {
streams.push_back(video_stream());
SetDemuxerExpectations(&streams);
- SetVideoRendererExpectations(video_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
EXPECT_FALSE(metadata_.has_audio);
EXPECT_TRUE(metadata_.has_video);
AddTextStream();
- message_loop_.RunUntilIdle();
}
TEST_F(PipelineTest, VideoAudioTextStream) {
@@ -561,15 +513,13 @@ TEST_F(PipelineTest, VideoAudioTextStream) {
streams.push_back(audio_stream());
SetDemuxerExpectations(&streams);
- SetVideoRendererExpectations(video_stream());
- SetAudioRendererExpectations(audio_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
EXPECT_TRUE(metadata_.has_audio);
EXPECT_TRUE(metadata_.has_video);
AddTextStream();
- message_loop_.RunUntilIdle();
}
TEST_F(PipelineTest, Seek) {
@@ -581,8 +531,7 @@ TEST_F(PipelineTest, Seek) {
streams.push_back(video_stream());
SetDemuxerExpectations(&streams, base::TimeDelta::FromSeconds(3000));
- SetAudioRendererExpectations(audio_stream());
- SetVideoRendererExpectations(video_stream());
+ SetRendererExpectations();
// Initialize then seek!
StartPipeline(PIPELINE_OK);
@@ -599,7 +548,7 @@ TEST_F(PipelineTest, SeekAfterError) {
streams.push_back(audio_stream());
SetDemuxerExpectations(&streams, base::TimeDelta::FromSeconds(3000));
- SetAudioRendererExpectations(audio_stream());
+ SetRendererExpectations();
// Initialize then seek!
StartPipeline(PIPELINE_OK);
@@ -624,11 +573,11 @@ TEST_F(PipelineTest, SetVolume) {
streams.push_back(audio_stream());
SetDemuxerExpectations(&streams);
- SetAudioRendererExpectations(audio_stream());
+ SetRendererExpectations();
// The audio renderer should receive a call to SetVolume().
float expected = 0.5f;
- EXPECT_CALL(*audio_renderer_, SetVolume(expected));
+ EXPECT_CALL(*renderer_, SetVolume(expected));
// Initialize then set volume!
StartPipeline(PIPELINE_OK);
@@ -642,7 +591,7 @@ TEST_F(PipelineTest, Properties) {
const base::TimeDelta kDuration = base::TimeDelta::FromSeconds(100);
SetDemuxerExpectations(&streams, kDuration);
- SetVideoRendererExpectations(video_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
EXPECT_EQ(kDuration.ToInternalValue(),
@@ -657,7 +606,7 @@ TEST_F(PipelineTest, GetBufferedTimeRanges) {
const base::TimeDelta kDuration = base::TimeDelta::FromSeconds(100);
SetDemuxerExpectations(&streams, kDuration);
- SetVideoRendererExpectations(video_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
@@ -687,93 +636,39 @@ TEST_F(PipelineTest, EndedCallback) {
streams.push_back(video_stream());
SetDemuxerExpectations(&streams);
- SetAudioRendererExpectations(audio_stream());
- SetVideoRendererExpectations(video_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
AddTextStream();
// The ended callback shouldn't run until all renderers have ended.
- audio_ended_cb_.Run();
- message_loop_.RunUntilIdle();
-
- video_ended_cb_.Run();
+ ended_cb_.Run();
message_loop_.RunUntilIdle();
- EXPECT_CALL(time_source_, StopTicking());
EXPECT_CALL(callbacks_, OnEnded());
text_stream()->SendEosNotification();
message_loop_.RunUntilIdle();
}
-TEST_F(PipelineTest, AudioStreamShorterThanVideo) {
- base::TimeDelta duration = base::TimeDelta::FromSeconds(10);
-
- CreateAudioStream();
- CreateVideoStream();
- MockDemuxerStreamVector streams;
- streams.push_back(audio_stream());
- streams.push_back(video_stream());
-
- // Replace what's used for interpolating to simulate wall clock time.
- pipeline_->SetTimeDeltaInterpolatorForTesting(
- new TimeDeltaInterpolator(&test_tick_clock_));
-
- SetDemuxerExpectations(&streams, duration);
- SetAudioRendererExpectations(audio_stream());
- SetVideoRendererExpectations(video_stream());
- StartPipeline(PIPELINE_OK);
-
- EXPECT_EQ(0, pipeline_->GetMediaTime().ToInternalValue());
-
- float playback_rate = 1.0f;
- EXPECT_CALL(time_source_, SetPlaybackRate(playback_rate));
- pipeline_->SetPlaybackRate(playback_rate);
- message_loop_.RunUntilIdle();
-
- InSequence s;
-
- // Verify that the clock doesn't advance since it hasn't been started by
- // a time update from the audio stream.
- int64 start_time = pipeline_->GetMediaTime().ToInternalValue();
- test_tick_clock_.Advance(base::TimeDelta::FromMilliseconds(100));
- EXPECT_EQ(pipeline_->GetMediaTime().ToInternalValue(), start_time);
-
- // Signal end of audio stream.
- audio_ended_cb_.Run();
- message_loop_.RunUntilIdle();
-
- // Verify that the clock advances.
- start_time = pipeline_->GetMediaTime().ToInternalValue();
- test_tick_clock_.Advance(base::TimeDelta::FromMilliseconds(100));
- EXPECT_GT(pipeline_->GetMediaTime().ToInternalValue(), start_time);
-
- // Signal end of video stream and make sure OnEnded() callback occurs.
- EXPECT_CALL(time_source_, StopTicking());
- EXPECT_CALL(callbacks_, OnEnded());
- video_ended_cb_.Run();
-}
-
TEST_F(PipelineTest, ErrorDuringSeek) {
CreateAudioStream();
MockDemuxerStreamVector streams;
streams.push_back(audio_stream());
SetDemuxerExpectations(&streams);
- SetAudioRendererExpectations(audio_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
float playback_rate = 1.0f;
- EXPECT_CALL(time_source_, SetPlaybackRate(playback_rate));
+ EXPECT_CALL(*renderer_, SetPlaybackRate(playback_rate));
pipeline_->SetPlaybackRate(playback_rate);
message_loop_.RunUntilIdle();
base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
- // Preroll() isn't called as the demuxer errors out first.
- EXPECT_CALL(time_source_, StopTicking());
- EXPECT_CALL(*audio_renderer_, Flush(_))
- .WillOnce(DoAll(SetBufferingState(&audio_buffering_state_cb_,
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_NOTHING));
+ EXPECT_CALL(*renderer_, Flush(_))
+ .WillOnce(DoAll(SetBufferingState(&buffering_state_cb_,
BUFFERING_HAVE_NOTHING),
RunClosure<0>()));
@@ -813,7 +708,7 @@ TEST_F(PipelineTest, NoMessageDuringTearDownFromError) {
streams.push_back(audio_stream());
SetDemuxerExpectations(&streams);
- SetAudioRendererExpectations(audio_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
// Trigger additional requests on the pipeline during tear down from error.
@@ -825,11 +720,11 @@ TEST_F(PipelineTest, NoMessageDuringTearDownFromError) {
base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
// Seek() isn't called as the demuxer errors out first.
- EXPECT_CALL(time_source_, StopTicking());
- EXPECT_CALL(*audio_renderer_, Flush(_))
- .WillOnce(DoAll(SetBufferingState(&audio_buffering_state_cb_,
+ EXPECT_CALL(*renderer_, Flush(_))
+ .WillOnce(DoAll(SetBufferingState(&buffering_state_cb_,
BUFFERING_HAVE_NOTHING),
RunClosure<0>()));
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_NOTHING));
EXPECT_CALL(*demuxer_, Seek(seek_time, _))
.WillOnce(RunCallback<1>(PIPELINE_ERROR_READ));
@@ -842,75 +737,12 @@ TEST_F(PipelineTest, NoMessageDuringTearDownFromError) {
message_loop_.RunUntilIdle();
}
-static void RunTimeCB(const AudioRenderer::TimeCB& time_cb,
- int time_in_ms,
- int max_time_in_ms) {
- time_cb.Run(base::TimeDelta::FromMilliseconds(time_in_ms),
- base::TimeDelta::FromMilliseconds(max_time_in_ms));
-}
-
-TEST_F(PipelineTest, AudioTimeUpdateDuringSeek) {
- CreateAudioStream();
- MockDemuxerStreamVector streams;
- streams.push_back(audio_stream());
-
- SetDemuxerExpectations(&streams);
- SetAudioRendererExpectations(audio_stream());
- StartPipeline(PIPELINE_OK);
-
- float playback_rate = 1.0f;
- EXPECT_CALL(time_source_, SetPlaybackRate(playback_rate));
- pipeline_->SetPlaybackRate(playback_rate);
- message_loop_.RunUntilIdle();
-
- // Provide an initial time update so that the pipeline transitions out of the
- // "waiting for time update" state.
- audio_time_cb_.Run(base::TimeDelta::FromMilliseconds(100),
- base::TimeDelta::FromMilliseconds(500));
-
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
-
- // Arrange to trigger a time update while the demuxer is in the middle of
- // seeking. This update should be ignored by the pipeline and the clock should
- // not get updated.
- base::Closure closure = base::Bind(&RunTimeCB, audio_time_cb_, 300, 700);
- EXPECT_CALL(*demuxer_, Seek(seek_time, _))
- .WillOnce(DoAll(InvokeWithoutArgs(&closure, &base::Closure::Run),
- RunCallback<1>(PIPELINE_OK)));
-
- EXPECT_CALL(time_source_, StopTicking());
- EXPECT_CALL(*audio_renderer_, Flush(_))
- .WillOnce(DoAll(SetBufferingState(&audio_buffering_state_cb_,
- BUFFERING_HAVE_NOTHING),
- RunClosure<0>()));
- EXPECT_CALL(time_source_, SetMediaTime(seek_time));
- EXPECT_CALL(time_source_, SetPlaybackRate(_));
- EXPECT_CALL(time_source_, StartTicking());
- EXPECT_CALL(*audio_renderer_, StartPlaying())
- .WillOnce(SetBufferingState(&audio_buffering_state_cb_,
- BUFFERING_HAVE_ENOUGH));
- EXPECT_CALL(*audio_renderer_, SetVolume(_));
-
- EXPECT_CALL(callbacks_, OnSeek(PIPELINE_OK));
- EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH));
- DoSeek(seek_time);
-
- EXPECT_EQ(pipeline_->GetMediaTime(), seek_time);
-
- // Now that the seek is complete, verify that time updates advance the current
- // time.
- base::TimeDelta new_time = seek_time + base::TimeDelta::FromMilliseconds(100);
- audio_time_cb_.Run(new_time, new_time);
-
- EXPECT_EQ(pipeline_->GetMediaTime(), new_time);
-}
-
TEST_F(PipelineTest, DestroyAfterStop) {
CreateAudioStream();
MockDemuxerStreamVector streams;
streams.push_back(audio_stream());
SetDemuxerExpectations(&streams);
- SetAudioRendererExpectations(audio_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
ExpectDemuxerStop();
@@ -929,59 +761,24 @@ TEST_F(PipelineTest, Underflow) {
streams.push_back(video_stream());
SetDemuxerExpectations(&streams);
- SetAudioRendererExpectations(audio_stream());
- SetVideoRendererExpectations(video_stream());
+ SetRendererExpectations();
StartPipeline(PIPELINE_OK);
// Simulate underflow.
- EXPECT_CALL(time_source_, StopTicking());
- audio_buffering_state_cb_.Run(BUFFERING_HAVE_NOTHING);
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_NOTHING));
+ buffering_state_cb_.Run(BUFFERING_HAVE_NOTHING);
- // Seek while underflowed. We shouldn't call StopTicking() again.
+ // Seek while underflowed.
base::TimeDelta expected = base::TimeDelta::FromSeconds(5);
ExpectSeek(expected, true);
DoSeek(expected);
}
-static void PostTimeCB(base::MessageLoop* message_loop,
- const AudioRenderer::TimeCB& time_cb) {
- base::TimeDelta new_time = base::TimeDelta::FromMilliseconds(100);
- message_loop->PostTask(FROM_HERE, base::Bind(time_cb, new_time, new_time));
-}
-
-TEST_F(PipelineTest, TimeUpdateAfterStop) {
- CreateAudioStream();
- CreateVideoStream();
- MockDemuxerStreamVector streams;
- streams.push_back(audio_stream());
- streams.push_back(video_stream());
-
- SetDemuxerExpectations(&streams);
- SetAudioRendererExpectations(audio_stream());
- SetVideoRendererExpectations(video_stream());
- StartPipeline(PIPELINE_OK);
-
- // Double post here! This is a hack to simulate the case where TimeCB is
- // posted during ~AudioRenderer(), which is triggered in Pipeline::DoStop.
- // Since we can't EXPECT_CALL the dtor and Pipeline::DoStop() is posted
- // as well, we need to post twice here.
- message_loop_.PostTask(
- FROM_HERE, base::Bind(&PostTimeCB, &message_loop_, audio_time_cb_));
-
- EXPECT_CALL(*demuxer_, Stop(_)).WillOnce(RunClosure<0>());
-
- ExpectPipelineStopAndDestroyPipeline();
- pipeline_->Stop(
- base::Bind(&CallbackHelper::OnStop, base::Unretained(&callbacks_)));
- message_loop_.RunUntilIdle();
-}
-
class PipelineTeardownTest : public PipelineTest {
public:
enum TeardownState {
kInitDemuxer,
- kInitAudioRenderer,
- kInitVideoRenderer,
+ kInitRenderer,
kFlushing,
kSeeking,
kPlaying,
@@ -999,8 +796,7 @@ class PipelineTeardownTest : public PipelineTest {
void RunTest(TeardownState state, StopOrError stop_or_error) {
switch (state) {
case kInitDemuxer:
- case kInitAudioRenderer:
- case kInitVideoRenderer:
+ case kInitRenderer:
DoInitialize(state, stop_or_error);
break;
@@ -1068,60 +864,36 @@ class PipelineTeardownTest : public PipelineTest {
streams.push_back(video_stream());
SetDemuxerExpectations(&streams, base::TimeDelta::FromSeconds(3000));
- if (state == kInitAudioRenderer) {
- if (stop_or_error == kStop) {
- EXPECT_CALL(*audio_renderer_, Initialize(_, _, _, _, _, _, _))
- .WillOnce(DoAll(Stop(pipeline_.get(), stop_cb),
- RunCallback<1>(PIPELINE_OK)));
- ExpectPipelineStopAndDestroyPipeline();
- } else {
- status = PIPELINE_ERROR_INITIALIZATION_FAILED;
- EXPECT_CALL(*audio_renderer_, Initialize(_, _, _, _, _, _, _))
- .WillOnce(RunCallback<1>(status));
- }
-
- EXPECT_CALL(*demuxer_, Stop(_)).WillOnce(RunClosure<0>());
- return status;
- }
-
- EXPECT_CALL(*audio_renderer_, Initialize(_, _, _, _, _, _, _))
- .WillOnce(DoAll(SaveArg<4>(&audio_buffering_state_cb_),
- RunCallback<1>(PIPELINE_OK)));
+ EXPECT_CALL(*renderer_, HasAudio()).WillRepeatedly(Return(true));
+ EXPECT_CALL(*renderer_, HasVideo()).WillRepeatedly(Return(true));
- if (state == kInitVideoRenderer) {
+ if (state == kInitRenderer) {
if (stop_or_error == kStop) {
- EXPECT_CALL(*video_renderer_, Initialize(_, _, _, _, _, _, _, _, _, _))
+ EXPECT_CALL(*renderer_, Initialize(_, _, _, _, _, _))
.WillOnce(DoAll(Stop(pipeline_.get(), stop_cb),
- RunCallback<2>(PIPELINE_OK)));
+ RunCallback<0>(PIPELINE_OK)));
ExpectPipelineStopAndDestroyPipeline();
} else {
status = PIPELINE_ERROR_INITIALIZATION_FAILED;
- EXPECT_CALL(*video_renderer_, Initialize(_, _, _, _, _, _, _, _, _, _))
- .WillOnce(RunCallback<2>(status));
+ EXPECT_CALL(*renderer_, Initialize(_, _, _, _, _, _))
+ .WillOnce(RunCallback<0>(status));
}
EXPECT_CALL(*demuxer_, Stop(_)).WillOnce(RunClosure<0>());
return status;
}
- EXPECT_CALL(*video_renderer_, Initialize(_, _, _, _, _, _, _, _, _, _))
- .WillOnce(DoAll(SaveArg<5>(&video_buffering_state_cb_),
- RunCallback<2>(PIPELINE_OK)));
+ EXPECT_CALL(*renderer_, Initialize(_, _, _, _, _, _))
+ .WillOnce(DoAll(SaveArg<4>(&buffering_state_cb_),
+ RunCallback<0>(PIPELINE_OK)));
EXPECT_CALL(callbacks_, OnMetadata(_));
// If we get here it's a successful initialization.
- EXPECT_CALL(*audio_renderer_, GetTimeSource())
- .WillOnce(Return(&time_source_));
- EXPECT_CALL(time_source_, SetMediaTime(base::TimeDelta()));
- EXPECT_CALL(time_source_, SetPlaybackRate(0.0f));
- EXPECT_CALL(time_source_, StartTicking());
- EXPECT_CALL(*audio_renderer_, SetVolume(1.0f));
- EXPECT_CALL(*audio_renderer_, StartPlaying())
- .WillOnce(SetBufferingState(&audio_buffering_state_cb_,
- BUFFERING_HAVE_ENOUGH));
- EXPECT_CALL(*video_renderer_, StartPlaying())
- .WillOnce(SetBufferingState(&video_buffering_state_cb_,
+ EXPECT_CALL(*renderer_, SetPlaybackRate(0.0f));
+ EXPECT_CALL(*renderer_, SetVolume(1.0f));
+ EXPECT_CALL(*renderer_, StartPlayingFrom(base::TimeDelta()))
+ .WillOnce(SetBufferingState(&buffering_state_cb_,
BUFFERING_HAVE_ENOUGH));
if (status == PIPELINE_OK)
@@ -1152,35 +924,32 @@ class PipelineTeardownTest : public PipelineTest {
base::Closure stop_cb = base::Bind(
&CallbackHelper::OnStop, base::Unretained(&callbacks_));
- EXPECT_CALL(time_source_, StopTicking());
-
if (state == kFlushing) {
if (stop_or_error == kStop) {
- EXPECT_CALL(*audio_renderer_, Flush(_))
+ EXPECT_CALL(*renderer_, Flush(_))
.WillOnce(DoAll(Stop(pipeline_.get(), stop_cb),
- SetBufferingState(&audio_buffering_state_cb_,
+ SetBufferingState(&buffering_state_cb_,
BUFFERING_HAVE_NOTHING),
RunClosure<0>()));
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_NOTHING));
} else {
status = PIPELINE_ERROR_READ;
- EXPECT_CALL(*audio_renderer_, Flush(_)).WillOnce(
- DoAll(SetError(pipeline_.get(), status),
- SetBufferingState(&audio_buffering_state_cb_,
- BUFFERING_HAVE_NOTHING),
- RunClosure<0>()));
+ EXPECT_CALL(*renderer_, Flush(_))
+ .WillOnce(DoAll(SetError(pipeline_.get(), status),
+ SetBufferingState(&buffering_state_cb_,
+ BUFFERING_HAVE_NOTHING),
+ RunClosure<0>()));
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_NOTHING));
}
return status;
}
- EXPECT_CALL(*audio_renderer_, Flush(_))
- .WillOnce(DoAll(SetBufferingState(&audio_buffering_state_cb_,
- BUFFERING_HAVE_NOTHING),
- RunClosure<0>()));
- EXPECT_CALL(*video_renderer_, Flush(_))
- .WillOnce(DoAll(SetBufferingState(&video_buffering_state_cb_,
+ EXPECT_CALL(*renderer_, Flush(_))
+ .WillOnce(DoAll(SetBufferingState(&buffering_state_cb_,
BUFFERING_HAVE_NOTHING),
RunClosure<0>()));
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_NOTHING));
if (state == kSeeking) {
if (stop_or_error == kStop) {
@@ -1238,15 +1007,13 @@ class PipelineTeardownTest : public PipelineTest {
}
INSTANTIATE_TEARDOWN_TEST(Stop, InitDemuxer);
-INSTANTIATE_TEARDOWN_TEST(Stop, InitAudioRenderer);
-INSTANTIATE_TEARDOWN_TEST(Stop, InitVideoRenderer);
+INSTANTIATE_TEARDOWN_TEST(Stop, InitRenderer);
INSTANTIATE_TEARDOWN_TEST(Stop, Flushing);
INSTANTIATE_TEARDOWN_TEST(Stop, Seeking);
INSTANTIATE_TEARDOWN_TEST(Stop, Playing);
INSTANTIATE_TEARDOWN_TEST(Error, InitDemuxer);
-INSTANTIATE_TEARDOWN_TEST(Error, InitAudioRenderer);
-INSTANTIATE_TEARDOWN_TEST(Error, InitVideoRenderer);
+INSTANTIATE_TEARDOWN_TEST(Error, InitRenderer);
INSTANTIATE_TEARDOWN_TEST(Error, Flushing);
INSTANTIATE_TEARDOWN_TEST(Error, Seeking);
INSTANTIATE_TEARDOWN_TEST(Error, Playing);
diff --git a/media/base/renderer.cc b/media/base/renderer.cc
new file mode 100644
index 0000000..a4cf82c
--- /dev/null
+++ b/media/base/renderer.cc
@@ -0,0 +1,13 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/renderer.h"
+
+namespace media {
+
+Renderer::Renderer() {}
+
+Renderer::~Renderer() {}
+
+} // namespace media
diff --git a/media/base/renderer.h b/media/base/renderer.h
new file mode 100644
index 0000000..adb0de7
--- /dev/null
+++ b/media/base/renderer.h
@@ -0,0 +1,75 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_RENDERER_H_
+#define MEDIA_BASE_RENDERER_H_
+
+#include "base/callback.h"
+#include "base/time/time.h"
+#include "media/base/buffering_state.h"
+#include "media/base/media_export.h"
+#include "media/base/pipeline_status.h"
+
+namespace media {
+
+class MediaKeys;
+
+class MEDIA_EXPORT Renderer {
+ public:
+ typedef base::Callback<base::TimeDelta()> TimeDeltaCB;
+
+ Renderer();
+
+ // Stops rendering and fires any pending callbacks.
+ virtual ~Renderer();
+
+ // Initializes the Renderer, executing |init_cb| upon completion.
+ // TODO(xhwang): Provide a set of DemuxerStreams in Initialize().
+ // TODO(xhwang): Replace |init_cb| with a Closure.
+ //
+ // Permanent callbacks:
+ // - |statistics_cb|: Executed periodically with rendering statistics.
+ // - |time_cb|: Executed whenever time has advanced through rendering.
+ // - |ended_cb|: Executed when rendering has reached the end of stream.
+ // - |error_cb|: Executed if any error was encountered during rendering.
+ virtual void Initialize(const PipelineStatusCB& init_cb,
+ const StatisticsCB& statistics_cb,
+ const base::Closure& ended_cb,
+ const PipelineStatusCB& error_cb,
+ const BufferingStateCB& buffering_state_cb,
+ const TimeDeltaCB& get_duration_cb) = 0;
+
+ // The following functions must be called after Initialize().
+
+ // Discards any buffered data, executing |flush_cb| when completed.
+ virtual void Flush(const base::Closure& flush_cb) = 0;
+
+ // Starts rendering from |time|.
+ virtual void StartPlayingFrom(base::TimeDelta time) = 0;
+
+ // Updates the current playback rate. The default playback rate should be 1.
+ virtual void SetPlaybackRate(float playback_rate) = 0;
+
+ // Sets the output volume. The default volume should be 1.
+ virtual void SetVolume(float volume) = 0;
+
+ // Returns the current media time.
+ virtual base::TimeDelta GetMediaTime() = 0;
+
+ // Returns whether |this| renders audio.
+ virtual bool HasAudio() = 0;
+
+ // Returns whether |this| renders video.
+ virtual bool HasVideo() = 0;
+
+ // Associates the |cdm| with this Renderer.
+ virtual void SetCdm(MediaKeys* cdm) = 0;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(Renderer);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_RENDERER_H_
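The interface comments above spell out the Renderer contract: Initialize() wires up the permanent callbacks, and all playback control happens afterwards. As a rough illustration only, here is a minimal C++ sketch of how a client such as the new Pipeline might drive this interface; the HypotheticalClient class, its callback methods and the 10-second duration are invented for the example and are not part of this patch.

#include "base/bind.h"
#include "base/memory/scoped_ptr.h"
#include "base/time/time.h"
#include "media/base/renderer.h"

namespace media {

class HypotheticalClient {
 public:
  explicit HypotheticalClient(scoped_ptr<Renderer> renderer)
      : renderer_(renderer.Pass()) {}

  void Start() {
    renderer_->Initialize(
        base::Bind(&HypotheticalClient::OnInitDone, base::Unretained(this)),
        base::Bind(&HypotheticalClient::OnStatistics, base::Unretained(this)),
        base::Bind(&HypotheticalClient::OnEnded, base::Unretained(this)),
        base::Bind(&HypotheticalClient::OnError, base::Unretained(this)),
        base::Bind(&HypotheticalClient::OnBufferingStateChange,
                   base::Unretained(this)),
        base::Bind(&HypotheticalClient::GetDuration, base::Unretained(this)));
  }

 private:
  void OnInitDone(PipelineStatus status) {
    if (status != PIPELINE_OK)
      return;
    // Playback control is only legal after a successful Initialize().
    renderer_->StartPlayingFrom(base::TimeDelta());
    renderer_->SetPlaybackRate(1.0f);
    renderer_->SetVolume(1.0f);
  }
  void OnStatistics(const PipelineStatistics& stats) {}
  void OnEnded() {}
  void OnError(PipelineStatus error) {}
  void OnBufferingStateChange(BufferingState state) {}
  base::TimeDelta GetDuration() { return base::TimeDelta::FromSeconds(10); }

  scoped_ptr<Renderer> renderer_;
};

}  // namespace media

Once |buffering_state_cb| reports BUFFERING_HAVE_ENOUGH the renderer is in steady-state playback, and |ended_cb| fires only after every active stream has finished.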
diff --git a/media/filters/audio_renderer_impl.cc b/media/filters/audio_renderer_impl.cc
index fbf4c16..fddb7ae 100644
--- a/media/filters/audio_renderer_impl.cc
+++ b/media/filters/audio_renderer_impl.cc
@@ -258,7 +258,10 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
state_ = kInitializing;
- init_cb_ = init_cb;
+ // Always post |init_cb_| because |this| could be destroyed if initialization
+ // failed.
+ init_cb_ = BindToCurrentLoop(init_cb);
+
time_cb_ = time_cb;
buffering_state_cb_ = buffering_state_cb;
ended_cb_ = ended_cb;
@@ -348,7 +351,6 @@ void AudioRendererImpl::OnAudioBufferStreamInitialized(bool success) {
}
DCHECK(!sink_playing_);
-
base::ResetAndReturn(&init_cb_).Run(PIPELINE_OK);
}
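The change to AudioRendererImpl::Initialize() above posts |init_cb_| via BindToCurrentLoop() rather than running it synchronously; the same pattern is applied to VideoRendererImpl::Initialize() later in this patch. A minimal sketch of that pattern follows, assuming a message loop on the current thread; HypotheticalWidget and DoExpensiveSetup() are invented names, while BindToCurrentLoop(), ResetAndReturn() and the PipelineStatus values are the same helpers the patch uses.

#include "base/callback_helpers.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/pipeline_status.h"

namespace media {

class HypotheticalWidget {
 public:
  void Initialize(const PipelineStatusCB& init_cb) {
    // Wrapping with BindToCurrentLoop() makes Run() post a task instead of
    // invoking the callback synchronously, so the owner can safely destroy
    // |this| from inside the callback even when setup fails immediately.
    init_cb_ = BindToCurrentLoop(init_cb);

    bool ok = DoExpensiveSetup();  // Stand-in for the real initialization.
    base::ResetAndReturn(&init_cb_).Run(
        ok ? PIPELINE_OK : PIPELINE_ERROR_INITIALIZATION_FAILED);
  }

 private:
  bool DoExpensiveSetup() { return true; }

  PipelineStatusCB init_cb_;
};

}  // namespace media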
diff --git a/media/filters/pipeline_integration_test_base.cc b/media/filters/pipeline_integration_test_base.cc
index a635286..039a3d8 100644
--- a/media/filters/pipeline_integration_test_base.cc
+++ b/media/filters/pipeline_integration_test_base.cc
@@ -15,6 +15,7 @@
#include "media/filters/ffmpeg_video_decoder.h"
#include "media/filters/file_data_source.h"
#include "media/filters/opus_audio_decoder.h"
+#include "media/filters/renderer_impl.h"
#include "media/filters/vpx_video_decoder.h"
using ::testing::_;
@@ -37,10 +38,6 @@ PipelineIntegrationTestBase::PipelineIntegrationTestBase()
last_video_frame_format_(VideoFrame::UNKNOWN),
hardware_config_(AudioParameters(), AudioParameters()) {
base::MD5Init(&md5_context_);
-
- // Prevent non-deterministic buffering state callbacks from firing (e.g., slow
- // machine, valgrind).
- pipeline_->set_underflow_disabled_for_testing(true);
}
PipelineIntegrationTestBase::~PipelineIntegrationTestBase() {
@@ -132,10 +129,6 @@ bool PipelineIntegrationTestBase::Start(const base::FilePath& file_path,
kTestType test_type) {
hashing_enabled_ = test_type == kHashed;
clockless_playback_ = test_type == kClockless;
- if (clockless_playback_) {
- pipeline_->SetTimeDeltaInterpolatorForTesting(
- new TimeDeltaInterpolator(&dummy_clock_));
- }
return Start(file_path, expected_status);
}
@@ -256,7 +249,7 @@ PipelineIntegrationTestBase::CreateFilterCollection(
new FFmpegVideoDecoder(message_loop_.message_loop_proxy()));
// Disable frame dropping if hashing is enabled.
- scoped_ptr<VideoRenderer> renderer(new VideoRendererImpl(
+ scoped_ptr<VideoRenderer> video_renderer(new VideoRendererImpl(
message_loop_.message_loop_proxy(),
video_decoders.Pass(),
base::Bind(&PipelineIntegrationTestBase::SetDecryptor,
@@ -265,7 +258,6 @@ PipelineIntegrationTestBase::CreateFilterCollection(
base::Bind(&PipelineIntegrationTestBase::OnVideoRendererPaint,
base::Unretained(this)),
false));
- collection->SetVideoRenderer(renderer.Pass());
if (!clockless_playback_) {
audio_sink_ = new NullAudioSink(message_loop_.message_loop_proxy());
@@ -286,7 +278,7 @@ PipelineIntegrationTestBase::CreateFilterCollection(
512);
hardware_config_.UpdateOutputConfig(out_params);
- AudioRendererImpl* audio_renderer_impl = new AudioRendererImpl(
+ scoped_ptr<AudioRenderer> audio_renderer(new AudioRendererImpl(
message_loop_.message_loop_proxy(),
(clockless_playback_)
? static_cast<AudioRendererSink*>(clockless_audio_sink_.get())
@@ -295,11 +287,26 @@ PipelineIntegrationTestBase::CreateFilterCollection(
base::Bind(&PipelineIntegrationTestBase::SetDecryptor,
base::Unretained(this),
decryptor),
- &hardware_config_);
+ &hardware_config_));
if (hashing_enabled_)
audio_sink_->StartAudioHashForTesting();
- scoped_ptr<AudioRenderer> audio_renderer(audio_renderer_impl);
- collection->SetAudioRenderer(audio_renderer.Pass());
+
+ scoped_ptr<RendererImpl> renderer_impl(
+ new RendererImpl(message_loop_.message_loop_proxy(),
+ demuxer_.get(),
+ audio_renderer.Pass(),
+ video_renderer.Pass()));
+
+ // Prevent non-deterministic buffering state callbacks from firing (e.g., slow
+ // machine, valgrind).
+ renderer_impl->DisableUnderflowForTesting();
+
+ if (clockless_playback_) {
+ renderer_impl->SetTimeDeltaInterpolatorForTesting(
+ new TimeDeltaInterpolator(&dummy_clock_));
+ }
+
+ collection->SetRenderer(renderer_impl.PassAs<Renderer>());
return collection.Pass();
}
diff --git a/media/filters/renderer_impl.cc b/media/filters/renderer_impl.cc
new file mode 100644
index 0000000..1dcbe1c
--- /dev/null
+++ b/media/filters/renderer_impl.cc
@@ -0,0 +1,572 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/filters/renderer_impl.h"
+
+#include "base/bind.h"
+#include "base/callback.h"
+#include "base/callback_helpers.h"
+#include "base/compiler_specific.h"
+#include "base/location.h"
+#include "base/single_thread_task_runner.h"
+#include "media/base/audio_renderer.h"
+#include "media/base/demuxer.h"
+#include "media/base/filter_collection.h"
+#include "media/base/time_delta_interpolator.h"
+#include "media/base/time_source.h"
+#include "media/base/video_renderer.h"
+
+namespace media {
+
+RendererImpl::RendererImpl(
+ const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ Demuxer* demuxer,
+ scoped_ptr<AudioRenderer> audio_renderer,
+ scoped_ptr<VideoRenderer> video_renderer)
+ : state_(STATE_UNINITIALIZED),
+ task_runner_(task_runner),
+ demuxer_(demuxer),
+ audio_renderer_(audio_renderer.Pass()),
+ video_renderer_(video_renderer.Pass()),
+ time_source_(NULL),
+ audio_buffering_state_(BUFFERING_HAVE_NOTHING),
+ video_buffering_state_(BUFFERING_HAVE_NOTHING),
+ audio_ended_(false),
+ video_ended_(false),
+ underflow_disabled_for_testing_(false),
+ interpolator_(new TimeDeltaInterpolator(&default_tick_clock_)),
+ interpolation_state_(INTERPOLATION_STOPPED),
+ weak_factory_(this),
+ weak_this_(weak_factory_.GetWeakPtr()) {
+ DVLOG(1) << __FUNCTION__;
+ interpolator_->SetBounds(base::TimeDelta(), base::TimeDelta());
+}
+
+RendererImpl::~RendererImpl() {
+ DVLOG(1) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ audio_renderer_.reset();
+ video_renderer_.reset();
+
+ FireAllPendingCallbacks();
+}
+
+void RendererImpl::Initialize(const PipelineStatusCB& init_cb,
+ const StatisticsCB& statistics_cb,
+ const base::Closure& ended_cb,
+ const PipelineStatusCB& error_cb,
+ const BufferingStateCB& buffering_state_cb,
+ const TimeDeltaCB& get_duration_cb) {
+ DVLOG(1) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_UNINITIALIZED) << state_;
+ DCHECK(!init_cb.is_null());
+ DCHECK(!statistics_cb.is_null());
+ DCHECK(!ended_cb.is_null());
+ DCHECK(!error_cb.is_null());
+ DCHECK(!buffering_state_cb.is_null());
+ DCHECK(!get_duration_cb.is_null());
+ DCHECK(demuxer_->GetStream(DemuxerStream::AUDIO) ||
+ demuxer_->GetStream(DemuxerStream::VIDEO));
+
+ statistics_cb_ = statistics_cb;
+ ended_cb_ = ended_cb;
+ error_cb_ = error_cb;
+ buffering_state_cb_ = buffering_state_cb;
+ get_duration_cb_ = get_duration_cb;
+
+ init_cb_ = init_cb;
+ state_ = STATE_INITIALIZING;
+ InitializeAudioRenderer();
+}
+
+void RendererImpl::Flush(const base::Closure& flush_cb) {
+ DVLOG(2) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_PLAYING) << state_;
+ DCHECK(flush_cb_.is_null());
+
+ {
+ base::AutoLock auto_lock(interpolator_lock_);
+ PauseClockAndStopTicking_Locked();
+ }
+
+ flush_cb_ = flush_cb;
+ state_ = STATE_FLUSHING;
+ FlushAudioRenderer();
+}
+
+void RendererImpl::StartPlayingFrom(base::TimeDelta time) {
+ DVLOG(2) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_PLAYING) << state_;
+
+ {
+ base::AutoLock auto_lock(interpolator_lock_);
+ interpolator_->SetBounds(time, time);
+ }
+
+ if (time_source_)
+ time_source_->SetMediaTime(time);
+ if (audio_renderer_)
+ audio_renderer_->StartPlaying();
+ if (video_renderer_)
+ video_renderer_->StartPlaying();
+}
+
+void RendererImpl::SetPlaybackRate(float playback_rate) {
+ DVLOG(2) << __FUNCTION__ << "(" << playback_rate << ")";
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ // Playback rate changes are only carried out while playing.
+ if (state_ != STATE_PLAYING)
+ return;
+
+ {
+ base::AutoLock auto_lock(interpolator_lock_);
+ interpolator_->SetPlaybackRate(playback_rate);
+ }
+
+ if (time_source_)
+ time_source_->SetPlaybackRate(playback_rate);
+}
+
+void RendererImpl::SetVolume(float volume) {
+ DVLOG(1) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (audio_renderer_)
+ audio_renderer_->SetVolume(volume);
+}
+
+base::TimeDelta RendererImpl::GetMediaTime() {
+ // No BelongsToCurrentThread() checking because this can be called from other
+ // threads.
+ base::AutoLock auto_lock(interpolator_lock_);
+ return interpolator_->GetInterpolatedTime();
+}
+
+bool RendererImpl::HasAudio() {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ return audio_renderer_ != NULL;
+}
+
+bool RendererImpl::HasVideo() {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ return video_renderer_ != NULL;
+}
+
+void RendererImpl::SetCdm(MediaKeys* cdm) {
+ DVLOG(1) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+  // TODO(xhwang): Explore the possibility of moving CDM setting from
+  // WebMediaPlayerImpl to this class. See http://crbug.com/401264
+ NOTREACHED();
+}
+
+void RendererImpl::DisableUnderflowForTesting() {
+ DVLOG(2) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_UNINITIALIZED);
+
+ underflow_disabled_for_testing_ = true;
+}
+
+void RendererImpl::SetTimeDeltaInterpolatorForTesting(
+ TimeDeltaInterpolator* interpolator) {
+ DVLOG(2) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_UNINITIALIZED);
+
+ interpolator_.reset(interpolator);
+}
+
+base::TimeDelta RendererImpl::GetMediaDuration() {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ return get_duration_cb_.Run();
+}
+
+void RendererImpl::InitializeAudioRenderer() {
+ DVLOG(2) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_INITIALIZING) << state_;
+ DCHECK(!init_cb_.is_null());
+
+ PipelineStatusCB done_cb =
+ base::Bind(&RendererImpl::OnAudioRendererInitializeDone, weak_this_);
+
+ if (!demuxer_->GetStream(DemuxerStream::AUDIO)) {
+ audio_renderer_.reset();
+ task_runner_->PostTask(FROM_HERE, base::Bind(done_cb, PIPELINE_OK));
+ return;
+ }
+
+ audio_renderer_->Initialize(
+ demuxer_->GetStream(DemuxerStream::AUDIO),
+ done_cb,
+ base::Bind(&RendererImpl::OnUpdateStatistics, weak_this_),
+ base::Bind(&RendererImpl::OnAudioTimeUpdate, weak_this_),
+ base::Bind(&RendererImpl::OnBufferingStateChanged, weak_this_,
+ &audio_buffering_state_),
+ base::Bind(&RendererImpl::OnAudioRendererEnded, weak_this_),
+ base::Bind(&RendererImpl::OnError, weak_this_));
+}
+
+void RendererImpl::OnAudioRendererInitializeDone(PipelineStatus status) {
+ DVLOG(2) << __FUNCTION__ << ": " << status;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_INITIALIZING) << state_;
+ DCHECK(!init_cb_.is_null());
+
+ if (status != PIPELINE_OK) {
+ audio_renderer_.reset();
+ state_ = STATE_ERROR;
+ base::ResetAndReturn(&init_cb_).Run(status);
+ return;
+ }
+
+ if (audio_renderer_)
+ time_source_ = audio_renderer_->GetTimeSource();
+
+ InitializeVideoRenderer();
+}
+
+void RendererImpl::InitializeVideoRenderer() {
+ DVLOG(2) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_INITIALIZING) << state_;
+ DCHECK(!init_cb_.is_null());
+
+ PipelineStatusCB done_cb =
+ base::Bind(&RendererImpl::OnVideoRendererInitializeDone, weak_this_);
+
+ if (!demuxer_->GetStream(DemuxerStream::VIDEO)) {
+ video_renderer_.reset();
+ task_runner_->PostTask(FROM_HERE, base::Bind(done_cb, PIPELINE_OK));
+ return;
+ }
+
+ video_renderer_->Initialize(
+ demuxer_->GetStream(DemuxerStream::VIDEO),
+ demuxer_->GetLiveness() == Demuxer::LIVENESS_LIVE,
+ done_cb,
+ base::Bind(&RendererImpl::OnUpdateStatistics, weak_this_),
+ base::Bind(&RendererImpl::OnVideoTimeUpdate, weak_this_),
+ base::Bind(&RendererImpl::OnBufferingStateChanged, weak_this_,
+ &video_buffering_state_),
+ base::Bind(&RendererImpl::OnVideoRendererEnded, weak_this_),
+ base::Bind(&RendererImpl::OnError, weak_this_),
+ base::Bind(&RendererImpl::GetMediaTime, base::Unretained(this)),
+ base::Bind(&RendererImpl::GetMediaDuration, base::Unretained(this)));
+}
+
+void RendererImpl::OnVideoRendererInitializeDone(PipelineStatus status) {
+ DVLOG(2) << __FUNCTION__ << ": " << status;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_INITIALIZING) << state_;
+ DCHECK(!init_cb_.is_null());
+
+ if (status != PIPELINE_OK) {
+ audio_renderer_.reset();
+ video_renderer_.reset();
+ state_ = STATE_ERROR;
+ base::ResetAndReturn(&init_cb_).Run(status);
+ return;
+ }
+
+ state_ = STATE_PLAYING;
+ DCHECK(audio_renderer_ || video_renderer_);
+ base::ResetAndReturn(&init_cb_).Run(PIPELINE_OK);
+}
+
+void RendererImpl::FlushAudioRenderer() {
+ DVLOG(1) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_FLUSHING) << state_;
+ DCHECK(!flush_cb_.is_null());
+
+ if (!audio_renderer_) {
+ OnAudioRendererFlushDone();
+ return;
+ }
+
+ audio_renderer_->Flush(
+ base::Bind(&RendererImpl::OnAudioRendererFlushDone, weak_this_));
+}
+
+void RendererImpl::OnAudioRendererFlushDone() {
+ DVLOG(1) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (state_ == STATE_ERROR) {
+ DCHECK(flush_cb_.is_null());
+ return;
+ }
+
+ DCHECK_EQ(state_, STATE_FLUSHING) << state_;
+ DCHECK(!flush_cb_.is_null());
+
+ DCHECK_EQ(audio_buffering_state_, BUFFERING_HAVE_NOTHING);
+ audio_ended_ = false;
+ FlushVideoRenderer();
+}
+
+void RendererImpl::FlushVideoRenderer() {
+ DVLOG(1) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_FLUSHING) << state_;
+ DCHECK(!flush_cb_.is_null());
+
+ if (!video_renderer_) {
+ OnVideoRendererFlushDone();
+ return;
+ }
+
+ video_renderer_->Flush(
+ base::Bind(&RendererImpl::OnVideoRendererFlushDone, weak_this_));
+}
+
+void RendererImpl::OnVideoRendererFlushDone() {
+ DVLOG(1) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (state_ == STATE_ERROR) {
+ DCHECK(flush_cb_.is_null());
+ return;
+ }
+
+ DCHECK_EQ(state_, STATE_FLUSHING) << state_;
+ DCHECK(!flush_cb_.is_null());
+
+ DCHECK_EQ(video_buffering_state_, BUFFERING_HAVE_NOTHING);
+ video_ended_ = false;
+ state_ = STATE_PLAYING;
+ base::ResetAndReturn(&flush_cb_).Run();
+}
+
+void RendererImpl::OnAudioTimeUpdate(base::TimeDelta time,
+ base::TimeDelta max_time) {
+ DVLOG(3) << __FUNCTION__ << "(" << time.InMilliseconds()
+ << ", " << max_time.InMilliseconds() << ")";
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_LE(time.InMicroseconds(), max_time.InMicroseconds());
+
+ base::AutoLock auto_lock(interpolator_lock_);
+
+ if (interpolation_state_ == INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE &&
+ time < interpolator_->GetInterpolatedTime()) {
+ return;
+ }
+
+ if (state_ == STATE_FLUSHING)
+ return;
+
+ interpolator_->SetBounds(time, max_time);
+ StartClockIfWaitingForTimeUpdate_Locked();
+}
+
+void RendererImpl::OnVideoTimeUpdate(base::TimeDelta max_time) {
+ DVLOG(3) << __FUNCTION__ << "(" << max_time.InMilliseconds() << ")";
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (audio_renderer_)
+ return;
+
+ if (state_ == STATE_FLUSHING)
+ return;
+
+ base::AutoLock auto_lock(interpolator_lock_);
+ DCHECK_NE(interpolation_state_, INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE);
+ interpolator_->SetUpperBound(max_time);
+}
+
+void RendererImpl::OnUpdateStatistics(const PipelineStatistics& stats) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ statistics_cb_.Run(stats);
+}
+
+void RendererImpl::OnBufferingStateChanged(BufferingState* buffering_state,
+ BufferingState new_buffering_state) {
+ DVLOG(2) << __FUNCTION__ << "(" << *buffering_state << ", "
+ << new_buffering_state << ") "
+ << (buffering_state == &audio_buffering_state_ ? "audio" : "video");
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ bool was_waiting_for_enough_data = WaitingForEnoughData();
+
+ *buffering_state = new_buffering_state;
+
+  // With underflow disabled for testing, ignore renderer underflow updates.
+ if (state_ == STATE_PLAYING && underflow_disabled_for_testing_ &&
+ interpolation_state_ != INTERPOLATION_STOPPED) {
+ DVLOG(2) << "Update ignored because underflow is disabled for testing.";
+ return;
+ }
+
+ // Renderer underflowed.
+ if (!was_waiting_for_enough_data && WaitingForEnoughData()) {
+ PausePlayback();
+
+ // TODO(scherkus): Fire BUFFERING_HAVE_NOTHING callback to alert clients of
+ // underflow state http://crbug.com/144683
+ return;
+ }
+
+ // Renderer prerolled.
+ if (was_waiting_for_enough_data && !WaitingForEnoughData()) {
+ StartPlayback();
+ buffering_state_cb_.Run(BUFFERING_HAVE_ENOUGH);
+ return;
+ }
+}
+
+bool RendererImpl::WaitingForEnoughData() const {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ if (state_ != STATE_PLAYING)
+ return false;
+ if (audio_renderer_ && audio_buffering_state_ != BUFFERING_HAVE_ENOUGH)
+ return true;
+ if (video_renderer_ && video_buffering_state_ != BUFFERING_HAVE_ENOUGH)
+ return true;
+ return false;
+}
+
+void RendererImpl::PausePlayback() {
+ DVLOG(2) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_PLAYING);
+ DCHECK(WaitingForEnoughData());
+
+ base::AutoLock auto_lock(interpolator_lock_);
+ PauseClockAndStopTicking_Locked();
+}
+
+void RendererImpl::StartPlayback() {
+ DVLOG(2) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(state_, STATE_PLAYING);
+ DCHECK_EQ(interpolation_state_, INTERPOLATION_STOPPED);
+ DCHECK(!WaitingForEnoughData());
+
+ if (time_source_) {
+    // The audio stream drives the interpolator, so when audio is present we
+    // keep the interpolator paused until we receive a valid time update.
+ base::AutoLock auto_lock(interpolator_lock_);
+ interpolation_state_ = INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE;
+ time_source_->StartTicking();
+ } else {
+ base::AutoLock auto_lock(interpolator_lock_);
+ interpolation_state_ = INTERPOLATION_STARTED;
+ interpolator_->SetUpperBound(get_duration_cb_.Run());
+ interpolator_->StartInterpolating();
+ }
+}
+
+void RendererImpl::PauseClockAndStopTicking_Locked() {
+ DVLOG(2) << __FUNCTION__;
+ interpolator_lock_.AssertAcquired();
+ switch (interpolation_state_) {
+ case INTERPOLATION_STOPPED:
+ return;
+
+ case INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE:
+ time_source_->StopTicking();
+ break;
+
+ case INTERPOLATION_STARTED:
+ if (time_source_)
+ time_source_->StopTicking();
+ interpolator_->StopInterpolating();
+ break;
+ }
+
+ interpolation_state_ = INTERPOLATION_STOPPED;
+}
+
+void RendererImpl::StartClockIfWaitingForTimeUpdate_Locked() {
+ interpolator_lock_.AssertAcquired();
+ if (interpolation_state_ != INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE)
+ return;
+
+ interpolation_state_ = INTERPOLATION_STARTED;
+ interpolator_->StartInterpolating();
+}
+
+void RendererImpl::OnAudioRendererEnded() {
+ DVLOG(2) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (state_ != STATE_PLAYING)
+ return;
+
+ DCHECK(!audio_ended_);
+ audio_ended_ = true;
+
+ // Start clock since there is no more audio to trigger clock updates.
+ {
+ base::AutoLock auto_lock(interpolator_lock_);
+ interpolator_->SetUpperBound(get_duration_cb_.Run());
+ StartClockIfWaitingForTimeUpdate_Locked();
+ }
+
+ RunEndedCallbackIfNeeded();
+}
+
+void RendererImpl::OnVideoRendererEnded() {
+ DVLOG(2) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (state_ != STATE_PLAYING)
+ return;
+
+ DCHECK(!video_ended_);
+ video_ended_ = true;
+
+ RunEndedCallbackIfNeeded();
+}
+
+void RendererImpl::RunEndedCallbackIfNeeded() {
+ DVLOG(2) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (audio_renderer_ && !audio_ended_)
+ return;
+
+ if (video_renderer_ && !video_ended_)
+ return;
+
+ {
+ base::AutoLock auto_lock(interpolator_lock_);
+ PauseClockAndStopTicking_Locked();
+ base::TimeDelta duration = get_duration_cb_.Run();
+ interpolator_->SetBounds(duration, duration);
+ }
+
+ ended_cb_.Run();
+}
+
+void RendererImpl::OnError(PipelineStatus error) {
+ DVLOG(2) << __FUNCTION__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_NE(PIPELINE_OK, error) << "PIPELINE_OK isn't an error!";
+
+ state_ = STATE_ERROR;
+
+  // The Pipeline will destroy |this| as a result of the error.
+ base::ResetAndReturn(&error_cb_).Run(error);
+
+ FireAllPendingCallbacks();
+}
+
+void RendererImpl::FireAllPendingCallbacks() {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (!init_cb_.is_null())
+ base::ResetAndReturn(&init_cb_).Run(PIPELINE_ERROR_ABORT);
+
+ if (!flush_cb_.is_null())
+ base::ResetAndReturn(&flush_cb_).Run();
+}
+
+} // namespace media
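RendererImpl::OnBufferingStateChanged() above folds the per-stream buffering states into one playback decision: pause on underflow, start once every active stream has enough data. The self-contained sketch below restates that aggregation; StreamStates, Transition and the two functions are hypothetical names, and note that the patch currently reports only BUFFERING_HAVE_ENOUGH upstream, with the underflow notification left as a TODO.

#include "media/base/buffering_state.h"

namespace media {

struct StreamStates {
  bool has_audio;
  bool has_video;
  BufferingState audio;
  BufferingState video;
};

enum Transition {
  TRANSITION_NONE,
  TRANSITION_UNDERFLOW,     // Pause playback (PausePlayback()).
  TRANSITION_PREROLL_DONE,  // Start playback, report BUFFERING_HAVE_ENOUGH.
};

// True if any active stream still needs data, as in WaitingForEnoughData().
static bool StillWaitingForEnoughData(const StreamStates& s) {
  return (s.has_audio && s.audio != BUFFERING_HAVE_ENOUGH) ||
         (s.has_video && s.video != BUFFERING_HAVE_ENOUGH);
}

// Applies one per-stream update and classifies the resulting transition.
static Transition ApplyBufferingUpdate(StreamStates* s,
                                       bool is_audio,
                                       BufferingState new_state) {
  const bool was_waiting = StillWaitingForEnoughData(*s);
  (is_audio ? s->audio : s->video) = new_state;
  const bool now_waiting = StillWaitingForEnoughData(*s);

  if (!was_waiting && now_waiting)
    return TRANSITION_UNDERFLOW;
  if (was_waiting && !now_waiting)
    return TRANSITION_PREROLL_DONE;
  return TRANSITION_NONE;
}

}  // namespace media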
diff --git a/media/filters/renderer_impl.h b/media/filters/renderer_impl.h
new file mode 100644
index 0000000..d0bd8e5
--- /dev/null
+++ b/media/filters/renderer_impl.h
@@ -0,0 +1,201 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FILTERS_RENDERER_IMPL_H_
+#define MEDIA_FILTERS_RENDERER_IMPL_H_
+
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/memory/weak_ptr.h"
+#include "base/synchronization/lock.h"
+#include "base/time/clock.h"
+#include "base/time/default_tick_clock.h"
+#include "base/time/time.h"
+#include "media/base/buffering_state.h"
+#include "media/base/media_export.h"
+#include "media/base/pipeline_status.h"
+#include "media/base/renderer.h"
+
+namespace base {
+class SingleThreadTaskRunner;
+}
+
+namespace media {
+
+class AudioRenderer;
+class Demuxer;
+class FilterCollection;
+class TimeDeltaInterpolator;
+class TimeSource;
+class VideoRenderer;
+
+class MEDIA_EXPORT RendererImpl : public Renderer {
+ public:
+  // Renders the audio/video streams in |demuxer| using the |audio_renderer|
+  // and |video_renderer| provided. All methods except GetMediaTime() run on
+  // |task_runner|; GetMediaTime() runs on the render main thread because it
+  // is part of the synchronous JS API. The |get_duration_cb| passed to
+  // Initialize() is used to get the duration of the stream.
+ RendererImpl(const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ Demuxer* demuxer,
+ scoped_ptr<AudioRenderer> audio_renderer,
+ scoped_ptr<VideoRenderer> video_renderer);
+
+ virtual ~RendererImpl();
+
+ // Renderer implementation.
+ virtual void Initialize(const PipelineStatusCB& init_cb,
+ const StatisticsCB& statistics_cb,
+ const base::Closure& ended_cb,
+ const PipelineStatusCB& error_cb,
+ const BufferingStateCB& buffering_state_cb,
+ const TimeDeltaCB& get_duration_cb) OVERRIDE;
+ virtual void Flush(const base::Closure& flush_cb) OVERRIDE;
+ virtual void StartPlayingFrom(base::TimeDelta time) OVERRIDE;
+ virtual void SetPlaybackRate(float playback_rate) OVERRIDE;
+ virtual void SetVolume(float volume) OVERRIDE;
+ virtual base::TimeDelta GetMediaTime() OVERRIDE;
+ virtual bool HasAudio() OVERRIDE;
+ virtual bool HasVideo() OVERRIDE;
+ virtual void SetCdm(MediaKeys* cdm) OVERRIDE;
+
+ // Helper functions for testing purposes. Must be called before Initialize().
+ void DisableUnderflowForTesting();
+ void SetTimeDeltaInterpolatorForTesting(TimeDeltaInterpolator* interpolator);
+
+ private:
+ enum State {
+ STATE_UNINITIALIZED,
+ STATE_INITIALIZING,
+ STATE_FLUSHING,
+ STATE_PLAYING,
+ STATE_ERROR
+ };
+
+ base::TimeDelta GetMediaDuration();
+
+ // Helper functions and callbacks for Initialize().
+ void InitializeAudioRenderer();
+ void OnAudioRendererInitializeDone(PipelineStatus status);
+ void InitializeVideoRenderer();
+ void OnVideoRendererInitializeDone(PipelineStatus status);
+
+ // Helper functions and callbacks for Flush().
+ void FlushAudioRenderer();
+ void OnAudioRendererFlushDone();
+ void FlushVideoRenderer();
+ void OnVideoRendererFlushDone();
+
+ // Callback executed by audio renderer to update clock time.
+ void OnAudioTimeUpdate(base::TimeDelta time, base::TimeDelta max_time);
+
+ // Callback executed by video renderer to update clock time.
+ void OnVideoTimeUpdate(base::TimeDelta max_time);
+
+ // Callback executed by filters to update statistics.
+ void OnUpdateStatistics(const PipelineStatistics& stats);
+
+  // Collection of callback methods and helpers for tracking changes in
+  // buffering state and transitions between paused/underflow and playing
+  // states.
+  //
+  // While in STATE_PLAYING:
+  //   - A waiting to non-waiting transition indicates preroll has completed
+  //     and StartPlayback() should be called.
+  //   - A non-waiting to waiting transition indicates underflow has occurred
+  //     and PausePlayback() should be called.
+ void OnBufferingStateChanged(BufferingState* buffering_state,
+ BufferingState new_buffering_state);
+ bool WaitingForEnoughData() const;
+ void PausePlayback();
+ void StartPlayback();
+
+ void PauseClockAndStopTicking_Locked();
+ void StartClockIfWaitingForTimeUpdate_Locked();
+
+ // Callbacks executed when a renderer has ended.
+ void OnAudioRendererEnded();
+ void OnVideoRendererEnded();
+ void RunEndedCallbackIfNeeded();
+
+ // Callback executed when a runtime error happens.
+ void OnError(PipelineStatus error);
+
+ void FireAllPendingCallbacks();
+
+ State state_;
+
+  // Task runner used to execute renderer tasks.
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+
+ scoped_ptr<FilterCollection> filter_collection_;
+ Demuxer* demuxer_;
+
+ // Permanent callback to get the media duration.
+ TimeDeltaCB get_duration_cb_;
+
+ // Permanent callbacks to notify various renderer states/stats.
+ StatisticsCB statistics_cb_;
+ base::Closure ended_cb_;
+ PipelineStatusCB error_cb_;
+ BufferingStateCB buffering_state_cb_;
+
+ // Temporary callback used for Initialize() and Flush().
+ PipelineStatusCB init_cb_;
+ base::Closure flush_cb_;
+
+ scoped_ptr<AudioRenderer> audio_renderer_;
+ scoped_ptr<VideoRenderer> video_renderer_;
+
+ // Renderer-provided time source used to control playback.
+ TimeSource* time_source_;
+
+ // The time to start playback from after starting/seeking has completed.
+ base::TimeDelta start_time_;
+
+ BufferingState audio_buffering_state_;
+ BufferingState video_buffering_state_;
+
+ // Whether we've received the audio/video ended events.
+ bool audio_ended_;
+ bool video_ended_;
+
+ bool underflow_disabled_for_testing_;
+
+  // Protects the time interpolation members |interpolator_|,
+  // |default_tick_clock_| and |interpolation_state_|, since |interpolator_|
+  // can be used on different threads (see GetMediaTime()).
+ mutable base::Lock interpolator_lock_;
+
+ // Tracks the most recent media time update and provides interpolated values
+ // as playback progresses.
+ scoped_ptr<TimeDeltaInterpolator> interpolator_;
+
+ // base::TickClock used by |interpolator_|.
+ // TODO(xhwang): This can be TimeDeltaInterpolator's implementation detail.
+ base::DefaultTickClock default_tick_clock_;
+
+ enum InterpolationState {
+ // Audio (if present) is not rendering. Time isn't being interpolated.
+ INTERPOLATION_STOPPED,
+
+ // Audio (if present) is rendering. Time isn't being interpolated.
+ INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE,
+
+ // Audio (if present) is rendering. Time is being interpolated.
+ INTERPOLATION_STARTED,
+ };
+
+ InterpolationState interpolation_state_;
+
+ // NOTE: Weak pointers must be invalidated before all other member variables.
+ base::WeakPtrFactory<RendererImpl> weak_factory_;
+ base::WeakPtr<RendererImpl> weak_this_;
+
+ DISALLOW_COPY_AND_ASSIGN(RendererImpl);
+};
+
+} // namespace media
+
+#endif // MEDIA_FILTERS_RENDERER_IMPL_H_
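The private InterpolationState enum above encodes when interpolated media time can be trusted: with audio present, ticking starts immediately but interpolation waits for the first audio time update. A minimal, self-contained sketch of those transitions follows; the HypotheticalInterpolationStateMachine class is invented for illustration and mirrors StartPlayback(), StartClockIfWaitingForTimeUpdate_Locked() and PauseClockAndStopTicking_Locked() in renderer_impl.cc.

#include <cassert>

namespace media {

enum InterpolationState {
  INTERPOLATION_STOPPED,
  INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE,
  INTERPOLATION_STARTED,
};

class HypotheticalInterpolationStateMachine {
 public:
  HypotheticalInterpolationStateMachine() : state_(INTERPOLATION_STOPPED) {}

  // Mirrors RendererImpl::StartPlayback(): with audio we wait for the first
  // audio time update before trusting interpolated time.
  void StartPlayback(bool has_audio) {
    assert(state_ == INTERPOLATION_STOPPED);
    state_ = has_audio ? INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE
                       : INTERPOLATION_STARTED;
  }

  // Mirrors StartClockIfWaitingForTimeUpdate_Locked().
  void OnAudioTimeUpdate() {
    if (state_ == INTERPOLATION_WAITING_FOR_AUDIO_TIME_UPDATE)
      state_ = INTERPOLATION_STARTED;
  }

  // Mirrors PauseClockAndStopTicking_Locked(): any state collapses to
  // INTERPOLATION_STOPPED.
  void PausePlayback() { state_ = INTERPOLATION_STOPPED; }

  InterpolationState state() const { return state_; }

 private:
  InterpolationState state_;
};

}  // namespace media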
diff --git a/media/filters/renderer_impl_unittest.cc b/media/filters/renderer_impl_unittest.cc
new file mode 100644
index 0000000..7e0b1cb
--- /dev/null
+++ b/media/filters/renderer_impl_unittest.cc
@@ -0,0 +1,554 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <vector>
+
+#include "base/bind.h"
+#include "base/message_loop/message_loop.h"
+#include "base/run_loop.h"
+#include "base/test/simple_test_tick_clock.h"
+#include "media/base/gmock_callback_support.h"
+#include "media/base/mock_filters.h"
+#include "media/base/test_helpers.h"
+#include "media/base/time_delta_interpolator.h"
+#include "media/filters/renderer_impl.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::InSequence;
+using ::testing::Mock;
+using ::testing::Return;
+using ::testing::SaveArg;
+using ::testing::StrictMock;
+
+namespace media {
+
+const int64 kStartPlayingTimeInMs = 100;
+const int64 kDurationInMs = 3000;
+const int64 kAudioUpdateTimeMs = 150;
+const int64 kAudioUpdateMaxTimeMs = 1000;
+
+ACTION_P2(SetBufferingState, cb, buffering_state) {
+ cb->Run(buffering_state);
+}
+
+ACTION_P3(UpdateAudioTime, cb, time, max_time) {
+ cb->Run(base::TimeDelta::FromMilliseconds(time),
+ base::TimeDelta::FromMilliseconds(max_time));
+}
+
+ACTION_P2(AudioError, cb, error) {
+ cb->Run(error);
+}
+
+static base::TimeDelta GetDuration() {
+ return base::TimeDelta::FromMilliseconds(kDurationInMs);
+}
+
+class RendererImplTest : public ::testing::Test {
+ public:
+  // Used for setting expectations on renderer callbacks. Using a StrictMock
+  // also lets us test for missing callbacks.
+ class CallbackHelper {
+ public:
+ CallbackHelper() {}
+ virtual ~CallbackHelper() {}
+
+ MOCK_METHOD1(OnInitialize, void(PipelineStatus));
+ MOCK_METHOD0(OnFlushed, void());
+ MOCK_METHOD0(OnEnded, void());
+ MOCK_METHOD1(OnError, void(PipelineStatus));
+ MOCK_METHOD1(OnUpdateStatistics, void(const PipelineStatistics&));
+ MOCK_METHOD1(OnBufferingStateChange, void(BufferingState));
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(CallbackHelper);
+ };
+
+ RendererImplTest()
+ : demuxer_(new StrictMock<MockDemuxer>()),
+ video_renderer_(new StrictMock<MockVideoRenderer>()),
+ audio_renderer_(new StrictMock<MockAudioRenderer>()),
+ renderer_impl_(
+ new RendererImpl(message_loop_.message_loop_proxy(),
+ demuxer_.get(),
+ scoped_ptr<AudioRenderer>(audio_renderer_),
+ scoped_ptr<VideoRenderer>(video_renderer_))) {
+    // CreateAudioStream() and CreateVideoStream() overwrite this with
+    // expectations for the specific non-NULL streams they create.
+ DemuxerStream* null_pointer = NULL;
+ EXPECT_CALL(*demuxer_, GetStream(_))
+ .WillRepeatedly(Return(null_pointer));
+ EXPECT_CALL(*demuxer_, GetLiveness())
+ .WillRepeatedly(Return(Demuxer::LIVENESS_UNKNOWN));
+ }
+
+ virtual ~RendererImplTest() {
+ renderer_impl_.reset();
+ base::RunLoop().RunUntilIdle();
+ }
+
+ protected:
+ typedef std::vector<MockDemuxerStream*> MockDemuxerStreamVector;
+
+ scoped_ptr<StrictMock<MockDemuxerStream> > CreateStream(
+ DemuxerStream::Type type) {
+ scoped_ptr<StrictMock<MockDemuxerStream> > stream(
+ new StrictMock<MockDemuxerStream>(type));
+ return stream.Pass();
+ }
+
+ // Sets up expectations to allow the audio renderer to initialize.
+ void SetAudioRendererInitializeExpectations(PipelineStatus status) {
+ EXPECT_CALL(*audio_renderer_,
+ Initialize(audio_stream_.get(), _, _, _, _, _, _))
+ .WillOnce(DoAll(SaveArg<3>(&audio_time_cb_),
+ SaveArg<4>(&audio_buffering_state_cb_),
+ SaveArg<5>(&audio_ended_cb_),
+ SaveArg<6>(&audio_error_cb_),
+ RunCallback<1>(status)));
+ if (status == PIPELINE_OK) {
+ EXPECT_CALL(*audio_renderer_, GetTimeSource())
+ .WillOnce(Return(&time_source_));
+ }
+ }
+
+ // Sets up expectations to allow the video renderer to initialize.
+ void SetVideoRendererInitializeExpectations(PipelineStatus status) {
+ EXPECT_CALL(*video_renderer_,
+ Initialize(video_stream_.get(), _, _, _, _, _, _, _, _, _))
+ .WillOnce(DoAll(SaveArg<5>(&video_buffering_state_cb_),
+ SaveArg<6>(&video_ended_cb_),
+ RunCallback<2>(status)));
+ }
+
+ void InitializeAndExpect(PipelineStatus start_status) {
+ EXPECT_CALL(callbacks_, OnInitialize(start_status));
+
+ renderer_impl_->Initialize(
+ base::Bind(&CallbackHelper::OnInitialize,
+ base::Unretained(&callbacks_)),
+ base::Bind(&CallbackHelper::OnUpdateStatistics,
+ base::Unretained(&callbacks_)),
+ base::Bind(&CallbackHelper::OnEnded, base::Unretained(&callbacks_)),
+ base::Bind(&CallbackHelper::OnError, base::Unretained(&callbacks_)),
+ base::Bind(&CallbackHelper::OnBufferingStateChange,
+ base::Unretained(&callbacks_)),
+ base::Bind(&GetDuration));
+ base::RunLoop().RunUntilIdle();
+ }
+
+ void CreateAudioStream() {
+ audio_stream_ = CreateStream(DemuxerStream::AUDIO);
+ streams_.push_back(audio_stream_.get());
+ EXPECT_CALL(*demuxer_, GetStream(DemuxerStream::AUDIO))
+ .WillRepeatedly(Return(audio_stream_.get()));
+ }
+
+ void CreateVideoStream() {
+ video_stream_ = CreateStream(DemuxerStream::VIDEO);
+ video_stream_->set_video_decoder_config(video_decoder_config_);
+ streams_.push_back(video_stream_.get());
+ EXPECT_CALL(*demuxer_, GetStream(DemuxerStream::VIDEO))
+ .WillRepeatedly(Return(video_stream_.get()));
+ }
+
+ void CreateAudioAndVideoStream() {
+ CreateAudioStream();
+ CreateVideoStream();
+ }
+
+ void InitializeWithAudio() {
+ CreateAudioStream();
+ SetAudioRendererInitializeExpectations(PIPELINE_OK);
+ InitializeAndExpect(PIPELINE_OK);
+ }
+
+ void InitializeWithVideo() {
+ CreateVideoStream();
+ SetVideoRendererInitializeExpectations(PIPELINE_OK);
+ InitializeAndExpect(PIPELINE_OK);
+ }
+
+ void InitializeWithAudioAndVideo() {
+ CreateAudioAndVideoStream();
+ SetAudioRendererInitializeExpectations(PIPELINE_OK);
+ SetVideoRendererInitializeExpectations(PIPELINE_OK);
+ InitializeAndExpect(PIPELINE_OK);
+ }
+
+ void Play() {
+ DCHECK(audio_stream_ || video_stream_);
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH));
+
+ if (audio_stream_) {
+ EXPECT_CALL(time_source_,
+ SetMediaTime(base::TimeDelta::FromMilliseconds(
+ kStartPlayingTimeInMs)));
+ EXPECT_CALL(time_source_, StartTicking());
+ EXPECT_CALL(*audio_renderer_, StartPlaying())
+ .WillOnce(SetBufferingState(&audio_buffering_state_cb_,
+ BUFFERING_HAVE_ENOUGH));
+ }
+
+ if (video_stream_) {
+ EXPECT_CALL(*video_renderer_, StartPlaying())
+ .WillOnce(SetBufferingState(&video_buffering_state_cb_,
+ BUFFERING_HAVE_ENOUGH));
+ }
+
+ renderer_impl_->StartPlayingFrom(
+ base::TimeDelta::FromMilliseconds(kStartPlayingTimeInMs));
+ base::RunLoop().RunUntilIdle();
+ }
+
+ void Flush(bool underflowed) {
+ if (audio_stream_) {
+ if (!underflowed)
+ EXPECT_CALL(time_source_, StopTicking());
+ EXPECT_CALL(*audio_renderer_, Flush(_))
+ .WillOnce(DoAll(SetBufferingState(&audio_buffering_state_cb_,
+ BUFFERING_HAVE_NOTHING),
+ RunClosure<0>()));
+ }
+
+ if (video_stream_) {
+ EXPECT_CALL(*video_renderer_, Flush(_))
+ .WillOnce(DoAll(SetBufferingState(&video_buffering_state_cb_,
+ BUFFERING_HAVE_NOTHING),
+ RunClosure<0>()));
+ }
+
+ EXPECT_CALL(callbacks_, OnFlushed());
+
+ renderer_impl_->Flush(
+ base::Bind(&CallbackHelper::OnFlushed, base::Unretained(&callbacks_)));
+ base::RunLoop().RunUntilIdle();
+ }
+
+ void SetPlaybackRate(float playback_rate) {
+ EXPECT_CALL(time_source_, SetPlaybackRate(playback_rate));
+ renderer_impl_->SetPlaybackRate(playback_rate);
+ base::RunLoop().RunUntilIdle();
+ }
+
+ int64 GetMediaTimeMs() {
+ return renderer_impl_->GetMediaTime().InMilliseconds();
+ }
+
+ bool IsMediaTimeAdvancing(float playback_rate) {
+ int64 start_time_ms = GetMediaTimeMs();
+ const int64 time_to_advance_ms = 100;
+
+ test_tick_clock_.Advance(
+ base::TimeDelta::FromMilliseconds(time_to_advance_ms));
+
+ if (GetMediaTimeMs() == start_time_ms + time_to_advance_ms * playback_rate)
+ return true;
+
+ DCHECK_EQ(start_time_ms, GetMediaTimeMs());
+ return false;
+ }
+
+ bool IsMediaTimeAdvancing() {
+ return IsMediaTimeAdvancing(1.0f);
+ }
+
+ // Fixture members.
+ base::MessageLoop message_loop_;
+ StrictMock<CallbackHelper> callbacks_;
+ base::SimpleTestTickClock test_tick_clock_;
+
+ scoped_ptr<StrictMock<MockDemuxer> > demuxer_;
+ StrictMock<MockVideoRenderer>* video_renderer_;
+ StrictMock<MockAudioRenderer>* audio_renderer_;
+ scoped_ptr<RendererImpl> renderer_impl_;
+
+ StrictMock<MockTimeSource> time_source_;
+ scoped_ptr<StrictMock<MockDemuxerStream> > audio_stream_;
+ scoped_ptr<StrictMock<MockDemuxerStream> > video_stream_;
+ MockDemuxerStreamVector streams_;
+ AudioRenderer::TimeCB audio_time_cb_;
+ BufferingStateCB audio_buffering_state_cb_;
+ BufferingStateCB video_buffering_state_cb_;
+ base::Closure audio_ended_cb_;
+ base::Closure video_ended_cb_;
+ PipelineStatusCB audio_error_cb_;
+ VideoDecoderConfig video_decoder_config_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(RendererImplTest);
+};
+
+TEST_F(RendererImplTest, DestroyBeforeInitialize) {
+ // |renderer_impl_| will be destroyed in the dtor.
+}
+
+TEST_F(RendererImplTest, InitializeWithAudio) {
+ InitializeWithAudio();
+}
+
+TEST_F(RendererImplTest, InitializeWithVideo) {
+ InitializeWithVideo();
+}
+
+TEST_F(RendererImplTest, InitializeWithAudioVideo) {
+ InitializeWithAudioAndVideo();
+}
+
+TEST_F(RendererImplTest, InitializeWithAudio_Failed) {
+ CreateAudioStream();
+ SetAudioRendererInitializeExpectations(PIPELINE_ERROR_INITIALIZATION_FAILED);
+ InitializeAndExpect(PIPELINE_ERROR_INITIALIZATION_FAILED);
+}
+
+TEST_F(RendererImplTest, InitializeWithVideo_Failed) {
+ CreateVideoStream();
+ SetVideoRendererInitializeExpectations(PIPELINE_ERROR_INITIALIZATION_FAILED);
+ InitializeAndExpect(PIPELINE_ERROR_INITIALIZATION_FAILED);
+}
+
+TEST_F(RendererImplTest, InitializeWithAudioVideo_AudioRendererFailed) {
+ CreateAudioAndVideoStream();
+ SetAudioRendererInitializeExpectations(PIPELINE_ERROR_INITIALIZATION_FAILED);
+ // VideoRenderer::Initialize() should not be called.
+ InitializeAndExpect(PIPELINE_ERROR_INITIALIZATION_FAILED);
+}
+
+TEST_F(RendererImplTest, InitializeWithAudioVideo_VideoRendererFailed) {
+ CreateAudioAndVideoStream();
+ SetAudioRendererInitializeExpectations(PIPELINE_OK);
+ SetVideoRendererInitializeExpectations(PIPELINE_ERROR_INITIALIZATION_FAILED);
+ InitializeAndExpect(PIPELINE_ERROR_INITIALIZATION_FAILED);
+}
+
+TEST_F(RendererImplTest, StartPlayingFrom) {
+ InitializeWithAudioAndVideo();
+ Play();
+}
+
+TEST_F(RendererImplTest, FlushAfterInitialization) {
+ InitializeWithAudioAndVideo();
+ Flush(true);
+}
+
+TEST_F(RendererImplTest, FlushAfterPlay) {
+ InitializeWithAudioAndVideo();
+ Play();
+ Flush(false);
+}
+
+TEST_F(RendererImplTest, FlushAfterUnderflow) {
+ InitializeWithAudioAndVideo();
+ Play();
+
+ // Simulate underflow.
+ EXPECT_CALL(time_source_, StopTicking());
+ audio_buffering_state_cb_.Run(BUFFERING_HAVE_NOTHING);
+
+ // Flush while underflowed. We shouldn't call StopTicking() again.
+ Flush(true);
+}
+
+TEST_F(RendererImplTest, SetPlaybackRate) {
+ InitializeWithAudioAndVideo();
+ SetPlaybackRate(1.0f);
+ SetPlaybackRate(2.0f);
+}
+
+TEST_F(RendererImplTest, SetVolume) {
+ InitializeWithAudioAndVideo();
+ EXPECT_CALL(*audio_renderer_, SetVolume(2.0f));
+ renderer_impl_->SetVolume(2.0f);
+}
+
+TEST_F(RendererImplTest, GetMediaTime) {
+ // Replace what's used for interpolating to simulate wall clock time.
+ renderer_impl_->SetTimeDeltaInterpolatorForTesting(
+ new TimeDeltaInterpolator(&test_tick_clock_));
+
+ InitializeWithAudioAndVideo();
+ Play();
+
+ EXPECT_EQ(kStartPlayingTimeInMs, GetMediaTimeMs());
+
+ // Verify that the clock doesn't advance since it hasn't been started by
+ // a time update from the audio stream.
+ EXPECT_FALSE(IsMediaTimeAdvancing());
+
+  // Provide an initial time update so that the renderer transitions out of
+  // the "waiting for time update" state.
+ audio_time_cb_.Run(base::TimeDelta::FromMilliseconds(kAudioUpdateTimeMs),
+ base::TimeDelta::FromMilliseconds(kAudioUpdateMaxTimeMs));
+ EXPECT_EQ(kAudioUpdateTimeMs, GetMediaTimeMs());
+
+ // Advance the clock so that GetMediaTime() also advances. This also verifies
+ // that the default playback rate is 1.
+ EXPECT_TRUE(IsMediaTimeAdvancing());
+
+ // Verify that playback rate affects the rate GetMediaTime() advances.
+ SetPlaybackRate(2.0f);
+ EXPECT_TRUE(IsMediaTimeAdvancing(2.0f));
+
+ // Verify that GetMediaTime() is bounded by audio max time.
+ DCHECK_GT(GetMediaTimeMs() + 2000, kAudioUpdateMaxTimeMs);
+ test_tick_clock_.Advance(base::TimeDelta::FromMilliseconds(2000));
+ EXPECT_EQ(kAudioUpdateMaxTimeMs, GetMediaTimeMs());
+}
+
+TEST_F(RendererImplTest, AudioStreamShorterThanVideo) {
+ // Replace what's used for interpolating to simulate wall clock time.
+ renderer_impl_->SetTimeDeltaInterpolatorForTesting(
+ new TimeDeltaInterpolator(&test_tick_clock_));
+
+ InitializeWithAudioAndVideo();
+ Play();
+
+ EXPECT_EQ(kStartPlayingTimeInMs, GetMediaTimeMs());
+
+ // Verify that the clock doesn't advance since it hasn't been started by
+ // a time update from the audio stream.
+ EXPECT_FALSE(IsMediaTimeAdvancing());
+
+ // Signal end of audio stream.
+ audio_ended_cb_.Run();
+ base::RunLoop().RunUntilIdle();
+
+ // Verify that the clock advances.
+ EXPECT_TRUE(IsMediaTimeAdvancing());
+
+ // Signal end of video stream and make sure OnEnded() callback occurs.
+ EXPECT_CALL(time_source_, StopTicking());
+ EXPECT_CALL(callbacks_, OnEnded());
+ video_ended_cb_.Run();
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST_F(RendererImplTest, AudioTimeUpdateDuringFlush) {
+ // Replace what's used for interpolating to simulate wall clock time.
+ renderer_impl_->SetTimeDeltaInterpolatorForTesting(
+ new TimeDeltaInterpolator(&test_tick_clock_));
+
+ InitializeWithAudio();
+ Play();
+
+  // Provide an initial time update so that the renderer transitions out of
+  // the "waiting for time update" state.
+ audio_time_cb_.Run(base::TimeDelta::FromMilliseconds(kAudioUpdateTimeMs),
+ base::TimeDelta::FromMilliseconds(kAudioUpdateMaxTimeMs));
+ EXPECT_EQ(kAudioUpdateTimeMs, GetMediaTimeMs());
+
+ int64 start_time = GetMediaTimeMs();
+
+ EXPECT_CALL(*audio_renderer_, Flush(_)).WillOnce(DoAll(
+ UpdateAudioTime(
+ &audio_time_cb_, kAudioUpdateTimeMs + 100, kAudioUpdateMaxTimeMs),
+ SetBufferingState(&audio_buffering_state_cb_, BUFFERING_HAVE_NOTHING),
+ RunClosure<0>()));
+ EXPECT_CALL(time_source_, StopTicking());
+ EXPECT_CALL(callbacks_, OnFlushed());
+ renderer_impl_->Flush(
+ base::Bind(&CallbackHelper::OnFlushed, base::Unretained(&callbacks_)));
+
+ // Audio time update during Flush() has no effect.
+ EXPECT_EQ(start_time, GetMediaTimeMs());
+
+ // Verify that the clock doesn't advance since it hasn't been started by
+ // a time update from the audio stream.
+ EXPECT_FALSE(IsMediaTimeAdvancing());
+}
+
+TEST_F(RendererImplTest, PostTimeUpdateDuringDestroy) {
+ InitializeWithAudioAndVideo();
+
+ // Simulate the case where TimeCB is posted during ~AudioRenderer(), which is
+ // triggered in ~Renderer().
+ base::TimeDelta time = base::TimeDelta::FromMilliseconds(100);
+ message_loop_.PostTask(FROM_HERE, base::Bind(audio_time_cb_, time, time));
+
+ renderer_impl_.reset();
+ message_loop_.RunUntilIdle();
+}
+
+TEST_F(RendererImplTest, AudioStreamEnded) {
+ InitializeWithAudio();
+ Play();
+
+ EXPECT_CALL(time_source_, StopTicking());
+ EXPECT_CALL(callbacks_, OnEnded());
+
+ audio_ended_cb_.Run();
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST_F(RendererImplTest, VideoStreamEnded) {
+ InitializeWithVideo();
+ Play();
+
+  // The video stream ending won't affect |time_source_|.
+ EXPECT_CALL(callbacks_, OnEnded());
+
+ video_ended_cb_.Run();
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST_F(RendererImplTest, AudioVideoStreamsEnded) {
+ InitializeWithAudioAndVideo();
+ Play();
+
+ // OnEnded() is called only when all streams have finished.
+ audio_ended_cb_.Run();
+ base::RunLoop().RunUntilIdle();
+
+ EXPECT_CALL(time_source_, StopTicking());
+ EXPECT_CALL(callbacks_, OnEnded());
+
+ video_ended_cb_.Run();
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST_F(RendererImplTest, ErrorAfterInitialize) {
+ InitializeWithAudio();
+ EXPECT_CALL(callbacks_, OnError(PIPELINE_ERROR_DECODE));
+ audio_error_cb_.Run(PIPELINE_ERROR_DECODE);
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST_F(RendererImplTest, ErrorDuringPlaying) {
+ InitializeWithAudio();
+ Play();
+
+ EXPECT_CALL(callbacks_, OnError(PIPELINE_ERROR_DECODE));
+ audio_error_cb_.Run(PIPELINE_ERROR_DECODE);
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST_F(RendererImplTest, ErrorDuringFlush) {
+ InitializeWithAudio();
+ Play();
+
+ InSequence s;
+ EXPECT_CALL(time_source_, StopTicking());
+ EXPECT_CALL(*audio_renderer_, Flush(_)).WillOnce(DoAll(
+ AudioError(&audio_error_cb_, PIPELINE_ERROR_DECODE),
+ RunClosure<0>()));
+ EXPECT_CALL(callbacks_, OnError(PIPELINE_ERROR_DECODE));
+ EXPECT_CALL(callbacks_, OnFlushed());
+ renderer_impl_->Flush(
+ base::Bind(&CallbackHelper::OnFlushed, base::Unretained(&callbacks_)));
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST_F(RendererImplTest, ErrorAfterFlush) {
+ InitializeWithAudio();
+ Play();
+ Flush(false);
+
+ EXPECT_CALL(callbacks_, OnError(PIPELINE_ERROR_DECODE));
+ audio_error_cb_.Run(PIPELINE_ERROR_DECODE);
+ base::RunLoop().RunUntilIdle();
+}
+
+} // namespace media
diff --git a/media/filters/video_renderer_impl.cc b/media/filters/video_renderer_impl.cc
index 25099ca..6866855 100644
--- a/media/filters/video_renderer_impl.cc
+++ b/media/filters/video_renderer_impl.cc
@@ -11,6 +11,7 @@
#include "base/location.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/platform_thread.h"
+#include "media/base/bind_to_current_loop.h"
#include "media/base/buffers.h"
#include "media/base/limits.h"
#include "media/base/pipeline.h"
@@ -126,7 +127,10 @@ void VideoRendererImpl::Initialize(DemuxerStream* stream,
low_delay_ = low_delay;
- init_cb_ = init_cb;
+ // Always post |init_cb_| because |this| could be destroyed if initialization
+ // failed.
+ init_cb_ = BindToCurrentLoop(init_cb);
+
statistics_cb_ = statistics_cb;
max_time_cb_ = max_time_cb;
buffering_state_cb_ = buffering_state_cb;
@@ -292,6 +296,7 @@ void VideoRendererImpl::DropNextReadyFrame_Locked() {
void VideoRendererImpl::FrameReady(VideoFrameStream::Status status,
const scoped_refptr<VideoFrame>& frame) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
base::AutoLock auto_lock(lock_);
DCHECK_NE(state_, kUninitialized);
DCHECK_NE(state_, kFlushed);
@@ -305,7 +310,7 @@ void VideoRendererImpl::FrameReady(VideoFrameStream::Status status,
PipelineStatus error = PIPELINE_ERROR_DECODE;
if (status == VideoFrameStream::DECRYPT_ERROR)
error = PIPELINE_ERROR_DECRYPT;
- error_cb_.Run(error);
+ task_runner_->PostTask(FROM_HERE, base::Bind(error_cb_, error));
return;
}
diff --git a/media/filters/video_renderer_impl_unittest.cc b/media/filters/video_renderer_impl_unittest.cc
index 4f44939..1650aa0 100644
--- a/media/filters/video_renderer_impl_unittest.cc
+++ b/media/filters/video_renderer_impl_unittest.cc
@@ -140,6 +140,7 @@ class VideoRendererImplTest : public ::testing::Test {
void Destroy() {
SCOPED_TRACE("Destroy()");
renderer_.reset();
+ message_loop_.RunUntilIdle();
}
// Parses a string representation of video frames and generates corresponding
@@ -541,7 +542,7 @@ TEST_F(VideoRendererImplTest, Underflow) {
WaitableMessageLoopEvent event;
EXPECT_CALL(mock_cb_, BufferingStateChange(BUFFERING_HAVE_NOTHING))
.WillOnce(RunClosure(event.GetClosure()));
- AdvanceTimeInMs(3000); // Must match kTimeToDeclareHaveNothing.
+ AdvanceTimeInMs(3000); // Must match kTimeToDeclareHaveNothing.
event.RunAndWait();
}
diff --git a/media/media.gyp b/media/media.gyp
index 027cb55..86e97c8 100644
--- a/media/media.gyp
+++ b/media/media.gyp
@@ -304,6 +304,8 @@
'base/player_tracker.h',
'base/ranges.cc',
'base/ranges.h',
+ 'base/renderer.cc',
+ 'base/renderer.h',
'base/sample_format.cc',
'base/sample_format.h',
'base/scoped_histogram_timer.h',
@@ -418,6 +420,8 @@
'filters/in_memory_url_protocol.h',
'filters/opus_audio_decoder.cc',
'filters/opus_audio_decoder.h',
+ 'filters/renderer_impl.cc',
+ 'filters/renderer_impl.h',
'filters/skcanvas_video_renderer.cc',
'filters/skcanvas_video_renderer.h',
'filters/source_buffer_platform.cc',
@@ -1125,6 +1129,7 @@
'filters/in_memory_url_protocol_unittest.cc',
'filters/pipeline_integration_test.cc',
'filters/pipeline_integration_test_base.cc',
+ 'filters/renderer_impl_unittest.cc',
'filters/skcanvas_video_renderer_unittest.cc',
'filters/source_buffer_stream_unittest.cc',
'filters/video_decoder_selector_unittest.cc',
diff --git a/media/tools/player_x11/player_x11.cc b/media/tools/player_x11/player_x11.cc
index 94aac8a..f60d115 100644
--- a/media/tools/player_x11/player_x11.cc
+++ b/media/tools/player_x11/player_x11.cc
@@ -30,6 +30,7 @@
#include "media/filters/ffmpeg_demuxer.h"
#include "media/filters/ffmpeg_video_decoder.h"
#include "media/filters/file_data_source.h"
+#include "media/filters/renderer_impl.h"
#include "media/filters/video_renderer_impl.h"
#include "media/tools/player_x11/data_source_logger.h"
@@ -114,13 +115,12 @@ void InitPipeline(
ScopedVector<media::VideoDecoder> video_decoders;
video_decoders.push_back(new media::FFmpegVideoDecoder(task_runner));
- scoped_ptr<media::VideoRenderer> video_renderer(new media::VideoRendererImpl(
- task_runner,
- video_decoders.Pass(),
- media::SetDecryptorReadyCB(),
- paint_cb,
- true));
- collection->SetVideoRenderer(video_renderer.Pass());
+ scoped_ptr<media::VideoRenderer> video_renderer(
+ new media::VideoRendererImpl(task_runner,
+ video_decoders.Pass(),
+ media::SetDecryptorReadyCB(),
+ paint_cb,
+ true));
ScopedVector<media::AudioDecoder> audio_decoders;
audio_decoders.push_back(new media::FFmpegAudioDecoder(task_runner,
@@ -140,16 +140,21 @@ void InitPipeline(
media::SetDecryptorReadyCB(),
&hardware_config));
- collection->SetAudioRenderer(audio_renderer.Pass());
+ scoped_ptr<media::Renderer> renderer(new media::RendererImpl(
+ task_runner, demuxer, audio_renderer.Pass(), video_renderer.Pass()));
+
+ collection->SetRenderer(renderer.Pass());
base::WaitableEvent event(true, false);
media::PipelineStatus status;
- pipeline->Start(
- collection.Pass(), base::Bind(&DoNothing), base::Bind(&OnStatus),
- base::Bind(&SaveStatusAndSignal, &event, &status),
- base::Bind(&OnMetadata), base::Bind(&OnBufferingStateChanged),
- base::Bind(&DoNothing));
+ pipeline->Start(collection.Pass(),
+ base::Bind(&DoNothing),
+ base::Bind(&OnStatus),
+ base::Bind(&SaveStatusAndSignal, &event, &status),
+ base::Bind(&OnMetadata),
+ base::Bind(&OnBufferingStateChanged),
+ base::Bind(&DoNothing));
// Wait until the pipeline is fully initialized.
event.Wait();