summaryrefslogtreecommitdiffstats
path: root/media/base
diff options
context:
space:
mode:
authorscherkus@chromium.org <scherkus@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2009-08-12 23:52:05 +0000
committerscherkus@chromium.org <scherkus@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2009-08-12 23:52:05 +0000
commit576537844b224ca246713c57e039d19d0dfefbf7 (patch)
treeb9cdb7a157abaa212fa87cfbd18ed05c0f6f71e2 /media/base
parent1cf1f99e52b39e01115eeef712c139dfa63df00e (diff)
downloadchromium_src-576537844b224ca246713c57e039d19d0dfefbf7.zip
chromium_src-576537844b224ca246713c57e039d19d0dfefbf7.tar.gz
chromium_src-576537844b224ca246713c57e039d19d0dfefbf7.tar.bz2
Implemented end-of-stream callback for media::PipelineImpl.
A new method HasEnded() was added to renderer interfaces. Renderers return true when they have both received and rendered an end-of-stream buffer. For audio this translates to sending the very last buffer to the hardware. For video this translates to displaying a black frame after the very last frame has been displayed. Renderers can notify the pipeline that the value of HasEnded() has changed to true via FilterHost::NotifyEnded(). Instead of tracking which renderers have called NotifyEnded(), the pipeline uses the notification to poll every renderer. The ended callback will only be executed once every renderer returns true for HasEnded(). This has a nice benefit of being able to ignore extra NotifyEnded() calls if we have already determined that the pipeline has ended. With the changes to WebMediaPlayerImpl, we should now properly support both the ended event and looping. BUG=16768,17970,18433,18846 TEST=media_unittests, media layout tests, ended event, timeupdate should stop firing, looping should work, seeking after video ends Review URL: http://codereview.chromium.org/164403 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@23255 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'media/base')
-rw-r--r--media/base/filter_host.h4
-rw-r--r--media/base/filters.h8
-rw-r--r--media/base/mock_filter_host.h1
-rw-r--r--media/base/mock_filters.h2
-rw-r--r--media/base/pipeline_impl.cc54
-rw-r--r--media/base/pipeline_impl.h16
-rw-r--r--media/base/pipeline_impl_unittest.cc47
-rw-r--r--media/base/video_frame_impl.cc42
-rw-r--r--media/base/video_frame_impl.h5
-rw-r--r--media/base/video_frame_impl_unittest.cc42
10 files changed, 215 insertions, 6 deletions
diff --git a/media/base/filter_host.h b/media/base/filter_host.h
index 17e7631..02ae399 100644
--- a/media/base/filter_host.h
+++ b/media/base/filter_host.h
@@ -57,6 +57,10 @@ class FilterHost {
// Sets the flag to indicate that we are doing streaming.
virtual void SetStreaming(bool streaming) = 0;
+ // Notifies that this filter has ended, typically only called by filter graph
+ // endpoints such as renderers.
+ virtual void NotifyEnded() = 0;
+
// Broadcast a message of type |message| to all other filters from |source|.
virtual void BroadcastMessage(FilterMessage message) = 0;
diff --git a/media/base/filters.h b/media/base/filters.h
index 9a63763..875c8c6 100644
--- a/media/base/filters.h
+++ b/media/base/filters.h
@@ -314,6 +314,10 @@ class VideoRenderer : public MediaFilter {
// Initialize a VideoRenderer with the given VideoDecoder, executing the
// callback upon completion.
virtual void Initialize(VideoDecoder* decoder, FilterCallback* callback) = 0;
+
+ // Returns true if this filter has received and processed an end-of-stream
+ // buffer.
+ virtual bool HasEnded() = 0;
};
@@ -331,6 +335,10 @@ class AudioRenderer : public MediaFilter {
// callback upon completion.
virtual void Initialize(AudioDecoder* decoder, FilterCallback* callback) = 0;
+ // Returns true if this filter has received and processed an end-of-stream
+ // buffer.
+ virtual bool HasEnded() = 0;
+
// Sets the output volume.
virtual void SetVolume(float volume) = 0;
};
diff --git a/media/base/mock_filter_host.h b/media/base/mock_filter_host.h
index c463541..439eda8 100644
--- a/media/base/mock_filter_host.h
+++ b/media/base/mock_filter_host.h
@@ -37,6 +37,7 @@ class MockFilterHost : public FilterHost {
MOCK_METHOD1(SetBufferedBytes, void(int64 buffered_bytes));
MOCK_METHOD2(SetVideoSize, void(size_t width, size_t height));
MOCK_METHOD1(SetStreaming, void(bool streamed));
+ MOCK_METHOD0(NotifyEnded, void());
MOCK_METHOD1(BroadcastMessage, void(FilterMessage message));
private:
diff --git a/media/base/mock_filters.h b/media/base/mock_filters.h
index affc0fa..e140818 100644
--- a/media/base/mock_filters.h
+++ b/media/base/mock_filters.h
@@ -242,6 +242,7 @@ class MockVideoRenderer : public VideoRenderer {
// VideoRenderer implementation.
MOCK_METHOD2(Initialize, void(VideoDecoder* decoder,
FilterCallback* callback));
+ MOCK_METHOD0(HasEnded, bool());
protected:
virtual ~MockVideoRenderer() {}
@@ -263,6 +264,7 @@ class MockAudioRenderer : public AudioRenderer {
// AudioRenderer implementation.
MOCK_METHOD2(Initialize, void(AudioDecoder* decoder,
FilterCallback* callback));
+ MOCK_METHOD0(HasEnded, bool());
MOCK_METHOD1(SetVolume, void(float volume));
protected:
diff --git a/media/base/pipeline_impl.cc b/media/base/pipeline_impl.cc
index ccfce70..b9ac047 100644
--- a/media/base/pipeline_impl.cc
+++ b/media/base/pipeline_impl.cc
@@ -149,6 +149,7 @@ bool PipelineImpl::IsInitialized() const {
case kSeeking:
case kStarting:
case kStarted:
+ case kEnded:
return true;
default:
return false;
@@ -201,9 +202,12 @@ void PipelineImpl::SetVolume(float volume) {
}
base::TimeDelta PipelineImpl::GetCurrentTime() const {
+ // TODO(scherkus): perhaps replace checking state_ == kEnded with a bool that
+ // is set/get under the lock, because this is breaching the contract that
+ // |state_| is only accessed on |message_loop_|.
AutoLock auto_lock(lock_);
base::TimeDelta elapsed = clock_.Elapsed();
- if (elapsed > duration_) {
+ if (state_ == kEnded || elapsed > duration_) {
return duration_;
}
return elapsed;
@@ -262,7 +266,15 @@ PipelineError PipelineImpl::GetError() const {
return error_;
}
+void PipelineImpl::SetPipelineEndedCallback(PipelineCallback* ended_callback) {
+ DCHECK(!IsRunning())
+ << "Permanent callbacks should be set before the pipeline has started";
+ ended_callback_.reset(ended_callback);
+}
+
void PipelineImpl::SetPipelineErrorCallback(PipelineCallback* error_callback) {
+ DCHECK(!IsRunning())
+ << "Permanent callbacks should be set before the pipeline has started";
error_callback_.reset(error_callback);
}
@@ -374,6 +386,12 @@ void PipelineImpl::SetStreaming(bool streaming) {
streaming_ = streaming;
}
+void PipelineImpl::NotifyEnded() {
+ DCHECK(IsRunning());
+ message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this, &PipelineImpl::NotifyEndedTask));
+}
+
void PipelineImpl::BroadcastMessage(FilterMessage message) {
DCHECK(IsRunning());
@@ -612,10 +630,10 @@ void PipelineImpl::SeekTask(base::TimeDelta time,
DCHECK_EQ(MessageLoop::current(), message_loop_);
// Suppress seeking if we're not fully started.
- if (state_ != kStarted) {
+ if (state_ != kStarted && state_ != kEnded) {
// TODO(scherkus): should we run the callback? I'm tempted to say the API
// will only execute the first Seek() request.
- LOG(INFO) << "Media pipeline is not in started state, ignoring seek to "
+ LOG(INFO) << "Media pipeline has not started, ignoring seek to "
<< time.InMicroseconds();
delete seek_callback;
return;
@@ -623,7 +641,7 @@ void PipelineImpl::SeekTask(base::TimeDelta time,
// We'll need to pause every filter before seeking. The state transition
// is as follows:
- // kStarted
+ // kStarted/kEnded
// kPausing (for each filter)
// kSeeking (for each filter)
// kStarting (for each filter)
@@ -639,6 +657,34 @@ void PipelineImpl::SeekTask(base::TimeDelta time,
NewCallback(this, &PipelineImpl::OnFilterStateTransition));
}
+void PipelineImpl::NotifyEndedTask() {
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+
+ // We can only end if we were actually playing.
+ if (state_ != kStarted) {
+ return;
+ }
+
+ // Grab the renderers, if they exist.
+ scoped_refptr<AudioRenderer> audio_renderer;
+ scoped_refptr<VideoRenderer> video_renderer;
+ GetFilter(&audio_renderer);
+ GetFilter(&video_renderer);
+ DCHECK(audio_renderer || video_renderer);
+
+ // Make sure every extant renderer has ended.
+ if ((audio_renderer && !audio_renderer->HasEnded()) ||
+ (video_renderer && !video_renderer->HasEnded())) {
+ return;
+ }
+
+ // Transition to ended, executing the callback if present.
+ state_ = kEnded;
+ if (ended_callback_.get()) {
+ ended_callback_->Run();
+ }
+}
+
void PipelineImpl::BroadcastMessageTask(FilterMessage message) {
DCHECK_EQ(MessageLoop::current(), message_loop_);
diff --git a/media/base/pipeline_impl.h b/media/base/pipeline_impl.h
index 4332cba..383969d 100644
--- a/media/base/pipeline_impl.h
+++ b/media/base/pipeline_impl.h
@@ -41,7 +41,9 @@ namespace media {
// | |
// V Seek() |
// [ Started ] --------> [ Pausing (for each filter) ] -'
-//
+// | |
+// | NotifyEnded() Seek() |
+// `-------------> [ Ended ] ---------------------'
//
// SetError()
// [ Any State ] -------------> [ Error ]
@@ -84,8 +86,12 @@ class PipelineImpl : public Pipeline, public FilterHost {
virtual bool IsStreaming() const;
virtual PipelineError GetError() const;
+ // Sets a permanent callback owned by the pipeline that will be executed when
+ // the media reaches the end.
+ virtual void SetPipelineEndedCallback(PipelineCallback* ended_callback);
+
// |error_callback_| will be executed upon an error in the pipeline. If
- // |error_callback_| is NULL, it is ignored. The pipeline takes ownernship
+ // |error_callback_| is NULL, it is ignored. The pipeline takes ownership
// of |error_callback|.
virtual void SetPipelineErrorCallback(PipelineCallback* error_callback);
@@ -103,6 +109,7 @@ class PipelineImpl : public Pipeline, public FilterHost {
kSeeking,
kStarting,
kStarted,
+ kEnded,
kStopped,
kError,
};
@@ -136,6 +143,7 @@ class PipelineImpl : public Pipeline, public FilterHost {
virtual void SetBufferedBytes(int64 buffered_bytes);
virtual void SetVideoSize(size_t width, size_t height);
virtual void SetStreaming(bool streamed);
+ virtual void NotifyEnded();
virtual void BroadcastMessage(FilterMessage message);
// Method called during initialization to insert a mime type into the
@@ -181,6 +189,9 @@ class PipelineImpl : public Pipeline, public FilterHost {
// Carries out notifying filters that we are seeking to a new timestamp.
void SeekTask(base::TimeDelta time, PipelineCallback* seek_callback);
+ // Carries out handling a notification from a filter that it has ended.
+ void NotifyEndedTask();
+
// Carries out message broadcasting on the message loop.
void BroadcastMessageTask(FilterMessage message);
@@ -332,6 +343,7 @@ class PipelineImpl : public Pipeline, public FilterHost {
// Callbacks for various pipeline operations.
scoped_ptr<PipelineCallback> seek_callback_;
scoped_ptr<PipelineCallback> stop_callback_;
+ scoped_ptr<PipelineCallback> ended_callback_;
scoped_ptr<PipelineCallback> error_callback_;
// Vector of our filters and map maintaining the relationship between the
diff --git a/media/base/pipeline_impl_unittest.cc b/media/base/pipeline_impl_unittest.cc
index f3c0297..e7681da 100644
--- a/media/base/pipeline_impl_unittest.cc
+++ b/media/base/pipeline_impl_unittest.cc
@@ -14,6 +14,7 @@
#include "testing/gtest/include/gtest/gtest.h"
using ::testing::DoAll;
+using ::testing::InSequence;
using ::testing::Invoke;
using ::testing::Mock;
using ::testing::NotNull;
@@ -42,6 +43,7 @@ class CallbackHelper {
MOCK_METHOD0(OnStart, void());
MOCK_METHOD0(OnSeek, void());
MOCK_METHOD0(OnStop, void());
+ MOCK_METHOD0(OnEnded, void());
MOCK_METHOD0(OnError, void());
private:
@@ -480,4 +482,49 @@ TEST_F(PipelineImplTest, BroadcastMessage) {
mocks_->audio_renderer()->SetPlaybackRate(1.0f);
}
+TEST_F(PipelineImplTest, EndedCallback) {
+ scoped_refptr<StrictMock<MockDemuxerStream> > audio_stream =
+ new StrictMock<MockDemuxerStream>("audio/x-foo");
+ scoped_refptr<StrictMock<MockDemuxerStream> > video_stream =
+ new StrictMock<MockDemuxerStream>("video/x-foo");
+ MockDemuxerStreamVector streams;
+ streams.push_back(audio_stream);
+ streams.push_back(video_stream);
+
+ // Set our ended callback.
+ pipeline_->SetPipelineEndedCallback(
+ NewCallback(reinterpret_cast<CallbackHelper*>(&callbacks_),
+ &CallbackHelper::OnEnded));
+
+ InitializeDataSource();
+ InitializeDemuxer(&streams, base::TimeDelta());
+ InitializeAudioDecoder(audio_stream);
+ InitializeAudioRenderer();
+ InitializeVideoDecoder(video_stream);
+ InitializeVideoRenderer();
+ InitializePipeline();
+
+ // For convenience to simulate filters calling the methods.
+ FilterHost* host = pipeline_;
+
+ // Due to short circuit evaluation we only need to test a subset of cases.
+ InSequence s;
+ EXPECT_CALL(*mocks_->audio_renderer(), HasEnded())
+ .WillOnce(Return(false));
+ host->NotifyEnded();
+
+ EXPECT_CALL(*mocks_->audio_renderer(), HasEnded())
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mocks_->video_renderer(), HasEnded())
+ .WillOnce(Return(false));
+ host->NotifyEnded();
+
+ EXPECT_CALL(*mocks_->audio_renderer(), HasEnded())
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mocks_->video_renderer(), HasEnded())
+ .WillOnce(Return(true));
+ EXPECT_CALL(callbacks_, OnEnded());
+ host->NotifyEnded();
+}
+
} // namespace media
diff --git a/media/base/video_frame_impl.cc b/media/base/video_frame_impl.cc
index aabda6c..470e297 100644
--- a/media/base/video_frame_impl.cc
+++ b/media/base/video_frame_impl.cc
@@ -52,6 +52,48 @@ void VideoFrameImpl::CreateEmptyFrame(scoped_refptr<VideoFrame>* frame_out) {
*frame_out = new VideoFrameImpl(VideoSurface::EMPTY, 0, 0);
}
+// static
+void VideoFrameImpl::CreateBlackFrame(int width, int height,
+ scoped_refptr<VideoFrame>* frame_out) {
+ DCHECK_GT(width, 0);
+ DCHECK_GT(height, 0);
+
+ // Create our frame.
+ scoped_refptr<VideoFrame> frame;
+ const base::TimeDelta kZero;
+ VideoFrameImpl::CreateFrame(VideoSurface::YV12, width, height, kZero, kZero,
+ &frame);
+ DCHECK(frame);
+
+ // Now set the data to YUV(0,128,128).
+ const uint8 kBlackY = 0x00;
+ const uint8 kBlackUV = 0x80;
+ VideoSurface surface;
+ frame->Lock(&surface);
+ DCHECK_EQ(VideoSurface::YV12, surface.format) << "Expected YV12 surface";
+
+ // Fill the Y plane.
+ for (size_t i = 0; i < surface.height; ++i) {
+ memset(surface.data[VideoSurface::kYPlane], kBlackY, surface.width);
+ surface.data[VideoSurface::kYPlane]
+ += surface.strides[VideoSurface::kYPlane];
+ }
+
+ // Fill the U and V planes.
+ for (size_t i = 0; i < (surface.height / 2); ++i) {
+ memset(surface.data[VideoSurface::kUPlane], kBlackUV, surface.width / 2);
+ memset(surface.data[VideoSurface::kVPlane], kBlackUV, surface.width / 2);
+ surface.data[VideoSurface::kUPlane] +=
+ surface.strides[VideoSurface::kUPlane];
+ surface.data[VideoSurface::kVPlane] +=
+ surface.strides[VideoSurface::kVPlane];
+ }
+ frame->Unlock();
+
+ // Success!
+ *frame_out = frame;
+}
+
static inline size_t RoundUp(size_t value, size_t alignment) {
// Check that |alignment| is a power of 2.
DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
diff --git a/media/base/video_frame_impl.h b/media/base/video_frame_impl.h
index 3a48380..cf660ea 100644
--- a/media/base/video_frame_impl.h
+++ b/media/base/video_frame_impl.h
@@ -26,6 +26,11 @@ class VideoFrameImpl : public VideoFrame {
// timestamp and duration are all 0.
static void CreateEmptyFrame(scoped_refptr<VideoFrame>* frame_out);
+ // Allocates YV12 frame based on |width| and |height|, and sets its data to
+ // the YUV equivalent of RGB(0,0,0).
+ static void CreateBlackFrame(int width, int height,
+ scoped_refptr<VideoFrame>* frame_out);
+
// Implementation of VideoFrame.
virtual bool Lock(VideoSurface* surface);
virtual void Unlock();
diff --git a/media/base/video_frame_impl_unittest.cc b/media/base/video_frame_impl_unittest.cc
index eeec716..45e4553 100644
--- a/media/base/video_frame_impl_unittest.cc
+++ b/media/base/video_frame_impl_unittest.cc
@@ -130,4 +130,46 @@ TEST(VideoFrameImpl, CreateFrame) {
EXPECT_TRUE(frame->IsEndOfStream());
}
+TEST(VideoFrameImpl, CreateBlackFrame) {
+ const size_t kWidth = 2;
+ const size_t kHeight = 2;
+ const uint8 kExpectedYRow[] = { 0, 0 };
+ const uint8 kExpectedUVRow[] = { 128 };
+
+ scoped_refptr<media::VideoFrame> frame;
+ VideoFrameImpl::CreateBlackFrame(kWidth, kHeight, &frame);
+ ASSERT_TRUE(frame);
+
+ // Test basic properties.
+ EXPECT_EQ(0, frame->GetTimestamp().InMicroseconds());
+ EXPECT_EQ(0, frame->GetDuration().InMicroseconds());
+ EXPECT_FALSE(frame->IsEndOfStream());
+
+ // Test surface properties.
+ VideoSurface surface;
+ EXPECT_TRUE(frame->Lock(&surface));
+ EXPECT_EQ(VideoSurface::YV12, surface.format);
+ EXPECT_EQ(kWidth, surface.width);
+ EXPECT_EQ(kHeight, surface.height);
+ EXPECT_EQ(3u, surface.planes);
+
+ // Test surfaces themselves.
+ for (size_t y = 0; y < surface.height; ++y) {
+ EXPECT_EQ(0, memcmp(kExpectedYRow, surface.data[VideoSurface::kYPlane],
+ arraysize(kExpectedYRow)));
+ surface.data[VideoSurface::kYPlane] +=
+ surface.strides[VideoSurface::kYPlane];
+ }
+ for (size_t y = 0; y < surface.height / 2; ++y) {
+ EXPECT_EQ(0, memcmp(kExpectedUVRow, surface.data[VideoSurface::kUPlane],
+ arraysize(kExpectedUVRow)));
+ EXPECT_EQ(0, memcmp(kExpectedUVRow, surface.data[VideoSurface::kVPlane],
+ arraysize(kExpectedUVRow)));
+ surface.data[VideoSurface::kUPlane] +=
+ surface.strides[VideoSurface::kUPlane];
+ surface.data[VideoSurface::kVPlane] +=
+ surface.strides[VideoSurface::kVPlane];
+ }
+}
+
} // namespace media