summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--media/base/filter_host.h4
-rw-r--r--media/base/filters.h8
-rw-r--r--media/base/mock_filter_host.h1
-rw-r--r--media/base/mock_filters.h2
-rw-r--r--media/base/pipeline_impl.cc54
-rw-r--r--media/base/pipeline_impl.h16
-rw-r--r--media/base/pipeline_impl_unittest.cc47
-rw-r--r--media/base/video_frame_impl.cc42
-rw-r--r--media/base/video_frame_impl.h5
-rw-r--r--media/base/video_frame_impl_unittest.cc42
-rw-r--r--media/filters/audio_renderer_base.cc27
-rw-r--r--media/filters/audio_renderer_base.h5
-rw-r--r--media/filters/audio_renderer_base_unittest.cc21
-rw-r--r--media/filters/video_renderer_base.cc118
-rw-r--r--media/filters/video_renderer_base.h6
-rw-r--r--media/filters/video_renderer_base_unittest.cc6
-rw-r--r--webkit/glue/webmediaplayer_impl.cc28
-rw-r--r--webkit/glue/webmediaplayer_impl.h6
18 files changed, 352 insertions, 86 deletions
diff --git a/media/base/filter_host.h b/media/base/filter_host.h
index 17e7631..02ae399 100644
--- a/media/base/filter_host.h
+++ b/media/base/filter_host.h
@@ -57,6 +57,10 @@ class FilterHost {
// Sets the flag to indicate that we are doing streaming.
virtual void SetStreaming(bool streaming) = 0;
+ // Notifies that this filter has ended, typically only called by filter graph
+ // endpoints such as renderers.
+ virtual void NotifyEnded() = 0;
+
// Broadcast a message of type |message| to all other filters from |source|.
virtual void BroadcastMessage(FilterMessage message) = 0;
diff --git a/media/base/filters.h b/media/base/filters.h
index 9a63763..875c8c6 100644
--- a/media/base/filters.h
+++ b/media/base/filters.h
@@ -314,6 +314,10 @@ class VideoRenderer : public MediaFilter {
// Initialize a VideoRenderer with the given VideoDecoder, executing the
// callback upon completion.
virtual void Initialize(VideoDecoder* decoder, FilterCallback* callback) = 0;
+
+ // Returns true if this filter has received and processed an end-of-stream
+ // buffer.
+ virtual bool HasEnded() = 0;
};
@@ -331,6 +335,10 @@ class AudioRenderer : public MediaFilter {
// callback upon completion.
virtual void Initialize(AudioDecoder* decoder, FilterCallback* callback) = 0;
+ // Returns true if this filter has received and processed an end-of-stream
+ // buffer.
+ virtual bool HasEnded() = 0;
+
// Sets the output volume.
virtual void SetVolume(float volume) = 0;
};
diff --git a/media/base/mock_filter_host.h b/media/base/mock_filter_host.h
index c463541..439eda8 100644
--- a/media/base/mock_filter_host.h
+++ b/media/base/mock_filter_host.h
@@ -37,6 +37,7 @@ class MockFilterHost : public FilterHost {
MOCK_METHOD1(SetBufferedBytes, void(int64 buffered_bytes));
MOCK_METHOD2(SetVideoSize, void(size_t width, size_t height));
MOCK_METHOD1(SetStreaming, void(bool streamed));
+ MOCK_METHOD0(NotifyEnded, void());
MOCK_METHOD1(BroadcastMessage, void(FilterMessage message));
private:
diff --git a/media/base/mock_filters.h b/media/base/mock_filters.h
index affc0fa..e140818 100644
--- a/media/base/mock_filters.h
+++ b/media/base/mock_filters.h
@@ -242,6 +242,7 @@ class MockVideoRenderer : public VideoRenderer {
// VideoRenderer implementation.
MOCK_METHOD2(Initialize, void(VideoDecoder* decoder,
FilterCallback* callback));
+ MOCK_METHOD0(HasEnded, bool());
protected:
virtual ~MockVideoRenderer() {}
@@ -263,6 +264,7 @@ class MockAudioRenderer : public AudioRenderer {
// AudioRenderer implementation.
MOCK_METHOD2(Initialize, void(AudioDecoder* decoder,
FilterCallback* callback));
+ MOCK_METHOD0(HasEnded, bool());
MOCK_METHOD1(SetVolume, void(float volume));
protected:
diff --git a/media/base/pipeline_impl.cc b/media/base/pipeline_impl.cc
index ccfce70..b9ac047 100644
--- a/media/base/pipeline_impl.cc
+++ b/media/base/pipeline_impl.cc
@@ -149,6 +149,7 @@ bool PipelineImpl::IsInitialized() const {
case kSeeking:
case kStarting:
case kStarted:
+ case kEnded:
return true;
default:
return false;
@@ -201,9 +202,12 @@ void PipelineImpl::SetVolume(float volume) {
}
base::TimeDelta PipelineImpl::GetCurrentTime() const {
+ // TODO(scherkus): perhaps replace checking state_ == kEnded with a bool that
+ // is set/get under the lock, because this is breaching the contract that
+ // |state_| is only accessed on |message_loop_|.
AutoLock auto_lock(lock_);
base::TimeDelta elapsed = clock_.Elapsed();
- if (elapsed > duration_) {
+ if (state_ == kEnded || elapsed > duration_) {
return duration_;
}
return elapsed;
@@ -262,7 +266,15 @@ PipelineError PipelineImpl::GetError() const {
return error_;
}
+void PipelineImpl::SetPipelineEndedCallback(PipelineCallback* ended_callback) {
+ DCHECK(!IsRunning())
+ << "Permanent callbacks should be set before the pipeline has started";
+ ended_callback_.reset(ended_callback);
+}
+
void PipelineImpl::SetPipelineErrorCallback(PipelineCallback* error_callback) {
+ DCHECK(!IsRunning())
+ << "Permanent callbacks should be set before the pipeline has started";
error_callback_.reset(error_callback);
}
@@ -374,6 +386,12 @@ void PipelineImpl::SetStreaming(bool streaming) {
streaming_ = streaming;
}
+void PipelineImpl::NotifyEnded() {
+ DCHECK(IsRunning());
+ message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this, &PipelineImpl::NotifyEndedTask));
+}
+
void PipelineImpl::BroadcastMessage(FilterMessage message) {
DCHECK(IsRunning());
@@ -612,10 +630,10 @@ void PipelineImpl::SeekTask(base::TimeDelta time,
DCHECK_EQ(MessageLoop::current(), message_loop_);
// Suppress seeking if we're not fully started.
- if (state_ != kStarted) {
+ if (state_ != kStarted && state_ != kEnded) {
// TODO(scherkus): should we run the callback? I'm tempted to say the API
// will only execute the first Seek() request.
- LOG(INFO) << "Media pipeline is not in started state, ignoring seek to "
+ LOG(INFO) << "Media pipeline has not started, ignoring seek to "
<< time.InMicroseconds();
delete seek_callback;
return;
@@ -623,7 +641,7 @@ void PipelineImpl::SeekTask(base::TimeDelta time,
// We'll need to pause every filter before seeking. The state transition
// is as follows:
- // kStarted
+ // kStarted/kEnded
// kPausing (for each filter)
// kSeeking (for each filter)
// kStarting (for each filter)
@@ -639,6 +657,34 @@ void PipelineImpl::SeekTask(base::TimeDelta time,
NewCallback(this, &PipelineImpl::OnFilterStateTransition));
}
+void PipelineImpl::NotifyEndedTask() {
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+
+ // We can only end if we were actually playing.
+ if (state_ != kStarted) {
+ return;
+ }
+
+ // Grab the renderers, if they exist.
+ scoped_refptr<AudioRenderer> audio_renderer;
+ scoped_refptr<VideoRenderer> video_renderer;
+ GetFilter(&audio_renderer);
+ GetFilter(&video_renderer);
+ DCHECK(audio_renderer || video_renderer);
+
+ // Make sure every extant renderer has ended.
+ if ((audio_renderer && !audio_renderer->HasEnded()) ||
+ (video_renderer && !video_renderer->HasEnded())) {
+ return;
+ }
+
+ // Transition to ended, executing the callback if present.
+ state_ = kEnded;
+ if (ended_callback_.get()) {
+ ended_callback_->Run();
+ }
+}
+
void PipelineImpl::BroadcastMessageTask(FilterMessage message) {
DCHECK_EQ(MessageLoop::current(), message_loop_);
diff --git a/media/base/pipeline_impl.h b/media/base/pipeline_impl.h
index 4332cba..383969d 100644
--- a/media/base/pipeline_impl.h
+++ b/media/base/pipeline_impl.h
@@ -41,7 +41,9 @@ namespace media {
// | |
// V Seek() |
// [ Started ] --------> [ Pausing (for each filter) ] -'
-//
+// | |
+// | NotifyEnded() Seek() |
+// `-------------> [ Ended ] ---------------------'
//
// SetError()
// [ Any State ] -------------> [ Error ]
@@ -84,8 +86,12 @@ class PipelineImpl : public Pipeline, public FilterHost {
virtual bool IsStreaming() const;
virtual PipelineError GetError() const;
+ // Sets a permanent callback owned by the pipeline that will be executed when
+ // the media reaches the end.
+ virtual void SetPipelineEndedCallback(PipelineCallback* ended_callback);
+
// |error_callback_| will be executed upon an error in the pipeline. If
- // |error_callback_| is NULL, it is ignored. The pipeline takes ownernship
+ // |error_callback_| is NULL, it is ignored. The pipeline takes ownership
// of |error_callback|.
virtual void SetPipelineErrorCallback(PipelineCallback* error_callback);
@@ -103,6 +109,7 @@ class PipelineImpl : public Pipeline, public FilterHost {
kSeeking,
kStarting,
kStarted,
+ kEnded,
kStopped,
kError,
};
@@ -136,6 +143,7 @@ class PipelineImpl : public Pipeline, public FilterHost {
virtual void SetBufferedBytes(int64 buffered_bytes);
virtual void SetVideoSize(size_t width, size_t height);
virtual void SetStreaming(bool streamed);
+ virtual void NotifyEnded();
virtual void BroadcastMessage(FilterMessage message);
// Method called during initialization to insert a mime type into the
@@ -181,6 +189,9 @@ class PipelineImpl : public Pipeline, public FilterHost {
// Carries out notifying filters that we are seeking to a new timestamp.
void SeekTask(base::TimeDelta time, PipelineCallback* seek_callback);
+ // Carries out handling a notification from a filter that it has ended.
+ void NotifyEndedTask();
+
// Carries out message broadcasting on the message loop.
void BroadcastMessageTask(FilterMessage message);
@@ -332,6 +343,7 @@ class PipelineImpl : public Pipeline, public FilterHost {
// Callbacks for various pipeline operations.
scoped_ptr<PipelineCallback> seek_callback_;
scoped_ptr<PipelineCallback> stop_callback_;
+ scoped_ptr<PipelineCallback> ended_callback_;
scoped_ptr<PipelineCallback> error_callback_;
// Vector of our filters and map maintaining the relationship between the
diff --git a/media/base/pipeline_impl_unittest.cc b/media/base/pipeline_impl_unittest.cc
index f3c0297..e7681da 100644
--- a/media/base/pipeline_impl_unittest.cc
+++ b/media/base/pipeline_impl_unittest.cc
@@ -14,6 +14,7 @@
#include "testing/gtest/include/gtest/gtest.h"
using ::testing::DoAll;
+using ::testing::InSequence;
using ::testing::Invoke;
using ::testing::Mock;
using ::testing::NotNull;
@@ -42,6 +43,7 @@ class CallbackHelper {
MOCK_METHOD0(OnStart, void());
MOCK_METHOD0(OnSeek, void());
MOCK_METHOD0(OnStop, void());
+ MOCK_METHOD0(OnEnded, void());
MOCK_METHOD0(OnError, void());
private:
@@ -480,4 +482,49 @@ TEST_F(PipelineImplTest, BroadcastMessage) {
mocks_->audio_renderer()->SetPlaybackRate(1.0f);
}
+TEST_F(PipelineImplTest, EndedCallback) {
+ scoped_refptr<StrictMock<MockDemuxerStream> > audio_stream =
+ new StrictMock<MockDemuxerStream>("audio/x-foo");
+ scoped_refptr<StrictMock<MockDemuxerStream> > video_stream =
+ new StrictMock<MockDemuxerStream>("video/x-foo");
+ MockDemuxerStreamVector streams;
+ streams.push_back(audio_stream);
+ streams.push_back(video_stream);
+
+ // Set our ended callback.
+ pipeline_->SetPipelineEndedCallback(
+ NewCallback(reinterpret_cast<CallbackHelper*>(&callbacks_),
+ &CallbackHelper::OnEnded));
+
+ InitializeDataSource();
+ InitializeDemuxer(&streams, base::TimeDelta());
+ InitializeAudioDecoder(audio_stream);
+ InitializeAudioRenderer();
+ InitializeVideoDecoder(video_stream);
+ InitializeVideoRenderer();
+ InitializePipeline();
+
+ // For convenience to simulate filters calling the methods.
+ FilterHost* host = pipeline_;
+
+  // Due to short-circuit evaluation we only need to test a subset of cases.
+ InSequence s;
+ EXPECT_CALL(*mocks_->audio_renderer(), HasEnded())
+ .WillOnce(Return(false));
+ host->NotifyEnded();
+
+ EXPECT_CALL(*mocks_->audio_renderer(), HasEnded())
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mocks_->video_renderer(), HasEnded())
+ .WillOnce(Return(false));
+ host->NotifyEnded();
+
+ EXPECT_CALL(*mocks_->audio_renderer(), HasEnded())
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mocks_->video_renderer(), HasEnded())
+ .WillOnce(Return(true));
+ EXPECT_CALL(callbacks_, OnEnded());
+ host->NotifyEnded();
+}
+
} // namespace media
diff --git a/media/base/video_frame_impl.cc b/media/base/video_frame_impl.cc
index aabda6c..470e297 100644
--- a/media/base/video_frame_impl.cc
+++ b/media/base/video_frame_impl.cc
@@ -52,6 +52,48 @@ void VideoFrameImpl::CreateEmptyFrame(scoped_refptr<VideoFrame>* frame_out) {
*frame_out = new VideoFrameImpl(VideoSurface::EMPTY, 0, 0);
}
+// static
+void VideoFrameImpl::CreateBlackFrame(int width, int height,
+ scoped_refptr<VideoFrame>* frame_out) {
+ DCHECK_GT(width, 0);
+ DCHECK_GT(height, 0);
+
+ // Create our frame.
+ scoped_refptr<VideoFrame> frame;
+ const base::TimeDelta kZero;
+ VideoFrameImpl::CreateFrame(VideoSurface::YV12, width, height, kZero, kZero,
+ &frame);
+ DCHECK(frame);
+
+ // Now set the data to YUV(0,128,128).
+ const uint8 kBlackY = 0x00;
+ const uint8 kBlackUV = 0x80;
+ VideoSurface surface;
+ frame->Lock(&surface);
+ DCHECK_EQ(VideoSurface::YV12, surface.format) << "Expected YV12 surface";
+
+ // Fill the Y plane.
+ for (size_t i = 0; i < surface.height; ++i) {
+ memset(surface.data[VideoSurface::kYPlane], kBlackY, surface.width);
+ surface.data[VideoSurface::kYPlane]
+ += surface.strides[VideoSurface::kYPlane];
+ }
+
+ // Fill the U and V planes.
+ for (size_t i = 0; i < (surface.height / 2); ++i) {
+ memset(surface.data[VideoSurface::kUPlane], kBlackUV, surface.width / 2);
+ memset(surface.data[VideoSurface::kVPlane], kBlackUV, surface.width / 2);
+ surface.data[VideoSurface::kUPlane] +=
+ surface.strides[VideoSurface::kUPlane];
+ surface.data[VideoSurface::kVPlane] +=
+ surface.strides[VideoSurface::kVPlane];
+ }
+ frame->Unlock();
+
+ // Success!
+ *frame_out = frame;
+}
+
static inline size_t RoundUp(size_t value, size_t alignment) {
// Check that |alignment| is a power of 2.
DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
diff --git a/media/base/video_frame_impl.h b/media/base/video_frame_impl.h
index 3a48380..cf660ea 100644
--- a/media/base/video_frame_impl.h
+++ b/media/base/video_frame_impl.h
@@ -26,6 +26,11 @@ class VideoFrameImpl : public VideoFrame {
// timestamp and duration are all 0.
static void CreateEmptyFrame(scoped_refptr<VideoFrame>* frame_out);
+ // Allocates YV12 frame based on |width| and |height|, and sets its data to
+ // the YUV equivalent of RGB(0,0,0).
+ static void CreateBlackFrame(int width, int height,
+ scoped_refptr<VideoFrame>* frame_out);
+
// Implementation of VideoFrame.
virtual bool Lock(VideoSurface* surface);
virtual void Unlock();
diff --git a/media/base/video_frame_impl_unittest.cc b/media/base/video_frame_impl_unittest.cc
index eeec716..45e4553 100644
--- a/media/base/video_frame_impl_unittest.cc
+++ b/media/base/video_frame_impl_unittest.cc
@@ -130,4 +130,46 @@ TEST(VideoFrameImpl, CreateFrame) {
EXPECT_TRUE(frame->IsEndOfStream());
}
+TEST(VideoFrameImpl, CreateBlackFrame) {
+ const size_t kWidth = 2;
+ const size_t kHeight = 2;
+ const uint8 kExpectedYRow[] = { 0, 0 };
+ const uint8 kExpectedUVRow[] = { 128 };
+
+ scoped_refptr<media::VideoFrame> frame;
+ VideoFrameImpl::CreateBlackFrame(kWidth, kHeight, &frame);
+ ASSERT_TRUE(frame);
+
+ // Test basic properties.
+ EXPECT_EQ(0, frame->GetTimestamp().InMicroseconds());
+ EXPECT_EQ(0, frame->GetDuration().InMicroseconds());
+ EXPECT_FALSE(frame->IsEndOfStream());
+
+ // Test surface properties.
+ VideoSurface surface;
+ EXPECT_TRUE(frame->Lock(&surface));
+ EXPECT_EQ(VideoSurface::YV12, surface.format);
+ EXPECT_EQ(kWidth, surface.width);
+ EXPECT_EQ(kHeight, surface.height);
+ EXPECT_EQ(3u, surface.planes);
+
+ // Test surfaces themselves.
+ for (size_t y = 0; y < surface.height; ++y) {
+ EXPECT_EQ(0, memcmp(kExpectedYRow, surface.data[VideoSurface::kYPlane],
+ arraysize(kExpectedYRow)));
+ surface.data[VideoSurface::kYPlane] +=
+ surface.strides[VideoSurface::kYPlane];
+ }
+ for (size_t y = 0; y < surface.height / 2; ++y) {
+ EXPECT_EQ(0, memcmp(kExpectedUVRow, surface.data[VideoSurface::kUPlane],
+ arraysize(kExpectedUVRow)));
+ EXPECT_EQ(0, memcmp(kExpectedUVRow, surface.data[VideoSurface::kVPlane],
+ arraysize(kExpectedUVRow)));
+ surface.data[VideoSurface::kUPlane] +=
+ surface.strides[VideoSurface::kUPlane];
+ surface.data[VideoSurface::kVPlane] +=
+ surface.strides[VideoSurface::kVPlane];
+ }
+}
+
} // namespace media
diff --git a/media/filters/audio_renderer_base.cc b/media/filters/audio_renderer_base.cc
index 681bfb2..4b46b1d 100644
--- a/media/filters/audio_renderer_base.cc
+++ b/media/filters/audio_renderer_base.cc
@@ -13,6 +13,8 @@ namespace media {
AudioRendererBase::AudioRendererBase()
: state_(kUninitialized),
+ recieved_end_of_stream_(false),
+ rendered_end_of_stream_(false),
pending_reads_(0) {
}
@@ -61,6 +63,8 @@ void AudioRendererBase::Seek(base::TimeDelta time, FilterCallback* callback) {
// Throw away everything and schedule our reads.
last_fill_buffer_time_ = base::TimeDelta();
+ recieved_end_of_stream_ = false;
+ rendered_end_of_stream_ = false;
// |algorithm_| will request more reads.
algorithm_->FlushBuffers();
@@ -114,6 +118,15 @@ void AudioRendererBase::Initialize(AudioDecoder* decoder,
callback->Run();
}
+bool AudioRendererBase::HasEnded() {
+ AutoLock auto_lock(lock_);
+ if (rendered_end_of_stream_) {
+ DCHECK(algorithm_->IsQueueEmpty())
+ << "Audio queue should be empty if we have rendered end of stream";
+ }
+ return recieved_end_of_stream_ && rendered_end_of_stream_;
+}
+
void AudioRendererBase::OnReadComplete(Buffer* buffer_in) {
AutoLock auto_lock(lock_);
DCHECK(state_ == kPaused || state_ == kSeeking || state_ == kPlaying);
@@ -121,7 +134,9 @@ void AudioRendererBase::OnReadComplete(Buffer* buffer_in) {
--pending_reads_;
// Don't enqueue an end-of-stream buffer because it has no data.
- if (!buffer_in->IsEndOfStream()) {
+ if (buffer_in->IsEndOfStream()) {
+ recieved_end_of_stream_ = true;
+ } else {
// Note: Calling this may schedule more reads.
algorithm_->EnqueueBuffer(buffer_in);
}
@@ -129,7 +144,7 @@ void AudioRendererBase::OnReadComplete(Buffer* buffer_in) {
// Check for our preroll complete condition.
if (state_ == kSeeking) {
DCHECK(seek_callback_.get());
- if (algorithm_->IsQueueFull() || buffer_in->IsEndOfStream()) {
+ if (algorithm_->IsQueueFull() || recieved_end_of_stream_) {
// Transition into paused whether we have data in |algorithm_| or not.
// FillBuffer() will play silence if there's nothing to fill.
state_ = kPaused;
@@ -174,6 +189,14 @@ size_t AudioRendererBase::FillBuffer(uint8* dest,
// Do the fill.
dest_written = algorithm_->FillBuffer(dest, dest_len);
+ // Check if we finally reached end of stream by emptying |algorithm_|.
+ if (recieved_end_of_stream_ && algorithm_->IsQueueEmpty()) {
+ if (!rendered_end_of_stream_) {
+ rendered_end_of_stream_ = true;
+ host()->NotifyEnded();
+ }
+ }
+
// Get the current time.
last_fill_buffer_time_ = algorithm_->GetTime();
}
diff --git a/media/filters/audio_renderer_base.h b/media/filters/audio_renderer_base.h
index da40339..07b62e4 100644
--- a/media/filters/audio_renderer_base.h
+++ b/media/filters/audio_renderer_base.h
@@ -39,6 +39,7 @@ class AudioRendererBase : public AudioRenderer {
// AudioRenderer implementation.
virtual void Initialize(AudioDecoder* decoder, FilterCallback* callback);
+ virtual bool HasEnded();
protected:
// Only allow a factory to create this class.
@@ -115,6 +116,10 @@ class AudioRendererBase : public AudioRenderer {
};
State state_;
+ // Keeps track of whether we received and rendered the end of stream buffer.
+ bool recieved_end_of_stream_;
+ bool rendered_end_of_stream_;
+
// Keeps track of our pending reads. We *must* have no pending reads before
// executing the pause callback, otherwise we breach the contract that all
// filters are idling.
diff --git a/media/filters/audio_renderer_base_unittest.cc b/media/filters/audio_renderer_base_unittest.cc
index cf2856b..c5e55c1 100644
--- a/media/filters/audio_renderer_base_unittest.cc
+++ b/media/filters/audio_renderer_base_unittest.cc
@@ -176,12 +176,14 @@ TEST_F(AudioRendererBaseTest, OneCompleteReadCycle) {
// Now satisfy the read requests. Our callback should be executed after
// exiting this loop.
const size_t kDataSize = 1024;
+ size_t bytes_buffered = 0;
while (!read_queue_.empty()) {
scoped_refptr<DataBuffer> buffer = new DataBuffer(kDataSize);
buffer->SetDataSize(kDataSize);
read_queue_.front()->Run(buffer);
delete read_queue_.front();
read_queue_.pop_front();
+ bytes_buffered += kDataSize;
}
MockFilterCallback play_callback;
@@ -198,6 +200,7 @@ TEST_F(AudioRendererBaseTest, OneCompleteReadCycle) {
for (size_t i = 0; i < kMaxQueueSize; ++i) {
EXPECT_EQ(kDataSize,
renderer_->FillBuffer(buffer, kDataSize, base::TimeDelta()));
+ bytes_buffered -= kDataSize;
}
// Make sure the read request queue is full.
@@ -209,8 +212,26 @@ TEST_F(AudioRendererBaseTest, OneCompleteReadCycle) {
delete read_queue_.front();
read_queue_.pop_front();
+ // We shouldn't report ended until all data has been flushed out.
+ EXPECT_FALSE(renderer_->HasEnded());
+
// We should have one less read request in the queue.
EXPECT_EQ(kMaxQueueSize - 1, read_queue_.size());
+
+ // Flush the entire internal buffer, which should notify the host we've ended.
+ EXPECT_EQ(0u, bytes_buffered % kDataSize);
+ EXPECT_CALL(host_, NotifyEnded());
+ while (bytes_buffered > 0) {
+ EXPECT_EQ(kDataSize,
+ renderer_->FillBuffer(buffer, kDataSize, base::TimeDelta()));
+ bytes_buffered -= kDataSize;
+ }
+
+ // We should now report ended.
+ EXPECT_TRUE(renderer_->HasEnded());
+
+ // Further reads should return muted audio and not notify any more.
+ EXPECT_EQ(0u, renderer_->FillBuffer(buffer, kDataSize, base::TimeDelta()));
}
} // namespace media
diff --git a/media/filters/video_renderer_base.cc b/media/filters/video_renderer_base.cc
index 9f832b4..1660f01 100644
--- a/media/filters/video_renderer_base.cc
+++ b/media/filters/video_renderer_base.cc
@@ -74,7 +74,7 @@ void VideoRendererBase::Play(FilterCallback* callback) {
void VideoRendererBase::Pause(FilterCallback* callback) {
AutoLock auto_lock(lock_);
- DCHECK_EQ(kPlaying, state_);
+ DCHECK(state_ == kPlaying || state_ == kEnded);
pause_callback_.reset(callback);
state_ = kPaused;
@@ -156,7 +156,7 @@ void VideoRendererBase::Initialize(VideoDecoder* decoder,
// Create a black frame so clients have something to render before we finish
// prerolling.
- CreateBlackFrame(&current_frame_);
+ VideoFrameImpl::CreateBlackFrame(width_, height_, &current_frame_);
// We're all good! Consider ourselves paused (ThreadMain() should never
// see us in the kUninitialized state).
@@ -180,6 +180,11 @@ void VideoRendererBase::Initialize(VideoDecoder* decoder,
callback->Run();
}
+bool VideoRendererBase::HasEnded() {
+ AutoLock auto_lock(lock_);
+ return state_ == kEnded;
+}
+
// PlatformThread::Delegate implementation.
void VideoRendererBase::ThreadMain() {
PlatformThread::SetName("VideoThread");
@@ -187,17 +192,25 @@ void VideoRendererBase::ThreadMain() {
// State and playback rate to assume for this iteration of the loop.
State state;
float playback_rate;
+ base::TimeDelta remaining_time;
{
AutoLock auto_lock(lock_);
state = state_;
playback_rate = playback_rate_;
+
+ // Calculate how long until we should advance the frame, which is
+ // typically negative but for playback rates < 1.0f may be long enough
+ // that it makes more sense to idle and check again.
+ remaining_time = current_frame_->GetTimestamp() - host()->GetTime();
}
if (state == kStopped) {
return;
}
- // Sleep while paused or seeking.
- if (state == kPaused || state == kSeeking || playback_rate == 0) {
+ // Idle if we shouldn't be playing or advancing the frame yet.
+ if (state == kPaused || state == kSeeking || state == kEnded ||
+ remaining_time.InMilliseconds() > kIdleMilliseconds ||
+ playback_rate == 0) {
PlatformThread::Sleep(kIdleMilliseconds);
continue;
}
@@ -213,17 +226,9 @@ void VideoRendererBase::ThreadMain() {
continue;
}
- // Idle if the next frame is too far ahead.
- base::TimeDelta diff = current_frame_->GetTimestamp() - host()->GetTime();
- if (diff.InMilliseconds() > kIdleMilliseconds) {
- PlatformThread::Sleep(kIdleMilliseconds);
- continue;
- }
-
// Otherwise we're playing, so advance the frame and keep reading from the
- // decoder. |frames_| might be empty if we seeked to the very end of the
- // media where no frames were available.
- if (!frames_.empty()) {
+  // decoder if we haven't reached the end of stream.
+ if (!frames_.empty() && !frames_.front()->IsEndOfStream()) {
DCHECK_EQ(current_frame_, frames_.front());
frames_.pop_front();
ScheduleRead_Locked();
@@ -241,9 +246,16 @@ void VideoRendererBase::ThreadMain() {
continue;
}
+ // If the new front frame is end of stream, we've officially ended.
+ if (frames_.front()->IsEndOfStream()) {
+ state_ = kEnded;
+ host()->NotifyEnded();
+ continue;
+ }
+
// Update our current frame and attempt to grab the next frame.
current_frame_ = frames_.front();
- if (frames_.size() >= 2) {
+ if (frames_.size() >= 2 && !frames_[1]->IsEndOfStream()) {
next_frame = frames_[1];
}
}
@@ -276,41 +288,41 @@ void VideoRendererBase::ThreadMain() {
void VideoRendererBase::GetCurrentFrame(scoped_refptr<VideoFrame>* frame_out) {
AutoLock auto_lock(lock_);
// We should have initialized and have the current frame.
- DCHECK(state_ == kPaused || state_ == kSeeking || state_ == kPlaying);
+ DCHECK(state_ == kPaused || state_ == kSeeking || state_ == kPlaying ||
+ state_ == kEnded);
DCHECK(current_frame_);
*frame_out = current_frame_;
}
void VideoRendererBase::OnReadComplete(VideoFrame* frame) {
AutoLock auto_lock(lock_);
- DCHECK(state_ == kPaused || state_ == kSeeking || state_ == kPlaying);
+ DCHECK(state_ == kPaused || state_ == kSeeking || state_ == kPlaying ||
+ state_ == kEnded);
DCHECK_GT(pending_reads_, 0u);
--pending_reads_;
- // If this is an end of stream frame, don't enqueue it since it has no data.
- if (!frame->IsEndOfStream()) {
- frames_.push_back(frame);
- DCHECK_LE(frames_.size(), kMaxFrames);
- frame_available_.Signal();
- }
+ // Enqueue the frame.
+ frames_.push_back(frame);
+ DCHECK_LE(frames_.size(), kMaxFrames);
+ frame_available_.Signal();
// Check for our preroll complete condition.
if (state_ == kSeeking) {
DCHECK(seek_callback_.get());
- if (frames_.size() == kMaxFrames || frame->IsEndOfStream()) {
- if (frames_.empty()) {
- // Eeep.. we seeked to somewhere where there's no video data (most
- // likely the very end of the file). For user-friendliness, we'll
- // create a black frame just in case |current_frame_| is old or garbage.
- CreateBlackFrame(&current_frame_);
+ if (frames_.size() == kMaxFrames) {
+ // We're paused, so make sure we update |current_frame_| to represent
+ // our new location.
+ state_ = kPaused;
+ if (frames_.front()->IsEndOfStream()) {
+ VideoFrameImpl::CreateBlackFrame(width_, height_, &current_frame_);
} else {
- // Update our current frame.
current_frame_ = frames_.front();
}
- // Because we might remain paused, we can't rely on ThreadMain() to
- // notify the subclass the frame has been updated.
+
+ // Because we might remain paused (i.e., we were not playing before we
+ // received a seek), we can't rely on ThreadMain() to notify the subclass
+ // the frame has been updated.
DCHECK(current_frame_);
- state_ = kPaused;
OnFrameAvailable();
seek_callback_->Run();
@@ -327,6 +339,7 @@ void VideoRendererBase::OnReadComplete(VideoFrame* frame) {
void VideoRendererBase::ScheduleRead_Locked() {
lock_.AssertAcquired();
+ DCHECK_NE(kEnded, state_);
DCHECK_LT(pending_reads_, kMaxFrames);
++pending_reads_;
decoder_->Read(NewCallback(this, &VideoRendererBase::OnReadComplete));
@@ -361,43 +374,4 @@ base::TimeDelta VideoRendererBase::CalculateSleepDuration(
static_cast<int64>(sleep.InMicroseconds() / playback_rate));
}
-void VideoRendererBase::CreateBlackFrame(scoped_refptr<VideoFrame>* frame_out) {
- DCHECK_GT(width_, 0);
- DCHECK_GT(height_, 0);
- *frame_out = NULL;
-
- // Create our frame.
- scoped_refptr<VideoFrame> frame;
- const base::TimeDelta kZero;
- VideoFrameImpl::CreateFrame(VideoSurface::YV12, width_, height_, kZero, kZero,
- &frame);
- DCHECK(frame);
-
- // Now set the data to YUV(0,128,128).
- VideoSurface surface;
- frame->Lock(&surface);
- DCHECK_EQ(VideoSurface::YV12, surface.format) << "Expected YV12 surface";
-
- // Fill the Y plane.
- for (size_t i = 0; i < surface.height; ++i) {
- memset(surface.data[VideoSurface::kYPlane], 0x00, surface.width);
- surface.data[VideoSurface::kYPlane]
- += surface.strides[VideoSurface::kYPlane];
- }
-
- // Fill the U and V planes.
- for (size_t i = 0; i < (surface.height / 2); ++i) {
- memset(surface.data[VideoSurface::kUPlane], 0x80, surface.width / 2);
- memset(surface.data[VideoSurface::kVPlane], 0x80, surface.width / 2);
- surface.data[VideoSurface::kUPlane]
- += surface.strides[VideoSurface::kUPlane];
- surface.data[VideoSurface::kVPlane]
- += surface.strides[VideoSurface::kVPlane];
- }
- frame->Unlock();
-
- // Success!
- *frame_out = frame;
-}
-
} // namespace media
diff --git a/media/filters/video_renderer_base.h b/media/filters/video_renderer_base.h
index d8940db..d18d6a1 100644
--- a/media/filters/video_renderer_base.h
+++ b/media/filters/video_renderer_base.h
@@ -45,6 +45,7 @@ class VideoRendererBase : public VideoRenderer,
// VideoRenderer implementation.
virtual void Initialize(VideoDecoder* decoder, FilterCallback* callback);
+ virtual bool HasEnded();
// PlatformThread::Delegate implementation.
virtual void ThreadMain();
@@ -91,10 +92,6 @@ class VideoRendererBase : public VideoRenderer,
base::TimeDelta CalculateSleepDuration(VideoFrame* next_frame,
float playback_rate);
- // Allocates YV12 frame based on |width_| and |height_|, and sets its data to
- // the YUV equivalent of RGB(0,0,0).
- void CreateBlackFrame(scoped_refptr<VideoFrame>* frame_out);
-
// Used for accessing data members.
Lock lock_;
@@ -120,6 +117,7 @@ class VideoRendererBase : public VideoRenderer,
kPaused,
kSeeking,
kPlaying,
+ kEnded,
kStopped,
kError,
};
diff --git a/media/filters/video_renderer_base_unittest.cc b/media/filters/video_renderer_base_unittest.cc
index 14f7259..b6a5c6b 100644
--- a/media/filters/video_renderer_base_unittest.cc
+++ b/media/filters/video_renderer_base_unittest.cc
@@ -127,6 +127,12 @@ TEST_F(VideoRendererBaseTest, Initialize_Failed) {
// Test successful initialization and preroll.
TEST_F(VideoRendererBaseTest, Initialize_Successful) {
+ // Who knows how many times ThreadMain() will execute!
+ //
+ // TODO(scherkus): really, really, really need to inject a thread into
+ // VideoRendererBase... it makes mocking much harder.
+ EXPECT_CALL(host_, GetTime()).WillRepeatedly(Return(base::TimeDelta()));
+
InSequence s;
// We expect the video size to be set.
diff --git a/webkit/glue/webmediaplayer_impl.cc b/webkit/glue/webmediaplayer_impl.cc
index 04f5dc8..4f37785 100644
--- a/webkit/glue/webmediaplayer_impl.cc
+++ b/webkit/glue/webmediaplayer_impl.cc
@@ -112,6 +112,11 @@ void WebMediaPlayerImpl::Proxy::PipelineSeekCallback() {
&WebMediaPlayerImpl::Proxy::PipelineSeekTask));
}
+void WebMediaPlayerImpl::Proxy::PipelineEndedCallback() {
+ render_loop_->PostTask(FROM_HERE, NewRunnableMethod(this,
+ &WebMediaPlayerImpl::Proxy::PipelineEndedTask));
+}
+
void WebMediaPlayerImpl::Proxy::PipelineErrorCallback() {
render_loop_->PostTask(FROM_HERE, NewRunnableMethod(this,
&WebMediaPlayerImpl::Proxy::PipelineErrorTask));
@@ -143,6 +148,13 @@ void WebMediaPlayerImpl::Proxy::PipelineSeekTask() {
}
}
+void WebMediaPlayerImpl::Proxy::PipelineEndedTask() {
+ DCHECK(MessageLoop::current() == render_loop_);
+ if (webmediaplayer_) {
+ webmediaplayer_->OnPipelineEnded();
+ }
+}
+
void WebMediaPlayerImpl::Proxy::PipelineErrorTask() {
DCHECK(MessageLoop::current() == render_loop_);
if (webmediaplayer_) {
@@ -181,7 +193,9 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(WebKit::WebMediaPlayerClient* client,
// Creates the proxy.
proxy_ = new Proxy(main_loop_, this);
- // Sets the pipeline's error reporting callback.
+ // Set our pipeline callbacks.
+ pipeline_->SetPipelineEndedCallback(NewCallback(proxy_.get(),
+ &WebMediaPlayerImpl::Proxy::PipelineEndedCallback));
pipeline_->SetPipelineErrorCallback(NewCallback(proxy_.get(),
&WebMediaPlayerImpl::Proxy::PipelineErrorCallback));
@@ -248,9 +262,12 @@ bool WebMediaPlayerImpl::supportsSave() const {
void WebMediaPlayerImpl::seek(float seconds) {
DCHECK(MessageLoop::current() == main_loop_);
+ // TODO(scherkus): WebKit fires a seek(0) at the very start, however pipeline
+ // already does a seek(0) internally. Investigate whether doing two seek(0)
+ // at the start impacts startup latency.
+
// Try to preserve as much accuracy as possible.
float microseconds = seconds * base::Time::kMicrosecondsPerSecond;
- if (seconds != 0)
pipeline_->Seek(
base::TimeDelta::FromMicroseconds(static_cast<int64>(microseconds)),
NewCallback(proxy_.get(),
@@ -453,6 +470,13 @@ void WebMediaPlayerImpl::OnPipelineSeek() {
}
}
+void WebMediaPlayerImpl::OnPipelineEnded() {
+ DCHECK(MessageLoop::current() == main_loop_);
+ if (pipeline_->GetError() == media::PIPELINE_OK) {
+ GetClient()->timeChanged();
+ }
+}
+
void WebMediaPlayerImpl::OnPipelineError() {
DCHECK(MessageLoop::current() == main_loop_);
switch (pipeline_->GetError()) {
diff --git a/webkit/glue/webmediaplayer_impl.h b/webkit/glue/webmediaplayer_impl.h
index 187c753..da1b142 100644
--- a/webkit/glue/webmediaplayer_impl.h
+++ b/webkit/glue/webmediaplayer_impl.h
@@ -106,6 +106,7 @@ class WebMediaPlayerImpl : public WebKit::WebMediaPlayer,
// WebMediaPlayerImpl.
void PipelineInitializationCallback();
void PipelineSeekCallback();
+ void PipelineEndedCallback();
void PipelineErrorCallback();
private:
@@ -118,6 +119,9 @@ class WebMediaPlayerImpl : public WebKit::WebMediaPlayer,
// Notify |webmediaplayer_| that a seek has finished.
void PipelineSeekTask();
+ // Notify |webmediaplayer_| that the media has ended.
+ void PipelineEndedTask();
+
// Notify |webmediaplayer_| that a pipeline error has been set.
void PipelineErrorTask();
@@ -220,6 +224,8 @@ class WebMediaPlayerImpl : public WebKit::WebMediaPlayer,
void OnPipelineSeek();
+ void OnPipelineEnded();
+
void OnPipelineError();
private: