author    jiesun@google.com <jiesun@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2010-09-02 00:15:55 +0000
committer jiesun@google.com <jiesun@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2010-09-02 00:15:55 +0000
commit    34d14848d6b9c5613e4a1a882898f50c7ab39983 (patch)
tree      706560d8008a4212f2066dc05653899fddaf555d /media
parent    6818e708dfd58aee620d592df28b29c251dcb0f1 (diff)
media: recycle buffers/direct rendering etc. (third patch)
1. FFmpeg uses direct rendering for Ogg/H.264; WebM still does not use direct rendering. In both cases the decoder owns the buffers.
2. The video renderer now supports flushing in a more flexible way.
3. Both OpenMAX paths have been merged, and both recycle buffers.
4. Finished and fine-tuned the seek logic in the OpenMAX code.
TEST=test matrix / player_x11 / layout tests.
BUG=None
Review URL: http://codereview.chromium.org/3014059
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@58279 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'media')
-rw-r--r--media/base/limits.h5
-rw-r--r--media/filters/ffmpeg_video_allocator.h8
-rw-r--r--media/filters/ffmpeg_video_decode_engine.cc159
-rw-r--r--media/filters/ffmpeg_video_decode_engine.h31
-rw-r--r--media/filters/ffmpeg_video_decoder.cc100
-rw-r--r--media/filters/ffmpeg_video_decoder.h17
-rw-r--r--media/filters/omx_video_decode_engine.cc343
-rw-r--r--media/filters/omx_video_decode_engine.h31
-rw-r--r--media/filters/video_renderer_base.cc222
-rw-r--r--media/filters/video_renderer_base.h42
-rw-r--r--media/filters/video_renderer_base_unittest.cc33
-rw-r--r--media/tools/player_x11/gles_video_renderer.cc2
12 files changed, 623 insertions, 370 deletions
diff --git a/media/base/limits.h b/media/base/limits.h
index 517a5d1..b8a139f 100644
--- a/media/base/limits.h
+++ b/media/base/limits.h
@@ -1,4 +1,4 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@@ -16,6 +16,9 @@ struct Limits {
static const size_t kMaxDimension = (1 << 15) - 1; // 32767
static const size_t kMaxCanvas = (1 << (14 * 2)); // 16384 x 16384
+ // Total number of video frames that can populate the pipeline.
+ static const size_t kMaxVideoFrames = 4;
+
// Following limits are used by AudioParameters::IsValid().
// The 192 kHz constant is the frequency of the QuickTime lossless audio codec.
// MP4 is limited to 96 kHz, and MP3 is limited to 48 kHz.
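A quick sanity check on what kMaxVideoFrames costs in memory (YV12 is 1.5 bytes per pixel, so these figures are plain arithmetic, consistent with the per-frame numbers quoted in the old video_renderer_base.cc comment this patch removes):

    // 480p:  640*480*1.5 bytes ~= 0.44 MB/frame -> 4 frames ~= 1.8 MB
    // 720p: 1280*720*1.5 bytes ~= 1.32 MB/frame -> 4 frames ~= 5.3 MB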
diff --git a/media/filters/ffmpeg_video_allocator.h b/media/filters/ffmpeg_video_allocator.h
index 2cb1d742..b72a1f1 100644
--- a/media/filters/ffmpeg_video_allocator.h
+++ b/media/filters/ffmpeg_video_allocator.h
@@ -28,7 +28,13 @@ class FFmpegVideoAllocator {
struct RefCountedAVFrame {
RefCountedAVFrame() : usage_count_(0) {}
- ~RefCountedAVFrame() { DCHECK_EQ(usage_count_, 0); }
+
+ // TODO(jiesun): we commented out "DCHECK_EQ(usage_count_, 0);" here
+ // because FFMPEG-MT releases buffers in a delayed fashion. We could
+ // probably wait for FFMPEG-MT to release all buffers before invoking
+ // the flush-completion callback.
+ ~RefCountedAVFrame() {}
+
void AddRef() {
base::AtomicRefCountIncN(&usage_count_, 1);
}
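The hunk above only shows AddRef(); for context, a minimal sketch of the matching Release(), assuming the usual base::AtomicRefCountDecN() contract (it returns false once the count reaches zero):

    bool Release() {
      // True when this was the last reference, i.e. the AVFrame may now be
      // handed back to the codec or allocator.
      return !base::AtomicRefCountDecN(&usage_count_, 1);
    }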
diff --git a/media/filters/ffmpeg_video_decode_engine.cc b/media/filters/ffmpeg_video_decode_engine.cc
index f5e6d34..0a2f384 100644
--- a/media/filters/ffmpeg_video_decode_engine.cc
+++ b/media/filters/ffmpeg_video_decode_engine.cc
@@ -14,13 +14,19 @@
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/ffmpeg_util.h"
#include "media/filters/ffmpeg_demuxer.h"
+#include "media/filters/ffmpeg_video_allocator.h"
namespace media {
FFmpegVideoDecodeEngine::FFmpegVideoDecodeEngine()
: codec_context_(NULL),
av_stream_(NULL),
- event_handler_(NULL) {
+ event_handler_(NULL),
+ direct_rendering_(false),
+ pending_input_buffers_(0),
+ pending_output_buffers_(0),
+ output_eos_reached_(false),
+ flush_pending_(false) {
}
FFmpegVideoDecodeEngine::~FFmpegVideoDecodeEngine() {
@@ -30,6 +36,8 @@ void FFmpegVideoDecodeEngine::Initialize(
MessageLoop* message_loop,
VideoDecodeEngine::EventHandler* event_handler,
const VideoCodecConfig& config) {
+ allocator_.reset(new FFmpegVideoAllocator());
+
// Always try to use three threads for video decoding. There is little reason
// not to since current day CPUs tend to be multi-core and we measured
// performance benefits on older machines such as P4s with hyperthreading.
@@ -51,6 +59,16 @@ void FFmpegVideoDecodeEngine::Initialize(
AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id);
+ if (codec) {
+#ifdef FF_THREAD_FRAME // Only defined in FFMPEG-MT.
+ direct_rendering_ = codec->capabilities & CODEC_CAP_DR1 ? true : false;
+#endif
+ if (direct_rendering_) {
+ DLOG(INFO) << "direct rendering is used";
+ allocator_->Initialize(codec_context_, GetSurfaceFormat());
+ }
+ }
+
// TODO(fbarchard): Improve thread logic based on size / codec.
// TODO(fbarchard): Fix bug affecting video-cookie.html
int decode_threads = (codec_context_->codec_id == CODEC_ID_THEORA) ?
@@ -70,15 +88,38 @@ void FFmpegVideoDecodeEngine::Initialize(
av_frame_.reset(avcodec_alloc_frame());
VideoCodecInfo info;
info.success_ = false;
- info.provides_buffers_ = false;
+ info.provides_buffers_ = true;
info.stream_info_.surface_type_ = VideoFrame::TYPE_SYSTEM_MEMORY;
info.stream_info_.surface_format_ = GetSurfaceFormat();
info.stream_info_.surface_width_ = config.width_;
info.stream_info_.surface_height_ = config.height_;
+
+ // If we cannot allocate enough buffers, we report an error as well.
+ bool buffer_allocated = true;
+ frame_queue_available_.clear();
+ if (!direct_rendering_) {
+ // Create output buffer pool when direct rendering is not used.
+ for (size_t i = 0; i < Limits::kMaxVideoFrames; ++i) {
+ scoped_refptr<VideoFrame> video_frame;
+ VideoFrame::CreateFrame(VideoFrame::YV12,
+ config.width_,
+ config.height_,
+ StreamSample::kInvalidTimestamp,
+ StreamSample::kInvalidTimestamp,
+ &video_frame);
+ if (!video_frame.get()) {
+ buffer_allocated = false;
+ break;
+ }
+ frame_queue_available_.push_back(video_frame);
+ }
+ }
+
if (codec &&
avcodec_thread_init(codec_context_, decode_threads) >= 0 &&
avcodec_open(codec_context_, codec) >= 0 &&
- av_frame_.get()) {
+ av_frame_.get() &&
+ buffer_allocated) {
info.success_ = true;
}
event_handler_ = event_handler;
@@ -112,12 +153,34 @@ static void CopyPlane(size_t plane,
void FFmpegVideoDecodeEngine::EmptyThisBuffer(
scoped_refptr<Buffer> buffer) {
- DecodeFrame(buffer);
+ pending_input_buffers_--;
+ if (flush_pending_) {
+ TryToFinishPendingFlush();
+ } else {
+ // Otherwise try to decode this buffer.
+ DecodeFrame(buffer);
+ }
}
void FFmpegVideoDecodeEngine::FillThisBuffer(scoped_refptr<VideoFrame> frame) {
- scoped_refptr<Buffer> buffer;
- event_handler_->OnEmptyBufferCallback(buffer);
+ // We should never receive a NULL frame or an EOS frame.
+ DCHECK(frame.get() && !frame->IsEndOfStream());
+
+ // Increment pending output buffer count.
+ pending_output_buffers_++;
+
+ // Return this frame to the available pool or to the allocator after display.
+ if (direct_rendering_)
+ allocator_->DisplayDone(codec_context_, frame);
+ else
+ frame_queue_available_.push_back(frame);
+
+ if (flush_pending_) {
+ TryToFinishPendingFlush();
+ } else if (!output_eos_reached_) {
+ // Once we have delivered EOS to the renderer, we stop reading new input.
+ ReadInput();
+ }
}
// Try to decode frame when both input and output are ready.
@@ -158,11 +221,17 @@ void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) {
}
// If frame_decoded == 0, then no frame was produced.
+ // In this case, if we have already begun flushing the codec with empty
+ // input packets at the end of the input stream, the first time we
+ // encounter frame_decoded == 0 signals that the output frames have been
+ // drained, so we set the flag. Otherwise we read from the demuxer again.
if (frame_decoded == 0) {
- if (buffer->IsEndOfStream()) // We had started flushing.
+ if (buffer->IsEndOfStream()) { // We had started flushing.
event_handler_->OnFillBufferCallback(video_frame);
- else
- event_handler_->OnEmptyBufferCallback(buffer);
+ output_eos_reached_ = true;
+ } else {
+ ReadInput();
+ }
return;
}
@@ -198,44 +267,76 @@ void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) {
base::TimeDelta duration =
ConvertTimestamp(doubled_time_base, 2 + av_frame_->repeat_pict);
- VideoFrame::CreateFrame(GetSurfaceFormat(),
- codec_context_->width,
- codec_context_->height,
- timestamp,
- duration,
- &video_frame);
- if (!video_frame.get()) {
- // TODO(jiesun): call event_handler_->OnError() instead.
- event_handler_->OnFillBufferCallback(video_frame);
- return;
+ if (!direct_rendering_) {
+ // An available frame is guaranteed, because we issue as many reads as
+ // there are available frames, except when |frame_decoded| == 0, which
+ // implies decoder reordering delay and forces us to read more input.
+ DCHECK(frame_queue_available_.size());
+ video_frame = frame_queue_available_.front();
+ frame_queue_available_.pop_front();
+
+ // Copy the frame data since FFmpeg reuses internal buffers for AVFrame
+ // output, meaning the data is only valid until the next
+ // avcodec_decode_video() call.
+ CopyPlane(VideoFrame::kYPlane, video_frame.get(), av_frame_.get());
+ CopyPlane(VideoFrame::kUPlane, video_frame.get(), av_frame_.get());
+ CopyPlane(VideoFrame::kVPlane, video_frame.get(), av_frame_.get());
+ } else {
+ // Get the VideoFrame from the allocator that is associated with av_frame_.
+ video_frame = allocator_->DecodeDone(codec_context_, av_frame_.get());
}
- // Copy the frame data since FFmpeg reuses internal buffers for AVFrame
- // output, meaning the data is only valid until the next
- // avcodec_decode_video() call.
- // TODO(scherkus): figure out pre-allocation/buffer cycling scheme.
- // TODO(scherkus): is there a cleaner way to figure out the # of planes?
- CopyPlane(VideoFrame::kYPlane, video_frame.get(), av_frame_.get());
- CopyPlane(VideoFrame::kUPlane, video_frame.get(), av_frame_.get());
- CopyPlane(VideoFrame::kVPlane, video_frame.get(), av_frame_.get());
+ video_frame->SetTimestamp(timestamp);
+ video_frame->SetDuration(duration);
+ pending_output_buffers_--;
event_handler_->OnFillBufferCallback(video_frame);
}
void FFmpegVideoDecodeEngine::Uninitialize() {
- // TODO(jiesun): Release buffers when we support buffer recycling.
+ if (direct_rendering_) {
+ allocator_->Stop(codec_context_);
+ }
+
event_handler_->OnUninitializeComplete();
}
void FFmpegVideoDecodeEngine::Flush() {
avcodec_flush_buffers(codec_context_);
- event_handler_->OnFlushComplete();
+ flush_pending_ = true;
+ TryToFinishPendingFlush();
+}
+
+void FFmpegVideoDecodeEngine::TryToFinishPendingFlush() {
+ DCHECK(flush_pending_);
+
+ // We consider ourselves flushed when there are no pending input or
+ // output buffers, which implies that all buffers have been returned
+ // to their owners.
+ if (!pending_input_buffers_ && !pending_output_buffers_) {
+ // Try to finish flushing and notify pipeline.
+ flush_pending_ = false;
+ event_handler_->OnFlushComplete();
+ }
}
void FFmpegVideoDecodeEngine::Seek() {
+ // After a seek, the output stream is no longer considered to be at EOS.
+ output_eos_reached_ = false;
+
+ // The buffer provider is assumed to perform the pre-roll operation.
+ for (unsigned int i = 0; i < Limits::kMaxVideoFrames; ++i)
+ ReadInput();
+
event_handler_->OnSeekComplete();
}
+void FFmpegVideoDecodeEngine::ReadInput() {
+ DCHECK_EQ(output_eos_reached_, false);
+ pending_input_buffers_++;
+ event_handler_->OnEmptyBufferCallback(NULL);
+}
+
VideoFrame::Format FFmpegVideoDecodeEngine::GetSurfaceFormat() const {
// J (Motion JPEG) versions of YUV are full range 0..255.
// Regular (MPEG) YUV is 16..240.
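To make the new flush protocol concrete, here is a self-contained sketch (illustrative only, not Chromium code) of the counting rule TryToFinishPendingFlush() enforces: a pending flush completes only when both counters reach zero, i.e. every buffer is back with its owner.

    struct FlushTracker {
      int pending_input = 0;    // Reads issued to the demuxer, not yet back.
      int pending_output = 0;   // Frames held by the engine, not yet sent out.
      bool flush_pending = false;

      void RequestFlush()     { flush_pending = true; TryFinish(); }
      void OnInputReturned()  { --pending_input;      TryFinish(); }
      void OnFrameDelivered() { --pending_output;     TryFinish(); }

      void TryFinish() {
        if (flush_pending && pending_input == 0 && pending_output == 0) {
          flush_pending = false;
          // Real engine: event_handler_->OnFlushComplete();
        }
      }
    };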
diff --git a/media/filters/ffmpeg_video_decode_engine.h b/media/filters/ffmpeg_video_decode_engine.h
index b12c87e..fbb465a 100644
--- a/media/filters/ffmpeg_video_decode_engine.h
+++ b/media/filters/ffmpeg_video_decode_engine.h
@@ -5,6 +5,8 @@
#ifndef MEDIA_FILTERS_FFMPEG_VIDEO_DECODE_ENGINE_H_
#define MEDIA_FILTERS_FFMPEG_VIDEO_DECODE_ENGINE_H_
+#include <deque>
+
#include "base/scoped_ptr.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/filters/video_decode_engine.h"
@@ -16,6 +18,8 @@ struct AVStream;
namespace media {
+class FFmpegVideoAllocator;
+
class FFmpegVideoDecodeEngine : public VideoDecodeEngine {
public:
FFmpegVideoDecodeEngine();
@@ -40,12 +44,39 @@ class FFmpegVideoDecodeEngine : public VideoDecodeEngine {
VideoFrame::Format GetSurfaceFormat() const;
private:
void DecodeFrame(scoped_refptr<Buffer> buffer);
+ void ReadInput();
+ void TryToFinishPendingFlush();
AVCodecContext* codec_context_;
AVStream* av_stream_;
scoped_ptr_malloc<AVFrame, ScopedPtrAVFree> av_frame_;
VideoDecodeEngine::EventHandler* event_handler_;
+ // Whether direct rendering is used.
+ bool direct_rendering_;
+
+ // Used when direct rendering is used to recycle output buffers.
+ scoped_ptr<FFmpegVideoAllocator> allocator_;
+
+ // Number of buffers pending on the input port of this filter:
+ // incremented when the engine issues a read to the demuxer;
+ // decremented when the engine receives an input packet back.
+ int pending_input_buffers_;
+
+ // Number of buffers pending on the output port of this filter:
+ // incremented when the engine receives an output frame from the renderer;
+ // decremented when the engine sends a decoded frame to the renderer.
+ int pending_output_buffers_;
+
+ // Whether end of stream has been reached at the output side.
+ bool output_eos_reached_;
+
+ // Used when direct rendering is disabled to hold available output buffers.
+ std::deque<scoped_refptr<VideoFrame> > frame_queue_available_;
+
+ // Whether flush operation is pending.
+ bool flush_pending_;
+
DISALLOW_COPY_AND_ASSIGN(FFmpegVideoDecodeEngine);
};
diff --git a/media/filters/ffmpeg_video_decoder.cc b/media/filters/ffmpeg_video_decoder.cc
index 8b673dc..46e4036 100644
--- a/media/filters/ffmpeg_video_decoder.cc
+++ b/media/filters/ffmpeg_video_decoder.cc
@@ -26,9 +26,7 @@ FFmpegVideoDecoder::FFmpegVideoDecoder(VideoDecodeEngine* engine)
height_(0),
time_base_(new AVRational()),
state_(kUnInitialized),
- decode_engine_(engine),
- pending_reads_(0),
- pending_requests_(0) {
+ decode_engine_(engine) {
memset(&info_, 0, sizeof(info_));
}
@@ -150,6 +148,19 @@ void FFmpegVideoDecoder::OnUninitializeComplete() {
state_ = kStopped;
}
+void FFmpegVideoDecoder::Pause(FilterCallback* callback) {
+ if (MessageLoop::current() != message_loop()) {
+ message_loop()->PostTask(FROM_HERE,
+ NewRunnableMethod(this,
+ &FFmpegVideoDecoder::Pause,
+ callback));
+ return;
+ }
+
+ AutoCallbackRunner done_runner(callback);
+ state_ = kPausing;
+}
+
void FFmpegVideoDecoder::Flush(FilterCallback* callback) {
if (MessageLoop::current() != message_loop()) {
message_loop()->PostTask(FROM_HERE,
@@ -162,11 +173,11 @@ void FFmpegVideoDecoder::Flush(FilterCallback* callback) {
DCHECK_EQ(MessageLoop::current(), message_loop());
DCHECK(!flush_callback_.get());
- flush_callback_.reset(callback);
+ state_ = kFlushing;
- // Everything in the presentation time queue is invalid, clear the queue.
- while (!pts_heap_.IsEmpty())
- pts_heap_.Pop();
+ FlushBuffers();
+
+ flush_callback_.reset(callback);
decode_engine_->Flush();
}
@@ -176,6 +187,13 @@ void FFmpegVideoDecoder::OnFlushComplete() {
DCHECK(flush_callback_.get());
AutoCallbackRunner done_runner(flush_callback_.release());
+
+ // Everything in the presentation time queue is invalid, clear the queue.
+ while (!pts_heap_.IsEmpty())
+ pts_heap_.Pop();
+
+ // Mark the flush operation as done.
+ state_ = kNormal;
}
void FFmpegVideoDecoder::Seek(base::TimeDelta time,
@@ -192,10 +210,6 @@ void FFmpegVideoDecoder::Seek(base::TimeDelta time,
DCHECK_EQ(MessageLoop::current(), message_loop());
DCHECK(!seek_callback_.get());
- // TODO(jiesun): when we move to parallel Flush, we should remove this.
- DCHECK_EQ(0u, pending_reads_) << "Pending reads should have completed";
- DCHECK_EQ(0u, pending_requests_) << "Pending requests should be empty";
-
seek_callback_.reset(callback);
decode_engine_->Seek();
}
@@ -205,7 +219,6 @@ void FFmpegVideoDecoder::OnSeekComplete() {
DCHECK(seek_callback_.get());
AutoCallbackRunner done_runner(seek_callback_.release());
- state_ = kNormal;
}
void FFmpegVideoDecoder::OnError() {
@@ -227,9 +240,7 @@ void FFmpegVideoDecoder::OnReadComplete(Buffer* buffer_in) {
void FFmpegVideoDecoder::OnReadCompleteTask(scoped_refptr<Buffer> buffer) {
DCHECK_EQ(MessageLoop::current(), message_loop());
- DCHECK_GT(pending_reads_, 0u);
-
- --pending_reads_;
+ DCHECK_NE(state_, kStopped); // because of Flush() before Stop().
// During decode, because reads are issued asynchronously, it is possible to
// receive multiple end of stream buffers since each read is acked. When the
@@ -255,18 +266,6 @@ void FFmpegVideoDecoder::OnReadCompleteTask(scoped_refptr<Buffer> buffer) {
// When avcodec_decode_video2() returns 0 data or errors out.
// (any state) -> kNormal:
// Any time buffer->IsDiscontinuous() is true.
- //
- // If the decoding is finished, we just always return empty frames.
- if (state_ == kDecodeFinished || state_ == kStopped) {
- DCHECK(buffer->IsEndOfStream());
-
- --pending_requests_;
- // Signal VideoRenderer the end of the stream event.
- scoped_refptr<VideoFrame> video_frame;
- VideoFrame::CreateEmptyFrame(&video_frame);
- fill_buffer_done_callback()->Run(video_frame);
- return;
- }
// Transition to kFlushCodec on the first end of stream buffer.
if (state_ == kNormal && buffer->IsEndOfStream()) {
@@ -302,23 +301,35 @@ void FFmpegVideoDecoder::FillThisBuffer(
DCHECK_EQ(MessageLoop::current(), message_loop());
// Synchronized flushing before stop should prevent this.
- if (state_ == kStopped)
- return; // Discard the video frame.
+ DCHECK_NE(state_, kStopped);
+
+ // If the decoding is finished, we always return empty frames.
+ if (state_ == kDecodeFinished) {
+ // Signal VideoRenderer the end of the stream event.
+ scoped_refptr<VideoFrame> empty_frame;
+ VideoFrame::CreateEmptyFrame(&empty_frame);
+ fill_buffer_done_callback()->Run(empty_frame);
+
+ // Fall through, because we still need to keep a record of this frame.
+ }
// Notify the decode engine that a new frame is available.
- ++pending_requests_;
decode_engine_->FillThisBuffer(video_frame);
}
void FFmpegVideoDecoder::OnFillBufferCallback(
scoped_refptr<VideoFrame> video_frame) {
DCHECK_EQ(MessageLoop::current(), message_loop());
-
- // TODO(jiesun): Flush before stop will prevent this from happening.
- if (state_ == kStopped)
- return; // Discard the video frame.
+ DCHECK_NE(state_, kStopped);
if (video_frame.get()) {
+ if (kPausing == state_ || kFlushing == state_) {
+ frame_queue_flushed_.push_back(video_frame);
+ if (kFlushing == state_)
+ FlushBuffers();
+ return;
+ }
+
// If we actually got data back, enqueue a frame.
last_pts_ = FindPtsAndDuration(*time_base_, &pts_heap_, last_pts_,
video_frame.get());
@@ -326,8 +337,6 @@ void FFmpegVideoDecoder::OnFillBufferCallback(
video_frame->SetTimestamp(last_pts_.timestamp);
video_frame->SetDuration(last_pts_.duration);
- // Deliver this frame to VideoRenderer.
- --pending_requests_;
fill_buffer_done_callback()->Run(video_frame);
} else {
// When in kFlushCodec, any errored decode, or a 0-lengthed frame,
@@ -335,7 +344,6 @@ void FFmpegVideoDecoder::OnFillBufferCallback(
if (state_ == kFlushCodec) {
state_ = kDecodeFinished;
- --pending_requests_;
// Signal VideoRenderer the end of the stream event.
scoped_refptr<VideoFrame> video_frame;
VideoFrame::CreateEmptyFrame(&video_frame);
@@ -347,11 +355,10 @@ void FFmpegVideoDecoder::OnFillBufferCallback(
void FFmpegVideoDecoder::OnEmptyBufferCallback(
scoped_refptr<Buffer> buffer) {
DCHECK_EQ(MessageLoop::current(), message_loop());
- DCHECK_LE(pending_reads_, pending_requests_);
+ DCHECK_NE(state_, kStopped);
demuxer_stream_->Read(
NewCallback(this, &FFmpegVideoDecoder::OnReadComplete));
- ++pending_reads_;
}
FFmpegVideoDecoder::TimeTuple FFmpegVideoDecoder::FindPtsAndDuration(
@@ -408,6 +415,21 @@ bool FFmpegVideoDecoder::ProvidesBuffer() {
return info_.provides_buffers_;
}
+void FFmpegVideoDecoder::FlushBuffers() {
+ while (!frame_queue_flushed_.empty()) {
+ scoped_refptr<VideoFrame> video_frame;
+ video_frame = frame_queue_flushed_.front();
+ frame_queue_flushed_.pop_front();
+
+ // Depending on who owns the buffers, we either return them to the
+ // renderer or to the decode engine.
+ if (ProvidesBuffer())
+ decode_engine_->FillThisBuffer(video_frame);
+ else
+ fill_buffer_done_callback()->Run(video_frame);
+ }
+}
+
void FFmpegVideoDecoder::SetVideoDecodeEngineForTest(
VideoDecodeEngine* engine) {
decode_engine_ = engine;
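The new kPausing/kFlushing states assume a particular call order from the pipeline; a hedged sketch of that sequence (the driver lines are illustrative, the state names are from this file):

    decoder->Pause(cb);       // kNormal  -> kPausing: arriving frames are
                              // parked in frame_queue_flushed_.
    decoder->Flush(cb);       // kPausing -> kFlushing: FlushBuffers() returns
                              // parked frames to their owners, the engine
                              // flushes, and OnFlushComplete() -> kNormal.
    decoder->Seek(time, cb);  // The engine pre-rolls new reads.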
diff --git a/media/filters/ffmpeg_video_decoder.h b/media/filters/ffmpeg_video_decoder.h
index ea23723..972010b 100644
--- a/media/filters/ffmpeg_video_decoder.h
+++ b/media/filters/ffmpeg_video_decoder.h
@@ -5,6 +5,8 @@
#ifndef MEDIA_FILTERS_FFMPEG_VIDEO_DECODER_H_
#define MEDIA_FILTERS_FFMPEG_VIDEO_DECODER_H_
+#include <deque>
+
#include "base/gtest_prod_util.h"
#include "base/time.h"
#include "media/base/factory.h"
@@ -33,6 +35,7 @@ class FFmpegVideoDecoder : public VideoDecoder,
// MediaFilter implementation.
virtual void Stop(FilterCallback* callback);
virtual void Seek(base::TimeDelta time, FilterCallback* callback);
+ virtual void Pause(FilterCallback* callback);
virtual void Flush(FilterCallback* callback);
// Decoder implementation.
@@ -77,6 +80,8 @@ class FFmpegVideoDecoder : public VideoDecoder,
kNormal,
kFlushCodec,
kDecodeFinished,
+ kPausing,
+ kFlushing,
kStopped
};
@@ -88,6 +93,9 @@ class FFmpegVideoDecoder : public VideoDecoder,
// this with OnReadComplete
void OnReadCompleteTask(scoped_refptr<Buffer> buffer);
+ // Flush the output buffers that we held while in the paused state.
+ void FlushBuffers();
+
// Attempt to get the PTS and Duration for this frame by examining the time
// info provided via packet stream (stored in |pts_heap|), or the info
// written into the AVFrame itself. If no data is available in either, then
@@ -115,17 +123,14 @@ class FFmpegVideoDecoder : public VideoDecoder,
DecoderState state_;
scoped_refptr<VideoDecodeEngine> decode_engine_;
- // Tracks the number of asynchronous reads issued to |demuxer_stream_|.
- // Using size_t since it is always compared against deque::size().
- size_t pending_reads_;
- // Tracks the number of asynchronous reads issued from renderer.
- size_t pending_requests_;
-
scoped_ptr<FilterCallback> initialize_callback_;
scoped_ptr<FilterCallback> uninitialize_callback_;
scoped_ptr<FilterCallback> flush_callback_;
scoped_ptr<FilterCallback> seek_callback_;
+ // Holds video frames when a flush happens.
+ std::deque<scoped_refptr<VideoFrame> > frame_queue_flushed_;
+
VideoCodecInfo info_;
// Pointer to the demuxer stream that will feed us compressed buffers.
diff --git a/media/filters/omx_video_decode_engine.cc b/media/filters/omx_video_decode_engine.cc
index b1d7762..358e005 100644
--- a/media/filters/omx_video_decode_engine.cc
+++ b/media/filters/omx_video_decode_engine.cc
@@ -16,17 +16,10 @@
#include "media/filters/omx_video_decode_engine.h"
-#include <algorithm>
-#include <string>
-
-#include "base/callback.h"
#include "base/logging.h"
#include "base/message_loop.h"
-#include "base/stl_util-inl.h"
#include "base/string_util.h"
#include "media/base/buffers.h"
-#include "media/base/callback.h"
-#include "media/ffmpeg/ffmpeg_common.h"
namespace media {
@@ -41,15 +34,21 @@ OmxVideoDecodeEngine::OmxVideoDecodeEngine()
input_pending_request_(0),
input_queue_has_eos_(false),
input_has_fed_eos_(false),
+ input_port_flushed_(false),
output_buffer_count_(0),
output_buffer_size_(0),
output_port_(0),
+ output_buffers_at_component_(0),
+ output_pending_request_(0),
output_eos_(false),
+ output_port_flushed_(false),
il_state_(kIlNone),
expected_il_state_(kIlNone),
client_state_(kClientNotInitialized),
component_handle_(NULL),
need_free_input_buffers_(false),
+ need_free_output_buffers_(false),
+ flush_pending_(false),
output_frames_allocated_(false),
need_setup_output_port_(false) {
// TODO(wjia): change uses_egl_image_ to runtime setup
@@ -67,13 +66,11 @@ OmxVideoDecodeEngine::~OmxVideoDecodeEngine() {
client_state_ == kClientStopped);
DCHECK_EQ(il_state_, kIlNone);
DCHECK_EQ(0u, input_buffers_.size());
- DCHECK_EQ(0u, output_buffers_.size());
DCHECK(free_input_buffers_.empty());
DCHECK(available_input_buffers_.empty());
DCHECK_EQ(0, input_buffers_at_component_);
+ DCHECK_EQ(0, output_buffers_at_component_);
DCHECK(output_frames_.empty());
- DCHECK(available_output_frames_.empty());
- DCHECK(output_frames_ready_.empty());
}
template <typename T>
@@ -126,6 +123,7 @@ void OmxVideoDecodeEngine::Initialize(
void OmxVideoDecodeEngine::EmptyThisBuffer(scoped_refptr<Buffer> buffer) {
DCHECK_EQ(message_loop_, MessageLoop::current());
DCHECK(!free_input_buffers_.empty());
+ DCHECK_GT(input_pending_request_, 0);
--input_pending_request_;
@@ -146,7 +144,10 @@ void OmxVideoDecodeEngine::EmptyThisBuffer(scoped_refptr<Buffer> buffer) {
omx_buffer->pBuffer = const_cast<OMX_U8*>(buffer->GetData());
omx_buffer->nFilledLen = buffer->GetDataSize();
omx_buffer->nAllocLen = omx_buffer->nFilledLen;
- omx_buffer->nFlags |= input_queue_has_eos_ ? OMX_BUFFERFLAG_EOS : 0;
+ if (input_queue_has_eos_)
+ omx_buffer->nFlags |= OMX_BUFFERFLAG_EOS;
+ else
+ omx_buffer->nFlags &= ~OMX_BUFFERFLAG_EOS;
omx_buffer->nTimeStamp = buffer->GetTimestamp().InMicroseconds();
omx_buffer->pAppPrivate = buffer.get();
buffer->AddRef();
@@ -155,7 +156,7 @@ void OmxVideoDecodeEngine::EmptyThisBuffer(scoped_refptr<Buffer> buffer) {
// Try to feed buffers into the decoder.
EmptyBufferTask();
- if (il_state_ == kIlPause && input_pending_request_ == 0)
+ if (flush_pending_ && input_pending_request_ == 0)
StartFlush();
}
@@ -163,8 +164,12 @@ void OmxVideoDecodeEngine::Flush() {
DCHECK_EQ(message_loop_, MessageLoop::current());
DCHECK_EQ(il_state_, kIlExecuting);
- client_state_ = kClientFlushing;
+ if (il_state_ != kIlExecuting) {
+ event_handler_->OnFlushComplete();
+ return;
+ }
+ client_state_ = kClientFlushing;
expected_il_state_ = kIlPause;
OnStateSetEventFunc = &OmxVideoDecodeEngine::PauseFromExecuting;
TransitionToState(OMX_StatePause);
@@ -176,37 +181,32 @@ void OmxVideoDecodeEngine::PauseFromExecuting(OMX_STATETYPE state) {
OnStateSetEventFunc = NULL;
il_state_ = kIlPause;
- if (input_pending_request_ == 0 ) {
+ if (input_pending_request_ == 0)
StartFlush();
- }
+ else
+ flush_pending_ = true;
}
void OmxVideoDecodeEngine::StartFlush() {
DCHECK_EQ(message_loop_, MessageLoop::current());
-
- if (client_state_ != kClientFlushing) {
- // TODO(jiesun): how to prevent initial seek.
- event_handler_->OnFlushComplete();
- return;
- }
-
- // TODO(jiesun): return buffers to filter who allocate them.
- while (!output_frames_ready_.empty())
- output_frames_ready_.pop();
+ DCHECK_EQ(input_pending_request_, 0);
+ DLOG(INFO) << "StartFlush";
while (!available_input_buffers_.empty())
available_input_buffers_.pop();
- input_port_flushed_ = false;
- output_port_flushed_ = false;
+ flush_pending_ = false;
+
+ // Flush input port first.
OnFlushEventFunc = &OmxVideoDecodeEngine::PortFlushDone;
OMX_ERRORTYPE omxresult;
omxresult = OMX_SendCommand(component_handle_,
OMX_CommandFlush,
- OMX_ALL, 0);
+ input_port_, 0);
}
bool OmxVideoDecodeEngine::InputPortFlushed() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
DCHECK_EQ(client_state_, kClientFlushing);
// A port is flushed when the OpenMAX component has signaled flush done and
// all buffers have been returned from the demuxer and the component.
@@ -215,40 +215,64 @@ bool OmxVideoDecodeEngine::InputPortFlushed() {
}
bool OmxVideoDecodeEngine::OutputPortFlushed() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
DCHECK_EQ(client_state_, kClientFlushing);
// A port is flushed when the OpenMAX component has signaled flush done and
// all buffers have been returned from the renderer and the component.
- // TODO(jiesun): egl image path may use different value.
- return output_port_flushed_ /*&&
- available_output_frames_.size() == output_buffer_count_*/;
+ return output_port_flushed_ && output_pending_request_ == 0;
}
void OmxVideoDecodeEngine::ComponentFlushDone() {
- // use these flags to ensure only callback once.
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DLOG(INFO) << "Component had been flushed!";
+
if (input_port_flushed_ && output_port_flushed_) {
event_handler_->OnFlushComplete();
input_port_flushed_ = false;
output_port_flushed_ = false;
-
- InitialReadBuffer();
- OnStateSetEventFunc = &OmxVideoDecodeEngine::DoneSetStateExecuting;
- TransitionToState(OMX_StateExecuting);
}
}
void OmxVideoDecodeEngine::PortFlushDone(int port) {
- if (port == input_port_ || port == static_cast<int>(OMX_ALL))
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_NE(port, static_cast<int>(OMX_ALL));
+
+ if (port == input_port_) {
+ DLOG(INFO) << "Input Port had been flushed";
+ DCHECK_EQ(input_buffers_at_component_, 0);
input_port_flushed_ = true;
- if (port == output_port_ || port == static_cast<int>(OMX_ALL))
+ // Flush output port next.
+ OMX_ERRORTYPE omxresult;
+ omxresult = OMX_SendCommand(component_handle_,
+ OMX_CommandFlush,
+ output_port_, 0);
+ return;
+ }
+
+ if (port == output_port_) {
+ DLOG(INFO) << "Output Port had been flushed";
+ DCHECK_EQ(output_buffers_at_component_, 0);
+
output_port_flushed_ = true;
+ }
- if (InputPortFlushed() && OutputPortFlushed())
+ if (kClientFlushing == client_state_ &&
+ InputPortFlushed() && OutputPortFlushed())
ComponentFlushDone();
}
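The sequencing above replaces the old single OMX_ALL flush; a simplified sketch (not the actual class) of the three-step handshake, where AllBuffersReturned() stands in for the real InputPortFlushed()/OutputPortFlushed() checks:

    void StartFlush() {
      OMX_SendCommand(handle, OMX_CommandFlush, input_port, 0);    // 1. Input.
    }
    void PortFlushDone(int port) {
      if (port == input_port)                                      // 2. Output.
        OMX_SendCommand(handle, OMX_CommandFlush, output_port, 0);
      else if (port == output_port && AllBuffersReturned())
        OnFlushComplete();                                         // 3. Done.
    }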
void OmxVideoDecodeEngine::Seek() {
DCHECK_EQ(message_loop_, MessageLoop::current());
- // TODO(jiesun): add real logic here.
+
+ DCHECK(client_state_ == kClientFlushing || // After a flush
+ client_state_ == kClientInitializing); // After an initialize.
+
+ if (client_state_ == kClientFlushing) {
+ InitialReadBuffer();
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::DoneSetStateExecuting;
+ TransitionToState(OMX_StateExecuting);
+ }
+
event_handler_->OnSeekComplete();
}
@@ -267,7 +291,7 @@ void OmxVideoDecodeEngine::Uninitialize() {
}
// TODO(wjia): add more state checking
- if (kClientRunning == client_state_) {
+ if (kClientRunning == client_state_ || kClientFlushing == client_state_) {
client_state_ = kClientStopping;
DeinitFromExecuting(OMX_StateExecuting);
}
@@ -277,13 +301,6 @@ void OmxVideoDecodeEngine::Uninitialize() {
// client_state_ = kClientStopping;
}
-void OmxVideoDecodeEngine::OnFormatChange(
- const OmxConfigurator::MediaFormat& input_format,
- const OmxConfigurator::MediaFormat& output_format) {
- // TODO(jiesun): We should not need this for here, because width and height
- // are already known from upper layer of the stack.
-}
-
void OmxVideoDecodeEngine::FinishEmptyBuffer(scoped_refptr<Buffer> buffer) {
DCHECK_EQ(message_loop_, MessageLoop::current());
@@ -295,43 +312,20 @@ void OmxVideoDecodeEngine::FinishEmptyBuffer(scoped_refptr<Buffer> buffer) {
void OmxVideoDecodeEngine::FinishFillBuffer(OMX_BUFFERHEADERTYPE* buffer) {
DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK(buffer);
scoped_refptr<VideoFrame> frame;
- // EOF
- if (!buffer) {
- VideoFrame::CreateEmptyFrame(&frame);
- event_handler_->OnFillBufferCallback(frame);
- return;
- }
-
- if (uses_egl_image_) {
- frame = static_cast<VideoFrame*>(buffer->pAppPrivate);
- } else {
- VideoFrame::CreateFrame(GetSurfaceFormat(),
- width_, height_,
- StreamSample::kInvalidTimestamp,
- StreamSample::kInvalidTimestamp,
- &frame);
- if (!frame.get()) {
- // TODO(jiesun): this is also an error case handled as normal.
- return;
- }
+ frame = static_cast<VideoFrame*>(buffer->pAppPrivate);
- // TODO(jiesun): Assume YUV 420 format.
- const int pixels = width_ * height_;
- memcpy(frame->data(VideoFrame::kYPlane), buffer->pBuffer, pixels);
- memcpy(frame->data(VideoFrame::kUPlane), buffer->pBuffer + pixels,
- pixels / 4);
- memcpy(frame->data(VideoFrame::kVPlane),
- buffer->pBuffer + pixels + pixels / 4,
- pixels / 4);
- }
+ // We should not deliver buffers to the renderer while the decoder is
+ // flushing if the decoder provides the buffer allocator.
+ if (kClientFlushing == client_state_ && !uses_egl_image_) return;
frame->SetTimestamp(base::TimeDelta::FromMicroseconds(buffer->nTimeStamp));
frame->SetDuration(frame->GetTimestamp() - last_pts_);
last_pts_ = frame->GetTimestamp();
-
event_handler_->OnFillBufferCallback(frame);
+ output_pending_request_--;
}
void OmxVideoDecodeEngine::OnStopDone() {
@@ -346,7 +340,6 @@ void OmxVideoDecodeEngine::InitializeTask() {
il_state_ = kIlNone;
expected_il_state_ = kIlLoaded;
- input_port_enabled_ = true;
output_port_state_ = kPortEnabled;
if (!CreateComponent()) {
StopOnError();
@@ -587,7 +580,8 @@ void OmxVideoDecodeEngine::DoneSetStateIdle(OMX_STATETYPE state) {
// Event callback during initialization to handle DoneStateSet to executing
void OmxVideoDecodeEngine::DoneSetStateExecuting(OMX_STATETYPE state) {
DCHECK_EQ(message_loop_, MessageLoop::current());
- DCHECK_EQ(client_state_, kClientInitializing);
+ DCHECK(client_state_ == kClientInitializing ||
+ client_state_ == kClientFlushing);
DCHECK_EQ(OMX_StateExecuting, state);
DLOG(INFO) << "OMX video decode engine is in Executing";
@@ -606,13 +600,14 @@ void OmxVideoDecodeEngine::DoneSetStateExecuting(OMX_STATETYPE state) {
// and outside allocator.
void OmxVideoDecodeEngine::FillThisBuffer(
scoped_refptr<VideoFrame> video_frame) {
-
- // TODO(wjia): merge buffer recycling for EGLImage and system memory path.
- if (!video_frame.get() || VideoFrame::TYPE_EGL_IMAGE != video_frame->type())
- return;
+ DCHECK(video_frame.get() && !video_frame->IsEndOfStream());
+ output_pending_request_++;
if (!CanAcceptOutput()) {
- event_handler_->OnFillBufferCallback(video_frame);
+ if (uses_egl_image_) { // return it to owner.
+ output_pending_request_--;
+ event_handler_->OnFillBufferCallback(video_frame);
+ }
return;
}
@@ -620,11 +615,17 @@ void OmxVideoDecodeEngine::FillThisBuffer(
if (omx_buffer) {
if (kClientRunning == client_state_) {
SendOutputBufferToComponent(omx_buffer);
- } else {
- available_output_frames_.push(omx_buffer);
+ } else if (kClientFlushing == client_state_) {
+ if (uses_egl_image_) { // return it to owner.
+ output_pending_request_--;
+ event_handler_->OnFillBufferCallback(video_frame);
+ }
+ if (InputPortFlushed() && OutputPortFlushed())
+ ComponentFlushDone();
}
} else {
DCHECK(!output_frames_allocated_);
+ DCHECK(uses_egl_image_);
output_frames_.push_back(std::make_pair(video_frame,
static_cast<OMX_BUFFERHEADERTYPE*>(NULL)));
}
@@ -690,7 +691,6 @@ void OmxVideoDecodeEngine::OnPortSettingsChangedRun(int port,
output_format.video_header.height = port_format.format.video.nFrameHeight;
output_format.video_header.width = port_format.format.video.nFrameWidth;
output_format.video_header.stride = port_format.format.video.nStride;
- // OnFormatChange(input_format, output_format);
output_buffer_count_ = port_format.nBufferCountActual;
output_buffer_size_ = port_format.nBufferSize;
@@ -794,7 +794,10 @@ void OmxVideoDecodeEngine::DeinitFromIdle(OMX_STATETYPE state) {
else
need_free_input_buffers_ = true;
- FreeOutputBuffers();
+ if (!output_buffers_at_component_)
+ FreeOutputBuffers();
+ else
+ need_free_output_buffers_ = true;
}
void OmxVideoDecodeEngine::DeinitFromLoaded(OMX_STATETYPE state) {
@@ -831,6 +834,11 @@ void OmxVideoDecodeEngine::StopOnError() {
DeinitFromIdle(OMX_StateIdle);
} else if (kIlLoaded == expected_il_state_) {
DeinitFromLoaded(OMX_StateLoaded);
+ } else if (kIlPause == expected_il_state_) {
+ // TODO(jiesun): Make sure this works.
+ DeinitFromExecuting(OMX_StateExecuting);
+ } else {
+ NOTREACHED();
}
}
@@ -862,7 +870,6 @@ bool OmxVideoDecodeEngine::AllocateInputBuffers() {
// allocation case is similar to EGLImage
bool OmxVideoDecodeEngine::AllocateOutputBuffers() {
DCHECK_EQ(message_loop_, MessageLoop::current());
- // DCHECK_EQ(output_buffer_count_, static_cast<int>(output_frames_.size()));
if (uses_egl_image_ && !output_frames_allocated_) {
DLOG(INFO) << "Output frames are not allocated yet";
@@ -872,30 +879,59 @@ bool OmxVideoDecodeEngine::AllocateOutputBuffers() {
for (int i = 0; i < output_buffer_count_; ++i) {
OMX_BUFFERHEADERTYPE* buffer;
+ scoped_refptr<VideoFrame> video_frame;
OMX_ERRORTYPE error;
if (uses_egl_image_) {
OutputFrame output_frame = output_frames_[i];
- scoped_refptr<VideoFrame> video_frame = output_frame.first;
+ video_frame = output_frame.first;
DCHECK(!output_frame.second);
error = OMX_UseEGLImage(component_handle_, &buffer, output_port_,
video_frame.get(), video_frame->private_buffer());
if (error != OMX_ErrorNone)
return false;
output_frames_[i].second = buffer;
- available_output_frames_.push(buffer);
} else {
- error =
- OMX_AllocateBuffer(component_handle_, &buffer, output_port_,
- NULL, output_buffer_size_);
+ error = OMX_AllocateBuffer(component_handle_, &buffer, output_port_,
+ NULL, output_buffer_size_);
if (error != OMX_ErrorNone)
return false;
- output_buffers_.push_back(buffer);
+ video_frame = CreateOmxBufferVideoFrame(buffer);
+ output_frames_.push_back(std::make_pair(video_frame, buffer));
+ buffer->pAppPrivate = video_frame.get();
}
}
return true;
}
+scoped_refptr<VideoFrame> OmxVideoDecodeEngine::CreateOmxBufferVideoFrame(
+ OMX_BUFFERHEADERTYPE* omx_buffer) {
+ scoped_refptr<VideoFrame> video_frame;
+ uint8* data[VideoFrame::kMaxPlanes];
+ int32 strides[VideoFrame::kMaxPlanes];
+
+ memset(data, 0, sizeof(data));
+ memset(strides, 0, sizeof(strides));
+ // TODO(jiesun): assumes 4:2:0 chroma format with 3 planes.
+ data[0] = omx_buffer->pBuffer;
+ data[1] = data[0] + width_ * height_;
+ data[2] = data[1] + width_ * height_ / 4;
+ strides[0] = width_;
+ strides[1] = strides[2] = width_ >> 1;
+
+ VideoFrame::CreateFrameExternal(
+ VideoFrame::TYPE_OMXBUFFERHEAD,
+ VideoFrame::YV12,
+ width_, height_, 3,
+ data, strides,
+ StreamSample::kInvalidTimestamp,
+ StreamSample::kInvalidTimestamp,
+ omx_buffer,
+ &video_frame);
+
+ return video_frame;
+}
+
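A worked example of the plane layout CreateOmxBufferVideoFrame() computes, for an illustrative 320x240 buffer (4:2:0, three planes):

    // Y plane:     data[0] = pBuffer,         stride 320, 320*240 = 76800 B.
    // 1st chroma:  data[1] = pBuffer + 76800, stride 160, 76800/4 = 19200 B.
    // 2nd chroma:  data[2] = pBuffer + 96000, stride 160, 19200 B.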
void OmxVideoDecodeEngine::FreeInputBuffers() {
DCHECK_EQ(message_loop_, MessageLoop::current());
@@ -916,26 +952,23 @@ void OmxVideoDecodeEngine::FreeInputBuffers() {
for (size_t i = 0; i < input_buffers_.size(); ++i)
OMX_FreeBuffer(component_handle_, input_port_, input_buffers_[i]);
input_buffers_.clear();
+
+ need_free_input_buffers_ = false;
}
void OmxVideoDecodeEngine::FreeOutputBuffers() {
DCHECK_EQ(message_loop_, MessageLoop::current());
// Calls to OMX to free buffers.
- if (uses_egl_image_) {
- for (size_t i = 0; i < output_frames_.size(); ++i) {
- OMX_BUFFERHEADERTYPE* omx_buffer = output_frames_[i].second;
- if (omx_buffer) {
- OMX_FreeBuffer(component_handle_, output_port_, omx_buffer);
- }
- }
- output_frames_.clear();
- output_frames_allocated_ = false;
- } else {
- for (size_t i = 0; i < output_buffers_.size(); ++i)
- OMX_FreeBuffer(component_handle_, output_port_, output_buffers_[i]);
- output_buffers_.clear();
+ for (size_t i = 0; i < output_frames_.size(); ++i) {
+ OMX_BUFFERHEADERTYPE* omx_buffer = output_frames_[i].second;
+ CHECK(omx_buffer);
+ OMX_FreeBuffer(component_handle_, output_port_, omx_buffer);
}
+ output_frames_.clear();
+ output_frames_allocated_ = false;
+
+ need_free_output_buffers_ = false;
}
bool OmxVideoDecodeEngine::ConfigureIOPorts() {
@@ -982,8 +1015,7 @@ bool OmxVideoDecodeEngine::CanEmptyBuffer() {
// We can call empty buffer while we are in executing and EOS has
// not been sent
return (il_state_ == kIlExecuting &&
- !input_has_fed_eos_ &&
- input_port_enabled_);
+ !input_has_fed_eos_);
}
bool OmxVideoDecodeEngine::CanFillBuffer() {
@@ -1007,7 +1039,8 @@ bool OmxVideoDecodeEngine::CanAcceptOutput() {
return (kClientError != client_state_ &&
kClientStopping != client_state_ &&
kClientStopped != client_state_ &&
- output_port_state_ == kPortEnabled);
+ output_port_state_ == kPortEnabled &&
+ !output_eos_);
}
// TODO(wjia): There are several things need to be done here:
@@ -1043,29 +1076,14 @@ void OmxVideoDecodeEngine::EmptyBufferTask() {
}
}
-void OmxVideoDecodeEngine::FulfillOneRead() {
- DCHECK_EQ(message_loop_, MessageLoop::current());
-
- if (!output_frames_ready_.empty()) {
- OMX_BUFFERHEADERTYPE *buffer = output_frames_ready_.front();
- output_frames_ready_.pop();
-
- // If the buffer is real then send it to downstream.
- // Otherwise if it is an end-of-stream buffer then just drop it.
- if (buffer->nFlags & OMX_BUFFERFLAG_EOS) {
- // We intentionally drop last frame because it could be garbage.
- FinishFillBuffer(static_cast<OMX_BUFFERHEADERTYPE*>(NULL));
- } else {
- FinishFillBuffer(buffer);
- // In non-EGLImage path, OMX_BUFFERHEADERTYPEs are immediately recycled.
- if (!uses_egl_image_) SendOutputBufferToComponent(buffer);
- }
- }
-}
-
void OmxVideoDecodeEngine::InitialReadBuffer() {
DCHECK_EQ(message_loop_, MessageLoop::current());
+ input_queue_has_eos_ = false;
+ input_has_fed_eos_ = false;
+ output_eos_ = false;
+
+ DLOG(INFO) << "OmxVideoDecodeEngine::InitialReadBuffer";
for (size_t i = 0; i < free_input_buffers_.size(); i++)
FinishEmptyBuffer(NULL);
}
@@ -1077,28 +1095,12 @@ void OmxVideoDecodeEngine::InitialFillBuffer() {
if (!CanFillBuffer())
return;
+ DLOG(INFO) << "OmxVideoDecodeEngine::InitialFillBuffer";
+
// Ask the decoder to fill the output buffers.
- if (uses_egl_image_) {
- while (!available_output_frames_.empty()) {
- OMX_BUFFERHEADERTYPE* omx_buffer = available_output_frames_.front();
- available_output_frames_.pop();
- SendOutputBufferToComponent(omx_buffer);
- }
- } else {
- for (size_t i = 0; i < output_buffers_.size(); ++i) {
- OMX_BUFFERHEADERTYPE* omx_buffer = output_buffers_[i];
- omx_buffer->nOutputPortIndex = output_port_;
- // Need to clear the EOS flag.
- omx_buffer->nFlags &= ~OMX_BUFFERFLAG_EOS;
- omx_buffer->pAppPrivate = this;
- OMX_ERRORTYPE ret = OMX_FillThisBuffer(component_handle_, omx_buffer);
-
- if (OMX_ErrorNone != ret) {
- LOG(ERROR) << "OMX_FillThisBuffer() failed with result " << ret;
- client_state_ = kClientError;
- return;
- }
- }
+ for (uint32 i = 0; i < output_frames_.size(); ++i) {
+ OMX_BUFFERHEADERTYPE* omx_buffer = output_frames_[i].second;
+ SendOutputBufferToComponent(omx_buffer);
}
}
@@ -1150,6 +1152,8 @@ void OmxVideoDecodeEngine::SendOutputBufferToComponent(
// clear EOS flag.
omx_buffer->nFlags &= ~OMX_BUFFERFLAG_EOS;
+ omx_buffer->nOutputPortIndex = output_port_;
+ output_buffers_at_component_++;
OMX_ERRORTYPE ret = OMX_FillThisBuffer(component_handle_, omx_buffer);
if (OMX_ErrorNone != ret) {
@@ -1189,8 +1193,10 @@ void OmxVideoDecodeEngine::EmptyBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
free_input_buffers_.push(buffer);
input_buffers_at_component_--;
- if (need_free_input_buffers_ && !input_buffers_at_component_)
+ if (need_free_input_buffers_ && !input_buffers_at_component_) {
FreeInputBuffers();
+ return;
+ }
// Try to feed more data into the decoder.
EmptyBufferTask();
@@ -1202,9 +1208,24 @@ void OmxVideoDecodeEngine::EmptyBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
void OmxVideoDecodeEngine::FillBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_GT(output_buffers_at_component_, 0);
+
+ output_buffers_at_component_--;
- if (!CanAcceptOutput())
+ if (need_free_output_buffers_ && !output_buffers_at_component_) {
+ FreeOutputBuffers();
return;
+ }
+
+ if (!CanAcceptOutput()) {
+ if (uses_egl_image_) {
+ scoped_refptr<VideoFrame> frame;
+ frame = static_cast<VideoFrame*>(buffer->pAppPrivate);
+ event_handler_->OnFillBufferCallback(frame);
+ output_pending_request_--;
+ }
+ return;
+ }
// This buffer is received with decoded frame. Enqueue it and make it
// ready to be consumed by reads.
@@ -1214,12 +1235,14 @@ void OmxVideoDecodeEngine::FillBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
DLOG(INFO) << "Output has EOS";
}
- // TODO(jiesun): return this buffer to allocator?
- if (client_state_ != kClientFlushing)
- output_frames_ready_.push(buffer);
+ FinishFillBuffer(buffer);
- // Try to fulfill one read request.
- FulfillOneRead();
+ if (buffer->nFlags & OMX_BUFFERFLAG_EOS) {
+ // Signal end of stream.
+ scoped_refptr<VideoFrame> frame;
+ VideoFrame::CreateEmptyFrame(&frame);
+ event_handler_->OnFillBufferCallback(frame);
+ }
if (client_state_ == kClientFlushing &&
InputPortFlushed() && OutputPortFlushed())
@@ -1246,7 +1269,7 @@ void OmxVideoDecodeEngine::EventHandlerCompleteTask(OMX_EVENTTYPE event,
} else if (cmd == OMX_CommandFlush) {
(this->*OnFlushEventFunc)(data2);
} else {
- LOG(ERROR) << "Unknown command completed\n";
+ LOG(ERROR) << "Unknown command completed\n" << data1;
}
break;
}
diff --git a/media/filters/omx_video_decode_engine.h b/media/filters/omx_video_decode_engine.h
index 7b5e0134..da3fe52 100644
--- a/media/filters/omx_video_decode_engine.h
+++ b/media/filters/omx_video_decode_engine.h
@@ -5,8 +5,6 @@
#ifndef MEDIA_FILTERS_OMX_VIDEO_DECODE_ENGINE_H_
#define MEDIA_FILTERS_OMX_VIDEO_DECODE_ENGINE_H_
-#include <functional>
-#include <list>
#include <queue>
#include <vector>
@@ -20,15 +18,8 @@
#include "third_party/openmax/il/OMX_Core.h"
#include "third_party/openmax/il/OMX_Video.h"
-class MessageLoop;
-
-// FFmpeg types.
-struct AVStream;
-
namespace media {
-class Buffer;
-
class OmxVideoDecodeEngine : public VideoDecodeEngine {
public:
OmxVideoDecodeEngine();
@@ -80,9 +71,6 @@ class OmxVideoDecodeEngine : public VideoDecodeEngine {
// calls into other classes
void FinishEmptyBuffer(scoped_refptr<Buffer> buffer);
- void OnFormatChange(
- const OmxConfigurator::MediaFormat& input_format,
- const OmxConfigurator::MediaFormat& output_format);
void FinishFillBuffer(OMX_BUFFERHEADERTYPE* buffer);
// Helper method to perform tasks when this object is stopped.
void OnStopDone();
@@ -135,9 +123,6 @@ class OmxVideoDecodeEngine : public VideoDecodeEngine {
// Method to send input buffers to component
void EmptyBufferTask();
- // Take one decoded buffer to fulfill one read request.
- void FulfillOneRead();
-
// Method doing initial reads to get bit stream from demuxer.
void InitialReadBuffer();
@@ -186,6 +171,10 @@ class OmxVideoDecodeEngine : public VideoDecodeEngine {
void (OmxVideoDecodeEngine::*OnStateSetEventFunc)(OMX_STATETYPE state);
void (OmxVideoDecodeEngine::*OnFlushEventFunc)(int port);
+ // Helper function
+ scoped_refptr<VideoFrame> CreateOmxBufferVideoFrame(
+ OMX_BUFFERHEADERTYPE* omx_buffer);
+
size_t width_;
size_t height_;
@@ -201,10 +190,11 @@ class OmxVideoDecodeEngine : public VideoDecodeEngine {
bool input_has_fed_eos_;
bool input_port_flushed_;
- std::vector<OMX_BUFFERHEADERTYPE*> output_buffers_;
int output_buffer_count_;
int output_buffer_size_;
int output_port_;
+ int output_buffers_at_component_;
+ int output_pending_request_;
bool output_eos_;
bool output_port_flushed_;
bool uses_egl_image_;
@@ -230,19 +220,20 @@ class OmxVideoDecodeEngine : public VideoDecodeEngine {
// OMX_EmptyThisBuffer() call.
std::queue<OMX_BUFFERHEADERTYPE*> available_input_buffers_;
- // flag for freeing input buffers
+ // Flags for freeing input/output buffers.
bool need_free_input_buffers_;
+ bool need_free_output_buffers_;
+
+ // Ensures the flush callback is invoked only once.
+ bool flush_pending_;
// For output buffer recycling cases.
typedef std::pair<scoped_refptr<VideoFrame>,
OMX_BUFFERHEADERTYPE*> OutputFrame;
std::vector<OutputFrame> output_frames_;
- std::queue<OMX_BUFFERHEADERTYPE*> available_output_frames_;
- std::queue<OMX_BUFFERHEADERTYPE*> output_frames_ready_;
bool output_frames_allocated_;
// port related
- bool input_port_enabled_;
bool need_setup_output_port_;
OmxIlPortState output_port_state_;
VideoDecodeEngine::EventHandler* event_handler_;
diff --git a/media/filters/video_renderer_base.cc b/media/filters/video_renderer_base.cc
index 166c992..c9ede3a 100644
--- a/media/filters/video_renderer_base.cc
+++ b/media/filters/video_renderer_base.cc
@@ -6,26 +6,12 @@
#include "media/base/buffers.h"
#include "media/base/callback.h"
#include "media/base/filter_host.h"
+#include "media/base/limits.h"
#include "media/base/video_frame.h"
#include "media/filters/video_renderer_base.h"
namespace media {
-// Limit our read ahead to at least 3 frames. One frame is typically in flux at
-// all times, as in frame n is discarded at the top of ThreadMain() while frame
-// (n + kMaxFrames) is being asynchronously fetched. The remaining two frames
-// allow us to advance the current frame as well as read the timestamp of the
-// following frame for more accurate timing.
-//
-// Increasing this number beyond 3 simply creates a larger buffer to work with
-// at the expense of memory (~0.5MB and ~1.3MB per frame for 480p and 720p
-// resolutions, respectively). This can help on lower-end systems if there are
-// difficult sections in the movie and decoding slows down.
-//
-// Set to 4 because some vendor's driver doesn't allow buffer count to go below
-// preset limit, e.g., EGLImage path.
-static const size_t kMaxFrames = 4;
-
// This equates to ~16.67 fps, which is just slow enough to be tolerable when
// our video renderer is ahead of the audio playback.
//
@@ -91,7 +77,7 @@ bool VideoRendererBase::ParseMediaFormat(
void VideoRendererBase::Play(FilterCallback* callback) {
AutoLock auto_lock(lock_);
- DCHECK(kPaused == state_ || kFlushing == state_);
+ DCHECK_EQ(kPrerolled, state_);
scoped_ptr<FilterCallback> c(callback);
state_ = kPlaying;
callback->Run();
@@ -99,38 +85,29 @@ void VideoRendererBase::Play(FilterCallback* callback) {
void VideoRendererBase::Pause(FilterCallback* callback) {
AutoLock auto_lock(lock_);
- DCHECK(state_ == kPlaying || state_ == kEnded);
+ DCHECK_NE(state_, kUninitialized);
AutoCallbackRunner done_runner(callback);
state_ = kPaused;
}
void VideoRendererBase::Flush(FilterCallback* callback) {
- DCHECK(state_ == kPaused);
+ DCHECK_EQ(state_, kPaused);
AutoLock auto_lock(lock_);
flush_callback_.reset(callback);
state_ = kFlushing;
- // Filter is considered paused when we've finished all pending reads, which
- // implies all buffers are returned to owner in Decoder/Renderer. Renderer
- // is considered paused with one more contingency that |pending_paint_| is
- // false, such that no client of us is holding any reference to VideoFrame.
- if (pending_reads_ == 0 && pending_paint_ == false) {
- flush_callback_->Run();
- flush_callback_.reset();
+ if (pending_paint_ == false)
FlushBuffers();
- }
}
void VideoRendererBase::Stop(FilterCallback* callback) {
+ DCHECK_EQ(pending_reads_, 0);
+
{
AutoLock auto_lock(lock_);
state_ = kStopped;
- // TODO(jiesun): move this to flush.
- // TODO(jiesun): we should wait until pending_paint_ is false;
- FlushBuffers();
-
// Clean up our thread if present.
if (thread_) {
// Signal the thread since it's possible to get stopped with the video
@@ -155,26 +132,25 @@ void VideoRendererBase::SetPlaybackRate(float playback_rate) {
void VideoRendererBase::Seek(base::TimeDelta time, FilterCallback* callback) {
AutoLock auto_lock(lock_);
- DCHECK(kPaused == state_ || kFlushing == state_);
- DCHECK_EQ(0u, pending_reads_) << "Pending reads should have completed";
- state_ = kSeeking;
- seek_callback_.reset(callback);
- seek_timestamp_ = time;
-
- // Throw away everything and schedule our reads.
- // TODO(jiesun): this should be guaranteed by pause/flush before seek happen.
- frames_queue_ready_.clear();
- frames_queue_done_.clear();
- for (size_t i = 0; i < kMaxFrames; ++i) {
- // TODO(jiesun): this is dummy read for ffmpeg path until we truely recycle
- // in that path.
- scoped_refptr<VideoFrame> null_frame;
- frames_queue_done_.push_back(null_frame);
+ // There is a race between filters receiving SeekTask(): we could get a
+ // buffer from the decoder before Seek() is called on us, so we do the
+ // following:
+ //   kFlushed => (receive first buffer or Seek())  => kSeeking
+ //   kSeeking => (receive enough buffers)          => kPrerolled
+ DCHECK(kPrerolled == state_ || kFlushed == state_ || kSeeking == state_);
+
+ if (state_ == kPrerolled) {
+ // We already have enough buffers from the decoder.
+ callback->Run();
+ delete callback;
+ } else {
+ // Otherwise we are either kFlushed or kSeeking without enough buffers;
+ // save the callback and run it later.
+ state_ = kSeeking;
+ seek_callback_.reset(callback);
}
- // TODO(jiesun): if EGL image path make sure those video frames are already in
- // frames_queue_done_, we could remove FillThisBuffer call from derived class.
- // But currently that is trigger by first paint(), which is bad.
+ seek_timestamp_ = time;
ScheduleRead_Locked();
}
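The race described in the comment above reduces to a small state machine; a hedged summary of the renderer transitions this patch introduces (arrows are events, not method calls):

    kFlushed   --(first buffer arrives or Seek())-->          kSeeking
    kSeeking   --(kMaxVideoFrames queued or end of stream)--> kPrerolled
    kPrerolled --(Play())-->                                  kPlaying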
@@ -185,7 +161,7 @@ void VideoRendererBase::Initialize(VideoDecoder* decoder,
DCHECK(callback);
DCHECK_EQ(kUninitialized, state_);
decoder_ = decoder;
- scoped_ptr<FilterCallback> c(callback);
+ AutoCallbackRunner done_runner(callback);
decoder_->set_fill_buffer_done_callback(
NewCallback(this, &VideoRendererBase::OnFillBufferDone));
@@ -195,7 +171,7 @@ void VideoRendererBase::Initialize(VideoDecoder* decoder,
&surface_format_,
&width_, &height_)) {
host()->SetError(PIPELINE_ERROR_INITIALIZATION_FAILED);
- callback->Run();
+ state_ = kError;
return;
}
host()->SetVideoSize(width_, height_);
@@ -205,19 +181,21 @@ void VideoRendererBase::Initialize(VideoDecoder* decoder,
// we're holding the lock?
if (!OnInitialize(decoder)) {
host()->SetError(PIPELINE_ERROR_INITIALIZATION_FAILED);
- callback->Run();
+ state_ = kError;
return;
}
- // We're all good! Consider ourselves paused (ThreadMain() should never
+ // We're all good! Consider ourselves flushed. (ThreadMain() should never
// see us in the kUninitialized state).
- state_ = kPaused;
+ // Since there is an initial Seek, we consider ourselves flushed because
+ // we have not populated any buffers yet.
+ state_ = kFlushed;
// Create our video thread.
if (!PlatformThread::Create(0, this, &thread_)) {
NOTREACHED() << "Video thread creation failed";
host()->SetError(PIPELINE_ERROR_INITIALIZATION_FAILED);
- callback->Run();
+ state_ = kError;
return;
}
@@ -227,8 +205,6 @@ void VideoRendererBase::Initialize(VideoDecoder* decoder,
::SetThreadPriority(thread_, THREAD_PRIORITY_ABOVE_NORMAL);
#endif // defined(OS_WIN)
- // Finally, execute the start callback.
- callback->Run();
}
bool VideoRendererBase::HasEnded() {
@@ -266,8 +242,20 @@ void VideoRendererBase::ThreadMain() {
remaining_time = CalculateSleepDuration(next_frame, playback_rate_);
}
+ // TODO(jiesun): I do not think we should wake up every 10 ms.
+ // We should only wake up when one of the following is true:
+ // 1. frame arrival (use an event);
+ // 2. state_ change (use an event);
+ // 3. playback_rate_ change (use an event);
+ // 4. the next frame's PTS is due (use a timeout).
if (remaining_time > kIdleTimeDelta)
remaining_time = kIdleTimeDelta;
+
+ // We cannot do anything until the next frame arrives, but we do not
+ // want to spin while waiting for it.
+ if (remaining_time.InMicroseconds() < 0 && frames_queue_ready_.empty())
+ remaining_time = kIdleTimeDelta;
+
if (remaining_time.InMicroseconds() > 0)
frame_available_.TimedWait(remaining_time);
@@ -323,18 +311,8 @@ void VideoRendererBase::ThreadMain() {
}
}
if (timeout_frame.get()) {
- // TODO(jiesun): we should really merge the following branch. That way
- // we will remove the last EGLImage hack in this class. but the
- // |pending_reads_| prevents use to do so (until we had implemented
- // flush logic and get rid of pending reads.)
- if (uses_egl_image() &&
- media::VideoFrame::TYPE_EGL_IMAGE == timeout_frame->type()) {
- decoder_->FillThisBuffer(timeout_frame);
- } else {
- // TODO(jiesun): could this be merged with EGLimage path?
- frames_queue_done_.push_back(timeout_frame);
- ScheduleRead_Locked();
- }
+ frames_queue_done_.push_back(timeout_frame);
+ ScheduleRead_Locked();
}
if (new_frame_available) {
AutoUnlock auto_unlock(lock_);
@@ -349,15 +327,13 @@ void VideoRendererBase::GetCurrentFrame(scoped_refptr<VideoFrame>* frame_out) {
AutoLock auto_lock(lock_);
DCHECK(!pending_paint_);
- if (state_ == kStopped || !current_frame_.get() ||
- current_frame_->IsEndOfStream()) {
+ if (!current_frame_.get() || current_frame_->IsEndOfStream()) {
*frame_out = NULL;
return;
}
// We should have initialized and have the current frame.
- DCHECK(state_ == kPaused || state_ == kSeeking || state_ == kPlaying ||
- state_ == kFlushing || state_ == kEnded);
+ DCHECK(state_ != kUninitialized && state_ != kStopped && state_ != kError);
*frame_out = current_frame_;
pending_paint_ = true;
}
@@ -375,29 +351,41 @@ void VideoRendererBase::PutCurrentFrame(scoped_refptr<VideoFrame> frame) {
// frame is timed-out. We will wake up our main thread to advance the current
// frame when this is true.
frame_available_.Signal();
- if (state_ == kFlushing && pending_reads_ == 0 && flush_callback_.get()) {
- // No more pending reads! We're now officially "paused".
+ if (state_ == kFlushing)
FlushBuffers();
- flush_callback_->Run();
- flush_callback_.reset();
- }
}
void VideoRendererBase::OnFillBufferDone(scoped_refptr<VideoFrame> frame) {
AutoLock auto_lock(lock_);
- // TODO(ajwong): Work around cause we don't synchronize on stop. Correct
- // fix is to resolve http://crbug.com/16059.
- if (state_ == kStopped) {
- // TODO(jiesun): Remove this when flush before stop landed!
+  // The decoder could reach its seek state before our Seek() gets called;
+  // in that case we enter kSeeking here.
+ if (kFlushed == state_)
+ state_ = kSeeking;
+
+ // Synchronous flush between filters should prevent this from happening.
+ DCHECK_NE(state_, kStopped);
+ if (frame.get() && !frame->IsEndOfStream())
+ --pending_reads_;
+
+ DCHECK(state_ != kUninitialized && state_ != kStopped && state_ != kError);
+
+ if (state_ == kPaused || state_ == kFlushing) {
+    // The decoder is flushing out stale video frames; we will not display
+    // them.
+ if (frame.get() && !frame->IsEndOfStream())
+ frames_queue_done_.push_back(frame);
+ DCHECK_LE(frames_queue_done_.size(),
+ static_cast<size_t>(Limits::kMaxVideoFrames));
+
+    // We exclude kPaused here because, in the paused state, we never
+    // transfer buffers back out. Nor do we flush buffers while the
+    // compositor holds a reference to our current video frame.
+ if (state_ == kFlushing && pending_paint_ == false)
+ FlushBuffers();
+
return;
}
- DCHECK(state_ == kPaused || state_ == kSeeking || state_ == kPlaying ||
- state_ == kFlushing || state_ == kEnded);
- DCHECK_GT(pending_reads_, 0u);
- --pending_reads_;
-
// Discard frames until we reach our desired seek timestamp.
if (state_ == kSeeking && !frame->IsEndOfStream() &&
(frame->GetTimestamp() + frame->GetDuration()) < seek_timestamp_) {
@@ -405,17 +393,18 @@ void VideoRendererBase::OnFillBufferDone(scoped_refptr<VideoFrame> frame) {
ScheduleRead_Locked();
} else {
frames_queue_ready_.push_back(frame);
- DCHECK_LE(frames_queue_ready_.size(), kMaxFrames);
+ DCHECK_LE(frames_queue_ready_.size(),
+ static_cast<size_t>(Limits::kMaxVideoFrames));
frame_available_.Signal();
}
// Check for our preroll complete condition.
if (state_ == kSeeking) {
- DCHECK(seek_callback_.get());
- if (frames_queue_ready_.size() == kMaxFrames || frame->IsEndOfStream()) {
+ if (frames_queue_ready_.size() == Limits::kMaxVideoFrames ||
+ frame->IsEndOfStream()) {
// We're paused, so make sure we update |current_frame_| to represent
// our new location.
- state_ = kPaused;
+ state_ = kPrerolled;
// Because we might remain paused (i.e., we were not playing before we
// received a seek), we can't rely on ThreadMain() to notify the subclass
@@ -428,18 +417,27 @@ void VideoRendererBase::OnFillBufferDone(scoped_refptr<VideoFrame> frame) {
}
OnFrameAvailable();
- seek_callback_->Run();
- seek_callback_.reset();
+      // If we reach the prerolled state before the pipeline calls Seek(),
+      // |seek_callback_| is not set; Seek() will then return immediately
+      // when it is eventually called.
+      if (seek_callback_.get()) {
+ seek_callback_->Run();
+ seek_callback_.reset();
+ }
}
} else if (state_ == kFlushing && pending_reads_ == 0 && !pending_paint_) {
- // No more pending reads! We're now officially "paused".
- if (flush_callback_.get()) {
- flush_callback_->Run();
- flush_callback_.reset();
- }
+ OnFlushDone();
}
}
+void VideoRendererBase::ReadInput(scoped_refptr<VideoFrame> frame) {
+  // We should never hand an empty frame or an end-of-stream frame back to
+  // the decoder.
+ DCHECK(frame.get() && !frame->IsEndOfStream());
+
+ decoder_->FillThisBuffer(frame);
+ ++pending_reads_;
+}
+
void VideoRendererBase::ScheduleRead_Locked() {
lock_.AssertAcquired();
DCHECK_NE(kEnded, state_);
@@ -449,9 +447,7 @@ void VideoRendererBase::ScheduleRead_Locked() {
while (!frames_queue_done_.empty()) {
scoped_refptr<VideoFrame> video_frame = frames_queue_done_.front();
frames_queue_done_.pop_front();
- decoder_->FillThisBuffer(video_frame);
- DCHECK_LT(pending_reads_, kMaxFrames);
- ++pending_reads_;
+ ReadInput(video_frame);
}
}
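Condensed from the OnFillBufferDone() and ReadInput() hunks above (not a verbatim copy): every container handed out through FillThisBuffer() is balanced by exactly one callback, so |pending_reads_| returns to zero precisely when all buffers are back with their owner.

    // Outbound (renderer -> decoder), inside ReadInput():
    decoder_->FillThisBuffer(frame);
    ++pending_reads_;

    // Inbound (decoder -> renderer), inside OnFillBufferDone():
    if (frame.get() && !frame->IsEndOfStream())
      --pending_reads_;  // EOS frames never balance a read.
    // kPaused/kFlushing: park the frame in |frames_queue_done_| and, if
    //   mid-flush with no painter holding |current_frame_|, FlushBuffers().
    // kSeeking: drop frames older than |seek_timestamp_| and re-request.
    // Otherwise: queue in |frames_queue_ready_| and Signal() the thread.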
@@ -470,6 +466,32 @@ void VideoRendererBase::FlushBuffers() {
frames_queue_done_.push_back(current_frame_);
}
current_frame_ = NULL;
+
+ if (decoder_->ProvidesBuffer()) {
+    // Flush all buffers out to the decoder.
+ ScheduleRead_Locked();
+ }
+
+ if (pending_reads_ == 0)
+ OnFlushDone();
+}
+
+void VideoRendererBase::OnFlushDone() {
+  // Check that all buffers have been returned to their owners.
+ if (decoder_->ProvidesBuffer()) {
+ DCHECK_EQ(frames_queue_done_.size(), 0u);
+ } else {
+ DCHECK_EQ(frames_queue_done_.size(),
+ static_cast<size_t>(Limits::kMaxVideoFrames));
+ }
+ DCHECK(!current_frame_.get());
+ DCHECK(frames_queue_ready_.empty());
+
+  if (flush_callback_.get()) {  // Ensures the callback is invoked only once.
+ flush_callback_->Run();
+ flush_callback_.reset();
+ }
+ state_ = kFlushed;
}
base::TimeDelta VideoRendererBase::CalculateSleepDuration(
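Putting the flush pieces together; a condensed restatement of FlushBuffers() and OnFlushDone() above, with editorial step numbers:

    // 1. FlushBuffers() moves every ready frame, plus |current_frame_| when
    //    no painter holds it, into |frames_queue_done_|.
    // 2. If the decoder owns the buffers, the done queue is recycled:
    if (decoder_->ProvidesBuffer())
      ScheduleRead_Locked();  // Each frame goes back via ReadInput().
    // 3. Once |pending_reads_| drains to zero, OnFlushDone() checks the
    //    queue invariants, runs |flush_callback_| exactly once, and enters
    //    kFlushed.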
diff --git a/media/filters/video_renderer_base.h b/media/filters/video_renderer_base.h
index 1aa42d2..b41c51e 100644
--- a/media/filters/video_renderer_base.h
+++ b/media/filters/video_renderer_base.h
@@ -107,6 +107,8 @@ class VideoRendererBase : public VideoRenderer,
return surface_type_ == media::VideoFrame::TYPE_EGL_IMAGE;
}
+ void ReadInput(scoped_refptr<VideoFrame> frame);
+
private:
  // Callback from the video decoder to deliver decoded video frames and to
  // decrement |pending_reads_|.
@@ -118,6 +120,9 @@ class VideoRendererBase : public VideoRenderer,
// Safe to call from any thread.
void ScheduleRead_Locked();
+  // Helper function to finish the "flush" operation.
+ void OnFlushDone();
+
  // Helper method that flushes all video frames in the "ready queue",
  // including the current frame, into the "done queue".
void FlushBuffers();
@@ -150,11 +155,36 @@ class VideoRendererBase : public VideoRenderer,
// always check |state_| to see if it was set to STOPPED after waking up!
ConditionVariable frame_available_;
+ // State transition Diagram of this class:
+ // [kUninitialized] -------> [kError]
+ // |
+ // | Initialize()
+ // V All frames returned
+ // +------[kFlushed]<----------------------[kFlushing]
+ // | | Seek() or upon ^
+ // | V got first frame |
+ // | [kSeeking] | Flush()
+ // | | |
+ // | V Got enough frames |
+ // | [kPrerolled]---------------------->[kPaused]
+ // | | Pause() ^
+ // | V Play() |
+ // | [kPlaying]---------------------------|
+ // | | Pause() ^
+ // | V Receive EOF frame. | Pause()
+ // | [kEnded]-----------------------------+
+ // | ^
+ // | |
+ // +-----> [kStopped] [Any state other than]
+ // [kUninitialized/kError]
+
// Simple state tracking variable.
enum State {
kUninitialized,
+ kPrerolled,
kPaused,
kFlushing,
+ kFlushed,
kSeeking,
kPlaying,
kEnded,
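If these transitions ever need to be enforced rather than just drawn, a debug helper along the following lines would encode the diagram. This is purely illustrative and not part of the patch; note the diagram does not draw a Play() resume edge from kPaused, so it is omitted here too:

    static bool IsLegalTransition(State from, State to) {
      switch (to) {
        case kFlushed:   return from == kUninitialized || from == kFlushing;
        case kSeeking:   return from == kFlushed;
        case kPrerolled: return from == kSeeking;
        case kPlaying:   return from == kPrerolled;
        case kPaused:    return from == kPrerolled || from == kPlaying ||
                                from == kEnded;
        case kFlushing:  return from == kPaused;
        case kEnded:     return from == kPlaying;
        case kStopped:   return from != kUninitialized && from != kError;
        case kError:     return from == kUninitialized;
        default:         return false;  // kUninitialized is never a target.
      }
    }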
@@ -169,12 +199,12 @@ class VideoRendererBase : public VideoRenderer,
// Previous time returned from the pipeline.
base::TimeDelta previous_time_;
- // Keeps track of our pending reads. We *must* have no pending reads before
- // executing the pause callback, otherwise we breach the contract that all
- // filters are idling.
- //
- // We use size_t since we compare against std::deque::size().
- size_t pending_reads_;
+  // Keeps track of our pending buffers. We *must* have no pending reads
+  // before executing the flush callback. We decrement it each time we
+  // receive a buffer and increment it each time we send a buffer out;
+  // therefore, if the decoder provides buffers, |pending_reads_| is always
+  // non-positive, and if the renderer provides buffers, it is always
+  // non-negative.
+ int pending_reads_;
bool pending_paint_;
float playback_rate_;
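A worked trace of the sign convention described in the comment above; in decoder-owned-buffer mode the first event is a delivery, in renderer-owned mode it is a request:

    // Decoder provides buffers (the recycling paths in this patch):
    //   start                       pending_reads_ ==  0
    //   OnFillBufferDone(frame)     pending_reads_ == -1   (received)
    //   ReadInput(frame) recycles   pending_reads_ ==  0   (sent back)
    //
    // Renderer provides buffers:
    //   ReadInput(frame)            pending_reads_ == +1   (sent out)
    //   OnFillBufferDone(frame)     pending_reads_ ==  0   (received)
    //
    // Either way, pending_reads_ == 0 is exactly the "all buffers returned
    // to their owners" condition OnFlushDone() relies on.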
diff --git a/media/filters/video_renderer_base_unittest.cc b/media/filters/video_renderer_base_unittest.cc
index ae21320..486c85e 100644
--- a/media/filters/video_renderer_base_unittest.cc
+++ b/media/filters/video_renderer_base_unittest.cc
@@ -6,6 +6,7 @@
#include "base/stl_util-inl.h"
#include "media/base/callback.h"
#include "media/base/data_buffer.h"
+#include "media/base/limits.h"
#include "media/base/mock_filter_host.h"
#include "media/base/mock_filters.h"
#include "media/base/video_frame.h"
@@ -75,6 +76,7 @@ class VideoRendererBaseTest : public ::testing::Test {
EXPECT_CALL(*renderer_, OnStop(NotNull()))
.WillOnce(DoAll(OnStop(), Return()))
.RetiresOnSaturation();
+
EXPECT_CALL(callback_, OnFilterCallback());
EXPECT_CALL(callback_, OnCallbackDestroyed());
renderer_->Stop(callback_.NewCallback());
@@ -175,6 +177,10 @@ TEST_F(VideoRendererBaseTest, Initialize_Successful) {
EXPECT_CALL(callback_, OnFilterCallback());
EXPECT_CALL(callback_, OnCallbackDestroyed());
+  // Initialize; we shouldn't have any reads yet.
+ renderer_->Initialize(decoder_, callback_.NewCallback());
+ EXPECT_EQ(0u, read_queue_.size());
+
// Verify the following expectations haven't run until we complete the reads.
EXPECT_CALL(*renderer_, CheckPoint(0));
@@ -185,27 +191,40 @@ TEST_F(VideoRendererBaseTest, Initialize_Successful) {
EXPECT_CALL(seek_callback, OnFilterCallback());
EXPECT_CALL(seek_callback, OnCallbackDestroyed());
- // Initialize, we shouldn't have any reads.
- renderer_->Initialize(decoder_, callback_.NewCallback());
- EXPECT_EQ(0u, read_queue_.size());
-
// Now seek to trigger prerolling.
renderer_->Seek(base::TimeDelta(), seek_callback.NewCallback());
- EXPECT_LT(0u, read_queue_.size());
// Verify our seek callback hasn't been executed yet.
renderer_->CheckPoint(0);
// Now satisfy the read requests. Our callback should be executed after
// exiting this loop.
- while (!read_queue_.empty()) {
+ for (unsigned int i = 0; i < Limits::kMaxVideoFrames; i++) {
const base::TimeDelta kZero;
scoped_refptr<VideoFrame> frame;
VideoFrame::CreateFrame(VideoFrame::RGB32, kWidth, kHeight, kZero,
kZero, &frame);
decoder_->fill_buffer_done_callback()->Run(frame);
- read_queue_.pop_front();
}
+
+ MockFilterCallback play_callback;
+ EXPECT_CALL(play_callback, OnFilterCallback());
+ EXPECT_CALL(play_callback, OnCallbackDestroyed());
+
+ renderer_->Play(play_callback.NewCallback());
+
+ StrictMock<MockFilterCallback> pause_callback;
+ EXPECT_CALL(pause_callback, OnFilterCallback());
+ EXPECT_CALL(pause_callback, OnCallbackDestroyed());
+ renderer_->Pause(pause_callback.NewCallback());
+
+ EXPECT_CALL(*decoder_, ProvidesBuffer())
+ .WillRepeatedly(Return(true));
+
+ StrictMock<MockFilterCallback> flush_callback;
+ EXPECT_CALL(flush_callback, OnFilterCallback());
+ EXPECT_CALL(flush_callback, OnCallbackDestroyed());
+ renderer_->Flush(flush_callback.NewCallback());
}
} // namespace media
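A possible follow-up assertion for this test, not part of the patch: since ProvidesBuffer() is stubbed to return true, Flush() should recycle every container back through FillThisBuffer(). Assuming the mock decoder records those calls in |read_queue_| (the queue the initialization check above inspects), the recycling could be verified directly:

    // Hypothetical check that the flush recycled all frame containers.
    EXPECT_EQ(static_cast<size_t>(Limits::kMaxVideoFrames),
              read_queue_.size());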
diff --git a/media/tools/player_x11/gles_video_renderer.cc b/media/tools/player_x11/gles_video_renderer.cc
index a702c77..724efa4 100644
--- a/media/tools/player_x11/gles_video_renderer.cc
+++ b/media/tools/player_x11/gles_video_renderer.cc
@@ -446,7 +446,7 @@ void GlesVideoRenderer::CreateTextureAndProgramEgl() {
egl_image,
&video_frame);
egl_frames_.push_back(std::make_pair(video_frame, texture));
- GetDecoder()->FillThisBuffer(video_frame);
+ ReadInput(video_frame);
}
GLuint program = glCreateProgram();