summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorakalin@chromium.org <akalin@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2010-09-03 00:10:10 +0000
committerakalin@chromium.org <akalin@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2010-09-03 00:10:10 +0000
commit5afac58e83e7cb072ef21d0d4cf9c103eff97a3b (patch)
treeb13e106b51af3c4fbfb87200aad67a625721a234
parent8315330bd8b37e9cea6a085cc612976e22b81924 (diff)
downloadchromium_src-5afac58e83e7cb072ef21d0d4cf9c103eff97a3b.zip
chromium_src-5afac58e83e7cb072ef21d0d4cf9c103eff97a3b.tar.gz
chromium_src-5afac58e83e7cb072ef21d0d4cf9c103eff97a3b.tar.bz2
Revert 58429, 58428, 58427, 58423.
TBR=hclam@chromium.org Review URL: http://codereview.chromium.org/3337009 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@58434 0039d316-1c4b-4281-b951-d872f2087c98
-rw-r--r--chrome/gpu/DEPS3
-rw-r--r--chrome/gpu/gpu_video_decoder.h3
-rw-r--r--media/filters/ffmpeg_video_allocator.cc179
-rw-r--r--media/filters/ffmpeg_video_allocator.h94
-rw-r--r--media/filters/ffmpeg_video_decode_engine.cc359
-rw-r--r--media/filters/ffmpeg_video_decode_engine.h85
-rw-r--r--media/filters/ffmpeg_video_decode_engine_unittest.cc293
-rw-r--r--media/filters/ffmpeg_video_decoder.cc4
-rw-r--r--media/filters/ffmpeg_video_decoder.h2
-rw-r--r--media/filters/ffmpeg_video_decoder_unittest.cc2
-rw-r--r--media/filters/omx_video_decode_engine.cc1339
-rw-r--r--media/filters/omx_video_decode_engine.h246
-rw-r--r--media/filters/omx_video_decoder.cc2
-rw-r--r--media/filters/omx_video_decoder.h2
-rw-r--r--media/filters/video_decode_engine.h128
-rw-r--r--media/media.gyp18
-rw-r--r--media/mf/mft_h264_decoder.h2
-rw-r--r--media/mf/test/mft_h264_decoder_unittest.cc2
-rw-r--r--media/omx/omx_codec_unittest.cc4
-rw-r--r--media/tools/omx_test/omx_test.cc2
20 files changed, 2748 insertions, 21 deletions
diff --git a/chrome/gpu/DEPS b/chrome/gpu/DEPS
index 9597b6b..03e6502 100644
--- a/chrome/gpu/DEPS
+++ b/chrome/gpu/DEPS
@@ -2,5 +2,6 @@ include_rules = [
"+chrome/app",
"+gpu/command_buffer",
"+media/base",
- "+media/video",
+ "+media/filters",
]
+
diff --git a/chrome/gpu/gpu_video_decoder.h b/chrome/gpu/gpu_video_decoder.h
index 977cd02..caed871 100644
--- a/chrome/gpu/gpu_video_decoder.h
+++ b/chrome/gpu/gpu_video_decoder.h
@@ -10,7 +10,7 @@
#include "base/ref_counted.h"
#include "base/scoped_ptr.h"
#include "chrome/common/gpu_video_common.h"
-#include "media/video/video_decode_engine.h"
+#include "media/filters/video_decode_engine.h"
#include "ipc/ipc_channel.h"
class GpuChannel;
@@ -93,3 +93,4 @@ class GpuVideoDecoder
};
#endif // CHROME_GPU_GPU_VIDEO_DECODER_H_
+
diff --git a/media/filters/ffmpeg_video_allocator.cc b/media/filters/ffmpeg_video_allocator.cc
new file mode 100644
index 0000000..63665f7
--- /dev/null
+++ b/media/filters/ffmpeg_video_allocator.cc
@@ -0,0 +1,179 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/filters/ffmpeg_video_allocator.h"
+
+#include "base/logging.h"
+#include "media/ffmpeg/ffmpeg_common.h"
+
+// Because Chromium could be built with an FFMPEG version other than FFMPEG-MT
+// (via the GYP_DEFINES variable "use-system-ffmpeg"), the following code will
+// not build with vanilla FFMPEG. We fall back to "disable direct
+// rendering" when that happens.
+// TODO(jiesun): Actually we could do better than this: we should modify the
+// following code to work with vanilla FFMPEG.
+
+namespace media {
+
+FFmpegVideoAllocator::FFmpegVideoAllocator()
+ : get_buffer_(NULL),
+ release_buffer_(NULL) {
+}
+
+void FFmpegVideoAllocator::Initialize(AVCodecContext* codec_context,
+ VideoFrame::Format surface_format) {
+#ifdef FF_THREAD_FRAME // Only defined in FFMPEG-MT.
+ surface_format_ = surface_format;
+ get_buffer_ = codec_context->get_buffer;
+ release_buffer_ = codec_context->release_buffer;
+ codec_context->get_buffer = AllocateBuffer;
+ codec_context->release_buffer = ReleaseBuffer;
+ codec_context->opaque = this;
+#endif
+}
+
+void FFmpegVideoAllocator::Stop(AVCodecContext* codec_context) {
+#ifdef FF_THREAD_FRAME // Only defined in FFMPEG-MT.
+  // Restore default buffer allocator functions.
+  // This does not actually work, because in ffmpeg-mt there are
+  // multiple codec_context copies (one per thread), and each context
+  // maintains its own internal buffer pools.
+ codec_context->get_buffer = get_buffer_;
+ codec_context->release_buffer = release_buffer_;
+
+ while (!frame_pool_.empty()) {
+ RefCountedAVFrame* ffmpeg_video_frame = frame_pool_.front();
+ frame_pool_.pop_front();
+ ffmpeg_video_frame->av_frame_.opaque = NULL;
+
+ // Reset per-context default buffer release functions.
+ ffmpeg_video_frame->av_frame_.owner->release_buffer = release_buffer_;
+ ffmpeg_video_frame->av_frame_.owner->get_buffer = get_buffer_;
+ delete ffmpeg_video_frame;
+ }
+ for (int i = 0; i < kMaxFFmpegThreads; ++i)
+ available_frames_[i].clear();
+ codec_index_map_.clear();
+#endif
+}
+
+void FFmpegVideoAllocator::DisplayDone(
+ AVCodecContext* codec_context,
+ scoped_refptr<VideoFrame> video_frame) {
+#ifdef FF_THREAD_FRAME // Only defined in FFMPEG-MT.
+ RefCountedAVFrame* ffmpeg_video_frame =
+ reinterpret_cast<RefCountedAVFrame*>(video_frame->private_buffer());
+ if (ffmpeg_video_frame->Release() == 0) {
+ int index = codec_index_map_[ffmpeg_video_frame->av_frame_.owner];
+ available_frames_[index].push_back(ffmpeg_video_frame);
+ }
+#endif
+}
+
+scoped_refptr<VideoFrame> FFmpegVideoAllocator::DecodeDone(
+ AVCodecContext* codec_context,
+ AVFrame* av_frame) {
+ scoped_refptr<VideoFrame> frame;
+#ifdef FF_THREAD_FRAME // Only defined in FFMPEG-MT.
+ RefCountedAVFrame* ffmpeg_video_frame =
+ reinterpret_cast<RefCountedAVFrame*>(av_frame->opaque);
+ ffmpeg_video_frame->av_frame_ = *av_frame;
+ ffmpeg_video_frame->AddRef();
+
+ VideoFrame::CreateFrameExternal(
+ VideoFrame::TYPE_SYSTEM_MEMORY, surface_format_,
+ codec_context->width, codec_context->height, 3,
+ av_frame->data,
+ av_frame->linesize,
+ StreamSample::kInvalidTimestamp,
+ StreamSample::kInvalidTimestamp,
+ ffmpeg_video_frame, // |private_buffer_|.
+ &frame);
+#endif
+ return frame;
+}
+
+int FFmpegVideoAllocator::AllocateBuffer(AVCodecContext* codec_context,
+ AVFrame* av_frame) {
+ FFmpegVideoAllocator* context =
+ reinterpret_cast<FFmpegVideoAllocator*>(codec_context->opaque);
+ return context->InternalAllocateBuffer(codec_context, av_frame);
+}
+
+void FFmpegVideoAllocator::ReleaseBuffer(AVCodecContext* codec_context,
+ AVFrame* av_frame) {
+ FFmpegVideoAllocator* context =
+ reinterpret_cast<FFmpegVideoAllocator*>(codec_context->opaque);
+ context->InternalReleaseBuffer(codec_context, av_frame);
+}
+
+int FFmpegVideoAllocator::InternalAllocateBuffer(
+ AVCodecContext* codec_context, AVFrame* av_frame) {
+#ifdef FF_THREAD_FRAME // Only defined in FFMPEG-MT.
+ // If |codec_context| is not yet known to us, we add it to our map.
+ if (codec_index_map_.find(codec_context) == codec_index_map_.end()) {
+ int next_index = codec_index_map_.size();
+ codec_index_map_[codec_context] = next_index;
+ CHECK_LE((int)codec_index_map_.size(), kMaxFFmpegThreads);
+ }
+
+ int index = codec_index_map_[codec_context];
+
+ RefCountedAVFrame* ffmpeg_video_frame;
+ if (available_frames_[index].empty()) {
+ int ret = get_buffer_(codec_context, av_frame);
+ CHECK_EQ(ret, 0);
+ ffmpeg_video_frame = new RefCountedAVFrame();
+ ffmpeg_video_frame->av_frame_ = *av_frame;
+ frame_pool_.push_back(ffmpeg_video_frame);
+ } else {
+ ffmpeg_video_frame = available_frames_[index].front();
+ available_frames_[index].pop_front();
+ // We assume |get_buffer| immediately after |release_buffer| will
+ // not trigger real buffer allocation. We just use it to fill the
+ // correct value inside |pic|.
+ release_buffer_(codec_context, &ffmpeg_video_frame->av_frame_);
+ get_buffer_(codec_context, av_frame);
+ ffmpeg_video_frame->av_frame_ = *av_frame;
+ }
+
+ av_frame->opaque = ffmpeg_video_frame;
+ av_frame->type = FF_BUFFER_TYPE_USER;
+ ffmpeg_video_frame->AddRef();
+#endif
+ return 0;
+}
+
+void FFmpegVideoAllocator::InternalReleaseBuffer(
+ AVCodecContext* codec_context, AVFrame* av_frame) {
+#ifdef FF_THREAD_FRAME // Only defined in FFMPEG-MT.
+ if (av_frame->opaque == NULL) {
+    // This could happen in two scenarios:
+    // 1. The FFMPEG-MT H264 codec seems to allocate one frame during
+    //    av_find_stream_info. This happens before we could even
+    //    install the custom allocator functions.
+    // 2. At cleanup time, we reset pic->opaque and destruct ourselves.
+    //    We could not use our own release_buffer function because
+    //    handle-delayed-release() is called after we get destructed.
+ release_buffer_(codec_context, av_frame);
+ return;
+ }
+
+ RefCountedAVFrame* ffmpeg_video_frame =
+ reinterpret_cast<RefCountedAVFrame*>(av_frame->opaque);
+ release_buffer_(codec_context, av_frame);
+
+ // This is required for get_buffer().
+ ffmpeg_video_frame->av_frame_.data[0] = NULL;
+ get_buffer_(codec_context, &ffmpeg_video_frame->av_frame_);
+ int index = codec_index_map_[codec_context];
+ if (ffmpeg_video_frame->Release() == 0)
+ available_frames_[index].push_back(ffmpeg_video_frame);
+
+ for(int k = 0; k < 4; ++k)
+ av_frame->data[k]=NULL;
+#endif
+}
+
+} // namespace media
diff --git a/media/filters/ffmpeg_video_allocator.h b/media/filters/ffmpeg_video_allocator.h
new file mode 100644
index 0000000..b72a1f1
--- /dev/null
+++ b/media/filters/ffmpeg_video_allocator.h
@@ -0,0 +1,94 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FILTERS_FFMPEG_VIDEO_ALLOCATOR_H_
+#define MEDIA_FILTERS_FFMPEG_VIDEO_ALLOCATOR_H_
+
+#include "base/logging.h"
+#include "base/scoped_ptr.h"
+#include "media/base/video_frame.h"
+#include "media/ffmpeg/ffmpeg_common.h"
+#include "media/ffmpeg/ffmpeg_util.h"
+
+#include <deque>
+#include <map>
+
+// FFmpeg types.
+struct AVCodecContext;
+struct AVFrame;
+struct AVStream;
+
+namespace media {
+
+class FFmpegVideoAllocator {
+ public:
+ FFmpegVideoAllocator();
+ virtual ~FFmpegVideoAllocator() {}
+
+ struct RefCountedAVFrame {
+ RefCountedAVFrame() : usage_count_(0) {}
+
+    // TODO(jiesun): we had commented out "DCHECK_EQ(usage_count_, 0);" here
+    // because of the way FFMPEG-MT handles release_buffer in a delayed fashion.
+    // We could probably wait for FFMPEG-MT to release all buffers before we
+    // call back the flush completion.
+ ~RefCountedAVFrame() {}
+
+ void AddRef() {
+ base::AtomicRefCountIncN(&usage_count_, 1);
+ }
+
+ bool Release() {
+ return base::AtomicRefCountDecN(&usage_count_, 1);
+ }
+
+ AVFrame av_frame_;
+ base::AtomicRefCount usage_count_;
+ };
+
+ static int AllocateBuffer(AVCodecContext* codec_context, AVFrame* av_frame);
+ static void ReleaseBuffer(AVCodecContext* codec_context, AVFrame* av_frame);
+
+ void Initialize(AVCodecContext* codec_context,
+ VideoFrame::Format surface_format);
+ void Stop(AVCodecContext* codec_context);
+
+ // DisplayDone() is called when renderer has finished using a frame.
+ void DisplayDone(AVCodecContext* codec_context,
+ scoped_refptr<VideoFrame> video_frame);
+
+  // DecodeDone() is called after avcodec_video_decode() finishes so that we
+  // can acquire a reference to the video frame before we hand it to the
+  // renderer.
+ scoped_refptr<VideoFrame> DecodeDone(AVCodecContext* codec_context,
+ AVFrame* av_frame);
+
+ private:
+ int InternalAllocateBuffer(AVCodecContext* codec_context, AVFrame* av_frame);
+ void InternalReleaseBuffer(AVCodecContext* codec_context, AVFrame* av_frame);
+
+ VideoFrame::Format surface_format_;
+
+ // This queue keeps reference count for all VideoFrame allocated.
+ std::deque<RefCountedAVFrame*> frame_pool_;
+
+  // These queues keep per-AVCodecContext VideoFrame allocations that
+  // are available for recycling.
+ static const int kMaxFFmpegThreads = 3;
+ std::deque<RefCountedAVFrame*> available_frames_[kMaxFFmpegThreads];
+
+  // This map is used to map from an AVCodecContext* to an index into
+  // |available_frames_|, because ffmpeg-mt maintains multiple
+  // AVCodecContexts (one per thread).
+ std::map<void*, int> codec_index_map_;
+
+  // These function pointers store the original ffmpeg AVCodecContext's
+  // get_buffer()/release_buffer() function pointers. We use these
+  // functions to delegate the allocation requests.
+ int (*get_buffer_)(struct AVCodecContext *c, AVFrame *pic);
+ void (*release_buffer_)(struct AVCodecContext *c, AVFrame *pic);
+};
+
+} // namespace media
+
+#endif // MEDIA_FILTERS_FFMPEG_VIDEO_ALLOCATOR_H_
diff --git a/media/filters/ffmpeg_video_decode_engine.cc b/media/filters/ffmpeg_video_decode_engine.cc
new file mode 100644
index 0000000..0a2f384
--- /dev/null
+++ b/media/filters/ffmpeg_video_decode_engine.cc
@@ -0,0 +1,359 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/filters/ffmpeg_video_decode_engine.h"
+
+#include "base/command_line.h"
+#include "base/string_number_conversions.h"
+#include "base/task.h"
+#include "media/base/buffers.h"
+#include "media/base/callback.h"
+#include "media/base/limits.h"
+#include "media/base/media_switches.h"
+#include "media/ffmpeg/ffmpeg_common.h"
+#include "media/ffmpeg/ffmpeg_util.h"
+#include "media/filters/ffmpeg_demuxer.h"
+#include "media/filters/ffmpeg_video_allocator.h"
+
+namespace media {
+
+FFmpegVideoDecodeEngine::FFmpegVideoDecodeEngine()
+ : codec_context_(NULL),
+ av_stream_(NULL),
+ event_handler_(NULL),
+ direct_rendering_(false),
+ pending_input_buffers_(0),
+ pending_output_buffers_(0),
+ output_eos_reached_(false),
+ flush_pending_(false) {
+}
+
+FFmpegVideoDecodeEngine::~FFmpegVideoDecodeEngine() {
+}
+
+void FFmpegVideoDecodeEngine::Initialize(
+ MessageLoop* message_loop,
+ VideoDecodeEngine::EventHandler* event_handler,
+ const VideoCodecConfig& config) {
+ allocator_.reset(new FFmpegVideoAllocator());
+
+  // Always try to use two threads for video decoding. There is little reason
+ // not to since current day CPUs tend to be multi-core and we measured
+ // performance benefits on older machines such as P4s with hyperthreading.
+ //
+ // Handling decoding on separate threads also frees up the pipeline thread to
+ // continue processing. Although it'd be nice to have the option of a single
+ // decoding thread, FFmpeg treats having one thread the same as having zero
+ // threads (i.e., avcodec_decode_video() will execute on the calling thread).
+ // Yet another reason for having two threads :)
+ static const int kDecodeThreads = 2;
+ static const int kMaxDecodeThreads = 16;
+
+ av_stream_ = static_cast<AVStream*>(config.opaque_context_);
+ codec_context_ = av_stream_->codec;
+ // Enable motion vector search (potentially slow), strong deblocking filter
+ // for damaged macroblocks, and set our error detection sensitivity.
+ codec_context_->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK;
+ codec_context_->error_recognition = FF_ER_CAREFUL;
+
+ AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id);
+
+ if (codec) {
+#ifdef FF_THREAD_FRAME // Only defined in FFMPEG-MT.
+ direct_rendering_ = codec->capabilities & CODEC_CAP_DR1 ? true : false;
+#endif
+ if (direct_rendering_) {
+ DLOG(INFO) << "direct rendering is used";
+ allocator_->Initialize(codec_context_, GetSurfaceFormat());
+ }
+ }
+
+ // TODO(fbarchard): Improve thread logic based on size / codec.
+ // TODO(fbarchard): Fix bug affecting video-cookie.html
+ int decode_threads = (codec_context_->codec_id == CODEC_ID_THEORA) ?
+ 1 : kDecodeThreads;
+
+ const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
+ std::string threads(cmd_line->GetSwitchValueASCII(switches::kVideoThreads));
+ if ((!threads.empty() &&
+ !base::StringToInt(threads, &decode_threads)) ||
+ decode_threads < 0 || decode_threads > kMaxDecodeThreads) {
+ decode_threads = kDecodeThreads;
+ }
+
+ // We don't allocate AVFrame on the stack since different versions of FFmpeg
+ // may change the size of AVFrame, causing stack corruption. The solution is
+ // to let FFmpeg allocate the structure via avcodec_alloc_frame().
+ av_frame_.reset(avcodec_alloc_frame());
+ VideoCodecInfo info;
+ info.success_ = false;
+ info.provides_buffers_ = true;
+ info.stream_info_.surface_type_ = VideoFrame::TYPE_SYSTEM_MEMORY;
+ info.stream_info_.surface_format_ = GetSurfaceFormat();
+ info.stream_info_.surface_width_ = config.width_;
+ info.stream_info_.surface_height_ = config.height_;
+
+ // If we do not have enough buffers, we will report error too.
+ bool buffer_allocated = true;
+ frame_queue_available_.clear();
+ if (!direct_rendering_) {
+ // Create output buffer pool when direct rendering is not used.
+ for (size_t i = 0; i < Limits::kMaxVideoFrames; ++i) {
+ scoped_refptr<VideoFrame> video_frame;
+ VideoFrame::CreateFrame(VideoFrame::YV12,
+ config.width_,
+ config.height_,
+ StreamSample::kInvalidTimestamp,
+ StreamSample::kInvalidTimestamp,
+ &video_frame);
+ if (!video_frame.get()) {
+ buffer_allocated = false;
+ break;
+ }
+ frame_queue_available_.push_back(video_frame);
+ }
+ }
+
+ if (codec &&
+ avcodec_thread_init(codec_context_, decode_threads) >= 0 &&
+ avcodec_open(codec_context_, codec) >= 0 &&
+ av_frame_.get() &&
+ buffer_allocated) {
+ info.success_ = true;
+ }
+ event_handler_ = event_handler;
+ event_handler_->OnInitializeComplete(info);
+}
+
+// TODO(fbarchard): Find way to remove this memcpy of the entire image.
+static void CopyPlane(size_t plane,
+ scoped_refptr<VideoFrame> video_frame,
+ const AVFrame* frame) {
+ DCHECK_EQ(video_frame->width() % 2, 0u);
+ const uint8* source = frame->data[plane];
+ const size_t source_stride = frame->linesize[plane];
+ uint8* dest = video_frame->data(plane);
+ const size_t dest_stride = video_frame->stride(plane);
+ size_t bytes_per_line = video_frame->width();
+ size_t copy_lines = video_frame->height();
+ if (plane != VideoFrame::kYPlane) {
+ bytes_per_line /= 2;
+ if (video_frame->format() == VideoFrame::YV12) {
+ copy_lines = (copy_lines + 1) / 2;
+ }
+ }
+ DCHECK(bytes_per_line <= source_stride && bytes_per_line <= dest_stride);
+ for (size_t i = 0; i < copy_lines; ++i) {
+ memcpy(dest, source, bytes_per_line);
+ source += source_stride;
+ dest += dest_stride;
+ }
+}
+
+void FFmpegVideoDecodeEngine::EmptyThisBuffer(
+ scoped_refptr<Buffer> buffer) {
+ pending_input_buffers_--;
+ if (flush_pending_) {
+ TryToFinishPendingFlush();
+ } else {
+ // Otherwise try to decode this buffer.
+ DecodeFrame(buffer);
+ }
+}
+
+void FFmpegVideoDecodeEngine::FillThisBuffer(scoped_refptr<VideoFrame> frame) {
+ // We should never receive NULL frame or EOS frame.
+ DCHECK(frame.get() && !frame->IsEndOfStream());
+
+ // Increment pending output buffer count.
+ pending_output_buffers_++;
+
+ // Return this frame to available pool or allocator after display.
+ if (direct_rendering_)
+ allocator_->DisplayDone(codec_context_, frame);
+ else
+ frame_queue_available_.push_back(frame);
+
+ if (flush_pending_) {
+ TryToFinishPendingFlush();
+ } else if (!output_eos_reached_) {
+ // If we already deliver EOS to renderer, we stop reading new input.
+ ReadInput();
+ }
+}
+
+// Try to decode frame when both input and output are ready.
+void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) {
+ scoped_refptr<VideoFrame> video_frame;
+
+ // Create a packet for input data.
+ // Due to FFmpeg API changes we no longer have const read-only pointers.
+ AVPacket packet;
+ av_init_packet(&packet);
+ packet.data = const_cast<uint8*>(buffer->GetData());
+ packet.size = buffer->GetDataSize();
+
+ // Let FFmpeg handle presentation timestamp reordering.
+ codec_context_->reordered_opaque = buffer->GetTimestamp().InMicroseconds();
+
+ // This is for codecs not using get_buffer to initialize
+ // |av_frame_->reordered_opaque|
+ av_frame_->reordered_opaque = codec_context_->reordered_opaque;
+
+ int frame_decoded = 0;
+ int result = avcodec_decode_video2(codec_context_,
+ av_frame_.get(),
+ &frame_decoded,
+ &packet);
+
+ // Log the problem if we can't decode a video frame and exit early.
+ if (result < 0) {
+ LOG(INFO) << "Error decoding a video frame with timestamp: "
+ << buffer->GetTimestamp().InMicroseconds() << " us"
+ << " , duration: "
+ << buffer->GetDuration().InMicroseconds() << " us"
+ << " , packet size: "
+ << buffer->GetDataSize() << " bytes";
+ // TODO(jiesun): call event_handler_->OnError() instead.
+ event_handler_->OnFillBufferCallback(video_frame);
+ return;
+ }
+
+ // If frame_decoded == 0, then no frame was produced.
+ // In this case, if we already begin to flush codec with empty
+ // input packet at the end of input stream, the first time we
+ // encounter frame_decoded == 0 signal output frame had been
+ // drained, we mark the flag. Otherwise we read from demuxer again.
+ if (frame_decoded == 0) {
+ if (buffer->IsEndOfStream()) { // We had started flushing.
+ event_handler_->OnFillBufferCallback(video_frame);
+ output_eos_reached_ = true;
+ } else {
+ ReadInput();
+ }
+ return;
+ }
+
+ // TODO(fbarchard): Work around for FFmpeg http://crbug.com/27675
+ // The decoder is in a bad state and not decoding correctly.
+ // Checking for NULL avoids a crash in CopyPlane().
+ if (!av_frame_->data[VideoFrame::kYPlane] ||
+ !av_frame_->data[VideoFrame::kUPlane] ||
+ !av_frame_->data[VideoFrame::kVPlane]) {
+ // TODO(jiesun): call event_handler_->OnError() instead.
+ event_handler_->OnFillBufferCallback(video_frame);
+ return;
+ }
+
+ // Determine timestamp and calculate the duration based on the repeat picture
+ // count. According to FFmpeg docs, the total duration can be calculated as
+ // follows:
+ // duration = (1 / fps) + (repeat_pict) / (2 * fps)
+ // = (2 + repeat_pict) / (2 * fps)
+ DCHECK_LE(av_frame_->repeat_pict, 2); // Sanity check.
+ // Even frame rate is fixed, for some streams and codecs, the value of
+ // |codec_context_->time_base| and |av_stream_->time_base| are not the
+ // inverse of the |av_stream_->r_frame_rate|. They use 1 milli-second as
+ // time-base units and use increment of av_packet->pts which is not one.
+ // Use the inverse of |av_stream_->r_frame_rate| instead of time_base.
+ AVRational doubled_time_base;
+ doubled_time_base.den = av_stream_->r_frame_rate.num;
+ doubled_time_base.num = av_stream_->r_frame_rate.den;
+ doubled_time_base.den *= 2;
+
+ base::TimeDelta timestamp =
+ base::TimeDelta::FromMicroseconds(av_frame_->reordered_opaque);
+ base::TimeDelta duration =
+ ConvertTimestamp(doubled_time_base, 2 + av_frame_->repeat_pict);
+
+ if (!direct_rendering_) {
+    // An available frame is guaranteed, because we issue as many reads as
+    // there are available frames, except the case of |frame_decoded| == 0,
+    // which implies decoder reorder delay and forces us to read more inputs.
+ DCHECK(frame_queue_available_.size());
+ video_frame = frame_queue_available_.front();
+ frame_queue_available_.pop_front();
+
+ // Copy the frame data since FFmpeg reuses internal buffers for AVFrame
+ // output, meaning the data is only valid until the next
+ // avcodec_decode_video() call.
+ CopyPlane(VideoFrame::kYPlane, video_frame.get(), av_frame_.get());
+ CopyPlane(VideoFrame::kUPlane, video_frame.get(), av_frame_.get());
+ CopyPlane(VideoFrame::kVPlane, video_frame.get(), av_frame_.get());
+ } else {
+ // Get the VideoFrame from allocator which associate with av_frame_.
+ video_frame = allocator_->DecodeDone(codec_context_, av_frame_.get());
+ }
+
+ video_frame->SetTimestamp(timestamp);
+ video_frame->SetDuration(duration);
+
+ pending_output_buffers_--;
+ event_handler_->OnFillBufferCallback(video_frame);
+}
+
+void FFmpegVideoDecodeEngine::Uninitialize() {
+ if (direct_rendering_) {
+ allocator_->Stop(codec_context_);
+ }
+
+ event_handler_->OnUninitializeComplete();
+}
+
+void FFmpegVideoDecodeEngine::Flush() {
+ avcodec_flush_buffers(codec_context_);
+ flush_pending_ = true;
+ TryToFinishPendingFlush();
+}
+
+void FFmpegVideoDecodeEngine::TryToFinishPendingFlush() {
+ DCHECK(flush_pending_);
+
+ // We consider ourself flushed when there is no pending input buffers
+ // and output buffers, which implies that all buffers had been returned
+ // to its owner.
+ if (!pending_input_buffers_ && !pending_output_buffers_) {
+ // Try to finish flushing and notify pipeline.
+ flush_pending_ = false;
+ event_handler_->OnFlushComplete();
+ }
+}
+
+void FFmpegVideoDecodeEngine::Seek() {
+ // After a seek, output stream no longer considered as EOS.
+ output_eos_reached_ = false;
+
+ // The buffer provider is assumed to perform pre-roll operation.
+ for (unsigned int i = 0; i < Limits::kMaxVideoFrames; ++i)
+ ReadInput();
+
+ event_handler_->OnSeekComplete();
+}
+
+void FFmpegVideoDecodeEngine::ReadInput() {
+ DCHECK_EQ(output_eos_reached_, false);
+ pending_input_buffers_++;
+ event_handler_->OnEmptyBufferCallback(NULL);
+}
+
+VideoFrame::Format FFmpegVideoDecodeEngine::GetSurfaceFormat() const {
+ // J (Motion JPEG) versions of YUV are full range 0..255.
+ // Regular (MPEG) YUV is 16..240.
+ // For now we will ignore the distinction and treat them the same.
+ switch (codec_context_->pix_fmt) {
+ case PIX_FMT_YUV420P:
+ case PIX_FMT_YUVJ420P:
+ return VideoFrame::YV12;
+ break;
+ case PIX_FMT_YUV422P:
+ case PIX_FMT_YUVJ422P:
+ return VideoFrame::YV16;
+ break;
+ default:
+ // TODO(scherkus): More formats here?
+ return VideoFrame::INVALID;
+ }
+}
+
+} // namespace media
diff --git a/media/filters/ffmpeg_video_decode_engine.h b/media/filters/ffmpeg_video_decode_engine.h
new file mode 100644
index 0000000..fbb465a
--- /dev/null
+++ b/media/filters/ffmpeg_video_decode_engine.h
@@ -0,0 +1,85 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FILTERS_FFMPEG_VIDEO_DECODE_ENGINE_H_
+#define MEDIA_FILTERS_FFMPEG_VIDEO_DECODE_ENGINE_H_
+
+#include <deque>
+
+#include "base/scoped_ptr.h"
+#include "media/ffmpeg/ffmpeg_common.h"
+#include "media/filters/video_decode_engine.h"
+
+// FFmpeg types.
+struct AVCodecContext;
+struct AVFrame;
+struct AVStream;
+
+namespace media {
+
+class FFmpegVideoAllocator;
+
+class FFmpegVideoDecodeEngine : public VideoDecodeEngine {
+ public:
+ FFmpegVideoDecodeEngine();
+ virtual ~FFmpegVideoDecodeEngine();
+
+ // Implementation of the VideoDecodeEngine Interface.
+ virtual void Initialize(MessageLoop* message_loop,
+ VideoDecodeEngine::EventHandler* event_handler,
+ const VideoCodecConfig& config);
+ virtual void EmptyThisBuffer(scoped_refptr<Buffer> buffer);
+ virtual void FillThisBuffer(scoped_refptr<VideoFrame> frame);
+ virtual void Uninitialize();
+ virtual void Flush();
+ virtual void Seek();
+
+ virtual AVCodecContext* codec_context() const { return codec_context_; }
+
+ virtual void SetCodecContextForTest(AVCodecContext* context) {
+ codec_context_ = context;
+ }
+
+ VideoFrame::Format GetSurfaceFormat() const;
+ private:
+ void DecodeFrame(scoped_refptr<Buffer> buffer);
+ void ReadInput();
+ void TryToFinishPendingFlush();
+
+ AVCodecContext* codec_context_;
+ AVStream* av_stream_;
+ scoped_ptr_malloc<AVFrame, ScopedPtrAVFree> av_frame_;
+ VideoDecodeEngine::EventHandler* event_handler_;
+
+ // Whether direct rendering is used.
+ bool direct_rendering_;
+
+ // Used when direct rendering is used to recycle output buffers.
+ scoped_ptr<FFmpegVideoAllocator> allocator_;
+
+  // Indicates how many buffers are pending on the input port of this filter:
+  // incremented when the engine requests one input packet from the demuxer;
+  // decremented when the engine receives one input packet from the demuxer.
+ int pending_input_buffers_;
+
+  // Indicates how many buffers are pending on the output port of this filter:
+  // incremented when the engine receives one output frame back from the
+  // renderer; decremented when the engine sends one output frame to it.
+ int pending_output_buffers_;
+
+ // Whether end of stream had been reached at output side.
+ bool output_eos_reached_;
+
+ // Used when direct rendering is disabled to hold available output buffers.
+ std::deque<scoped_refptr<VideoFrame> > frame_queue_available_;
+
+ // Whether flush operation is pending.
+ bool flush_pending_;
+
+ DISALLOW_COPY_AND_ASSIGN(FFmpegVideoDecodeEngine);
+};
+
+} // namespace media
+
+#endif // MEDIA_FILTERS_FFMPEG_VIDEO_DECODE_ENGINE_H_
diff --git a/media/filters/ffmpeg_video_decode_engine_unittest.cc b/media/filters/ffmpeg_video_decode_engine_unittest.cc
new file mode 100644
index 0000000..c8e8d06
--- /dev/null
+++ b/media/filters/ffmpeg_video_decode_engine_unittest.cc
@@ -0,0 +1,293 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/scoped_ptr.h"
+#include "media/base/data_buffer.h"
+#include "media/base/mock_ffmpeg.h"
+#include "media/base/mock_task.h"
+#include "media/filters/ffmpeg_video_decode_engine.h"
+#include "media/filters/ffmpeg_video_decoder.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Return;
+using ::testing::ReturnNull;
+using ::testing::SetArgumentPointee;
+using ::testing::StrictMock;
+
+namespace media {
+
+static const int kWidth = 320;
+static const int kHeight = 240;
+static const AVRational kTimeBase = { 1, 100 };
+
+ACTION_P(SaveInitializeResult, engine) {
+ engine->info_ = arg0;
+}
+
+class FFmpegVideoDecodeEngineTest : public testing::Test,
+ public VideoDecodeEngine::EventHandler {
+ protected:
+ FFmpegVideoDecodeEngineTest() {
+ // Setup FFmpeg structures.
+ frame_buffer_.reset(new uint8[kWidth * kHeight]);
+ memset(&yuv_frame_, 0, sizeof(yuv_frame_));
+
+ // DecodeFrame will check these pointers as non-NULL value.
+ yuv_frame_.data[0] = yuv_frame_.data[1] = yuv_frame_.data[2]
+ = frame_buffer_.get();
+ yuv_frame_.linesize[0] = kWidth;
+ yuv_frame_.linesize[1] = yuv_frame_.linesize[2] = kWidth >> 1;
+
+ memset(&codec_context_, 0, sizeof(codec_context_));
+ codec_context_.width = kWidth;
+ codec_context_.height = kHeight;
+ codec_context_.time_base = kTimeBase;
+
+ memset(&codec_, 0, sizeof(codec_));
+ memset(&stream_, 0, sizeof(stream_));
+ stream_.codec = &codec_context_;
+ stream_.r_frame_rate.num = kTimeBase.den;
+ stream_.r_frame_rate.den = kTimeBase.num;
+
+ buffer_ = new DataBuffer(1);
+
+ // Initialize MockFFmpeg.
+ MockFFmpeg::set(&mock_ffmpeg_);
+
+ test_engine_ = new FFmpegVideoDecodeEngine();
+ test_engine_->SetCodecContextForTest(&codec_context_);
+
+ VideoFrame::CreateFrame(VideoFrame::YV12,
+ kWidth,
+ kHeight,
+ StreamSample::kInvalidTimestamp,
+ StreamSample::kInvalidTimestamp,
+ &video_frame_);
+ }
+
+ ~FFmpegVideoDecodeEngineTest() {
+ test_engine_ = NULL;
+ MockFFmpeg::set(NULL);
+ }
+
+ void Initialize() {
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecFindDecoder(CODEC_ID_NONE))
+ .WillOnce(Return(&codec_));
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecAllocFrame())
+ .WillOnce(Return(&yuv_frame_));
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecThreadInit(&codec_context_, 2))
+ .WillOnce(Return(0));
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecOpen(&codec_context_, &codec_))
+ .WillOnce(Return(0));
+ EXPECT_CALL(*MockFFmpeg::get(), AVFree(&yuv_frame_))
+ .Times(1);
+
+ config_.codec_ = kCodecH264;
+ config_.opaque_context_ = &stream_;
+ config_.width_ = kWidth;
+ config_.height_ = kHeight;
+ EXPECT_CALL(*this, OnInitializeComplete(_))
+ .WillOnce(SaveInitializeResult(this));
+ test_engine_->Initialize(MessageLoop::current(), this, config_);
+ EXPECT_TRUE(info_.success_);
+ }
+
+ public:
+ MOCK_METHOD1(OnFillBufferCallback,
+ void(scoped_refptr<VideoFrame> video_frame));
+ MOCK_METHOD1(OnEmptyBufferCallback,
+ void(scoped_refptr<Buffer> buffer));
+ MOCK_METHOD1(OnInitializeComplete,
+ void(const VideoCodecInfo& info));
+ MOCK_METHOD0(OnUninitializeComplete, void());
+ MOCK_METHOD0(OnFlushComplete, void());
+ MOCK_METHOD0(OnSeekComplete, void());
+ MOCK_METHOD0(OnError, void());
+ MOCK_METHOD1(OnFormatChange, void(VideoStreamInfo stream_info));
+
+ scoped_refptr<VideoFrame> video_frame_;
+ VideoCodecConfig config_;
+ VideoCodecInfo info_;
+ protected:
+ scoped_refptr<FFmpegVideoDecodeEngine> test_engine_;
+ scoped_array<uint8_t> frame_buffer_;
+ StrictMock<MockFFmpeg> mock_ffmpeg_;
+
+ AVFrame yuv_frame_;
+ AVCodecContext codec_context_;
+ AVStream stream_;
+ AVCodec codec_;
+ scoped_refptr<DataBuffer> buffer_;
+
+};
+
+TEST_F(FFmpegVideoDecodeEngineTest, Initialize_Normal) {
+ Initialize();
+}
+
+TEST_F(FFmpegVideoDecodeEngineTest, Initialize_FindDecoderFails) {
+ // Test avcodec_find_decoder() returning NULL.
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecFindDecoder(CODEC_ID_NONE))
+ .WillOnce(ReturnNull());
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecAllocFrame())
+ .WillOnce(Return(&yuv_frame_));
+ EXPECT_CALL(*MockFFmpeg::get(), AVFree(&yuv_frame_))
+ .Times(1);
+
+ config_.codec_ = kCodecH264;
+ config_.opaque_context_ = &stream_;
+ config_.width_ = kWidth;
+ config_.height_ = kHeight;
+ EXPECT_CALL(*this, OnInitializeComplete(_))
+ .WillOnce(SaveInitializeResult(this));
+ test_engine_->Initialize(MessageLoop::current(), this, config_);
+ EXPECT_FALSE(info_.success_);
+}
+
+// Note There are 2 threads for FFmpeg-mt.
+TEST_F(FFmpegVideoDecodeEngineTest, Initialize_InitThreadFails) {
+ // Test avcodec_thread_init() failing.
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecFindDecoder(CODEC_ID_NONE))
+ .WillOnce(Return(&codec_));
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecAllocFrame())
+ .WillOnce(Return(&yuv_frame_));
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecThreadInit(&codec_context_, 2))
+ .WillOnce(Return(-1));
+ EXPECT_CALL(*MockFFmpeg::get(), AVFree(&yuv_frame_))
+ .Times(1);
+
+ config_.codec_ = kCodecH264;
+ config_.opaque_context_ = &stream_;
+ config_.width_ = kWidth;
+ config_.height_ = kHeight;
+ EXPECT_CALL(*this, OnInitializeComplete(_))
+ .WillOnce(SaveInitializeResult(this));
+ test_engine_->Initialize(MessageLoop::current(), this, config_);
+ EXPECT_FALSE(info_.success_);
+}
+
+TEST_F(FFmpegVideoDecodeEngineTest, Initialize_OpenDecoderFails) {
+ // Test avcodec_open() failing.
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecFindDecoder(CODEC_ID_NONE))
+ .WillOnce(Return(&codec_));
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecAllocFrame())
+ .WillOnce(Return(&yuv_frame_));
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecThreadInit(&codec_context_, 2))
+ .WillOnce(Return(0));
+ EXPECT_CALL(*MockFFmpeg::get(), AVCodecOpen(&codec_context_, &codec_))
+ .WillOnce(Return(-1));
+ EXPECT_CALL(*MockFFmpeg::get(), AVFree(&yuv_frame_))
+ .Times(1);
+
+ config_.codec_ = kCodecH264;
+ config_.opaque_context_ = &stream_;
+ config_.width_ = kWidth;
+ config_.height_ = kHeight;
+ EXPECT_CALL(*this, OnInitializeComplete(_))
+ .WillOnce(SaveInitializeResult(this));
+ test_engine_->Initialize(MessageLoop::current(), this, config_);
+ EXPECT_FALSE(info_.success_);
+}
+
+ACTION_P2(DemuxComplete, engine, buffer) {
+ engine->EmptyThisBuffer(buffer);
+}
+
+ACTION_P(DecodeComplete, decoder) {
+ decoder->video_frame_ = arg0;
+}
+
+TEST_F(FFmpegVideoDecodeEngineTest, DecodeFrame_Normal) {
+ Initialize();
+
+ // We rely on FFmpeg for timestamp and duration reporting. The one tricky
+ // bit is calculating the duration when |repeat_pict| > 0.
+ const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(123);
+ const base::TimeDelta kDuration = base::TimeDelta::FromMicroseconds(15000);
+ yuv_frame_.repeat_pict = 1;
+ yuv_frame_.reordered_opaque = kTimestamp.InMicroseconds();
+
+ // Expect a bunch of avcodec calls.
+ EXPECT_CALL(mock_ffmpeg_, AVInitPacket(_));
+ EXPECT_CALL(mock_ffmpeg_,
+ AVCodecDecodeVideo2(&codec_context_, &yuv_frame_, _, _))
+ .WillOnce(DoAll(SetArgumentPointee<2>(1), // Simulate 1 byte frame.
+ Return(0)));
+
+ EXPECT_CALL(*this, OnEmptyBufferCallback(_))
+ .WillOnce(DemuxComplete(test_engine_.get(), buffer_));
+ EXPECT_CALL(*this, OnFillBufferCallback(_))
+ .WillOnce(DecodeComplete(this));
+ test_engine_->FillThisBuffer(video_frame_);
+
+ // |video_frame_| timestamp is 0 because we set the timestamp based off
+ // the buffer timestamp.
+ EXPECT_EQ(0, video_frame_->GetTimestamp().ToInternalValue());
+ EXPECT_EQ(kDuration.ToInternalValue(),
+ video_frame_->GetDuration().ToInternalValue());
+}
+
+TEST_F(FFmpegVideoDecodeEngineTest, DecodeFrame_0ByteFrame) {
+ Initialize();
+
+ // Expect a bunch of avcodec calls.
+ EXPECT_CALL(mock_ffmpeg_, AVInitPacket(_))
+ .Times(2);
+ EXPECT_CALL(mock_ffmpeg_,
+ AVCodecDecodeVideo2(&codec_context_, &yuv_frame_, _, _))
+ .WillOnce(DoAll(SetArgumentPointee<2>(0), // Simulate 0 byte frame.
+ Return(0)))
+ .WillOnce(DoAll(SetArgumentPointee<2>(1), // Simulate 1 byte frame.
+ Return(0)));
+
+ EXPECT_CALL(*this, OnEmptyBufferCallback(_))
+ .WillOnce(DemuxComplete(test_engine_.get(), buffer_))
+ .WillOnce(DemuxComplete(test_engine_.get(), buffer_));
+ EXPECT_CALL(*this, OnFillBufferCallback(_))
+ .WillOnce(DecodeComplete(this));
+ test_engine_->FillThisBuffer(video_frame_);
+
+ EXPECT_TRUE(video_frame_.get());
+}
+
+TEST_F(FFmpegVideoDecodeEngineTest, DecodeFrame_DecodeError) {
+ Initialize();
+
+ // Expect a bunch of avcodec calls.
+ EXPECT_CALL(mock_ffmpeg_, AVInitPacket(_));
+ EXPECT_CALL(mock_ffmpeg_,
+ AVCodecDecodeVideo2(&codec_context_, &yuv_frame_, _, _))
+ .WillOnce(Return(-1));
+
+ EXPECT_CALL(*this, OnEmptyBufferCallback(_))
+ .WillOnce(DemuxComplete(test_engine_.get(), buffer_));
+ EXPECT_CALL(*this, OnFillBufferCallback(_))
+ .WillOnce(DecodeComplete(this));
+ test_engine_->FillThisBuffer(video_frame_);
+
+ EXPECT_FALSE(video_frame_.get());
+}
+
+TEST_F(FFmpegVideoDecodeEngineTest, GetSurfaceFormat) {
+ // YV12 formats.
+ codec_context_.pix_fmt = PIX_FMT_YUV420P;
+ EXPECT_EQ(VideoFrame::YV12, test_engine_->GetSurfaceFormat());
+ codec_context_.pix_fmt = PIX_FMT_YUVJ420P;
+ EXPECT_EQ(VideoFrame::YV12, test_engine_->GetSurfaceFormat());
+
+ // YV16 formats.
+ codec_context_.pix_fmt = PIX_FMT_YUV422P;
+ EXPECT_EQ(VideoFrame::YV16, test_engine_->GetSurfaceFormat());
+ codec_context_.pix_fmt = PIX_FMT_YUVJ422P;
+ EXPECT_EQ(VideoFrame::YV16, test_engine_->GetSurfaceFormat());
+
+ // Invalid value.
+ codec_context_.pix_fmt = PIX_FMT_NONE;
+ EXPECT_EQ(VideoFrame::INVALID, test_engine_->GetSurfaceFormat());
+}
+
+} // namespace media
diff --git a/media/filters/ffmpeg_video_decoder.cc b/media/filters/ffmpeg_video_decoder.cc
index eb57493..46e4036 100644
--- a/media/filters/ffmpeg_video_decoder.cc
+++ b/media/filters/ffmpeg_video_decoder.cc
@@ -16,8 +16,8 @@
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/ffmpeg_util.h"
#include "media/filters/ffmpeg_interfaces.h"
-#include "media/video/ffmpeg_video_decode_engine.h"
-#include "media/video/video_decode_engine.h"
+#include "media/filters/ffmpeg_video_decode_engine.h"
+#include "media/filters/video_decode_engine.h"
namespace media {
diff --git a/media/filters/ffmpeg_video_decoder.h b/media/filters/ffmpeg_video_decoder.h
index e898266..972010b 100644
--- a/media/filters/ffmpeg_video_decoder.h
+++ b/media/filters/ffmpeg_video_decoder.h
@@ -14,7 +14,7 @@
#include "media/base/pts_heap.h"
#include "media/base/video_frame.h"
#include "media/filters/decoder_base.h"
-#include "media/video/video_decode_engine.h"
+#include "media/filters/video_decode_engine.h"
// FFmpeg types.
struct AVRational;
diff --git a/media/filters/ffmpeg_video_decoder_unittest.cc b/media/filters/ffmpeg_video_decoder_unittest.cc
index b1bd481..90a538f 100644
--- a/media/filters/ffmpeg_video_decoder_unittest.cc
+++ b/media/filters/ffmpeg_video_decoder_unittest.cc
@@ -17,7 +17,7 @@
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/filters/ffmpeg_interfaces.h"
#include "media/filters/ffmpeg_video_decoder.h"
-#include "media/video/video_decode_engine.h"
+#include "media/filters/video_decode_engine.h"
#include "testing/gtest/include/gtest/gtest.h"
using ::testing::_;
diff --git a/media/filters/omx_video_decode_engine.cc b/media/filters/omx_video_decode_engine.cc
new file mode 100644
index 0000000..358e005
--- /dev/null
+++ b/media/filters/omx_video_decode_engine.cc
@@ -0,0 +1,1339 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This class interacts with OmxCodec and the VideoDecoderImpl
+// in the media pipeline.
+//
+// THREADING SEMANTICS
+//
+// This class is created by VideoDecoderImpl and lives on the thread
+// that VideoDecoderImpl lives. This class is given the message loop
+// for the above thread. The OMX callbacks are guaranteed to be
+// executed on the hosting message loop. This essentially means that
+// all methods in this class are executed on the same thread as
+// VideoDecoderImpl. Because of that there's no need for locking anywhere.
+
+#include "media/filters/omx_video_decode_engine.h"
+
+#include "base/logging.h"
+#include "base/message_loop.h"
+#include "base/string_util.h"
+#include "media/base/buffers.h"
+
+namespace media {
+
+OmxVideoDecodeEngine::OmxVideoDecodeEngine()
+ : width_(16),
+ height_(16),
+ message_loop_(NULL),
+ input_buffer_count_(0),
+ input_buffer_size_(0),
+ input_port_(0),
+ input_buffers_at_component_(0),
+ input_pending_request_(0),
+ input_queue_has_eos_(false),
+ input_has_fed_eos_(false),
+ input_port_flushed_(false),
+ output_buffer_count_(0),
+ output_buffer_size_(0),
+ output_port_(0),
+ output_buffers_at_component_(0),
+ output_pending_request_(0),
+ output_eos_(false),
+ output_port_flushed_(false),
+ il_state_(kIlNone),
+ expected_il_state_(kIlNone),
+ client_state_(kClientNotInitialized),
+ component_handle_(NULL),
+ need_free_input_buffers_(false),
+ need_free_output_buffers_(false),
+ flush_pending_(false),
+ output_frames_allocated_(false),
+ need_setup_output_port_(false) {
+ // TODO(wjia): change uses_egl_image_ to runtime setup
+#if ENABLE_EGLIMAGE == 1
+ uses_egl_image_ = true;
+ DLOG(INFO) << "Uses egl image for output";
+#else
+ uses_egl_image_ = false;
+ DLOG(INFO) << "Uses system memory for output";
+#endif
+}
+
+OmxVideoDecodeEngine::~OmxVideoDecodeEngine() {
+ DCHECK(client_state_ == kClientNotInitialized ||
+ client_state_ == kClientStopped);
+ DCHECK_EQ(il_state_, kIlNone);
+ DCHECK_EQ(0u, input_buffers_.size());
+ DCHECK(free_input_buffers_.empty());
+ DCHECK(available_input_buffers_.empty());
+ DCHECK_EQ(0, input_buffers_at_component_);
+ DCHECK_EQ(0, output_buffers_at_component_);
+ DCHECK(output_frames_.empty());
+}
+
+template <typename T>
+static void ResetParamHeader(const OmxVideoDecodeEngine& dec, T* param) {
+ memset(param, 0, sizeof(T));
+ param->nVersion.nVersion = dec.current_omx_spec_version();
+ param->nSize = sizeof(T);
+}
+
+void OmxVideoDecodeEngine::Initialize(
+ MessageLoop* message_loop,
+ VideoDecodeEngine::EventHandler* event_handler,
+ const VideoCodecConfig& config) {
+ DCHECK_EQ(message_loop, MessageLoop::current());
+
+ message_loop_ = message_loop;
+ event_handler_ = event_handler;
+
+ width_ = config.width_;
+ height_ = config.height_;
+
+ // TODO(wjia): Find the right way to determine the codec type.
+ OmxConfigurator::MediaFormat input_format, output_format;
+ memset(&input_format, 0, sizeof(input_format));
+ memset(&output_format, 0, sizeof(output_format));
+ input_format.codec = OmxConfigurator::kCodecH264;
+ output_format.codec = OmxConfigurator::kCodecRaw;
+ configurator_.reset(
+ new OmxDecoderConfigurator(input_format, output_format));
+
+ // TODO(jiesun): We already ensure Initialize() is called in thread context,
+ // We should try to merge the following function into this function.
+ client_state_ = kClientInitializing;
+ InitializeTask();
+
+ VideoCodecInfo info;
+ // TODO(jiesun): ridiculous, we never fail initialization?
+ info.success_ = true;
+ info.provides_buffers_ = !uses_egl_image_;
+ info.stream_info_.surface_type_ =
+ uses_egl_image_ ? VideoFrame::TYPE_EGL_IMAGE
+ : VideoFrame::TYPE_SYSTEM_MEMORY;
+ info.stream_info_.surface_format_ = GetSurfaceFormat();
+ info.stream_info_.surface_width_ = config.width_;
+ info.stream_info_.surface_height_ = config.height_;
+ event_handler_->OnInitializeComplete(info);
+}
+
+// This method handles only input buffer, without coupling with output
+void OmxVideoDecodeEngine::EmptyThisBuffer(scoped_refptr<Buffer> buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK(!free_input_buffers_.empty());
+ DCHECK_GT(input_pending_request_, 0);
+
+ --input_pending_request_;
+
+ if (!CanAcceptInput()) {
+ FinishEmptyBuffer(buffer);
+ return;
+ }
+
+ if (buffer->IsEndOfStream()) {
+ DLOG(INFO) << "Input queue has EOS";
+ input_queue_has_eos_ = true;
+ }
+
+ OMX_BUFFERHEADERTYPE* omx_buffer = free_input_buffers_.front();
+ free_input_buffers_.pop();
+
+ // setup |omx_buffer|.
+ omx_buffer->pBuffer = const_cast<OMX_U8*>(buffer->GetData());
+ omx_buffer->nFilledLen = buffer->GetDataSize();
+ omx_buffer->nAllocLen = omx_buffer->nFilledLen;
+ if (input_queue_has_eos_)
+ omx_buffer->nFlags |= OMX_BUFFERFLAG_EOS;
+ else
+ omx_buffer->nFlags &= ~OMX_BUFFERFLAG_EOS;
+ omx_buffer->nTimeStamp = buffer->GetTimestamp().InMicroseconds();
+ omx_buffer->pAppPrivate = buffer.get();
+ buffer->AddRef();
+ available_input_buffers_.push(omx_buffer);
+
+ // Try to feed buffers into the decoder.
+ EmptyBufferTask();
+
+ if (flush_pending_ && input_pending_request_ == 0)
+ StartFlush();
+}
+
+void OmxVideoDecodeEngine::Flush() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(il_state_, kIlExecuting);
+
+ if (il_state_ != kIlExecuting) {
+ event_handler_->OnFlushComplete();
+ return;
+ }
+
+ client_state_ = kClientFlushing;
+ expected_il_state_ = kIlPause;
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::PauseFromExecuting;
+ TransitionToState(OMX_StatePause);
+}
+
+void OmxVideoDecodeEngine::PauseFromExecuting(OMX_STATETYPE state) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ OnStateSetEventFunc = NULL;
+ il_state_ = kIlPause;
+
+ if (input_pending_request_ == 0)
+ StartFlush();
+ else
+ flush_pending_ = true;
+}
+
+void OmxVideoDecodeEngine::StartFlush() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(input_pending_request_, 0);
+ DLOG(INFO) << "StartFlush";
+
+ while (!available_input_buffers_.empty())
+ available_input_buffers_.pop();
+
+ flush_pending_ = false;
+
+ // Flush input port first.
+ OnFlushEventFunc = &OmxVideoDecodeEngine::PortFlushDone;
+ OMX_ERRORTYPE omxresult;
+ omxresult = OMX_SendCommand(component_handle_,
+ OMX_CommandFlush,
+ input_port_, 0);
+}
+
+bool OmxVideoDecodeEngine::InputPortFlushed() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(client_state_, kClientFlushing);
+ // Port flushed is defined by OpenMAX component had signal flush done and
+ // We had all buffers returned from demuxer and OpenMAX component.
+ int free_input_size = static_cast<int>(free_input_buffers_.size());
+ return input_port_flushed_ && free_input_size == input_buffer_count_;
+}
+
+bool OmxVideoDecodeEngine::OutputPortFlushed() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(client_state_, kClientFlushing);
+ // Port flushed is defined by OpenMAX component had signal flush done and
+ // We had all buffers returned from renderer and OpenMAX component.
+ return output_port_flushed_ && output_pending_request_ == 0;
+}
+
+void OmxVideoDecodeEngine::ComponentFlushDone() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DLOG(INFO) << "Component had been flushed!";
+
+ if (input_port_flushed_ && output_port_flushed_) {
+ event_handler_->OnFlushComplete();
+ input_port_flushed_ = false;
+ output_port_flushed_ = false;
+ }
+}
+
+void OmxVideoDecodeEngine::PortFlushDone(int port) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_NE(port, static_cast<int>(OMX_ALL));
+
+ if (port == input_port_) {
+ DLOG(INFO) << "Input Port had been flushed";
+ DCHECK_EQ(input_buffers_at_component_, 0);
+ input_port_flushed_ = true;
+ // Flush output port next.
+ OMX_ERRORTYPE omxresult;
+ omxresult = OMX_SendCommand(component_handle_,
+ OMX_CommandFlush,
+ output_port_, 0);
+ return;
+ }
+
+ if (port == output_port_) {
+ DLOG(INFO) << "Output Port had been flushed";
+ DCHECK_EQ(output_buffers_at_component_, 0);
+
+ output_port_flushed_ = true;
+ }
+
+ if (kClientFlushing == client_state_ &&
+ InputPortFlushed() && OutputPortFlushed())
+ ComponentFlushDone();
+}
+
+void OmxVideoDecodeEngine::Seek() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ DCHECK(client_state_ == kClientFlushing || // After a flush
+ client_state_ == kClientInitializing); // After an initialize.
+
+ if (client_state_ == kClientFlushing) {
+ InitialReadBuffer();
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::DoneSetStateExecuting;
+ TransitionToState(OMX_StateExecuting);
+ }
+
+ event_handler_->OnSeekComplete();
+}
+
+VideoFrame::Format OmxVideoDecodeEngine::GetSurfaceFormat() const {
+ // TODO(jiesun): Both OmxHeaderType and EGLImage surface type could have
+ // different surface formats.
+ return uses_egl_image_ ? VideoFrame::RGBA : VideoFrame::YV12;
+}
+
+void OmxVideoDecodeEngine::Uninitialize() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ if (client_state_ == kClientError) {
+ OnStopDone();
+ return;
+ }
+
+ // TODO(wjia): add more state checking
+ if (kClientRunning == client_state_ || kClientFlushing == client_state_) {
+ client_state_ = kClientStopping;
+ DeinitFromExecuting(OMX_StateExecuting);
+ }
+
+ // TODO(wjia): When FillThisBuffer() is added, engine state should be
+ // kStopping here. engine state should be set to kStopped in OnStopDone();
+ // client_state_ = kClientStopping;
+}
+
+void OmxVideoDecodeEngine::FinishEmptyBuffer(scoped_refptr<Buffer> buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ if (!input_queue_has_eos_) {
+ event_handler_->OnEmptyBufferCallback(buffer);
+ ++input_pending_request_;
+ }
+}
+
+void OmxVideoDecodeEngine::FinishFillBuffer(OMX_BUFFERHEADERTYPE* buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK(buffer);
+
+ scoped_refptr<VideoFrame> frame;
+ frame = static_cast<VideoFrame*>(buffer->pAppPrivate);
+
+ // We should not flush buffer to renderer during decoder flushing if decoder
+ // provides the buffer allocator.
+ if (kClientFlushing == client_state_ && !uses_egl_image_) return;
+
+ frame->SetTimestamp(base::TimeDelta::FromMicroseconds(buffer->nTimeStamp));
+ frame->SetDuration(frame->GetTimestamp() - last_pts_);
+ last_pts_ = frame->GetTimestamp();
+ event_handler_->OnFillBufferCallback(frame);
+ output_pending_request_--;
+}
+
+void OmxVideoDecodeEngine::OnStopDone() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ event_handler_->OnUninitializeComplete();
+}
+
+// Function sequence for initializing
+void OmxVideoDecodeEngine::InitializeTask() {
+ DCHECK_EQ(il_state_, kIlNone);
+
+ il_state_ = kIlNone;
+ expected_il_state_ = kIlLoaded;
+ output_port_state_ = kPortEnabled;
+ if (!CreateComponent()) {
+ StopOnError();
+ return;
+ }
+ il_state_ = kIlLoaded;
+
+ // TODO(wjia): Disabling output port is to work around racing condition
+ // due to bug in some vendor's driver. But it hits another bug.
+ // So temporarily fall back to enabling output port. Still keep the code
+ // disabling output port here.
+ // No need to respond to this PortDisable event
+ // OnPortDisableEventFunc = NULL;
+ // ChangePort(OMX_CommandPortDisable, output_port_);
+ // if (kClientError == client_state_) {
+ // StopOnError();
+ // return;
+ // }
+ // output_port_state_ = kPortDisabled;
+
+ // Transition component to Idle state
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::DoneSetStateIdle;
+ if (!TransitionToState(OMX_StateIdle)) {
+ StopOnError();
+ return;
+ }
+ expected_il_state_ = kIlIdle;
+
+ if (!AllocateInputBuffers()) {
+ LOG(ERROR) << "OMX_AllocateBuffer() Input buffer error";
+ client_state_ = kClientError;
+ StopOnError();
+ return;
+ }
+ if (!AllocateOutputBuffers()) {
+ LOG(ERROR) << "OMX_AllocateBuffer() Output buffer error";
+ client_state_ = kClientError;
+ return;
+ }
+}
+
+// Sequence of actions in this transition:
+//
+// 1. Initialize OMX (To be removed.)
+// 2. Map role name to component name.
+// 3. Get handle of the OMX component
+// 4. Get the port information.
+// 5. Set role for the component.
+// 6. Input/output ports media format configuration.
+// 7. Obtain the information about the input port.
+// 8. Obtain the information about the output port.
+bool OmxVideoDecodeEngine::CreateComponent() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ static OMX_CALLBACKTYPE callback = {
+ &OmxVideoDecodeEngine::EventHandler,
+ &OmxVideoDecodeEngine::EmptyBufferCallback,
+ &OmxVideoDecodeEngine::FillBufferCallback
+ };
+
+ // 1. Initialize the OpenMAX Core.
+ // TODO(hclam): move this out.
+ OMX_ERRORTYPE omxresult = OMX_Init();
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "Failed to init OpenMAX core";
+ client_state_ = kClientError;
+ return false;
+ }
+
+ // 2. Map role name to component name.
+ std::string role_name = configurator_->GetRoleName();
+ OMX_U32 roles = 0;
+ omxresult = OMX_GetComponentsOfRole(
+ const_cast<OMX_STRING>(role_name.c_str()),
+ &roles, 0);
+ if (omxresult != OMX_ErrorNone || roles == 0) {
+ LOG(ERROR) << "Unsupported Role: " << role_name.c_str();
+ client_state_ = kClientError;
+ return false;
+ }
+ const OMX_U32 kMaxRolePerComponent = 20;
+ CHECK(roles < kMaxRolePerComponent);
+
+ OMX_U8** component_names = new OMX_U8*[roles];
+ const int kMaxComponentNameLength = 256;
+ for (size_t i = 0; i < roles; ++i)
+ component_names[i] = new OMX_U8[kMaxComponentNameLength];
+
+ omxresult = OMX_GetComponentsOfRole(
+ const_cast<OMX_STRING>(role_name.c_str()),
+ &roles, component_names);
+
+ // Use first component only. Copy the name of the first component
+ // so that we could free the memory.
+ std::string component_name;
+ if (omxresult == OMX_ErrorNone)
+ component_name = reinterpret_cast<char*>(component_names[0]);
+
+ for (size_t i = 0; i < roles; ++i)
+ delete [] component_names[i];
+ delete [] component_names;
+
+ if (omxresult != OMX_ErrorNone || roles == 0) {
+ LOG(ERROR) << "Unsupported Role: " << role_name.c_str();
+ client_state_ = kClientError;
+ return false;
+ }
+
+ // 3. Get the handle to the component. After OMX_GetHandle(),
+ // the component is in loaded state.
+ OMX_STRING component = const_cast<OMX_STRING>(component_name.c_str());
+ omxresult = OMX_GetHandle(&component_handle_, component, this, &callback);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "Failed to Load the component: " << component;
+ client_state_ = kClientError;
+ return false;
+ }
+
+ // 4. Get the port information. This will obtain information about the
+ // number of ports and index of the first port.
+ OMX_PORT_PARAM_TYPE port_param;
+ ResetParamHeader(*this, &port_param);
+ omxresult = OMX_GetParameter(component_handle_, OMX_IndexParamVideoInit,
+ &port_param);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "Failed to get Port Param";
+ client_state_ = kClientError;
+ return false;
+ }
+ input_port_ = port_param.nStartPortNumber;
+ output_port_ = input_port_ + 1;
+
+ // 5. Set role for the component because our component could
+ // have multiple roles.
+ OMX_PARAM_COMPONENTROLETYPE role_type;
+ ResetParamHeader(*this, &role_type);
+ base::strlcpy(reinterpret_cast<char*>(role_type.cRole),
+ role_name.c_str(),
+ OMX_MAX_STRINGNAME_SIZE);
+ role_type.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
+ omxresult = OMX_SetParameter(component_handle_,
+ OMX_IndexParamStandardComponentRole,
+ &role_type);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "Failed to Set Role";
+ client_state_ = kClientError;
+ return false;
+ }
+
+ // 6. Input/output ports media format configuration.
+ if (!ConfigureIOPorts()) {
+ LOG(ERROR) << "Media format configurations failed";
+ client_state_ = kClientError;
+ return false;
+ }
+
+ // 7. Obtain the information about the input port.
+ // This will have the new mini buffer count in |port_format.nBufferCountMin|.
+ // Save this value to input_buf_count.
+ OMX_PARAM_PORTDEFINITIONTYPE port_format;
+ ResetParamHeader(*this, &port_format);
+ port_format.nPortIndex = input_port_;
+ omxresult = OMX_GetParameter(component_handle_,
+ OMX_IndexParamPortDefinition,
+ &port_format);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "GetParameter(OMX_IndexParamPortDefinition) failed";
+ client_state_ = kClientError;
+ return false;
+ }
+ if (OMX_DirInput != port_format.eDir) {
+ LOG(ERROR) << "Expected input port";
+ client_state_ = kClientError;
+ return false;
+ }
+ input_buffer_count_ = port_format.nBufferCountMin;
+ input_buffer_size_ = port_format.nBufferSize;
+
+ // 8. Obtain the information about the output port.
+ ResetParamHeader(*this, &port_format);
+ port_format.nPortIndex = output_port_;
+ omxresult = OMX_GetParameter(component_handle_,
+ OMX_IndexParamPortDefinition,
+ &port_format);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "GetParameter(OMX_IndexParamPortDefinition) failed";
+ client_state_ = kClientError;
+ return false;
+ }
+ if (OMX_DirOutput != port_format.eDir) {
+ LOG(ERROR) << "Expect Output Port";
+ client_state_ = kClientError;
+ return false;
+ }
+
+ // TODO(wjia): use same buffer recycling for EGLImage and system memory.
+ // Override buffer count when EGLImage is used.
+ if (uses_egl_image_) {
+ // TODO(wjia): remove hard-coded value
+ port_format.nBufferCountActual = port_format.nBufferCountMin =
+ output_buffer_count_ = 4;
+
+ omxresult = OMX_SetParameter(component_handle_,
+ OMX_IndexParamPortDefinition,
+ &port_format);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "SetParameter(OMX_IndexParamPortDefinition) failed";
+ client_state_ = kClientError;
+ return false;
+ }
+ } else {
+ output_buffer_count_ = port_format.nBufferCountActual;
+ }
+ output_buffer_size_ = port_format.nBufferSize;
+
+ return true;
+}
+
+// Event callback during initialization to handle DoneStateSet to idle
+void OmxVideoDecodeEngine::DoneSetStateIdle(OMX_STATETYPE state) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(client_state_, kClientInitializing);
+ DCHECK_EQ(OMX_StateIdle, state);
+ DLOG(INFO) << "OMX video decode engine is in Idle";
+
+ il_state_ = kIlIdle;
+
+ // start reading bit stream
+ InitialReadBuffer();
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::DoneSetStateExecuting;
+ if (!TransitionToState(OMX_StateExecuting)) {
+ StopOnError();
+ return;
+ }
+ expected_il_state_ = kIlExecuting;
+}
+
+// Event callback during initialization to handle DoneStateSet to executing
+void OmxVideoDecodeEngine::DoneSetStateExecuting(OMX_STATETYPE state) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK(client_state_ == kClientInitializing ||
+ client_state_ == kClientFlushing);
+ DCHECK_EQ(OMX_StateExecuting, state);
+ DLOG(INFO) << "OMX video decode engine is in Executing";
+
+ il_state_ = kIlExecuting;
+ client_state_ = kClientRunning;
+ OnStateSetEventFunc = NULL;
+ EmptyBufferTask();
+ InitialFillBuffer();
+ if (kClientError == client_state_) {
+ StopOnError();
+ return;
+ }
+}
+
+// Function for receiving output buffers. Hookup for buffer recycling
+// and outside allocator.
+void OmxVideoDecodeEngine::FillThisBuffer(
+ scoped_refptr<VideoFrame> video_frame) {
+ DCHECK(video_frame.get() && !video_frame->IsEndOfStream());
+ output_pending_request_++;
+
+ if (!CanAcceptOutput()) {
+ if (uses_egl_image_) { // return it to owner.
+ output_pending_request_--;
+ event_handler_->OnFillBufferCallback(video_frame);
+ }
+ return;
+ }
+
+ OMX_BUFFERHEADERTYPE* omx_buffer = FindOmxBuffer(video_frame);
+ if (omx_buffer) {
+ if (kClientRunning == client_state_) {
+ SendOutputBufferToComponent(omx_buffer);
+ } else if (kClientFlushing == client_state_) {
+ if (uses_egl_image_) { // return it to owner.
+ output_pending_request_--;
+ event_handler_->OnFillBufferCallback(video_frame);
+ }
+ if (InputPortFlushed() && OutputPortFlushed())
+ ComponentFlushDone();
+ }
+ } else {
+ DCHECK(!output_frames_allocated_);
+ DCHECK(uses_egl_image_);
+ output_frames_.push_back(std::make_pair(video_frame,
+ static_cast<OMX_BUFFERHEADERTYPE*>(NULL)));
+ }
+
+ DCHECK(static_cast<int>(output_frames_.size()) <= output_buffer_count_);
+
+ if ((!output_frames_allocated_) &&
+ static_cast<int>(output_frames_.size()) == output_buffer_count_) {
+ output_frames_allocated_ = true;
+
+ if (need_setup_output_port_) {
+ SetupOutputPort();
+ }
+ }
+
+ if (kClientError == client_state_) {
+ StopOnError();
+ return;
+ }
+}
+
+// Reconfigure port
+void OmxVideoDecodeEngine::OnPortSettingsChangedRun(int port,
+ OMX_INDEXTYPE index) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(client_state_, kClientRunning);
+ DCHECK_EQ(port, output_port_);
+
+ // TODO(wjia): add buffer negotiation between decoder and renderer.
+ if (uses_egl_image_) {
+ DLOG(INFO) << "Port settings are changed";
+ return;
+ }
+
+ // TODO(wjia): remove this checking when all vendors observe same spec.
+ if (index > OMX_IndexComponentStartUnused) {
+ if (index != OMX_IndexParamPortDefinition)
+ return;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE port_format;
+ ResetParamHeader(*this, &port_format);
+ port_format.nPortIndex = output_port_;
+ OMX_ERRORTYPE omxresult;
+ omxresult = OMX_GetParameter(component_handle_,
+ OMX_IndexParamPortDefinition,
+ &port_format);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "GetParameter(OMX_IndexParamPortDefinition) failed";
+ client_state_ = kClientError;
+ StopOnError();
+ return;
+ }
+ if (OMX_DirOutput != port_format.eDir) {
+ LOG(ERROR) << "Expected Output Port";
+ client_state_ = kClientError;
+ StopOnError();
+ return;
+ }
+
+ // Update the output format.
+ OmxConfigurator::MediaFormat output_format;
+ output_format.video_header.height = port_format.format.video.nFrameHeight;
+ output_format.video_header.width = port_format.format.video.nFrameWidth;
+ output_format.video_header.stride = port_format.format.video.nStride;
+ output_buffer_count_ = port_format.nBufferCountActual;
+ output_buffer_size_ = port_format.nBufferSize;
+
+ if (kPortEnabled == output_port_state_) {
+ output_port_state_ = kPortDisabling;
+ OnPortDisableEventFunc = &OmxVideoDecodeEngine::OnPortDisableEventRun;
+ ChangePort(OMX_CommandPortDisable, output_port_);
+ if (kClientError == client_state_) {
+ StopOnError();
+ return;
+ }
+ FreeOutputBuffers();
+ } else {
+ OnPortDisableEventRun(output_port_);
+ }
+}
+
+// Post output port disabling
+void OmxVideoDecodeEngine::OnPortDisableEventRun(int port) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(client_state_, kClientRunning);
+ DCHECK_EQ(port, output_port_);
+
+ output_port_state_ = kPortDisabled;
+
+ // make sure all eglimages are available before enabling output port
+ if (output_frames_allocated_ || !uses_egl_image_) {
+ SetupOutputPort();
+ if (kClientError == client_state_) {
+ StopOnError();
+ return;
+ }
+ } else {
+ need_setup_output_port_ = true;
+ }
+}
+
+// Enable output port and allocate buffers correspondingly
+void OmxVideoDecodeEngine::SetupOutputPort() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ need_setup_output_port_ = false;
+
+ // Enable output port when necessary since the port could be waiting for
+ // buffers, instead of port reconfiguration.
+ if (kPortEnabled != output_port_state_) {
+ output_port_state_ = kPortEnabling;
+ OnPortEnableEventFunc = &OmxVideoDecodeEngine::OnPortEnableEventRun;
+ ChangePort(OMX_CommandPortEnable, output_port_);
+ if (kClientError == client_state_) {
+ return;
+ }
+ }
+
+ // TODO(wjia): add state checking
+ // Update the ports in buffer if necessary
+ if (!AllocateOutputBuffers()) {
+ LOG(ERROR) << "OMX_AllocateBuffer() Output buffer error";
+ client_state_ = kClientError;
+ return;
+ }
+}
+
+// Post output port enabling
+void OmxVideoDecodeEngine::OnPortEnableEventRun(int port) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(port, output_port_);
+ DCHECK_EQ(client_state_, kClientRunning);
+
+ output_port_state_ = kPortEnabled;
+ last_pts_ = base::TimeDelta::FromMilliseconds(0);
+ OnPortEnableEventFunc = NULL;
+ InitialFillBuffer();
+ if (kClientError == client_state_) {
+ StopOnError();
+ return;
+ }
+}
+
+void OmxVideoDecodeEngine::DeinitFromExecuting(OMX_STATETYPE state) {
+ DCHECK_EQ(state, OMX_StateExecuting);
+
+ DLOG(INFO) << "Deinit from Executing";
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::DeinitFromIdle;
+ TransitionToState(OMX_StateIdle);
+ expected_il_state_ = kIlIdle;
+}
+
+void OmxVideoDecodeEngine::DeinitFromIdle(OMX_STATETYPE state) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(state, OMX_StateIdle);
+
+ DLOG(INFO) << "Deinit from Idle";
+ il_state_ = kIlIdle;
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::DeinitFromLoaded;
+ TransitionToState(OMX_StateLoaded);
+ expected_il_state_ = kIlLoaded;
+
+ if (!input_buffers_at_component_)
+ FreeInputBuffers();
+ else
+ need_free_input_buffers_ = true;
+
+ if (!output_buffers_at_component_)
+ FreeOutputBuffers();
+ else
+ need_free_output_buffers_ = true;
+}
+
+void OmxVideoDecodeEngine::DeinitFromLoaded(OMX_STATETYPE state) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(state, OMX_StateLoaded);
+
+ DLOG(INFO) << "Deinit from Loaded";
+ il_state_ = kIlLoaded;
+ if (component_handle_) {
+ OMX_ERRORTYPE result = OMX_FreeHandle(component_handle_);
+ if (result != OMX_ErrorNone)
+ LOG(ERROR) << "OMX_FreeHandle() error. Error code: " << result;
+ component_handle_ = NULL;
+ }
+ il_state_ = expected_il_state_ = kIlNone;
+
+ // kClientStopped is different from kClientNotInitialized. The former can't
+ // accept output buffers, while the latter can.
+ client_state_ = kClientStopped;
+
+ OMX_Deinit();
+
+ OnStopDone();
+}
+
+void OmxVideoDecodeEngine::StopOnError() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ client_state_ = kClientStopping;
+
+ if (kIlExecuting == expected_il_state_) {
+ DeinitFromExecuting(OMX_StateExecuting);
+ } else if (kIlIdle == expected_il_state_) {
+ DeinitFromIdle(OMX_StateIdle);
+ } else if (kIlLoaded == expected_il_state_) {
+ DeinitFromLoaded(OMX_StateLoaded);
+ } else if (kIlPause == expected_il_state_) {
+ // TODO(jiesun): Make sure this works.
+ DeinitFromExecuting(OMX_StateExecuting);
+ } else {
+ NOTREACHED();
+ }
+}
+
+// Call OMX_UseBuffer() to avoid buffer copying when
+// OMX_EmptyThisBuffer() is called
+bool OmxVideoDecodeEngine::AllocateInputBuffers() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ uint8* data = new uint8[input_buffer_size_];
+ scoped_array<uint8> data_deleter(data);
+
+ for (int i = 0; i < input_buffer_count_; ++i) {
+ OMX_BUFFERHEADERTYPE* buffer;
+ OMX_ERRORTYPE error =
+ OMX_UseBuffer(component_handle_, &buffer, input_port_,
+ this, input_buffer_size_, data);
+ if (error != OMX_ErrorNone)
+ return false;
+ buffer->nInputPortIndex = input_port_;
+ buffer->nOffset = 0;
+ buffer->nFlags = 0;
+ input_buffers_.push_back(buffer);
+ free_input_buffers_.push(buffer);
+ }
+ return true;
+}
+
+// This method handles EGLImage and internal buffer cases. Any external
+// allocation case is similar to EGLImage
+bool OmxVideoDecodeEngine::AllocateOutputBuffers() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ if (uses_egl_image_ && !output_frames_allocated_) {
+ DLOG(INFO) << "Output frames are not allocated yet";
+ need_setup_output_port_ = true;
+ return true;
+ }
+
+ for (int i = 0; i < output_buffer_count_; ++i) {
+ OMX_BUFFERHEADERTYPE* buffer;
+ scoped_refptr<VideoFrame> video_frame;
+ OMX_ERRORTYPE error;
+ if (uses_egl_image_) {
+ OutputFrame output_frame = output_frames_[i];
+ video_frame = output_frame.first;
+ DCHECK(!output_frame.second);
+ error = OMX_UseEGLImage(component_handle_, &buffer, output_port_,
+ video_frame.get(), video_frame->private_buffer());
+ if (error != OMX_ErrorNone)
+ return false;
+ output_frames_[i].second = buffer;
+ } else {
+ error = OMX_AllocateBuffer(component_handle_, &buffer, output_port_,
+ NULL, output_buffer_size_);
+ if (error != OMX_ErrorNone)
+ return false;
+ video_frame = CreateOmxBufferVideoFrame(buffer);
+ output_frames_.push_back(std::make_pair(video_frame, buffer));
+ buffer->pAppPrivate = video_frame.get();
+ }
+ }
+
+ return true;
+}
+
+scoped_refptr<VideoFrame> OmxVideoDecodeEngine::CreateOmxBufferVideoFrame(
+ OMX_BUFFERHEADERTYPE* omx_buffer) {
+ scoped_refptr<VideoFrame> video_frame;
+ uint8* data[VideoFrame::kMaxPlanes];
+ int32 strides[VideoFrame::kMaxPlanes];
+
+ memset(data, 0, sizeof(data));
+ memset(strides, 0, sizeof(strides));
+ // TODO(jiesun): chroma format 4:2:0 only and 3 planes.
+ data[0] = omx_buffer->pBuffer;
+ data[1] = data[0] + width_ * height_;
+ data[2] = data[1] + width_ * height_ / 4;
+ strides[0] = width_;
+ strides[1] = strides[2] = width_ >> 1;
+
+ VideoFrame::CreateFrameExternal(
+ VideoFrame::TYPE_OMXBUFFERHEAD,
+ VideoFrame::YV12,
+ width_, height_, 3,
+ data, strides,
+ StreamSample::kInvalidTimestamp,
+ StreamSample::kInvalidTimestamp,
+ omx_buffer,
+ &video_frame);
+
+ return video_frame;
+}
+
+void OmxVideoDecodeEngine::FreeInputBuffers() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ // Empty available buffer queue.
+ while (!free_input_buffers_.empty()) {
+ free_input_buffers_.pop();
+ }
+
+ while (!available_input_buffers_.empty()) {
+ OMX_BUFFERHEADERTYPE* omx_buffer = available_input_buffers_.front();
+ available_input_buffers_.pop();
+ Buffer* stored_buffer = static_cast<Buffer*>(omx_buffer->pAppPrivate);
+ FinishEmptyBuffer(stored_buffer);
+ stored_buffer->Release();
+ }
+
+ // Calls to OMX to free buffers.
+ for (size_t i = 0; i < input_buffers_.size(); ++i)
+ OMX_FreeBuffer(component_handle_, input_port_, input_buffers_[i]);
+ input_buffers_.clear();
+
+ need_free_input_buffers_ = false;
+}
+
+void OmxVideoDecodeEngine::FreeOutputBuffers() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ // Calls to OMX to free buffers.
+ for (size_t i = 0; i < output_frames_.size(); ++i) {
+ OMX_BUFFERHEADERTYPE* omx_buffer = output_frames_[i].second;
+ CHECK(omx_buffer);
+ OMX_FreeBuffer(component_handle_, output_port_, omx_buffer);
+ }
+ output_frames_.clear();
+ output_frames_allocated_ = false;
+
+ need_free_output_buffers_ = false;
+}
+
+bool OmxVideoDecodeEngine::ConfigureIOPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE input_port_def, output_port_def;
+ OMX_ERRORTYPE omxresult = OMX_ErrorNone;
+ // Get default input port definition.
+ ResetParamHeader(*this, &input_port_def);
+ input_port_def.nPortIndex = input_port_;
+ omxresult = OMX_GetParameter(component_handle_,
+ OMX_IndexParamPortDefinition,
+ &input_port_def);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "GetParameter(OMX_IndexParamPortDefinition) "
+ << "for input port failed";
+ return false;
+ }
+ if (OMX_DirInput != input_port_def.eDir) {
+ LOG(ERROR) << "Expected Input Port";
+ return false;
+ }
+
+ // Get default output port definition.
+ ResetParamHeader(*this, &output_port_def);
+ output_port_def.nPortIndex = output_port_;
+ omxresult = OMX_GetParameter(component_handle_,
+ OMX_IndexParamPortDefinition,
+ &output_port_def);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "GetParameter(OMX_IndexParamPortDefinition) "
+ << "for output port failed";
+ return false;
+ }
+ if (OMX_DirOutput != output_port_def.eDir) {
+ LOG(ERROR) << "Expected Output Port";
+ return false;
+ }
+
+ return configurator_->ConfigureIOPorts(
+ static_cast<OMX_COMPONENTTYPE*>(component_handle_),
+ &input_port_def, &output_port_def);
+}
+
+bool OmxVideoDecodeEngine::CanEmptyBuffer() {
+ // We can call empty buffer while we are in executing and EOS has
+ // not been sent
+ return (il_state_ == kIlExecuting &&
+ !input_has_fed_eos_);
+}
+
+bool OmxVideoDecodeEngine::CanFillBuffer() {
+ // Make sure component is in the executing state and end-of-stream
+ // has not been reached.
+ return (il_state_ == kIlExecuting &&
+ !output_eos_ &&
+ (output_port_state_ == kPortEnabled ||
+ output_port_state_ == kPortEnabling));
+}
+
+bool OmxVideoDecodeEngine::CanAcceptInput() {
+ // We can't take input buffer when in error state.
+ return (kClientError != client_state_ &&
+ kClientStopping != client_state_ &&
+ kClientStopped != client_state_ &&
+ !input_queue_has_eos_);
+}
+
+bool OmxVideoDecodeEngine::CanAcceptOutput() {
+ return (kClientError != client_state_ &&
+ kClientStopping != client_state_ &&
+ kClientStopped != client_state_ &&
+ output_port_state_ == kPortEnabled &&
+ !output_eos_);
+}
+
+// TODO(wjia): There are several things need to be done here:
+// 1. Merge this method into EmptyThisBuffer();
+// 2. Get rid of the while loop, this is not needed because when we call
+// OMX_EmptyThisBuffer we assume we *always* have an input buffer.
+void OmxVideoDecodeEngine::EmptyBufferTask() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ if (!CanEmptyBuffer())
+ return;
+
+ // Loop for all available input data and input buffer for the
+ // decoder. When input has reached EOS we need to stop.
+ while (!available_input_buffers_.empty() &&
+ !input_has_fed_eos_) {
+ OMX_BUFFERHEADERTYPE* omx_buffer = available_input_buffers_.front();
+ available_input_buffers_.pop();
+
+ input_has_fed_eos_ = omx_buffer->nFlags & OMX_BUFFERFLAG_EOS;
+ if (input_has_fed_eos_) {
+ DLOG(INFO) << "Input has fed EOS";
+ }
+
+ // Give this buffer to OMX.
+ input_buffers_at_component_++;
+ OMX_ERRORTYPE ret = OMX_EmptyThisBuffer(component_handle_, omx_buffer);
+ if (ret != OMX_ErrorNone) {
+ LOG(ERROR) << "OMX_EmptyThisBuffer() failed with result " << ret;
+ client_state_ = kClientError;
+ return;
+ }
+ }
+}
+
+void OmxVideoDecodeEngine::InitialReadBuffer() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ input_queue_has_eos_ = false;
+ input_has_fed_eos_ = false;
+ output_eos_ = false;
+
+ DLOG(INFO) << "OmxVideoDecodeEngine::InitialReadBuffer";
+ for (size_t i = 0; i < free_input_buffers_.size(); i++)
+ FinishEmptyBuffer(NULL);
+}
+
+void OmxVideoDecodeEngine::InitialFillBuffer() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ // DCHECK(output_frames_allocated_);
+
+ if (!CanFillBuffer())
+ return;
+
+ DLOG(INFO) << "OmxVideoDecodeEngine::InitialFillBuffer";
+
+ // Ask the decoder to fill the output buffers.
+ for (uint32 i = 0; i < output_frames_.size(); ++i) {
+ OMX_BUFFERHEADERTYPE* omx_buffer = output_frames_[i].second;
+ SendOutputBufferToComponent(omx_buffer);
+ }
+}
+
+// helper functions
+// Send command to disable/enable port.
+void OmxVideoDecodeEngine::ChangePort(OMX_COMMANDTYPE cmd, int port_index) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ OMX_ERRORTYPE omxresult = OMX_SendCommand(component_handle_,
+ cmd, port_index, 0);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "SendCommand(OMX_CommandPortDisable) failed";
+ client_state_ = kClientError;
+ return;
+ }
+}
+
+// Find if omx_buffer exists corresponding to video_frame
+OMX_BUFFERHEADERTYPE* OmxVideoDecodeEngine::FindOmxBuffer(
+ scoped_refptr<VideoFrame> video_frame) {
+ for (size_t i = 0; i < output_frames_.size(); ++i) {
+ scoped_refptr<VideoFrame> frame = output_frames_[i].first;
+ if (video_frame->private_buffer() == frame->private_buffer())
+ return output_frames_[i].second;
+ }
+ return NULL;
+}
+
+OMX_STATETYPE OmxVideoDecodeEngine::GetComponentState() {
+ OMX_STATETYPE eState;
+ OMX_ERRORTYPE eError;
+
+ eError = OMX_GetState(component_handle_, &eState);
+ if (OMX_ErrorNone != eError) {
+ LOG(ERROR) << "OMX_GetState failed";
+ StopOnError();
+ }
+
+ return eState;
+}
+
+// send one output buffer to component
+void OmxVideoDecodeEngine::SendOutputBufferToComponent(
+ OMX_BUFFERHEADERTYPE *omx_buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ if (!CanFillBuffer())
+ return;
+
+ // clear EOS flag.
+ omx_buffer->nFlags &= ~OMX_BUFFERFLAG_EOS;
+ omx_buffer->nOutputPortIndex = output_port_;
+ output_buffers_at_component_++;
+ OMX_ERRORTYPE ret = OMX_FillThisBuffer(component_handle_, omx_buffer);
+
+ if (OMX_ErrorNone != ret) {
+ LOG(ERROR) << "OMX_FillThisBuffer() failed with result " << ret;
+ client_state_ = kClientError;
+ return;
+ }
+}
+
+// Send state transition command to component.
+bool OmxVideoDecodeEngine::TransitionToState(OMX_STATETYPE new_state) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ OMX_ERRORTYPE omxresult = OMX_SendCommand(component_handle_,
+ OMX_CommandStateSet,
+ new_state, 0);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "SendCommand(OMX_CommandStateSet) failed";
+ client_state_ = kClientError;
+ return false;
+ }
+
+ return true;
+}
+
+void OmxVideoDecodeEngine::EmptyBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_GT(input_buffers_at_component_, 0);
+
+ Buffer* stored_buffer = static_cast<Buffer*>(buffer->pAppPrivate);
+ buffer->pAppPrivate = NULL;
+ if (client_state_ != kClientFlushing)
+ FinishEmptyBuffer(stored_buffer);
+ stored_buffer->Release();
+
+ // Enqueue the available buffer because the decoder has consumed it.
+ free_input_buffers_.push(buffer);
+ input_buffers_at_component_--;
+
+ if (need_free_input_buffers_ && !input_buffers_at_component_) {
+ FreeInputBuffers();
+ return;
+ }
+
+ // Try to feed more data into the decoder.
+ EmptyBufferTask();
+
+ if (client_state_ == kClientFlushing &&
+ InputPortFlushed() && OutputPortFlushed())
+ ComponentFlushDone();
+}
+
+void OmxVideoDecodeEngine::FillBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_GT(output_buffers_at_component_, 0);
+
+ output_buffers_at_component_--;
+
+ if (need_free_output_buffers_ && !output_buffers_at_component_) {
+ FreeOutputBuffers();
+ return;
+ }
+
+ if (!CanAcceptOutput()) {
+ if (uses_egl_image_) {
+ scoped_refptr<VideoFrame> frame;
+ frame = static_cast<VideoFrame*>(buffer->pAppPrivate);
+ event_handler_->OnFillBufferCallback(frame);
+ output_pending_request_--;
+ }
+ return;
+ }
+
+ // This buffer is received with decoded frame. Enqueue it and make it
+ // ready to be consumed by reads.
+
+ if (buffer->nFlags & OMX_BUFFERFLAG_EOS) {
+ output_eos_ = true;
+ DLOG(INFO) << "Output has EOS";
+ }
+
+ FinishFillBuffer(buffer);
+
+ if (buffer->nFlags & OMX_BUFFERFLAG_EOS) {
+    // Signal end of stream.
+ scoped_refptr<VideoFrame> frame;
+ VideoFrame::CreateEmptyFrame(&frame);
+ event_handler_->OnFillBufferCallback(frame);
+ }
+
+ if (client_state_ == kClientFlushing &&
+ InputPortFlushed() && OutputPortFlushed())
+ ComponentFlushDone();
+}
+
+void OmxVideoDecodeEngine::EventHandlerCompleteTask(OMX_EVENTTYPE event,
+ OMX_U32 data1,
+ OMX_U32 data2) {
+ switch (event) {
+ case OMX_EventCmdComplete: {
+ // If the last command was successful, we have completed
+ // a state transition. So notify that we have done it
+ // accordingly.
+ OMX_COMMANDTYPE cmd = static_cast<OMX_COMMANDTYPE>(data1);
+ if (cmd == OMX_CommandPortDisable) {
+ if (OnPortDisableEventFunc)
+ (this->*OnPortDisableEventFunc)(static_cast<int>(data2));
+ } else if (cmd == OMX_CommandPortEnable) {
+ if (OnPortEnableEventFunc)
+ (this->*OnPortEnableEventFunc)(static_cast<int>(data2));
+ } else if (cmd == OMX_CommandStateSet) {
+ (this->*OnStateSetEventFunc)(static_cast<OMX_STATETYPE>(data2));
+ } else if (cmd == OMX_CommandFlush) {
+ (this->*OnFlushEventFunc)(data2);
+ } else {
+ LOG(ERROR) << "Unknown command completed\n" << data1;
+ }
+ break;
+ }
+ case OMX_EventError:
+ if (OMX_ErrorInvalidState == (OMX_ERRORTYPE)data1) {
+ // TODO(hclam): what to do here?
+ }
+ StopOnError();
+ break;
+ case OMX_EventPortSettingsChanged:
+ // TODO(wjia): remove this hack when all vendors observe same spec.
+ if (data1 < OMX_IndexComponentStartUnused)
+ OnPortSettingsChangedRun(static_cast<int>(data1),
+ static_cast<OMX_INDEXTYPE>(data2));
+ else
+ OnPortSettingsChangedRun(static_cast<int>(data2),
+ static_cast<OMX_INDEXTYPE>(data1));
+ break;
+ default:
+ LOG(ERROR) << "Warning - Unknown event received\n";
+ break;
+ }
+}
+
+// static
+OMX_ERRORTYPE OmxVideoDecodeEngine::EventHandler(OMX_HANDLETYPE component,
+ OMX_PTR priv_data,
+ OMX_EVENTTYPE event,
+ OMX_U32 data1,
+ OMX_U32 data2,
+ OMX_PTR event_data) {
+ OmxVideoDecodeEngine* decoder = static_cast<OmxVideoDecodeEngine*>(priv_data);
+ DCHECK_EQ(component, decoder->component_handle_);
+ decoder->message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(decoder,
+ &OmxVideoDecodeEngine::EventHandlerCompleteTask,
+ event, data1, data2));
+ return OMX_ErrorNone;
+}
+
+// static
+OMX_ERRORTYPE OmxVideoDecodeEngine::EmptyBufferCallback(
+ OMX_HANDLETYPE component,
+ OMX_PTR priv_data,
+ OMX_BUFFERHEADERTYPE* buffer) {
+ OmxVideoDecodeEngine* decoder = static_cast<OmxVideoDecodeEngine*>(priv_data);
+ DCHECK_EQ(component, decoder->component_handle_);
+ decoder->message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(decoder,
+ &OmxVideoDecodeEngine::EmptyBufferDoneTask, buffer));
+ return OMX_ErrorNone;
+}
+
+// static
+OMX_ERRORTYPE OmxVideoDecodeEngine::FillBufferCallback(
+ OMX_HANDLETYPE component,
+ OMX_PTR priv_data,
+ OMX_BUFFERHEADERTYPE* buffer) {
+ OmxVideoDecodeEngine* decoder = static_cast<OmxVideoDecodeEngine*>(priv_data);
+ DCHECK_EQ(component, decoder->component_handle_);
+ decoder->message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(decoder,
+ &OmxVideoDecodeEngine::FillBufferDoneTask, buffer));
+ return OMX_ErrorNone;
+}
+
+} // namespace media
diff --git a/media/filters/omx_video_decode_engine.h b/media/filters/omx_video_decode_engine.h
new file mode 100644
index 0000000..da3fe52
--- /dev/null
+++ b/media/filters/omx_video_decode_engine.h
@@ -0,0 +1,246 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FILTERS_OMX_VIDEO_DECODE_ENGINE_H_
+#define MEDIA_FILTERS_OMX_VIDEO_DECODE_ENGINE_H_
+
+#include <queue>
+#include <vector>
+
+#include "base/callback.h"
+#include "base/lock.h"
+#include "base/scoped_ptr.h"
+#include "base/task.h"
+#include "media/filters/video_decode_engine.h"
+#include "media/omx/omx_configurator.h"
+#include "third_party/openmax/il/OMX_Component.h"
+#include "third_party/openmax/il/OMX_Core.h"
+#include "third_party/openmax/il/OMX_Video.h"
+
+namespace media {
+
+class OmxVideoDecodeEngine : public VideoDecodeEngine {
+ public:
+ OmxVideoDecodeEngine();
+ virtual ~OmxVideoDecodeEngine();
+
+ // Implementation of the VideoDecodeEngine Interface.
+ virtual void Initialize(MessageLoop* message_loop,
+ VideoDecodeEngine::EventHandler* event_handler,
+ const VideoCodecConfig& config);
+ virtual void EmptyThisBuffer(scoped_refptr<Buffer> buffer);
+ virtual void FillThisBuffer(scoped_refptr<VideoFrame> frame);
+ virtual void Uninitialize();
+ virtual void Flush();
+ virtual void Seek();
+
+ // Subclass can provide a different value.
+ virtual int current_omx_spec_version() const { return 0x00000101; }
+
+ private:
+ enum OmxIlState {
+ kIlNone,
+ kIlLoaded,
+ kIlIdle,
+ kIlExecuting,
+ kIlPause,
+ kIlInvalid,
+ kIlUnknown,
+ };
+
+ enum OmxIlClientState {
+ kClientNotInitialized,
+ kClientInitializing,
+ kClientRunning,
+ kClientStopping,
+ kClientStopped,
+ kClientPausing,
+ kClientFlushing,
+ kClientError,
+ };
+
+ enum OmxIlPortState {
+ kPortDisabled,
+ kPortEnabling,
+ kPortEnabled,
+ kPortDisabling,
+ };
+
+ typedef Callback0::Type Callback;
+
+ // calls into other classes
+ void FinishEmptyBuffer(scoped_refptr<Buffer> buffer);
+ void FinishFillBuffer(OMX_BUFFERHEADERTYPE* buffer);
+ // Helper method to perform tasks when this object is stopped.
+ void OnStopDone();
+
+ // Transition method sequence for initialization
+ bool CreateComponent();
+ void DoneSetStateIdle(OMX_STATETYPE state);
+ void DoneSetStateExecuting(OMX_STATETYPE state);
+ void OnPortSettingsChangedRun(int port, OMX_INDEXTYPE index);
+ void OnPortDisableEventRun(int port);
+ void SetupOutputPort();
+ void OnPortEnableEventRun(int port);
+
+ // Transition methods for shutdown
+ void DeinitFromExecuting(OMX_STATETYPE state);
+ void DeinitFromIdle(OMX_STATETYPE state);
+ void DeinitFromLoaded(OMX_STATETYPE state);
+ void PauseFromExecuting(OMX_STATETYPE state);
+ void StartFlush();
+ void PortFlushDone(int port);
+ void ComponentFlushDone();
+
+ void StopOnError();
+
+ void InitializeTask();
+
+  // Methods to allocate and free input and output buffers.
+ bool AllocateInputBuffers();
+ bool AllocateOutputBuffers();
+ void FreeInputBuffers();
+ void FreeOutputBuffers();
+ void FreeInputQueue();
+
+ // Helper method to configure port format at LOADED state.
+ bool ConfigureIOPorts();
+
+ // Determine whether we can issue fill buffer or empty buffer
+ // to the decoder based on the current state and port state.
+ bool CanEmptyBuffer();
+ bool CanFillBuffer();
+
+  // Determine whether we can accept new input and output buffers
+  // based on the current state.
+ bool CanAcceptInput();
+ bool CanAcceptOutput();
+
+ bool InputPortFlushed();
+ bool OutputPortFlushed();
+
+ // Method to send input buffers to component
+ void EmptyBufferTask();
+
+ // Method doing initial reads to get bit stream from demuxer.
+ void InitialReadBuffer();
+
+ // Method doing initial fills to kick start the decoding process.
+ void InitialFillBuffer();
+
+ // helper functions
+ void ChangePort(OMX_COMMANDTYPE cmd, int port_index);
+ OMX_BUFFERHEADERTYPE* FindOmxBuffer(scoped_refptr<VideoFrame> video_frame);
+ OMX_STATETYPE GetComponentState();
+ void SendOutputBufferToComponent(OMX_BUFFERHEADERTYPE *omx_buffer);
+ bool TransitionToState(OMX_STATETYPE new_state);
+ virtual VideoFrame::Format GetSurfaceFormat() const;
+
+ // Method to handle events
+ void EventHandlerCompleteTask(OMX_EVENTTYPE event,
+ OMX_U32 data1,
+ OMX_U32 data2);
+
+ // Method to receive buffers from component's input port
+ void EmptyBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer);
+
+ // Method to receive buffers from component's output port
+ void FillBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer);
+
+ // The following three methods are static callback methods
+ // for the OMX component. When these callbacks are received, the
+ // call is delegated to the three internal methods above.
+ static OMX_ERRORTYPE EventHandler(OMX_HANDLETYPE component,
+ OMX_PTR priv_data,
+ OMX_EVENTTYPE event,
+ OMX_U32 data1, OMX_U32 data2,
+ OMX_PTR event_data);
+
+ static OMX_ERRORTYPE EmptyBufferCallback(OMX_HANDLETYPE component,
+ OMX_PTR priv_data,
+ OMX_BUFFERHEADERTYPE* buffer);
+
+ static OMX_ERRORTYPE FillBufferCallback(OMX_HANDLETYPE component,
+ OMX_PTR priv_data,
+ OMX_BUFFERHEADERTYPE* buffer);
+
+ // Member function pointers to respond to events
+ void (OmxVideoDecodeEngine::*OnPortDisableEventFunc)(int port);
+ void (OmxVideoDecodeEngine::*OnPortEnableEventFunc)(int port);
+ void (OmxVideoDecodeEngine::*OnStateSetEventFunc)(OMX_STATETYPE state);
+ void (OmxVideoDecodeEngine::*OnFlushEventFunc)(int port);
+
+ // Helper function
+ scoped_refptr<VideoFrame> CreateOmxBufferVideoFrame(
+ OMX_BUFFERHEADERTYPE* omx_buffer);
+
+ size_t width_;
+ size_t height_;
+
+ MessageLoop* message_loop_;
+
+ std::vector<OMX_BUFFERHEADERTYPE*> input_buffers_;
+ int input_buffer_count_;
+ int input_buffer_size_;
+ int input_port_;
+ int input_buffers_at_component_;
+ int input_pending_request_;
+ bool input_queue_has_eos_;
+ bool input_has_fed_eos_;
+ bool input_port_flushed_;
+
+ int output_buffer_count_;
+ int output_buffer_size_;
+ int output_port_;
+ int output_buffers_at_component_;
+ int output_pending_request_;
+ bool output_eos_;
+ bool output_port_flushed_;
+ bool uses_egl_image_;
+ base::TimeDelta last_pts_;
+
+ // |il_state_| records the current component state. During state transition
+ // |expected_il_state_| is the next state that the component will transition
+ // to. After a state transition is completed, |il_state_| equals
+ // |expected_il_state_|. Inequality can be used to detect a state transition.
+ // These two members are read and written only on |message_loop_|.
+ OmxIlState il_state_;
+ OmxIlState expected_il_state_;
+ OmxIlClientState client_state_;
+
+ OMX_HANDLETYPE component_handle_;
+ scoped_ptr<media::OmxConfigurator> configurator_;
+
+ // Free input OpenMAX buffers that can be used to take input bitstream from
+ // demuxer.
+ std::queue<OMX_BUFFERHEADERTYPE*> free_input_buffers_;
+
+ // Available input OpenMAX buffers that we can use to issue
+ // OMX_EmptyThisBuffer() call.
+ std::queue<OMX_BUFFERHEADERTYPE*> available_input_buffers_;
+
+ // flag for freeing input/output buffers
+ bool need_free_input_buffers_;
+ bool need_free_output_buffers_;
+
+ // for calling flush callback only once.
+ bool flush_pending_;
+
+ // For output buffer recycling cases.
+ typedef std::pair<scoped_refptr<VideoFrame>,
+ OMX_BUFFERHEADERTYPE*> OutputFrame;
+ std::vector<OutputFrame> output_frames_;
+ bool output_frames_allocated_;
+
+ // port related
+ bool need_setup_output_port_;
+ OmxIlPortState output_port_state_;
+ VideoDecodeEngine::EventHandler* event_handler_;
+
+ DISALLOW_COPY_AND_ASSIGN(OmxVideoDecodeEngine);
+};
+
+} // namespace media
+
+#endif // MEDIA_FILTERS_OMX_VIDEO_DECODE_ENGINE_H_
diff --git a/media/filters/omx_video_decoder.cc b/media/filters/omx_video_decoder.cc
index 0a6d9910..2c96117 100644
--- a/media/filters/omx_video_decoder.cc
+++ b/media/filters/omx_video_decoder.cc
@@ -11,7 +11,7 @@
#include "media/base/limits.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/filters/ffmpeg_interfaces.h"
-#include "media/video/omx_video_decode_engine.h"
+#include "media/filters/omx_video_decode_engine.h"
namespace media {
diff --git a/media/filters/omx_video_decoder.h b/media/filters/omx_video_decoder.h
index 26efc38..16f3b64 100644
--- a/media/filters/omx_video_decoder.h
+++ b/media/filters/omx_video_decoder.h
@@ -10,7 +10,7 @@
#include "media/base/factory.h"
#include "media/base/filters.h"
#include "media/base/media_format.h"
-#include "media/video/video_decode_engine.h"
+#include "media/filters/video_decode_engine.h"
class MessageLoop;
diff --git a/media/filters/video_decode_engine.h b/media/filters/video_decode_engine.h
new file mode 100644
index 0000000..f1faf7b
--- /dev/null
+++ b/media/filters/video_decode_engine.h
@@ -0,0 +1,128 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FILTERS_VIDEO_DECODE_ENGINE_H_
+#define MEDIA_FILTERS_VIDEO_DECODE_ENGINE_H_
+
+#include "base/callback.h"
+#include "base/message_loop.h"
+#include "media/base/video_frame.h"
+
+namespace media {
+
+class Buffer;
+
+enum VideoCodec {
+ kCodecH264,
+ kCodecVC1,
+ kCodecMPEG2,
+ kCodecMPEG4,
+ kCodecTheora,
+ kCodecVP8,
+};
+
+static const uint32 kProfileDoNotCare = static_cast<uint32>(-1);
+static const uint32 kLevelDoNotCare = static_cast<uint32>(-1);
+
+struct VideoCodecConfig {
+ VideoCodecConfig() : codec_(kCodecH264),
+ profile_(kProfileDoNotCare),
+ level_(kLevelDoNotCare),
+ width_(0),
+ height_(0),
+ opaque_context_(NULL) {}
+
+ VideoCodec codec_;
+
+  // TODO(jiesun): video profile and level are specific to each codec.
+  // Define per-codec enums for them.
+ uint32 profile_;
+ uint32 level_;
+
+ // Container's concept of width and height of this video.
+ int32 width_;
+ int32 height_; // TODO(jiesun): Do we allow height to be negative to
+ // indicate output is upside-down?
+
+  // FFmpeg will use this to pass an AVStream. Otherwise we should remove it.
+ void* opaque_context_;
+};
+
+struct VideoStreamInfo {
+ VideoFrame::Format surface_format_;
+ VideoFrame::SurfaceType surface_type_;
+ uint32 surface_width_; // Can be different with container's value.
+ uint32 surface_height_; // Can be different with container's value.
+};
+
+struct VideoCodecInfo {
+ // Other parameter is only meaningful when this is true.
+ bool success_;
+
+ // Whether decoder provides output buffer pool.
+ bool provides_buffers_;
+
+ // Initial Stream Info. Only part of them could be valid.
+ // If they are not valid, Engine should update with OnFormatChange.
+ VideoStreamInfo stream_info_;
+};
+
+class VideoDecodeEngine : public base::RefCountedThreadSafe<VideoDecodeEngine> {
+ public:
+ struct EventHandler {
+ public:
+ virtual ~EventHandler() {}
+ virtual void OnInitializeComplete(const VideoCodecInfo& info) = 0;
+ virtual void OnUninitializeComplete() = 0;
+ virtual void OnFlushComplete() = 0;
+ virtual void OnSeekComplete() = 0;
+ virtual void OnError() = 0;
+ virtual void OnFormatChange(VideoStreamInfo stream_info) = 0;
+ virtual void OnEmptyBufferCallback(scoped_refptr<Buffer> buffer) = 0;
+ virtual void OnFillBufferCallback(scoped_refptr<VideoFrame> frame) = 0;
+ };
+
+ VideoDecodeEngine() {}
+ virtual ~VideoDecodeEngine() {}
+
+  // Initializes the engine with the specified configuration. |message_loop|
+  // could be NULL if every operation is synchronous. Engine should call
+  // EventHandler::OnInitializeComplete() whether successful or not.
+ // TODO(jiesun): remove message_loop and create thread inside openmax engine?
+ // or create thread in GpuVideoDecoder and pass message loop here?
+ virtual void Initialize(MessageLoop* message_loop,
+ EventHandler* event_handler,
+ const VideoCodecConfig& config) = 0;
+
+ // Uninitialize the engine. Engine should destroy all resources and call
+ // EventHandler::OnUninitializeComplete().
+ virtual void Uninitialize() = 0;
+
+  // Flush the engine. Engine should return all the buffers to their owner
+  // (which could be itself), then call EventHandler::OnFlushComplete().
+ virtual void Flush() = 0;
+
+  // Used in OpenMAX to trigger InitialReadBuffer().
+ virtual void Seek() = 0; // TODO(jiesun): Do we need this?
+
+ // Buffer exchange method for input and output stream.
+ // These functions and callbacks could be used in two scenarios for both
+ // input and output streams:
+ // 1. Engine provide buffers.
+ // 2. Outside party provide buffers.
+ // The currently planned engine implementation:
+  // 1. provides the input buffer request inside the engine through
+  // |OnEmptyBufferCallback|. The engine implementation has better knowledge
+  // of the decoder reordering delay and jitter removal requirements. Input
+  // buffers are returned into the engine through |EmptyThisBuffer|.
+  // 2. Output buffers are provided from outside the engine, and fed into the
+  // engine through |FillThisBuffer|. Output buffers are returned to the
+  // outside by |OnFillBufferCallback|.
+ virtual void EmptyThisBuffer(scoped_refptr<Buffer> buffer) = 0;
+ virtual void FillThisBuffer(scoped_refptr<VideoFrame> frame) = 0;
+};
+
+} // namespace media
+
+#endif // MEDIA_FILTERS_VIDEO_DECODE_ENGINE_H_
diff --git a/media/media.gyp b/media/media.gyp
index 21f0dc1..15b4266 100644
--- a/media/media.gyp
+++ b/media/media.gyp
@@ -126,19 +126,19 @@
'filters/ffmpeg_glue.h',
'filters/ffmpeg_interfaces.cc',
'filters/ffmpeg_interfaces.h',
+ 'filters/ffmpeg_video_allocator.cc',
+ 'filters/ffmpeg_video_allocator.h',
+ 'filters/ffmpeg_video_decode_engine.cc',
+ 'filters/ffmpeg_video_decode_engine.h',
'filters/ffmpeg_video_decoder.cc',
'filters/ffmpeg_video_decoder.h',
'filters/file_data_source.cc',
'filters/file_data_source.h',
'filters/null_audio_renderer.cc',
'filters/null_audio_renderer.h',
+ 'filters/video_decode_engine.h',
'filters/video_renderer_base.cc',
'filters/video_renderer_base.h',
- 'video/ffmpeg_video_allocator.cc',
- 'video/ffmpeg_video_allocator.h',
- 'video/ffmpeg_video_decode_engine.cc',
- 'video/ffmpeg_video_decode_engine.h',
- 'video/video_decode_engine.h',
],
'direct_dependent_settings': {
'include_dirs': [
@@ -170,6 +170,8 @@
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
'sources': [
+ 'filters/omx_video_decode_engine.cc',
+ 'filters/omx_video_decode_engine.h',
'filters/omx_video_decoder.cc',
'filters/omx_video_decoder.h',
],
@@ -270,12 +272,12 @@
'filters/decoder_base_unittest.cc',
'filters/ffmpeg_demuxer_unittest.cc',
'filters/ffmpeg_glue_unittest.cc',
+ 'filters/ffmpeg_video_decode_engine_unittest.cc',
'filters/ffmpeg_video_decoder_unittest.cc',
'filters/file_data_source_unittest.cc',
'filters/video_renderer_base_unittest.cc',
'omx/mock_omx.cc',
'omx/mock_omx.h',
- 'video/ffmpeg_video_decode_engine_unittest.cc',
],
'conditions': [
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
@@ -567,10 +569,10 @@
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
],
'sources': [
+ 'filters/omx_video_decode_engine.cc',
+ 'filters/omx_video_decode_engine.cc',
'omx/omx_configurator.cc',
'omx/omx_configurator.h',
- 'video/omx_video_decode_engine.cc',
- 'video/omx_video_decode_engine.cc',
],
'hard_dependency': 1,
'export_dependent_settings': [
diff --git a/media/mf/mft_h264_decoder.h b/media/mf/mft_h264_decoder.h
index b84ab97..bcb45c4 100644
--- a/media/mf/mft_h264_decoder.h
+++ b/media/mf/mft_h264_decoder.h
@@ -19,7 +19,7 @@
#include "base/gtest_prod_util.h"
#include "base/scoped_comptr_win.h"
-#include "media/video/video_decode_engine.h"
+#include "media/filters/video_decode_engine.h"
class MessageLoop;
diff --git a/media/mf/test/mft_h264_decoder_unittest.cc b/media/mf/test/mft_h264_decoder_unittest.cc
index c14a3e41..0e78449 100644
--- a/media/mf/test/mft_h264_decoder_unittest.cc
+++ b/media/mf/test/mft_h264_decoder_unittest.cc
@@ -12,9 +12,9 @@
#include "base/time.h"
#include "media/base/data_buffer.h"
#include "media/base/video_frame.h"
+#include "media/filters/video_decode_engine.h"
#include "media/mf/file_reader_util.h"
#include "media/mf/mft_h264_decoder.h"
-#include "media/video/video_decode_engine.h"
#include "testing/gtest/include/gtest/gtest.h"
using base::TimeDelta;
diff --git a/media/omx/omx_codec_unittest.cc b/media/omx/omx_codec_unittest.cc
index 0e4b258..92bb25f 100644
--- a/media/omx/omx_codec_unittest.cc
+++ b/media/omx/omx_codec_unittest.cc
@@ -14,8 +14,8 @@
#include "media/base/mock_filters.h"
#include "media/base/mock_task.h"
#include "media/ffmpeg/ffmpeg_common.h"
-#include "media/video/omx_video_decode_engine.h"
-#include "media/video/video_decode_engine.h"
+#include "media/filters/omx_video_decode_engine.h"
+#include "media/filters/video_decode_engine.h"
#include "media/omx/mock_omx.h"
#include "testing/gtest/include/gtest/gtest.h"
diff --git a/media/tools/omx_test/omx_test.cc b/media/tools/omx_test/omx_test.cc
index e48251c..efddb14 100644
--- a/media/tools/omx_test/omx_test.cc
+++ b/media/tools/omx_test/omx_test.cc
@@ -22,10 +22,10 @@
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/file_protocol.h"
#include "media/filters/bitstream_converter.h"
+#include "media/filters/omx_video_decode_engine.h"
#include "media/tools/omx_test/color_space_util.h"
#include "media/tools/omx_test/file_reader_util.h"
#include "media/tools/omx_test/file_sink.h"
-#include "media/video/omx_video_decode_engine.h"
using media::BlockFileReader;
using media::Buffer;