summaryrefslogtreecommitdiffstats
path: root/media
diff options
context:
space:
mode:
authorscherkus@chromium.org <scherkus@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2011-06-03 16:33:56 +0000
committerscherkus@chromium.org <scherkus@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2011-06-03 16:33:56 +0000
commitef03875e8fab7be527abe1c890d420b1fedc58f6 (patch)
treee6b00dd59a5ba859a88e3e65e38237a9ef162c6d /media
parent6556c953778fcd26f9b2ddd817be926cfd9ca191 (diff)
downloadchromium_src-ef03875e8fab7be527abe1c890d420b1fedc58f6.zip
chromium_src-ef03875e8fab7be527abe1c890d420b1fedc58f6.tar.gz
chromium_src-ef03875e8fab7be527abe1c890d420b1fedc58f6.tar.bz2
Revert 87790 - Removing defunct OpenMAX code.
All of this code hasn't been used in over a year and has been replaced by VideoDecodeAccelerator and its corresponding OpenMAX implementation OmxVideoDecodeAccelerator. BUG=none TEST=the world still compiles Review URL: http://codereview.chromium.org/7066071 TBR=scherkus@chromium.org Review URL: http://codereview.chromium.org/7065060 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@87793 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'media')
-rw-r--r--media/base/media_switches.cc4
-rw-r--r--media/base/media_switches.h2
-rw-r--r--media/filters/ffmpeg_video_decoder.cc1
-rw-r--r--media/filters/omx_video_decoder.cc255
-rw-r--r--media/filters/omx_video_decoder.h82
-rw-r--r--media/media.gyp100
-rw-r--r--media/tools/player_x11/player_x11.cc23
-rw-r--r--media/video/omx_video_decode_engine.cc1357
-rw-r--r--media/video/omx_video_decode_engine.h246
9 files changed, 2065 insertions, 5 deletions
diff --git a/media/base/media_switches.cc b/media/base/media_switches.cc
index 54e83b5..c93727e 100644
--- a/media/base/media_switches.cc
+++ b/media/base/media_switches.cc
@@ -19,6 +19,10 @@ const char kEnableAcceleratedDecoding[] = "enable-accelerated-decoding";
// Enable x-adaptive URL scheme.
const char kEnableAdaptive[] = "enable-adaptive";
+// Enable hardware decoding using OpenMax API.
+// In practice this is for ChromeOS ARM.
+const char kEnableOpenMax[] = "enable-openmax";
+
// Set number of threads to use for video decoding.
const char kVideoThreads[] = "video-threads";
diff --git a/media/base/media_switches.h b/media/base/media_switches.h
index b84fcda..904f9bc 100644
--- a/media/base/media_switches.h
+++ b/media/base/media_switches.h
@@ -18,8 +18,10 @@ extern const char kAlsaInputDevice[];
extern const char kEnableAcceleratedDecoding[];
extern const char kEnableAdaptive[];
+extern const char kEnableOpenMax[];
extern const char kVideoThreads[];
+
} // namespace switches
#endif // MEDIA_BASE_MEDIA_SWITCHES_H_
diff --git a/media/filters/ffmpeg_video_decoder.cc b/media/filters/ffmpeg_video_decoder.cc
index 449b6aa..53d28a2 100644
--- a/media/filters/ffmpeg_video_decoder.cc
+++ b/media/filters/ffmpeg_video_decoder.cc
@@ -88,6 +88,7 @@ void FFmpegVideoDecoder::Initialize(DemuxerStream* demuxer_stream,
}
void FFmpegVideoDecoder::OnInitializeComplete(const VideoCodecInfo& info) {
+ // TODO(scherkus): Dedup this from OmxVideoDecoder::OnInitializeComplete.
DCHECK_EQ(MessageLoop::current(), message_loop_);
DCHECK(initialize_callback_.get());
diff --git a/media/filters/omx_video_decoder.cc b/media/filters/omx_video_decoder.cc
new file mode 100644
index 0000000..b58e1c6
--- /dev/null
+++ b/media/filters/omx_video_decoder.cc
@@ -0,0 +1,255 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/filters/omx_video_decoder.h"
+
+#include "base/bind.h"
+#include "base/callback.h"
+#include "base/message_loop.h"
+#include "media/base/callback.h"
+#include "media/base/filter_host.h"
+#include "media/base/limits.h"
+#include "media/ffmpeg/ffmpeg_common.h"
+#include "media/video/omx_video_decode_engine.h"
+
+namespace media {
+
+OmxVideoDecoder::OmxVideoDecoder(
+ MessageLoop* message_loop,
+ VideoDecodeContext* context)
+ : message_loop_(message_loop),
+ decode_engine_(new OmxVideoDecodeEngine()),
+ decode_context_(context) {
+ DCHECK(decode_engine_.get());
+ memset(&info_, 0, sizeof(info_));
+}
+
+OmxVideoDecoder::~OmxVideoDecoder() {
+ // TODO(hclam): Make sure OmxVideoDecodeEngine is stopped.
+}
+
+void OmxVideoDecoder::Initialize(DemuxerStream* demuxer_stream,
+ FilterCallback* callback,
+ StatisticsCallback* stats_callback) {
+ if (MessageLoop::current() != message_loop_) {
+ message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &OmxVideoDecoder::Initialize,
+ make_scoped_refptr(demuxer_stream),
+ callback, stats_callback));
+ return;
+ }
+
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK(!demuxer_stream_);
+ DCHECK(!initialize_callback_.get());
+
+ initialize_callback_.reset(callback);
+ statistics_callback_.reset(stats_callback);
+ demuxer_stream_ = demuxer_stream;
+
+ // We require a bitstream converter for the OpenMAX hardware decoder.
+ demuxer_stream->EnableBitstreamConverter();
+
+ AVStream* av_stream = demuxer_stream->GetAVStream();
+ if (!av_stream) {
+ VideoCodecInfo info = {0};
+ OnInitializeComplete(info);
+ return;
+ }
+
+ pts_stream_.Initialize(GetFrameDuration(av_stream));
+
+ int width = av_stream->codec->coded_width;
+ int height = av_stream->codec->coded_height;
+
+ int surface_width = GetSurfaceWidth(av_stream);
+ int surface_height = GetSurfaceHeight(av_stream);
+
+ if (surface_width > Limits::kMaxDimension ||
+ surface_height > Limits::kMaxDimension ||
+ (surface_width * surface_height) > Limits::kMaxCanvas) {
+ VideoCodecInfo info = {0};
+ OnInitializeComplete(info);
+ return;
+ }
+
+ VideoDecoderConfig config(CodecIDToVideoCodec(av_stream->codec->codec_id),
+ width, height,
+ surface_width, surface_height,
+ av_stream->r_frame_rate.num,
+ av_stream->r_frame_rate.den,
+ av_stream->codec->extradata,
+ av_stream->codec->extradata_size);
+ decode_engine_->Initialize(message_loop_, this, NULL, config);
+}
+
+void OmxVideoDecoder::OnInitializeComplete(const VideoCodecInfo& info) {
+ // TODO(scherkus): Dedup this from FFmpegVideoDecoder::OnInitializeComplete.
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+ DCHECK(initialize_callback_.get());
+
+ info_ = info;
+ AutoCallbackRunner done_runner(initialize_callback_.release());
+
+ if (info.success) {
+ media_format_.SetAsInteger(MediaFormat::kWidth,
+ info.stream_info.surface_width);
+ media_format_.SetAsInteger(MediaFormat::kHeight,
+ info.stream_info.surface_height);
+ media_format_.SetAsInteger(
+ MediaFormat::kSurfaceType,
+ static_cast<int>(info.stream_info.surface_type));
+ media_format_.SetAsInteger(
+ MediaFormat::kSurfaceFormat,
+ static_cast<int>(info.stream_info.surface_format));
+ } else {
+ host()->SetError(PIPELINE_ERROR_DECODE);
+ }
+}
+
+void OmxVideoDecoder::Stop(FilterCallback* callback) {
+ if (MessageLoop::current() != message_loop_) {
+ message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this,
+ &OmxVideoDecoder::Stop,
+ callback));
+ return;
+ }
+
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+ DCHECK(!uninitialize_callback_.get());
+
+ uninitialize_callback_.reset(callback);
+ decode_engine_->Uninitialize();
+}
+
+void OmxVideoDecoder::OnUninitializeComplete() {
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+ DCHECK(uninitialize_callback_.get());
+
+ AutoCallbackRunner done_runner(uninitialize_callback_.release());
+
+ // TODO(jiesun): Destroy the decoder context.
+}
+
+void OmxVideoDecoder::Flush(FilterCallback* callback) {
+ if (MessageLoop::current() != message_loop_) {
+ message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this,
+ &OmxVideoDecoder::Flush,
+ callback));
+ return;
+ }
+
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+ DCHECK(!flush_callback_.get());
+
+ flush_callback_.reset(callback);
+
+ decode_engine_->Flush();
+}
+
+
+void OmxVideoDecoder::OnFlushComplete() {
+ DCHECK(flush_callback_.get());
+
+ AutoCallbackRunner done_runner(flush_callback_.release());
+
+ pts_stream_.Flush();
+}
+
+void OmxVideoDecoder::Seek(base::TimeDelta time, const FilterStatusCB& cb) {
+ if (MessageLoop::current() != message_loop_) {
+ message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this,
+ &OmxVideoDecoder::Seek,
+ time,
+ cb));
+ return;
+ }
+
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+ DCHECK(seek_cb_.is_null());
+
+ pts_stream_.Seek(time);
+ seek_cb_ = cb;
+ decode_engine_->Seek();
+}
+
+void OmxVideoDecoder::OnSeekComplete() {
+ DCHECK_EQ(MessageLoop::current(), message_loop_);
+ DCHECK(!seek_cb_.is_null());
+
+ ResetAndRunCB(&seek_cb_, PIPELINE_OK);
+}
+
+void OmxVideoDecoder::OnError() {
+ NOTIMPLEMENTED();
+}
+void OmxVideoDecoder::OnFormatChange(VideoStreamInfo stream_info) {
+ NOTIMPLEMENTED();
+}
+
+void OmxVideoDecoder::ProduceVideoSample(scoped_refptr<Buffer> buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ // Issue more demux.
+ demuxer_stream_->Read(base::Bind(&OmxVideoDecoder::DemuxCompleteTask, this));
+}
+
+void OmxVideoDecoder::ConsumeVideoFrame(scoped_refptr<VideoFrame> frame,
+ const PipelineStatistics& statistics) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ statistics_callback_->Run(statistics);
+
+ if (frame.get()) {
+ pts_stream_.UpdatePtsAndDuration(frame.get());
+
+ frame->SetTimestamp(pts_stream_.current_pts());
+ frame->SetDuration(pts_stream_.current_duration());
+ }
+
+ VideoFrameReady(frame);
+}
+
+void OmxVideoDecoder::ProduceVideoFrame(scoped_refptr<VideoFrame> frame) {
+ DCHECK(decode_engine_.get());
+ message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(decode_engine_.get(),
+ &VideoDecodeEngine::ProduceVideoFrame, frame));
+}
+
+bool OmxVideoDecoder::ProvidesBuffer() {
+ DCHECK(info_.success);
+ return info_.provides_buffers;
+}
+
+const MediaFormat& OmxVideoDecoder::media_format() {
+ return media_format_;
+}
+
+void OmxVideoDecoder::DemuxCompleteTask(Buffer* buffer) {
+ // We simply delegate the buffer to the right message loop.
+ scoped_refptr<Buffer> ref_buffer = buffer;
+ DCHECK(decode_engine_.get());
+ message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &OmxVideoDecoder::ConsumeVideoSample, ref_buffer));
+}
+
+void OmxVideoDecoder::ConsumeVideoSample(scoped_refptr<Buffer> buffer) {
+ if (buffer.get())
+ pts_stream_.EnqueuePts(buffer.get());
+ decode_engine_->ConsumeVideoSample(buffer);
+}
+
+} // namespace media
+
+// Disable refcounting for the decode engine because it only lives on the
+// video decoder thread.
+DISABLE_RUNNABLE_METHOD_REFCOUNT(media::VideoDecodeEngine);
diff --git a/media/filters/omx_video_decoder.h b/media/filters/omx_video_decoder.h
new file mode 100644
index 0000000..40bf721
--- /dev/null
+++ b/media/filters/omx_video_decoder.h
@@ -0,0 +1,82 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FILTERS_OMX_VIDEO_DECODER_H_
+#define MEDIA_FILTERS_OMX_VIDEO_DECODER_H_
+
+#include <queue>
+
+#include "media/base/filters.h"
+#include "media/base/media_format.h"
+#include "media/base/pts_stream.h"
+#include "media/video/video_decode_context.h"
+#include "media/video/video_decode_engine.h"
+
+class MessageLoop;
+
+namespace media {
+
+class Buffer;
+class OmxVideoDecodeEngine;
+class VideoFrame;
+
+class OmxVideoDecoder : public VideoDecoder,
+ public VideoDecodeEngine::EventHandler {
+ public:
+ OmxVideoDecoder(MessageLoop* message_loop,
+ VideoDecodeContext* decode_context);
+ virtual ~OmxVideoDecoder();
+
+ // Filter implementations.
+ virtual void Initialize(DemuxerStream* stream,
+ FilterCallback* callback,
+ StatisticsCallback* stats_callback);
+ virtual void Stop(FilterCallback* callback);
+ virtual void Flush(FilterCallback* callback);
+ virtual void Seek(base::TimeDelta time, const FilterStatusCB& cb);
+ virtual void ProduceVideoFrame(scoped_refptr<VideoFrame> frame);
+ virtual bool ProvidesBuffer();
+ virtual const MediaFormat& media_format();
+
+ private:
+ // VideoDecodeEngine::EventHandler interface.
+ virtual void OnInitializeComplete(const VideoCodecInfo& info);
+ virtual void OnUninitializeComplete();
+ virtual void OnFlushComplete();
+ virtual void OnSeekComplete();
+ virtual void OnError();
+ virtual void OnFormatChange(VideoStreamInfo stream_info);
+ virtual void ProduceVideoSample(scoped_refptr<Buffer> buffer);
+ virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame,
+ const PipelineStatistics& statistics);
+
+ // TODO(hclam): It is very ugly that we keep a raw pointer here instead
+ // of a scoped_refptr.
+ void DemuxCompleteTask(Buffer* buffer);
+ void ConsumeVideoSample(scoped_refptr<Buffer> buffer);
+
+ MessageLoop* message_loop_;
+
+ // Pointer to the demuxer stream that will feed us compressed buffers.
+ scoped_refptr<DemuxerStream> demuxer_stream_;
+ scoped_ptr<VideoDecodeEngine> decode_engine_;
+ scoped_ptr<VideoDecodeContext> decode_context_;
+ MediaFormat media_format_;
+
+ scoped_ptr<FilterCallback> initialize_callback_;
+ scoped_ptr<FilterCallback> uninitialize_callback_;
+ scoped_ptr<FilterCallback> flush_callback_;
+ FilterStatusCB seek_cb_;
+ scoped_ptr<StatisticsCallback> statistics_callback_;
+
+ VideoCodecInfo info_;
+
+ PtsStream pts_stream_; // Stream of presentation timestamps.
+
+ DISALLOW_COPY_AND_ASSIGN(OmxVideoDecoder);
+};
+
+} // namespace media
+
+#endif // MEDIA_FILTERS_OMX_VIDEO_DECODER_H_
diff --git a/media/media.gyp b/media/media.gyp
index 846297d..b7560c1 100644
--- a/media/media.gyp
+++ b/media/media.gyp
@@ -15,9 +15,8 @@
'yuv_convert',
'../base/base.gyp:base',
'../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
- '../build/temp_gyp/googleurl.gyp:googleurl',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- '../third_party/openmax/openmax.gyp:il',
+ '../build/temp_gyp/googleurl.gyp:googleurl',
],
'include_dirs': [
'..',
@@ -225,6 +224,15 @@
],
}],
['os_posix == 1 and OS != "mac"', {
+ 'sources': [
+ 'filters/omx_video_decoder.cc',
+ 'filters/omx_video_decoder.h',
+ ],
+ 'dependencies': [
+ 'omx_wrapper',
+ ]
+ }],
+ ['os_posix == 1 and OS != "mac"', {
'sources!': [
'video/capture/video_capture_device_dummy.cc',
'video/capture/video_capture_device_dummy.h',
@@ -368,6 +376,10 @@
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ '../third_party/openmax/openmax.gyp:il',
+ ],
+ 'sources!': [
+ '../third_party/openmax/omx_stub.cc',
],
'sources': [
'audio/audio_input_controller_unittest.cc',
@@ -413,8 +425,31 @@
'filters/file_data_source_unittest.cc',
'filters/rtc_video_decoder_unittest.cc',
'filters/video_renderer_base_unittest.cc',
+ 'omx/mock_omx.cc',
+ 'omx/mock_omx.h',
'video/ffmpeg_video_decode_engine_unittest.cc',
],
+ 'conditions': [
+ ['toolkit_uses_gtk == 1', {
+ 'dependencies': [
+ # Needed for the following #include chain:
+ # base/run_all_unittests.cc
+ # ../base/test_suite.h
+ # gtk/gtk.h
+ '../build/linux/system.gyp:gtk',
+ ],
+ 'sources': [
+ 'omx/omx_codec_unittest.cc',
+ ],
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ],
},
{
'target_name': 'media_test_support',
@@ -641,6 +676,67 @@
['os_posix == 1 and OS != "mac"', {
'targets': [
{
+ 'target_name': 'omx_test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ '../third_party/openmax/openmax.gyp:il',
+ ],
+ 'sources': [
+ 'tools/omx_test/color_space_util.cc',
+ 'tools/omx_test/color_space_util.h',
+ 'tools/omx_test/file_reader_util.cc',
+ 'tools/omx_test/file_reader_util.h',
+ 'tools/omx_test/file_sink.cc',
+ 'tools/omx_test/file_sink.h',
+ 'tools/omx_test/omx_test.cc',
+ ],
+ },
+ {
+ 'target_name': 'omx_unittests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'omx_wrapper',
+ '../base/base.gyp:base',
+ '../base/base.gyp:base_i18n',
+ '../base/base.gyp:test_support_base',
+ '../testing/gtest.gyp:gtest',
+ ],
+ 'conditions': [
+ ['toolkit_uses_gtk == 1', {
+ 'dependencies': [
+ '../build/linux/system.gyp:gtk',
+ ],
+ }],
+ ],
+ 'sources': [
+ 'omx/omx_unittest.cc',
+ 'omx/run_all_unittests.cc',
+ ],
+ },
+ {
+ 'target_name': 'omx_wrapper',
+ 'type': 'static_library',
+ 'dependencies': [
+ '../base/base.gyp:base',
+ '../third_party/openmax/openmax.gyp:il',
+ # TODO(wjia): remove ffmpeg
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'omx/omx_configurator.cc',
+ 'omx/omx_configurator.h',
+ 'video/omx_video_decode_engine.cc',
+ 'video/omx_video_decode_engine.h',
+ ],
+ 'hard_dependency': 1,
+ 'export_dependent_settings': [
+ '../third_party/openmax/openmax.gyp:il',
+ ],
+ },
+ {
'target_name': 'player_x11',
'type': 'executable',
'dependencies': [
diff --git a/media/tools/player_x11/player_x11.cc b/media/tools/player_x11/player_x11.cc
index 590306b..42b30438 100644
--- a/media/tools/player_x11/player_x11.cc
+++ b/media/tools/player_x11/player_x11.cc
@@ -26,6 +26,7 @@
#include "media/filters/ffmpeg_video_decoder.h"
#include "media/filters/file_data_source_factory.h"
#include "media/filters/null_audio_renderer.h"
+#include "media/filters/omx_video_decoder.h"
// TODO(jiesun): implement different video decode contexts according to
// these flags. e.g.
@@ -94,6 +95,14 @@ bool InitPipeline(MessageLoop* message_loop,
scoped_refptr<media::PipelineImpl>* pipeline,
MessageLoop* paint_message_loop,
media::MessageLoopFactory* message_loop_factory) {
+ // Initialize OpenMAX.
+ if (CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kEnableOpenMax) &&
+ !media::InitializeOpenMaxLibrary(FilePath())) {
+ std::cout << "Unable to initialize OpenMAX library."<< std::endl;
+ return false;
+ }
+
// Load media libraries.
if (!media::InitializeMediaLibrary(FilePath())) {
std::cout << "Unable to initialize the media library." << std::endl;
@@ -109,9 +118,16 @@ bool InitPipeline(MessageLoop* message_loop,
new media::FileDataSourceFactory(), message_loop)));
collection->AddAudioDecoder(new media::FFmpegAudioDecoder(
message_loop_factory->GetMessageLoop("AudioDecoderThread")));
- collection->AddVideoDecoder(new media::FFmpegVideoDecoder(
- message_loop_factory->GetMessageLoop("VideoDecoderThread"),
- NULL));
+ if (CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kEnableOpenMax)) {
+ collection->AddVideoDecoder(new media::OmxVideoDecoder(
+ message_loop_factory->GetMessageLoop("VideoDecoderThread"),
+ NULL));
+ } else {
+ collection->AddVideoDecoder(new media::FFmpegVideoDecoder(
+ message_loop_factory->GetMessageLoop("VideoDecoderThread"),
+ NULL));
+ }
collection->AddVideoRenderer(new Renderer(g_display,
g_window,
paint_message_loop));
@@ -218,6 +234,7 @@ int main(int argc, char** argv) {
std::cout << "Usage: " << argv[0] << " --file=FILE" << std::endl
<< std::endl
<< "Optional arguments:" << std::endl
+ << " [--enable-openmax]"
<< " [--audio]"
<< " [--alsa-device=DEVICE]" << std::endl
<< " Press [ESC] to stop" << std::endl
diff --git a/media/video/omx_video_decode_engine.cc b/media/video/omx_video_decode_engine.cc
new file mode 100644
index 0000000..d799ecd
--- /dev/null
+++ b/media/video/omx_video_decode_engine.cc
@@ -0,0 +1,1357 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This class interacts with OmxCodec and the VideoDecoderImpl
+// in the media pipeline.
+//
+// THREADING SEMANTICS
+//
+// This class is created by OmxVideoDecoder and lives on the same thread
+// as its creator. This class is given the message loop
+// for the above thread. The OMX callbacks are guaranteed to be
+// executed on the hosting message loop. Because of that there's no need
+// for locking anywhere.
+
+#include "media/video/omx_video_decode_engine.h"
+
+#include "base/logging.h"
+#include "base/message_loop.h"
+#include "base/string_util.h"
+#include "media/base/buffers.h"
+#include "media/base/pipeline.h"
+
+namespace media {
+
+OmxVideoDecodeEngine::OmxVideoDecodeEngine()
+ : width_(16),
+ height_(16),
+ message_loop_(NULL),
+ input_buffer_count_(0),
+ input_buffer_size_(0),
+ input_port_(0),
+ input_buffers_at_component_(0),
+ input_pending_request_(0),
+ input_queue_has_eos_(false),
+ input_has_fed_eos_(false),
+ input_port_flushed_(false),
+ output_buffer_count_(0),
+ output_buffer_size_(0),
+ output_port_(0),
+ output_buffers_at_component_(0),
+ output_pending_request_(0),
+ output_eos_(false),
+ output_port_flushed_(false),
+ il_state_(kIlNone),
+ expected_il_state_(kIlNone),
+ client_state_(kClientNotInitialized),
+ component_handle_(NULL),
+ need_free_input_buffers_(false),
+ need_free_output_buffers_(false),
+ flush_pending_(false),
+ output_frames_allocated_(false),
+ need_setup_output_port_(false) {
+ // TODO(wjia): change uses_egl_image_ to runtime setup
+#if ENABLE_EGLIMAGE == 1
+ uses_egl_image_ = true;
+ DLOG(INFO) << "Uses egl image for output";
+#else
+ uses_egl_image_ = false;
+ DLOG(INFO) << "Uses system memory for output";
+#endif
+}
+
+OmxVideoDecodeEngine::~OmxVideoDecodeEngine() {
+ DCHECK(client_state_ == kClientNotInitialized ||
+ client_state_ == kClientStopped);
+ DCHECK_EQ(il_state_, kIlNone);
+ DCHECK_EQ(0u, input_buffers_.size());
+ DCHECK(free_input_buffers_.empty());
+ DCHECK(available_input_buffers_.empty());
+ DCHECK_EQ(0, input_buffers_at_component_);
+ DCHECK_EQ(0, output_buffers_at_component_);
+ DCHECK(output_frames_.empty());
+}
+
+template <typename T>
+static void ResetParamHeader(const OmxVideoDecodeEngine& dec, T* param) {
+ memset(param, 0, sizeof(T));
+ param->nVersion.nVersion = dec.current_omx_spec_version();
+ param->nSize = sizeof(T);
+}
+
+void OmxVideoDecodeEngine::Initialize(
+ MessageLoop* message_loop,
+ VideoDecodeEngine::EventHandler* event_handler,
+ VideoDecodeContext* context,
+ const VideoDecoderConfig& config) {
+ DCHECK_EQ(message_loop, MessageLoop::current());
+
+ message_loop_ = message_loop;
+ event_handler_ = event_handler;
+
+ width_ = config.width();
+ height_ = config.height();
+
+ // TODO(wjia): Find the right way to determine the codec type.
+ OmxConfigurator::MediaFormat input_format, output_format;
+ memset(&input_format, 0, sizeof(input_format));
+ memset(&output_format, 0, sizeof(output_format));
+ input_format.codec = OmxConfigurator::kCodecH264;
+ output_format.codec = OmxConfigurator::kCodecRaw;
+ configurator_.reset(
+ new OmxDecoderConfigurator(input_format, output_format));
+
+ // TODO(jiesun): We already ensure Initialize() is called in thread context,
+ // We should try to merge the following function into this function.
+ client_state_ = kClientInitializing;
+ InitializeTask();
+
+ VideoCodecInfo info;
+ // TODO(jiesun): ridiculous, we never fail initialization?
+ info.success = true;
+ info.provides_buffers = !uses_egl_image_;
+ info.stream_info.surface_type =
+ uses_egl_image_ ? VideoFrame::TYPE_GL_TEXTURE
+ : VideoFrame::TYPE_SYSTEM_MEMORY;
+ info.stream_info.surface_format = GetSurfaceFormat();
+ info.stream_info.surface_width = config.width();
+ info.stream_info.surface_height = config.height();
+ event_handler_->OnInitializeComplete(info);
+}
+
+// This method handles only input buffer, without coupling with output
+void OmxVideoDecodeEngine::ConsumeVideoSample(scoped_refptr<Buffer> buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK(!free_input_buffers_.empty());
+ DCHECK_GT(input_pending_request_, 0);
+
+ --input_pending_request_;
+
+ if (!CanAcceptInput()) {
+ FinishEmptyBuffer(buffer);
+ return;
+ }
+
+ if (buffer->IsEndOfStream()) {
+ DLOG(INFO) << "Input queue has EOS";
+ input_queue_has_eos_ = true;
+ }
+
+ OMX_BUFFERHEADERTYPE* omx_buffer = free_input_buffers_.front();
+ free_input_buffers_.pop();
+
+ // setup |omx_buffer|.
+ omx_buffer->pBuffer = const_cast<OMX_U8*>(buffer->GetData());
+ omx_buffer->nFilledLen = buffer->GetDataSize();
+ omx_buffer->nAllocLen = omx_buffer->nFilledLen;
+ if (input_queue_has_eos_)
+ omx_buffer->nFlags |= OMX_BUFFERFLAG_EOS;
+ else
+ omx_buffer->nFlags &= ~OMX_BUFFERFLAG_EOS;
+ omx_buffer->nTimeStamp = buffer->GetTimestamp().InMicroseconds();
+ omx_buffer->pAppPrivate = buffer.get();
+ buffer->AddRef();
+ available_input_buffers_.push(omx_buffer);
+
+ // Try to feed buffers into the decoder.
+ EmptyBufferTask();
+
+ if (flush_pending_ && input_pending_request_ == 0)
+ StartFlush();
+}
+
+void OmxVideoDecodeEngine::Flush() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(il_state_, kIlExecuting);
+
+ if (il_state_ != kIlExecuting) {
+ event_handler_->OnFlushComplete();
+ return;
+ }
+
+ client_state_ = kClientFlushing;
+ expected_il_state_ = kIlPause;
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::PauseFromExecuting;
+ TransitionToState(OMX_StatePause);
+}
+
+void OmxVideoDecodeEngine::PauseFromExecuting(OMX_STATETYPE state) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ OnStateSetEventFunc = NULL;
+ il_state_ = kIlPause;
+
+ if (input_pending_request_ == 0)
+ StartFlush();
+ else
+ flush_pending_ = true;
+}
+
+void OmxVideoDecodeEngine::StartFlush() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(input_pending_request_, 0);
+ DLOG(INFO) << "StartFlush";
+
+ while (!available_input_buffers_.empty())
+ available_input_buffers_.pop();
+
+ flush_pending_ = false;
+
+ // Flush input port first.
+ OnFlushEventFunc = &OmxVideoDecodeEngine::PortFlushDone;
+ OMX_ERRORTYPE omxresult;
+ omxresult = OMX_SendCommand(component_handle_,
+ OMX_CommandFlush,
+ input_port_, 0);
+}
+
+bool OmxVideoDecodeEngine::InputPortFlushed() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(client_state_, kClientFlushing);
+ // The port is flushed once the OpenMAX component has signaled flush done
+ // and all buffers have been returned from the demuxer and the component.
+ int free_input_size = static_cast<int>(free_input_buffers_.size());
+ return input_port_flushed_ && free_input_size == input_buffer_count_;
+}
+
+bool OmxVideoDecodeEngine::OutputPortFlushed() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(client_state_, kClientFlushing);
+ // The port is flushed once the OpenMAX component has signaled flush done
+ // and all buffers have been returned from the renderer and the component.
+ return output_port_flushed_ && output_pending_request_ == 0;
+}
+
+void OmxVideoDecodeEngine::ComponentFlushDone() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DLOG(INFO) << "Component had been flushed!";
+
+ if (input_port_flushed_ && output_port_flushed_) {
+ event_handler_->OnFlushComplete();
+ input_port_flushed_ = false;
+ output_port_flushed_ = false;
+ }
+}
+
+void OmxVideoDecodeEngine::PortFlushDone(int port) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_NE(port, static_cast<int>(OMX_ALL));
+
+ if (port == input_port_) {
+ DLOG(INFO) << "Input Port had been flushed";
+ DCHECK_EQ(input_buffers_at_component_, 0);
+ input_port_flushed_ = true;
+ // Flush output port next.
+ OMX_ERRORTYPE omxresult;
+ omxresult = OMX_SendCommand(component_handle_,
+ OMX_CommandFlush,
+ output_port_, 0);
+ return;
+ }
+
+ if (port == output_port_) {
+ DLOG(INFO) << "Output Port had been flushed";
+ DCHECK_EQ(output_buffers_at_component_, 0);
+
+ output_port_flushed_ = true;
+ }
+
+ if (kClientFlushing == client_state_ &&
+ InputPortFlushed() && OutputPortFlushed())
+ ComponentFlushDone();
+}
+
+void OmxVideoDecodeEngine::Seek() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ DCHECK(client_state_ == kClientFlushing || // After a flush
+ client_state_ == kClientInitializing); // After an initialize.
+
+ if (client_state_ == kClientFlushing) {
+ InitialReadBuffer();
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::DoneSetStateExecuting;
+ TransitionToState(OMX_StateExecuting);
+ }
+
+ event_handler_->OnSeekComplete();
+}
+
+int OmxVideoDecodeEngine::current_omx_spec_version() const {
+ return 0x00000101;
+}
+
+VideoFrame::Format OmxVideoDecodeEngine::GetSurfaceFormat() const {
+ // TODO(jiesun): Both OmxHeaderType and EGLImage surface type could have
+ // different surface formats.
+ return uses_egl_image_ ? VideoFrame::RGBA : VideoFrame::YV12;
+}
+
+void OmxVideoDecodeEngine::Uninitialize() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ if (client_state_ == kClientError) {
+ OnStopDone();
+ return;
+ }
+
+ // TODO(wjia): add more state checking
+ if (kClientRunning == client_state_ || kClientFlushing == client_state_) {
+ client_state_ = kClientStopping;
+ DeinitFromExecuting(OMX_StateExecuting);
+ }
+
+ // TODO(wjia): When FillThisBuffer() is added, engine state should be
+ // kStopping here. engine state should be set to kStopped in OnStopDone();
+ // client_state_ = kClientStopping;
+}
+
+void OmxVideoDecodeEngine::FinishEmptyBuffer(scoped_refptr<Buffer> buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ if (!input_queue_has_eos_) {
+ event_handler_->ProduceVideoSample(buffer);
+ ++input_pending_request_;
+ }
+}
+
+void OmxVideoDecodeEngine::FinishFillBuffer(OMX_BUFFERHEADERTYPE* buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK(buffer);
+
+ scoped_refptr<VideoFrame> frame;
+ frame = static_cast<VideoFrame*>(buffer->pAppPrivate);
+
+ // We should not flush buffer to renderer during decoder flushing if decoder
+ // provides the buffer allocator.
+ if (kClientFlushing == client_state_ && !uses_egl_image_) return;
+
+ PipelineStatistics statistics;
+ statistics.video_bytes_decoded = buffer->nFilledLen;
+
+ frame->SetTimestamp(base::TimeDelta::FromMicroseconds(buffer->nTimeStamp));
+ frame->SetDuration(frame->GetTimestamp() - last_pts_);
+ last_pts_ = frame->GetTimestamp();
+ event_handler_->ConsumeVideoFrame(frame, statistics);
+ output_pending_request_--;
+}
+
+void OmxVideoDecodeEngine::OnStopDone() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ event_handler_->OnUninitializeComplete();
+}
+
+// Function sequence for initializing
+void OmxVideoDecodeEngine::InitializeTask() {
+ DCHECK_EQ(il_state_, kIlNone);
+
+ il_state_ = kIlNone;
+ expected_il_state_ = kIlLoaded;
+ output_port_state_ = kPortEnabled;
+ if (!CreateComponent()) {
+ StopOnError();
+ return;
+ }
+ il_state_ = kIlLoaded;
+
+ // TODO(wjia): Disabling output port is to work around racing condition
+ // due to bug in some vendor's driver. But it hits another bug.
+ // So temporarily fall back to enabling output port. Still keep the code
+ // disabling output port here.
+ // No need to respond to this PortDisable event
+ // OnPortDisableEventFunc = NULL;
+ // ChangePort(OMX_CommandPortDisable, output_port_);
+ // if (kClientError == client_state_) {
+ // StopOnError();
+ // return;
+ // }
+ // output_port_state_ = kPortDisabled;
+
+ // Transition component to Idle state
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::DoneSetStateIdle;
+ if (!TransitionToState(OMX_StateIdle)) {
+ StopOnError();
+ return;
+ }
+ expected_il_state_ = kIlIdle;
+
+ if (!AllocateInputBuffers()) {
+ LOG(ERROR) << "OMX_AllocateBuffer() Input buffer error";
+ client_state_ = kClientError;
+ StopOnError();
+ return;
+ }
+ if (!AllocateOutputBuffers()) {
+ LOG(ERROR) << "OMX_AllocateBuffer() Output buffer error";
+ client_state_ = kClientError;
+ return;
+ }
+}
+
+// Sequence of actions in this transition:
+//
+// 1. Initialize OMX (To be removed.)
+// 2. Map role name to component name.
+// 3. Get handle of the OMX component
+// 4. Get the port information.
+// 5. Set role for the component.
+// 6. Input/output ports media format configuration.
+// 7. Obtain the information about the input port.
+// 8. Obtain the information about the output port.
+bool OmxVideoDecodeEngine::CreateComponent() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ static OMX_CALLBACKTYPE callback = {
+ &OmxVideoDecodeEngine::EventHandler,
+ &OmxVideoDecodeEngine::EmptyBufferCallback,
+ &OmxVideoDecodeEngine::FillBufferCallback
+ };
+
+ // 1. Initialize the OpenMAX Core.
+ // TODO(hclam): move this out.
+ OMX_ERRORTYPE omxresult = OMX_Init();
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "Failed to init OpenMAX core";
+ client_state_ = kClientError;
+ return false;
+ }
+
+ // 2. Map role name to component name.
+ std::string role_name = configurator_->GetRoleName();
+ OMX_U32 roles = 0;
+ omxresult = OMX_GetComponentsOfRole(
+ const_cast<OMX_STRING>(role_name.c_str()),
+ &roles, 0);
+ if (omxresult != OMX_ErrorNone || roles == 0) {
+ LOG(ERROR) << "Unsupported Role: " << role_name.c_str();
+ client_state_ = kClientError;
+ return false;
+ }
+ const OMX_U32 kMaxRolePerComponent = 20;
+ CHECK(roles < kMaxRolePerComponent);
+
+ OMX_U8** component_names = new OMX_U8*[roles];
+ const int kMaxComponentNameLength = 256;
+ for (size_t i = 0; i < roles; ++i)
+ component_names[i] = new OMX_U8[kMaxComponentNameLength];
+
+ omxresult = OMX_GetComponentsOfRole(
+ const_cast<OMX_STRING>(role_name.c_str()),
+ &roles, component_names);
+
+ // Use first component only. Copy the name of the first component
+ // so that we could free the memory.
+ std::string component_name;
+ if (omxresult == OMX_ErrorNone)
+ component_name = reinterpret_cast<char*>(component_names[0]);
+
+ for (size_t i = 0; i < roles; ++i)
+ delete [] component_names[i];
+ delete [] component_names;
+
+ if (omxresult != OMX_ErrorNone || roles == 0) {
+ LOG(ERROR) << "Unsupported Role: " << role_name.c_str();
+ client_state_ = kClientError;
+ return false;
+ }
+
+ // 3. Get the handle to the component. After OMX_GetHandle(),
+ // the component is in loaded state.
+ OMX_STRING component = const_cast<OMX_STRING>(component_name.c_str());
+ omxresult = OMX_GetHandle(&component_handle_, component, this, &callback);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "Failed to Load the component: " << component;
+ client_state_ = kClientError;
+ return false;
+ }
+
+ // 4. Get the port information. This will obtain information about the
+ // number of ports and index of the first port.
+ OMX_PORT_PARAM_TYPE port_param;
+ ResetParamHeader(*this, &port_param);
+ omxresult = OMX_GetParameter(component_handle_, OMX_IndexParamVideoInit,
+ &port_param);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "Failed to get Port Param";
+ client_state_ = kClientError;
+ return false;
+ }
+ input_port_ = port_param.nStartPortNumber;
+ output_port_ = input_port_ + 1;
+
+ // 5. Set role for the component because our component could
+ // have multiple roles.
+ OMX_PARAM_COMPONENTROLETYPE role_type;
+ ResetParamHeader(*this, &role_type);
+ base::strlcpy(reinterpret_cast<char*>(role_type.cRole),
+ role_name.c_str(),
+ OMX_MAX_STRINGNAME_SIZE);
+ role_type.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
+ omxresult = OMX_SetParameter(component_handle_,
+ OMX_IndexParamStandardComponentRole,
+ &role_type);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "Failed to Set Role";
+ client_state_ = kClientError;
+ return false;
+ }
+
+ // 6. Input/output ports media format configuration.
+ if (!ConfigureIOPorts()) {
+ LOG(ERROR) << "Media format configurations failed";
+ client_state_ = kClientError;
+ return false;
+ }
+
+ // 7. Obtain the information about the input port.
+ // This will have the new mini buffer count in |port_format.nBufferCountMin|.
+ // Save this value to input_buf_count.
+ OMX_PARAM_PORTDEFINITIONTYPE port_format;
+ ResetParamHeader(*this, &port_format);
+ port_format.nPortIndex = input_port_;
+ omxresult = OMX_GetParameter(component_handle_,
+ OMX_IndexParamPortDefinition,
+ &port_format);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "GetParameter(OMX_IndexParamPortDefinition) failed";
+ client_state_ = kClientError;
+ return false;
+ }
+ if (OMX_DirInput != port_format.eDir) {
+ LOG(ERROR) << "Expected input port";
+ client_state_ = kClientError;
+ return false;
+ }
+ input_buffer_count_ = port_format.nBufferCountActual;
+ input_buffer_size_ = port_format.nBufferSize;
+
+ // 8. Obtain the information about the output port.
+ ResetParamHeader(*this, &port_format);
+ port_format.nPortIndex = output_port_;
+ omxresult = OMX_GetParameter(component_handle_,
+ OMX_IndexParamPortDefinition,
+ &port_format);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "GetParameter(OMX_IndexParamPortDefinition) failed";
+ client_state_ = kClientError;
+ return false;
+ }
+ if (OMX_DirOutput != port_format.eDir) {
+ LOG(ERROR) << "Expect Output Port";
+ client_state_ = kClientError;
+ return false;
+ }
+
+ // TODO(wjia): use same buffer recycling for EGLImage and system memory.
+ // Override buffer count when EGLImage is used.
+ if (uses_egl_image_) {
+ // TODO(wjia): remove hard-coded value
+ port_format.nBufferCountActual = port_format.nBufferCountMin =
+ output_buffer_count_ = 4;
+
+ omxresult = OMX_SetParameter(component_handle_,
+ OMX_IndexParamPortDefinition,
+ &port_format);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "SetParameter(OMX_IndexParamPortDefinition) failed";
+ client_state_ = kClientError;
+ return false;
+ }
+ } else {
+ output_buffer_count_ = port_format.nBufferCountActual;
+ }
+ output_buffer_size_ = port_format.nBufferSize;
+
+ return true;
+}
+
+// Event callback during initialization to handle DoneStateSet to idle
+void OmxVideoDecodeEngine::DoneSetStateIdle(OMX_STATETYPE state) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(client_state_, kClientInitializing);
+ DCHECK_EQ(OMX_StateIdle, state);
+ DLOG(INFO) << "OMX video decode engine is in Idle";
+
+ il_state_ = kIlIdle;
+
+ // start reading bit stream
+ InitialReadBuffer();
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::DoneSetStateExecuting;
+ if (!TransitionToState(OMX_StateExecuting)) {
+ StopOnError();
+ return;
+ }
+ expected_il_state_ = kIlExecuting;
+}
+
+// Event callback during initialization to handle DoneStateSet to executing
+void OmxVideoDecodeEngine::DoneSetStateExecuting(OMX_STATETYPE state) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK(client_state_ == kClientInitializing ||
+ client_state_ == kClientFlushing);
+ DCHECK_EQ(OMX_StateExecuting, state);
+ DLOG(INFO) << "OMX video decode engine is in Executing";
+
+ il_state_ = kIlExecuting;
+ client_state_ = kClientRunning;
+ OnStateSetEventFunc = NULL;
+ EmptyBufferTask();
+ InitialFillBuffer();
+ if (kClientError == client_state_) {
+ StopOnError();
+ return;
+ }
+}
+
+// Function for receiving output buffers. Hookup for buffer recycling
+// and outside allocator.
+void OmxVideoDecodeEngine::ProduceVideoFrame(
+ scoped_refptr<VideoFrame> video_frame) {
+ DCHECK(video_frame.get() && !video_frame->IsEndOfStream());
+ output_pending_request_++;
+
+ PipelineStatistics statistics;
+
+ if (!CanAcceptOutput()) {
+ if (uses_egl_image_) { // return it to owner.
+ output_pending_request_--;
+ event_handler_->ConsumeVideoFrame(video_frame, statistics);
+ }
+ return;
+ }
+
+ OMX_BUFFERHEADERTYPE* omx_buffer = FindOmxBuffer(video_frame);
+ if (omx_buffer) {
+ statistics.video_bytes_decoded = omx_buffer->nFilledLen;
+
+ if (kClientRunning == client_state_) {
+ SendOutputBufferToComponent(omx_buffer);
+ } else if (kClientFlushing == client_state_) {
+ if (uses_egl_image_) { // return it to owner.
+ output_pending_request_--;
+ event_handler_->ConsumeVideoFrame(video_frame, statistics);
+ }
+ if (InputPortFlushed() && OutputPortFlushed())
+ ComponentFlushDone();
+ }
+ } else {
+ DCHECK(!output_frames_allocated_);
+ DCHECK(uses_egl_image_);
+ output_frames_.push_back(std::make_pair(video_frame,
+ static_cast<OMX_BUFFERHEADERTYPE*>(NULL)));
+ }
+
+ DCHECK(static_cast<int>(output_frames_.size()) <= output_buffer_count_);
+
+ if ((!output_frames_allocated_) &&
+ static_cast<int>(output_frames_.size()) == output_buffer_count_) {
+ output_frames_allocated_ = true;
+
+ if (need_setup_output_port_) {
+ SetupOutputPort();
+ }
+ }
+
+ if (kClientError == client_state_) {
+ StopOnError();
+ return;
+ }
+}
+
+// Reconfigure port
+void OmxVideoDecodeEngine::OnPortSettingsChangedRun(int port,
+ OMX_INDEXTYPE index) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(client_state_, kClientRunning);
+ DCHECK_EQ(port, output_port_);
+
+ // TODO(wjia): add buffer negotiation between decoder and renderer.
+ if (uses_egl_image_) {
+ DLOG(INFO) << "Port settings are changed";
+ return;
+ }
+
+ // TODO(wjia): remove this checking when all vendors observe same spec.
+ if (index > OMX_IndexComponentStartUnused) {
+ if (index != OMX_IndexParamPortDefinition)
+ return;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE port_format;
+ ResetParamHeader(*this, &port_format);
+ port_format.nPortIndex = output_port_;
+ OMX_ERRORTYPE omxresult;
+ omxresult = OMX_GetParameter(component_handle_,
+ OMX_IndexParamPortDefinition,
+ &port_format);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "GetParameter(OMX_IndexParamPortDefinition) failed";
+ client_state_ = kClientError;
+ StopOnError();
+ return;
+ }
+ if (OMX_DirOutput != port_format.eDir) {
+ LOG(ERROR) << "Expected Output Port";
+ client_state_ = kClientError;
+ StopOnError();
+ return;
+ }
+
+ // Update the output format.
+ OmxConfigurator::MediaFormat output_format;
+ output_format.video_header.height = port_format.format.video.nFrameHeight;
+ output_format.video_header.width = port_format.format.video.nFrameWidth;
+ output_format.video_header.stride = port_format.format.video.nStride;
+ output_buffer_count_ = port_format.nBufferCountActual;
+ output_buffer_size_ = port_format.nBufferSize;
+
+ if (kPortEnabled == output_port_state_) {
+ output_port_state_ = kPortDisabling;
+ OnPortDisableEventFunc = &OmxVideoDecodeEngine::OnPortDisableEventRun;
+ ChangePort(OMX_CommandPortDisable, output_port_);
+ if (kClientError == client_state_) {
+ StopOnError();
+ return;
+ }
+ FreeOutputBuffers();
+ } else {
+ OnPortDisableEventRun(output_port_);
+ }
+}
+
+// Post output port disabling
+void OmxVideoDecodeEngine::OnPortDisableEventRun(int port) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(client_state_, kClientRunning);
+ DCHECK_EQ(port, output_port_);
+
+ output_port_state_ = kPortDisabled;
+
+ // make sure all eglimages are available before enabling output port
+ if (output_frames_allocated_ || !uses_egl_image_) {
+ SetupOutputPort();
+ if (kClientError == client_state_) {
+ StopOnError();
+ return;
+ }
+ } else {
+ need_setup_output_port_ = true;
+ }
+}
+
+// Enable output port and allocate buffers correspondingly
+void OmxVideoDecodeEngine::SetupOutputPort() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ need_setup_output_port_ = false;
+
+ // Enable output port when necessary since the port could be waiting for
+ // buffers, instead of port reconfiguration.
+ if (kPortEnabled != output_port_state_) {
+ output_port_state_ = kPortEnabling;
+ OnPortEnableEventFunc = &OmxVideoDecodeEngine::OnPortEnableEventRun;
+ ChangePort(OMX_CommandPortEnable, output_port_);
+ if (kClientError == client_state_) {
+ return;
+ }
+ }
+
+ // TODO(wjia): add state checking
+ // Update the ports in buffer if necessary
+ if (!AllocateOutputBuffers()) {
+ LOG(ERROR) << "OMX_AllocateBuffer() Output buffer error";
+ client_state_ = kClientError;
+ return;
+ }
+}
+
+// Post output port enabling
+void OmxVideoDecodeEngine::OnPortEnableEventRun(int port) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(port, output_port_);
+ DCHECK_EQ(client_state_, kClientRunning);
+
+ output_port_state_ = kPortEnabled;
+ last_pts_ = base::TimeDelta::FromMilliseconds(0);
+ OnPortEnableEventFunc = NULL;
+ InitialFillBuffer();
+ if (kClientError == client_state_) {
+ StopOnError();
+ return;
+ }
+}
+
+void OmxVideoDecodeEngine::DeinitFromExecuting(OMX_STATETYPE state) {
+ DCHECK_EQ(state, OMX_StateExecuting);
+
+ DLOG(INFO) << "Deinit from Executing";
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::DeinitFromIdle;
+ TransitionToState(OMX_StateIdle);
+ expected_il_state_ = kIlIdle;
+}
+
+void OmxVideoDecodeEngine::DeinitFromIdle(OMX_STATETYPE state) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(state, OMX_StateIdle);
+
+ DLOG(INFO) << "Deinit from Idle";
+ il_state_ = kIlIdle;
+ OnStateSetEventFunc = &OmxVideoDecodeEngine::DeinitFromLoaded;
+ TransitionToState(OMX_StateLoaded);
+ expected_il_state_ = kIlLoaded;
+
+ if (!input_buffers_at_component_)
+ FreeInputBuffers();
+ else
+ need_free_input_buffers_ = true;
+
+ if (!output_buffers_at_component_)
+ FreeOutputBuffers();
+ else
+ need_free_output_buffers_ = true;
+}
+
+void OmxVideoDecodeEngine::DeinitFromLoaded(OMX_STATETYPE state) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_EQ(state, OMX_StateLoaded);
+
+ DLOG(INFO) << "Deinit from Loaded";
+ il_state_ = kIlLoaded;
+ if (component_handle_) {
+ OMX_ERRORTYPE result = OMX_FreeHandle(component_handle_);
+ if (result != OMX_ErrorNone)
+ LOG(ERROR) << "OMX_FreeHandle() error. Error code: " << result;
+ component_handle_ = NULL;
+ }
+ il_state_ = expected_il_state_ = kIlNone;
+
+ // kClientStopped is different from kClientNotInitialized. The former can't
+ // accept output buffers, while the latter can.
+ client_state_ = kClientStopped;
+
+ OMX_Deinit();
+
+ OnStopDone();
+}
+
+void OmxVideoDecodeEngine::StopOnError() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ client_state_ = kClientStopping;
+
+ if (kIlExecuting == expected_il_state_) {
+ DeinitFromExecuting(OMX_StateExecuting);
+ } else if (kIlIdle == expected_il_state_) {
+ DeinitFromIdle(OMX_StateIdle);
+ } else if (kIlLoaded == expected_il_state_) {
+ DeinitFromLoaded(OMX_StateLoaded);
+ } else if (kIlPause == expected_il_state_) {
+ // TODO(jiesun): Make sure this works.
+ DeinitFromExecuting(OMX_StateExecuting);
+ } else {
+ NOTREACHED();
+ }
+}
+
+// Call OMX_UseBuffer() to avoid buffer copying when
+// OMX_EmptyThisBuffer() is called
+bool OmxVideoDecodeEngine::AllocateInputBuffers() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ uint8* data = new uint8[input_buffer_size_];
+ scoped_array<uint8> data_deleter(data);
+
+ for (int i = 0; i < input_buffer_count_; ++i) {
+ OMX_BUFFERHEADERTYPE* buffer;
+ OMX_ERRORTYPE error =
+ OMX_UseBuffer(component_handle_, &buffer, input_port_,
+ this, input_buffer_size_, data);
+ if (error != OMX_ErrorNone)
+ return false;
+ buffer->nInputPortIndex = input_port_;
+ buffer->nOffset = 0;
+ buffer->nFlags = 0;
+ input_buffers_.push_back(buffer);
+ free_input_buffers_.push(buffer);
+ }
+ return true;
+}
+
+// This method handles EGLImage and internal buffer cases. Any external
+// allocation case is similar to EGLImage
+bool OmxVideoDecodeEngine::AllocateOutputBuffers() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ if (uses_egl_image_ && !output_frames_allocated_) {
+ DLOG(INFO) << "Output frames are not allocated yet";
+ need_setup_output_port_ = true;
+ return true;
+ }
+
+ for (int i = 0; i < output_buffer_count_; ++i) {
+ OMX_BUFFERHEADERTYPE* buffer;
+ scoped_refptr<VideoFrame> video_frame;
+ OMX_ERRORTYPE error;
+ if (uses_egl_image_) {
+ OutputFrame output_frame = output_frames_[i];
+ video_frame = output_frame.first;
+ DCHECK(!output_frame.second);
+ error = OMX_UseEGLImage(component_handle_, &buffer, output_port_,
+ video_frame.get(), video_frame->private_buffer());
+ if (error != OMX_ErrorNone)
+ return false;
+ output_frames_[i].second = buffer;
+ } else {
+ error = OMX_AllocateBuffer(component_handle_, &buffer, output_port_,
+ NULL, output_buffer_size_);
+ if (error != OMX_ErrorNone)
+ return false;
+ video_frame = CreateOmxBufferVideoFrame(buffer);
+ output_frames_.push_back(std::make_pair(video_frame, buffer));
+ buffer->pAppPrivate = video_frame.get();
+ }
+ }
+
+ return true;
+}
+
+scoped_refptr<VideoFrame> OmxVideoDecodeEngine::CreateOmxBufferVideoFrame(
+ OMX_BUFFERHEADERTYPE* omx_buffer) {
+ scoped_refptr<VideoFrame> video_frame;
+ uint8* data[VideoFrame::kMaxPlanes];
+ int32 strides[VideoFrame::kMaxPlanes];
+
+ memset(data, 0, sizeof(data));
+ memset(strides, 0, sizeof(strides));
+ // TODO(jiesun): chroma format 4:2:0 only and 3 planes.
+ data[0] = omx_buffer->pBuffer;
+ data[1] = data[0] + width_ * height_;
+ data[2] = data[1] + width_ * height_ / 4;
+ strides[0] = width_;
+ strides[1] = strides[2] = width_ >> 1;
+
+ VideoFrame::CreateFrameExternal(
+ VideoFrame::TYPE_SYSTEM_MEMORY,
+ VideoFrame::YV12,
+ width_, height_, 3,
+ data, strides,
+ kNoTimestamp,
+ kNoTimestamp,
+ omx_buffer,
+ &video_frame);
+
+ return video_frame;
+}
+
+void OmxVideoDecodeEngine::FreeInputBuffers() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ // Empty available buffer queue.
+ while (!free_input_buffers_.empty()) {
+ free_input_buffers_.pop();
+ }
+
+ while (!available_input_buffers_.empty()) {
+ OMX_BUFFERHEADERTYPE* omx_buffer = available_input_buffers_.front();
+ available_input_buffers_.pop();
+ Buffer* stored_buffer = static_cast<Buffer*>(omx_buffer->pAppPrivate);
+ FinishEmptyBuffer(stored_buffer);
+ stored_buffer->Release();
+ }
+
+ // Calls to OMX to free buffers.
+ for (size_t i = 0; i < input_buffers_.size(); ++i)
+ OMX_FreeBuffer(component_handle_, input_port_, input_buffers_[i]);
+ input_buffers_.clear();
+
+ need_free_input_buffers_ = false;
+}
+
+void OmxVideoDecodeEngine::FreeOutputBuffers() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ // Calls to OMX to free buffers.
+ for (size_t i = 0; i < output_frames_.size(); ++i) {
+ OMX_BUFFERHEADERTYPE* omx_buffer = output_frames_[i].second;
+ CHECK(omx_buffer);
+ OMX_FreeBuffer(component_handle_, output_port_, omx_buffer);
+ }
+ output_frames_.clear();
+ output_frames_allocated_ = false;
+
+ need_free_output_buffers_ = false;
+}
+
+bool OmxVideoDecodeEngine::ConfigureIOPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE input_port_def, output_port_def;
+ OMX_ERRORTYPE omxresult = OMX_ErrorNone;
+ // Get default input port definition.
+ ResetParamHeader(*this, &input_port_def);
+ input_port_def.nPortIndex = input_port_;
+ omxresult = OMX_GetParameter(component_handle_,
+ OMX_IndexParamPortDefinition,
+ &input_port_def);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "GetParameter(OMX_IndexParamPortDefinition) "
+ << "for input port failed";
+ return false;
+ }
+ if (OMX_DirInput != input_port_def.eDir) {
+ LOG(ERROR) << "Expected Input Port";
+ return false;
+ }
+
+ // Get default output port definition.
+ ResetParamHeader(*this, &output_port_def);
+ output_port_def.nPortIndex = output_port_;
+ omxresult = OMX_GetParameter(component_handle_,
+ OMX_IndexParamPortDefinition,
+ &output_port_def);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "GetParameter(OMX_IndexParamPortDefinition) "
+ << "for output port failed";
+ return false;
+ }
+ if (OMX_DirOutput != output_port_def.eDir) {
+ LOG(ERROR) << "Expected Output Port";
+ return false;
+ }
+
+ return configurator_->ConfigureIOPorts(
+ static_cast<OMX_COMPONENTTYPE*>(component_handle_),
+ &input_port_def, &output_port_def);
+}
+
+bool OmxVideoDecodeEngine::CanEmptyBuffer() {
+ // We can call empty buffer while we are in executing and EOS has
+ // not been sent
+ return (il_state_ == kIlExecuting &&
+ !input_has_fed_eos_);
+}
+
+bool OmxVideoDecodeEngine::CanFillBuffer() {
+ // Make sure component is in the executing state and end-of-stream
+ // has not been reached.
+ return (il_state_ == kIlExecuting &&
+ !output_eos_ &&
+ (output_port_state_ == kPortEnabled ||
+ output_port_state_ == kPortEnabling));
+}
+
+bool OmxVideoDecodeEngine::CanAcceptInput() {
+ // We can't take input buffer when in error state.
+ return (kClientError != client_state_ &&
+ kClientStopping != client_state_ &&
+ kClientStopped != client_state_ &&
+ !input_queue_has_eos_);
+}
+
+bool OmxVideoDecodeEngine::CanAcceptOutput() {
+ return (kClientError != client_state_ &&
+ kClientStopping != client_state_ &&
+ kClientStopped != client_state_ &&
+ output_port_state_ == kPortEnabled &&
+ !output_eos_);
+}
+
+// TODO(wjia): There are several things need to be done here:
+// 1. Merge this method into EmptyThisBuffer();
+// 2. Get rid of the while loop, this is not needed because when we call
+// OMX_EmptyThisBuffer we assume we *always* have an input buffer.
+void OmxVideoDecodeEngine::EmptyBufferTask() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ if (!CanEmptyBuffer())
+ return;
+
+ // Loop for all available input data and input buffer for the
+ // decoder. When input has reached EOS we need to stop.
+ while (!available_input_buffers_.empty() &&
+ !input_has_fed_eos_) {
+ OMX_BUFFERHEADERTYPE* omx_buffer = available_input_buffers_.front();
+ available_input_buffers_.pop();
+
+ input_has_fed_eos_ = omx_buffer->nFlags & OMX_BUFFERFLAG_EOS;
+ if (input_has_fed_eos_) {
+ DLOG(INFO) << "Input has fed EOS";
+ }
+
+ // Give this buffer to OMX.
+ input_buffers_at_component_++;
+ OMX_ERRORTYPE ret = OMX_EmptyThisBuffer(component_handle_, omx_buffer);
+ if (ret != OMX_ErrorNone) {
+ LOG(ERROR) << "OMX_EmptyThisBuffer() failed with result " << ret;
+ client_state_ = kClientError;
+ return;
+ }
+ }
+}
+
+void OmxVideoDecodeEngine::InitialReadBuffer() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ input_queue_has_eos_ = false;
+ input_has_fed_eos_ = false;
+ output_eos_ = false;
+
+ DLOG(INFO) << "OmxVideoDecodeEngine::InitialReadBuffer";
+ for (size_t i = 0; i < free_input_buffers_.size(); i++)
+ FinishEmptyBuffer(NULL);
+}
+
+void OmxVideoDecodeEngine::InitialFillBuffer() {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ // DCHECK(output_frames_allocated_);
+
+ if (!CanFillBuffer())
+ return;
+
+ DLOG(INFO) << "OmxVideoDecodeEngine::InitialFillBuffer";
+
+ // Ask the decoder to fill the output buffers.
+ for (uint32 i = 0; i < output_frames_.size(); ++i) {
+ OMX_BUFFERHEADERTYPE* omx_buffer = output_frames_[i].second;
+ SendOutputBufferToComponent(omx_buffer);
+ }
+}
+
+// helper functions
+// Send command to disable/enable port.
+void OmxVideoDecodeEngine::ChangePort(OMX_COMMANDTYPE cmd, int port_index) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ OMX_ERRORTYPE omxresult = OMX_SendCommand(component_handle_,
+ cmd, port_index, 0);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "SendCommand(OMX_CommandPortDisable) failed";
+ client_state_ = kClientError;
+ return;
+ }
+}
+
+// Find if omx_buffer exists corresponding to video_frame
+OMX_BUFFERHEADERTYPE* OmxVideoDecodeEngine::FindOmxBuffer(
+ scoped_refptr<VideoFrame> video_frame) {
+ for (size_t i = 0; i < output_frames_.size(); ++i) {
+ if (video_frame == output_frames_[i].first)
+ return output_frames_[i].second;
+ }
+ return NULL;
+}
+
+OMX_STATETYPE OmxVideoDecodeEngine::GetComponentState() {
+ OMX_STATETYPE eState;
+ OMX_ERRORTYPE eError;
+
+ eError = OMX_GetState(component_handle_, &eState);
+ if (OMX_ErrorNone != eError) {
+ LOG(ERROR) << "OMX_GetState failed";
+ StopOnError();
+ }
+
+ return eState;
+}
+
+// send one output buffer to component
+void OmxVideoDecodeEngine::SendOutputBufferToComponent(
+ OMX_BUFFERHEADERTYPE *omx_buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ if (!CanFillBuffer())
+ return;
+
+ // clear EOS flag.
+ omx_buffer->nFlags &= ~OMX_BUFFERFLAG_EOS;
+ omx_buffer->nOutputPortIndex = output_port_;
+ output_buffers_at_component_++;
+ OMX_ERRORTYPE ret = OMX_FillThisBuffer(component_handle_, omx_buffer);
+
+ if (OMX_ErrorNone != ret) {
+ LOG(ERROR) << "OMX_FillThisBuffer() failed with result " << ret;
+ client_state_ = kClientError;
+ return;
+ }
+}
+
+// Send state transition command to component.
+bool OmxVideoDecodeEngine::TransitionToState(OMX_STATETYPE new_state) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+
+ OMX_ERRORTYPE omxresult = OMX_SendCommand(component_handle_,
+ OMX_CommandStateSet,
+ new_state, 0);
+ if (omxresult != OMX_ErrorNone) {
+ LOG(ERROR) << "SendCommand(OMX_CommandStateSet) failed";
+ client_state_ = kClientError;
+ return false;
+ }
+
+ return true;
+}
+
+void OmxVideoDecodeEngine::EmptyBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_GT(input_buffers_at_component_, 0);
+
+ Buffer* stored_buffer = static_cast<Buffer*>(buffer->pAppPrivate);
+ buffer->pAppPrivate = NULL;
+ if (client_state_ != kClientFlushing)
+ FinishEmptyBuffer(stored_buffer);
+ stored_buffer->Release();
+
+ // Enqueue the available buffer because the decoder has consumed it.
+ free_input_buffers_.push(buffer);
+ input_buffers_at_component_--;
+
+ if (need_free_input_buffers_ && !input_buffers_at_component_) {
+ FreeInputBuffers();
+ return;
+ }
+
+ // Try to feed more data into the decoder.
+ EmptyBufferTask();
+
+ if (client_state_ == kClientFlushing &&
+ InputPortFlushed() && OutputPortFlushed())
+ ComponentFlushDone();
+}
+
+void OmxVideoDecodeEngine::FillBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
+ DCHECK_EQ(message_loop_, MessageLoop::current());
+ DCHECK_GT(output_buffers_at_component_, 0);
+
+ output_buffers_at_component_--;
+
+ if (need_free_output_buffers_ && !output_buffers_at_component_) {
+ FreeOutputBuffers();
+ return;
+ }
+
+ PipelineStatistics statistics;
+ statistics.video_bytes_decoded = buffer->nFilledLen;
+
+ if (!CanAcceptOutput()) {
+ if (uses_egl_image_) {
+ scoped_refptr<VideoFrame> frame;
+ frame = static_cast<VideoFrame*>(buffer->pAppPrivate);
+ event_handler_->ConsumeVideoFrame(frame, statistics);
+ output_pending_request_--;
+ }
+ return;
+ }
+
+ // This buffer is received with decoded frame. Enqueue it and make it
+ // ready to be consumed by reads.
+
+ if (buffer->nFlags & OMX_BUFFERFLAG_EOS) {
+ output_eos_ = true;
+ DLOG(INFO) << "Output has EOS";
+ }
+
+ FinishFillBuffer(buffer);
+
+ if (buffer->nFlags & OMX_BUFFERFLAG_EOS) {
+ // Singal end of stream.
+ scoped_refptr<VideoFrame> frame;
+ VideoFrame::CreateEmptyFrame(&frame);
+ event_handler_->ConsumeVideoFrame(frame, statistics);
+ }
+
+ if (client_state_ == kClientFlushing &&
+ InputPortFlushed() && OutputPortFlushed())
+ ComponentFlushDone();
+}
+
+void OmxVideoDecodeEngine::EventHandlerCompleteTask(OMX_EVENTTYPE event,
+ OMX_U32 data1,
+ OMX_U32 data2) {
+ switch (event) {
+ case OMX_EventCmdComplete: {
+ // If the last command was successful, we have completed
+ // a state transition. So notify that we have done it
+ // accordingly.
+ OMX_COMMANDTYPE cmd = static_cast<OMX_COMMANDTYPE>(data1);
+ if (cmd == OMX_CommandPortDisable) {
+ if (OnPortDisableEventFunc)
+ (this->*OnPortDisableEventFunc)(static_cast<int>(data2));
+ } else if (cmd == OMX_CommandPortEnable) {
+ if (OnPortEnableEventFunc)
+ (this->*OnPortEnableEventFunc)(static_cast<int>(data2));
+ } else if (cmd == OMX_CommandStateSet) {
+ (this->*OnStateSetEventFunc)(static_cast<OMX_STATETYPE>(data2));
+ } else if (cmd == OMX_CommandFlush) {
+ (this->*OnFlushEventFunc)(data2);
+ } else {
+ LOG(ERROR) << "Unknown command completed\n" << data1;
+ }
+ break;
+ }
+ case OMX_EventError:
+ if (OMX_ErrorInvalidState == (OMX_ERRORTYPE)data1) {
+ // TODO(hclam): what to do here?
+ }
+ StopOnError();
+ break;
+ case OMX_EventPortSettingsChanged:
+ // TODO(wjia): remove this hack when all vendors observe same spec.
+ if (data1 < OMX_IndexComponentStartUnused)
+ OnPortSettingsChangedRun(static_cast<int>(data1),
+ static_cast<OMX_INDEXTYPE>(data2));
+ else
+ OnPortSettingsChangedRun(static_cast<int>(data2),
+ static_cast<OMX_INDEXTYPE>(data1));
+ break;
+ default:
+ LOG(ERROR) << "Warning - Unknown event received\n";
+ break;
+ }
+}
+
+// static
+OMX_ERRORTYPE OmxVideoDecodeEngine::EventHandler(OMX_HANDLETYPE component,
+ OMX_PTR priv_data,
+ OMX_EVENTTYPE event,
+ OMX_U32 data1,
+ OMX_U32 data2,
+ OMX_PTR event_data) {
+ OmxVideoDecodeEngine* decoder = static_cast<OmxVideoDecodeEngine*>(priv_data);
+ DCHECK_EQ(component, decoder->component_handle_);
+ decoder->message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(decoder,
+ &OmxVideoDecodeEngine::EventHandlerCompleteTask,
+ event, data1, data2));
+ return OMX_ErrorNone;
+}
+
+// static
+OMX_ERRORTYPE OmxVideoDecodeEngine::EmptyBufferCallback(
+ OMX_HANDLETYPE component,
+ OMX_PTR priv_data,
+ OMX_BUFFERHEADERTYPE* buffer) {
+ OmxVideoDecodeEngine* decoder = static_cast<OmxVideoDecodeEngine*>(priv_data);
+ DCHECK_EQ(component, decoder->component_handle_);
+ decoder->message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(decoder,
+ &OmxVideoDecodeEngine::EmptyBufferDoneTask, buffer));
+ return OMX_ErrorNone;
+}
+
+// static
+OMX_ERRORTYPE OmxVideoDecodeEngine::FillBufferCallback(
+ OMX_HANDLETYPE component,
+ OMX_PTR priv_data,
+ OMX_BUFFERHEADERTYPE* buffer) {
+ OmxVideoDecodeEngine* decoder = static_cast<OmxVideoDecodeEngine*>(priv_data);
+ DCHECK_EQ(component, decoder->component_handle_);
+ decoder->message_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(decoder,
+ &OmxVideoDecodeEngine::FillBufferDoneTask, buffer));
+ return OMX_ErrorNone;
+}
+
+} // namespace media
+
+// Disable refcounting for this object because this object only lives
+// on the video decoder thread and there's no need to refcount it.
+DISABLE_RUNNABLE_METHOD_REFCOUNT(media::OmxVideoDecodeEngine);
diff --git a/media/video/omx_video_decode_engine.h b/media/video/omx_video_decode_engine.h
new file mode 100644
index 0000000..e9cc756
--- /dev/null
+++ b/media/video/omx_video_decode_engine.h
@@ -0,0 +1,246 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_VIDEO_OMX_VIDEO_DECODE_ENGINE_H_
+#define MEDIA_VIDEO_OMX_VIDEO_DECODE_ENGINE_H_
+
+#include <queue>
+#include <utility>
+#include <vector>
+
+#include "base/callback_old.h"
+#include "base/memory/scoped_ptr.h"
+#include "media/omx/omx_configurator.h"
+#include "media/video/video_decode_engine.h"
+#include "third_party/openmax/il/OMX_Component.h"
+#include "third_party/openmax/il/OMX_Core.h"
+#include "third_party/openmax/il/OMX_Video.h"
+
+namespace media {
+
+class OmxVideoDecodeEngine : public VideoDecodeEngine {
+ public:
+ OmxVideoDecodeEngine();
+ virtual ~OmxVideoDecodeEngine();
+
+ // Implementation of the VideoDecodeEngine Interface.
+ virtual void Initialize(MessageLoop* message_loop,
+ VideoDecodeEngine::EventHandler* event_handler,
+ VideoDecodeContext* context,
+ const VideoDecoderConfig& config);
+ virtual void ConsumeVideoSample(scoped_refptr<Buffer> buffer);
+ virtual void ProduceVideoFrame(scoped_refptr<VideoFrame> frame);
+ virtual void Uninitialize();
+ virtual void Flush();
+ virtual void Seek();
+
+ // Subclass can provide a different value.
+ virtual int current_omx_spec_version() const;
+
+ private:
+ enum OmxIlState {
+ kIlNone,
+ kIlLoaded,
+ kIlIdle,
+ kIlExecuting,
+ kIlPause,
+ kIlInvalid,
+ kIlUnknown,
+ };
+
+ enum OmxIlClientState {
+ kClientNotInitialized,
+ kClientInitializing,
+ kClientRunning,
+ kClientStopping,
+ kClientStopped,
+ kClientPausing,
+ kClientFlushing,
+ kClientError,
+ };
+
+ enum OmxIlPortState {
+ kPortDisabled,
+ kPortEnabling,
+ kPortEnabled,
+ kPortDisabling,
+ };
+
+ typedef Callback0::Type Callback;
+
+ // calls into other classes
+ void FinishEmptyBuffer(scoped_refptr<Buffer> buffer);
+ void FinishFillBuffer(OMX_BUFFERHEADERTYPE* buffer);
+ // Helper method to perform tasks when this object is stopped.
+ void OnStopDone();
+
+ // Transition method sequence for initialization
+ bool CreateComponent();
+ void DoneSetStateIdle(OMX_STATETYPE state);
+ void DoneSetStateExecuting(OMX_STATETYPE state);
+ void OnPortSettingsChangedRun(int port, OMX_INDEXTYPE index);
+ void OnPortDisableEventRun(int port);
+ void SetupOutputPort();
+ void OnPortEnableEventRun(int port);
+
+ // Transition methods for shutdown
+ void DeinitFromExecuting(OMX_STATETYPE state);
+ void DeinitFromIdle(OMX_STATETYPE state);
+ void DeinitFromLoaded(OMX_STATETYPE state);
+ void PauseFromExecuting(OMX_STATETYPE state);
+ void StartFlush();
+ void PortFlushDone(int port);
+ void ComponentFlushDone();
+
+ void StopOnError();
+
+ void InitializeTask();
+
+ // Methods to free input and output buffers.
+ bool AllocateInputBuffers();
+ bool AllocateOutputBuffers();
+ void FreeInputBuffers();
+ void FreeOutputBuffers();
+ void FreeInputQueue();
+
+ // Helper method to configure port format at LOADED state.
+ bool ConfigureIOPorts();
+
+ // Determine whether we can issue fill buffer or empty buffer
+ // to the decoder based on the current state and port state.
+ bool CanEmptyBuffer();
+ bool CanFillBuffer();
+
+ // Determine whether we can use |input_queue_| and |output_queue_|
+ // based on the current state.
+ bool CanAcceptInput();
+ bool CanAcceptOutput();
+
+ bool InputPortFlushed();
+ bool OutputPortFlushed();
+
+ // Method to send input buffers to component
+ void EmptyBufferTask();
+
+ // Method doing initial reads to get bit stream from demuxer.
+ void InitialReadBuffer();
+
+ // Method doing initial fills to kick start the decoding process.
+ void InitialFillBuffer();
+
+ // helper functions
+ void ChangePort(OMX_COMMANDTYPE cmd, int port_index);
+ OMX_BUFFERHEADERTYPE* FindOmxBuffer(scoped_refptr<VideoFrame> video_frame);
+ OMX_STATETYPE GetComponentState();
+ void SendOutputBufferToComponent(OMX_BUFFERHEADERTYPE *omx_buffer);
+ bool TransitionToState(OMX_STATETYPE new_state);
+ virtual VideoFrame::Format GetSurfaceFormat() const;
+
+ // Method to handle events
+ void EventHandlerCompleteTask(OMX_EVENTTYPE event,
+ OMX_U32 data1,
+ OMX_U32 data2);
+
+ // Method to receive buffers from component's input port
+ void EmptyBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer);
+
+ // Method to receive buffers from component's output port
+ void FillBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer);
+
+ // The following three methods are static callback methods
+ // for the OMX component. When these callbacks are received, the
+ // call is delegated to the three internal methods above.
+ static OMX_ERRORTYPE EventHandler(OMX_HANDLETYPE component,
+ OMX_PTR priv_data,
+ OMX_EVENTTYPE event,
+ OMX_U32 data1, OMX_U32 data2,
+ OMX_PTR event_data);
+
+ static OMX_ERRORTYPE EmptyBufferCallback(OMX_HANDLETYPE component,
+ OMX_PTR priv_data,
+ OMX_BUFFERHEADERTYPE* buffer);
+
+ static OMX_ERRORTYPE FillBufferCallback(OMX_HANDLETYPE component,
+ OMX_PTR priv_data,
+ OMX_BUFFERHEADERTYPE* buffer);
+
+ // Member function pointers to respond to events
+ void (OmxVideoDecodeEngine::*OnPortDisableEventFunc)(int port);
+ void (OmxVideoDecodeEngine::*OnPortEnableEventFunc)(int port);
+ void (OmxVideoDecodeEngine::*OnStateSetEventFunc)(OMX_STATETYPE state);
+ void (OmxVideoDecodeEngine::*OnFlushEventFunc)(int port);
+
+ // Helper function
+ scoped_refptr<VideoFrame> CreateOmxBufferVideoFrame(
+ OMX_BUFFERHEADERTYPE* omx_buffer);
+
+ int width_;
+ int height_;
+
+ MessageLoop* message_loop_;
+
+ std::vector<OMX_BUFFERHEADERTYPE*> input_buffers_;
+ int input_buffer_count_;
+ int input_buffer_size_;
+ int input_port_;
+ int input_buffers_at_component_;
+ int input_pending_request_;
+ bool input_queue_has_eos_;
+ bool input_has_fed_eos_;
+ bool input_port_flushed_;
+
+ int output_buffer_count_;
+ int output_buffer_size_;
+ int output_port_;
+ int output_buffers_at_component_;
+ int output_pending_request_;
+ bool output_eos_;
+ bool output_port_flushed_;
+ bool uses_egl_image_;
+ base::TimeDelta last_pts_;
+
+ // |il_state_| records the current component state. During state transition
+ // |expected_il_state_| is the next state that the component will transition
+ // to. After a state transition is completed, |il_state_| equals
+ // |expected_il_state_|. Inequality can be used to detect a state transition.
+ // These two members are read and written only on |message_loop_|.
+ OmxIlState il_state_;
+ OmxIlState expected_il_state_;
+ OmxIlClientState client_state_;
+
+ OMX_HANDLETYPE component_handle_;
+ scoped_ptr<media::OmxConfigurator> configurator_;
+
+ // Free input OpenMAX buffers that can be used to take input bitstream from
+ // demuxer.
+ std::queue<OMX_BUFFERHEADERTYPE*> free_input_buffers_;
+
+ // Available input OpenMAX buffers that we can use to issue
+ // OMX_EmptyThisBuffer() call.
+ std::queue<OMX_BUFFERHEADERTYPE*> available_input_buffers_;
+
+ // flag for freeing input/output buffers
+ bool need_free_input_buffers_;
+ bool need_free_output_buffers_;
+
+ // for calling flush callback only once.
+ bool flush_pending_;
+
+ // For output buffer recycling cases.
+ typedef std::pair<scoped_refptr<VideoFrame>,
+ OMX_BUFFERHEADERTYPE*> OutputFrame;
+ std::vector<OutputFrame> output_frames_;
+ bool output_frames_allocated_;
+
+ // port related
+ bool need_setup_output_port_;
+ OmxIlPortState output_port_state_;
+ VideoDecodeEngine::EventHandler* event_handler_;
+
+ DISALLOW_COPY_AND_ASSIGN(OmxVideoDecodeEngine);
+};
+
+} // namespace media
+
+#endif // MEDIA_VIDEO_OMX_VIDEO_DECODE_ENGINE_H_