summaryrefslogtreecommitdiffstats
path: root/media
diff options
context:
space:
mode:
authorwjia@google.com <wjia@google.com@0039d316-1c4b-4281-b951-d872f2087c98>2010-06-07 20:05:42 +0000
committerwjia@google.com <wjia@google.com@0039d316-1c4b-4281-b951-d872f2087c98>2010-06-07 20:05:42 +0000
commit84a661708faa4431cdba5833a33c7834389c65ef (patch)
tree4143eefac3569a4523c4af4984ebddb95400274c /media
parent62dae71cec04b8d040ef86e1bfb4516d13080a97 (diff)
downloadchromium_src-84a661708faa4431cdba5833a33c7834389c65ef.zip
chromium_src-84a661708faa4431cdba5833a33c7834389c65ef.tar.gz
chromium_src-84a661708faa4431cdba5833a33c7834389c65ef.tar.bz2
add support of OpenMAX EGL image decoding
add support of GLES2 EGL image rendering. OpenMAX system memory decoding almost works, except flow control in video renderer base. BUG=none TEST=dev platform Review URL: http://codereview.chromium.org/2456006 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@49089 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'media')
-rw-r--r--media/filters/omx_video_decode_engine.cc143
-rw-r--r--media/filters/omx_video_decode_engine.h6
-rw-r--r--media/filters/video_renderer_base.cc28
-rw-r--r--media/filters/video_renderer_base.h4
-rw-r--r--media/omx/omx_codec_unittest.cc10
-rw-r--r--media/tools/omx_test/omx_test.cc2
-rw-r--r--media/tools/player_x11/gles_video_renderer.cc239
-rw-r--r--media/tools/player_x11/gles_video_renderer.h15
8 files changed, 326 insertions, 121 deletions
diff --git a/media/filters/omx_video_decode_engine.cc b/media/filters/omx_video_decode_engine.cc
index 7f64a50..13ee05e 100644
--- a/media/filters/omx_video_decode_engine.cc
+++ b/media/filters/omx_video_decode_engine.cc
@@ -49,10 +49,13 @@ OmxVideoDecodeEngine::OmxVideoDecodeEngine()
component_handle_(NULL),
output_frames_allocated_(false),
need_setup_output_port_(false) {
+ // TODO(wjia): change uses_egl_image_ to runtime setup
#if ENABLE_EGLIMAGE == 1
uses_egl_image_ = true;
+ DLOG(INFO) << "Uses egl image for output";
#else
uses_egl_image_ = false;
+ DLOG(INFO) << "Uses system memory for output";
#endif
}
@@ -113,12 +116,13 @@ void OmxVideoDecodeEngine::EmptyThisBuffer(scoped_refptr<Buffer> buffer) {
DCHECK_EQ(message_loop_, MessageLoop::current());
if (!CanAcceptInput()) {
- OnFeedDone(buffer);
+ FinishEmptyBuffer(buffer);
return;
}
if (buffer->IsEndOfStream()) {
- input_queue_has_eos_ = true;
+ DLOG(INFO) << "Input queue has EOS";
+ input_queue_has_eos_ = true;
}
// Queue this input buffer.
@@ -164,7 +168,8 @@ VideoDecodeEngine::State OmxVideoDecodeEngine::state() const {
}
void OmxVideoDecodeEngine::Stop(Callback0::Type* done_cb) {
- DCHECK_EQ(message_loop_, MessageLoop::current());
+ // TODO(wjia): make this call in the thread.
+ // DCHECK_EQ(message_loop_, MessageLoop::current());
message_loop_->PostTask(FROM_HERE,
NewRunnableMethod(this, &OmxVideoDecodeEngine::StopTask, done_cb));
@@ -181,50 +186,48 @@ void OmxVideoDecodeEngine::OnFormatChange(
// are already known from upper layer of the stack.
}
-void OmxVideoDecodeEngine::OnFeedDone(scoped_refptr<Buffer> buffer) {
+void OmxVideoDecodeEngine::FinishEmptyBuffer(scoped_refptr<Buffer> buffer) {
DCHECK_EQ(message_loop_, MessageLoop::current());
- empty_this_buffer_callback_->Run(buffer);
+ if (!input_queue_has_eos_)
+ empty_this_buffer_callback_->Run(buffer);
}
-void OmxVideoDecodeEngine::OnReadComplete(OMX_BUFFERHEADERTYPE* buffer) {
+void OmxVideoDecodeEngine::FinishFillBuffer(OMX_BUFFERHEADERTYPE* buffer) {
DCHECK_EQ(message_loop_, MessageLoop::current());
+ scoped_refptr<VideoFrame> frame;
// EOF
if (!buffer) {
- scoped_refptr<VideoFrame> frame = NULL;
+ VideoFrame::CreateEmptyFrame(&frame);
fill_this_buffer_callback_->Run(frame);
return;
}
- scoped_refptr<VideoFrame> frame;
if (uses_egl_image_) {
- frame =
- *static_cast<scoped_refptr<VideoFrame>* >(buffer->pAppPrivate);
- fill_this_buffer_callback_->Run(frame);
- return;
- }
+ frame = static_cast<VideoFrame*>(buffer->pAppPrivate);
+ } else {
+ VideoFrame::CreateFrame(GetSurfaceFormat(),
+ width_, height_,
+ StreamSample::kInvalidTimestamp,
+ StreamSample::kInvalidTimestamp,
+ &frame);
+ if (!frame.get()) {
+ // TODO(jiesun): this is also an error case handled as normal.
+ return;
+ }
- VideoFrame::CreateFrame(GetSurfaceFormat(),
- width_, height_,
- StreamSample::kInvalidTimestamp,
- StreamSample::kInvalidTimestamp,
- &frame);
- if (!frame.get()) {
- // TODO(jiesun): this is also an error case handled as normal.
- return;
+ // TODO(jiesun): Assume YUV 420 format.
+ const int pixels = width_ * height_;
+ memcpy(frame->data(VideoFrame::kYPlane), buffer->pBuffer, pixels);
+ memcpy(frame->data(VideoFrame::kUPlane), buffer->pBuffer + pixels,
+ pixels / 4);
+ memcpy(frame->data(VideoFrame::kVPlane),
+ buffer->pBuffer + pixels + pixels / 4,
+ pixels / 4);
}
- // TODO(jiesun): Assume YUV 420 format.
- const int pixels = width_ * height_;
- memcpy(frame->data(VideoFrame::kYPlane), buffer->pBuffer, pixels);
- memcpy(frame->data(VideoFrame::kUPlane), buffer->pBuffer + pixels,
- pixels / 4);
- memcpy(frame->data(VideoFrame::kVPlane),
- buffer->pBuffer + pixels + pixels /4,
- pixels / 4);
-
- frame->SetTimestamp(base::TimeDelta::FromMilliseconds(buffer->nTimeStamp));
+ frame->SetTimestamp(base::TimeDelta::FromMicroseconds(buffer->nTimeStamp));
frame->SetDuration(frame->GetTimestamp() - last_pts_);
last_pts_ = frame->GetTimestamp();
@@ -444,10 +447,12 @@ bool OmxVideoDecodeEngine::CreateComponent() {
return false;
}
- // override buffer count when EGLImage is used
+ // TODO(wjia): use same buffer recycling for EGLImage and system memory.
+ // Override buffer count when EGLImage is used.
if (uses_egl_image_) {
+ // TODO(wjia): remove hard-coded value
port_format.nBufferCountActual = port_format.nBufferCountMin =
- output_buffer_count_ = 3;
+ output_buffer_count_ = 4;
omxresult = OMX_SetParameter(component_handle_,
OMX_IndexParamPortDefinition,
@@ -470,6 +475,7 @@ void OmxVideoDecodeEngine::DoneSetStateIdle(OMX_STATETYPE state) {
DCHECK_EQ(message_loop_, MessageLoop::current());
DCHECK_EQ(client_state_, kClientInitializing);
DCHECK_EQ(OMX_StateIdle, state);
+ DLOG(INFO) << "OMX video decode engine is in Idle";
il_state_ = kIlIdle;
OnStateSetEventFunc = &OmxVideoDecodeEngine::DoneSetStateExecuting;
@@ -485,6 +491,7 @@ void OmxVideoDecodeEngine::DoneSetStateExecuting(OMX_STATETYPE state) {
DCHECK_EQ(message_loop_, MessageLoop::current());
DCHECK_EQ(client_state_, kClientInitializing);
DCHECK_EQ(OMX_StateExecuting, state);
+ DLOG(INFO) << "OMX video decode engine is in Executing";
il_state_ = kIlExecuting;
client_state_ = kClientRunning;
@@ -503,8 +510,12 @@ void OmxVideoDecodeEngine::DoneSetStateExecuting(OMX_STATETYPE state) {
// Function for receiving output buffers. Hookup for buffer recycling
// and outside allocator.
-void OmxVideoDecodeEngine::OnReceiveOneOutputBuffer(
+void OmxVideoDecodeEngine::FillThisBuffer(
scoped_refptr<VideoFrame> video_frame) {
+ // TODO(wjia): merge buffer recycling for EGLImage and system memory path.
+ if (!video_frame.get() || VideoFrame::TYPE_EGL_IMAGE != video_frame->type())
+ return;
+
OMX_BUFFERHEADERTYPE* omx_buffer = FindOmxBuffer(video_frame);
if (omx_buffer) {
if (kClientRunning == client_state_) {
@@ -524,8 +535,9 @@ void OmxVideoDecodeEngine::OnReceiveOneOutputBuffer(
static_cast<int>(output_frames_.size()) == output_buffer_count_) {
output_frames_allocated_ = true;
- if (need_setup_output_port_)
+ if (need_setup_output_port_) {
SetupOutputPort();
+ }
}
if (kClientError == client_state_) {
@@ -541,6 +553,12 @@ void OmxVideoDecodeEngine::OnPortSettingsChangedRun(OMX_INDEXTYPE index,
DCHECK_EQ(client_state_, kClientRunning);
DCHECK_EQ(port, output_port_);
+ // TODO(wjia): add buffer negotiation between decoder and renderer.
+ if (uses_egl_image_) {
+ DLOG(INFO) << "Port settings are changed";
+ return;
+ }
+
if (index != OMX_IndexParamPortDefinition)
return;
@@ -609,16 +627,19 @@ void OmxVideoDecodeEngine::OnPortDisableEventRun(int port) {
// Enable output port and allocate buffers correspondingly
void OmxVideoDecodeEngine::SetupOutputPort() {
- DCHECK_EQ(client_state_, kClientRunning);
+ DCHECK_EQ(message_loop_, MessageLoop::current());
need_setup_output_port_ = false;
- // Enable output port.
- output_port_state_ = kPortEnabling;
- OnPortEnableEventFunc = &OmxVideoDecodeEngine::OnPortEnableEventRun;
- ChangePort(OMX_CommandPortEnable, output_port_);
- if (kClientError == client_state_) {
- return;
+ // Enable output port when necessary since the port could be waiting for
+ // buffers, instead of port reconfiguration.
+ if (kPortEnabled != output_port_state_) {
+ output_port_state_ = kPortEnabling;
+ OnPortEnableEventFunc = &OmxVideoDecodeEngine::OnPortEnableEventRun;
+ ChangePort(OMX_CommandPortEnable, output_port_);
+ if (kClientError == client_state_) {
+ return;
+ }
}
// TODO(wjia): add state checking
@@ -633,8 +654,8 @@ void OmxVideoDecodeEngine::SetupOutputPort() {
// Post output port enabling
void OmxVideoDecodeEngine::OnPortEnableEventRun(int port) {
DCHECK_EQ(message_loop_, MessageLoop::current());
- DCHECK_EQ(client_state_, kClientRunning);
DCHECK_EQ(port, output_port_);
+ DCHECK_EQ(client_state_, kClientRunning);
output_port_state_ = kPortEnabled;
last_pts_ = base::TimeDelta::FromMilliseconds(0);
@@ -669,6 +690,7 @@ void OmxVideoDecodeEngine::StopTask(Callback* callback) {
void OmxVideoDecodeEngine::DeinitFromExecuting(OMX_STATETYPE state) {
DCHECK_EQ(state, OMX_StateExecuting);
+ DLOG(INFO) << "Deinit from Executing";
OnStateSetEventFunc = &OmxVideoDecodeEngine::DeinitFromIdle;
TransitionToState(OMX_StateIdle);
expected_il_state_ = kIlIdle;
@@ -678,6 +700,7 @@ void OmxVideoDecodeEngine::DeinitFromIdle(OMX_STATETYPE state) {
DCHECK_EQ(message_loop_, MessageLoop::current());
DCHECK_EQ(state, OMX_StateIdle);
+ DLOG(INFO) << "Deinit from Idle";
il_state_ = kIlIdle;
OnStateSetEventFunc = &OmxVideoDecodeEngine::DeinitFromLoaded;
TransitionToState(OMX_StateLoaded);
@@ -691,6 +714,7 @@ void OmxVideoDecodeEngine::DeinitFromLoaded(OMX_STATETYPE state) {
DCHECK_EQ(message_loop_, MessageLoop::current());
DCHECK_EQ(state, OMX_StateLoaded);
+ DLOG(INFO) << "Deinit from Loaded";
il_state_ = kIlLoaded;
if (component_handle_) {
OMX_ERRORTYPE result = OMX_FreeHandle(component_handle_);
@@ -751,6 +775,12 @@ bool OmxVideoDecodeEngine::AllocateOutputBuffers() {
DCHECK_EQ(message_loop_, MessageLoop::current());
// DCHECK_EQ(output_buffer_count_, static_cast<int>(output_frames_.size()));
+ if (uses_egl_image_ && !output_frames_allocated_) {
+ DLOG(INFO) << "Output frames are not allocated yet";
+ need_setup_output_port_ = true;
+ return true;
+ }
+
for (int i = 0; i < output_buffer_count_; ++i) {
OMX_BUFFERHEADERTYPE* buffer;
OMX_ERRORTYPE error;
@@ -759,7 +789,7 @@ bool OmxVideoDecodeEngine::AllocateOutputBuffers() {
scoped_refptr<VideoFrame> video_frame = output_frame.first;
DCHECK(!output_frame.second);
error = OMX_UseEGLImage(component_handle_, &buffer, output_port_,
- &video_frame, video_frame->private_buffer());
+ video_frame.get(), video_frame->private_buffer());
if (error != OMX_ErrorNone)
return false;
output_frames_[i].second = buffer;
@@ -798,8 +828,9 @@ void OmxVideoDecodeEngine::FreeOutputBuffers() {
if (uses_egl_image_) {
for (size_t i = 0; i < output_frames_.size(); ++i) {
OMX_BUFFERHEADERTYPE* omx_buffer = output_frames_[i].second;
- if (!omx_buffer)
+ if (omx_buffer) {
OMX_FreeBuffer(component_handle_, output_port_, omx_buffer);
+ }
}
output_frames_.clear();
} else {
@@ -814,7 +845,7 @@ void OmxVideoDecodeEngine::FreeInputQueue() {
while (!pending_input_queue_.empty()) {
scoped_refptr<Buffer> buffer = pending_input_queue_.front();
- OnFeedDone(buffer);
+ FinishEmptyBuffer(buffer);
pending_input_queue_.pop();
}
}
@@ -911,13 +942,16 @@ void OmxVideoDecodeEngine::EmptyBufferTask() {
available_input_buffers_.pop();
input_has_fed_eos_ = buffer->IsEndOfStream();
+ if (input_has_fed_eos_) {
+ DLOG(INFO) << "Input has fed EOS";
+ }
// setup |omx_buffer|.
omx_buffer->pBuffer = const_cast<OMX_U8*>(buffer.get()->GetData());
omx_buffer->nFilledLen = buffer.get()->GetDataSize();
omx_buffer->nAllocLen = omx_buffer->nFilledLen;
omx_buffer->nFlags |= input_has_fed_eos_ ? OMX_BUFFERFLAG_EOS : 0;
- omx_buffer->nTimeStamp = buffer->GetTimestamp().InMilliseconds();
+ omx_buffer->nTimeStamp = buffer->GetTimestamp().InMicroseconds();
omx_buffer->pAppPrivate = buffer.get();
// Give this buffer to OMX.
@@ -940,10 +974,10 @@ void OmxVideoDecodeEngine::FulfillOneRead() {
// If the buffer is real then send it to downstream.
// Otherwise if it is an end-of-stream buffer then just drop it.
if (buffer_id != kEosBuffer) {
- OnReadComplete(output_buffers_[buffer_id]);
+ FinishFillBuffer(output_buffers_[buffer_id]);
SendOutputBufferToComponent(output_buffers_[buffer_id]);
} else {
- OnReadComplete(static_cast<OMX_BUFFERHEADERTYPE*>(NULL));
+ FinishFillBuffer(static_cast<OMX_BUFFERHEADERTYPE*>(NULL));
}
} else if (uses_egl_image_ && !output_frames_ready_.empty()) {
OMX_BUFFERHEADERTYPE *buffer = output_frames_ready_.front();
@@ -952,9 +986,9 @@ void OmxVideoDecodeEngine::FulfillOneRead() {
// If the buffer is real then send it to downstream.
// Otherwise if it is an end-of-stream buffer then just drop it.
if (buffer->nFlags & OMX_BUFFERFLAG_EOS) {
- OnReadComplete(static_cast<OMX_BUFFERHEADERTYPE*>(NULL));
+ FinishFillBuffer(static_cast<OMX_BUFFERHEADERTYPE*>(NULL));
} else {
- OnReadComplete(buffer);
+ FinishFillBuffer(buffer);
}
}
}
@@ -968,7 +1002,7 @@ void OmxVideoDecodeEngine::InitialFillBuffer() {
// Ask the decoder to fill the output buffers.
if (uses_egl_image_) {
- for (size_t i = 0; i < available_output_frames_.size(); ++i) {
+ while (!available_output_frames_.empty()) {
OMX_BUFFERHEADERTYPE* omx_buffer = available_output_frames_.front();
available_output_frames_.pop();
SendOutputBufferToComponent(omx_buffer);
@@ -1068,7 +1102,8 @@ void OmxVideoDecodeEngine::EmptyBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
DCHECK_EQ(message_loop_, MessageLoop::current());
Buffer* stored_buffer = static_cast<Buffer*>(buffer->pAppPrivate);
- OnFeedDone(stored_buffer);
+ buffer->pAppPrivate = NULL;
+ FinishEmptyBuffer(stored_buffer);
stored_buffer->Release();
// Enqueue the available buffer beacuse the decoder has consumed it.
@@ -1090,6 +1125,7 @@ void OmxVideoDecodeEngine::FillBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
if (uses_egl_image_) {
if (buffer->nFlags & OMX_BUFFERFLAG_EOS) {
output_eos_ = true;
+ DLOG(INFO) << "Output has EOS";
}
output_frames_ready_.push(buffer);
} else {
@@ -1108,6 +1144,7 @@ void OmxVideoDecodeEngine::FillBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
if (buffer->nFlags & OMX_BUFFERFLAG_EOS || !buffer->nFilledLen) {
buffer_id = kEosBuffer;
output_eos_ = true;
+ DLOG(INFO) << "Output has EOS";
}
output_buffers_ready_.push(buffer_id);
}
diff --git a/media/filters/omx_video_decode_engine.h b/media/filters/omx_video_decode_engine.h
index 10f1ead..c535125 100644
--- a/media/filters/omx_video_decode_engine.h
+++ b/media/filters/omx_video_decode_engine.h
@@ -43,6 +43,7 @@ class OmxVideoDecodeEngine :
FillThisBufferCallback* fill_buffer_callback,
Task* done_cb);
virtual void EmptyThisBuffer(scoped_refptr<Buffer> buffer);
+ virtual void FillThisBuffer(scoped_refptr<VideoFrame> video_frame);
virtual void Flush(Task* done_cb);
virtual VideoFrame::Format GetSurfaceFormat() const;
@@ -90,11 +91,11 @@ class OmxVideoDecodeEngine :
typedef Callback0::Type Callback;
// calls into other classes
- void OnFeedDone(scoped_refptr<Buffer> buffer);
+ void FinishEmptyBuffer(scoped_refptr<Buffer> buffer);
void OnFormatChange(
const OmxConfigurator::MediaFormat& input_format,
const OmxConfigurator::MediaFormat& output_format);
- void OnReadComplete(OMX_BUFFERHEADERTYPE* buffer);
+ void FinishFillBuffer(OMX_BUFFERHEADERTYPE* buffer);
// Helper method to perform tasks when this object is stopped.
void OnStopDone();
@@ -107,7 +108,6 @@ class OmxVideoDecodeEngine :
bool CreateComponent();
void DoneSetStateIdle(OMX_STATETYPE state);
void DoneSetStateExecuting(OMX_STATETYPE state);
- void OnReceiveOneOutputBuffer(scoped_refptr<VideoFrame> video_frame);
void OnPortSettingsChangedRun(OMX_INDEXTYPE index, int port);
void OnPortDisableEventRun(int port);
void SetupOutputPort();
diff --git a/media/filters/video_renderer_base.cc b/media/filters/video_renderer_base.cc
index 4833d34..c689c16 100644
--- a/media/filters/video_renderer_base.cc
+++ b/media/filters/video_renderer_base.cc
@@ -10,8 +10,8 @@
namespace media {
-// Limit our read ahead to three frames. One frame is typically in flux at all
-// times, as in frame n is discarded at the top of ThreadMain() while frame
+// Limit our read ahead to at least 3 frames. One frame is typically in flux at
+// all times, as in frame n is discarded at the top of ThreadMain() while frame
// (n + kMaxFrames) is being asynchronously fetched. The remaining two frames
// allow us to advance the current frame as well as read the timestamp of the
// following frame for more accurate timing.
@@ -20,7 +20,10 @@ namespace media {
// at the expense of memory (~0.5MB and ~1.3MB per frame for 480p and 720p
// resolutions, respectively). This can help on lower-end systems if there are
// difficult sections in the movie and decoding slows down.
-static const size_t kMaxFrames = 3;
+//
+// Set to 4 because some vendor's driver doesn't allow buffer count to go below
+// preset limit, e.g., EGLImage path.
+static const size_t kMaxFrames = 4;
// This equates to ~16.67 fps, which is just slow enough to be tolerable when
// our video renderer is ahead of the audio playback.
@@ -135,6 +138,17 @@ void VideoRendererBase::Seek(base::TimeDelta time, FilterCallback* callback) {
for (size_t i = 0; i < kMaxFrames; ++i) {
ScheduleRead_Locked();
}
+
+ // TODO(wjia): This would be removed if "Paint" thread allows renderer to
+ // allocate EGL images before filters are in playing state.
+ if (uses_egl_image_) {
+ state_ = kPaused;
+ VideoFrame::CreateBlackFrame(width_, height_, &current_frame_);
+ DCHECK(current_frame_);
+ OnFrameAvailable();
+ seek_callback_->Run();
+ seek_callback_.reset();
+ }
}
void VideoRendererBase::Initialize(VideoDecoder* decoder,
@@ -251,6 +265,10 @@ void VideoRendererBase::ThreadMain() {
if (!frames_.empty() && !frames_.front()->IsEndOfStream()) {
DCHECK_EQ(current_frame_, frames_.front());
frames_.pop_front();
+ if (uses_egl_image_ &&
+ media::VideoFrame::TYPE_EGL_IMAGE == current_frame_->type()) {
+ decoder_->FillThisBuffer(current_frame_);
+ }
ScheduleRead_Locked();
}
@@ -269,6 +287,7 @@ void VideoRendererBase::ThreadMain() {
// If the new front frame is end of stream, we've officially ended.
if (frames_.front()->IsEndOfStream()) {
state_ = kEnded;
+ LOG(INFO) << "Video render gets EOS";
host()->NotifyEnded();
continue;
}
@@ -330,6 +349,9 @@ void VideoRendererBase::OnFillBufferDone(scoped_refptr<VideoFrame> frame) {
// Enqueue the frame.
frames_.push_back(frame);
+ if (uses_egl_image_ &&
+ media::VideoFrame::TYPE_EGL_IMAGE != current_frame_->type())
+ current_frame_ = frame;
DCHECK_LE(frames_.size(), kMaxFrames);
frame_available_.Signal();
diff --git a/media/filters/video_renderer_base.h b/media/filters/video_renderer_base.h
index f66d1cd..551d234 100644
--- a/media/filters/video_renderer_base.h
+++ b/media/filters/video_renderer_base.h
@@ -81,6 +81,10 @@ class VideoRendererBase : public VideoRenderer,
// class executes on.
virtual void OnFrameAvailable() = 0;
+ virtual VideoDecoder* GetDecoder() {
+ return decoder_.get();
+ }
+
private:
// Callback from video decoder to deliver decoded video frames and decrements
// |pending_reads_|.
diff --git a/media/omx/omx_codec_unittest.cc b/media/omx/omx_codec_unittest.cc
index ebec9c9..92201d4 100644
--- a/media/omx/omx_codec_unittest.cc
+++ b/media/omx/omx_codec_unittest.cc
@@ -307,7 +307,7 @@ class OmxCodecTest : public testing::Test {
void FillBufferDoneCallback(scoped_refptr<VideoFrame> frame) {
output_units_.push_back(frame);
- if (frame.get() == NULL)
+ if (frame->IsEndOfStream())
got_eos_ = true;
}
@@ -414,9 +414,9 @@ TEST_F(OmxCodecTest, NormalFlow) {
// Send EndOfStream, expect eos flag.
SendEOSInputBuffer();
- EXPECT_EQ(kBufferCount, static_cast<int>(input_units_.size()));
+ EXPECT_EQ(kBufferCount - 1, static_cast<int>(input_units_.size()));
EXPECT_EQ(1, static_cast<int>(output_units_.size()));
- EXPECT_EQ(count-1, static_cast<int>(output_pool_.size()));
+ EXPECT_EQ(count - 1, static_cast<int>(output_pool_.size()));
EXPECT_EQ(true, got_eos_);
// Shutdown.
@@ -468,9 +468,9 @@ TEST_F(OmxCodecTest, RecycleInputBuffers) {
// Send EndOfStream, expect eos flag.
SendEOSInputBuffer();
- EXPECT_EQ(kBufferCount, static_cast<int>(input_units_.size()));
+ EXPECT_EQ(kBufferCount - 1, static_cast<int>(input_units_.size()));
EXPECT_EQ(1, static_cast<int>(output_units_.size()));
- EXPECT_EQ(count-1, static_cast<int>(output_pool_.size()));
+ EXPECT_EQ(count - 1, static_cast<int>(output_pool_.size()));
EXPECT_EQ(true, got_eos_);
// Shutdown.
diff --git a/media/tools/omx_test/omx_test.cc b/media/tools/omx_test/omx_test.cc
index 560e6d6..2deddeb 100644
--- a/media/tools/omx_test/omx_test.cc
+++ b/media/tools/omx_test/omx_test.cc
@@ -127,7 +127,7 @@ class TestApp : public base::RefCountedThreadSafe<TestApp> {
first_sample_delivered_time_ = base::TimeTicks::HighResNow();
// If we are readding to the end, then stop.
- if (frame.get() == NULL) {
+ if (frame->IsEndOfStream()) {
engine_->Stop(NewCallback(this, &TestApp::StopCallback));
return;
}
diff --git a/media/tools/player_x11/gles_video_renderer.cc b/media/tools/player_x11/gles_video_renderer.cc
index 85e4329..a275366 100644
--- a/media/tools/player_x11/gles_video_renderer.cc
+++ b/media/tools/player_x11/gles_video_renderer.cc
@@ -5,6 +5,7 @@
#include "media/tools/player_x11/gles_video_renderer.h"
#include <dlfcn.h>
+#include <EGL/eglext.h>
#include <X11/Xutil.h>
#include <X11/extensions/Xrender.h>
#include <X11/extensions/Xcomposite.h>
@@ -18,7 +19,9 @@
GlesVideoRenderer* GlesVideoRenderer::instance_ = NULL;
GlesVideoRenderer::GlesVideoRenderer(Display* display, Window window)
- : display_(display),
+ : egl_create_image_khr_(NULL),
+ egl_destroy_image_khr_(NULL),
+ display_(display),
window_(window),
new_frame_(false),
egl_display_(NULL),
@@ -43,6 +46,14 @@ void GlesVideoRenderer::OnStop() {
// calls may fail. Need to fix them.
eglMakeCurrent(egl_display_, EGL_NO_SURFACE,
EGL_NO_SURFACE, EGL_NO_CONTEXT);
+ for (size_t i = 0; i < egl_frames_.size(); ++i) {
+ scoped_refptr<media::VideoFrame> frame = egl_frames_[i].first;
+ if (frame->private_buffer())
+ egl_destroy_image_khr_(egl_display_, frame->private_buffer());
+ if (egl_frames_[i].second)
+ glDeleteTextures(1, &egl_frames_[i].second);
+ }
+ egl_frames_.clear();
eglDestroyContext(egl_display_, egl_context_);
eglDestroySurface(egl_display_, egl_surface_);
}
@@ -70,6 +81,14 @@ static const float kTextureCoords[8] = {
1, 1,
};
+// Texture Coordinates mapping the entire texture for EGL image.
+static const float kTextureCoordsEgl[8] = {
+ 0, 1,
+ 0, 0,
+ 1, 1,
+ 1, 0,
+};
+
// Pass-through vertex shader.
static const char kVertexShader[] =
"precision highp float; precision highp int;\n"
@@ -103,6 +122,16 @@ static const char kFragmentShader[] =
" gl_FragColor = vec4(rgb, 1);\n"
"}\n";
+// Color shader for EGLImage.
+static const char kFragmentShaderEgl[] =
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform sampler2D tex;\n"
+ "\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(tex, interp_tc);\n"
+ "}\n";
+
// Buffer size for compile errors.
static const unsigned int kErrorSize = 4096;
@@ -149,8 +178,23 @@ void GlesVideoRenderer::Paint() {
scoped_refptr<media::VideoFrame> video_frame;
GetCurrentFrame(&video_frame);
- if (!video_frame)
+ if (!video_frame.get()) {
return;
+ }
+
+ if (uses_egl_image_) {
+ if (media::VideoFrame::TYPE_EGL_IMAGE == video_frame->type()) {
+
+ GLuint texture = FindTexture(video_frame);
+ if (texture) {
+ glActiveTexture(GL_TEXTURE0);
+ glBindTexture(GL_TEXTURE_2D, texture);
+ glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
+ eglSwapBuffers(egl_display_, egl_surface_);
+ }
+ }
+ return;
+ }
// Convert YUV frame to RGB.
DCHECK(video_frame->format() == media::VideoFrame::YV12 ||
@@ -200,6 +244,17 @@ void GlesVideoRenderer::Paint() {
eglSwapBuffers(egl_display_, egl_surface_);
}
+// find if texture exists corresponding to video_frame
+GLuint GlesVideoRenderer::FindTexture(
+ scoped_refptr<media::VideoFrame> video_frame) {
+ for (size_t i = 0; i < egl_frames_.size(); ++i) {
+ scoped_refptr<media::VideoFrame> frame = egl_frames_[i].first;
+ if (video_frame->private_buffer() == frame->private_buffer())
+ return egl_frames_[i].second;
+ }
+ return NULL;
+}
+
bool GlesVideoRenderer::InitializeGles() {
// Resize the window to fit that of the video.
XResizeWindow(display_, window_, width_, height_);
@@ -288,6 +343,127 @@ bool GlesVideoRenderer::InitializeGles() {
eglQuerySurface(egl_display_, egl_surface_, EGL_HEIGHT, &height);
glViewport(0, 0, width_, height_);
+ if (uses_egl_image_) {
+ CreateTextureAndProgramEgl();
+ return true;
+ }
+
+ CreateTextureAndProgramYuv2Rgb();
+
+ // We are getting called on a thread. Release the context so that it can be
+ // made current on the main thread.
+ // TODO(hclam): Fix this if neccessary. Currently the following call fails
+ // for some drivers.
+ // eglMakeCurrent(egl_display_, EGL_NO_SURFACE,
+ // EGL_NO_SURFACE, EGL_NO_CONTEXT);
+ return true;
+}
+
+void GlesVideoRenderer::CreateShader(GLuint program,
+ GLenum type,
+ const char* source,
+ int size) {
+ GLuint shader = glCreateShader(type);
+ glShaderSource(shader, 1, &source, &size);
+ glCompileShader(shader);
+ int result = GL_FALSE;
+ glGetShaderiv(shader, GL_COMPILE_STATUS, &result);
+ if (!result) {
+ char log[kErrorSize];
+ int len;
+ glGetShaderInfoLog(shader, kErrorSize - 1, &len, log);
+ log[kErrorSize - 1] = 0;
+ LOG(FATAL) << log;
+ }
+ glAttachShader(program, shader);
+ glDeleteShader(shader);
+}
+
+void GlesVideoRenderer::LinkProgram(GLuint program) {
+ glLinkProgram(program);
+ int result = GL_FALSE;
+ glGetProgramiv(program, GL_LINK_STATUS, &result);
+ if (!result) {
+ char log[kErrorSize];
+ int len;
+ glGetProgramInfoLog(program, kErrorSize - 1, &len, log);
+ log[kErrorSize - 1] = 0;
+ LOG(FATAL) << log;
+ }
+ glUseProgram(program);
+ glDeleteProgram(program);
+}
+
+void GlesVideoRenderer::CreateTextureAndProgramEgl() {
+ if (!egl_create_image_khr_)
+ egl_create_image_khr_ = reinterpret_cast<PFNEGLCREATEIMAGEKHRPROC>
+ (eglGetProcAddress("eglCreateImageKHR"));
+ if (!egl_destroy_image_khr_)
+ egl_destroy_image_khr_ = reinterpret_cast<PFNEGLDESTROYIMAGEKHRPROC>
+ (eglGetProcAddress("eglDestroyImageKHR"));
+ // TODO(wjia): get count from decoder.
+ for (int i = 0; i < 4; i++) {
+ GLuint texture;
+ EGLint attrib = EGL_NONE;
+ EGLImageKHR egl_image;
+
+ glGenTextures(1, &texture);
+ glBindTexture(GL_TEXTURE_2D, texture);
+ glTexImage2D(
+ GL_TEXTURE_2D,
+ 0,
+ GL_RGBA,
+ width_,
+ height_,
+ 0,
+ GL_RGBA,
+ GL_UNSIGNED_BYTE,
+ NULL);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+
+ egl_image = egl_create_image_khr_(
+ egl_display_,
+ egl_context_,
+ EGL_GL_TEXTURE_2D_KHR,
+ reinterpret_cast<EGLClientBuffer>(texture),
+ &attrib);
+
+ scoped_refptr<media::VideoFrame> video_frame;
+ const base::TimeDelta kZero;
+ media::VideoFrame:: CreatePrivateFrame(
+ media::VideoFrame::TYPE_EGL_IMAGE,
+ media::VideoFrame::RGB565,
+ width_, height_, kZero, kZero,
+ egl_image,
+ &video_frame);
+ egl_frames_.push_back(std::make_pair(video_frame, texture));
+ GetDecoder()->FillThisBuffer(video_frame);
+ }
+
+ GLuint program = glCreateProgram();
+
+ // Create shader for EGL image
+ CreateShader(program, GL_VERTEX_SHADER,
+ kVertexShader, sizeof(kVertexShader));
+ CreateShader(program, GL_FRAGMENT_SHADER,
+ kFragmentShaderEgl, sizeof(kFragmentShaderEgl));
+ LinkProgram(program);
+
+ // Bind parameters.
+ glUniform1i(glGetUniformLocation(program, "tex"), 0);
+
+ int pos_location = glGetAttribLocation(program, "in_pos");
+ glEnableVertexAttribArray(pos_location);
+ glVertexAttribPointer(pos_location, 2, GL_FLOAT, GL_FALSE, 0, kVertices);
+
+ int tc_location = glGetAttribLocation(program, "in_tc");
+ glEnableVertexAttribArray(tc_location);
+ glVertexAttribPointer(tc_location, 2, GL_FLOAT, GL_FALSE, 0,
+ kTextureCoordsEgl);
+}
+
+void GlesVideoRenderer::CreateTextureAndProgramYuv2Rgb() {
// Create 3 textures, one for each plane, and bind them to different
// texture units.
glGenTextures(media::VideoFrame::kNumYUVPlanes, textures_);
@@ -313,52 +489,11 @@ bool GlesVideoRenderer::InitializeGles() {
GLuint program = glCreateProgram();
// Create our YUV->RGB shader.
- GLuint vertex_shader = glCreateShader(GL_VERTEX_SHADER);
- const char* vs_source = kVertexShader;
- int vs_size = sizeof(kVertexShader);
- glShaderSource(vertex_shader, 1, &vs_source, &vs_size);
- glCompileShader(vertex_shader);
- int result = GL_FALSE;
- glGetShaderiv(vertex_shader, GL_COMPILE_STATUS, &result);
- if (!result) {
- char log[kErrorSize];
- int len;
- glGetShaderInfoLog(vertex_shader, kErrorSize - 1, &len, log);
- log[kErrorSize - 1] = 0;
- LOG(FATAL) << log;
- }
- glAttachShader(program, vertex_shader);
- glDeleteShader(vertex_shader);
-
- GLuint fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
- const char* ps_source = kFragmentShader;
- int ps_size = sizeof(kFragmentShader);
- glShaderSource(fragment_shader, 1, &ps_source, &ps_size);
- glCompileShader(fragment_shader);
- result = GL_FALSE;
- glGetShaderiv(fragment_shader, GL_COMPILE_STATUS, &result);
- if (!result) {
- char log[kErrorSize];
- int len;
- glGetShaderInfoLog(fragment_shader, kErrorSize - 1, &len, log);
- log[kErrorSize - 1] = 0;
- LOG(FATAL) << log;
- }
- glAttachShader(program, fragment_shader);
- glDeleteShader(fragment_shader);
-
- glLinkProgram(program);
- result = GL_FALSE;
- glGetProgramiv(program, GL_LINK_STATUS, &result);
- if (!result) {
- char log[kErrorSize];
- int len;
- glGetProgramInfoLog(program, kErrorSize - 1, &len, log);
- log[kErrorSize - 1] = 0;
- LOG(FATAL) << log;
- }
- glUseProgram(program);
- glDeleteProgram(program);
+ CreateShader(program, GL_VERTEX_SHADER,
+ kVertexShader, sizeof(kVertexShader));
+ CreateShader(program, GL_FRAGMENT_SHADER,
+ kFragmentShaderEgl, sizeof(kFragmentShader));
+ LinkProgram(program);
// Bind parameters.
glUniform1i(glGetUniformLocation(program, "y_tex"), 0);
@@ -377,12 +512,4 @@ bool GlesVideoRenderer::InitializeGles() {
glEnableVertexAttribArray(tc_location);
glVertexAttribPointer(tc_location, 2, GL_FLOAT, GL_FALSE, 0,
kTextureCoords);
-
- // We are getting called on a thread. Release the context so that it can be
- // made current on the main thread.
- // TODO(hclam): Fix this if neccessary. Currently the following call fails
- // for some drivers.
- // eglMakeCurrent(egl_display_, EGL_NO_SURFACE,
- // EGL_NO_SURFACE, EGL_NO_CONTEXT);
- return true;
}
diff --git a/media/tools/player_x11/gles_video_renderer.h b/media/tools/player_x11/gles_video_renderer.h
index 39478ac..075bdbc 100644
--- a/media/tools/player_x11/gles_video_renderer.h
+++ b/media/tools/player_x11/gles_video_renderer.h
@@ -6,12 +6,14 @@
#define MEDIA_TOOLS_PLAYER_X11_GL_VIDEO_RENDERER_H_
#include <EGL/egl.h>
+#include <EGL/eglext.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include "base/lock.h"
#include "base/scoped_ptr.h"
#include "media/base/factory.h"
+#include "media/base/video_frame.h"
#include "media/filters/video_renderer_base.h"
class GlesVideoRenderer : public media::VideoRendererBase {
@@ -44,7 +46,16 @@ class GlesVideoRenderer : public media::VideoRendererBase {
friend class scoped_refptr<GlesVideoRenderer>;
virtual ~GlesVideoRenderer();
+ GLuint FindTexture(scoped_refptr<media::VideoFrame> video_frame);
bool InitializeGles();
+ void CreateShader(GLuint program, GLenum type,
+ const char* vs_source, int vs_size);
+ void LinkProgram(GLuint program);
+ void CreateTextureAndProgramEgl();
+ void CreateTextureAndProgramYuv2Rgb();
+
+ PFNEGLCREATEIMAGEKHRPROC egl_create_image_khr_;
+ PFNEGLDESTROYIMAGEKHRPROC egl_destroy_image_khr_;
int width_;
int height_;
@@ -62,6 +73,10 @@ class GlesVideoRenderer : public media::VideoRendererBase {
EGLSurface egl_surface_;
EGLContext egl_context_;
+ // textures for EGL image
+ typedef std::pair<scoped_refptr<media::VideoFrame>, GLuint> EglFrame;
+ std::vector<EglFrame> egl_frames_;
+
// 3 textures, one for each plane.
GLuint textures_[3];