summaryrefslogtreecommitdiffstats
path: root/chrome/renderer/media
diff options
context:
space:
mode:
authorjochen@chromium.org <jochen@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2010-09-29 09:30:07 +0000
committerjochen@chromium.org <jochen@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2010-09-29 09:30:07 +0000
commit63b39c73dd951416f6c1808f34a10ca422336610 (patch)
tree23346799369839cf51e1d831f9d97de1629934d0 /chrome/renderer/media
parentddaa08c97c8a0f35d709d5fd1191b093fbae1c36 (diff)
downloadchromium_src-63b39c73dd951416f6c1808f34a10ca422336610.zip
chromium_src-63b39c73dd951416f6c1808f34a10ca422336610.tar.gz
chromium_src-63b39c73dd951416f6c1808f34a10ca422336610.tar.bz2
Revert 60912 - Some cleanup work in GpuVideoDecoderHost and IpcVideoDecoder
Seeing that GpuVideoDecoderHost has a very similar interface to VideoDecodeEngine, this patch makes GpuVideoDecoderHost implement VideoDecodeEngine. Also did some cleanup work to remove a code path that doesn't fit into the buffer allocation model we are moving to. BUG=53714 TEST=Tree is green. Code still doesn't work yet. Review URL: http://codereview.chromium.org/3393014 TBR=hclam@chromium.org Review URL: http://codereview.chromium.org/3531002 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@60922 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'chrome/renderer/media')
-rw-r--r--chrome/renderer/media/gles2_video_decode_context.cc6
-rw-r--r--chrome/renderer/media/gles2_video_decode_context.h74
-rw-r--r--chrome/renderer/media/ipc_video_decoder.cc362
-rw-r--r--chrome/renderer/media/ipc_video_decoder.h65
4 files changed, 351 insertions, 156 deletions
diff --git a/chrome/renderer/media/gles2_video_decode_context.cc b/chrome/renderer/media/gles2_video_decode_context.cc
index 50180ef..b7eec02 100644
--- a/chrome/renderer/media/gles2_video_decode_context.cc
+++ b/chrome/renderer/media/gles2_video_decode_context.cc
@@ -5,10 +5,8 @@
#include "chrome/renderer/media/gles2_video_decode_context.h"
Gles2VideoDecodeContext::Gles2VideoDecodeContext(
- MessageLoop* message_loop, bool memory_mapped, ggl::Context* context)
- : message_loop_(message_loop),
- memory_mapped_(memory_mapped),
- context_(context) {
+ StorageType type, ggl::Context* context)
+ : message_loop_(MessageLoop::current()), type_(type), context_(context) {
}
Gles2VideoDecodeContext::~Gles2VideoDecodeContext() {
diff --git a/chrome/renderer/media/gles2_video_decode_context.h b/chrome/renderer/media/gles2_video_decode_context.h
index 3e56e73..4f556ab 100644
--- a/chrome/renderer/media/gles2_video_decode_context.h
+++ b/chrome/renderer/media/gles2_video_decode_context.h
@@ -24,35 +24,38 @@ class Context;
// commands specific to Chrome's renderer process to provide needed resources.
//
// There are two different kinds of video frame storage provided by this class:
-// 1. Memory mapped textures (aka software decoding mode).
-// Each texture is memory mapped and appears to the VideoDecodeEngine as
-// system memory.
+// 1. Memory mapped YUV textures (aka software decoding mode).
+// Each video frame allocated is backed by 3 luminance textures carrying
+// the Y, U and V planes.
//
-// The usage of the textures is that the VideoDecodeEngine is performing
+// Furthermore each texture is memory mapped and appears to the
+// VideoDecodeEngine as 3 planes backed by system memory.
+//
+// The usage of these 3 textures is that the VideoDecodeEngine is performing
// software video decoding and use them as if they are allocated in plain
// system memory (in fact they are allocated in system memory and shared
// bwith the GPU process). An additional step of uploading the content to
// video memory is needed. Since VideoDecodeEngine is unaware of the video
-// memory, this upload operation is performed by calling
-// UploadToVideoFrame().
+// memory, this upload operation is performed by video renderer provided by
+// Chrome.
//
// After the content is uploaded to video memory, WebKit will see the video
-// frame as textures and will perform the necessary operations for
+// frame as 3 textures and will perform the necessary operations for
// rendering.
//
-// 2. Opaque textures (aka hardware decoding mode).
-// In this mode of operation each video frame is backed by some opaque
-// textures. This is used only when hardware video decoding is used. The
-// textures needs to be generated and allocated inside the renderer process
-// first. This will establish a translation between texture ID in the
-// renderer process and the GPU process.
+// 2. RGBA texture (aka hardware decoding mode).
+// In this mode of operation each video frame is backed by a RGBA texture.
+// This is used only when hardware video decoding is used. The texture needs
+// to be generated and allocated inside the renderer process first. This
+// will establish a translation between texture ID in the renderer process
+// and the GPU process.
//
// The texture ID generated is used by IpcVideoDecodeEngine only to be sent
// the GPU process. Inside the GPU process the texture ID is translated to
// a real texture ID inside the actual context. The real texture ID is then
// assigned to the hardware video decoder for storing the video frame.
//
-// WebKit will see the video frame as a normal textures and perform
+// WebKit will see the video frame as a normal RGBA texture and perform
// necessary render operations.
//
// In both operation modes, the objective is to have WebKit see the video frames
@@ -60,22 +63,36 @@ class Context;
//
// THREAD SEMANTICS
//
-// All methods of this class can be called on any thread. GLES2 context and all
-// OpenGL method calls are accessed on the Render Thread. As as result all Tasks
-// given to this object are executed on the Render Thread.
+// This class is accessed on two threads, namely the Render Thread and the
+// Video Decoder Thread.
+//
+// GLES2 context and all OpenGL method calls should be accessed on the Render
+// Thread.
//
-// Since this class is not refcounted, it is important to destroy objects of
-// this class only when the Task given to Destroy() is called.
+// VideoDecodeContext implementations are accessed on the Video Decoder Thread.
//
class Gles2VideoDecodeContext : public media::VideoDecodeContext {
public:
- // |message_loop| is the message of the Render Thread.
- // |memory_mapped| determines if textures allocated are memory mapped.
- // |context| is the graphics context for generating textures.
- Gles2VideoDecodeContext(MessageLoop* message_loop,
- bool memory_mapped, ggl::Context* context);
+ enum StorageType {
+ // This video decode context provides YUV textures as storage. This is used
+ // only in software decoding mode.
+ kMemoryMappedYuvTextures,
+
+ // This video decode context provides RBGA textures as storage. This is
+ // used in hardware decoding mode.
+ kRgbaTextures,
+ };
+
+ //--------------------------------------------------------------------------
+ // Render Thread
+ Gles2VideoDecodeContext(StorageType type, ggl::Context* context);
+
+ // TODO(hclam): Need to figure out which thread destroys this object.
virtual ~Gles2VideoDecodeContext();
+ //--------------------------------------------------------------------------
+ // Video Decoder Thread
+
// media::VideoDecodeContext implementation.
virtual void* GetDevice();
virtual void AllocateVideoFrames(
@@ -87,15 +104,18 @@ class Gles2VideoDecodeContext : public media::VideoDecodeContext {
Task* task);
virtual void Destroy(Task* task);
+ //--------------------------------------------------------------------------
+ // Any thread
// Accessor of the current mode of this decode context.
- bool IsMemoryMapped() const { return memory_mapped_; }
+ bool IsMemoryMapped() const { return type_ == kMemoryMappedYuvTextures; }
private:
- // Message loop for Render Thread.
+ // Message loop that this object lives on. This is the message loop that
+ // this object is created.
MessageLoop* message_loop_;
// Type of storage provided by this class.
- bool memory_mapped_;
+ StorageType type_;
// Pointer to the GLES2 context.
ggl::Context* context_;
diff --git a/chrome/renderer/media/ipc_video_decoder.cc b/chrome/renderer/media/ipc_video_decoder.cc
index 4206380..eda8696 100644
--- a/chrome/renderer/media/ipc_video_decoder.cc
+++ b/chrome/renderer/media/ipc_video_decoder.cc
@@ -1,6 +1,6 @@
-// Copyright (c) 2010 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved. Use of this
+// source code is governed by a BSD-style license that can be found in the
+// LICENSE file.
#include "chrome/renderer/media/ipc_video_decoder.h"
@@ -17,13 +17,15 @@
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/ffmpeg_util.h"
#include "media/filters/ffmpeg_interfaces.h"
-#include "media/video/video_decode_engine.h"
IpcVideoDecoder::IpcVideoDecoder(MessageLoop* message_loop,
ggl::Context* ggl_context)
: width_(0),
height_(0),
- decode_engine_message_loop_(message_loop),
+ state_(kUnInitialized),
+ pending_reads_(0),
+ pending_requests_(0),
+ renderer_thread_message_loop_(message_loop),
ggl_context_(ggl_context) {
}
@@ -32,23 +34,30 @@ IpcVideoDecoder::~IpcVideoDecoder() {
void IpcVideoDecoder::Initialize(media::DemuxerStream* demuxer_stream,
media::FilterCallback* callback) {
- // It doesn't matter which thread we perform initialization because
- // all this method does is create objects and delegate the initialize
- // messsage.
+ if (MessageLoop::current() != renderer_thread_message_loop_) {
+ renderer_thread_message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &IpcVideoDecoder::Initialize,
+ demuxer_stream,
+ callback));
+ return;
+ }
DCHECK(!demuxer_stream_);
demuxer_stream_ = demuxer_stream;
initialize_callback_.reset(callback);
- // We require bit stream converter for hardware decoder.
+ // We require bit stream converter for openmax hardware decoder.
+ // TODO(hclam): This is a wrong place to initialize the demuxer stream's
+ // bitstream converter.
demuxer_stream->EnableBitstreamConverter();
// Get the AVStream by querying for the provider interface.
media::AVStreamProvider* av_stream_provider;
if (!demuxer_stream->QueryInterface(&av_stream_provider)) {
- host()->SetError(media::PIPELINE_ERROR_DECODE);
- callback->Run();
- delete callback;
+ GpuVideoDecoderInitDoneParam param;
+ OnInitializeDone(false, param);
return;
}
@@ -56,141 +65,296 @@ void IpcVideoDecoder::Initialize(media::DemuxerStream* demuxer_stream,
width_ = av_stream->codec->width;
height_ = av_stream->codec->height;
- // Create a video decode context that assocates with the graphics
- // context.
- decode_context_.reset(ggl::CreateVideoDecodeContext(ggl_context_, true));
+ // Switch GL context.
+ bool ret = ggl::MakeCurrent(ggl_context_);
+ DCHECK(ret) << "Failed to switch GL context";
- // Create a hardware video decoder handle.
- decode_engine_.reset(ggl::CreateVideoDecodeEngine(ggl_context_));
+ // Generate textures to be used by the hardware video decoder in the GPU
+ // process.
+ // TODO(hclam): Allocation of textures should be done based on the request
+ // of the GPU process.
+ GLuint texture;
+ glGenTextures(1, &texture);
+ glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width_, height_, 0, GL_RGBA,
+ GL_UNSIGNED_BYTE, NULL);
+ texture_ = texture;
+
+ // Create a hardware video decoder handle for IPC communication.
+ gpu_video_decoder_host_ = ggl::CreateVideoDecoder(ggl_context_);
// Initialize hardware decoder.
- media::VideoCodecConfig param;
- memset(&param, 0, sizeof(param));
+ GpuVideoDecoderInitParam param = {0};
param.width = width_;
param.height = height_;
+ if (!gpu_video_decoder_host_->Initialize(this, param)) {
+ GpuVideoDecoderInitDoneParam param;
+ OnInitializeDone(false, param);
+ }
+}
- // TODO(hclam): Move VideoDecodeEngine to IO Thread, this will avoid
- // dead lock during teardown.
- // VideoDecodeEngine will perform initialization on the message loop
- // given to it so it doesn't matter on which thread we are calling this.
- decode_engine_->Initialize(decode_engine_message_loop_, this,
- decode_context_.get(), param);
+void IpcVideoDecoder::OnInitializeDone(
+ bool success, const GpuVideoDecoderInitDoneParam& param) {
+ if (MessageLoop::current() != renderer_thread_message_loop_) {
+ renderer_thread_message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &IpcVideoDecoder::OnInitializeDone,
+ success,
+ param));
+ return;
+ }
+
+ media::AutoCallbackRunner done_runner(initialize_callback_.release());
+
+ if (success) {
+ media_format_.SetAsString(media::MediaFormat::kMimeType,
+ media::mime_type::kUncompressedVideo);
+ media_format_.SetAsInteger(media::MediaFormat::kWidth, width_);
+ media_format_.SetAsInteger(media::MediaFormat::kHeight, height_);
+ media_format_.SetAsInteger(
+ media::MediaFormat::kSurfaceType,
+ static_cast<int>(media::VideoFrame::TYPE_GL_TEXTURE));
+ state_ = kPlaying;
+ } else {
+ LOG(ERROR) << "IpcVideoDecoder initialization failed!";
+ host()->SetError(media::PIPELINE_ERROR_DECODE);
+ }
}
void IpcVideoDecoder::Stop(media::FilterCallback* callback) {
+ if (MessageLoop::current() != renderer_thread_message_loop_) {
+ renderer_thread_message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &IpcVideoDecoder::Stop,
+ callback));
+ return;
+ }
+
stop_callback_.reset(callback);
- decode_engine_->Uninitialize();
+ if (!gpu_video_decoder_host_->Uninitialize()) {
+ LOG(ERROR) << "gpu video decoder destroy failed";
+ IpcVideoDecoder::OnUninitializeDone();
+ }
+}
+
+void IpcVideoDecoder::OnUninitializeDone() {
+ if (MessageLoop::current() != renderer_thread_message_loop_) {
+ renderer_thread_message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &IpcVideoDecoder::OnUninitializeDone));
+ return;
+ }
+
+ media::AutoCallbackRunner done_runner(stop_callback_.release());
+
+ state_ = kStopped;
}
void IpcVideoDecoder::Pause(media::FilterCallback* callback) {
- // TODO(hclam): It looks like that pause is not necessary so implement this
- // later.
- callback->Run();
- delete callback;
+ Flush(callback); // TODO(jiesun): move this to flush().
}
void IpcVideoDecoder::Flush(media::FilterCallback* callback) {
+ if (MessageLoop::current() != renderer_thread_message_loop_) {
+ renderer_thread_message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &IpcVideoDecoder::Flush,
+ callback));
+ return;
+ }
+
+ state_ = kFlushing;
+
flush_callback_.reset(callback);
- decode_engine_->Flush();
+
+ if (!gpu_video_decoder_host_->Flush()) {
+ LOG(ERROR) << "gpu video decoder flush failed";
+ OnFlushDone();
+ }
+}
+
+void IpcVideoDecoder::OnFlushDone() {
+ if (MessageLoop::current() != renderer_thread_message_loop_) {
+ renderer_thread_message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &IpcVideoDecoder::OnFlushDone));
+ return;
+ }
+
+ if (pending_reads_ == 0 && pending_requests_ == 0 && flush_callback_.get()) {
+ flush_callback_->Run();
+ flush_callback_.reset();
+ }
}
void IpcVideoDecoder::Seek(base::TimeDelta time,
media::FilterCallback* callback) {
- seek_callback_.reset(callback);
- decode_engine_->Seek();
+ if (MessageLoop::current() != renderer_thread_message_loop_) {
+ renderer_thread_message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &IpcVideoDecoder::Seek,
+ time,
+ callback));
+ return;
+ }
+
+ OnSeekComplete(callback);
}
-void IpcVideoDecoder::OnInitializeComplete(const media::VideoCodecInfo& info) {
- DCHECK_EQ(decode_engine_message_loop_, MessageLoop::current());
+void IpcVideoDecoder::OnSeekComplete(media::FilterCallback* callback) {
+ if (MessageLoop::current() != renderer_thread_message_loop_) {
+ renderer_thread_message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &IpcVideoDecoder::OnSeekComplete,
+ callback));
+ return;
+ }
- if (info.success) {
- media_format_.SetAsString(media::MediaFormat::kMimeType,
- media::mime_type::kUncompressedVideo);
- media_format_.SetAsInteger(media::MediaFormat::kWidth,
- info.stream_info.surface_width);
- media_format_.SetAsInteger(media::MediaFormat::kHeight,
- info.stream_info.surface_height);
- media_format_.SetAsInteger(
- media::MediaFormat::kSurfaceType,
- static_cast<int>(media::VideoFrame::TYPE_GL_TEXTURE));
- } else {
- LOG(ERROR) << "IpcVideoDecoder initialization failed!";
- host()->SetError(media::PIPELINE_ERROR_DECODE);
+ media::AutoCallbackRunner done_runner(callback);
+
+ state_ = kPlaying;
+
+ for (int i = 0; i < 20; ++i) {
+ demuxer_stream_->Read(
+ NewCallback(this,
+ &IpcVideoDecoder::OnReadComplete));
+ ++pending_reads_;
}
+}
- initialize_callback_->Run();
- initialize_callback_.reset();
+void IpcVideoDecoder::OnReadComplete(media::Buffer* buffer) {
+ scoped_refptr<media::Buffer> buffer_ref = buffer;
+ ReadCompleteTask(buffer_ref);
}
-void IpcVideoDecoder::OnUninitializeComplete() {
- DCHECK_EQ(decode_engine_message_loop_, MessageLoop::current());
+void IpcVideoDecoder::ReadCompleteTask(
+ scoped_refptr<media::Buffer> buffer) {
+ if (MessageLoop::current() != renderer_thread_message_loop_) {
+ renderer_thread_message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &IpcVideoDecoder::ReadCompleteTask,
+ buffer));
+ return;
+ }
- // After the decode engine is uninitialized we are safe to destroy the decode
- // context. The task will add a refcount to this object so don't need to worry
- // about objects lifetime.
- decode_context_->Destroy(
- NewRunnableMethod(this, &IpcVideoDecoder::OnDestroyComplete));
+ DCHECK_GT(pending_reads_, 0u);
+ --pending_reads_;
- // We don't need to wait for destruction of decode context to complete because
- // it can happen asynchronously. This object and decode context will live until
- // the destruction task is called.
- stop_callback_->Run();
- stop_callback_.reset();
-}
+ if (state_ == kStopped || state_ == kEnded) {
+ // Just discard the input buffers
+ return;
+ }
-void IpcVideoDecoder::OnFlushComplete() {
- DCHECK_EQ(decode_engine_message_loop_, MessageLoop::current());
- flush_callback_->Run();
- flush_callback_.reset();
-}
+ if (state_ == kFlushing) {
+ if (pending_reads_ == 0 && pending_requests_ == 0) {
+ flush_callback_->Run();
+ flush_callback_.reset();
+ state_ = kPlaying;
+ }
+ return;
+ }
+ // Transition to kFlushCodec on the first end of input stream buffer.
+ if (state_ == kPlaying && buffer->IsEndOfStream()) {
+ state_ = kFlushCodec;
+ }
-void IpcVideoDecoder::OnSeekComplete() {
- DCHECK_EQ(decode_engine_message_loop_, MessageLoop::current());
- seek_callback_->Run();
- seek_callback_.reset();
+ gpu_video_decoder_host_->EmptyThisBuffer(buffer);
}
-void IpcVideoDecoder::OnError() {
- DCHECK_EQ(decode_engine_message_loop_, MessageLoop::current());
- host()->SetError(media::PIPELINE_ERROR_DECODE);
-}
+void IpcVideoDecoder::ProduceVideoFrame(scoped_refptr<VideoFrame> video_frame) {
+ if (MessageLoop::current() != renderer_thread_message_loop_) {
+ renderer_thread_message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &IpcVideoDecoder::ProduceVideoFrame,
+ video_frame));
+ return;
+ }
-// This methid is called by Demuxer after a demuxed packet is produced.
-void IpcVideoDecoder::OnReadComplete(media::Buffer* buffer) {
- decode_engine_->ConsumeVideoSample(buffer);
-}
+ // Synchronized flushing before stop should prevent this.
+ DCHECK_NE(state_, kStopped);
-void IpcVideoDecoder::OnDestroyComplete() {
- // We don't need to do anything in this method. Destruction of objects will
- // occur as soon as refcount goes to 0.
-}
+ // Notify decode engine the available of new frame.
+ ++pending_requests_;
-// This method is called by VideoRenderer. We delegate the method call to
-// VideoDecodeEngine.
-void IpcVideoDecoder::ProduceVideoFrame(
- scoped_refptr<media::VideoFrame> video_frame) {
- decode_engine_->ProduceVideoFrame(video_frame);
+ VideoFrame::GlTexture textures[3] = { texture_, 0, 0 };
+ scoped_refptr<VideoFrame> frame;
+ media::VideoFrame::CreateFrameGlTexture(
+ media::VideoFrame::RGBA, width_, height_, textures,
+ base::TimeDelta(), base::TimeDelta(), &frame);
+ gpu_video_decoder_host_->FillThisBuffer(frame);
}
-// This method is called by VideoDecodeEngine that a video frame is produced.
-// This is then passed to VideoRenderer.
-void IpcVideoDecoder::ConsumeVideoFrame(
+void IpcVideoDecoder::OnFillBufferDone(
scoped_refptr<media::VideoFrame> video_frame) {
- DCHECK(video_frame);
- VideoFrameReady(video_frame);
+ if (MessageLoop::current() != renderer_thread_message_loop_) {
+ renderer_thread_message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &IpcVideoDecoder::OnFillBufferDone,
+ video_frame));
+ return;
+ }
+
+ if (video_frame.get()) {
+ --pending_requests_;
+ VideoFrameReady(video_frame);
+ if (state_ == kFlushing && pending_reads_ == 0 && pending_requests_ == 0) {
+ CHECK(flush_callback_.get());
+ flush_callback_->Run();
+ flush_callback_.reset();
+ state_ = kPlaying;
+ }
+
+ } else {
+ if (state_ == kFlushCodec) {
+ // When in kFlushCodec, any errored decode, or a 0-lengthed frame,
+ // is taken as a signal to stop decoding.
+ state_ = kEnded;
+ scoped_refptr<VideoFrame> video_frame;
+ VideoFrame::CreateEmptyFrame(&video_frame);
+ VideoFrameReady(video_frame);
+ }
+ }
}
-// This method is called by VideoDecodeEngine to request a video frame. The
-// request is passed to demuxer.
-void IpcVideoDecoder::ProduceVideoSample(scoped_refptr<media::Buffer> buffer) {
+void IpcVideoDecoder::OnEmptyBufferDone(scoped_refptr<media::Buffer> buffer) {
+ if (MessageLoop::current() != renderer_thread_message_loop_) {
+ renderer_thread_message_loop_->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(this,
+ &IpcVideoDecoder::OnEmptyBufferDone,
+ buffer));
+ return;
+ }
+
+ // TODO(jiesun): We haven't recycle input buffer yet.
demuxer_stream_->Read(NewCallback(this, &IpcVideoDecoder::OnReadComplete));
+ ++pending_reads_;
+}
+
+void IpcVideoDecoder::OnDeviceError() {
+ host()->SetError(media::PIPELINE_ERROR_DECODE);
+}
+
+bool IpcVideoDecoder::ProvidesBuffer() {
+ return true;
}
// static
media::FilterFactory* IpcVideoDecoder::CreateFactory(
MessageLoop* message_loop, ggl::Context* ggl_context) {
- return new media::FilterFactoryImpl2<
- IpcVideoDecoder, MessageLoop*, ggl::Context*>(message_loop, ggl_context);
+ return new media::FilterFactoryImpl2<IpcVideoDecoder,
+ MessageLoop*,
+ ggl::Context*>(
+ message_loop, ggl_context);
}
// static
diff --git a/chrome/renderer/media/ipc_video_decoder.h b/chrome/renderer/media/ipc_video_decoder.h
index d5ab685..0309ccd 100644
--- a/chrome/renderer/media/ipc_video_decoder.h
+++ b/chrome/renderer/media/ipc_video_decoder.h
@@ -6,11 +6,10 @@
#define CHROME_RENDERER_MEDIA_IPC_VIDEO_DECODER_H_
#include "base/time.h"
+#include "chrome/renderer/gpu_video_service_host.h"
#include "media/base/pts_heap.h"
#include "media/base/video_frame.h"
#include "media/filters/decoder_base.h"
-#include "media/video/video_decode_engine.h"
-#include "media/video/video_decode_context.h"
struct AVRational;
@@ -19,7 +18,7 @@ class Context;
} // namespace ggl
class IpcVideoDecoder : public media::VideoDecoder,
- public media::VideoDecodeEngine::EventHandler {
+ public GpuVideoDecoderHost::EventHandler {
public:
explicit IpcVideoDecoder(MessageLoop* message_loop,
ggl::Context* ggl_context);
@@ -41,37 +40,51 @@ class IpcVideoDecoder : public media::VideoDecoder,
virtual const media::MediaFormat& media_format() { return media_format_; }
virtual void ProduceVideoFrame(scoped_refptr<media::VideoFrame> video_frame);
- // TODO(hclam): Remove this method.
- virtual bool ProvidesBuffer() { return true; }
-
- // VideoDecodeEngine::EventHandler implementation.
- virtual void OnInitializeComplete(const media::VideoCodecInfo& info);
- virtual void OnUninitializeComplete();
- virtual void OnFlushComplete();
- virtual void OnSeekComplete();
- virtual void OnError();
- virtual void OnFormatChange(media::VideoStreamInfo stream_info);
- virtual void ProduceVideoSample(scoped_refptr<media::Buffer> buffer);
- virtual void ConsumeVideoFrame(scoped_refptr<media::VideoFrame> frame);
+ // GpuVideoDecoderHost::EventHandler.
+ virtual void OnInitializeDone(bool success,
+ const GpuVideoDecoderInitDoneParam& param);
+ virtual void OnUninitializeDone();
+ virtual void OnFlushDone();
+ virtual void OnEmptyBufferDone(scoped_refptr<media::Buffer> buffer);
+ virtual void OnFillBufferDone(scoped_refptr<media::VideoFrame> frame);
+ virtual void OnDeviceError();
+ virtual bool ProvidesBuffer();
private:
+ enum DecoderState {
+ kUnInitialized,
+ kPlaying,
+ kFlushing,
+ kPausing,
+ kFlushCodec,
+ kEnded,
+ kStopped,
+ };
+
+ void OnSeekComplete(media::FilterCallback* callback);
void OnReadComplete(media::Buffer* buffer);
- void OnDestroyComplete();
+ void ReadCompleteTask(scoped_refptr<media::Buffer> buffer);
int32 width_;
int32 height_;
media::MediaFormat media_format_;
scoped_ptr<media::FilterCallback> flush_callback_;
- scoped_ptr<media::FilterCallback> seek_callback_;
scoped_ptr<media::FilterCallback> initialize_callback_;
scoped_ptr<media::FilterCallback> stop_callback_;
+ DecoderState state_;
+
+ // Tracks the number of asynchronous reads issued to |demuxer_stream_|.
+ // Using size_t since it is always compared against deque::size().
+ size_t pending_reads_;
+ // Tracks the number of asynchronous reads issued from renderer.
+ size_t pending_requests_;
+
// Pointer to the demuxer stream that will feed us compressed buffers.
scoped_refptr<media::DemuxerStream> demuxer_stream_;
- // This is the message loop that we should assign to VideoDecodeEngine.
- MessageLoop* decode_engine_message_loop_;
+ MessageLoop* renderer_thread_message_loop_;
// A context for allocating textures and issuing GLES2 commands.
// TODO(hclam): A ggl::Context lives on the Render Thread while this object
@@ -79,14 +92,14 @@ class IpcVideoDecoder : public media::VideoDecoder,
// and destruction of the context.
ggl::Context* ggl_context_;
- // This VideoDecodeEngine translate our requests to IPC commands to the
- // GPU process.
- // VideoDecodeEngine should run on IO Thread instead of Render Thread to
- // avoid dead lock during tear down of the media pipeline.
- scoped_ptr<media::VideoDecodeEngine> decode_engine_;
+ // Handle to the hardware video decoder. This object will use IPC to
+ // communicate with the decoder in the GPU process.
+ scoped_refptr<GpuVideoDecoderHost> gpu_video_decoder_host_;
- // Decoding context to be used by VideoDecodeEngine.
- scoped_ptr<media::VideoDecodeContext> decode_context_;
+ // Texture that contains the video frame.
+ // TODO(hclam): Instead of one texture, we should have a set of textures
+ // as requested by the hardware video decode engine in the GPU process.
+ unsigned int texture_;
DISALLOW_COPY_AND_ASSIGN(IpcVideoDecoder);
};