author     hclam@chromium.org <hclam@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2010-09-17 12:49:26 +0000
committer  hclam@chromium.org <hclam@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2010-09-17 12:49:26 +0000
commit     e65efc1a7a29d16b3baa0839ecc88c26663d88af (patch)
tree       2075dbaf6e60fc1f2eaf4c35459b9366e104fdb9 /chrome
parent     9b98f084607bc1e9185be7f29b52f1e4c0fa498d (diff)
Implement FakeGlVideoDecodeEngine using FakeGlVideoDecodeContext
Defines UploadToVideoFrame in VideoDecodeContext.
FakeGlVideoDecodeEngine now uses FakeGlVideoDecodeContext for video frame
allocation and uploading.
BUG=53714
TEST=Tree is green
Review URL: http://codereview.chromium.org/3312022
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@59785 0039d316-1c4b-4281-b951-d872f2087c98
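
As a quick reference, the decode-engine-facing hook this change adds can be summarized as follows. The UploadToVideoFrame() signature is taken from the diff below; the class name VideoDecodeContextSketch and the comments are illustrative only, not the committed media::VideoDecodeContext header.

#include "base/ref_counted.h"
#include "base/task.h"
#include "media/base/video_frame.h"

// A minimal sketch of the new upload hook, assuming the 2010-era Chromium
// base/ and media/ headers above.
class VideoDecodeContextSketch {
 public:
  virtual ~VideoDecodeContextSketch() {}

  // Upload the device specific |buffer| produced by a VideoDecodeEngine into
  // |frame| (typically a GL-texture-backed VideoFrame) and run |task| once
  // the upload has completed; the context takes ownership of |task|.
  virtual void UploadToVideoFrame(void* buffer,
                                  scoped_refptr<media::VideoFrame> frame,
                                  Task* task) = 0;
};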
Diffstat (limited to 'chrome')
-rw-r--r--  chrome/chrome.gyp                                    |  2
-rw-r--r--  chrome/gpu/gpu_video_decoder.cc                      | 40
-rw-r--r--  chrome/gpu/gpu_video_decoder.h                       | 21
-rw-r--r--  chrome/gpu/media/fake_gl_video_decode_engine.cc      | 50
-rw-r--r--  chrome/gpu/media/fake_gl_video_decode_engine.h       | 20
-rw-r--r--  chrome/gpu/media/fake_gl_video_device.cc             | 60
-rw-r--r--  chrome/gpu/media/fake_gl_video_device.h              | 27
-rw-r--r--  chrome/gpu/media/gpu_video_device.h                  | 17
-rw-r--r--  chrome/renderer/media/gles2_video_decode_context.cc  |  5
-rw-r--r--  chrome/renderer/media/gles2_video_decode_context.h   |  5

10 files changed, 216 insertions(+), 31 deletions(-)
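
Before the raw diff, it may help to see the key new GPU-process code path in one place. The following is a lightly commented restatement of GpuVideoDecoder::UploadToVideoFrame() from the gpu_video_decoder.cc hunk below; the statements mirror the committed code, while the comments are editorial.

void GpuVideoDecoder::UploadToVideoFrame(void* buffer,
                                         scoped_refptr<media::VideoFrame> frame,
                                         Task* task) {
  // The GL texture backing |frame| lives in the decoder's GLES2 context, so
  // that context must be current before any GL work happens.
  bool ret = gles2_decoder_->MakeCurrent();
  DCHECK(ret) << "Failed to switch context";

  // GpuVideoDevice hides the platform specific copy; for FakeGlVideoDevice
  // this ends up as a glTexImage2D() of an RGBA buffer.
  ret = video_device_->UploadToVideoFrame(buffer, frame);
  DCHECK(ret) << "Failed to upload video content to a VideoFrame.";

  // Notify the caller (the decode engine) that the upload has finished.
  task->Run();
  delete task;
}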
diff --git a/chrome/chrome.gyp b/chrome/chrome.gyp
index d561e3a..b9ede11 100644
--- a/chrome/chrome.gyp
+++ b/chrome/chrome.gyp
@@ -721,6 +721,8 @@
         'gpu/media/gpu_video_device.h',
         'gpu/media/fake_gl_video_decode_engine.cc',
         'gpu/media/fake_gl_video_decode_engine.h',
+        'gpu/media/fake_gl_video_device.cc',
+        'gpu/media/fake_gl_video_device.h',
       ],
       'include_dirs': [
         '..',
diff --git a/chrome/gpu/gpu_video_decoder.cc b/chrome/gpu/gpu_video_decoder.cc
index eb1823a..bf5881ed 100644
--- a/chrome/gpu/gpu_video_decoder.cc
+++ b/chrome/gpu/gpu_video_decoder.cc
@@ -4,9 +4,11 @@
 
 #include "chrome/gpu/gpu_video_decoder.h"
 
+#include "chrome/common/child_thread.h"
 #include "chrome/common/gpu_messages.h"
 #include "chrome/gpu/gpu_channel.h"
 #include "chrome/gpu/media/fake_gl_video_decode_engine.h"
+#include "chrome/gpu/media/fake_gl_video_device.h"
 #include "media/base/data_buffer.h"
 #include "media/base/video_frame.h"
 
@@ -110,6 +112,10 @@ void GpuVideoDecoder::ConsumeVideoFrame(scoped_refptr<VideoFrame> frame) {
 }
 
 void* GpuVideoDecoder::GetDevice() {
+  bool ret = gles2_decoder_->MakeCurrent();
+  DCHECK(ret) << "Failed to switch context";
+
+  // Simply delegate the method call to GpuVideoDevice.
   return video_device_->GetDevice();
 }
 
@@ -155,6 +161,9 @@ void GpuVideoDecoder::ReleaseAllVideoFrames() {
   //
   // And finally we'll send IPC commands to IpcVideoDecoder to destroy all
   // GL textures generated.
+  bool ret = gles2_decoder_->MakeCurrent();
+  DCHECK(ret) << "Failed to switch context";
+
   for (VideoFrameMap::iterator i = video_frame_map_.begin();
        i != video_frame_map_.end(); ++i) {
     video_device_->ReleaseVideoFrame(i->second);
@@ -163,6 +172,22 @@ void GpuVideoDecoder::ReleaseAllVideoFrames() {
   SendReleaseAllVideoFrames();
 }
 
+void GpuVideoDecoder::UploadToVideoFrame(void* buffer,
+                                         scoped_refptr<media::VideoFrame> frame,
+                                         Task* task) {
+  // This method is called by VideoDecodeEngine to upload a buffer to a
+  // VideoFrame. We should just delegate this to GpuVideoDevice which contains
+  // the actual implementation.
+  bool ret = gles2_decoder_->MakeCurrent();
+  DCHECK(ret) << "Failed to switch context";
+
+  // Do the actual upload on the main thread.
+  ret = video_device_->UploadToVideoFrame(buffer, frame);
+  DCHECK(ret) << "Failed to upload video content to a VideoFrame.";
+  task->Run();
+  delete task;
+}
+
 void GpuVideoDecoder::Destroy(Task* task) {
   // TODO(hclam): I still need to think what I should do here.
 }
@@ -183,17 +208,17 @@ GpuVideoDecoder::GpuVideoDecoder(
   // TODO(jiesun): find a better way to determine which VideoDecodeEngine
   // to return on current platform.
   decode_engine_.reset(new FakeGlVideoDecodeEngine());
+  video_device_.reset(new FakeGlVideoDevice());
 }
 
 void GpuVideoDecoder::OnInitialize(const GpuVideoDecoderInitParam& param) {
   // TODO(hclam): Initialize the VideoDecodeContext first.
-
   // TODO(jiesun): codec id should come from |param|.
   config_.codec = media::kCodecH264;
   config_.width = param.width;
   config_.height = param.height;
   config_.opaque_context = NULL;
-  decode_engine_->Initialize(NULL, this, config_);
+  decode_engine_->Initialize(NULL, this, this, config_);
 }
 
 void GpuVideoDecoder::OnUninitialize() {
@@ -224,8 +249,6 @@ void GpuVideoDecoder::OnEmptyThisBuffer(
 void GpuVideoDecoder::OnFillThisBuffer(
     const GpuVideoDecoderOutputBufferParam& param) {
   // Switch context before calling to the decode engine.
-  // TODO(hclam): This is temporary to allow FakeGlVideoDecodeEngine to issue
-  // GL commands correctly.
   bool ret = gles2_decoder_->MakeCurrent();
   DCHECK(ret) << "Failed to switch context";
 
@@ -252,19 +275,18 @@ void GpuVideoDecoder::OnVideoFrameAllocated(int32 frame_id,
   // GpuVideoDevice. The VideoFrame created is added to the internal map.
   // If we have generated enough VideoFrame, we call |allocation_callback_| to
   // complete the allocation process.
-  media::VideoFrame::GlTexture gl_textures[media::VideoFrame::kMaxPlanes];
-  memset(gl_textures, 0, sizeof(gl_textures));
   for (size_t i = 0; i < textures.size(); ++i) {
+    media::VideoFrame::GlTexture gl_texture;
     // Translate the client texture id to service texture id.
-    bool ret = gles2_decoder_->GetServiceTextureId(textures[i],
-                                                   gl_textures + i);
+    bool ret = gles2_decoder_->GetServiceTextureId(textures[i], &gl_texture);
     DCHECK(ret) << "Cannot translate client texture ID to service ID";
+    textures[i] = gl_texture;
   }
 
   scoped_refptr<media::VideoFrame> frame;
   bool ret = video_device_->CreateVideoFrameFromGlTextures(
       pending_allocation_->width, pending_allocation_->height,
-      pending_allocation_->format, gl_textures, &frame);
+      pending_allocation_->format, textures, &frame);
   DCHECK(ret) << "Failed to allocate VideoFrame from GL textures.";
   pending_allocation_->frames->push_back(frame);
 
diff --git a/chrome/gpu/gpu_video_decoder.h b/chrome/gpu/gpu_video_decoder.h
index 9f77700..d4c9b09 100644
--- a/chrome/gpu/gpu_video_decoder.h
+++ b/chrome/gpu/gpu_video_decoder.h
@@ -39,7 +39,7 @@ class GpuChannel;
 // In addition to delegating video related commands to VideoDecodeEngine it
 // has the following important functions:
 //
-// Buffer Allocation
+// BUFFER ALLOCATION
 //
 // VideoDecodeEngine requires platform specific video frame buffer to operate.
 // In order to abstract the platform specific bits GpuVideoDecoderContext is
@@ -66,12 +66,20 @@ class GpuChannel;
 // VideoFrame(s) from the textures.
 // 6. GpuVideoDecoder sends the VideoFrame(s) generated to VideoDecodeEngine.
 //
-// Buffer Translation
+// BUFFER UPLOADING
+//
+// A VideoDecodeEngine always produces device specific buffers. In order to
+// use them in Chrome we always upload them to GL textures. The upload step is
+// different on each platform and each subsystem. We perform these special
+// upload steps by using a GpuVideoDevice, which is written for each
+// VideoDecodeEngine.
+//
+// BUFFER MAPPING
 //
 // GpuVideoDecoder will be working with VideoDecodeEngine, they exchange
-// buffers that are only meaningful to VideoDecodeEngine. In order to translate
-// that to something we can transport in the IPC channel we need a mapping
-// between VideoFrame and buffer ID known between GpuVideoDecoder and
+// buffers that are only meaningful to VideoDecodeEngine. In order to map that
+// to something we can transport in the IPC channel we need a mapping between
+// VideoFrame and buffer ID known between GpuVideoDecoder and
 // GpuVideoDecoderHost in the Renderer process.
 //
 // After texture allocation and VideoFrame allocation are done, GpuVideoDecoder
@@ -105,6 +113,9 @@ class GpuVideoDecoder
       int n, size_t width, size_t height, media::VideoFrame::Format format,
       std::vector<scoped_refptr<media::VideoFrame> >* frames, Task* task);
   virtual void ReleaseAllVideoFrames();
+  virtual void UploadToVideoFrame(void* buffer,
+                                  scoped_refptr<media::VideoFrame> frame,
+                                  Task* task);
   virtual void Destroy(Task* task);
 
   // Constructor and destructor.
diff --git a/chrome/gpu/media/fake_gl_video_decode_engine.cc b/chrome/gpu/media/fake_gl_video_decode_engine.cc
index 52e2dfd..b3d093f 100644
--- a/chrome/gpu/media/fake_gl_video_decode_engine.cc
+++ b/chrome/gpu/media/fake_gl_video_decode_engine.cc
@@ -4,7 +4,8 @@
 
 #include "chrome/gpu/media/fake_gl_video_decode_engine.h"
 
-#include "app/gfx/gl/gl_bindings.h"
+#include "media/base/video_frame.h"
+#include "media/video/video_decode_context.h"
 
 FakeGlVideoDecodeEngine::FakeGlVideoDecodeEngine()
     : width_(0),
@@ -18,11 +19,33 @@ FakeGlVideoDecodeEngine::~FakeGlVideoDecodeEngine() {
 void FakeGlVideoDecodeEngine::Initialize(
     MessageLoop* message_loop,
     media::VideoDecodeEngine::EventHandler* event_handler,
+    media::VideoDecodeContext* context,
     const media::VideoCodecConfig& config) {
   handler_ = event_handler;
+  context_ = context;
   width_ = config.width;
   height_ = config.height;
 
+  // Create an internal VideoFrame that we can write to. This is going to be
+  // uploaded through VideoDecodeContext.
+  media::VideoFrame::CreateFrame(
+      media::VideoFrame::RGBA, width_, height_, base::TimeDelta(),
+      base::TimeDelta(), &internal_frame_);
+  memset(internal_frame_->data(media::VideoFrame::kRGBPlane), 0,
+         height_ * internal_frame_->stride(media::VideoFrame::kRGBPlane));
+
+  // Use VideoDecodeContext to allocate VideoFrames that can be consumed by
+  // an external component.
+  context_->AllocateVideoFrames(
+      1, width_, height_, media::VideoFrame::RGBA, &external_frames_,
+      NewRunnableMethod(this,
+                        &FakeGlVideoDecodeEngine::AllocationCompleteTask));
+}
+
+void FakeGlVideoDecodeEngine::AllocationCompleteTask() {
+  DCHECK_EQ(1u, external_frames_.size());
+  DCHECK_EQ(media::VideoFrame::TYPE_GL_TEXTURE, external_frames_[0]->type());
+
   media::VideoCodecInfo info;
   info.success = true;
   info.provides_buffers = true;
@@ -30,9 +53,6 @@ void FakeGlVideoDecodeEngine::Initialize(
   info.stream_info.surface_type = media::VideoFrame::TYPE_GL_TEXTURE;
   info.stream_info.surface_width = width_;
   info.stream_info.surface_height = height_;
-
-  // TODO(hclam): When we have VideoDecodeContext we should use it to allocate
-  // video frames.
   handler_->OnInitializeComplete(info);
 }
 
@@ -62,7 +82,7 @@ void FakeGlVideoDecodeEngine::ProduceVideoFrame(
   scoped_array<uint8> buffer(new uint8[size]);
   memset(buffer.get(), 0, size);
 
-  uint8* row = buffer.get();
+  uint8* row = internal_frame_->data(media::VideoFrame::kRGBPlane);
   static int seed = 0;
 
   for (int y = 0; y < height_; ++y) {
@@ -75,14 +95,18 @@ void FakeGlVideoDecodeEngine::ProduceVideoFrame(
   }
   ++seed;
 
-  // Assume we are in the right context and then upload the content to the
-  // texture.
-  glBindTexture(GL_TEXTURE_2D,
-                frame->gl_texture(media::VideoFrame::kRGBPlane));
-  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width_, height_, 0, GL_RGBA,
-               GL_UNSIGNED_BYTE, buffer.get());
+  // After we have filled the content upload the internal frame to the
+  // VideoFrame allocated through VideoDecodeContext.
+  context_->UploadToVideoFrame(
+      internal_frame_, external_frames_[0],
+      NewRunnableMethod(this, &FakeGlVideoDecodeEngine::UploadCompleteTask,
+                        external_frames_[0]));
+}
 
-  // We have done generating data to the frame so give it to the handler.
-  // TODO(hclam): Advance the timestamp every time we call this method.
+void FakeGlVideoDecodeEngine::UploadCompleteTask(
+    scoped_refptr<media::VideoFrame> frame) {
+  // |frame| is the upload target. We can immediately send this frame out.
   handler_->ConsumeVideoFrame(frame);
 }
+
+DISABLE_RUNNABLE_METHOD_REFCOUNT(FakeGlVideoDecodeEngine);
diff --git a/chrome/gpu/media/fake_gl_video_decode_engine.h b/chrome/gpu/media/fake_gl_video_decode_engine.h
index c3eeb3e..164c8c4 100644
--- a/chrome/gpu/media/fake_gl_video_decode_engine.h
+++ b/chrome/gpu/media/fake_gl_video_decode_engine.h
@@ -5,10 +5,13 @@
 #ifndef CHROME_GPU_MEDIA_FAKE_GL_VIDEO_DECODE_ENGINE_H_
 #define CHROME_GPU_MEDIA_FAKE_GL_VIDEO_DECODE_ENGINE_H_
 
+#include <vector>
+
 #include "base/scoped_ptr.h"
 #include "media/video/video_decode_engine.h"
 
 namespace media {
+class VideoDecodeContext;
 class VideoFrame;
 }  // namespace media
 
@@ -20,6 +23,7 @@ class FakeGlVideoDecodeEngine : public media::VideoDecodeEngine {
   virtual void Initialize(
       MessageLoop* message_loop,
       media::VideoDecodeEngine::EventHandler* event_handler,
+      media::VideoDecodeContext* context,
       const media::VideoCodecConfig& config);
 
   virtual void Uninitialize();
@@ -29,9 +33,25 @@ class FakeGlVideoDecodeEngine : public media::VideoDecodeEngine {
   virtual void ProduceVideoFrame(scoped_refptr<media::VideoFrame> frame);
 
  private:
+  // This method is called when video frames allocation is completed by
+  // VideoDecodeContext.
+  void AllocationCompleteTask();
+
+  // This method is called by VideoDecodeContext when uploading to a VideoFrame
+  // has completed.
+  void UploadCompleteTask(scoped_refptr<media::VideoFrame> frame);
+
   int width_;
   int height_;
   media::VideoDecodeEngine::EventHandler* handler_;
+  media::VideoDecodeContext* context_;
+
+  // Internal video frame that is to be uploaded through VideoDecodeContext.
+  scoped_refptr<media::VideoFrame> internal_frame_;
+
+  // VideoFrame(s) allocated through VideoDecodeContext. These frames are
+  // opaque to us and we need an extra upload step.
+  std::vector<scoped_refptr<media::VideoFrame> > external_frames_;
 
   DISALLOW_COPY_AND_ASSIGN(FakeGlVideoDecodeEngine);
 };
diff --git a/chrome/gpu/media/fake_gl_video_device.cc b/chrome/gpu/media/fake_gl_video_device.cc
new file mode 100644
index 0000000..df1b5b1
--- /dev/null
+++ b/chrome/gpu/media/fake_gl_video_device.cc
@@ -0,0 +1,60 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "chrome/gpu/media/fake_gl_video_device.h"
+
+#include "app/gfx/gl/gl_bindings.h"
+#include "media/base/video_frame.h"
+
+void* FakeGlVideoDevice::GetDevice() {
+  // No actual hardware device should be used.
+  return NULL;
+}
+
+bool FakeGlVideoDevice::CreateVideoFrameFromGlTextures(
+    size_t width, size_t height, media::VideoFrame::Format format,
+    const std::vector<media::VideoFrame::GlTexture>& textures,
+    scoped_refptr<media::VideoFrame>* frame) {
+  media::VideoFrame::GlTexture texture_array[media::VideoFrame::kMaxPlanes];
+  memset(texture_array, 0, sizeof(texture_array));
+
+  for (size_t i = 0; i < textures.size(); ++i) {
+    texture_array[i] = textures[i];
+  }
+
+  media::VideoFrame::CreateFrameGlTexture(format,
+                                          width,
+                                          height,
+                                          texture_array,
+                                          base::TimeDelta(),
+                                          base::TimeDelta(),
+                                          frame);
+  return *frame != NULL;
+}
+
+void FakeGlVideoDevice::ReleaseVideoFrame(
+    const scoped_refptr<media::VideoFrame>& frame) {
+  // We don't need to do anything here because we didn't allocate any resources
+  // for the VideoFrame(s) generated.
+}
+
+bool FakeGlVideoDevice::UploadToVideoFrame(
+    void* buffer, scoped_refptr<media::VideoFrame> frame) {
+  // Assume we are in the right context and then upload the content to the
+  // texture.
+  glBindTexture(GL_TEXTURE_2D,
+                frame->gl_texture(media::VideoFrame::kRGBPlane));
+
+  // |buffer| is also a VideoFrame.
+  scoped_refptr<media::VideoFrame> frame_to_upload(
+      reinterpret_cast<media::VideoFrame*>(buffer));
+  DCHECK_EQ(frame->width(), frame_to_upload->width());
+  DCHECK_EQ(frame->height(), frame_to_upload->height());
+  DCHECK_EQ(frame->format(), frame_to_upload->format());
+  glTexImage2D(
+      GL_TEXTURE_2D, 0, GL_RGBA, frame_to_upload->width(),
+      frame_to_upload->height(), 0, GL_RGBA,
+      GL_UNSIGNED_BYTE, frame_to_upload->data(media::VideoFrame::kRGBPlane));
+  return true;
+}
diff --git a/chrome/gpu/media/fake_gl_video_device.h b/chrome/gpu/media/fake_gl_video_device.h
new file mode 100644
index 0000000..711c3ef
--- /dev/null
+++ b/chrome/gpu/media/fake_gl_video_device.h
@@ -0,0 +1,27 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CHROME_GPU_MEDIA_FAKE_GL_VIDEO_DEVICE_H_
+#define CHROME_GPU_MEDIA_FAKE_GL_VIDEO_DEVICE_H_
+
+#include "chrome/gpu/media/gpu_video_device.h"
+
+// A simple GpuVideoDevice that creates VideoFrames with GL textures.
+// It uploads frames in RGBA format in system memory to the GL texture.
+class FakeGlVideoDevice : public GpuVideoDevice {
+ public:
+  virtual ~FakeGlVideoDevice() {}
+
+  virtual void* GetDevice();
+  virtual bool CreateVideoFrameFromGlTextures(
+      size_t width, size_t height, media::VideoFrame::Format format,
+      const std::vector<media::VideoFrame::GlTexture>& textures,
+      scoped_refptr<media::VideoFrame>* frame);
+  virtual void ReleaseVideoFrame(
+      const scoped_refptr<media::VideoFrame>& frame);
+  virtual bool UploadToVideoFrame(void* buffer,
+                                  scoped_refptr<media::VideoFrame> frame);
+};
+
+#endif  // CHROME_GPU_MEDIA_FAKE_GL_VIDEO_DEVICE_H_
diff --git a/chrome/gpu/media/gpu_video_device.h b/chrome/gpu/media/gpu_video_device.h
index 7998070..0556903 100644
--- a/chrome/gpu/media/gpu_video_device.h
+++ b/chrome/gpu/media/gpu_video_device.h
@@ -5,6 +5,8 @@
 #ifndef CHROME_GPU_MEDIA_GPU_VIDEO_DEVICE_H_
 #define CHROME_GPU_MEDIA_GPU_VIDEO_DEVICE_H_
 
+#include <vector>
+
 #include "media/base/video_frame.h"
 #include "media/video/video_decode_context.h"
 
@@ -31,16 +33,25 @@ class GpuVideoDevice {
   //
   // VideoFrame generated is used by VideoDecodeEngine for output buffer.
   //
-  // |frames| will contain the set of VideoFrame(s) generated.
+  // |frame| will contain the VideoFrame generated.
   //
   // Return true if the operation was successful.
   virtual bool CreateVideoFrameFromGlTextures(
       size_t width, size_t height, media::VideoFrame::Format format,
-      media::VideoFrame::GlTexture const* textures,
+      const std::vector<media::VideoFrame::GlTexture>& textures,
      scoped_refptr<media::VideoFrame>* frame) = 0;
 
   // Release VideoFrame generated.
-  virtual void ReleaseVideoFrame(scoped_refptr<media::VideoFrame> frame) = 0;
+  virtual void ReleaseVideoFrame(
+      const scoped_refptr<media::VideoFrame>& frame) = 0;
+
+  // Upload a device specific buffer to a VideoFrame object that can be used in
+  // the GPU process.
+  //
+  // Return true if successful.
+  // TODO(hclam): Rename this to ConvertToVideoFrame().
+  virtual bool UploadToVideoFrame(void* buffer,
+                                  scoped_refptr<media::VideoFrame> frame) = 0;
 };
 
 #endif  // CHROME_GPU_MEDIA_GPU_VIDEO_DEVICE_H_
diff --git a/chrome/renderer/media/gles2_video_decode_context.cc b/chrome/renderer/media/gles2_video_decode_context.cc
index 2c04282..b7eec02 100644
--- a/chrome/renderer/media/gles2_video_decode_context.cc
+++ b/chrome/renderer/media/gles2_video_decode_context.cc
@@ -29,6 +29,11 @@ void Gles2VideoDecodeContext::ReleaseAllVideoFrames() {
   // TODO(hclam): Implement.
 }
 
+void Gles2VideoDecodeContext::UploadToVideoFrame(
+    void* buffer, scoped_refptr<media::VideoFrame> frame, Task* task) {
+  // TODO(hclam): Implement.
+}
+
 void Gles2VideoDecodeContext::Destroy(Task* task) {
   // TODO(hclam): Implement.
 }
diff --git a/chrome/renderer/media/gles2_video_decode_context.h b/chrome/renderer/media/gles2_video_decode_context.h
index 35958b6..4f556ab 100644
--- a/chrome/renderer/media/gles2_video_decode_context.h
+++ b/chrome/renderer/media/gles2_video_decode_context.h
@@ -99,7 +99,10 @@ class Gles2VideoDecodeContext : public media::VideoDecodeContext {
       int n, size_t width, size_t height, media::VideoFrame::Format format,
       std::vector<scoped_refptr<media::VideoFrame> >* frames, Task* task);
   virtual void ReleaseAllVideoFrames();
-  virtual void Destroy(Task* task) = 0;
+  virtual void UploadToVideoFrame(void* buffer,
+                                  scoped_refptr<media::VideoFrame> frame,
+                                  Task* task);
+  virtual void Destroy(Task* task);
 
   //--------------------------------------------------------------------------
   // Any thread