author    hclam@chromium.org <hclam@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2010-09-29 02:23:31 +0000
committer hclam@chromium.org <hclam@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2010-09-29 02:23:31 +0000
commit    f37619e85e12e0692fe106e1f6b3a8ac6267fc74 (patch)
tree      bc9b9737feaea32c90d3e137c787ce7661b43e77 /chrome
parent    91bfaaef281c73e3792a3c96df37920c8a5370a5 (diff)
Implement video frame exchange in GpuVideoDecoder and tests
Implement ProduceVideoFrame() in GpuVideoDecoder and unit tests for testing the logic in GpuVideoDecoder.

BUG=53714
TEST=unit_tests --gtest_filter=GpuVideoDecoder*

Review URL: http://codereview.chromium.org/3414003

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@60902 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'chrome')
-rw-r--r--  chrome/chrome_tests.gypi                     1
-rw-r--r--  chrome/common/gpu_messages_internal.h       28
-rw-r--r--  chrome/common/gpu_video_common.cc           29
-rw-r--r--  chrome/common/gpu_video_common.h            30
-rw-r--r--  chrome/gpu/gpu_video_decoder.cc            109
-rw-r--r--  chrome/gpu/gpu_video_decoder.h              31
-rw-r--r--  chrome/gpu/gpu_video_decoder_unittest.cc   267
-rw-r--r--  chrome/gpu/gpu_video_service.cc              3
-rw-r--r--  chrome/renderer/gpu_video_decoder_host.cc   27
-rw-r--r--  chrome/renderer/gpu_video_decoder_host.h     3
10 files changed, 378 insertions, 150 deletions
diff --git a/chrome/chrome_tests.gypi b/chrome/chrome_tests.gypi
index 3651641..f020871 100644
--- a/chrome/chrome_tests.gypi
+++ b/chrome/chrome_tests.gypi
@@ -1366,6 +1366,7 @@
'common/zip_unittest.cc',
'gpu/gpu_idirect3d9_mock_win.h',
'gpu/gpu_info_unittest_win.cc',
+ 'gpu/gpu_video_decoder_unittest.cc',
'renderer/audio_message_filter_unittest.cc',
'renderer/extensions/extension_api_json_validity_unittest.cc',
'renderer/extensions/extension_renderer_info_unittest.cc',
diff --git a/chrome/common/gpu_messages_internal.h b/chrome/common/gpu_messages_internal.h
index 0121e16..7b2c44a 100644
--- a/chrome/common/gpu_messages_internal.h
+++ b/chrome/common/gpu_messages_internal.h
@@ -297,14 +297,9 @@ IPC_BEGIN_MESSAGES(GpuVideoDecoder)
IPC_MESSAGE_ROUTED1(GpuVideoDecoderMsg_EmptyThisBuffer,
GpuVideoDecoderInputBufferParam)
- // Require output buffer from GpuVideoDecoder.
- IPC_MESSAGE_ROUTED1(GpuVideoDecoderMsg_FillThisBuffer,
- GpuVideoDecoderOutputBufferParam)
-
- // GpuVideoDecoderHost has consumed the output buffer.
- // NOTE: this may only useful for copy back solution
- // where output transfer buffer had to be guarded.
- IPC_MESSAGE_ROUTED0(GpuVideoDecoderMsg_FillThisBufferDoneACK)
+ // Ask the GPU process to produce a video frame with the ID.
+ IPC_MESSAGE_ROUTED1(GpuVideoDecoderMsg_ProduceVideoFrame,
+ int32) /* Video Frame ID */
// Sent from Renderer process to the GPU process to notify that textures are
// generated for a video frame.
@@ -334,16 +329,19 @@ IPC_BEGIN_MESSAGES(GpuVideoDecoderHost)
// GpuVideoDecoder require new input buffer.
IPC_MESSAGE_ROUTED0(GpuVideoDecoderHostMsg_EmptyThisBufferDone)
- // GpuVideoDecoder report output buffer ready.
- IPC_MESSAGE_ROUTED1(GpuVideoDecoderHostMsg_FillThisBufferDone,
- GpuVideoDecoderOutputBufferParam)
+ // GpuVideoDecoder reports that a video frame is ready to be consumed.
+ IPC_MESSAGE_ROUTED4(GpuVideoDecoderHostMsg_ConsumeVideoFrame,
+ int32, /* Video Frame ID */
+ int64, /* Timestamp in ms */
+ int64, /* Duration in ms */
+ int32) /* Flags */
// Allocate video frames for output of the hardware video decoder.
IPC_MESSAGE_ROUTED4(GpuVideoDecoderHostMsg_AllocateVideoFrames,
- int32, /* Numer of video frames to generate */
- int32, /* Width of the video frame */
- int32, /* Height of the video frame */
- media::VideoFrame::Format /* Format of the video frame */)
+ int32, /* Numer of video frames to generate */
+ uint32, /* Width of the video frame */
+ uint32, /* Height of the video frame */
+ int32 /* Format of the video frame */)
// Release all video frames allocated for a hardware video decoder.
IPC_MESSAGE_ROUTED0(GpuVideoDecoderHostMsg_ReleaseAllVideoFrames)
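
The hunk above replaces the FillThisBuffer/FillThisBufferDoneACK pair with a frame-ID based request/reply: the renderer asks the GPU process to produce a specific frame, and the GPU process answers with ConsumeVideoFrame carrying that frame's ID, timing, and flags. (The message comments label the timestamp and duration as milliseconds, but both endpoints in this patch pass microsecond values.) The standalone C++ sketch below models that round trip with plain structs instead of the Chromium IPC macros; all type and function names here are illustrative stand-ins, not the real message types.

// Minimal standalone model of the ProduceVideoFrame / ConsumeVideoFrame
// round trip. This deliberately avoids the Chromium IPC machinery; the
// structs and names are illustrative only.
#include <cstdint>
#include <cstdio>
#include <map>

// Renderer -> GPU: "decode into the frame registered under this ID".
struct ProduceVideoFrameMsg {
  int32_t frame_id;
};

// GPU -> Renderer: "the frame with this ID is ready to be consumed".
struct ConsumeVideoFrameMsg {
  int32_t frame_id;
  int64_t timestamp_us;
  int64_t duration_us;
  int32_t flags;  // e.g. end-of-stream bit.
};

// Toy GPU-side decoder: owns a frame-ID -> decoded-data mapping.
class ToyGpuDecoder {
 public:
  void RegisterFrame(int32_t id) { frames_[id] = 0; }

  // Handle a produce request and synchronously emit the reply.
  ConsumeVideoFrameMsg OnProduceVideoFrame(const ProduceVideoFrameMsg& msg) {
    frames_[msg.frame_id] += 1;  // Pretend we decoded into this frame.
    return ConsumeVideoFrameMsg{msg.frame_id, /*timestamp_us=*/0,
                                /*duration_us=*/33000, /*flags=*/0};
  }

 private:
  std::map<int32_t, int> frames_;
};

int main() {
  ToyGpuDecoder decoder;
  decoder.RegisterFrame(10);
  ConsumeVideoFrameMsg reply = decoder.OnProduceVideoFrame({10});
  std::printf("frame %d ready, duration %lld us\n",
              static_cast<int>(reply.frame_id),
              static_cast<long long>(reply.duration_us));
  return 0;
}
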
diff --git a/chrome/common/gpu_video_common.cc b/chrome/common/gpu_video_common.cc
index 5c2e9257..556e385 100644
--- a/chrome/common/gpu_video_common.cc
+++ b/chrome/common/gpu_video_common.cc
@@ -131,35 +131,6 @@ void ParamTraits<GpuVideoDecoderInputBufferParam>::Log(
///////////////////////////////////////////////////////////////////////////////
-void ParamTraits<GpuVideoDecoderOutputBufferParam>::Write(
- Message* m, const GpuVideoDecoderOutputBufferParam& p) {
- WriteParam(m, p.frame_id);
- WriteParam(m, p.timestamp);
- WriteParam(m, p.duration);
- WriteParam(m, p.flags);
-}
-
-bool ParamTraits<GpuVideoDecoderOutputBufferParam>::Read(
- const Message* m, void** iter, GpuVideoDecoderOutputBufferParam* r) {
- if (!ReadParam(m, iter, &r->frame_id) ||
- !ReadParam(m, iter, &r->timestamp) ||
- !ReadParam(m, iter, &r->duration) ||
- !ReadParam(m, iter, &r->flags))
- return false;
- return true;
-}
-
-void ParamTraits<GpuVideoDecoderOutputBufferParam>::Log(
- const GpuVideoDecoderOutputBufferParam& p, std::string* l) {
- l->append(StringPrintf("(%d %d %d %x)",
- p.frame_id,
- static_cast<int>(p.timestamp),
- static_cast<int>(p.duration),
- p.flags));
-}
-
-///////////////////////////////////////////////////////////////////////////////
-
void ParamTraits<GpuVideoDecoderErrorInfoParam>::Write(
Message* m, const GpuVideoDecoderErrorInfoParam& p) {
WriteParam(m, p.error_id);
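
The ParamTraits specialization removed above follows the standard Write/Read/Log contract: Read() must pull fields back in exactly the order Write() emitted them. The hypothetical sketch below reproduces that mirroring on a plain byte vector rather than an IPC::Message, just to make the ordering invariant concrete; it is not Chromium code.

// Standalone illustration of the Write/Read mirroring that ParamTraits
// relies on: fields are serialized and deserialized in the same order.
#include <cstdint>
#include <cstring>
#include <vector>

struct OutputBufferParam {
  int32_t frame_id;
  int64_t timestamp;
  int64_t duration;
  int32_t flags;
};

template <typename T>
void WriteField(std::vector<uint8_t>* buf, const T& value) {
  const uint8_t* p = reinterpret_cast<const uint8_t*>(&value);
  buf->insert(buf->end(), p, p + sizeof(T));
}

template <typename T>
bool ReadField(const std::vector<uint8_t>& buf, size_t* offset, T* value) {
  if (*offset + sizeof(T) > buf.size())
    return false;
  std::memcpy(value, buf.data() + *offset, sizeof(T));
  *offset += sizeof(T);
  return true;
}

void Write(std::vector<uint8_t>* buf, const OutputBufferParam& p) {
  WriteField(buf, p.frame_id);
  WriteField(buf, p.timestamp);
  WriteField(buf, p.duration);
  WriteField(buf, p.flags);
}

bool Read(const std::vector<uint8_t>& buf, OutputBufferParam* r) {
  size_t offset = 0;
  return ReadField(buf, &offset, &r->frame_id) &&
         ReadField(buf, &offset, &r->timestamp) &&
         ReadField(buf, &offset, &r->duration) &&
         ReadField(buf, &offset, &r->flags);
}

int main() {
  std::vector<uint8_t> buf;
  Write(&buf, OutputBufferParam{1, 40000, 33000, 0});
  OutputBufferParam out;
  return Read(buf, &out) && out.timestamp == 40000 ? 0 : 1;
}
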
diff --git a/chrome/common/gpu_video_common.h b/chrome/common/gpu_video_common.h
index 8102f3d..30a94be 100644
--- a/chrome/common/gpu_video_common.h
+++ b/chrome/common/gpu_video_common.h
@@ -10,6 +10,12 @@
#include "chrome/common/common_param_traits.h"
#include "media/base/video_frame.h"
+// Flags assigned to a video buffer for both input and output.
+enum GpuVideoBufferFlag {
+ kGpuVideoEndOfStream = 1 << 0,
+ kGpuVideoDiscontinuous = 1 << 1,
+};
+
struct GpuVideoServiceInfoParam {
// route id for GpuVideoService on GPU process side for this channel.
int32 video_service_route_id;
@@ -56,21 +62,7 @@ struct GpuVideoDecoderInputBufferParam {
int64 timestamp; // In unit of microseconds.
int32 offset;
int32 size;
- int32 flags; // miscellaneous flag bit mask
-};
-
-// A message that contains formation of a video frame that is ready to be
-// rendered by the Renderer process.
-struct GpuVideoDecoderOutputBufferParam {
- int32 frame_id; // ID of the video frame that is ready to be rendered.
- int64 timestamp; // In unit of microseconds.
- int64 duration; // In unit of microseconds.
- int32 flags; // miscellaneous flag bit mask
-
- enum {
- kFlagsEndOfStream = 0x00000001,
- kFlagsDiscontinuous = 0x00000002,
- };
+ int32 flags; // Miscellaneous flag bit mask.
};
struct GpuVideoDecoderErrorInfoParam {
@@ -126,14 +118,6 @@ struct ParamTraits<GpuVideoDecoderInputBufferParam> {
};
template <>
-struct ParamTraits<GpuVideoDecoderOutputBufferParam> {
- typedef GpuVideoDecoderOutputBufferParam param_type;
- static void Write(Message* m, const param_type& p);
- static bool Read(const Message* m, void** iter, param_type* r);
- static void Log(const param_type& p, std::string* l);
-};
-
-template <>
struct ParamTraits<GpuVideoDecoderErrorInfoParam> {
typedef GpuVideoDecoderErrorInfoParam param_type;
static void Write(Message* m, const param_type& p);
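
The new GpuVideoBufferFlag enum added in the hunk above folds per-buffer conditions into the single int32 flags field carried by both the input-buffer param and the ConsumeVideoFrame message. A minimal usage sketch, assuming only the two flags declared in the patch; the helper functions are illustrative:

// How the flags bitmask is combined and tested. The enum values mirror
// the GpuVideoBufferFlag declaration in the patch.
#include <cstdint>

enum GpuVideoBufferFlag {
  kGpuVideoEndOfStream   = 1 << 0,
  kGpuVideoDiscontinuous = 1 << 1,
};

inline bool IsEndOfStream(int32_t flags) {
  return (flags & kGpuVideoEndOfStream) != 0;
}

inline int32_t MakeFlags(bool end_of_stream, bool discontinuous) {
  int32_t flags = 0;
  if (end_of_stream)
    flags |= kGpuVideoEndOfStream;
  if (discontinuous)
    flags |= kGpuVideoDiscontinuous;
  return flags;
}

int main() {
  int32_t flags = MakeFlags(/*end_of_stream=*/true, /*discontinuous=*/false);
  return IsEndOfStream(flags) ? 0 : 1;
}
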
diff --git a/chrome/gpu/gpu_video_decoder.cc b/chrome/gpu/gpu_video_decoder.cc
index 91758e0..71ce991 100644
--- a/chrome/gpu/gpu_video_decoder.cc
+++ b/chrome/gpu/gpu_video_decoder.cc
@@ -28,10 +28,10 @@ void GpuVideoDecoder::OnMessageReceived(const IPC::Message& msg) {
OnFlush)
IPC_MESSAGE_HANDLER(GpuVideoDecoderMsg_EmptyThisBuffer,
OnEmptyThisBuffer)
- IPC_MESSAGE_HANDLER(GpuVideoDecoderMsg_FillThisBuffer,
- OnFillThisBuffer)
- IPC_MESSAGE_HANDLER(GpuVideoDecoderMsg_FillThisBufferDoneACK,
- OnFillThisBufferDoneACK)
+ IPC_MESSAGE_HANDLER(GpuVideoDecoderMsg_ProduceVideoFrame,
+ OnProduceVideoFrame)
+ IPC_MESSAGE_HANDLER(GpuVideoDecoderMsg_VideoFrameAllocated,
+ OnVideoFrameAllocated)
IPC_MESSAGE_UNHANDLED_ERROR()
IPC_END_MESSAGE_MAP()
}
@@ -103,12 +103,19 @@ void GpuVideoDecoder::ProduceVideoSample(scoped_refptr<Buffer> buffer) {
}
void GpuVideoDecoder::ConsumeVideoFrame(scoped_refptr<VideoFrame> frame) {
- GpuVideoDecoderOutputBufferParam output_param;
- output_param.timestamp = frame->GetTimestamp().InMicroseconds();
- output_param.duration = frame->GetDuration().InMicroseconds();
- output_param.flags = frame->IsEndOfStream() ?
- GpuVideoDecoderOutputBufferParam::kFlagsEndOfStream : 0;
- SendFillBufferDone(output_param);
+ int32 frame_id = -1;
+ for (VideoFrameMap::iterator i = video_frame_map_.begin();
+ i != video_frame_map_.end(); ++i) {
+ if (i->second == frame) {
+ frame_id = i->first;
+ break;
+ }
+ }
+ DCHECK_NE(-1, frame_id) << "VideoFrame not recognized";
+
+ SendConsumeVideoFrame(frame_id, frame->GetTimestamp().InMicroseconds(),
+ frame->GetDuration().InMicroseconds(),
+ frame->IsEndOfStream() ? kGpuVideoEndOfStream : 0);
}
void* GpuVideoDecoder::GetDevice() {
@@ -192,14 +199,23 @@ void GpuVideoDecoder::Destroy(Task* task) {
// TODO(hclam): I still need to think what I should do here.
}
+void GpuVideoDecoder::SetVideoDecodeEngine(media::VideoDecodeEngine* engine) {
+ decode_engine_.reset(engine);
+}
+
+void GpuVideoDecoder::SetGpuVideoDevice(GpuVideoDevice* device) {
+ video_device_.reset(device);
+}
+
GpuVideoDecoder::GpuVideoDecoder(
+ MessageLoop* message_loop,
const GpuVideoDecoderInfoParam* param,
- GpuChannel* channel,
+ IPC::Message::Sender* sender,
base::ProcessHandle handle,
gpu::gles2::GLES2Decoder* decoder)
- : decoder_host_route_id_(param->decoder_host_route_id),
- pending_output_requests_(0),
- channel_(channel),
+ : message_loop_(message_loop),
+ decoder_host_route_id_(param->decoder_host_route_id),
+ sender_(sender),
renderer_handle_(handle),
gles2_decoder_(decoder) {
memset(&config_, 0, sizeof(config_));
@@ -212,7 +228,6 @@ GpuVideoDecoder::GpuVideoDecoder(
}
void GpuVideoDecoder::OnInitialize(const GpuVideoDecoderInitParam& param) {
- // TODO(hclam): Initialize the VideoDecodeContext first.
// TODO(jiesun): codec id should come from |param|.
config_.codec = media::kCodecH264;
config_.width = param.width;
@@ -226,8 +241,6 @@ void GpuVideoDecoder::OnUninitialize() {
}
void GpuVideoDecoder::OnFlush() {
- pending_output_requests_ = 0;
-
decode_engine_->Flush();
}
@@ -243,32 +256,25 @@ void GpuVideoDecoder::OnEmptyThisBuffer(
memcpy(dst, src, buffer.size);
SendEmptyBufferACK();
+ // Delegate the method call to VideoDecodeEngine.
decode_engine_->ConsumeVideoSample(input_buffer);
}
-void GpuVideoDecoder::OnFillThisBuffer(
- const GpuVideoDecoderOutputBufferParam& param) {
- // Switch context before calling to the decode engine.
- bool ret = gles2_decoder_->MakeCurrent();
- DCHECK(ret) << "Failed to switch context";
-
- if (info_.stream_info.surface_type == VideoFrame::TYPE_SYSTEM_MEMORY) {
- pending_output_requests_++;
- } else {
+void GpuVideoDecoder::OnProduceVideoFrame(int32 frame_id) {
+ VideoFrameMap::iterator i = video_frame_map_.find(frame_id);
+ if (i == video_frame_map_.end()) {
+ NOTREACHED() << "Received a request of unknown frame ID.";
}
-}
-void GpuVideoDecoder::OnFillThisBufferDoneACK() {
- if (info_.stream_info.surface_type == VideoFrame::TYPE_SYSTEM_MEMORY) {
- pending_output_requests_--;
- if (pending_output_requests_) {
- decode_engine_->ProduceVideoFrame(frame_);
- }
- }
+ // Delegate the method call to VideoDecodeEngine.
+ decode_engine_->ProduceVideoFrame(i->second);
}
void GpuVideoDecoder::OnVideoFrameAllocated(int32 frame_id,
std::vector<uint32> textures) {
+ bool ret = gles2_decoder_->MakeCurrent();
+ DCHECK(ret) << "Failed to switch context";
+
// This method is called in response to a video frame allocation request sent
// to the Renderer process.
// We should use the textures to generate a VideoFrame by using
@@ -278,13 +284,15 @@ void GpuVideoDecoder::OnVideoFrameAllocated(int32 frame_id,
for (size_t i = 0; i < textures.size(); ++i) {
media::VideoFrame::GlTexture gl_texture;
// Translate the client texture id to service texture id.
- bool ret = gles2_decoder_->GetServiceTextureId(textures[i], &gl_texture);
+ ret = gles2_decoder_->GetServiceTextureId(textures[i], &gl_texture);
DCHECK(ret) << "Cannot translate client texture ID to service ID";
textures[i] = gl_texture;
}
+ // Use GpuVideoDevice to allocate VideoFrame objects.
scoped_refptr<media::VideoFrame> frame;
- bool ret = video_device_->CreateVideoFrameFromGlTextures(
+
+ ret = video_device_->CreateVideoFrameFromGlTextures(
pending_allocation_->width, pending_allocation_->height,
pending_allocation_->format, textures, &frame);
@@ -301,53 +309,58 @@ void GpuVideoDecoder::OnVideoFrameAllocated(int32 frame_id,
void GpuVideoDecoder::SendInitializeDone(
const GpuVideoDecoderInitDoneParam& param) {
- if (!channel_->Send(
+ if (!sender_->Send(
new GpuVideoDecoderHostMsg_InitializeACK(route_id(), param))) {
LOG(ERROR) << "GpuVideoDecoderMsg_InitializeACK failed";
}
}
void GpuVideoDecoder::SendUninitializeDone() {
- if (!channel_->Send(new GpuVideoDecoderHostMsg_DestroyACK(route_id()))) {
+ if (!sender_->Send(new GpuVideoDecoderHostMsg_DestroyACK(route_id()))) {
LOG(ERROR) << "GpuVideoDecoderMsg_DestroyACK failed";
}
}
void GpuVideoDecoder::SendFlushDone() {
- if (!channel_->Send(new GpuVideoDecoderHostMsg_FlushACK(route_id()))) {
+ if (!sender_->Send(new GpuVideoDecoderHostMsg_FlushACK(route_id()))) {
LOG(ERROR) << "GpuVideoDecoderMsg_FlushACK failed";
}
}
void GpuVideoDecoder::SendEmptyBufferDone() {
- if (!channel_->Send(
+ if (!sender_->Send(
new GpuVideoDecoderHostMsg_EmptyThisBufferDone(route_id()))) {
LOG(ERROR) << "GpuVideoDecoderMsg_EmptyThisBufferDone failed";
}
}
void GpuVideoDecoder::SendEmptyBufferACK() {
- if (!channel_->Send(
+ if (!sender_->Send(
new GpuVideoDecoderHostMsg_EmptyThisBufferACK(route_id()))) {
LOG(ERROR) << "GpuVideoDecoderMsg_EmptyThisBufferACK failed";
}
}
-void GpuVideoDecoder::SendFillBufferDone(
- const GpuVideoDecoderOutputBufferParam& param) {
- if (!channel_->Send(
- new GpuVideoDecoderHostMsg_FillThisBufferDone(route_id(), param))) {
- LOG(ERROR) << "GpuVideoDecoderMsg_FillThisBufferDone failed";
+void GpuVideoDecoder::SendConsumeVideoFrame(
+ int32 frame_id, int64 timestamp, int64 duration, int32 flags) {
+ if (!sender_->Send(
+ new GpuVideoDecoderHostMsg_ConsumeVideoFrame(
+ route_id(), frame_id, timestamp, duration, flags))) {
+ LOG(ERROR) << "GpuVideoDecodeHostMsg_ConsumeVideoFrame failed.";
}
}
void GpuVideoDecoder::SendAllocateVideoFrames(
int n, size_t width, size_t height, media::VideoFrame::Format format) {
- // TODO(hclam): Actually send the message.
+ if (!sender_->Send(
+ new GpuVideoDecoderHostMsg_AllocateVideoFrames(
+ route_id(), n, width, height, static_cast<int32>(format)))) {
+ LOG(ERROR) << "GpuVideoDecoderMsg_AllocateVideoFrames failed";
+ }
}
void GpuVideoDecoder::SendReleaseAllVideoFrames() {
- if (!channel_->Send(
+ if (!sender_->Send(
new GpuVideoDecoderHostMsg_ReleaseAllVideoFrames(route_id()))) {
LOG(ERROR) << "GpuVideoDecoderMsg_ReleaseAllVideoFrames failed";
}
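
ConsumeVideoFrame() in the hunk above has to translate a VideoFrame pointer back into the frame ID the renderer registered, and does so with a linear scan over video_frame_map_. The standalone sketch below reproduces that lookup with std::map and std::shared_ptr standing in for the Chromium types; the names are illustrative.

// Reverse lookup of a frame ID from a frame pointer, as done by
// GpuVideoDecoder::ConsumeVideoFrame(). std::shared_ptr stands in for
// scoped_refptr<media::VideoFrame>; the scan is O(n) in the number of
// outstanding frames, which is small in practice.
#include <cstdint>
#include <map>
#include <memory>

struct ToyVideoFrame {};  // Placeholder for media::VideoFrame.

using VideoFrameMap = std::map<int32_t, std::shared_ptr<ToyVideoFrame>>;

// Returns the ID registered for |frame|, or -1 if the frame is unknown.
int32_t FindFrameId(const VideoFrameMap& frames,
                    const std::shared_ptr<ToyVideoFrame>& frame) {
  for (const auto& entry : frames) {
    if (entry.second == frame)
      return entry.first;
  }
  return -1;
}

int main() {
  VideoFrameMap frames;
  auto frame = std::make_shared<ToyVideoFrame>();
  frames[10] = frame;
  return FindFrameId(frames, frame) == 10 ? 0 : 1;
}
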
diff --git a/chrome/gpu/gpu_video_decoder.h b/chrome/gpu/gpu_video_decoder.h
index d4c9b09..f975eb8 100644
--- a/chrome/gpu/gpu_video_decoder.h
+++ b/chrome/gpu/gpu_video_decoder.h
@@ -90,8 +90,15 @@ class GpuVideoDecoder
public IPC::Channel::Listener,
public media::VideoDecodeEngine::EventHandler,
public media::VideoDecodeContext {
-
public:
+ // Constructor and destructor.
+ GpuVideoDecoder(MessageLoop* message_loop,
+ const GpuVideoDecoderInfoParam* param,
+ IPC::Message::Sender* sender,
+ base::ProcessHandle handle,
+ gpu::gles2::GLES2Decoder* decoder);
+ virtual ~GpuVideoDecoder() {}
+
// IPC::Channel::Listener implementation.
virtual void OnChannelConnected(int32 peer_pid);
virtual void OnChannelError();
@@ -118,12 +125,9 @@ class GpuVideoDecoder
Task* task);
virtual void Destroy(Task* task);
- // Constructor and destructor.
- GpuVideoDecoder(const GpuVideoDecoderInfoParam* param,
- GpuChannel* channel_,
- base::ProcessHandle handle,
- gpu::gles2::GLES2Decoder* decoder);
- virtual ~GpuVideoDecoder() {}
+ // These methods are used in unit test only.
+ void SetVideoDecodeEngine(media::VideoDecodeEngine* engine);
+ void SetGpuVideoDevice(GpuVideoDevice* device);
private:
struct PendingAllocation {
@@ -146,8 +150,7 @@ class GpuVideoDecoder
void OnUninitialize();
void OnFlush();
void OnEmptyThisBuffer(const GpuVideoDecoderInputBufferParam& buffer);
- void OnFillThisBuffer(const GpuVideoDecoderOutputBufferParam& param);
- void OnFillThisBufferDoneACK();
+ void OnProduceVideoFrame(int32 frame_id);
void OnVideoFrameAllocated(int32 frame_id, std::vector<uint32> textures);
// Helper methods for sending messages to the Renderer process.
@@ -156,18 +159,21 @@ class GpuVideoDecoder
void SendFlushDone();
void SendEmptyBufferDone();
void SendEmptyBufferACK();
- void SendFillBufferDone(const GpuVideoDecoderOutputBufferParam& param);
+ void SendConsumeVideoFrame(int32 frame_id, int64 timestamp, int64 duration,
+ int32 flags);
void SendAllocateVideoFrames(
int n, size_t width, size_t height, media::VideoFrame::Format format);
void SendReleaseAllVideoFrames();
+ // The message loop that this object should run on.
+ MessageLoop* message_loop_;
+
int32 decoder_host_route_id_;
// Used only in system memory path. i.e. Remove this later.
scoped_refptr<VideoFrame> frame_;
- int32 pending_output_requests_;
- GpuChannel* channel_;
+ IPC::Message::Sender* sender_;
base::ProcessHandle renderer_handle_;
// The GLES2 decoder has the context associated with this decoder. This object
@@ -189,6 +195,7 @@ class GpuVideoDecoder
// Contains the mapping between a |frame_id| and VideoFrame generated by
// GpuVideoDevice from the associated GL textures.
+ // TODO(hclam): Using a faster data structure than map.
typedef std::map<int32, scoped_refptr<media::VideoFrame> > VideoFrameMap;
VideoFrameMap video_frame_map_;
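
The TODO above exists because the only way to go from a VideoFrame back to its ID is the linear scan shown in ConsumeVideoFrame(). One possible direction, purely as a sketch and not what this patch implements, is to keep a reverse index alongside the ID map:

// Hypothetical alternative: two maps kept in sync make frame <-> ID
// lookups O(log n) in both directions. Not the structure used by the patch.
#include <cstdint>
#include <map>
#include <memory>

struct ToyVideoFrame {};

class FrameRegistry {
 public:
  void Register(int32_t id, std::shared_ptr<ToyVideoFrame> frame) {
    by_id_[id] = frame;
    by_frame_[frame.get()] = id;
  }

  // Returns -1 when the frame was never registered.
  int32_t IdForFrame(const std::shared_ptr<ToyVideoFrame>& frame) const {
    auto it = by_frame_.find(frame.get());
    return it == by_frame_.end() ? -1 : it->second;
  }

  void Clear() {
    by_id_.clear();
    by_frame_.clear();
  }

 private:
  std::map<int32_t, std::shared_ptr<ToyVideoFrame>> by_id_;
  std::map<const ToyVideoFrame*, int32_t> by_frame_;
};
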
diff --git a/chrome/gpu/gpu_video_decoder_unittest.cc b/chrome/gpu/gpu_video_decoder_unittest.cc
new file mode 100644
index 0000000..4548807
--- /dev/null
+++ b/chrome/gpu/gpu_video_decoder_unittest.cc
@@ -0,0 +1,267 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/process.h"
+#include "chrome/common/gpu_messages.h"
+#include "chrome/gpu/gpu_video_decoder.h"
+#include "gpu/command_buffer/service/context_group.h"
+#include "gpu/command_buffer/service/gles2_cmd_decoder_mock.h"
+#include "ipc/ipc_message_utils.h"
+#include "media/video/mock_objects.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using testing::_;
+using testing::DoAll;
+using testing::NotNull;
+using testing::Return;
+using testing::SetArgumentPointee;
+
+static const int32 kFrameId = 10;
+static const media::VideoFrame::GlTexture kClientTexture = 101;
+static const media::VideoFrame::GlTexture kServiceTexture = 102;
+static const size_t kWidth = 320;
+static const size_t kHeight = 240;
+
+class MockGpuVideoDevice : public GpuVideoDevice {
+ public:
+ MockGpuVideoDevice() {}
+ virtual ~MockGpuVideoDevice() {}
+
+ MOCK_METHOD0(GetDevice, void*());
+ MOCK_METHOD5(CreateVideoFrameFromGlTextures,
+ bool(size_t, size_t, media::VideoFrame::Format,
+ const std::vector<media::VideoFrame::GlTexture>&,
+ scoped_refptr<media::VideoFrame>*));
+ MOCK_METHOD1(ReleaseVideoFrame,
+ void(const scoped_refptr<media::VideoFrame>& frame));
+ MOCK_METHOD2(UploadToVideoFrame,
+ bool(void* buffer, scoped_refptr<media::VideoFrame> frame));
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MockGpuVideoDevice);
+};
+
+ACTION_P(InitializationDone, handler) {
+ media::VideoCodecInfo info;
+ info.success = true;
+ info.provides_buffers = false;
+ info.stream_info.surface_format = media::VideoFrame::RGBA;
+ info.stream_info.surface_type = media::VideoFrame::TYPE_SYSTEM_MEMORY;
+ info.stream_info.surface_width = kWidth;
+ info.stream_info.surface_height = kHeight;
+ handler->OnInitializeComplete(info);
+}
+
+ACTION_P(SendVideoFrameAllocated, handler) {
+ std::vector<media::VideoFrame::GlTexture> textures;
+ textures.push_back(kClientTexture);
+ GpuVideoDecoderMsg_VideoFrameAllocated msg(0, kFrameId, textures);
+ handler->OnMessageReceived(msg);
+}
+
+ACTION_P2(SendConsumeVideoFrame, handler, frame) {
+ handler->ConsumeVideoFrame(frame);
+}
+
+class GpuVideoDecoderTest : public testing::Test,
+ public IPC::Message::Sender {
+ public:
+ GpuVideoDecoderTest() {
+ // Create the mock objects.
+ gles2_decoder_.reset(new gpu::gles2::MockGLES2Decoder(&group_));
+
+ // Initialize GpuVideoDecoder with the default params.
+ GpuVideoDecoderInfoParam param;
+ memset(&param, 0, sizeof(param));
+ gpu_video_decoder_ = new GpuVideoDecoder(
+ &message_loop_, &param, this, base::kNullProcessHandle,
+ gles2_decoder_.get());
+
+ // Create the mock objects.
+ mock_engine_ = new media::MockVideoDecodeEngine();
+ mock_device_ = new MockGpuVideoDevice();
+
+ // Inject the mock objects.
+ gpu_video_decoder_->SetVideoDecodeEngine(mock_engine_);
+ gpu_video_decoder_->SetGpuVideoDevice(mock_device_);
+
+ // VideoFrame for GpuVideoDevice.
+ media::VideoFrame::GlTexture textures[] = { kServiceTexture, 0, 0 };
+ media::VideoFrame::CreateFrameGlTexture(media::VideoFrame::RGBA,
+ kWidth, kHeight, textures,
+ base::TimeDelta(),
+ base::TimeDelta(),
+ &device_frame_);
+ }
+
+ ~GpuVideoDecoderTest() {
+ gpu_video_decoder_->SetVideoDecodeEngine(NULL);
+ gpu_video_decoder_->SetGpuVideoDevice(NULL);
+ }
+
+ // This method is used to dispatch IPC messages to mock methods.
+ virtual bool Send(IPC::Message* msg) {
+ EXPECT_TRUE(msg);
+ if (!msg)
+ return false;
+
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP(GpuVideoDecoderTest, *msg)
+ IPC_MESSAGE_HANDLER(GpuVideoDecoderHostMsg_InitializeACK,
+ OnInitializeDone)
+ IPC_MESSAGE_HANDLER(GpuVideoDecoderHostMsg_DestroyACK,
+ OnUninitializeDone)
+ IPC_MESSAGE_HANDLER(GpuVideoDecoderHostMsg_FlushACK,
+ OnFlushDone)
+ IPC_MESSAGE_HANDLER(GpuVideoDecoderHostMsg_EmptyThisBufferACK,
+ OnEmptyThisBufferACK)
+ IPC_MESSAGE_HANDLER(GpuVideoDecoderHostMsg_EmptyThisBufferDone,
+ OnEmptyThisBufferDone)
+ IPC_MESSAGE_HANDLER(GpuVideoDecoderHostMsg_AllocateVideoFrames,
+ OnAllocateVideoFrames)
+ IPC_MESSAGE_HANDLER(GpuVideoDecoderHostMsg_ReleaseAllVideoFrames,
+ OnReleaseAllVideoFrames)
+ IPC_MESSAGE_HANDLER(GpuVideoDecoderHostMsg_ConsumeVideoFrame,
+ OnConsumeVideoFrame)
+ IPC_MESSAGE_UNHANDLED_ERROR()
+ IPC_END_MESSAGE_MAP()
+ EXPECT_TRUE(handled);
+ delete msg;
+ return true;
+ }
+
+ // Mock methods for handling output IPC messages.
+ MOCK_METHOD1(OnInitializeDone,
+ void(const GpuVideoDecoderInitDoneParam& param));
+ MOCK_METHOD0(OnUninitializeDone, void());
+ MOCK_METHOD0(OnFlushDone, void());
+ MOCK_METHOD0(OnEmptyThisBufferDone, void());
+ MOCK_METHOD4(OnConsumeVideoFrame, void(int32 device_frame_id, int64 timestamp,
+ int64 duration, int32 flags));
+ MOCK_METHOD0(OnEmptyThisBufferACK, void());
+ MOCK_METHOD4(OnAllocateVideoFrames, void(int32 n, uint32 width,
+ uint32 height, int32 format));
+ MOCK_METHOD0(OnReleaseAllVideoFrames, void());
+
+ // Receive events from GpuVideoDecoder.
+ MOCK_METHOD0(VideoFramesAllocated, void());
+
+ void Initialize() {
+ // VideoDecodeEngine is called.
+ EXPECT_CALL(*mock_engine_, Initialize(_, _, _, _))
+ .WillOnce(InitializationDone(gpu_video_decoder_));
+
+ // Expect that initialization is completed.
+ EXPECT_CALL(*this, OnInitializeDone(_));
+
+ // Send an initialiaze message to GpuVideoDecoder.
+ GpuVideoDecoderInitParam param;
+ param.width = kWidth;
+ param.height = kHeight;
+
+ GpuVideoDecoderMsg_Initialize msg(0, param);
+ gpu_video_decoder_->OnMessageReceived(msg);
+ }
+
+ void AllocateVideoFrames() {
+ // Expect that IPC messages are sent. We'll reply with some GL textures.
+ EXPECT_CALL(*this, OnAllocateVideoFrames(
+ 1, kWidth, kHeight, static_cast<int32>(media::VideoFrame::RGBA)))
+ .WillOnce(SendVideoFrameAllocated(gpu_video_decoder_));
+
+ // Expect that MakeCurrent() is called.
+ EXPECT_CALL(*gles2_decoder_.get(), MakeCurrent())
+ .WillOnce(Return(true))
+ .RetiresOnSaturation();
+
+ // Expect that translate method is called.
+ EXPECT_CALL(*gles2_decoder_.get(),
+ GetServiceTextureId(kClientTexture, NotNull()))
+ .WillOnce(DoAll(SetArgumentPointee<1>(kServiceTexture), Return(true)));
+
+ // And then GpuVideoDevice is called to create VideoFrame from GL textures.
+ EXPECT_CALL(*mock_device_,
+ CreateVideoFrameFromGlTextures(kWidth, kHeight,
+ media::VideoFrame::RGBA, _,
+ NotNull()))
+ .WillOnce(DoAll(SetArgumentPointee<4>(device_frame_), Return(true)));
+
+ // Finally the task is called.
+ EXPECT_CALL(*this, VideoFramesAllocated());
+
+ // Pretend calling GpuVideoDecoder for allocating frames.
+ gpu_video_decoder_->AllocateVideoFrames(
+ 1, kWidth, kHeight, media::VideoFrame::RGBA, &decoder_frames_,
+ NewRunnableMethod(this, &GpuVideoDecoderTest::VideoFramesAllocated));
+ }
+
+ void ReleaseVideoFrames() {
+ // Expect that MakeCurrent() is called.
+ EXPECT_CALL(*gles2_decoder_.get(), MakeCurrent())
+ .WillOnce(Return(true))
+ .RetiresOnSaturation();
+
+ // Expect that video frame is released.
+ EXPECT_CALL(*mock_device_, ReleaseVideoFrame(device_frame_));
+
+ // Expect that IPC message is send to release video frame.
+ EXPECT_CALL(*this, OnReleaseAllVideoFrames());
+
+ // Call to GpuVideoDecoder to release all video frames.
+ gpu_video_decoder_->ReleaseAllVideoFrames();
+ }
+
+ void BufferExchange() {
+ // Expect that we call to produce video frame.
+ EXPECT_CALL(*mock_engine_, ProduceVideoFrame(device_frame_))
+ .WillOnce(SendConsumeVideoFrame(gpu_video_decoder_, device_frame_))
+ .RetiresOnSaturation();
+
+ // Expect that consume video frame is called.
+ EXPECT_CALL(*this, OnConsumeVideoFrame(kFrameId, 0, 0, 0))
+ .RetiresOnSaturation();
+
+ // Ask the GpuVideoDecoder to produce a video frame.
+ GpuVideoDecoderMsg_ProduceVideoFrame msg(0, kFrameId);
+ gpu_video_decoder_->OnMessageReceived(msg);
+ }
+
+ private:
+ scoped_refptr<GpuVideoDecoder> gpu_video_decoder_;
+ MockGpuVideoDevice* mock_device_;
+ media::MockVideoDecodeEngine* mock_engine_;
+ gpu::gles2::ContextGroup group_;
+ scoped_ptr<gpu::gles2::MockGLES2Decoder> gles2_decoder_;
+ std::vector<scoped_refptr<media::VideoFrame> > decoder_frames_;
+ scoped_refptr<media::VideoFrame> device_frame_;
+
+ MessageLoop message_loop_;
+
+ DISALLOW_COPY_AND_ASSIGN(GpuVideoDecoderTest);
+};
+
+TEST_F(GpuVideoDecoderTest, Initialize) {
+ Initialize();
+}
+
+TEST_F(GpuVideoDecoderTest, AllocateVideoFrames) {
+ Initialize();
+ AllocateVideoFrames();
+}
+
+TEST_F(GpuVideoDecoderTest, ReleaseVideoFrames) {
+ Initialize();
+ AllocateVideoFrames();
+ ReleaseVideoFrames();
+}
+
+TEST_F(GpuVideoDecoderTest, BufferExchange) {
+ Initialize();
+ AllocateVideoFrames();
+ BufferExchange();
+ BufferExchange();
+ ReleaseVideoFrames();
+}
+
+DISABLE_RUNNABLE_METHOD_REFCOUNT(GpuVideoDecoderTest);
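
The test fixture above stands in for the IPC channel by implementing IPC::Message::Sender and routing the captured messages to mock handlers via IPC_MESSAGE_HANDLER. Stripped of gmock and the Chromium message-map macros, the interception pattern reduces to the following standalone sketch; every type here is a stand-in, not a Chromium class.

// Skeleton of the "test double as message sink" pattern used by
// GpuVideoDecoderTest: the object under test sends through an interface,
// and the test implements that interface to capture and inspect traffic.
#include <memory>
#include <vector>

struct ToyMessage {
  int type;
};

class MessageSender {
 public:
  virtual ~MessageSender() = default;
  // Takes ownership of |msg|, mirroring IPC::Message::Sender::Send().
  virtual bool Send(ToyMessage* msg) = 0;
};

// Production object sends progress notifications through the interface.
class ToyDecoder {
 public:
  explicit ToyDecoder(MessageSender* sender) : sender_(sender) {}
  void Flush() { sender_->Send(new ToyMessage{/*type=*/1}); }

 private:
  MessageSender* sender_;
};

// Test-side sender: records every message instead of crossing a process
// boundary, so assertions can be made about what was sent.
class CapturingSender : public MessageSender {
 public:
  bool Send(ToyMessage* msg) override {
    captured_.emplace_back(msg);
    return true;
  }
  const std::vector<std::unique_ptr<ToyMessage>>& captured() const {
    return captured_;
  }

 private:
  std::vector<std::unique_ptr<ToyMessage>> captured_;
};

int main() {
  CapturingSender sender;
  ToyDecoder decoder(&sender);
  decoder.Flush();
  return sender.captured().size() == 1 ? 0 : 1;
}
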
diff --git a/chrome/gpu/gpu_video_service.cc b/chrome/gpu/gpu_video_service.cc
index 8d1ca4e..6ff2e81 100644
--- a/chrome/gpu/gpu_video_service.cc
+++ b/chrome/gpu/gpu_video_service.cc
@@ -49,7 +49,8 @@ bool GpuVideoService::CreateVideoDecoder(
int32 decoder_id = GetNextAvailableDecoderID();
param->decoder_id = decoder_id;
base::ProcessHandle handle = channel->renderer_handle();
- decoder_info.decoder_ = new GpuVideoDecoder(param, channel, handle,
+ decoder_info.decoder_ = new GpuVideoDecoder(MessageLoop::current(),
+ param, channel, handle,
gles2_decoder);
decoder_info.channel_ = channel;
decoder_info.param = *param;
diff --git a/chrome/renderer/gpu_video_decoder_host.cc b/chrome/renderer/gpu_video_decoder_host.cc
index 771c38a..130aa32 100644
--- a/chrome/renderer/gpu_video_decoder_host.cc
+++ b/chrome/renderer/gpu_video_decoder_host.cc
@@ -38,8 +38,6 @@ void GpuVideoDecoderHost::OnMessageReceived(const IPC::Message& msg) {
OnEmptyThisBufferACK)
IPC_MESSAGE_HANDLER(GpuVideoDecoderHostMsg_EmptyThisBufferDone,
OnEmptyThisBufferDone)
- IPC_MESSAGE_HANDLER(GpuVideoDecoderHostMsg_FillThisBufferDone,
- OnFillThisBufferDone)
IPC_MESSAGE_UNHANDLED_ERROR()
IPC_END_MESSAGE_MAP()
}
@@ -109,13 +107,6 @@ void GpuVideoDecoderHost::FillThisBuffer(scoped_refptr<VideoFrame> frame) {
// TODO(hclam): We should keep an IDMap to convert between a frame a buffer
// ID so that we can signal GpuVideoDecoder in GPU process to use the buffer.
// This eliminates one conversion step.
- // TODO(hclam): Fill the param.
- GpuVideoDecoderOutputBufferParam param;
-
- if (!channel_host_ || !channel_host_->Send(
- new GpuVideoDecoderMsg_FillThisBuffer(route_id(), param))) {
- LOG(ERROR) << "GpuVideoDecoderMsg_FillThisBuffer failed";
- }
}
bool GpuVideoDecoderHost::Flush() {
@@ -172,30 +163,24 @@ void GpuVideoDecoderHost::OnEmptyThisBufferDone() {
event_handler_->OnEmptyBufferDone(buffer);
}
-void GpuVideoDecoderHost::OnFillThisBufferDone(
- const GpuVideoDecoderOutputBufferParam& param) {
+void GpuVideoDecoderHost::OnConsumeVideoFrame(int32 frame_id, int64 timestamp,
+ int64 duration, int32 flags) {
scoped_refptr<VideoFrame> frame;
- if (param.flags & GpuVideoDecoderOutputBufferParam::kFlagsEndOfStream) {
+ if (flags & kGpuVideoEndOfStream) {
VideoFrame::CreateEmptyFrame(&frame);
} else {
- // TODO(hclam): The logic in buffer allocation is pretty much around
- // using shared memory for output buffer which needs to be adjusted.
- // Fake the texture ID until we implement it properly.
+ // TODO(hclam): Use |frame_id| to find the VideoFrame.
VideoFrame::GlTexture textures[3] = { 0, 0, 0 };
media::VideoFrame::CreateFrameGlTexture(
media::VideoFrame::RGBA, init_param_.width, init_param_.height,
textures,
- base::TimeDelta::FromMicroseconds(param.timestamp),
- base::TimeDelta::FromMicroseconds(param.duration),
+ base::TimeDelta::FromMicroseconds(timestamp),
+ base::TimeDelta::FromMicroseconds(duration),
&frame);
}
event_handler_->OnFillBufferDone(frame);
- if (!channel_host_ || !channel_host_->Send(
- new GpuVideoDecoderMsg_FillThisBufferDoneACK(route_id()))) {
- LOG(ERROR) << "GpuVideoDecoderMsg_FillThisBufferDoneACK failed";
- }
}
void GpuVideoDecoderHost::OnEmptyThisBufferACK() {
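
OnConsumeVideoFrame() above branches on the end-of-stream flag: an EOS notification becomes an empty frame, while anything else becomes a texture-backed frame carrying the reported timestamp and duration (the lookup of the real textures by |frame_id| is still a TODO in this patch). A minimal sketch of that decision with stand-in types:

// Stand-in for the end-of-stream branching in
// GpuVideoDecoderHost::OnConsumeVideoFrame(). Types are illustrative;
// the real code builds media::VideoFrame objects.
#include <cstdint>

constexpr int32_t kGpuVideoEndOfStream = 1 << 0;

struct ToyFrame {
  bool end_of_stream = false;
  int64_t timestamp_us = 0;
  int64_t duration_us = 0;
};

ToyFrame MakeFrameFromConsumeMessage(int32_t /*frame_id*/,
                                     int64_t timestamp_us,
                                     int64_t duration_us, int32_t flags) {
  ToyFrame frame;
  if (flags & kGpuVideoEndOfStream) {
    frame.end_of_stream = true;  // Mirrors VideoFrame::CreateEmptyFrame().
  } else {
    // Mirrors CreateFrameGlTexture(): carry timing over to the new frame.
    frame.timestamp_us = timestamp_us;
    frame.duration_us = duration_us;
  }
  return frame;
}
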
diff --git a/chrome/renderer/gpu_video_decoder_host.h b/chrome/renderer/gpu_video_decoder_host.h
index 1255bf5..cfcdf88 100644
--- a/chrome/renderer/gpu_video_decoder_host.h
+++ b/chrome/renderer/gpu_video_decoder_host.h
@@ -70,7 +70,8 @@ class GpuVideoDecoderHost
void OnUninitializeDone();
void OnFlushDone();
void OnEmptyThisBufferDone();
- void OnFillThisBufferDone(const GpuVideoDecoderOutputBufferParam& param);
+ void OnConsumeVideoFrame(int32 frame_id, int64 timestamp,
+ int64 duration, int32 flags);
void OnEmptyThisBufferACK();
// Helper function.