summaryrefslogtreecommitdiffstats
path: root/media
diff options
context:
space:
mode:
authorimcheng@chromium.org <imcheng@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2010-08-11 16:37:17 +0000
committerimcheng@chromium.org <imcheng@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2010-08-11 16:37:17 +0000
commitb24c81a4c510bec2902b12d4768d6d47aafed9f2 (patch)
treea5cc2c420519816c48e045a26d2e97f1600038d3 /media
parentc02ecb834297e41993b0546fb57156ec2ab2563f (diff)
downloadchromium_src-b24c81a4c510bec2902b12d4768d6d47aafed9f2.zip
chromium_src-b24c81a4c510bec2902b12d4768d6d47aafed9f2.tar.gz
chromium_src-b24c81a4c510bec2902b12d4768d6d47aafed9f2.tar.bz2
Added rendering and unittests for the MF H264 decoder prototype.
BUG=none TEST=none Review URL: http://codereview.chromium.org/3096010 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@55734 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'media')
-rw-r--r--media/media.gyp57
-rw-r--r--media/mf/README.chromium2
-rwxr-xr-xmedia/mf/basic_renderer.cc221
-rwxr-xr-xmedia/mf/basic_renderer.h60
-rwxr-xr-xmedia/mf/d3d_util.cc89
-rwxr-xr-xmedia/mf/d3d_util.h34
-rw-r--r--media/mf/mft_h264_decoder.cc139
-rw-r--r--media/mf/mft_h264_decoder.h23
-rw-r--r--media/mf/mft_h264_decoder_example.cc (renamed from media/mf/main.cc)204
-rwxr-xr-xmedia/mf/test/mft_h264_decoder_unittest.cc278
-rwxr-xr-xmedia/mf/test/run_all_unittests.cc27
11 files changed, 980 insertions, 154 deletions
diff --git a/media/media.gyp b/media/media.gyp
index 7f9ba7a2..db0eeb0 100644
--- a/media/media.gyp
+++ b/media/media.gyp
@@ -430,11 +430,10 @@
},
{
'target_name': 'mft_h264_decoder',
- 'type': 'executable',
+ 'type': '<(library)',
'dependencies': [
'media',
'../base/base.gyp:base',
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
],
'include_dirs': [
'..',
@@ -442,9 +441,61 @@
'sources': [
'mf/mft_h264_decoder.cc',
'mf/mft_h264_decoder.h',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '1', # Set /SUBSYSTEM:CONSOLE
+ },
+ },
+ },
+ {
+ 'target_name': 'mft_h264_decoder_example',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'mft_h264_decoder',
+ '../base/base.gyp:base',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'mf/basic_renderer.cc',
+ 'mf/basic_renderer.h',
+ 'mf/d3d_util.cc',
+ 'mf/d3d_util.h',
+ 'mf/file_reader_util.cc',
+ 'mf/file_reader_util.h',
+ 'mf/mft_h264_decoder_example.cc',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '1', # Set /SUBSYSTEM:CONSOLE
+ },
+ },
+ },
+ {
+ 'target_name': 'mft_h264_decoder_unittests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'mft_h264_decoder',
+ '../base/base.gyp:base',
+ '../base/base.gyp:base_i18n',
+ '../testing/gtest.gyp:gtest',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'mf/d3d_util.cc',
+ 'mf/d3d_util.h',
'mf/file_reader_util.cc',
'mf/file_reader_util.h',
- 'mf/main.cc',
+ 'mf/test/mft_h264_decoder_unittest.cc',
+ 'mf/test/run_all_unittests.cc',
],
'msvs_settings': {
'VCLinkerTool': {
diff --git a/media/mf/README.chromium b/media/mf/README.chromium
index cdd17a5..5ef5f64 100644
--- a/media/mf/README.chromium
+++ b/media/mf/README.chromium
@@ -11,8 +11,6 @@ H.264 decoder using callbacks.
Requirements: Windows 7
-Note: Rendering coming in next patch
-
Note1: On some video files, there is a mysterious 1-off decoded frame count
when DXVA is enabled.
diff --git a/media/mf/basic_renderer.cc b/media/mf/basic_renderer.cc
new file mode 100755
index 0000000..b85afd5
--- /dev/null
+++ b/media/mf/basic_renderer.cc
@@ -0,0 +1,221 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/mf/basic_renderer.h"
+
+#include <d3d9.h>
+#include <mfapi.h>
+#include <mfidl.h>
+
+#include "base/message_loop.h"
+#include "base/scoped_comptr_win.h"
+#include "base/time.h"
+#include "media/base/yuv_convert.h"
+
+// For MFGetService and MF_BUFFER_SERVICE (getting D3D surface from buffer)
+#pragma comment(lib, "mf.lib")
+#pragma comment(lib, "strmiids.lib")
+
+namespace media {
+
+// Converts the given raw data buffer into RGB32 format, and draws the result
+// into the given window. This is only used when DXVA2 is not enabled.
+// Returns: true on success.
+bool ConvertToRGBAndDrawToWindow(HWND video_window, uint8* data, int width,
+ int height, int stride) {
+ CHECK(video_window != NULL);
+ CHECK(data != NULL);
+ CHECK_GT(width, 0);
+ CHECK_GT(height, 0);
+ CHECK_GE(stride, width);
+ height = (height + 15) & ~15;
+ bool success = true;
+ uint8* y_start = reinterpret_cast<uint8*>(data);
+ uint8* u_start = y_start + height * stride * 5 / 4;
+ uint8* v_start = y_start + height * stride;
+ static uint8* rgb_frame = new uint8[height * stride * 4];
+ int y_stride = stride;
+ int uv_stride = stride / 2;
+ int rgb_stride = stride * 4;
+ ConvertYUVToRGB32(y_start, u_start, v_start, rgb_frame,
+ width, height, y_stride, uv_stride,
+ rgb_stride, YV12);
+ PAINTSTRUCT ps;
+ InvalidateRect(video_window, NULL, TRUE);
+ HDC hdc = BeginPaint(video_window, &ps);
+ BITMAPINFOHEADER hdr;
+ hdr.biSize = sizeof(BITMAPINFOHEADER);
+ hdr.biWidth = width;
+ hdr.biHeight = -height; // minus means top-down bitmap
+ hdr.biPlanes = 1;
+ hdr.biBitCount = 32;
+ hdr.biCompression = BI_RGB; // no compression
+ hdr.biSizeImage = 0;
+ hdr.biXPelsPerMeter = 1;
+ hdr.biYPelsPerMeter = 1;
+ hdr.biClrUsed = 0;
+ hdr.biClrImportant = 0;
+ int rv = StretchDIBits(hdc, 0, 0, width, height, 0, 0, width, height,
+ rgb_frame, reinterpret_cast<BITMAPINFO*>(&hdr),
+ DIB_RGB_COLORS, SRCCOPY);
+ if (rv == 0) {
+ LOG(ERROR) << "StretchDIBits failed";
+ MessageLoopForUI::current()->QuitNow();
+ success = false;
+ }
+ EndPaint(video_window, &ps);
+
+ return success;
+}
+
+// Obtains the underlying raw data buffer for the given IMFMediaBuffer, and
+// calls ConvertToRGBAndDrawToWindow() with it.
+// Returns: true on success.
+bool PaintMediaBufferOntoWindow(HWND video_window, IMFMediaBuffer* video_buffer,
+ int width, int height, int stride) {
+ CHECK(video_buffer != NULL);
+ HRESULT hr;
+ BYTE* data;
+ DWORD buffer_length;
+ DWORD data_length;
+ hr = video_buffer->Lock(&data, &buffer_length, &data_length);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to lock IMFMediaBuffer";
+ return false;
+ }
+ if (!ConvertToRGBAndDrawToWindow(video_window,
+ reinterpret_cast<uint8*>(data),
+ width,
+ height,
+ stride)) {
+ LOG(ERROR) << "Failed to convert raw buffer to RGB and draw to window";
+ video_buffer->Unlock();
+ return false;
+ }
+ video_buffer->Unlock();
+ return true;
+}
+
+// Obtains the D3D9 surface from the given IMFMediaBuffer, then calls methods
+// in the D3D device to draw to the window associated with it.
+// Returns: true on success.
+bool PaintD3D9BufferOntoWindow(IDirect3DDevice9* device,
+ IMFMediaBuffer* video_buffer) {
+ CHECK(device != NULL);
+ ScopedComPtr<IDirect3DSurface9> surface;
+ HRESULT hr = MFGetService(video_buffer, MR_BUFFER_SERVICE,
+ IID_PPV_ARGS(surface.Receive()));
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to get D3D9 surface from buffer";
+ return false;
+ }
+ hr = device->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0),
+ 1.0f, 0);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Device->Clear() failed";
+ return false;
+ }
+ ScopedComPtr<IDirect3DSurface9> backbuffer;
+ hr = device->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO,
+ backbuffer.Receive());
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Device->GetBackBuffer() failed";
+ return false;
+ }
+ hr = device->StretchRect(surface.get(), NULL, backbuffer.get(), NULL,
+ D3DTEXF_NONE);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Device->StretchRect() failed";
+ return false;
+ }
+ hr = device->Present(NULL, NULL, NULL, NULL);
+ if (FAILED(hr)) {
+ if (hr == E_FAIL) {
+ LOG(WARNING) << "Present() returned E_FAIL";
+ } else {
+ static int frames_dropped = 0;
+ LOG(ERROR) << "Device->Present() failed "
+ << std::hex << std::showbase << hr;
+ if (++frames_dropped == 10) {
+ LOG(ERROR) << "Dropped too many frames, quitting";
+ MessageLoopForUI::current()->QuitNow();
+ return false;
+ }
+ }
+ }
+ return true;
+}
+
+static void ReleaseOutputBuffer(VideoFrame* frame) {
+ if (frame != NULL &&
+ frame->type() == VideoFrame::TYPE_MFBUFFER ||
+ frame->type() == VideoFrame::TYPE_DIRECT3DSURFACE) {
+ static_cast<IMFMediaBuffer*>(frame->private_buffer())->Release();
+ }
+}
+
+// NullRenderer
+
+NullRenderer::NullRenderer(MftH264Decoder* decoder) : MftRenderer(decoder) {}
+NullRenderer::~NullRenderer() {}
+
+void NullRenderer::ProcessFrame(scoped_refptr<VideoFrame> frame) {
+ ReleaseOutputBuffer(frame);
+ MessageLoop::current()->PostTask(
+ FROM_HERE, NewRunnableMethod(decoder_.get(),
+ &MftH264Decoder::GetOutput));
+}
+
+void NullRenderer::StartPlayback() {
+ MessageLoop::current()->PostTask(
+ FROM_HERE, NewRunnableMethod(decoder_.get(),
+ &MftH264Decoder::GetOutput));
+}
+
+void NullRenderer::StopPlayback() {
+ MessageLoop::current()->Quit();
+}
+
+// BasicRenderer
+
+BasicRenderer::BasicRenderer(MftH264Decoder* decoder,
+ HWND window, IDirect3DDevice9* device)
+ : MftRenderer(decoder),
+ window_(window),
+ device_(device) {
+}
+
+BasicRenderer::~BasicRenderer() {}
+
+void BasicRenderer::ProcessFrame(scoped_refptr<VideoFrame> frame) {
+ if (device_ != NULL) {
+ if (!PaintD3D9BufferOntoWindow(device_,
+ static_cast<IMFMediaBuffer*>(frame->private_buffer()))) {
+ MessageLoopForUI::current()->QuitNow();
+ }
+ } else {
+ if (!PaintMediaBufferOntoWindow(
+ window_, static_cast<IMFMediaBuffer*>(frame->private_buffer()),
+ frame->width(), frame->height(), frame->stride(0))) {
+ MessageLoopForUI::current()->QuitNow();
+ }
+ }
+ ReleaseOutputBuffer(frame);
+ MessageLoopForUI::current()->PostDelayedTask(
+ FROM_HERE, NewRunnableMethod(decoder_.get(),
+ &MftH264Decoder::GetOutput),
+ frame->GetDuration().InMilliseconds());
+}
+
+void BasicRenderer::StartPlayback() {
+ MessageLoopForUI::current()->PostTask(
+ FROM_HERE, NewRunnableMethod(decoder_.get(),
+ &MftH264Decoder::GetOutput));
+}
+
+void BasicRenderer::StopPlayback() {
+ MessageLoopForUI::current()->Quit();
+}
+
+} // namespace media
diff --git a/media/mf/basic_renderer.h b/media/mf/basic_renderer.h
new file mode 100755
index 0000000..97711c2
--- /dev/null
+++ b/media/mf/basic_renderer.h
@@ -0,0 +1,60 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Short / basic implementation to simulate rendering H.264 frames output by
+// MF's H.264 decoder to screen.
+
+#ifndef MEDIA_MF_BASIC_RENDERER_H_
+#define MEDIA_MF_BASIC_RENDERER_H_
+
+#include <d3d9.h>
+
+#include "base/scoped_ptr.h"
+#include "base/scoped_comptr_win.h"
+#include "media/base/video_frame.h"
+#include "media/mf/mft_h264_decoder.h"
+
+namespace media {
+
+class MftRenderer : public base::RefCountedThreadSafe<MftRenderer> {
+ public:
+ explicit MftRenderer(MftH264Decoder* decoder) : decoder_(decoder) {}
+ virtual ~MftRenderer() {}
+ virtual void ProcessFrame(scoped_refptr<VideoFrame> frame) = 0;
+ virtual void StartPlayback() = 0;
+ virtual void StopPlayback() = 0;
+
+ protected:
+ scoped_refptr<MftH264Decoder> decoder_;
+};
+
+// This renderer does nothing with the frame except discarding it.
+class NullRenderer : public MftRenderer {
+ public:
+ explicit NullRenderer(MftH264Decoder* decoder);
+ virtual ~NullRenderer();
+ virtual void ProcessFrame(scoped_refptr<VideoFrame> frame);
+ virtual void StartPlayback();
+ virtual void StopPlayback();
+};
+
+// This renderer does a basic playback by drawing to |window_|. It tries to
+// respect timing specified in the received VideoFrames.
+class BasicRenderer : public MftRenderer {
+ public:
+ explicit BasicRenderer(MftH264Decoder* decoder,
+ HWND window, IDirect3DDevice9* device);
+ virtual ~BasicRenderer();
+ virtual void ProcessFrame(scoped_refptr<VideoFrame> frame);
+ virtual void StartPlayback();
+ virtual void StopPlayback();
+
+ private:
+ HWND window_;
+ ScopedComPtr<IDirect3DDevice9> device_;
+};
+
+} // namespace media
+
+#endif // MEDIA_MF_BASIC_RENDERER_H_
diff --git a/media/mf/d3d_util.cc b/media/mf/d3d_util.cc
new file mode 100755
index 0000000..a6639e4
--- /dev/null
+++ b/media/mf/d3d_util.cc
@@ -0,0 +1,89 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/mf/d3d_util.h"
+
+#include <d3d9.h>
+#include <dxva2api.h>
+
+#include "base/scoped_comptr_win.h"
+
+namespace media {
+
+IDirect3DDeviceManager9* CreateD3DDevManager(HWND video_window,
+ IDirect3D9** direct3d,
+ IDirect3DDevice9** device) {
+ ScopedComPtr<IDirect3DDeviceManager9> dev_manager;
+ ScopedComPtr<IDirect3D9> d3d;
+ d3d.Attach(Direct3DCreate9(D3D_SDK_VERSION));
+ if (d3d == NULL) {
+ LOG(ERROR) << "Failed to create D3D9";
+ return NULL;
+ }
+ D3DPRESENT_PARAMETERS present_params = {0};
+
+ // Once we know the dimensions, we need to reset using
+ // AdjustD3DDeviceBackBufferDimensions().
+ present_params.BackBufferWidth = 0;
+ present_params.BackBufferHeight = 0;
+ present_params.BackBufferFormat = D3DFMT_UNKNOWN;
+ present_params.BackBufferCount = 1;
+ present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
+ present_params.hDeviceWindow = video_window;
+ present_params.Windowed = TRUE;
+ present_params.Flags = D3DPRESENTFLAG_VIDEO;
+ present_params.FullScreen_RefreshRateInHz = 0;
+ present_params.PresentationInterval = 0;
+
+ ScopedComPtr<IDirect3DDevice9> temp_device;
+
+ // D3DCREATE_HARDWARE_VERTEXPROCESSING specifies hardware vertex processing.
+ // (Is it even needed for just video decoding?)
+ HRESULT hr = d3d->CreateDevice(D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ NULL,
+ D3DCREATE_HARDWARE_VERTEXPROCESSING,
+ &present_params,
+ temp_device.Receive());
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to create D3D Device";
+ return NULL;
+ }
+ UINT dev_manager_reset_token = 0;
+ hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token,
+ dev_manager.Receive());
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Couldn't create D3D Device manager";
+ return NULL;
+ }
+ hr = dev_manager->ResetDevice(temp_device.get(), dev_manager_reset_token);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to set device to device manager";
+ return NULL;
+ }
+ *direct3d = d3d.Detach();
+ *device = temp_device.Detach();
+ return dev_manager.Detach();
+}
+
+bool AdjustD3DDeviceBackBufferDimensions(IDirect3DDevice9* device,
+ HWND video_window,
+ int width,
+ int height) {
+ D3DPRESENT_PARAMETERS present_params = {0};
+ present_params.BackBufferWidth = width;
+ present_params.BackBufferHeight = height;
+ present_params.BackBufferFormat = D3DFMT_UNKNOWN;
+ present_params.BackBufferCount = 1;
+ present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
+ present_params.hDeviceWindow = video_window;
+ present_params.Windowed = TRUE;
+ present_params.Flags = D3DPRESENTFLAG_VIDEO;
+ present_params.FullScreen_RefreshRateInHz = 0;
+ present_params.PresentationInterval = 0;
+
+ return SUCCEEDED(device->Reset(&present_params)) ? true : false;
+}
+
+} // namespace media
diff --git a/media/mf/d3d_util.h b/media/mf/d3d_util.h
new file mode 100755
index 0000000..307eec57
--- /dev/null
+++ b/media/mf/d3d_util.h
@@ -0,0 +1,34 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Utility functions for Direct3D Devices.
+
+#ifndef MEDIA_MF_D3D_UTIL_H_
+#define MEDIA_MF_D3D_UTIL_H_
+
+#include <windows.h>
+
+struct IDirect3D9;
+struct IDirect3DDevice9;
+struct IDirect3DDeviceManager9;
+
+namespace media {
+
+// Creates a Direct3D device manager for the given window.
+IDirect3DDeviceManager9* CreateD3DDevManager(HWND video_window,
+ IDirect3D9** direct3d,
+ IDirect3DDevice9** device);
+
+// Resets the D3D device to prevent scaling from happening because it was
+// created with window before resizing occurred. We need to change the back
+// buffer dimensions to the actual video frame dimensions.
+// Both the decoder and device should be initialized before calling this method.
+// Returns: true if successful.
+bool AdjustD3DDeviceBackBufferDimensions(IDirect3DDevice9* device,
+ HWND video_window,
+ int width,
+ int height);
+
+} // namespace media
+#endif // MEDIA_MF_D3D_UTIL_H_
diff --git a/media/mf/mft_h264_decoder.cc b/media/mf/mft_h264_decoder.cc
index 3ff88f6..ffbed15 100644
--- a/media/mf/mft_h264_decoder.cc
+++ b/media/mf/mft_h264_decoder.cc
@@ -7,17 +7,24 @@
#include <algorithm>
#include <string>
+#include <d3d9.h>
+#include <evr.h>
+#include <initguid.h>
#include <mfapi.h>
#include <mferror.h>
+#include <mfidl.h>
+#include <shlwapi.h>
#include <wmcodecdsp.h>
#include "base/callback.h"
#include "base/logging.h"
+#include "base/message_loop.h"
#include "base/scoped_comptr_win.h"
#include "media/base/video_frame.h"
-#pragma comment(lib, "dxva2.lib")
#pragma comment(lib, "d3d9.lib")
+#pragma comment(lib, "dxva2.lib")
+#pragma comment(lib, "evr.lib")
#pragma comment(lib, "mfuuid.lib")
#pragma comment(lib, "mfplat.lib")
@@ -29,7 +36,7 @@ static IMFTransform* GetH264Decoder() {
// Use __uuidof() to avoid linking to a library just for the CLSID.
IMFTransform* dec;
HRESULT hr = CoCreateInstance(__uuidof(CMSH264DecoderMFT), NULL,
- CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&dec));
+ CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&dec));
if (FAILED(hr)) {
LOG(ERROR) << "CoCreateInstance failed " << std::hex << std::showbase << hr;
return NULL;
@@ -49,16 +56,24 @@ static IMFSample* CreateEmptySample() {
return sample.Detach();
}
-// Creates a Media Foundation sample with one buffer of length |buffer_length|.
-static IMFSample* CreateEmptySampleWithBuffer(int buffer_length) {
+// Creates a Media Foundation sample with one buffer of length |buffer_length|
+// on a |align|-byte boundary. Alignment must be a perfect power of 2 or 0.
+// If |align| is 0, then no alignment is specified.
+static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) {
CHECK_GT(buffer_length, 0);
ScopedComPtr<IMFSample> sample;
sample.Attach(CreateEmptySample());
- if (sample.get() == NULL)
+ if (!sample.get())
return NULL;
ScopedComPtr<IMFMediaBuffer> buffer;
HRESULT hr;
- hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
+ if (align == 0) {
+    // Note that MFCreateMemoryBuffer is the same as MFCreateAlignedMemoryBuffer
+ // with the align argument being 0.
+ hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
+ } else {
+ hr = MFCreateAlignedMemoryBuffer(buffer_length, align-1, buffer.Receive());
+ }
if (FAILED(hr)) {
LOG(ERROR) << "Unable to create an empty buffer";
return NULL;
@@ -73,17 +88,20 @@ static IMFSample* CreateEmptySampleWithBuffer(int buffer_length) {
// Creates a Media Foundation sample with one buffer containing a copy of the
// given Annex B stream data.
-// If duration and sample_time are not known, provide 0.
-// min_size specifies the minimum size of the buffer (might be required by
+// If duration and sample time are not known, provide 0.
+// |min_size| specifies the minimum size of the buffer (might be required by
// the decoder for input). The times here should be given in 100ns units.
+// |alignment| specifies the buffer in the sample to be aligned. If no
+// alignment is required, provide 0 or 1.
static IMFSample* CreateInputSample(uint8* stream, int size,
int64 timestamp, int64 duration,
- int min_size) {
- CHECK(stream != NULL);
+ int min_size, int alignment) {
+ CHECK(stream);
CHECK_GT(size, 0);
ScopedComPtr<IMFSample> sample;
- sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size)));
- if (sample.get() == NULL) {
+ sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size),
+ alignment));
+ if (!sample.get()) {
LOG(ERROR) << "Failed to create empty buffer for input";
return NULL;
}
@@ -138,7 +156,9 @@ MftH264Decoder::MftH264Decoder(bool use_dxva)
use_dxva_(use_dxva),
drain_message_sent_(false),
in_buffer_size_(0),
+ in_buffer_alignment_(0),
out_buffer_size_(0),
+ out_buffer_alignment_(0),
frames_read_(0),
frames_decoded_(0),
width_(0),
@@ -148,6 +168,13 @@ MftH264Decoder::MftH264Decoder(bool use_dxva)
}
MftH264Decoder::~MftH264Decoder() {
+ // |decoder_| has to be destroyed before the library uninitialization.
+ if (decoder_)
+ decoder_->Release();
+ if (FAILED(MFShutdown())) {
+ LOG(WARNING) << "Warning: MF failed to shutdown";
+ }
+ CoUninitialize();
}
bool MftH264Decoder::Init(IDirect3DDeviceManager9* dev_manager,
@@ -156,12 +183,16 @@ bool MftH264Decoder::Init(IDirect3DDeviceManager9* dev_manager,
int aspect_num, int aspect_denom,
ReadInputCallback* read_input_cb,
OutputReadyCallback* output_avail_cb) {
- CHECK(read_input_cb != NULL);
- CHECK(output_avail_cb != NULL);
if (initialized_)
return true;
+ if (!read_input_cb || !output_avail_cb) {
+ LOG(ERROR) << "No callback provided";
+ return false;
+ }
read_input_callback_.reset(read_input_cb);
output_avail_callback_.reset(output_avail_cb);
+ if (!InitComMfLibraries())
+ return false;
if (!InitDecoder(dev_manager, frame_rate_num, frame_rate_denom,
width, height, aspect_num, aspect_denom))
return false;
@@ -176,17 +207,17 @@ bool MftH264Decoder::Init(IDirect3DDeviceManager9* dev_manager,
bool MftH264Decoder::SendInput(uint8* data, int size, int64 timestamp,
int64 duration) {
CHECK(initialized_);
- CHECK(data != NULL);
+ CHECK(data);
CHECK_GT(size, 0);
if (drain_message_sent_) {
LOG(ERROR) << "Drain message was already sent, but trying to send more "
- "input to decoder";
+ << "input to decoder";
return false;
}
ScopedComPtr<IMFSample> sample;
sample.Attach(CreateInputSample(data, size, timestamp, duration,
- in_buffer_size_));
- if (sample.get() == NULL) {
+ in_buffer_size_, in_buffer_alignment_));
+ if (!sample.get()) {
LOG(ERROR) << "Failed to convert input stream to sample";
return false;
}
@@ -214,8 +245,9 @@ MftH264Decoder::DecoderOutputState MftH264Decoder::GetOutput() {
ScopedComPtr<IMFSample> output_sample;
if (!use_dxva_) {
// If DXVA is enabled, the decoder will allocate the sample for us.
- output_sample.Attach(CreateEmptySampleWithBuffer(out_buffer_size_));
- if (output_sample.get() == NULL) {
+ output_sample.Attach(CreateEmptySampleWithBuffer(out_buffer_size_,
+ out_buffer_alignment_));
+ if (!output_sample.get()) {
LOG(ERROR) << "GetSample: failed to create empty output sample";
return kNoMemory;
}
@@ -225,7 +257,7 @@ MftH264Decoder::DecoderOutputState MftH264Decoder::GetOutput() {
DWORD status;
for (;;) {
output_data_buffer.dwStreamID = 0;
- output_data_buffer.pSample = output_sample;
+ output_data_buffer.pSample = output_sample.get();
output_data_buffer.dwStatus = 0;
output_data_buffer.pEvents = NULL;
hr = decoder_->ProcessOutput(0, // No flags
@@ -233,7 +265,7 @@ MftH264Decoder::DecoderOutputState MftH264Decoder::GetOutput() {
&output_data_buffer,
&status);
IMFCollection* events = output_data_buffer.pEvents;
- if (events != NULL) {
+ if (events) {
LOG(INFO) << "Got events from ProcessOuput, but discarding";
events->Release();
}
@@ -243,6 +275,7 @@ MftH264Decoder::DecoderOutputState MftH264Decoder::GetOutput() {
if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
if (!SetDecoderOutputMediaType(output_format_)) {
LOG(ERROR) << "Failed to reset output type";
+ MessageLoop::current()->Quit();
return kResetOutputStreamFailed;
} else {
LOG(INFO) << "Reset output type done";
@@ -254,18 +287,21 @@ MftH264Decoder::DecoderOutputState MftH264Decoder::GetOutput() {
// anymore output then we know the decoder has processed everything.
if (drain_message_sent_) {
LOG(INFO) << "Drain message was already sent + no output => done";
+ MessageLoop::current()->Quit();
return kNoMoreOutput;
} else {
if (!ReadAndProcessInput()) {
LOG(INFO) << "Failed to read/process input. Sending drain message";
if (!SendDrainMessage()) {
LOG(ERROR) << "Failed to send drain message";
+ MessageLoop::current()->Quit();
return kNoMoreOutput;
}
}
continue;
}
} else {
+ MessageLoop::current()->Quit();
return kUnspecifiedError;
}
} else {
@@ -275,9 +311,10 @@ MftH264Decoder::DecoderOutputState MftH264Decoder::GetOutput() {
// If dxva is enabled, we did not provide a sample to ProcessOutput,
// i.e. output_sample is NULL.
output_sample.Attach(output_data_buffer.pSample);
- if (output_sample.get() == NULL) {
+ if (!output_sample.get()) {
LOG(ERROR) << "Output sample using DXVA is NULL - ProcessOutput did "
<< "not provide it!";
+ MessageLoop::current()->Quit();
return kOutputSampleError;
}
}
@@ -287,6 +324,7 @@ MftH264Decoder::DecoderOutputState MftH264Decoder::GetOutput() {
if (FAILED(hr)) {
LOG(ERROR) << "Failed to get sample duration or timestamp "
<< std::hex << hr;
+ MessageLoop::current()->Quit();
return kOutputSampleError;
}
@@ -300,16 +338,19 @@ MftH264Decoder::DecoderOutputState MftH264Decoder::GetOutput() {
hr = output_sample->GetBufferCount(&buf_count);
if (FAILED(hr)) {
LOG(ERROR) << "Failed to get buff count, hr = " << std::hex << hr;
+ MessageLoop::current()->Quit();
return kOutputSampleError;
}
if (buf_count == 0) {
LOG(ERROR) << "buf_count is 0, dropping sample";
+ MessageLoop::current()->Quit();
return kOutputSampleError;
}
ScopedComPtr<IMFMediaBuffer> out_buffer;
hr = output_sample->GetBufferByIndex(0, out_buffer.Receive());
if (FAILED(hr)) {
LOG(ERROR) << "Failed to get decoded output buffer";
+ MessageLoop::current()->Quit();
return kOutputSampleError;
}
@@ -317,10 +358,10 @@ MftH264Decoder::DecoderOutputState MftH264Decoder::GetOutput() {
// of using the data field.
// In NV12, there are only 2 planes - the Y plane, and the interleaved UV
// plane. Both have the same strides.
- uint8* null_data[2] = { NULL, NULL };
- int32 strides[2] = { stride_, output_format_ == MFVideoFormat_NV12 ?
- stride_ :
- stride_ / 2 };
+ uint8* null_data[3] = { NULL, NULL, NULL };
+ int32 uv_stride = output_format_ == MFVideoFormat_NV12 ? stride_
+ : stride_ / 2;
+ int32 strides[3] = { stride_, uv_stride, uv_stride };
scoped_refptr<VideoFrame> decoded_frame;
VideoFrame::CreateFrameExternal(
use_dxva_ ? VideoFrame::TYPE_DIRECT3DSURFACE :
@@ -336,7 +377,7 @@ MftH264Decoder::DecoderOutputState MftH264Decoder::GetOutput() {
base::TimeDelta::FromMicroseconds(duration),
out_buffer.Detach(),
&decoded_frame);
- CHECK(decoded_frame.get() != NULL);
+ CHECK(decoded_frame.get());
frames_decoded_++;
output_avail_callback_->Run(decoded_frame);
return kOutputOk;
@@ -346,12 +387,28 @@ MftH264Decoder::DecoderOutputState MftH264Decoder::GetOutput() {
// Private methods
+bool MftH264Decoder::InitComMfLibraries() {
+ HRESULT hr;
+ hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "CoInit fail";
+ return false;
+ }
+ hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "MFStartup fail";
+ CoUninitialize();
+ return false;
+ }
+ return true;
+}
+
bool MftH264Decoder::InitDecoder(IDirect3DDeviceManager9* dev_manager,
int frame_rate_num, int frame_rate_denom,
int width, int height,
int aspect_num, int aspect_denom) {
- decoder_.Attach(GetH264Decoder());
- if (!decoder_.get())
+ decoder_ = GetH264Decoder();
+ if (!decoder_)
return false;
if (use_dxva_ && !SetDecoderD3d9Manager(dev_manager))
return false;
@@ -365,9 +422,15 @@ bool MftH264Decoder::InitDecoder(IDirect3DDeviceManager9* dev_manager,
bool MftH264Decoder::SetDecoderD3d9Manager(
IDirect3DDeviceManager9* dev_manager) {
- DCHECK(use_dxva_) << "SetDecoderD3d9Manager should only be called if DXVA is "
- << "enabled";
- CHECK(dev_manager != NULL);
+ if (!use_dxva_) {
+ LOG(ERROR) << "SetDecoderD3d9Manager should only be called if DXVA is "
+ << "enabled";
+ return false;
+ }
+ if (!dev_manager) {
+ LOG(ERROR) << "dev_manager cannot be NULL";
+ return false;
+ }
HRESULT hr;
hr = decoder_->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER,
reinterpret_cast<ULONG_PTR>(dev_manager));
@@ -382,7 +445,7 @@ bool MftH264Decoder::SetDecoderMediaTypes(int frame_rate_num,
int frame_rate_denom,
int width, int height,
int aspect_num, int aspect_denom) {
- DCHECK(decoder_.get());
+ DCHECK(decoder_);
if (!SetDecoderInputMediaType(frame_rate_num, frame_rate_denom,
width, height,
aspect_num, aspect_denom))
@@ -519,7 +582,7 @@ bool MftH264Decoder::SendStartMessage() {
// to do it ourselves and make sure they're the correct size.
// Exception is when dxva is enabled, the decoder will allocate output.
bool MftH264Decoder::GetStreamsInfoAndBufferReqs() {
- DCHECK(decoder_.get());
+ DCHECK(decoder_);
HRESULT hr;
MFT_INPUT_STREAM_INFO input_stream_info;
hr = decoder_->GetInputStreamInfo(0, &input_stream_info);
@@ -539,7 +602,7 @@ bool MftH264Decoder::GetStreamsInfoAndBufferReqs() {
LOG(INFO) << "Min buffer size: " << input_stream_info.cbSize;
LOG(INFO) << "Max lookahead: " << input_stream_info.cbMaxLookahead;
LOG(INFO) << "Alignment: " << input_stream_info.cbAlignment;
- CHECK_EQ(input_stream_info.cbAlignment, 0u);
+ in_buffer_alignment_ = input_stream_info.cbAlignment;
in_buffer_size_ = input_stream_info.cbSize;
MFT_OUTPUT_STREAM_INFO output_stream_info;
@@ -558,7 +621,7 @@ bool MftH264Decoder::GetStreamsInfoAndBufferReqs() {
CHECK_EQ(output_stream_info.dwFlags, use_dxva_ ? 0x107u : 0x7u);
LOG(INFO) << "Min buffer size: " << output_stream_info.cbSize;
LOG(INFO) << "Alignment: " << output_stream_info.cbAlignment;
- CHECK_EQ(output_stream_info.cbAlignment, 0u);
+ out_buffer_alignment_ = output_stream_info.cbAlignment;
out_buffer_size_ = output_stream_info.cbSize;
return true;
@@ -571,7 +634,7 @@ bool MftH264Decoder::ReadAndProcessInput() {
int64 timestamp;
read_input_callback_->Run(&input_stream_dummy, &size, &timestamp, &duration);
scoped_array<uint8> input_stream(input_stream_dummy);
- if (input_stream.get() == NULL) {
+ if (!input_stream.get()) {
LOG(INFO) << "No more input";
return false;
} else {
diff --git a/media/mf/mft_h264_decoder.h b/media/mf/mft_h264_decoder.h
index 3a57d3e..1590331 100644
--- a/media/mf/mft_h264_decoder.h
+++ b/media/mf/mft_h264_decoder.h
@@ -19,6 +19,7 @@
#include "base/callback.h"
#include "base/scoped_ptr.h"
#include "base/scoped_comptr_win.h"
+#include "testing/gtest/include/gtest/gtest_prod.h"
struct IDirect3DDeviceManager9;
struct IMFTransform;
@@ -28,10 +29,8 @@ namespace media {
class VideoFrame;
// A decoder that takes samples of Annex B streams then outputs decoded frames.
-class MftH264Decoder {
+class MftH264Decoder : public base::RefCountedThreadSafe<MftH264Decoder> {
public:
- typedef Callback4<uint8**, int*, int64*, int64*>::Type ReadInputCallback;
- typedef Callback1<scoped_refptr<VideoFrame> >::Type OutputReadyCallback;
enum DecoderOutputState {
kOutputOk = 0,
kResetOutputStreamFailed,
@@ -40,6 +39,9 @@ class MftH264Decoder {
kNoMemory,
kOutputSampleError
};
+ typedef Callback4<uint8**, int*, int64*, int64*>::Type ReadInputCallback;
+ typedef Callback1<scoped_refptr<VideoFrame> >::Type OutputReadyCallback;
+
explicit MftH264Decoder(bool use_dxva);
~MftH264Decoder();
@@ -80,6 +82,13 @@ class MftH264Decoder {
int height() const { return height_; }
private:
+ friend class MftH264DecoderTest;
+ FRIEND_TEST(MftH264DecoderTest, SendDrainMessageBeforeInitDeathTest);
+ FRIEND_TEST(MftH264DecoderTest, SendDrainMessageAtInit);
+ FRIEND_TEST(MftH264DecoderTest, DrainOnEndOfInputStream);
+ FRIEND_TEST(MftH264DecoderTest, NoOutputOnGarbageInput);
+
+ bool InitComMfLibraries();
bool InitDecoder(IDirect3DDeviceManager9* dev_manager,
int frame_rate_num, int frame_rate_denom,
int width, int height,
@@ -101,16 +110,20 @@ class MftH264Decoder {
// Returns: true if the drain message was sent successfully.
bool SendDrainMessage();
+ // |output_error_callback_| should stop the message loop.
scoped_ptr<ReadInputCallback> read_input_callback_;
scoped_ptr<OutputReadyCallback> output_avail_callback_;
- ScopedComPtr<IMFTransform> decoder_;
+ IMFTransform* decoder_;
bool initialized_;
bool use_dxva_;
bool drain_message_sent_;
- // Minimum input and output buffer sizes as required by the decoder.
+ // Minimum input and output buffer sizes/alignment required by the decoder.
+ // If |buffer_alignment_| is zero, then the buffer needs not be aligned.
int in_buffer_size_;
+ int in_buffer_alignment_;
int out_buffer_size_;
+ int out_buffer_alignment_;
int frames_read_;
int frames_decoded_;
int width_;
diff --git a/media/mf/main.cc b/media/mf/mft_h264_decoder_example.cc
index 50dd9ac..a323e62 100644
--- a/media/mf/main.cc
+++ b/media/mf/mft_h264_decoder_example.cc
@@ -10,11 +10,12 @@
#include <d3d9.h>
#include <dxva2api.h>
-#include <mfapi.h>
+#include "base/at_exit.h"
#include "base/command_line.h"
#include "base/file_path.h"
#include "base/logging.h"
+#include "base/message_loop.h"
#include "base/scoped_comptr_win.h"
#include "base/scoped_ptr.h"
#include "base/time.h"
@@ -22,21 +23,33 @@
#include "media/base/video_frame.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/file_protocol.h"
+#include "media/mf/basic_renderer.h"
+#include "media/mf/d3d_util.h"
#include "media/mf/file_reader_util.h"
#include "media/mf/mft_h264_decoder.h"
+using base::AtExitManager;
using base::Time;
using base::TimeDelta;
+using media::BasicRenderer;
+using media::NullRenderer;
using media::FFmpegFileReader;
using media::MftH264Decoder;
+using media::MftRenderer;
using media::VideoFrame;
namespace {
+const wchar_t* const kWindowClass = L"Chrome_H264_MFT";
+const wchar_t* const kWindowTitle = L"H264_MFT";
+const int kWindowStyleFlags = (WS_OVERLAPPEDWINDOW | WS_VISIBLE) &
+ ~(WS_MAXIMIZEBOX | WS_THICKFRAME);
+
void usage() {
static char* usage_msg =
- "Usage: mft_h264_decoder [--enable-dxva] --input-file=FILE\n"
+ "Usage: mft_h264_decoder [--enable-dxva] [--render] --input-file=FILE\n"
"enable-dxva: Enables hardware accelerated decoding\n"
+ "render: Render to window\n"
"To display this message: mft_h264_decoder --help";
fprintf(stderr, "%s\n", usage_msg);
}
@@ -50,112 +63,59 @@ static bool InitFFmpeg() {
return true;
}
-bool InitComLibraries() {
+bool InitComLibrary() {
HRESULT hr;
hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
if (FAILED(hr)) {
LOG(ERROR) << "CoInit fail";
return false;
}
- hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
- if (FAILED(hr)) {
- LOG(ERROR) << "MFStartup fail";
- CoUninitialize();
- return false;
- }
return true;
}
-void ShutdownComLibraries() {
- HRESULT hr;
- hr = MFShutdown();
- if (FAILED(hr)) {
- LOG(WARNING) << "Warning: MF failed to shutdown";
- }
- CoUninitialize();
-}
-
-static IDirect3DDeviceManager9* CreateD3DDevManager(HWND video_window,
- IDirect3D9** direct3d,
- IDirect3DDevice9** device) {
- CHECK(video_window != NULL);
- CHECK(direct3d != NULL);
- CHECK(device != NULL);
-
- ScopedComPtr<IDirect3DDeviceManager9> dev_manager;
- ScopedComPtr<IDirect3D9> d3d;
- d3d.Attach(Direct3DCreate9(D3D_SDK_VERSION));
- if (d3d == NULL) {
- LOG(ERROR) << "Failed to create D3D9";
- return NULL;
- }
- D3DPRESENT_PARAMETERS present_params = {0};
-
- // Once we know the dimensions, we need to reset using
- // AdjustD3DDeviceBackBufferDimensions().
- present_params.BackBufferWidth = 0;
- present_params.BackBufferHeight = 0;
- present_params.BackBufferFormat = D3DFMT_UNKNOWN;
- present_params.BackBufferCount = 1;
- present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
- present_params.hDeviceWindow = video_window;
- present_params.Windowed = TRUE;
- present_params.Flags = D3DPRESENTFLAG_VIDEO;
- present_params.FullScreen_RefreshRateInHz = 0;
- present_params.PresentationInterval = 0;
-
- ScopedComPtr<IDirect3DDevice9> temp_device;
-
- // D3DCREATE_HARDWARE_VERTEXPROCESSING specifies hardware vertex processing.
- // (Is it even needed for just video decoding?)
- HRESULT hr = d3d->CreateDevice(D3DADAPTER_DEFAULT,
- D3DDEVTYPE_HAL,
- NULL,
- D3DCREATE_HARDWARE_VERTEXPROCESSING,
- &present_params,
- temp_device.Receive());
- if (FAILED(hr)) {
- LOG(ERROR) << "Failed to create D3D Device";
- return NULL;
- }
- UINT dev_manager_reset_token = 0;
- hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token,
- dev_manager.Receive());
- if (FAILED(hr)) {
- LOG(ERROR) << "Couldn't create D3D Device manager";
- return NULL;
+// Creates a window with the given width and height.
+// Returns: A handle to the window on success, NULL otherwise.
+static HWND CreateDrawWindow(int width, int height) {
+ WNDCLASS window_class = {0};
+ window_class.lpszClassName = kWindowClass;
+ window_class.hInstance = NULL;
+ window_class.hbrBackground = 0;
+ window_class.lpfnWndProc = DefWindowProc;
+ window_class.hCursor = 0;
+
+ if (RegisterClass(&window_class) == 0) {
+ LOG(ERROR) << "Failed to register window class";
+    return NULL;
}
- hr = dev_manager->ResetDevice(temp_device.get(), dev_manager_reset_token);
- if (FAILED(hr)) {
- LOG(ERROR) << "Failed to set device to device manager";
+ HWND window = CreateWindow(kWindowClass,
+ kWindowTitle,
+ kWindowStyleFlags,
+ 100,
+ 100,
+ width,
+ height,
+ NULL,
+ NULL,
+ NULL,
+ NULL);
+ if (window == NULL) {
+ LOG(ERROR) << "Failed to create window";
return NULL;
}
- *direct3d = d3d.Detach();
- *device = temp_device.Detach();
- return dev_manager.Detach();
+ return window;
}
-static void ReleaseOutputBuffer(VideoFrame* frame) {
- if (frame != NULL &&
- frame->type() == VideoFrame::TYPE_MFBUFFER ||
- frame->type() == VideoFrame::TYPE_DIRECT3DSURFACE) {
- static_cast<IMFMediaBuffer*>(frame->private_buffer())->Release();
- }
-}
-
-class FakeRenderer {
- public:
- FakeRenderer() {}
- ~FakeRenderer() {}
- void ProcessFrame(scoped_refptr<VideoFrame> frame) {
- ReleaseOutputBuffer(frame.get());
- }
-};
-
-static int Run(bool use_dxva, const std::string& input_file) {
+static int Run(bool use_dxva, bool render, const std::string& input_file) {
// If we are not rendering, we need a window anyway to create a D3D device,
// so we will just use the desktop window. (?)
HWND window = GetDesktopWindow();
+ if (render) {
+ window = CreateDrawWindow(640, 480);
+ if (window == NULL) {
+ LOG(ERROR) << "Failed to create window";
+ return -1;
+ }
+ }
scoped_ptr<FFmpegFileReader> reader(new FFmpegFileReader(input_file));
if (reader.get() == NULL || !reader->Initialize()) {
LOG(ERROR) << "Failed to create/initialize reader";
@@ -177,18 +137,22 @@ static int Run(bool use_dxva, const std::string& input_file) {
ScopedComPtr<IDirect3DDevice9> device;
ScopedComPtr<IDirect3DDeviceManager9> dev_manager;
if (use_dxva) {
- dev_manager.Attach(CreateD3DDevManager(window,
- d3d9.Receive(),
- device.Receive()));
+ dev_manager.Attach(media::CreateD3DDevManager(window,
+ d3d9.Receive(),
+ device.Receive()));
if (dev_manager.get() == NULL) {
LOG(ERROR) << "Cannot create D3D9 manager";
return -1;
}
}
- scoped_ptr<MftH264Decoder> mft(new MftH264Decoder(use_dxva));
- scoped_ptr<FakeRenderer> renderer(new FakeRenderer());
-
- if (mft.get() == NULL || renderer.get() == NULL) {
+ scoped_refptr<MftH264Decoder> mft(new MftH264Decoder(use_dxva));
+ scoped_refptr<MftRenderer> renderer;
+ if (render) {
+ renderer = new BasicRenderer(mft.get(), window, device);
+ } else {
+ renderer = new NullRenderer(mft.get());
+ }
+ if (mft.get() == NULL) {
LOG(ERROR) << "Failed to create fake renderer / MFT";
return -1;
}
@@ -197,15 +161,39 @@ static int Run(bool use_dxva, const std::string& input_file) {
width, height,
aspect_ratio_num, aspect_ratio_denom,
NewCallback(reader.get(), &FFmpegFileReader::Read2),
- NewCallback(renderer.get(), &FakeRenderer::ProcessFrame))) {
+ NewCallback(renderer.get(), &MftRenderer::ProcessFrame))) {
LOG(ERROR) << "Failed to initialize mft";
return -1;
}
- Time decode_start(Time::Now());
- while (true) {
- if (MftH264Decoder::kOutputOk != mft->GetOutput())
- break;
+ // If rendering, resize the window to fit the video frames.
+ if (render) {
+ RECT rect;
+ rect.left = 0;
+ rect.right = mft->width();
+ rect.top = 0;
+ rect.bottom = mft->height();
+ AdjustWindowRect(&rect, kWindowStyleFlags, FALSE);
+ if (!MoveWindow(window, 0, 0, rect.right - rect.left,
+ rect.bottom - rect.top, TRUE)) {
+ LOG(WARNING) << "Warning: Failed to resize window";
+ }
}
+ if (use_dxva) {
+ // Reset the device's back buffer dimensions to match the window's
+ // dimensions.
+ if (!media::AdjustD3DDeviceBackBufferDimensions(device.get(),
+ window,
+ mft->width(),
+ mft->height())) {
+ LOG(WARNING) << "Warning: Failed to reset device to have correct "
+ << "backbuffer dimension, scaling might occur";
+ }
+ }
+ Time decode_start(Time::Now());
+ MessageLoopForUI::current()->PostTask(FROM_HERE,
+ NewRunnableMethod(renderer.get(), &MftRenderer::StartPlayback));
+ MessageLoopForUI::current()->Run(NULL);
+
TimeDelta decode_time = Time::Now() - decode_start;
printf("All done, frames read: %d, frames decoded: %d\n",
@@ -217,6 +205,8 @@ static int Run(bool use_dxva, const std::string& input_file) {
} // namespace
int main(int argc, char** argv) {
+ AtExitManager at_exit;
+ MessageLoopForUI message_loop;
CommandLine::Init(argc, argv);
if (argc == 1) {
fprintf(stderr, "Not enough arguments\n");
@@ -229,6 +219,7 @@ int main(int argc, char** argv) {
return -1;
}
bool use_dxva = cmd_line.HasSwitch("enable-dxva");
+ bool render = cmd_line.HasSwitch("render");
std::string input_file = cmd_line.GetSwitchValueASCII("input-file");
if (input_file.empty()) {
fprintf(stderr, "No input file provided\n");
@@ -236,18 +227,19 @@ int main(int argc, char** argv) {
return -1;
}
printf("enable-dxva: %d\n", use_dxva);
+ printf("render: %d\n", render);
printf("input-file: %s\n", input_file.c_str());
if (!InitFFmpeg()) {
LOG(ERROR) << "InitFFMpeg() failed";
return -1;
}
- if (!InitComLibraries()) {
+ if (!InitComLibrary()) {
LOG(ERROR) << "InitComLibraries() failed";
return -1;
}
- int ret = Run(use_dxva, input_file);
- ShutdownComLibraries();
+ int ret = Run(use_dxva, render, input_file);
+
printf("Done\n");
return ret;
}
diff --git a/media/mf/test/mft_h264_decoder_unittest.cc b/media/mf/test/mft_h264_decoder_unittest.cc
new file mode 100755
index 0000000..8bc7b41
--- /dev/null
+++ b/media/mf/test/mft_h264_decoder_unittest.cc
@@ -0,0 +1,278 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <mfapi.h>
+
+#include "base/file_path.h"
+#include "base/file_util.h"
+#include "base/message_loop.h"
+#include "base/path_service.h"
+#include "base/scoped_ptr.h"
+#include "base/string_util.h"
+#include "media/base/video_frame.h"
+#include "media/mf/d3d_util.h"
+#include "media/mf/file_reader_util.h"
+#include "media/mf/mft_h264_decoder.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+static const int kDecoderMaxWidth = 1920;
+static const int kDecoderMaxHeight = 1088;
+
+class FakeMftReader {
+ public:
+ FakeMftReader() : frames_remaining_(20) {}
+ explicit FakeMftReader(int count) : frames_remaining_(count) {}
+ ~FakeMftReader() {}
+
+ // Provides garbage input to the decoder.
+ void ReadCallback(uint8** buf, int* sz, int64* ts, int64* dur) {
+ if (frames_remaining_ > 0) {
+ *sz = 4096;
+ *buf = new uint8[*sz];
+ memset(*buf, 42, *sz);
+ *ts = 50000000 - frames_remaining_ * 10000;
+ *dur = 5000;
+ --frames_remaining_;
+ } else {
+ // Emulate end of stream on the last "frame".
+ *buf = NULL;
+ *sz = 0;
+ }
+ }
+ int frames_remaining() const { return frames_remaining_; }
+
+ private:
+ int frames_remaining_;
+};
+
+class FakeMftRenderer : public base::RefCountedThreadSafe<FakeMftRenderer> {
+ public:
+ explicit FakeMftRenderer(scoped_refptr<MftH264Decoder> decoder)
+ : decoder_(decoder),
+ count_(0) {
+ }
+
+ ~FakeMftRenderer() {}
+
+ void WriteCallback(scoped_refptr<VideoFrame> frame) {
+ static_cast<IMFMediaBuffer*>(frame->private_buffer())->Release();
+ ++count_;
+ MessageLoop::current()->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(decoder_.get(), &MftH264Decoder::GetOutput));
+ }
+
+ void Start() {
+ MessageLoop::current()->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(decoder_.get(), &MftH264Decoder::GetOutput));
+ }
+
+ int count() const { return count_; }
+
+ private:
+ scoped_refptr<MftH264Decoder> decoder_;
+ int count_;
+};
+
+class MftH264DecoderTest : public testing::Test {
+ public:
+ MftH264DecoderTest() {}
+ virtual ~MftH264DecoderTest() {}
+
+ protected:
+ virtual void SetUp() {}
+ virtual void TearDown() {}
+};
+
+// A simple test case for init/deinit of MF/COM libraries.
+TEST_F(MftH264DecoderTest, SimpleInit) {
+ EXPECT_HRESULT_SUCCEEDED(
+ CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE));
+ EXPECT_HRESULT_SUCCEEDED(MFStartup(MF_VERSION, MFSTARTUP_FULL));
+ EXPECT_HRESULT_SUCCEEDED(MFShutdown());
+ CoUninitialize();
+}
+
+TEST_F(MftH264DecoderTest, InitWithDxvaButNoD3DDevice) {
+ scoped_refptr<MftH264Decoder> decoder(new MftH264Decoder(true));
+ ASSERT_TRUE(decoder.get() != NULL);
+ FakeMftReader reader;
+ scoped_refptr<FakeMftRenderer> renderer(new FakeMftRenderer(decoder));
+ EXPECT_FALSE(
+ decoder->Init(NULL, 6, 7, 111, 222, 3, 1,
+ NewCallback(&reader, &FakeMftReader::ReadCallback),
+ NewCallback(renderer.get(),
+ &FakeMftRenderer::WriteCallback)));
+}
+
+TEST_F(MftH264DecoderTest, InitMissingCallbacks) {
+ scoped_refptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+ ASSERT_TRUE(decoder.get() != NULL);
+ EXPECT_FALSE(decoder->Init(NULL, 1, 3, 111, 222, 56, 34, NULL, NULL));
+}
+
+TEST_F(MftH264DecoderTest, InitWithNegativeDimensions) {
+ scoped_refptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+ ASSERT_TRUE(decoder.get() != NULL);
+ FakeMftReader reader;
+ scoped_refptr<FakeMftRenderer> renderer(new FakeMftRenderer(decoder));
+ EXPECT_TRUE(decoder->Init(NULL, 0, 6, -123, -456, 22, 4787,
+ NewCallback(&reader, &FakeMftReader::ReadCallback),
+ NewCallback(renderer.get(),
+ &FakeMftRenderer::WriteCallback)));
+
+ // By default, decoder should "guess" the dimensions to be the maximum.
+ EXPECT_EQ(kDecoderMaxWidth, decoder->width());
+ EXPECT_EQ(kDecoderMaxHeight, decoder->height());
+}
+
+TEST_F(MftH264DecoderTest, InitWithTooHighDimensions) {
+ scoped_refptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+ ASSERT_TRUE(decoder.get() != NULL);
+ FakeMftReader reader;
+ scoped_refptr<FakeMftRenderer> renderer(new FakeMftRenderer(decoder));
+ EXPECT_TRUE(decoder->Init(NULL, 0, 0,
+ kDecoderMaxWidth + 1, kDecoderMaxHeight + 1,
+ 0, 0,
+ NewCallback(&reader, &FakeMftReader::ReadCallback),
+ NewCallback(renderer.get(),
+ &FakeMftRenderer::WriteCallback)));
+
+ // Decoder should truncate the dimensions to the maximum supported.
+ EXPECT_EQ(kDecoderMaxWidth, decoder->width());
+ EXPECT_EQ(kDecoderMaxHeight, decoder->height());
+}
+
+TEST_F(MftH264DecoderTest, InitWithNormalDimensions) {
+ scoped_refptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+ ASSERT_TRUE(decoder.get() != NULL);
+ FakeMftReader reader;
+ scoped_refptr<FakeMftRenderer> renderer(new FakeMftRenderer(decoder));
+ int width = 1024, height = 768;
+ EXPECT_TRUE(decoder->Init(NULL, 0, 0, width, height, 0, 0,
+ NewCallback(&reader, &FakeMftReader::ReadCallback),
+ NewCallback(renderer.get(),
+ &FakeMftRenderer::WriteCallback)));
+
+ EXPECT_EQ(width, decoder->width());
+ EXPECT_EQ(height, decoder->height());
+}
+
+// SendDrainMessage() is not a public method. Nonetheless it does not hurt
+// to test that the decoder should not do things before it is initialized.
+TEST_F(MftH264DecoderTest, SendDrainMessageBeforeInitDeathTest) {
+ scoped_refptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+ ASSERT_TRUE(decoder.get() != NULL);
+ EXPECT_DEATH({ decoder->SendDrainMessage(); }, ".*initialized_.*");
+}
+
+// Tests draining after init, but before any input is sent.
+TEST_F(MftH264DecoderTest, SendDrainMessageAtInit) {
+ scoped_refptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+ ASSERT_TRUE(decoder.get() != NULL);
+ FakeMftReader reader;
+ scoped_refptr<FakeMftRenderer> renderer(new FakeMftRenderer(decoder));
+ ASSERT_TRUE(decoder->Init(NULL, 0, 0, 111, 222, 0, 0,
+ NewCallback(&reader, &FakeMftReader::ReadCallback),
+ NewCallback(renderer.get(),
+ &FakeMftRenderer::WriteCallback)));
+ EXPECT_TRUE(decoder->SendDrainMessage());
+ EXPECT_TRUE(decoder->drain_message_sent_);
+}
+
+TEST_F(MftH264DecoderTest, DrainOnEndOfInputStream) {
+ MessageLoop loop;
+ scoped_refptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+ ASSERT_TRUE(decoder.get() != NULL);
+
+ // No frames, outputs a NULL indicating end-of-stream
+ FakeMftReader reader(0);
+ scoped_refptr<FakeMftRenderer> renderer(new FakeMftRenderer(decoder));
+ ASSERT_TRUE(decoder->Init(NULL, 0, 0, 111, 222, 0, 0,
+ NewCallback(&reader, &FakeMftReader::ReadCallback),
+ NewCallback(renderer.get(),
+ &FakeMftRenderer::WriteCallback)));
+ MessageLoop::current()->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(renderer.get(), &FakeMftRenderer::Start));
+ MessageLoop::current()->Run();
+ EXPECT_TRUE(decoder->drain_message_sent());
+}
+
+// 100 input garbage samples should be enough to test whether the decoder
+// will output decoded garbage frames.
+TEST_F(MftH264DecoderTest, NoOutputOnGarbageInput) {
+ MessageLoop loop;
+ scoped_refptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+ ASSERT_TRUE(decoder.get() != NULL);
+ int num_frames = 100;
+ FakeMftReader reader(num_frames);
+ scoped_refptr<FakeMftRenderer> renderer(new FakeMftRenderer(decoder));
+ ASSERT_TRUE(decoder->Init(NULL, 0, 0, 111, 222, 0, 0,
+ NewCallback(&reader, &FakeMftReader::ReadCallback),
+ NewCallback(renderer.get(),
+ &FakeMftRenderer::WriteCallback)));
+ MessageLoop::current()->PostTask(
+ FROM_HERE, NewRunnableMethod(renderer.get(), &FakeMftRenderer::Start));
+ MessageLoop::current()->Run();
+
+ // Decoder should accept corrupt input data and silently ignore it.
+ EXPECT_EQ(num_frames, decoder->frames_read());
+
+ // Decoder should not have output anything if input is corrupt.
+ EXPECT_EQ(0, decoder->frames_decoded());
+ EXPECT_EQ(0, renderer->count());
+}
+
+FilePath GetBearVideoFilePath(const std::string& file_name) {
+ FilePath path;
+ PathService::Get(base::DIR_SOURCE_ROOT, &path);
+ path = path.AppendASCII("media")
+ .AppendASCII("test")
+ .AppendASCII("data")
+ .AppendASCII(file_name.c_str());
+ return path;
+}
+
+// Decodes media/test/data/bear.1280x720.mp4 which is expected to be a valid
+// H.264 video.
+TEST_F(MftH264DecoderTest, DecodeValidVideoDxva) {
+ MessageLoop loop;
+ FilePath path = GetBearVideoFilePath("bear.1280x720.mp4");
+ ASSERT_TRUE(file_util::PathExists(path));
+
+ ScopedComPtr<IDirect3D9> d3d9;
+ ScopedComPtr<IDirect3DDevice9> device;
+ ScopedComPtr<IDirect3DDeviceManager9> dev_manager;
+ dev_manager.Attach(CreateD3DDevManager(GetDesktopWindow(),
+ d3d9.Receive(),
+ device.Receive()));
+ ASSERT_TRUE(dev_manager.get() != NULL);
+
+ scoped_refptr<MftH264Decoder> decoder(new MftH264Decoder(true));
+ ASSERT_TRUE(decoder.get() != NULL);
+ FFmpegFileReader reader(WideToASCII(path.value()));
+ ASSERT_TRUE(reader.Initialize());
+ scoped_refptr<FakeMftRenderer> renderer(new FakeMftRenderer(decoder));
+ ASSERT_TRUE(decoder->Init(dev_manager.get(), 0, 0, 111, 222, 0, 0,
+ NewCallback(&reader, &FFmpegFileReader::Read2),
+ NewCallback(renderer.get(),
+ &FakeMftRenderer::WriteCallback)));
+ MessageLoop::current()->PostTask(
+ FROM_HERE,
+ NewRunnableMethod(renderer.get(), &FakeMftRenderer::Start));
+ MessageLoop::current()->Run();
+
+  // If the video is valid, then it should output frames. However, for some
+  // videos, the number of frames decoded can be off by one from frames read.
+ EXPECT_EQ(82, decoder->frames_read());
+ EXPECT_LE(decoder->frames_read() - decoder->frames_decoded(), 1);
+}
+
+} // namespace media
diff --git a/media/mf/test/run_all_unittests.cc b/media/mf/test/run_all_unittests.cc
new file mode 100755
index 0000000..6551d59
--- /dev/null
+++ b/media/mf/test/run_all_unittests.cc
@@ -0,0 +1,27 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <cstdio>
+
+#include "base/test/test_suite.h"
+#include "media/base/media.h"
+#include "media/ffmpeg/ffmpeg_common.h"
+#include "media/ffmpeg/file_protocol.h"
+
+static bool InitFFmpeg() {
+ if (!media::InitializeMediaLibrary(FilePath()))
+ return false;
+ avcodec_init();
+ av_register_all();
+ av_register_protocol2(&kFFmpegFileProtocol, sizeof(kFFmpegFileProtocol));
+ return true;
+}
+
+int main(int argc, char** argv) {
+ if (!InitFFmpeg()) {
+ fprintf(stderr, "Failed to init ffmpeg\n");
+ return -1;
+ }
+ return TestSuite(argc, argv).Run();
+}