author    hclam@chromium.org <hclam@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2010-07-20 20:45:27 +0000
committer hclam@chromium.org <hclam@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2010-07-20 20:45:27 +0000
commit    3d10fbfc4f656cb342171aa3cdbe7723c0bb5cec (patch)
tree      28c3774a8b4e5bcad3525d793aa17ee97bb2a17c /media
parent    f2c24eb2781f252e6311b61bd1d0c8a1cb671389 (diff)
A tool using Source Reader in Media Foundation for H264 decoding
Reviewed: http://codereview.chromium.org/2812050/show
Submitted for: imcheng@google.com
Review URL: http://codereview.chromium.org/3003002

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@53090 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'media')
-rw-r--r--  media/media.gyp                          21
-rw-r--r--  media/tools/mfdecoder/README.chromium    49
-rw-r--r--  media/tools/mfdecoder/main.cc           552
-rw-r--r--  media/tools/mfdecoder/mfdecoder.cc      329
-rw-r--r--  media/tools/mfdecoder/mfdecoder.h        89
5 files changed, 1040 insertions, 0 deletions
diff --git a/media/media.gyp b/media/media.gyp
index 7ef2bae..1f866ee 100644
--- a/media/media.gyp
+++ b/media/media.gyp
@@ -362,6 +362,27 @@
},
},
},
+ {
+ 'target_name': 'mfdecoder',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ ],
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'tools/mfdecoder/main.cc',
+ 'tools/mfdecoder/mfdecoder.h',
+ 'tools/mfdecoder/mfdecoder.cc',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '1', # Set /SUBSYSTEM:CONSOLE
+ },
+ },
+ },
],
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
diff --git a/media/tools/mfdecoder/README.chromium b/media/tools/mfdecoder/README.chromium
new file mode 100644
index 0000000..ac4994b
--- /dev/null
+++ b/media/tools/mfdecoder/README.chromium
@@ -0,0 +1,49 @@
+This tool decodes an H.264 video into YV12 frames and draws them onto
+a window.
+
+The program uses Media Foundation to do much of the work. Specifically, it
+uses the Source Reader (IMFSourceReader) to read from a file and the built-in
+H.264 decoder (as an IMFTransform) to decode the video file into YV12 frames.
+The decoding can be done with or without hardware acceleration.
+
+If decoding is done without hardware acceleration, then the YV12 frames are
+converted into RGB using ConvertYUVToRGB32() provided in
+media/base/yuv_convert.h. They are then drawn to a window using StretchDIBits()
+provided in gfx/gdi_util.h.
+
+If decoding is done with hardware acceleration, then the frames are obtained
+from a D3D surface (IDirect3DSurface9). Each frame is then drawn by calling
+methods on a D3D device (IDirect3DDevice9) associated with the video window
+created during initialization.
+
+The painting is done using a MessageLoop that posts paint tasks every 30ms
+until the end of stream is reached, so the painting part behaves like
+playback.
+
+Note: The current version uses the synchronous mode of the source reader. An
+asynchronous version would likely be more useful and have better
+performance.
+
+Note2: Some artifacts may appear when displaying frames decoded in software.
+This is caused by MMX execution and alignment issues with the video buffer
+during color-space conversion and is not a bug.
+
+Note3: The program might crash randomly when rendering to the window from D3D
+surfaces (i.e., both -h and -r flags are specified), especially for videos
+with larger dimensions. This appears to be caused by the D3D device's
+Present() call during rendering, even though decoding itself behaves
+correctly.
+
+Note4: The maximum resolution supported by Microsoft's H.264 decoder is
+1920 by 1088 pixels. If you try to feed it a larger video, it will "fail
+to determine frame size." This is an inherent limitation that cannot be fixed
+unless a different decoder is used.
+
+Requirements: Windows 7
+
+usage: mfdecoder (-s|-h) (-d|-r) input-file
+flags:
+-s: Use software decoding
+-h: Use hardware decoding
+
+-d: Decode to YV12 as fast as possible, no rendering or color-space conversion
+-r: Render to window \ No newline at end of file
diff --git a/media/tools/mfdecoder/main.cc b/media/tools/mfdecoder/main.cc
new file mode 100644
index 0000000..72c4785
--- /dev/null
+++ b/media/tools/mfdecoder/main.cc
@@ -0,0 +1,552 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifdef UNICODE
+#undef UNICODE
+#endif
+
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <evr.h>
+#include <mfapi.h>
+#include <mfreadwrite.h>
+#include <windows.h>
+
+#include "base/at_exit.h"
+#include "base/basictypes.h"
+#include "base/logging.h"
+#include "base/message_loop.h"
+#include "base/message_pump_win.h"
+#include "base/scoped_comptr_win.h"
+#include "base/scoped_ptr.h"
+#include "base/time.h"
+#include "gfx/gdi_util.h"
+#include "media/base/yuv_convert.h"
+#include "media/tools/mfdecoder/mfdecoder.h"
+
+namespace {
+
+const char* const kWindowClass = "Chrome_MF_Decoder";
+const char* const kWindowTitle = "MF Decoder";
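+// An overlapped, visible window with the maximize box and sizing border
+// removed, so the user cannot resize it.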
+const int kWindowStyleFlags = (WS_OVERLAPPEDWINDOW | WS_VISIBLE) &
+ ~(WS_MAXIMIZEBOX | WS_THICKFRAME);
+bool g_render_to_window = false;
+
+void usage() {
+  static const char* usage_msg =
+      "Usage: mfdecoder (-s|-h) (-d|-r) input-file\n"
+      "-s: Use software decoding\n"
+      "-h: Use hardware decoding\n"
+      "\n"
+      "-d: Decode to YV12 as fast as possible, no rendering or color-space "
+      "conversion\n"
+      "-r: Render to window\n"
+      "\n"
+      "To see this message: mfdecoder --help\n";
+  fprintf(stderr, "%s", usage_msg);
+}
+
+// Converts an ASCII string to a Unicode string. This function allocates
+// space for the returned Unicode string from the heap, and it is the caller's
+// responsibility to free it.
+// Returns: An equivalent Unicode string if successful, NULL otherwise.
+wchar_t* ConvertASCIIStringToUnicode(const char* source) {
+ if (source == NULL) {
+ LOG(ERROR) << "ConvertASCIIStringToUnicode: source cannot be NULL";
+ return NULL;
+ }
+ DWORD string_length = MultiByteToWideChar(CP_ACP, 0, source, -1, NULL, 0);
+ if (string_length == 0) {
+ LOG(ERROR) << "Error getting size of ansi string";
+ return NULL;
+ }
+ scoped_array<wchar_t> ret(new wchar_t[string_length]);
+ if (ret.get() == NULL) {
+ LOG(ERROR) << "Error allocating unicode string buffer";
+ return NULL;
+ }
+ if (MultiByteToWideChar(CP_ACP, 0, source, string_length, ret.get(),
+ string_length) == 0) {
+ LOG(ERROR) << "Error converting ansi string to unicode";
+ return NULL;
+ }
+ return ret.release();
+}
+
+// Converts the given raw data buffer into RGB32 format and draws the result
+// onto the given window. This is only used when DXVA2 is not enabled.
+// Returns: true on success.
+bool ConvertToRGBAndDrawToWindow(HWND video_window, uint8* data, int width,
+ int height, int stride) {
+ CHECK(video_window != NULL);
+ CHECK(data != NULL);
+ CHECK_GT(width, 0);
+ CHECK_GT(height, 0);
+ CHECK_GE(stride, width);
+ bool success = true;
+ uint8* y_start = reinterpret_cast<uint8*>(data);
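+  // YV12 layout: the full-resolution Y plane is followed by the quarter-
+  // resolution V plane and then the U plane, so V starts at height * stride
+  // and U starts at height * stride * 5 / 4.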
+ uint8* u_start = y_start + height * stride * 5 / 4;
+ uint8* v_start = y_start + height * stride;
+ static uint8* rgb_frame = new uint8[height * stride * 4];
+ int y_stride = stride;
+ int uv_stride = stride / 2;
+ int rgb_stride = stride * 4;
+ media::ConvertYUVToRGB32(y_start, u_start, v_start, rgb_frame,
+ width, height, y_stride, uv_stride,
+ rgb_stride, media::YV12);
+
+ PAINTSTRUCT ps;
+ HDC hdc = BeginPaint(video_window, &ps);
+ BITMAPINFOHEADER hdr;
+ hdr.biSize = sizeof(BITMAPINFOHEADER);
+ hdr.biWidth = width;
+ hdr.biHeight = -height; // minus means top-down bitmap
+ hdr.biPlanes = 1;
+ hdr.biBitCount = 32;
+ hdr.biCompression = BI_RGB; // no compression
+ hdr.biSizeImage = 0;
+ hdr.biXPelsPerMeter = 1;
+ hdr.biYPelsPerMeter = 1;
+ hdr.biClrUsed = 0;
+ hdr.biClrImportant = 0;
+ int rv = StretchDIBits(hdc, 0, 0, width, height, 0, 0, width, height,
+ rgb_frame, reinterpret_cast<BITMAPINFO*>(&hdr),
+ DIB_RGB_COLORS, SRCCOPY);
+ if (rv == 0) {
+ LOG(ERROR) << "StretchDIBits failed";
+ success = false;
+ }
+ EndPaint(video_window, &ps);
+
+ return success;
+}
+
+// Obtains the underlying raw data buffer for the given IMFMediaBuffer, and
+// calls ConvertToRGBAndDrawToWindow() with it.
+// Returns: true on success.
+bool PaintMediaBufferOntoWindow(HWND video_window, IMFMediaBuffer* video_buffer,
+ int width, int height, int stride) {
+ CHECK(video_buffer != NULL);
+ HRESULT hr;
+ BYTE* data;
+ DWORD buffer_length;
+ DWORD data_length;
+ hr = video_buffer->Lock(&data, &buffer_length, &data_length);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to lock IMFMediaBuffer";
+ return false;
+ }
+ if (g_render_to_window) {
+ if (!ConvertToRGBAndDrawToWindow(video_window,
+ reinterpret_cast<uint8*>(data),
+ width,
+ height,
+ stride)) {
+ LOG(ERROR) << "Failed to convert raw buffer to RGB and draw to window";
+ video_buffer->Unlock();
+ return false;
+ }
+ }
+ video_buffer->Unlock();
+ return true;
+}
+
+// Obtains the D3D9 surface from the given IMFMediaBuffer, then calls methods
+// in the D3D device to draw to the window associated with it.
+// Returns: true on success.
+bool PaintD3D9BufferOntoWindow(IDirect3DDevice9* device,
+ IMFMediaBuffer* video_buffer) {
+ CHECK(device != NULL);
+ ScopedComPtr<IDirect3DSurface9> surface;
+ HRESULT hr = MFGetService(video_buffer, MR_BUFFER_SERVICE,
+ IID_PPV_ARGS(surface.Receive()));
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to get D3D9 surface from buffer";
+ return false;
+ }
+ if (g_render_to_window) {
+ hr = device->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 255),
+ 1.0f, 0);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Device->Clear() failed";
+ return false;
+ }
+ ScopedComPtr<IDirect3DSurface9> backbuffer;
+ hr = device->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO,
+ backbuffer.Receive());
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Device->GetBackBuffer() failed";
+ return false;
+ }
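+    // Copy the decoded DXVA surface into the back buffer; D3DTEXF_NONE means
+    // no filtering is applied if any scaling occurs.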
+ hr = device->StretchRect(surface.get(), NULL, backbuffer.get(), NULL,
+ D3DTEXF_NONE);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Device->StretchRect() failed";
+ return false;
+ }
+ hr = device->Present(NULL, NULL, NULL, NULL);
+ if (FAILED(hr)) {
+ static int frames_dropped = 0;
+ LOG(ERROR) << "Device->Present() failed "
+ << std::hex << std::showbase << hr;
+ if (++frames_dropped == 10) {
+ LOG(ERROR) << "Dropped too many frames, quitting";
+ MessageLoopForUI::current()->Quit();
+ return false;
+ }
+ }
+ }
+ return true;
+}
+
+// Reads a sample from the given decoder, and draws the sample to the given
+// window. Obtains the IMFMediaBuffer objects from the given IMFSample, and
+// calls either PaintMediaBufferOntoWindow() or PaintD3D9BufferOntoWindow() with
+// each of them, depending on whether the decoder supports DXVA2.
+// The decoder should be initialized before calling this method.
+// For H.264 format, there should only be 1 buffer per sample, so each buffer
+// represents 1 frame.
+// Returns: true if successful.
+bool DrawVideoSample(HWND video_window, media::MFDecoder* decoder,
+ IDirect3DDevice9* device) {
+ CHECK(video_window != NULL);
+ CHECK(decoder != NULL);
+ CHECK(decoder->initialized());
+
+ if (decoder->end_of_stream()) {
+ LOG(ERROR) << "Failed to obtain more samples from decoder because end of "
+ << "stream has been reached";
+ return false;
+ }
+ ScopedComPtr<IMFSample> video_sample;
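+  // Attach() adopts the reference returned by ReadVideoSample() without
+  // adding another one; the caller owns the returned sample.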
+ video_sample.Attach(decoder->ReadVideoSample());
+ if (video_sample.get() == NULL) {
+ LOG(ERROR) << "Failed to obtain a sample from decoder: end of stream? "
+ << (decoder->end_of_stream() ? "true" : "false");
+ return false;
+ }
+
+ // Get the buffer inside the sample.
+ DWORD buffer_count;
+ HRESULT hr = video_sample->GetBufferCount(&buffer_count);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to get buffer count from sample";
+ return false;
+ }
+
+ // For H.264 videos, the number of buffers in the sample is 1.
+ CHECK_EQ(static_cast<int>(buffer_count), 1) << "buffer_count should be equal "
+ << "to 1 for H.264 format";
+ ScopedComPtr<IMFMediaBuffer> video_buffer;
+ hr = video_sample->GetBufferByIndex(0, video_buffer.Receive());
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to get buffer from sample";
+ return false;
+ }
+ if (decoder->use_dxva2()) {
+ return PaintD3D9BufferOntoWindow(device, video_buffer);
+ } else {
+ return PaintMediaBufferOntoWindow(video_window, video_buffer,
+ decoder->width(), decoder->height(),
+ decoder->mfbuffer_stride());
+ }
+}
+
+// Creates a window with the given width and height.
+// Returns: A handle to the window on success, NULL otherwise.
+HWND CreateDrawWindow(int width, int height) {
+ WNDCLASS window_class = {0};
+ window_class.lpszClassName = kWindowClass;
+ window_class.hInstance = NULL;
+ window_class.hbrBackground = GetSysColorBrush(COLOR_3DFACE);
+ window_class.lpfnWndProc = DefWindowProc;
+ window_class.hCursor = LoadCursor(0, IDC_ARROW);
+
+  if (RegisterClass(&window_class) == 0) {
+    LOG(ERROR) << "Failed to register window class";
+    return NULL;
+  }
+ HWND window = CreateWindow(kWindowClass,
+ kWindowTitle,
+ kWindowStyleFlags,
+ 100,
+ 100,
+ width+100,
+ height+30,
+ NULL,
+ NULL,
+ NULL,
+ NULL);
+ if (window == NULL) {
+ LOG(ERROR) << "Failed to create window";
+ return NULL;
+ }
+ return window;
+}
+
+// Creates a D3D device and a D3D device manager, associates the device with
+// the manager, and returns the manager. The manager is later passed to
+// MFDecoder::Init() so that the source reader can use DXVA2 (see
+// MFDecoder::GetDXVA2AttributesForSourceReader()).
+// Returns: The D3D manager object if successful. Otherwise, NULL is returned.
+IDirect3DDeviceManager9* CreateD3DDevManager(HWND video_window,
+ IDirect3DDevice9** device) {
+ CHECK(video_window != NULL);
+ CHECK(device != NULL);
+
+ ScopedComPtr<IDirect3DDeviceManager9> dev_manager;
+ ScopedComPtr<IDirect3D9> d3d;
+ d3d.Attach(Direct3DCreate9(D3D_SDK_VERSION));
+ if (d3d == NULL) {
+ LOG(ERROR) << "Failed to create D3D9";
+ return NULL;
+ }
+ D3DPRESENT_PARAMETERS present_params = {0};
+
+ // Not sure if these values are correct, or if
+ // they even matter. (taken from DXVA_HD sample code)
+ present_params.BackBufferWidth = 0;
+ present_params.BackBufferHeight = 0;
+ present_params.BackBufferFormat = D3DFMT_X8R8G8B8;
+ present_params.BackBufferCount = 1;
+ present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
+ present_params.hDeviceWindow = video_window;
+ present_params.Windowed = TRUE;
+ present_params.Flags = D3DPRESENTFLAG_VIDEO;
+ present_params.FullScreen_RefreshRateInHz = 0;
+ present_params.PresentationInterval = 0;
+
+ ScopedComPtr<IDirect3DDevice9> temp_device;
+ // D3DCREATE_HARDWARE_VERTEXPROCESSING specifies hardware vertex processing.
+ HRESULT hr = d3d->CreateDevice(D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ video_window,
+ D3DCREATE_HARDWARE_VERTEXPROCESSING,
+ &present_params,
+ temp_device.Receive());
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to create D3D Device";
+ return NULL;
+ }
+ UINT dev_manager_reset_token = 0;
+ hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token,
+ dev_manager.Receive());
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Couldn't create D3D Device manager";
+ return NULL;
+ }
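+  // ResetDevice() associates the new device with the device manager; the
+  // token must be the one returned by DXVA2CreateDirect3DDeviceManager9().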
+ hr = dev_manager->ResetDevice(temp_device.get(), dev_manager_reset_token);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to set device to device manager";
+ return NULL;
+ }
+ *device = temp_device.Detach();
+ return dev_manager.Detach();
+}
+
+// Resets the D3D device to prevent scaling: it was created with the window
+// before the window was resized, so the back buffer dimensions need to be
+// changed to the actual video frame dimensions.
+// Both the decoder and device should be initialized before calling this method.
+// Returns: true if successful.
+bool AdjustD3DDeviceBackBufferDimensions(media::MFDecoder* decoder,
+ IDirect3DDevice9* device,
+ HWND video_window) {
+ CHECK(decoder != NULL);
+ CHECK(decoder->initialized());
+ CHECK(decoder->use_dxva2());
+ CHECK(device != NULL);
+  D3DPRESENT_PARAMETERS present_params = {0};
+ present_params.BackBufferWidth = decoder->width();
+ present_params.BackBufferHeight = decoder->height();
+ present_params.BackBufferFormat = D3DFMT_X8R8G8B8;
+ present_params.BackBufferCount = 1;
+ present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
+ present_params.hDeviceWindow = video_window;
+ present_params.Windowed = TRUE;
+ present_params.Flags = D3DPRESENTFLAG_VIDEO;
+ present_params.FullScreen_RefreshRateInHz = 0;
+ present_params.PresentationInterval = 0;
+
+  return SUCCEEDED(device->Reset(&present_params));
+}
+
+// Post this task in the MessageLoop if DXVA2 is enabled.
+void RepaintD3D9(media::MFDecoder* decoder, HWND video_window,
+ IDirect3DDevice9* device) {
+ // If we are using D3D9, then we do not send a WM_PAINT message since the two
+ // do not work well together.
+ if (!DrawVideoSample(video_window, decoder, device)) {
+ LOG(ERROR) << "DrawVideoSample failed, quitting MessageLoop";
+ MessageLoopForUI::current()->Quit();
+ } else {
+ if (g_render_to_window) {
+ MessageLoopForUI::current()->PostDelayedTask(
+ FROM_HERE,
+ NewRunnableFunction(&RepaintD3D9, decoder, video_window, device), 30);
+ } else {
+ MessageLoopForUI::current()->PostTask(
+ FROM_HERE,
+ NewRunnableFunction(&RepaintD3D9, decoder, video_window, device));
+ }
+ }
+}
+
+// Post this task in the MessageLoop if DXVA2 is NOT enabled.
+void RepaintGdi(media::MFDecoder* decoder, HWND video_window) {
+ // This sends a WM_PAINT message so we can paint on the window later.
+ InvalidateRect(video_window, NULL, TRUE);
+
+ // We do not have a D3D device if we did not enable DXVA2, so NULL is passed
+ // in.
+ if (!DrawVideoSample(video_window, decoder, NULL)) {
+ LOG(ERROR) << "DrawVideoSample failed, quitting MessageLoop";
+ MessageLoopForUI::current()->Quit();
+ } else {
+ if (g_render_to_window) {
+ MessageLoopForUI::current()->PostDelayedTask(
+ FROM_HERE,
+ NewRunnableFunction(&RepaintGdi, decoder, video_window), 30);
+ } else {
+ MessageLoopForUI::current()->PostTask(
+ FROM_HERE,
+ NewRunnableFunction(&RepaintGdi, decoder, video_window));
+ }
+ }
+}
+
+// Implementation of Observer for MessageLoopForUI.
+class WindowObserver : public MessageLoopForUI::Observer {
+ public:
+ virtual void WillProcessMessage(const MSG& msg) {}
+
+ virtual void DidProcessMessage(const MSG& msg) {}
+};
+
+} // namespace
+
+int main(int argc, char** argv) {
+ if (argc < 2) {
+ fprintf(stderr, "missing arguments\n");
+ usage();
+ return -1;
+ }
+ if (strcmp(argv[1], "--help") == 0) {
+ usage();
+ return 0;
+ }
+ if (argc != 4) {
+ fprintf(stderr, "invalid number of arguments\n");
+ usage();
+ return -1;
+ }
+
+ bool use_dxva2 = false;
+ if (strcmp(argv[1], "-s") == 0) {
+ use_dxva2 = false;
+ } else if (strcmp(argv[1], "-h") == 0) {
+ use_dxva2 = true;
+ } else {
+ fprintf(stderr, "unknown option %s\n", argv[1]);
+ usage();
+ return -1;
+ }
+ LOG(INFO) << "use_dxva2: " << use_dxva2;
+
+ g_render_to_window = false;
+ if (strcmp(argv[2], "-d") == 0) {
+ g_render_to_window = false;
+ } else if (strcmp(argv[2], "-r") == 0) {
+ g_render_to_window = true;
+ } else {
+ fprintf(stderr, "unknown option %s\n", argv[2]);
+ usage();
+ return -1;
+ }
+ LOG(INFO) << "g_render_to_window: " << g_render_to_window;
+
+ scoped_array<wchar_t> file_name(ConvertASCIIStringToUnicode(argv[argc-1]));
+ if (file_name.get() == NULL) {
+ LOG(ERROR) << "Failed to convert file name to unicode";
+ return -1;
+ }
+
+  // Once the decoder is initialized, the window is resized to the frame size.
+  // For now, just create a window with arbitrary dimensions.
+ HWND video_window = CreateDrawWindow(640, 480);
+ if (video_window == NULL) {
+ LOG(ERROR) << "main: Failed to create the video window";
+ return -1;
+ }
+ scoped_ptr<media::MFDecoder> decoder(new media::MFDecoder(use_dxva2));
+ if (decoder == NULL) {
+ LOG(ERROR) << "Failed to create decoder";
+ return -1;
+ }
+ ScopedComPtr<IDirect3DDeviceManager9> dev_manager;
+ ScopedComPtr<IDirect3DDevice9> device;
+ if (decoder->use_dxva2()) {
+ dev_manager.Attach(CreateD3DDevManager(video_window, device.Receive()));
+ if (dev_manager.get() == NULL || device.get() == NULL) {
+ LOG(ERROR) << "DXVA2 specified, but failed to create D3D device";
+ return -1;
+ }
+ }
+ if (!decoder->Init(file_name.get(), dev_manager.get())) {
+ LOG(ERROR) << "main: Decoder initialization failed";
+ return -1;
+ }
+
+  // Resize the window to the dimensions of the video frame.
+ RECT rect;
+ rect.left = 0;
+ rect.right = decoder->width();
+ rect.top = 0;
+ rect.bottom = decoder->height();
+ AdjustWindowRect(&rect, kWindowStyleFlags, FALSE);
+ if (!MoveWindow(video_window, 0, 0, rect.right - rect.left,
+ rect.bottom - rect.top, TRUE)) {
+ LOG(WARNING) << "Warning: Failed to resize window";
+ }
+ if (decoder->use_dxva2()) {
+    // Reset the device's back buffer dimensions to match the video frame's
+    // dimensions.
+ if (!AdjustD3DDeviceBackBufferDimensions(decoder.get(), device.get(),
+ video_window)) {
+ LOG(WARNING) << "Warning: Failed to reset device to have correct "
+ << "backbuffer dimension, scaling might occur";
+ }
+ }
+ base::Time start(base::Time::Now());
+ printf("Decoding started\n");
+ LOG(INFO) << "Decoding started at " << start.ToTimeT();
+
+  // Set up the message loop and post the initial repaint task.
+ base::AtExitManager exit_manager;
+ MessageLoopForUI message_loop;
+ WindowObserver window_observer;
+ MessageLoopForUI::current()->AddObserver(&window_observer);
+ if (decoder->use_dxva2()) {
+ MessageLoopForUI::current()->PostTask(FROM_HERE,
+ NewRunnableFunction(&RepaintD3D9,
+ decoder.get(),
+ video_window,
+ device.get()));
+ } else {
+ MessageLoopForUI::current()->PostTask(FROM_HERE,
+ NewRunnableFunction(&RepaintGdi,
+ decoder.get(),
+ video_window));
+ }
+ MessageLoopForUI::current()->Run(NULL);
+
+ printf("Decoding finished\n");
+ base::Time end(base::Time::Now());
+ LOG(INFO) << "Decoding finished at " << end.ToTimeT();
+ LOG(INFO) << "Took " << (end-start).InMilliseconds() << "ms";
+
+ printf("Normal termination\n");
+ return 0;
+}
diff --git a/media/tools/mfdecoder/mfdecoder.cc b/media/tools/mfdecoder/mfdecoder.cc
new file mode 100644
index 0000000..e629a4a
--- /dev/null
+++ b/media/tools/mfdecoder/mfdecoder.cc
@@ -0,0 +1,329 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifdef WINVER
+#undef WINVER
+#define WINVER 0x0601 // Windows 7
+#endif
+
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <evr.h>
+#include <mfapi.h>
+#include <mferror.h>
+#include <mfreadwrite.h> // depends on evr.h
+#include <windows.h>
+
+#include "base/logging.h"
+#include "base/scoped_comptr_win.h"
+#include "media/tools/mfdecoder/mfdecoder.h"
+
+#pragma comment(lib, "shlwapi.lib")
+#pragma comment(lib, "strmiids.lib")
+#pragma comment(lib, "dxva2.lib")
+#pragma comment(lib, "d3d9.lib")
+#pragma comment(lib, "mfuuid.lib")
+#pragma comment(lib, "mfplat.lib")
+#pragma comment(lib, "mf.lib")
+#pragma comment(lib, "evr.lib")
+#pragma comment(lib, "mfreadwrite.lib")
+
+namespace media {
+
+MFDecoder::MFDecoder(bool use_dxva2)
+ : width_(0),
+ height_(0),
+ use_dxva2_(use_dxva2),
+ initialized_(false),
+ com_lib_initialized_(false),
+ mf_lib_initialized_(false),
+ reader_(NULL),
+ video_stream_index_(-1),
+ mfbuffer_stride_(0),
+ end_of_stream_(false) {
+}
+
+MFDecoder::~MFDecoder() {
+ if (reader_)
+ reader_->Release();
+ if (com_lib_initialized_)
+ CoUninitialize();
+ if (mf_lib_initialized_)
+ MFShutdown();
+}
+
+bool MFDecoder::Init(const wchar_t* source_url,
+ IDirect3DDeviceManager9* dev_manager) {
+ if (initialized_)
+ return true;
+ if (source_url == NULL) {
+ LOG(ERROR) << "Init: source_url cannot be NULL";
+ return false;
+ }
+ if (use_dxva2_ && dev_manager == NULL) {
+ LOG(ERROR) << "Init failed: DXVA2 specified, but no manager provided";
+ return false;
+ } else if (!use_dxva2_ && dev_manager != NULL) {
+ LOG(WARNING) << "Init: Warning: DXVA2 not specified but manager is "
+ << "provided -- the manager will be ignored";
+ dev_manager = NULL;
+ }
+ if (!InitLibraries())
+ return false;
+ if (!InitSourceReader(source_url, dev_manager))
+ return false;
+
+ // By now, |reader_| should be initialized.
+ if (!SelectVideoStreamOnly())
+ return false;
+
+ // |video_stream_index_| should be pointing to the video stream now.
+ if (!InitVideoInfo(dev_manager))
+ return false;
+
+ initialized_ = true;
+ return true;
+}
+
+IMFSample* MFDecoder::ReadVideoSample() {
+ CHECK(reader_ != NULL);
+ CHECK_GE(video_stream_index_, 0);
+ ScopedComPtr<IMFSample> video_sample;
+ DWORD actual_stream_index;
+ DWORD output_flags;
+
+  // TODO(imcheng): Get the timestamp back by passing in a timestamp pointer
+  // instead of NULL.
+ // TODO(imcheng): Read samples asynchronously and use callbacks.
+ HRESULT hr = reader_->ReadSample(video_stream_index_,
+ 0, // No flags.
+ &actual_stream_index,
+ &output_flags,
+ NULL,
+ video_sample.Receive());
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to read video sample";
+ return NULL;
+ } else {
+ if (output_flags & MF_SOURCE_READERF_ERROR) {
+ LOG(ERROR) << "output_flag error while reading video sample";
+ return NULL;
+ }
+ if (output_flags & MF_SOURCE_READERF_ENDOFSTREAM) {
+ LOG(INFO) << "Video sample reading has reached the end of stream";
+ end_of_stream_ = true;
+ return NULL;
+ }
+ if (static_cast<int>(actual_stream_index) != video_stream_index_) {
+ LOG(ERROR) << "Received sample from stream " << actual_stream_index
+ << " instead of intended video stream " << video_stream_index_;
+ return NULL;
+ }
+ if (video_sample.get() == NULL)
+ LOG(WARNING) << "Video sample is NULL and not at end of stream!";
+ return video_sample.Detach();
+ }
+}
+
+// Private methods
+
+bool MFDecoder::InitLibraries() {
+ // TODO(imcheng): Move initialization to a singleton.
+ HRESULT hr;
+ hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "CoInitializeEx failed during InitLibraries()";
+ return false;
+ }
+ com_lib_initialized_ = true;
+
+ hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "MFStartup failed during InitLibraries()";
+ CoUninitialize();
+ com_lib_initialized_ = false;
+ return false;
+ }
+ mf_lib_initialized_ = true;
+
+ return true;
+}
+
+bool MFDecoder::InitSourceReader(const wchar_t* source_url,
+ IDirect3DDeviceManager9* dev_manager) {
+ CHECK(source_url != NULL);
+ ScopedComPtr<IMFAttributes> reader_attributes;
+ if (use_dxva2_) {
+ reader_attributes.Attach(GetDXVA2AttributesForSourceReader(dev_manager));
+ if (reader_attributes == NULL) {
+ LOG(ERROR) << "Failed to create DXVA2 attributes for source reader";
+ return false;
+ }
+ }
+ HRESULT hr = MFCreateSourceReaderFromURL(source_url, reader_attributes.get(),
+ &reader_);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to create source reader";
+ return false;
+ }
+ LOG(INFO) << "Source reader created";
+ return true;
+}
+
+IMFAttributes* MFDecoder::GetDXVA2AttributesForSourceReader(
+ IDirect3DDeviceManager9* dev_manager) {
+ if (!use_dxva2_)
+ return NULL;
+ CHECK(dev_manager != NULL);
+ ScopedComPtr<IMFAttributes> attributes;
+
+ // Create an attribute store with an initial size of 2.
+ HRESULT hr = MFCreateAttributes(attributes.Receive(), 2);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to create DXVA2 attributes for source reader";
+ return NULL;
+ }
+ hr = attributes->SetUnknown(MF_SOURCE_READER_D3D_MANAGER, dev_manager);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to set D3D9 manager to attribute";
+ return NULL;
+ }
+ hr = attributes->SetUINT32(MF_SOURCE_READER_DISABLE_DXVA, FALSE);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to set DISABLE_DXVA to false";
+ return NULL;
+ }
+ return attributes.Detach();
+}
+
+bool MFDecoder::SelectVideoStreamOnly() {
+ CHECK(reader_ != NULL);
+ HRESULT hr;
+ for (DWORD stream_index = 0; ; stream_index++) {
+ ScopedComPtr<IMFMediaType> media_type;
+ hr = reader_->GetCurrentMediaType(stream_index, media_type.Receive());
+ if (SUCCEEDED(hr)) {
+ GUID major_type;
+ hr = media_type->GetMajorType(&major_type);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Could not determine major type for stream "
+ << stream_index;
+ return false;
+ }
+ if (major_type != MFMediaType_Video) {
+ // Deselect any non-video streams.
+ hr = reader_->SetStreamSelection(stream_index, FALSE);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Could not deselect stream " << stream_index;
+ return false;
+ }
+ } else {
+ // Ensure that the video stream is selected.
+ hr = reader_->SetStreamSelection(stream_index, TRUE);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Could not select video stream " << stream_index;
+ return false;
+ }
+ video_stream_index_ = stream_index;
+ LOG(INFO) << "Video stream is at " << video_stream_index_;
+ }
+ } else if (hr == MF_E_INVALIDSTREAMNUMBER) {
+ break; // No more streams, quit.
+ } else {
+ LOG(ERROR) << "Error occurred while getting stream " << stream_index;
+ return false;
+ }
+ } // end of for-loop
+ return video_stream_index_ >= 0;
+}
+
+bool MFDecoder::InitVideoInfo(IDirect3DDeviceManager9* dev_manager) {
+ CHECK(reader_ != NULL);
+ CHECK_GE(video_stream_index_, 0);
+ ScopedComPtr<IMFMediaType> video_type;
+ HRESULT hr = reader_->GetCurrentMediaType(video_stream_index_,
+ video_type.Receive());
+ if (FAILED(hr)) {
+ LOG(ERROR) << "InitVideoInfo: Failed to get video stream";
+ return false;
+ }
+ GUID video_subtype;
+ hr = video_type->GetGUID(MF_MT_SUBTYPE, &video_subtype);
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to determine video subtype";
+ return false;
+ } else {
+ if (video_subtype == MFVideoFormat_H264) {
+ LOG(INFO) << "Video subtype is H.264";
+ } else {
+ LOG(INFO) << "Video subtype is NOT H.264";
+ }
+ }
+ hr = MFGetAttributeSize(video_type, MF_MT_FRAME_SIZE,
+ reinterpret_cast<UINT32*>(&width_),
+ reinterpret_cast<UINT32*>(&height_));
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to determine frame size";
+ return false;
+ } else {
+ LOG(INFO) << "Video width: " << width_ << ", height: " << height_;
+ }
+
+ // Try to change to YV12 output format.
+ const GUID kOutputVideoSubtype = MFVideoFormat_YV12;
+ ScopedComPtr<IMFMediaType> output_video_format;
+ hr = MFCreateMediaType(output_video_format.Receive());
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to create a IMFMediaType object for video output";
+ return false;
+ }
+ if (SUCCEEDED(hr))
+ hr = output_video_format->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+ if (SUCCEEDED(hr))
+ hr = output_video_format->SetGUID(MF_MT_SUBTYPE, kOutputVideoSubtype);
+ if (SUCCEEDED(hr)) {
+ hr = MFSetAttributeSize(output_video_format, MF_MT_FRAME_SIZE, width_,
+ height_);
+ }
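+  // Requesting YV12 output causes the Source Reader to load whatever decoder
+  // and converter transforms are needed to produce that format.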
+ if (SUCCEEDED(hr)) {
+ hr = reader_->SetCurrentMediaType(video_stream_index_,
+ NULL, // Reserved.
+ output_video_format);
+ }
+ if (SUCCEEDED(hr)) {
+ hr = MFGetStrideForBitmapInfoHeader(
+ kOutputVideoSubtype.Data1,
+ width_,
+ reinterpret_cast<LONG*>(&mfbuffer_stride_));
+ }
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to change output video format and determine stride";
+ return false;
+ } else {
+ LOG(INFO) << "IMFMediaBuffer stride: " << mfbuffer_stride_;
+ }
+
+ // Send a message to the decoder to tell it to use DXVA2.
+ if (use_dxva2_) {
+ // Call GetServiceForStream to get the interface to the video decoder.
+ ScopedComPtr<IMFTransform> video_decoder;
+ hr = reader_->GetServiceForStream(video_stream_index_, GUID_NULL,
+ IID_PPV_ARGS(video_decoder.Receive()));
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to obtain interface to decoder";
+ return false;
+ } else {
+ hr = video_decoder->ProcessMessage(
+ MFT_MESSAGE_SET_D3D_MANAGER,
+ reinterpret_cast<ULONG_PTR>(dev_manager));
+ if (FAILED(hr)) {
+ LOG(ERROR) << "Failed to send DXVA message to decoder";
+ return false;
+ }
+ }
+ }
+ return true;
+}
+
+} // namespace media
diff --git a/media/tools/mfdecoder/mfdecoder.h b/media/tools/mfdecoder/mfdecoder.h
new file mode 100644
index 0000000..e280f9c
--- /dev/null
+++ b/media/tools/mfdecoder/mfdecoder.h
@@ -0,0 +1,89 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_TOOLS_MFDECODER_MFDECODER_H_
+#define MEDIA_TOOLS_MFDECODER_MFDECODER_H_
+
+#include "base/basictypes.h"
+
+struct IDirect3DDeviceManager9;
+struct IMFAttributes;
+struct IMFSample;
+struct IMFSourceReader;
+
+namespace media {
+
+class MFDecoder {
+ public:
+ explicit MFDecoder(bool use_dxva2);
+ ~MFDecoder();
+
+ // This method is to be called after the constructor. This method
+ // creates a source reader with the given URL, and initializes the member
+ // variables that are related to the video, such as the dimensions of the
+ // video, stride, index of video stream, etc.
+ // If DXVA2 was specified in the constructor, then the given device manager
+ // is passed into the source reader so that it can do hardware accelerated
+ // decoding.
+ // Returns: true on success.
+ bool Init(const wchar_t* source_url, IDirect3DDeviceManager9* dev_manager);
+ int width() const { return width_; }
+ int height() const { return height_; }
+ bool use_dxva2() const { return use_dxva2_; }
+ bool initialized() const { return initialized_; }
+ int mfbuffer_stride() const { return mfbuffer_stride_; }
+ bool end_of_stream() const { return end_of_stream_; }
+
+ // Reads a single video sample. If end of stream is reached, |end_of_stream_|
+ // will be set to true.
+  // Returns: Pointer to an IMFSample on success, NULL otherwise. Caller is
+ // responsible for releasing the sample.
+ IMFSample* ReadVideoSample();
+
+ private:
+ // Initializes the COM and MF libraries for this decoder. The two libraries
+ // are either both initialized, or both uninitialized.
+ // Returns: true if both libraries were successfully initialized.
+ bool InitLibraries();
+
+ // Initializes the source reader with the given URL, and device manager if
+ // DXVA2 is enabled on the decoder.
+ // Returns: true on success.
+ bool InitSourceReader(const wchar_t* source_url,
+ IDirect3DDeviceManager9* dev_manager);
+
+ // Called by InitSourceReader() if DXVA2 is to be used. Creates an attribute
+ // store that can be passed to the source reader constructor.
+ // Caller is responsible for releasing the attribute object.
+ // Returns: pointer to an IMFAttributes object if successful, NULL otherwise.
+ IMFAttributes* GetDXVA2AttributesForSourceReader(
+ IDirect3DDeviceManager9* dev_manager);
+
+ // Deselects any non-video streams, ensures the video stream is selected, and
+ // initializes |video_stream_index_| to that video stream.
+ // Returns: true on success.
+ bool SelectVideoStreamOnly();
+
+ // Obtains information about the video (height, width, etc.) and sets the
+ // output format to YV12.
+ // Returns: true on success.
+ bool InitVideoInfo(IDirect3DDeviceManager9* dev_manager);
+
+ int width_;
+ int height_;
+ bool use_dxva2_;
+ bool initialized_;
+ bool com_lib_initialized_;
+ bool mf_lib_initialized_;
+ IMFSourceReader* reader_;
+ int video_stream_index_;
+ int mfbuffer_stride_;
+ bool end_of_stream_;
+
+ DISALLOW_COPY_AND_ASSIGN(MFDecoder);
+};
+
+} // namespace media
+
+#endif // MEDIA_TOOLS_MFDECODER_MFDECODER_H_
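
As a usage sketch (not part of this change), the decode-only software path can
be driven with just the interface declared above; the input file name here is
hypothetical and error handling is trimmed:

  // Sketch: decode every frame in software mode and release each sample.
  media::MFDecoder decoder(false);            // false = no DXVA2.
  if (decoder.Init(L"input.mp4", NULL)) {     // Init() sets up COM/MF itself.
    while (!decoder.end_of_stream()) {
      IMFSample* sample = decoder.ReadVideoSample();
      if (sample == NULL)
        break;                                // Error or end of stream.
      sample->Release();                      // Caller owns the sample.
    }
  }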