author | ananta <ananta@chromium.org> | 2015-02-27 16:39:18 -0800 |
---|---|---|
committer | Commit bot <commit-bot@chromium.org> | 2015-02-28 00:40:00 +0000 |
commit | 3b01db9b22725d4e7d6422d8dadf48dce8ac4794 (patch) | |
tree | 0282a9b6f8a584158667d7e5ab7714eb1c98a8fc | |
parent | a0c036305b748e1617e56c624cb0f4c49c24e6e0 (diff) | |
Add support for DX11 based H/W video decoding on Windows 8+
This is only available on Windows 8+ because MFCreateDXGIDeviceManager, the Media Foundation API that exposes the device
manager passed to the decoder, only exists on Windows 8+.
The changes in this patch are mostly about using DX11 or D3D9 wherever needed. They are as follows:
1. The initialization code path, where we pick D3D9 or DX11 based on whether we are on Windows 8+
and whether both ANGLE and the decoder report DX11 support.
2. The output frame processing code, where we extract the DX11 texture and copy it out to ANGLE. One difference
here is that DX11 does not provide an automatic way to convert texture formats. The decoder outputs
NV12 textures and ANGLE expects RGB; we could do the conversion with a shader, but that seemed
like too much work. Thankfully Windows provides a video processor Media Foundation transform which
does the conversion for us on the GPU. We use this object to convert the output frame and copy it out to
ANGLE (sketched below).
3. We pass the GL context to the decoder so that we can query ANGLE to see whether it is using D3D9 or DX11.
BUG=456418
Review URL: https://codereview.chromium.org/922003002
Cr-Commit-Position: refs/heads/master@{#318560}
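For readers skimming the diff below, here is a condensed sketch of the DX11 path this change introduces: resolve MFCreateDXGIDeviceManager at runtime (it only exists on Windows 8+), create a DX11 device plus DXGI device manager for the decoder, and instantiate the video processor MFT that converts the decoder's NV12 output to RGB for ANGLE. The helper name and parameter list are invented for illustration; the real logic lives in dxva_video_decode_accelerator.cc.

```cpp
// Illustrative sketch only: InitializeDX11Path is a hypothetical helper that
// condenses what the patch does in CheckDecoderDxvaSupport(),
// CreateDX11DevManager() and InitDecoder(). Error handling, COM reference
// management, the D3D11 event query used for flush synchronization, and
// media type negotiation are omitted.
#include <d3d11.h>
#include <mfapi.h>
#include <mfidl.h>       // CLSID_VideoProcessorMFT (the patch DEFINE_GUIDs it
                         // instead, to build with pre-Win8 SDK headers).
#include <mftransform.h>
#include <windows.h>

// MFCreateDXGIDeviceManager is resolved at runtime because it only exists in
// MFPlat.dll on Windows 8+.
typedef HRESULT (WINAPI* CreateDXGIDeviceManager)(
    UINT* reset_token, IMFDXGIDeviceManager** device_manager);

bool InitializeDX11Path(IMFTransform* decoder,
                        ID3D11Device** device,
                        IMFDXGIDeviceManager** device_manager,
                        IMFTransform** format_converter) {
  // 1. Probe for the Windows 8+ entry point; if it is missing, the decoder
  //    stays on the existing D3D9/DXVA2 path.
  HMODULE mfplat = ::GetModuleHandle(L"MFPlat.dll");
  if (!mfplat)
    mfplat = ::LoadLibrary(L"MFPlat.dll");
  CreateDXGIDeviceManager create_dxgi_device_manager =
      mfplat ? reinterpret_cast<CreateDXGIDeviceManager>(
                   ::GetProcAddress(mfplat, "MFCreateDXGIDeviceManager"))
             : NULL;
  if (!create_dxgi_device_manager)
    return false;

  // 2. Create a DX11 device with video support, wrap it in a DXGI device
  //    manager, and hand the manager to the decoder MFT. (The real code also
  //    enables ID3D10Multithread protection on |context|.)
  ID3D11DeviceContext* context = NULL;
  if (FAILED(D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL,
                               D3D11_CREATE_DEVICE_VIDEO_SUPPORT, NULL, 0,
                               D3D11_SDK_VERSION, device, NULL, &context)))
    return false;

  UINT reset_token = 0;
  if (FAILED(create_dxgi_device_manager(&reset_token, device_manager)) ||
      FAILED((*device_manager)->ResetDevice(*device, reset_token)))
    return false;
  decoder->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER,
                          reinterpret_cast<ULONG_PTR>(*device_manager));

  // 3. The decoder emits NV12 textures while ANGLE expects RGB, so the video
  //    processor MFT does the color conversion on the GPU before the frame is
  //    copied into the ANGLE-shared texture.
  if (FAILED(::CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
                                CLSCTX_INPROC_SERVER,
                                IID_PPV_ARGS(format_converter))))
    return false;
  (*format_converter)->ProcessMessage(
      MFT_MESSAGE_SET_D3D_MANAGER,
      reinterpret_cast<ULONG_PTR>(*device_manager));
  return true;
}
```

In the actual patch the decoder only takes this path when the decoder reports MF_SA_D3D11_AWARE and ANGLE's renderer string contains "Direct3D11"; otherwise it falls back to the pre-existing D3D9 device manager. The converter's NV12 input and ARGB32 output media types are renegotiated whenever the output resolution changes.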
| -rw-r--r-- | content/common/BUILD.gn | 2 |
| -rw-r--r-- | content/common/gpu/media/dxva_video_decode_accelerator.cc | 774 |
| -rw-r--r-- | content/common/gpu/media/dxva_video_decode_accelerator.h | 65 |
| -rw-r--r-- | content/common/gpu/media/gpu_video_decode_accelerator.cc | 3 |
| -rw-r--r-- | content/common/gpu/media/video_decode_accelerator_unittest.cc | 4 |
| -rw-r--r-- | content/content_common.gypi | 2 |
6 files changed, 716 insertions, 134 deletions
diff --git a/content/common/BUILD.gn b/content/common/BUILD.gn index fc7c5f8e..ab9e9a6 100644 --- a/content/common/BUILD.gn +++ b/content/common/BUILD.gn @@ -410,6 +410,7 @@ source_set("common") { deps += [ "//ui/gl" ] libs += [ "d3d9.lib", + "d3d11.lib", "dxva2.lib", "strmiids.lib", "mf.lib", @@ -418,6 +419,7 @@ source_set("common") { ] ldflags += [ "/DELAYLOAD:d3d9.dll", + "/DELAYLOAD:d3d11.dll", "/DELAYLOAD:dxva2.dll", "/DELAYLOAD:mf.dll", "/DELAYLOAD:mfplat.dll", diff --git a/content/common/gpu/media/dxva_video_decode_accelerator.cc b/content/common/gpu/media/dxva_video_decode_accelerator.cc index fb51f59..be50634 100644 --- a/content/common/gpu/media/dxva_video_decode_accelerator.cc +++ b/content/common/gpu/media/dxva_video_decode_accelerator.cc @@ -10,6 +10,7 @@ #include <ks.h> #include <codecapi.h> +#include <dxgi1_2.h> #include <mfapi.h> #include <mferror.h> #include <wmcodecdsp.h> @@ -18,6 +19,7 @@ #include "base/bind.h" #include "base/callback.h" #include "base/command_line.h" +#include "base/debug/alias.h" #include "base/file_version_info.h" #include "base/files/file_path.h" #include "base/logging.h" @@ -29,6 +31,7 @@ #include "base/win/windows_version.h" #include "media/video/video_decode_accelerator.h" #include "ui/gl/gl_bindings.h" +#include "ui/gl/gl_context.h" #include "ui/gl/gl_surface_egl.h" #include "ui/gl/gl_switches.h" @@ -83,10 +86,26 @@ const CLSID MEDIASUBTYPE_VP90 = { { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } }; +// The CLSID of the video processor media foundation transform which we use for +// texture color conversion in DX11. +DEFINE_GUID(CLSID_VideoProcessorMFT, + 0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78, + 0xc9, 0x82); + +// MF_XVP_PLAYBACK_MODE +// Data type: UINT32 (treat as BOOL) +// If this attribute is TRUE, the video processor will run in playback mode +// where it allows callers to allocate output samples and allows last frame +// regeneration (repaint). +DEFINE_GUID(MF_XVP_PLAYBACK_MODE, 0x3c5d293f, 0xad67, 0x4e29, 0xaf, 0x12, + 0xcf, 0x3e, 0x23, 0x8a, 0xcc, 0xe9); } namespace content { +CreateDXGIDeviceManager DXVAVideoDecodeAccelerator::create_dxgi_device_manager_ + = NULL; + #define RETURN_ON_FAILURE(result, log, ret) \ do { \ if (!(result)) { \ @@ -158,6 +177,7 @@ static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) { hr = sample->AddBuffer(buffer.get()); RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL); + buffer->SetCurrentLength(0); return sample.Detach(); } @@ -229,6 +249,7 @@ struct DXVAVideoDecodeAccelerator::DXVAPictureBuffer { bool CopyOutputSampleDataToPictureBuffer( DXVAVideoDecodeAccelerator* decoder, IDirect3DSurface9* dest_surface, + ID3D11Texture2D* dx11_texture, int input_buffer_id); bool available() const { @@ -259,6 +280,7 @@ struct DXVAVideoDecodeAccelerator::DXVAPictureBuffer { media::PictureBuffer picture_buffer_; EGLSurface decoding_surface_; base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_; + base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture_; // The following |IDirect3DSurface9| interface pointers are used to hold // references on the surfaces during the course of a StretchRect operation @@ -267,6 +289,11 @@ struct DXVAVideoDecodeAccelerator::DXVAPictureBuffer { base::win::ScopedComPtr<IDirect3DSurface9> decoder_surface_; base::win::ScopedComPtr<IDirect3DSurface9> target_surface_; + // This ID3D11Texture2D interface pointer is used to hold a reference to the + // decoder texture during the course of a copy operation. 
This reference is + // released when the copy completes. + base::win::ScopedComPtr<ID3D11Texture2D> decoder_dx11_texture_; + // Set to true if RGB is supported by the texture. // Defaults to true. bool use_rgb_; @@ -315,17 +342,27 @@ DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create( "Failed to query ANGLE surface pointer", linked_ptr<DXVAPictureBuffer>(NULL)); - // TODO(dshwang): after moving to D3D11, use RGBA surface. crbug.com/438691 - HRESULT hr = decoder.device_->CreateTexture( - buffer.size().width(), - buffer.size().height(), - 1, - D3DUSAGE_RENDERTARGET, - use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8, - D3DPOOL_DEFAULT, - picture_buffer->decoding_texture_.Receive(), - &share_handle); - + HRESULT hr = E_FAIL; + if (decoder.d3d11_device_) { + base::win::ScopedComPtr<ID3D11Resource> resource; + hr = decoder.d3d11_device_->OpenSharedResource( + share_handle, + __uuidof(ID3D11Resource), + reinterpret_cast<void**>(resource.Receive())); + RETURN_ON_HR_FAILURE(hr, "Failed to open shared resource", + linked_ptr<DXVAPictureBuffer>(NULL)); + hr = picture_buffer->dx11_decoding_texture_.QueryFrom(resource.get()); + } else { + hr = decoder.d3d9_device_ex_->CreateTexture( + buffer.size().width(), + buffer.size().height(), + 1, + D3DUSAGE_RENDERTARGET, + use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8, + D3DPOOL_DEFAULT, + picture_buffer->decoding_texture_.Receive(), + &share_handle); + } RETURN_ON_HR_FAILURE(hr, "Failed to create texture", linked_ptr<DXVAPictureBuffer>(NULL)); picture_buffer->use_rgb_ = !!use_rgb; @@ -365,6 +402,7 @@ void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() { EGL_BACK_BUFFER); decoder_surface_.Release(); target_surface_.Release(); + decoder_dx11_texture_.Release(); set_available(true); } @@ -372,9 +410,18 @@ bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer:: CopyOutputSampleDataToPictureBuffer( DXVAVideoDecodeAccelerator* decoder, IDirect3DSurface9* dest_surface, + ID3D11Texture2D* dx11_texture, int input_buffer_id) { - DCHECK(dest_surface); - + DCHECK(dest_surface || dx11_texture); + if (dx11_texture) { + // Grab a reference on the decoder texture. This reference will be released + // when we receive a notification that the copy was completed or when the + // DXVAPictureBuffer instance is destroyed. 
+ decoder_dx11_texture_ = dx11_texture; + decoder->CopyTexture(dx11_texture, dx11_decoding_texture_.get(), NULL, + id(), input_buffer_id); + return true; + } D3DSURFACE_DESC surface_desc; HRESULT hr = dest_surface->GetDesc(&surface_desc); RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); @@ -424,11 +471,15 @@ void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::CopySurfaceComplete( glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - DCHECK_EQ(src_surface, decoder_surface_.get()); - DCHECK_EQ(dest_surface, target_surface_.get()); - - decoder_surface_.Release(); - target_surface_.Release(); + if (src_surface && dest_surface) { + DCHECK_EQ(src_surface, decoder_surface_.get()); + DCHECK_EQ(dest_surface, target_surface_.get()); + decoder_surface_.Release(); + target_surface_.Release(); + } else { + DCHECK(decoder_dx11_texture_.get()); + decoder_dx11_texture_.Release(); + } EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); eglBindTexImage( @@ -449,57 +500,12 @@ DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo( DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {} -// static -bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() { - TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager"); - - HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive()); - RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false); - - D3DPRESENT_PARAMETERS present_params = {0}; - present_params.BackBufferWidth = 1; - present_params.BackBufferHeight = 1; - present_params.BackBufferFormat = D3DFMT_UNKNOWN; - present_params.BackBufferCount = 1; - present_params.SwapEffect = D3DSWAPEFFECT_DISCARD; - present_params.hDeviceWindow = ::GetShellWindow(); - present_params.Windowed = TRUE; - present_params.Flags = D3DPRESENTFLAG_VIDEO; - present_params.FullScreen_RefreshRateInHz = 0; - present_params.PresentationInterval = 0; - - hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT, - D3DDEVTYPE_HAL, - ::GetShellWindow(), - D3DCREATE_FPU_PRESERVE | - D3DCREATE_SOFTWARE_VERTEXPROCESSING | - D3DCREATE_DISABLE_PSGP_THREADING | - D3DCREATE_MULTITHREADED, - &present_params, - NULL, - device_.Receive()); - RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false); - - hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_, - device_manager_.Receive()); - RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false); - - hr = device_manager_->ResetDevice(device_.get(), dev_manager_reset_token_); - RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); - - hr = device_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive()); - RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false); - // Ensure query_ API works (to avoid an infinite loop later in - // CopyOutputSampleDataToPictureBuffer). 
- hr = query_->Issue(D3DISSUE_END); - RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false); - return true; -} - DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator( - const base::Callback<bool(void)>& make_context_current) + const base::Callback<bool(void)>& make_context_current, + gfx::GLContext* gl_context) : client_(NULL), dev_manager_reset_token_(0), + dx11_dev_manager_reset_token_(0), egl_config_(NULL), state_(kUninitialized), pictures_requested_(false), @@ -510,7 +516,10 @@ DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator( decoder_thread_("DXVAVideoDecoderThread"), weak_this_factory_(this), weak_ptr_(weak_this_factory_.GetWeakPtr()), - pending_flush_(false) { + pending_flush_(false), + use_dx11_(false), + dx11_video_format_converter_media_type_needs_init_(true), + gl_context_(gl_context) { memset(&input_stream_info_, 0, sizeof(input_stream_info_)); memset(&output_stream_info_, 0, sizeof(output_stream_info_)); } @@ -525,14 +534,6 @@ bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile, main_thread_task_runner_ = base::MessageLoop::current()->task_runner(); - // Not all versions of Windows 7 and later include Media Foundation DLLs. - // Instead of crashing while delay loading the DLL when calling MFStartup() - // below, probe whether we can successfully load the DLL now. - // - // See http://crbug.com/339678 for details. - HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll"); - RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding", false); - if (profile != media::H264PROFILE_BASELINE && profile != media::H264PROFILE_MAIN && profile != media::H264PROFILE_HIGH && @@ -542,6 +543,40 @@ bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile, "Unsupported h.264, vp8, or vp9 profile", PLATFORM_FAILURE, false); } + // Not all versions of Windows 7 and later include Media Foundation DLLs. + // Instead of crashing while delay loading the DLL when calling MFStartup() + // below, probe whether we can successfully load the DLL now. + // See http://crbug.com/339678 for details. + HMODULE dxgi_manager_dll = NULL; + if ((dxgi_manager_dll = ::GetModuleHandle(L"MFPlat.dll")) == NULL) { + HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll"); + RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding", + false); + // On Windows 8+ mfplat.dll provides the MFCreateDXGIDeviceManager API. + // On Windows 7 mshtmlmedia.dll provides it. + dxgi_manager_dll = mfplat_dll; + } + + // TODO(ananta) + // The code below works, as in we can create the DX11 device manager for + // Windows 7. However the IMFTransform we use for texture conversion and + // copy does not exist on Windows 7. Look into an alternate approach + // and enable the code below. +#if defined ENABLE_DX11_FOR_WIN7 + if ((base::win::GetVersion() == base::win::VERSION_WIN7) && + ((dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll")) == NULL)) { + HMODULE mshtml_media_dll = ::LoadLibrary(L"mshtmlmedia.dll"); + if (mshtml_media_dll) + dxgi_manager_dll = mshtml_media_dll; + } +#endif + // If we don't find the MFCreateDXGIDeviceManager API we fallback to D3D9 + // decoding. 
+ if (dxgi_manager_dll && !create_dxgi_device_manager_) { + create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>( + ::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager")); + } + RETURN_AND_NOTIFY_ON_FAILURE( gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle, "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable", @@ -556,11 +591,6 @@ bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile, RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFStartup failed.", PLATFORM_FAILURE, false); - RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), - "Failed to initialize D3D device and manager", - PLATFORM_FAILURE, - false); - RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile), "Failed to initialize decoder", PLATFORM_FAILURE, false); @@ -583,6 +613,128 @@ bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile, return true; } +bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() { + TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager"); + + HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive()); + RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false); + + D3DPRESENT_PARAMETERS present_params = {0}; + present_params.BackBufferWidth = 1; + present_params.BackBufferHeight = 1; + present_params.BackBufferFormat = D3DFMT_UNKNOWN; + present_params.BackBufferCount = 1; + present_params.SwapEffect = D3DSWAPEFFECT_DISCARD; + present_params.hDeviceWindow = ::GetShellWindow(); + present_params.Windowed = TRUE; + present_params.Flags = D3DPRESENTFLAG_VIDEO; + present_params.FullScreen_RefreshRateInHz = 0; + present_params.PresentationInterval = 0; + + hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT, + D3DDEVTYPE_HAL, + ::GetShellWindow(), + D3DCREATE_FPU_PRESERVE | + D3DCREATE_SOFTWARE_VERTEXPROCESSING | + D3DCREATE_DISABLE_PSGP_THREADING | + D3DCREATE_MULTITHREADED, + &present_params, + NULL, + d3d9_device_ex_.Receive()); + RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false); + + hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_, + device_manager_.Receive()); + RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false); + + hr = device_manager_->ResetDevice(d3d9_device_ex_.get(), + dev_manager_reset_token_); + RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); + + hr = d3d9_device_ex_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive()); + RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false); + // Ensure query_ API works (to avoid an infinite loop later in + // CopyOutputSampleDataToPictureBuffer). + hr = query_->Issue(D3DISSUE_END); + RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false); + return true; +} + +bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() { + HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_, + d3d11_device_manager_.Receive()); + RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false); + + // This array defines the set of DirectX hardware feature levels we support. + // The ordering MUST be preserved. All applications are assumed to support + // 9.1 unless otherwise stated by the application, which is not our case. 
+ D3D_FEATURE_LEVEL feature_levels[] = { + D3D_FEATURE_LEVEL_11_1, + D3D_FEATURE_LEVEL_11_0, + D3D_FEATURE_LEVEL_10_1, + D3D_FEATURE_LEVEL_10_0, + D3D_FEATURE_LEVEL_9_3, + D3D_FEATURE_LEVEL_9_2, + D3D_FEATURE_LEVEL_9_1 }; + + UINT flags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT; + +#if defined _DEBUG + flags |= D3D11_CREATE_DEVICE_DEBUG; +#endif + + D3D_FEATURE_LEVEL feature_level_out = D3D_FEATURE_LEVEL_11_0; + hr = D3D11CreateDevice(NULL, + D3D_DRIVER_TYPE_HARDWARE, + NULL, + flags, + feature_levels, + arraysize(feature_levels), + D3D11_SDK_VERSION, + d3d11_device_.Receive(), + &feature_level_out, + d3d11_device_context_.Receive()); + RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device", false); + + // Enable multithreaded mode on the context. This ensures that accesses to + // context are synchronized across threads. We have multiple threads + // accessing the context, the media foundation decoder threads and the + // decoder thread via the video format conversion transform. + base::win::ScopedComPtr<ID3D10Multithread> multi_threaded; + hr = multi_threaded.QueryFrom(d3d11_device_context_.get()); + RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D10Multithread", false); + multi_threaded->SetMultithreadProtected(TRUE); + + hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(), + dx11_dev_manager_reset_token_); + RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); + + D3D11_QUERY_DESC query_desc; + query_desc.Query = D3D11_QUERY_EVENT; + query_desc.MiscFlags = 0; + hr = d3d11_device_->CreateQuery( + &query_desc, + d3d11_query_.Receive()); + RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); + + hr = ::CoCreateInstance( + CLSID_VideoProcessorMFT, + NULL, + CLSCTX_INPROC_SERVER, + IID_IMFTransform, + reinterpret_cast<void**>(video_format_converter_mft_.Receive())); + + if (FAILED(hr)) { + base::debug::Alias(&hr); + // TODO(ananta) + // Remove this CHECK when the change to use DX11 for H/W decoding + // stablizes. 
+ CHECK(false); + } + RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false); + return true; +} + void DXVAVideoDecodeAccelerator::Decode( const media::BitstreamBuffer& bitstream_buffer) { DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); @@ -632,6 +784,7 @@ void DXVAVideoDecodeAccelerator::AssignPictureBuffers( buffers[buffer_index].id(), picture_buffer)).second; DCHECK(inserted); } + ProcessPendingSamples(); if (pending_flush_) { decoder_thread_task_runner_->PostTask( @@ -840,10 +993,31 @@ bool DXVAVideoDecodeAccelerator::InitDecoder(media::VideoCodecProfile profile) { RETURN_ON_FAILURE(CheckDecoderDxvaSupport(), "Failed to check decoder DXVA support", false); + ULONG_PTR device_manager_to_use = NULL; + if (use_dx11_) { + CHECK(create_dxgi_device_manager_); + RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(), + "Failed to initialize DX11 device and manager", + PLATFORM_FAILURE, + false); + device_manager_to_use = reinterpret_cast<ULONG_PTR>( + d3d11_device_manager_.get()); + } else { + RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), + "Failed to initialize D3D device and manager", + PLATFORM_FAILURE, + false); + device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.get()); + } + hr = decoder_->ProcessMessage( MFT_MESSAGE_SET_D3D_MANAGER, - reinterpret_cast<ULONG_PTR>(device_manager_.get())); - RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); + device_manager_to_use); + if (use_dx11_) { + RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false); + } else { + RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); + } EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); @@ -890,6 +1064,20 @@ bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() { } else { DVLOG(1) << "Failed to set Low latency mode on decoder. Error: " << hr; } + + // The decoder should use DX11 iff + // 1. The underlying H/W decoder supports it. + // 2. We have a pointer to the MFCreateDXGIDeviceManager function needed for + // this. This should always be true for Windows 8+. + // 3. ANGLE is using DX11. + DCHECK(gl_context_); + if (create_dxgi_device_manager_ && + (gl_context_->GetGLRenderer().find("Direct3D11") != + std::string::npos)) { + UINT32 dx11_aware = 0; + attributes->GetUINT32(MF_SA_D3D11_AWARE, &dx11_aware); + use_dx11_ = !!dx11_aware; + } return true; } @@ -1054,16 +1242,6 @@ void DXVAVideoDecodeAccelerator::DoDecode() { bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) { RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false); - base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; - HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); - RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); - - base::win::ScopedComPtr<IDirect3DSurface9> surface; - hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, - IID_PPV_ARGS(surface.Receive())); - RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample", - false); - LONGLONG input_buffer_id = 0; RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id), "Failed to get input buffer id associated with sample", @@ -1085,20 +1263,20 @@ bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) { return true; } - // We only read the surface description, which contains its width/height when - // we need the picture buffers from the client. Once we have those, then they - // are reused. 
- D3DSURFACE_DESC surface_desc; - hr = surface->GetDesc(&surface_desc); - RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); + int width = 0; + int height = 0; + if (!GetVideoFrameDimensions(sample, &width, &height)) { + RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample", + false); + } // Go ahead and request picture buffers. main_thread_task_runner_->PostTask( FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, weak_this_factory_.GetWeakPtr(), - surface_desc.Width, - surface_desc.Height)); + width, + height)); pictures_requested_ = true; return true; @@ -1130,31 +1308,43 @@ void DXVAVideoDecodeAccelerator::ProcessPendingSamples() { pending_sample = &sample_info; } + int width = 0; + int height = 0; + if (!GetVideoFrameDimensions(pending_sample->output_sample.get(), + &width, &height)) { + RETURN_AND_NOTIFY_ON_FAILURE(false, + "Failed to get D3D surface from output sample", PLATFORM_FAILURE,); + } + + if (width != index->second->size().width() || + height != index->second->size().height()) { + HandleResolutionChanged(width, height); + return; + } + base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; HRESULT hr = pending_sample->output_sample->GetBufferByIndex( 0, output_buffer.Receive()); - RETURN_AND_NOTIFY_ON_HR_FAILURE( - hr, "Failed to get buffer from output sample", PLATFORM_FAILURE,); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to get buffer from output sample", PLATFORM_FAILURE,); base::win::ScopedComPtr<IDirect3DSurface9> surface; - hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, - IID_PPV_ARGS(surface.Receive())); - RETURN_AND_NOTIFY_ON_HR_FAILURE( - hr, "Failed to get D3D surface from output sample", - PLATFORM_FAILURE,); - - D3DSURFACE_DESC surface_desc; - hr = surface->GetDesc(&surface_desc); - RETURN_AND_NOTIFY_ON_HR_FAILURE( - hr, "Failed to get surface description", PLATFORM_FAILURE,); - - if (surface_desc.Width != - static_cast<uint32>(index->second->size().width()) || - surface_desc.Height != - static_cast<uint32>(index->second->size().height())) { - HandleResolutionChanged(surface_desc.Width, surface_desc.Height); - return; + base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; + + if (use_dx11_) { + base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; + hr = dxgi_buffer.QueryFrom(output_buffer.get()); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE,); + hr = dxgi_buffer->GetResource( + __uuidof(ID3D11Texture2D), + reinterpret_cast<void**>(d3d11_texture.Receive())); + } else { + hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, + IID_PPV_ARGS(surface.Receive())); } + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to get surface from output sample", PLATFORM_FAILURE,); pending_sample->picture_buffer_id = index->second->id(); @@ -1162,8 +1352,9 @@ void DXVAVideoDecodeAccelerator::ProcessPendingSamples() { index->second->CopyOutputSampleDataToPictureBuffer( this, surface.get(), + d3d11_texture.get(), pending_sample->input_buffer_id), - "Failed to copy output sample", PLATFORM_FAILURE, ); + "Failed to copy output sample", PLATFORM_FAILURE,); index->second->set_available(false); } @@ -1200,7 +1391,13 @@ void DXVAVideoDecodeAccelerator::Invalidate() { pending_output_samples_.clear(); pending_input_buffers_.clear(); decoder_.Release(); + if (video_format_converter_mft_.get()) { + video_format_converter_mft_->ProcessMessage( + MFT_MESSAGE_NOTIFY_END_STREAMING, 0); + video_format_converter_mft_.Release(); + } MFShutdown(); + 
dx11_video_format_converter_media_type_needs_init_ = true; SetState(kUninitialized); } @@ -1425,6 +1622,8 @@ void DXVAVideoDecodeAccelerator::DecodeInternal( void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, int height) { + dx11_video_format_converter_media_type_needs_init_ = true; + main_thread_task_runner_->PostTask( FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, @@ -1518,8 +1717,8 @@ void DXVAVideoDecodeAccelerator::CopySurface(IDirect3DSurface9* src_surface, return; } - HRESULT hr = device_->StretchRect(src_surface, NULL, dest_surface, - NULL, D3DTEXF_NONE); + HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface, + NULL, D3DTEXF_NONE); RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",); // Ideally, this should be done immediately before the draw call that uses @@ -1590,6 +1789,135 @@ void DXVAVideoDecodeAccelerator::CopySurfaceComplete( base::Unretained(this))); } +void DXVAVideoDecodeAccelerator::CopyTexture(ID3D11Texture2D* src_texture, + ID3D11Texture2D* dest_texture, + IMFSample* video_frame, + int picture_buffer_id, + int input_buffer_id) { + HRESULT hr = E_FAIL; + + DCHECK(use_dx11_); + + if (!decoder_thread_task_runner_->BelongsToCurrentThread()) { + // The media foundation H.264 decoder outputs YUV12 textures which we + // cannot copy into ANGLE as they expect ARGB textures. In D3D land + // the StretchRect API in the IDirect3DDevice9Ex interface did the color + // space conversion for us. Sadly in DX11 land the API does not provide + // a straightforward way to do this. + // We use the video processor MFT. + // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx + // This object implements a media foundation transform (IMFTransform) + // which follows the same contract as the decoder. The color space + // conversion as per msdn is done in the GPU. + + D3D11_TEXTURE2D_DESC source_desc; + src_texture->GetDesc(&source_desc); + + // Set up the input and output types for the video processor MFT. + if (!InitializeDX11VideoFormatConverterMediaType(source_desc.Width, + source_desc.Height)) { + RETURN_AND_NOTIFY_ON_FAILURE( + false, "Failed to initialize media types for convesion.", + PLATFORM_FAILURE,); + } + + // The input to the video processor is the output sample. + base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; + { + base::AutoLock lock(decoder_lock_); + PendingSampleInfo& sample_info = pending_output_samples_.front(); + input_sample_for_conversion = sample_info.output_sample; + } + + decoder_thread_task_runner_->PostTask( + FROM_HERE, + base::Bind(&DXVAVideoDecodeAccelerator::CopyTexture, + base::Unretained(this), + src_texture, + dest_texture, + input_sample_for_conversion.Detach(), + picture_buffer_id, + input_buffer_id)); + return; + } + + DCHECK(video_frame); + + base::win::ScopedComPtr<IMFSample> input_sample; + input_sample.Attach(video_frame); + + DCHECK(video_format_converter_mft_.get()); + + // d3d11_device_context_->Begin(d3d11_query_.get()); + + hr = video_format_converter_mft_->ProcessInput(0, video_frame, 0); + if (FAILED(hr)) { + DCHECK(false); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to convert output sample format.", PLATFORM_FAILURE,); + } + + // The video processor MFT requires output samples to be allocated by the + // caller. We create a sample with a buffer backed with the ID3D11Texture2D + // interface exposed by ANGLE. 
This works nicely as this ensures that the + // video processor coverts the color space of the output frame and copies + // the result into the ANGLE texture. + base::win::ScopedComPtr<IMFSample> output_sample; + hr = MFCreateSample(output_sample.Receive()); + if (FAILED(hr)) { + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to create output sample.", PLATFORM_FAILURE,); + } + + base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; + hr = MFCreateDXGISurfaceBuffer( + __uuidof(ID3D11Texture2D), dest_texture, 0, FALSE, + output_buffer.Receive()); + if (FAILED(hr)) { + base::debug::Alias(&hr); + // TODO(ananta) + // Remove this CHECK when the change to use DX11 for H/W decoding + // stablizes. + CHECK(false); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to create output sample.", PLATFORM_FAILURE,); + } + + output_sample->AddBuffer(output_buffer.get()); + + DWORD status = 0; + MFT_OUTPUT_DATA_BUFFER format_converter_output = {}; + format_converter_output.pSample = output_sample.get(); + hr = video_format_converter_mft_->ProcessOutput( + 0, // No flags + 1, // # of out streams to pull from + &format_converter_output, + &status); + + d3d11_device_context_->Flush(); + d3d11_device_context_->End(d3d11_query_.get()); + + if (FAILED(hr)) { + base::debug::Alias(&hr); + // TODO(ananta) + // Remove this CHECK when the change to use DX11 for H/W decoding + // stablizes. + CHECK(false); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to convert output sample format.", PLATFORM_FAILURE,); + } + + decoder_thread_task_runner_->PostDelayedTask( + FROM_HERE, + base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, + base::Unretained(this), 0, + reinterpret_cast<IDirect3DSurface9*>(NULL), + reinterpret_cast<IDirect3DSurface9*>(NULL), + picture_buffer_id, input_buffer_id), + base::TimeDelta::FromMilliseconds( + kFlushDecoderSurfaceTimeoutMs)); +} + void DXVAVideoDecodeAccelerator::FlushDecoder( int iterations, IDirect3DSurface9* src_surface, @@ -1611,7 +1939,22 @@ void DXVAVideoDecodeAccelerator::FlushDecoder( // infinite loop. // Workaround is to have an upper limit of 4 on the number of iterations to // wait for the Flush to finish. - HRESULT hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH); + HRESULT hr = E_FAIL; + + if (use_dx11_) { + BOOL query_data = 0; + hr = d3d11_device_context_->GetData(d3d11_query_.get(), &query_data, + sizeof(BOOL), 0); + if (FAILED(hr)) { + base::debug::Alias(&hr); + // TODO(ananta) + // Remove this CHECK when the change to use DX11 for H/W decoding + // stablizes. 
+ CHECK(false); + } + } else { + hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH); + } if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) { decoder_thread_task_runner_->PostDelayedTask( FROM_HERE, @@ -1621,14 +1964,185 @@ void DXVAVideoDecodeAccelerator::FlushDecoder( base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); return; } + main_thread_task_runner_->PostTask( - FROM_HERE, - base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, - weak_this_factory_.GetWeakPtr(), - src_surface, - dest_surface, - picture_buffer_id, - input_buffer_id)); + FROM_HERE, + base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, + weak_this_factory_.GetWeakPtr(), + src_surface, + dest_surface, + picture_buffer_id, + input_buffer_id)); +} + +bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType( + int width, int height) { + if (!dx11_video_format_converter_media_type_needs_init_) + return true; + + CHECK(video_format_converter_mft_.get()); + + HRESULT hr = video_format_converter_mft_->ProcessMessage( + MFT_MESSAGE_SET_D3D_MANAGER, + reinterpret_cast<ULONG_PTR>( + d3d11_device_manager_.get())); + + if (FAILED(hr)) { + base::debug::Alias(&hr); + // TODO(ananta) + // Remove this CHECK when the change to use DX11 for H/W decoding + // stablizes. + CHECK(false); + } + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to initialize video format converter", PLATFORM_FAILURE, false); + + video_format_converter_mft_->ProcessMessage( + MFT_MESSAGE_NOTIFY_END_STREAMING, 0); + + base::win::ScopedComPtr<IMFMediaType> media_type; + hr = MFCreateMediaType(media_type.Receive()); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed", + PLATFORM_FAILURE, false); + + hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type", + PLATFORM_FAILURE, false); + + hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type", + PLATFORM_FAILURE, false); + + hr = media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to set attributes on media type", PLATFORM_FAILURE, false); + + hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE, + MFVideoInterlace_Progressive); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to set attributes on media type", PLATFORM_FAILURE, false); + + base::win::ScopedComPtr<IMFAttributes> converter_attributes; + hr = video_format_converter_mft_->GetAttributes( + converter_attributes.Receive()); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get converter attributes", + PLATFORM_FAILURE, false); + + hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes", + PLATFORM_FAILURE, false); + + hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes", + PLATFORM_FAILURE, false); + + hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes", + PLATFORM_FAILURE, false); + + hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0); + if (FAILED(hr)) { + base::debug::Alias(&hr); + // TODO(ananta) + // Remove this CHECK when the change to use DX11 for H/W decoding + // stablizes. 
+ CHECK(false); + } + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type", + PLATFORM_FAILURE, false); + + base::win::ScopedComPtr<IMFMediaType> out_media_type; + + for (uint32 i = 0; + SUCCEEDED(video_format_converter_mft_->GetOutputAvailableType(0, i, + out_media_type.Receive())); + ++i) { + GUID out_subtype = {0}; + hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get output major type", + PLATFORM_FAILURE, false); + + if (out_subtype == MFVideoFormat_ARGB32) { + hr = out_media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to set attributes on media type", PLATFORM_FAILURE, false); + + hr = out_media_type->SetUINT32(MF_MT_INTERLACE_MODE, + MFVideoInterlace_Progressive); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to set attributes on media type", PLATFORM_FAILURE, false); + + hr = MFSetAttributeSize(out_media_type.get(), MF_MT_FRAME_SIZE, width, + height); + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to set media type attributes", PLATFORM_FAILURE, false); + + hr = video_format_converter_mft_->SetOutputType( + 0, out_media_type.get(), 0); // No flags + if (FAILED(hr)) { + base::debug::Alias(&hr); + // TODO(ananta) + // Remove this CHECK when the change to use DX11 for H/W decoding + // stablizes. + CHECK(false); + } + RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, + "Failed to set converter output type", PLATFORM_FAILURE, false); + + hr = video_format_converter_mft_->ProcessMessage( + MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0); + if (FAILED(hr)) { + // TODO(ananta) + // Remove this CHECK when the change to use DX11 for H/W decoding + // stablizes. + RETURN_AND_NOTIFY_ON_FAILURE( + false, "Failed to initialize video converter.", PLATFORM_FAILURE, + false); + } + dx11_video_format_converter_media_type_needs_init_ = false; + return true; + } + out_media_type.Release(); + } + return false; +} + +bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions( + IMFSample* sample, + int* width, + int* height) { + base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; + HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); + RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); + + if (use_dx11_) { + base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; + base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; + hr = dxgi_buffer.QueryFrom(output_buffer.get()); + RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample", + false); + hr = dxgi_buffer->GetResource( + __uuidof(ID3D11Texture2D), + reinterpret_cast<void**>(d3d11_texture.Receive())); + RETURN_ON_HR_FAILURE(hr, "Failed to get D3D11Texture from output buffer", + false); + D3D11_TEXTURE2D_DESC d3d11_texture_desc; + d3d11_texture->GetDesc(&d3d11_texture_desc); + *width = d3d11_texture_desc.Width; + *height = d3d11_texture_desc.Height; + } else { + base::win::ScopedComPtr<IDirect3DSurface9> surface; + hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, + IID_PPV_ARGS(surface.Receive())); + RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample", + false); + D3DSURFACE_DESC surface_desc; + hr = surface->GetDesc(&surface_desc); + RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); + *width = surface_desc.Width; + *height = surface_desc.Height; + } + return true; } } // namespace content diff --git a/content/common/gpu/media/dxva_video_decode_accelerator.h b/content/common/gpu/media/dxva_video_decode_accelerator.h index 6aa21c9..e98f613 
100644 --- a/content/common/gpu/media/dxva_video_decode_accelerator.h +++ b/content/common/gpu/media/dxva_video_decode_accelerator.h @@ -5,6 +5,7 @@ #ifndef CONTENT_COMMON_GPU_MEDIA_DXVA_VIDEO_DECODE_ACCELERATOR_H_ #define CONTENT_COMMON_GPU_MEDIA_DXVA_VIDEO_DECODE_ACCELERATOR_H_ +#include <d3d11.h> #include <d3d9.h> // Work around bug in this header by disabling the relevant warning for it. // https://connect.microsoft.com/VisualStudio/feedback/details/911260/dxva2api-h-in-win8-sdk-triggers-c4201-with-w4 @@ -30,6 +31,14 @@ interface IMFSample; interface IDirect3DSurface9; +namespace gfx { +class GLContext; +} + +typedef HRESULT (WINAPI* CreateDXGIDeviceManager)( + UINT* reset_token, + IMFDXGIDeviceManager** device_manager); + namespace content { // Class to provide a DXVA 2.0 based accelerator using the Microsoft Media @@ -49,7 +58,8 @@ class CONTENT_EXPORT DXVAVideoDecodeAccelerator // Does not take ownership of |client| which must outlive |*this|. explicit DXVAVideoDecodeAccelerator( - const base::Callback<bool(void)>& make_context_current); + const base::Callback<bool(void)>& make_context_current, + gfx::GLContext* gl_context); ~DXVAVideoDecodeAccelerator() override; // media::VideoDecodeAccelerator implementation. @@ -74,6 +84,11 @@ class CONTENT_EXPORT DXVAVideoDecodeAccelerator // passed to the IMFTransform interface implemented by the decoder. bool CreateD3DDevManager(); + // Creates and initializes an instance of the DX11 device and the + // corresponding device manager. The device manager instance is eventually + // passed to the IMFTransform interface implemented by the decoder. + bool CreateDX11DevManager(); + // Creates, initializes and sets the media codec types for the decoder. bool InitDecoder(media::VideoCodecProfile profile); @@ -195,6 +210,15 @@ class CONTENT_EXPORT DXVAVideoDecodeAccelerator int picture_buffer_id, int input_buffer_id); + // Copies the source texture |src_texture| to the destination |dest_texture|. + // The copying is done on the decoder thread. The |video_frame| parameter + // is the sample containing the frame to be copied. + void CopyTexture(ID3D11Texture2D* src_texture, + ID3D11Texture2D* dest_texture, + IMFSample* video_frame, + int picture_buffer_id, + int input_buffer_id); + // Flushes the decoder device to ensure that the decoded surface is copied // to the target surface. |iterations| helps to maintain an upper limit on // the number of times we try to complete the flush operation. @@ -204,15 +228,33 @@ class CONTENT_EXPORT DXVAVideoDecodeAccelerator int picture_buffer_id, int input_buffer_id); + // Initializes the DX11 Video format converter media types. + // Returns true on success. + bool InitializeDX11VideoFormatConverterMediaType(int width, int height); + + // Returns the output video frame dimensions (width, height). + // |sample| :- This is the output sample containing the video frame. + // |width| :- The width is returned here. + // |height| :- The height is returned here. + // Returns true on success. + bool GetVideoFrameDimensions(IMFSample* sample, int* width, int* height); + // To expose client callbacks from VideoDecodeAccelerator. 
media::VideoDecodeAccelerator::Client* client_; base::win::ScopedComPtr<IMFTransform> decoder_; + base::win::ScopedComPtr<IMFTransform> video_format_converter_mft_; base::win::ScopedComPtr<IDirect3D9Ex> d3d9_; - base::win::ScopedComPtr<IDirect3DDevice9Ex> device_; + base::win::ScopedComPtr<IDirect3DDevice9Ex> d3d9_device_ex_; base::win::ScopedComPtr<IDirect3DDeviceManager9> device_manager_; base::win::ScopedComPtr<IDirect3DQuery9> query_; + + base::win::ScopedComPtr<ID3D11DeviceContext> d3d11_device_context_; + base::win::ScopedComPtr<ID3D11Device > d3d11_device_; + base::win::ScopedComPtr<IMFDXGIDeviceManager> d3d11_device_manager_; + base::win::ScopedComPtr<ID3D11Query> d3d11_query_; + // Ideally the reset token would be a stack variable which is used while // creating the device manager. However it seems that the device manager // holds onto the token and attempts to access it if the underlying device @@ -220,6 +262,11 @@ class CONTENT_EXPORT DXVAVideoDecodeAccelerator // TODO(ananta): This needs to be verified. uint32 dev_manager_reset_token_; + // Reset token for the DX11 device manager. + uint32 dx11_dev_manager_reset_token_; + + uint32 dx11_dev_manager_reset_token_format_conversion_; + // The EGL config to use for decoded frames. EGLConfig egl_config_; @@ -305,6 +352,20 @@ class CONTENT_EXPORT DXVAVideoDecodeAccelerator // Set to true if we are in the context of a Flush operation. Used to prevent // multiple flush done notifications being sent out. bool pending_flush_; + + // Defaults to false. Indicates if we should use D3D or DX11 interfaces for + // H/W decoding. + bool use_dx11_; + + // Set to true if the DX11 video format converter input media types need to + // be initialized. Defaults to true. + bool dx11_video_format_converter_media_type_needs_init_; + + // The GLContext to be used by the decoder. + scoped_refptr<gfx::GLContext> gl_context_; + + // Function pointer for the MFCreateDXGIDeviceManager API. 
+ static CreateDXGIDeviceManager create_dxgi_device_manager_; }; } // namespace content diff --git a/content/common/gpu/media/gpu_video_decode_accelerator.cc b/content/common/gpu/media/gpu_video_decode_accelerator.cc index 0a3b4df..fde81ac 100644 --- a/content/common/gpu/media/gpu_video_decode_accelerator.cc +++ b/content/common/gpu/media/gpu_video_decode_accelerator.cc @@ -289,7 +289,8 @@ GpuVideoDecodeAccelerator::CreateDXVAVDA() { #if defined(OS_WIN) if (base::win::GetVersion() >= base::win::VERSION_WIN7) { DVLOG(0) << "Initializing DXVA HW decoder for windows."; - decoder.reset(new DXVAVideoDecodeAccelerator(make_context_current_)); + decoder.reset(new DXVAVideoDecodeAccelerator(make_context_current_, + stub_->decoder()->GetGLContext())); } else { NOTIMPLEMENTED() << "HW video decode acceleration not available."; } diff --git a/content/common/gpu/media/video_decode_accelerator_unittest.cc b/content/common/gpu/media/video_decode_accelerator_unittest.cc index 4de1c9b..f88b2c5 100644 --- a/content/common/gpu/media/video_decode_accelerator_unittest.cc +++ b/content/common/gpu/media/video_decode_accelerator_unittest.cc @@ -510,7 +510,9 @@ GLRenderingVDAClient::CreateDXVAVDA() { #if defined(OS_WIN) if (base::win::GetVersion() >= base::win::VERSION_WIN7) decoder.reset( - new DXVAVideoDecodeAccelerator(base::Bind(&DoNothingReturnTrue))); + new DXVAVideoDecodeAccelerator( + base::Bind(&DoNothingReturnTrue), + rendering_helper_->GetGLContext().get())); #endif return decoder.Pass(); } diff --git a/content/content_common.gypi b/content/content_common.gypi index d61a5f0..c62e10f 100644 --- a/content/content_common.gypi +++ b/content/content_common.gypi @@ -953,6 +953,7 @@ 'link_settings': { 'libraries': [ '-ld3d9.lib', + '-ld3d11.lib', '-ldxva2.lib', '-lstrmiids.lib', '-lmf.lib', @@ -963,6 +964,7 @@ 'VCLinkerTool': { 'DelayLoadDLLs': [ 'd3d9.dll', + 'd3d11.dll', 'dxva2.dll', 'mf.dll', 'mfplat.dll', |