-rw-r--r--  media/mf/mft_h264_decoder.cc               | 154
-rw-r--r--  media/mf/mft_h264_decoder.h                |   5
-rw-r--r--  media/mf/mft_h264_decoder_example.cc       | 137
-rw-r--r--  media/mf/test/mft_h264_decoder_unittest.cc |  63
4 files changed, 212 insertions(+), 147 deletions(-)
diff --git a/media/mf/mft_h264_decoder.cc b/media/mf/mft_h264_decoder.cc
index 68a42bb..6d85f70 100644
--- a/media/mf/mft_h264_decoder.cc
+++ b/media/mf/mft_h264_decoder.cc
@@ -2,10 +2,6 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "build/build_config.h"  // For OS_WIN.
-
-#if defined(OS_WIN)
-
 #include "media/mf/mft_h264_decoder.h"
 
 #include <d3d9.h>
@@ -25,6 +21,8 @@
 #pragma comment(lib, "mf.lib")
 #pragma comment(lib, "mfplat.lib")
 
+using base::TimeDelta;
+
 namespace {
 
 // Creates an empty Media Foundation sample with no buffers.
@@ -131,18 +129,32 @@ static IMFSample* CreateInputSample(const uint8* stream, int size,
   return sample.Detach();
 }
 
+const GUID ConvertVideoFrameFormatToGuid(media::VideoFrame::Format format) {
+  switch (format) {
+    case media::VideoFrame::NV12:
+      return MFVideoFormat_NV12;
+    case media::VideoFrame::YV12:
+      return MFVideoFormat_YV12;
+    default:
+      NOTREACHED() << "Unsupported VideoFrame format";
+      return GUID_NULL;
+  }
+  NOTREACHED();
+  return GUID_NULL;
+}
+
 }  // namespace
 
 namespace media {
 
 // public methods
 
-MftH264Decoder::MftH264Decoder(bool use_dxva)
+MftH264Decoder::MftH264Decoder(bool use_dxva, HWND draw_window)
     : use_dxva_(use_dxva),
       d3d9_(NULL),
       device_(NULL),
       device_manager_(NULL),
-      device_window_(NULL),
+      draw_window_(draw_window),
       decoder_(NULL),
       input_stream_info_(),
       output_stream_info_(),
@@ -177,8 +189,8 @@ void MftH264Decoder::Initialize(
   // TODO(jiesun): Actually it is more likely an NV12 D3DSuface9.
   // Until we had hardware composition working.
   if (use_dxva_) {
-    info_.stream_info.surface_format = VideoFrame::YV12;
-    info_.stream_info.surface_type = VideoFrame::TYPE_SYSTEM_MEMORY;
+    info_.stream_info.surface_format = VideoFrame::NV12;
+    info_.stream_info.surface_type = VideoFrame::TYPE_D3D_TEXTURE;
   } else {
     info_.stream_info.surface_format = VideoFrame::YV12;
     info_.stream_info.surface_type = VideoFrame::TYPE_SYSTEM_MEMORY;
@@ -206,8 +218,6 @@ void MftH264Decoder::Uninitialize() {
   // Cannot shutdown COM libraries here because the COM objects still needs
   // to be Release()'ed. We can explicitly release them here, or move the
   // uninitialize to GpuVideoService...
-  if (device_window_)
-    DestroyWindow(device_window_);
   decoder_.Release();
   device_manager_.Release();
   device_.Release();
@@ -319,31 +329,20 @@ void MftH264Decoder::ShutdownComLibraries() {
 }
 
 bool MftH264Decoder::CreateD3DDevManager() {
+  CHECK(draw_window_);
   d3d9_.Attach(Direct3DCreate9(D3D_SDK_VERSION));
   if (d3d9_.get() == NULL) {
    LOG(ERROR) << "Failed to create D3D9";
    return false;
  }
 
-  static const TCHAR kWindowName[] = TEXT("MFT Decoder Hidden Window");
-  static const TCHAR kClassName[] = TEXT("STATIC");
-  device_window_ = CreateWindowEx(WS_EX_NOACTIVATE,
-                                  kClassName,
-                                  kWindowName,
-                                  WS_DISABLED | WS_POPUP,
-                                  0, 0, 1, 1,
-                                  HWND_MESSAGE,
-                                  NULL,
-                                  GetModuleHandle(NULL),
-                                  NULL);
-  CHECK(device_window_);
   D3DPRESENT_PARAMETERS present_params = {0};
-  present_params.BackBufferWidth = 1;
-  present_params.BackBufferHeight = 1;
+  present_params.BackBufferWidth = 0;
+  present_params.BackBufferHeight = 0;
   present_params.BackBufferFormat = D3DFMT_UNKNOWN;
   present_params.BackBufferCount = 1;
   present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
-  present_params.hDeviceWindow = device_window_;
+  present_params.hDeviceWindow = draw_window_;
   present_params.Windowed = TRUE;
   present_params.Flags = D3DPRESENTFLAG_VIDEO;
   present_params.FullScreen_RefreshRateInHz = 0;
@@ -353,7 +352,7 @@ bool MftH264Decoder::CreateD3DDevManager() {
   // (Is it even needed for just video decoding?)
   HRESULT hr = d3d9_->CreateDevice(D3DADAPTER_DEFAULT,
                                    D3DDEVTYPE_HAL,
-                                   device_window_,
+                                   draw_window_,
                                    (D3DCREATE_HARDWARE_VERTEXPROCESSING |
                                     D3DCREATE_MULTITHREADED),
                                    &present_params,
@@ -412,7 +411,7 @@ bool MftH264Decoder::InitDecoder() {
         MFT_MESSAGE_SET_D3D_MANAGER,
         reinterpret_cast<ULONG_PTR>(device_manager_.get()));
     if (FAILED(hr)) {
-      LOG(ERROR) << "Failed to set D3D9 device to decoder";
+      LOG(ERROR) << "Failed to set D3D9 device to decoder " << std::hex << hr;
       return false;
     }
   }
@@ -443,8 +442,8 @@ bool MftH264Decoder::CheckDecoderDxvaSupport() {
 bool MftH264Decoder::SetDecoderMediaTypes() {
   if (!SetDecoderInputMediaType())
     return false;
-  return SetDecoderOutputMediaType(use_dxva_ ? MFVideoFormat_NV12
-                                             : MFVideoFormat_YV12);
+  return SetDecoderOutputMediaType(ConvertVideoFrameFormatToGuid(
+      info_.stream_info.surface_format));
 }
 
 bool MftH264Decoder::SetDecoderInputMediaType() {
@@ -592,8 +591,8 @@ bool MftH264Decoder::DoDecode() {
 
   if (FAILED(hr)) {
     if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
-      hr = SetDecoderOutputMediaType(use_dxva_ ? MFVideoFormat_NV12
-                                               : MFVideoFormat_YV12);
+      hr = SetDecoderOutputMediaType(ConvertVideoFrameFormatToGuid(
+          info_.stream_info.surface_format));
       if (SUCCEEDED(hr)) {
         event_handler_->OnFormatChange(info_.stream_info);
         return true;
@@ -656,74 +655,51 @@ bool MftH264Decoder::DoDecode() {
     return true;
   }
 
-  VideoFrame::CreateFrame(info_.stream_info.surface_format,
-                          info_.stream_info.surface_width,
-                          info_.stream_info.surface_height,
-                          base::TimeDelta::FromMicroseconds(timestamp),
-                          base::TimeDelta::FromMicroseconds(duration),
-                          &frame);
-  if (!frame.get()) {
-    LOG(ERROR) << "Failed to allocate video frame";
-    event_handler_->OnError();
-    return true;
-  }
+
   if (use_dxva_) {
-    // temporary until we figure out how to send a D3D9 surface handle.
     ScopedComPtr<IDirect3DSurface9> surface;
     hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                       IID_PPV_ARGS(surface.Receive()));
-    if (FAILED(hr))
-      return true;
-
-    // TODO(imcheng):
-    // This is causing some problems (LockRect does not work always).
-    // We won't need this when we figure out how to use the d3d
-    // surface directly.
-    // NV12 to YV12
-    D3DLOCKED_RECT d3dlocked_rect;
-    hr = surface->LockRect(&d3dlocked_rect, NULL, D3DLOCK_READONLY);
     if (FAILED(hr)) {
-      LOG(ERROR) << "LockRect";
-      return true;
-    }
-    D3DSURFACE_DESC desc;
-    hr = surface->GetDesc(&desc);
-    if (FAILED(hr)) {
-      LOG(ERROR) << "GetDesc";
-      CHECK(SUCCEEDED(surface->UnlockRect()));
+      LOG(ERROR) << "Failed to get surface from buffer";
       return true;
     }
-    uint32 src_stride = d3dlocked_rect.Pitch;
-    uint32 dst_stride = config_.width;
-    uint8* src_y = static_cast<uint8*>(d3dlocked_rect.pBits);
-    uint8* src_uv = src_y + src_stride * desc.Height;
-    uint8* dst_y = static_cast<uint8*>(frame->data(VideoFrame::kYPlane));
-    uint8* dst_u = static_cast<uint8*>(frame->data(VideoFrame::kVPlane));
-    uint8* dst_v = static_cast<uint8*>(frame->data(VideoFrame::kUPlane));
-
-    for (int y = 0; y < config_.height; ++y) {
-      for (int x = 0; x < config_.width; ++x) {
-        dst_y[x] = src_y[x];
-        if (!(y & 1)) {
-          if (x & 1)
-            dst_v[x>>1] = src_uv[x];
-          else
-            dst_u[x>>1] = src_uv[x];
-        }
-      }
-      dst_y += dst_stride;
-      src_y += src_stride;
-      if (!(y & 1)) {
-        src_uv += src_stride;
-        dst_v += dst_stride >> 1;
-        dst_u += dst_stride >> 1;
-      }
-    }
-    CHECK(SUCCEEDED(surface->UnlockRect()));
+    // No distinction between the 3 planes - all 3 point to the handle of
+    // the texture. (There are actually only 2 planes since the output
+    // D3D surface is in NV12 format.)
+    VideoFrame::D3dTexture textures[VideoFrame::kMaxPlanes] = { surface.get(),
+                                                                surface.get(),
+                                                                surface.get() };
+    VideoFrame::CreateFrameD3dTexture(info_.stream_info.surface_format,
+                                      info_.stream_info.surface_width,
+                                      info_.stream_info.surface_height,
+                                      textures,
+                                      TimeDelta::FromMicroseconds(timestamp),
+                                      TimeDelta::FromMicroseconds(duration),
+                                      &frame);
+    if (!frame.get()) {
+      LOG(ERROR) << "Failed to allocate video frame for d3d texture";
+      event_handler_->OnError();
+      return true;
+    }
+
+    // The reference is now in the VideoFrame.
+    surface.Detach();
   } else {
     // Not DXVA.
+    VideoFrame::CreateFrame(info_.stream_info.surface_format,
+                            info_.stream_info.surface_width,
+                            info_.stream_info.surface_height,
+                            TimeDelta::FromMicroseconds(timestamp),
+                            TimeDelta::FromMicroseconds(duration),
+                            &frame);
+    if (!frame.get()) {
+      LOG(ERROR) << "Failed to allocate video frame for yuv plane";
+      event_handler_->OnError();
+      return true;
+    }
     uint8* src_y;
     DWORD max_length, current_length;
     HRESULT hr = output_buffer->Lock(&src_y, &max_length, &current_length);
@@ -740,5 +716,3 @@ bool MftH264Decoder::DoDecode() {
 }
 
 }  // namespace media
-
-#endif  // defined(OS_WIN)
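For reference, the DXVA path in DoDecode() used to copy the decoder's NV12 surface into a YV12 frame on the CPU before this patch switched to wrapping the IDirect3DSurface9 in a VideoFrame. A minimal standalone sketch of such an NV12-to-YV12 copy, assuming even dimensions and tightly packed destination planes (the helper name and exact plane ordering are illustrative, not part of the patch):

    // NV12 stores the Y plane followed by an interleaved UV plane; YV12 stores
    // separate half-resolution chroma planes. Strides are in bytes.
    static void Nv12ToYv12(const uint8* src_y, const uint8* src_uv,
                           int src_stride,
                           uint8* dst_y, uint8* dst_u, uint8* dst_v,
                           int width, int height) {
      // Copy the luma plane row by row.
      for (int y = 0; y < height; ++y) {
        for (int x = 0; x < width; ++x)
          dst_y[x] = src_y[x];
        src_y += src_stride;
        dst_y += width;
      }
      // De-interleave the half-resolution chroma plane.
      for (int y = 0; y < height / 2; ++y) {
        for (int x = 0; x < width / 2; ++x) {
          dst_u[x] = src_uv[2 * x];      // even bytes carry U
          dst_v[x] = src_uv[2 * x + 1];  // odd bytes carry V
        }
        src_uv += src_stride;
        dst_u += width / 2;
        dst_v += width / 2;
      }
    }

Dropping this copy is the main gain of the DXVA path after this change: the decoded surface stays on the GPU and is presented directly by the example's RenderD3dSurface() below.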
diff --git a/media/mf/mft_h264_decoder.h b/media/mf/mft_h264_decoder.h
index ede63cb..61e3c65 100644
--- a/media/mf/mft_h264_decoder.h
+++ b/media/mf/mft_h264_decoder.h
@@ -33,7 +33,7 @@ class MftH264Decoder : public media::VideoDecodeEngine {
     kStopped,  // upon output EOS received.
   } State;
 
-  explicit MftH264Decoder(bool use_dxva);
+  explicit MftH264Decoder(bool use_dxva, HWND draw_window);
   ~MftH264Decoder();
   virtual void Initialize(MessageLoop* message_loop,
                           media::VideoDecodeEngine::EventHandler* event_handler,
@@ -45,6 +45,7 @@ class MftH264Decoder : public media::VideoDecodeEngine {
   virtual void ProduceVideoFrame(scoped_refptr<VideoFrame> frame);
 
   bool use_dxva() const { return use_dxva_; }
+  IDirect3DDevice9* device() const { return device_.get(); }
   State state() const { return state_; }
 
  private:
@@ -73,7 +74,7 @@ class MftH264Decoder : public media::VideoDecodeEngine {
   ScopedComPtr<IDirect3D9> d3d9_;
   ScopedComPtr<IDirect3DDevice9> device_;
   ScopedComPtr<IDirect3DDeviceManager9> device_manager_;
-  HWND device_window_;
+  HWND draw_window_;
   ScopedComPtr<IMFTransform> decoder_;
 
   MFT_INPUT_STREAM_INFO input_stream_info_;
diff --git a/media/mf/mft_h264_decoder_example.cc b/media/mf/mft_h264_decoder_example.cc
index e1cc790..788a2ca 100644
--- a/media/mf/mft_h264_decoder_example.cc
+++ b/media/mf/mft_h264_decoder_example.cc
@@ -77,10 +77,8 @@ static HWND CreateDrawWindow(int width, int height) {
   window_class.lpfnWndProc = DefWindowProc;
   window_class.hCursor = 0;
 
-  if (RegisterClass(&window_class) == 0) {
-    LOG(ERROR) << "Failed to register window class";
-    return false;
-  }
+  RegisterClass(&window_class);
+
   HWND window = CreateWindow(kWindowClass,
                              kWindowTitle,
                              kWindowStyleFlags,
@@ -197,6 +195,81 @@ class RenderToWindowHandler : public MftH264DecoderHandler {
         has_output_(false) {
   }
   virtual ~RenderToWindowHandler() {}
+  bool RenderSoftwareFrame(scoped_refptr<VideoFrame> frame) {
+    int width = frame->width();
+    int height = frame->height();
+
+    // Assume height does not change.
+    static uint8* rgb_frame = new uint8[height * frame->stride(0) * 4];
+    uint8* frame_y = static_cast<uint8*>(frame->data(VideoFrame::kYPlane));
+    uint8* frame_u = static_cast<uint8*>(frame->data(VideoFrame::kUPlane));
+    uint8* frame_v = static_cast<uint8*>(frame->data(VideoFrame::kVPlane));
+    media::ConvertYUVToRGB32(frame_y, frame_v, frame_u, rgb_frame,
+                             width, height,
+                             frame->stride(0), frame->stride(1),
+                             4 * frame->stride(0), media::YV12);
+    PAINTSTRUCT ps;
+    InvalidateRect(window_, NULL, TRUE);
+    HDC hdc = BeginPaint(window_, &ps);
+    BITMAPINFOHEADER hdr;
+    hdr.biSize = sizeof(BITMAPINFOHEADER);
+    hdr.biWidth = width;
+    hdr.biHeight = -height;      // minus means top-down bitmap
+    hdr.biPlanes = 1;
+    hdr.biBitCount = 32;
+    hdr.biCompression = BI_RGB;  // no compression
+    hdr.biSizeImage = 0;
+    hdr.biXPelsPerMeter = 1;
+    hdr.biYPelsPerMeter = 1;
+    hdr.biClrUsed = 0;
+    hdr.biClrImportant = 0;
+    int rv = StretchDIBits(hdc, 0, 0, width, height, 0, 0, width, height,
+                           rgb_frame, reinterpret_cast<BITMAPINFO*>(&hdr),
+                           DIB_RGB_COLORS, SRCCOPY);
+    EndPaint(window_, &ps);
+    return rv != 0;
+  }
+  bool RenderD3dSurface(scoped_refptr<VideoFrame> frame) {
+    ScopedComPtr<IDirect3DSurface9> surface;
+    IDirect3DDevice9* device = decoder_->device();
+    surface.Attach(static_cast<IDirect3DSurface9*>(frame->d3d_texture(0)));
+    HRESULT hr;
+    hr = device->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0),
+                       1.0f, 0);
+    if (FAILED(hr)) {
+      LOG(ERROR) << "Device->Clear() failed";
+      return false;
+    }
+    ScopedComPtr<IDirect3DSurface9> backbuffer;
+    hr = device->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO,
+                               backbuffer.Receive());
+    if (FAILED(hr)) {
+      LOG(ERROR) << "Device->GetBackBuffer() failed";
+      return false;
+    }
+    hr = device->StretchRect(surface.get(), NULL, backbuffer.get(), NULL,
+                             D3DTEXF_NONE);
+    if (FAILED(hr)) {
+      LOG(ERROR) << "Device->StretchRect() failed";
+      return false;
+    }
+    hr = device->Present(NULL, NULL, NULL, NULL);
+    if (FAILED(hr)) {
+      if (hr == E_FAIL) {
+        LOG(WARNING) << "Present() returned E_FAIL";
+      } else {
+        static int frames_dropped = 0;
+        LOG(ERROR) << "Device->Present() failed "
+                   << std::hex << std::showbase << hr;
+        if (++frames_dropped == 10) {
+          LOG(ERROR) << "Dropped too many frames, quitting";
+          MessageLoopForUI::current()->QuitNow();
+          return false;
+        }
+      }
+    }
+    return true;
+  }
   virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame) {
     has_output_ = true;
     if (frame.get()) {
@@ -206,40 +279,14 @@ class RenderToWindowHandler : public MftH264DecoderHandler {
           FROM_HERE,
           NewRunnableMethod(this, &RenderToWindowHandler::DecodeSingleFrame),
           frame->GetDuration().InMilliseconds());
-
-      int width = frame->width();
-      int height = frame->height();
-
-      // Assume height does not change.
-      static uint8* rgb_frame = new uint8[height * frame->stride(0) * 4];
-      uint8* frame_y = static_cast<uint8*>(frame->data(VideoFrame::kYPlane));
-      uint8* frame_u = static_cast<uint8*>(frame->data(VideoFrame::kUPlane));
-      uint8* frame_v = static_cast<uint8*>(frame->data(VideoFrame::kVPlane));
-      media::ConvertYUVToRGB32(frame_y, frame_v, frame_u, rgb_frame,
-                               width, height,
-                               frame->stride(0), frame->stride(1),
-                               4 * frame->stride(0), media::YV12);
-      PAINTSTRUCT ps;
-      InvalidateRect(window_, NULL, TRUE);
-      HDC hdc = BeginPaint(window_, &ps);
-      BITMAPINFOHEADER hdr;
-      hdr.biSize = sizeof(BITMAPINFOHEADER);
-      hdr.biWidth = width;
-      hdr.biHeight = -height;      // minus means top-down bitmap
-      hdr.biPlanes = 1;
-      hdr.biBitCount = 32;
-      hdr.biCompression = BI_RGB;  // no compression
-      hdr.biSizeImage = 0;
-      hdr.biXPelsPerMeter = 1;
-      hdr.biYPelsPerMeter = 1;
-      hdr.biClrUsed = 0;
-      hdr.biClrImportant = 0;
-      int rv = StretchDIBits(hdc, 0, 0, width, height, 0, 0, width, height,
-                             rgb_frame, reinterpret_cast<BITMAPINFO*>(&hdr),
-                             DIB_RGB_COLORS, SRCCOPY);
-      EndPaint(window_, &ps);
-      if (!rv) {
-        LOG(ERROR) << "StretchDIBits failed";
+      bool success;
+      if (decoder_->use_dxva()) {
+        success = RenderD3dSurface(frame);
+      } else {
+        success = RenderSoftwareFrame(frame);
+      }
+      if (!success) {
+        LOG(ERROR) << "Render failed";
         loop_->QuitNow();
       }
     } else {  // if frame is type EMPTY, there will be no more frames.
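A note on the CreateDrawWindow() change above: the RegisterClass() failure check was removed, presumably because re-registering an already-registered class is harmless here. If stricter handling is ever wanted, a tolerant variant could distinguish re-registration from a genuine failure; a sketch, not part of the patch:

    // Ignore "class already exists" but surface any other registration error.
    if (RegisterClass(&window_class) == 0 &&
        GetLastError() != ERROR_CLASS_ALREADY_EXISTS) {
      LOG(ERROR) << "Failed to register window class: " << GetLastError();
      return NULL;
    }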
@@ -287,17 +334,19 @@ static int Run(bool use_dxva, bool render, const std::string& input_file) {
   config.width = width;
   config.height = height;
   HWND window = NULL;
-  if (render) {
+  if (use_dxva || render) {
     window = CreateDrawWindow(width, height);
+    if (!render)
+      ShowWindow(window, SW_HIDE);
     if (window == NULL) {
       LOG(ERROR) << "Failed to create window";
       return -1;
     }
   }
 
-  scoped_ptr<MftH264Decoder> mft(new MftH264Decoder(use_dxva));
+  scoped_ptr<MftH264Decoder> mft(new MftH264Decoder(use_dxva, window));
   if (!mft.get()) {
-    LOG(ERROR) << "Failed to create fake MFT";
+    LOG(ERROR) << "Failed to create MFT";
     return -1;
   }
 
@@ -309,11 +358,15 @@ static int Run(bool use_dxva, bool render, const std::string& input_file) {
   handler->SetDecoder(mft.get());
   handler->SetReader(reader.get());
   if (!handler.get()) {
-    LOG(ERROR) << "FAiled to create handler";
+    LOG(ERROR) << "Failed to create handler";
     return -1;
   }
 
   mft->Initialize(MessageLoop::current(), handler.get(), config);
+  if (!handler->info_.success) {
+    LOG(ERROR) << "Failed to initialize decoder";
+    return -1;
+  }
   scoped_ptr<WindowObserver> observer;
   if (render) {
     observer.reset(new WindowObserver(reader.get(), mft.get()));
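The new handler->info_.success check in Run() assumes the example handler caches the VideoCodecInfo delivered during initialization. A sketch of that bookkeeping, assuming an OnInitializeComplete(info) callback on the handler (the exact callback signature is an assumption; it is not shown in this diff):

    // Inside MftH264DecoderHandler (sketch): remember the init result so
    // Run() can bail out early if the decoder failed to come up.
    virtual void OnInitializeComplete(const VideoCodecInfo& info) {
      info_ = info;  // info_.success is what Run() inspects.
    }
    VideoCodecInfo info_;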
diff --git a/media/mf/test/mft_h264_decoder_unittest.cc b/media/mf/test/mft_h264_decoder_unittest.cc
index 562e7e4..67dc07c 100644
--- a/media/mf/test/mft_h264_decoder_unittest.cc
+++ b/media/mf/test/mft_h264_decoder_unittest.cc
@@ -24,6 +24,37 @@ namespace media {
 static const int kDecoderMaxWidth = 1920;
 static const int kDecoderMaxHeight = 1088;
 
+static HWND CreateDrawWindow(int width, int height) {
+  static const wchar_t kClassName[] = L"Test";
+  static const wchar_t kWindowTitle[] = L"MFT Unittest Draw Window";
+  WNDCLASS window_class = {0};
+  window_class.lpszClassName = kClassName;
+  window_class.hInstance = NULL;
+  window_class.hbrBackground = 0;
+  window_class.lpfnWndProc = DefWindowProc;
+  window_class.hCursor = 0;
+
+  RegisterClass(&window_class);
+
+  HWND window = CreateWindow(kClassName,
+                             kWindowTitle,
+                             (WS_OVERLAPPEDWINDOW | WS_VISIBLE) &
+                             ~(WS_MAXIMIZEBOX | WS_THICKFRAME),
+                             100,
+                             100,
+                             width,
+                             height,
+                             NULL,
+                             NULL,
+                             NULL,
+                             NULL);
+  if (window == NULL) {
+    LOG(ERROR) << "Failed to create window";
+    return NULL;
+  }
+  return window;
+}
+
 class BaseMftReader : public base::RefCountedThreadSafe<BaseMftReader> {
  public:
   virtual ~BaseMftReader() {}
@@ -165,7 +196,7 @@ TEST_F(MftH264DecoderTest, LibraryInit) {
 }
 
 TEST_F(MftH264DecoderTest, DecoderUninitializedAtFirst) {
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(true));
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(true, NULL));
   ASSERT_TRUE(decoder.get());
   EXPECT_EQ(MftH264Decoder::kUninitialized, decoder->state());
 }
@@ -174,7 +205,7 @@ TEST_F(MftH264DecoderTest, DecoderInitMissingArgs) {
   VideoCodecConfig config;
   config.width = 800;
   config.height = 600;
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false, NULL));
   ASSERT_TRUE(decoder.get());
   decoder->Initialize(NULL, NULL, config);
   EXPECT_EQ(MftH264Decoder::kUninitialized, decoder->state());
@@ -186,7 +217,7 @@ TEST_F(MftH264DecoderTest, DecoderInitNoDxva) {
   VideoCodecConfig config;
   config.width = 800;
   config.height = 600;
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false, NULL));
   ASSERT_TRUE(decoder.get());
   decoder->Initialize(&loop, &handler, config);
   EXPECT_EQ(1, handler.init_count_);
@@ -200,12 +231,15 @@ TEST_F(MftH264DecoderTest, DecoderInitDxva) {
   VideoCodecConfig config;
   config.width = 800;
   config.height = 600;
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(true));
+  HWND hwnd = CreateDrawWindow(config.width, config.height);
+  ASSERT_TRUE(hwnd);
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(true, hwnd));
   ASSERT_TRUE(decoder.get());
   decoder->Initialize(&loop, &handler, config);
   EXPECT_EQ(1, handler.init_count_);
   EXPECT_EQ(MftH264Decoder::kNormal, decoder->state());
   decoder->Uninitialize();
+  DestroyWindow(hwnd);
 }
 
 TEST_F(MftH264DecoderTest, DecoderUninit) {
@@ -214,7 +248,7 @@ TEST_F(MftH264DecoderTest, DecoderUninit) {
   VideoCodecConfig config;
   config.width = 800;
   config.height = 600;
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false, NULL));
   ASSERT_TRUE(decoder.get());
   decoder->Initialize(&loop, &handler, config);
   EXPECT_EQ(MftH264Decoder::kNormal, decoder->state());
@@ -229,7 +263,7 @@ TEST_F(MftH264DecoderTest, UninitBeforeInit) {
   VideoCodecConfig config;
   config.width = 800;
   config.height = 600;
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false, NULL));
   ASSERT_TRUE(decoder.get());
   decoder->Uninitialize();
   EXPECT_EQ(0, handler.uninit_count_);
@@ -241,7 +275,7 @@ TEST_F(MftH264DecoderTest, InitWithNegativeDimensions) {
   VideoCodecConfig config;
   config.width = -123;
   config.height = -456;
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false, NULL));
   ASSERT_TRUE(decoder.get());
   decoder->Initialize(&loop, &handler, config);
   EXPECT_EQ(MftH264Decoder::kNormal, decoder->state());
@@ -256,7 +290,7 @@ TEST_F(MftH264DecoderTest, InitWithTooHighDimensions) {
   VideoCodecConfig config;
   config.width = kDecoderMaxWidth + 1;
   config.height = kDecoderMaxHeight + 1;
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false, NULL));
   ASSERT_TRUE(decoder.get());
   decoder->Initialize(&loop, &handler, config);
   EXPECT_EQ(MftH264Decoder::kNormal, decoder->state());
@@ -271,7 +305,7 @@ TEST_F(MftH264DecoderTest, DrainOnEmptyBuffer) {
   VideoCodecConfig config;
   config.width = 1024;
   config.height = 768;
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false, NULL));
   ASSERT_TRUE(decoder.get());
   decoder->Initialize(&loop, &handler, config);
   EXPECT_EQ(MftH264Decoder::kNormal, decoder->state());
@@ -300,7 +334,7 @@ TEST_F(MftH264DecoderTest, NoOutputOnGarbageInput) {
   VideoCodecConfig config;
   config.width = 1024;
   config.height = 768;
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false, NULL));
   ASSERT_TRUE(decoder.get());
   decoder->Initialize(&loop, &handler, config);
   EXPECT_EQ(MftH264Decoder::kNormal, decoder->state());
@@ -328,7 +362,7 @@ TEST_F(MftH264DecoderTest, FlushAtStart) {
   VideoCodecConfig config;
   config.width = 1024;
   config.height = 768;
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false, NULL));
   ASSERT_TRUE(decoder.get());
   decoder->Initialize(&loop, &handler, config);
   EXPECT_EQ(MftH264Decoder::kNormal, decoder->state());
@@ -348,7 +382,7 @@ TEST_F(MftH264DecoderTest, NoFlushAtStopped) {
   VideoCodecConfig config;
   config.width = 1024;
   config.height = 768;
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false));
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(false, NULL));
   ASSERT_TRUE(decoder.get());
   decoder->Initialize(&loop, &handler, config);
   EXPECT_EQ(MftH264Decoder::kNormal, decoder->state());
@@ -391,7 +425,9 @@ void DecodeValidVideo(const std::string& filename, int num_frames, bool dxva) {
   VideoCodecConfig config;
   config.width = 1;
   config.height = 1;
-  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(dxva));
+  HWND hwnd = CreateDrawWindow(config.width, config.height);
+  ASSERT_TRUE(hwnd);
+  scoped_ptr<MftH264Decoder> decoder(new MftH264Decoder(dxva, hwnd));
   ASSERT_TRUE(decoder.get());
   decoder->Initialize(&loop, &handler, config);
   EXPECT_EQ(MftH264Decoder::kNormal, decoder->state());
@@ -410,6 +446,7 @@ void DecodeValidVideo(const std::string& filename, int num_frames, bool dxva) {
   EXPECT_GE(handler.empty_buffer_callback_count_, num_frames);
   EXPECT_EQ(num_frames, handler.fill_buffer_callback_count_ - 1);
   decoder->Uninitialize();
+  DestroyWindow(hwnd);
 }
 
 TEST_F(MftH264DecoderTest, DecodeValidVideoDxva) {
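Since DecoderInitDxva and DecodeValidVideo now create and destroy a window by hand, a small RAII wrapper would drop the manual DestroyWindow() calls and keep cleanup correct on early test exits. A possible follow-up, not part of this patch (the class name is illustrative; it reuses the test's own CreateDrawWindow()):

    // Owns a test window for the lifetime of a scope; destroys it on exit.
    class ScopedDrawWindow {
     public:
      ScopedDrawWindow(int width, int height)
          : hwnd_(CreateDrawWindow(width, height)) {}
      ~ScopedDrawWindow() {
        if (hwnd_)
          DestroyWindow(hwnd_);
      }
      HWND get() const { return hwnd_; }
     private:
      HWND hwnd_;
      DISALLOW_COPY_AND_ASSIGN(ScopedDrawWindow);
    };

    // Usage in a test body:
    //   ScopedDrawWindow window(config.width, config.height);
    //   ASSERT_TRUE(window.get());
    //   scoped_ptr<MftH264Decoder> decoder(
    //       new MftH264Decoder(true, window.get()));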