summaryrefslogtreecommitdiff
path: root/dom/media/platforms/wmf
diff options
context:
space:
mode:
authorMatt A. Tobin <mattatobin@localhost.localdomain>2018-02-02 04:16:08 -0500
committerMatt A. Tobin <mattatobin@localhost.localdomain>2018-02-02 04:16:08 -0500
commit5f8de423f190bbb79a62f804151bc24824fa32d8 (patch)
tree10027f336435511475e392454359edea8e25895d /dom/media/platforms/wmf
parent49ee0794b5d912db1f95dce6eb52d781dc210db5 (diff)
downloaduxp-5f8de423f190bbb79a62f804151bc24824fa32d8.tar.gz
Add m-esr52 at 52.6.0
Diffstat (limited to 'dom/media/platforms/wmf')
-rw-r--r--dom/media/platforms/wmf/DXVA2Manager.cpp955
-rw-r--r--dom/media/platforms/wmf/DXVA2Manager.h56
-rw-r--r--dom/media/platforms/wmf/MFTDecoder.cpp305
-rw-r--r--dom/media/platforms/wmf/MFTDecoder.h111
-rw-r--r--dom/media/platforms/wmf/WMF.h104
-rw-r--r--dom/media/platforms/wmf/WMFAudioMFTManager.cpp358
-rw-r--r--dom/media/platforms/wmf/WMFAudioMFTManager.h78
-rw-r--r--dom/media/platforms/wmf/WMFDecoderModule.cpp257
-rw-r--r--dom/media/platforms/wmf/WMFDecoderModule.h56
-rw-r--r--dom/media/platforms/wmf/WMFMediaDataDecoder.cpp227
-rw-r--r--dom/media/platforms/wmf/WMFMediaDataDecoder.h147
-rw-r--r--dom/media/platforms/wmf/WMFUtils.cpp311
-rw-r--r--dom/media/platforms/wmf/WMFUtils.h67
-rw-r--r--dom/media/platforms/wmf/WMFVideoMFTManager.cpp1016
-rw-r--r--dom/media/platforms/wmf/WMFVideoMFTManager.h125
-rw-r--r--dom/media/platforms/wmf/moz.build34
16 files changed, 4207 insertions, 0 deletions
diff --git a/dom/media/platforms/wmf/DXVA2Manager.cpp b/dom/media/platforms/wmf/DXVA2Manager.cpp
new file mode 100644
index 0000000000..9fdb0fa200
--- /dev/null
+++ b/dom/media/platforms/wmf/DXVA2Manager.cpp
@@ -0,0 +1,955 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "DXVA2Manager.h"
+#include <d3d11.h>
+#include "nsThreadUtils.h"
+#include "ImageContainer.h"
+#include "gfxWindowsPlatform.h"
+#include "D3D9SurfaceImage.h"
+#include "mozilla/gfx/DeviceManagerDx.h"
+#include "mozilla/layers/D3D11ShareHandleImage.h"
+#include "mozilla/layers/ImageBridgeChild.h"
+#include "mozilla/layers/TextureForwarder.h"
+#include "mozilla/Telemetry.h"
+#include "MediaTelemetryConstants.h"
+#include "mfapi.h"
+#include "gfxPrefs.h"
+#include "MFTDecoder.h"
+#include "DriverCrashGuard.h"
+#include "nsPrintfCString.h"
+#include "gfxCrashReporterUtils.h"
+#include "VideoUtils.h"
+
+const CLSID CLSID_VideoProcessorMFT =
+{
+ 0x88753b26,
+ 0x5b24,
+ 0x49bd,
+ { 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78, 0xc9, 0x82 }
+};
+
+const GUID MF_XVP_PLAYBACK_MODE =
+{
+ 0x3c5d293f,
+ 0xad67,
+ 0x4e29,
+ { 0xaf, 0x12, 0xcf, 0x3e, 0x23, 0x8a, 0xcc, 0xe9 }
+};
+
+DEFINE_GUID(MF_LOW_LATENCY,
+ 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
+
+// R600, R700, Evergreen and Cayman AMD cards. These support DXVA via UVD3 or earlier, and don't
+// handle 1080p60 well.
+static const DWORD sAMDPreUVD4[] = {
+ 0x9400, 0x9401, 0x9402, 0x9403, 0x9405, 0x940a, 0x940b, 0x940f, 0x94c0, 0x94c1, 0x94c3, 0x94c4, 0x94c5,
+ 0x94c6, 0x94c7, 0x94c8, 0x94c9, 0x94cb, 0x94cc, 0x94cd, 0x9580, 0x9581, 0x9583, 0x9586, 0x9587, 0x9588,
+ 0x9589, 0x958a, 0x958b, 0x958c, 0x958d, 0x958e, 0x958f, 0x9500, 0x9501, 0x9504, 0x9505, 0x9506, 0x9507,
+ 0x9508, 0x9509, 0x950f, 0x9511, 0x9515, 0x9517, 0x9519, 0x95c0, 0x95c2, 0x95c4, 0x95c5, 0x95c6, 0x95c7,
+ 0x95c9, 0x95cc, 0x95cd, 0x95ce, 0x95cf, 0x9590, 0x9591, 0x9593, 0x9595, 0x9596, 0x9597, 0x9598, 0x9599,
+ 0x959b, 0x9610, 0x9611, 0x9612, 0x9613, 0x9614, 0x9615, 0x9616, 0x9710, 0x9711, 0x9712, 0x9713, 0x9714,
+ 0x9715, 0x9440, 0x9441, 0x9442, 0x9443, 0x9444, 0x9446, 0x944a, 0x944b, 0x944c, 0x944e, 0x9450, 0x9452,
+ 0x9456, 0x945a, 0x945b, 0x945e, 0x9460, 0x9462, 0x946a, 0x946b, 0x947a, 0x947b, 0x9480, 0x9487, 0x9488,
+ 0x9489, 0x948a, 0x948f, 0x9490, 0x9491, 0x9495, 0x9498, 0x949c, 0x949e, 0x949f, 0x9540, 0x9541, 0x9542,
+ 0x954e, 0x954f, 0x9552, 0x9553, 0x9555, 0x9557, 0x955f, 0x94a0, 0x94a1, 0x94a3, 0x94b1, 0x94b3, 0x94b4,
+ 0x94b5, 0x94b9, 0x68e0, 0x68e1, 0x68e4, 0x68e5, 0x68e8, 0x68e9, 0x68f1, 0x68f2, 0x68f8, 0x68f9, 0x68fa,
+ 0x68fe, 0x68c0, 0x68c1, 0x68c7, 0x68c8, 0x68c9, 0x68d8, 0x68d9, 0x68da, 0x68de, 0x68a0, 0x68a1, 0x68a8,
+ 0x68a9, 0x68b0, 0x68b8, 0x68b9, 0x68ba, 0x68be, 0x68bf, 0x6880, 0x6888, 0x6889, 0x688a, 0x688c, 0x688d,
+ 0x6898, 0x6899, 0x689b, 0x689e, 0x689c, 0x689d, 0x9802, 0x9803, 0x9804, 0x9805, 0x9806, 0x9807, 0x9808,
+ 0x9809, 0x980a, 0x9640, 0x9641, 0x9647, 0x9648, 0x964a, 0x964b, 0x964c, 0x964e, 0x964f, 0x9642, 0x9643,
+ 0x9644, 0x9645, 0x9649, 0x6720, 0x6721, 0x6722, 0x6723, 0x6724, 0x6725, 0x6726, 0x6727, 0x6728, 0x6729,
+ 0x6738, 0x6739, 0x673e, 0x6740, 0x6741, 0x6742, 0x6743, 0x6744, 0x6745, 0x6746, 0x6747, 0x6748, 0x6749,
+ 0x674a, 0x6750, 0x6751, 0x6758, 0x6759, 0x675b, 0x675d, 0x675f, 0x6840, 0x6841, 0x6842, 0x6843, 0x6849,
+ 0x6850, 0x6858, 0x6859, 0x6760, 0x6761, 0x6762, 0x6763, 0x6764, 0x6765, 0x6766, 0x6767, 0x6768, 0x6770,
+ 0x6771, 0x6772, 0x6778, 0x6779, 0x677b, 0x6700, 0x6701, 0x6702, 0x6703, 0x6704, 0x6705, 0x6706, 0x6707,
+ 0x6708, 0x6709, 0x6718, 0x6719, 0x671c, 0x671d, 0x671f, 0x9900, 0x9901, 0x9903, 0x9904, 0x9905, 0x9906,
+ 0x9907, 0x9908, 0x9909, 0x990a, 0x990b, 0x990c, 0x990d, 0x990e, 0x990f, 0x9910, 0x9913, 0x9917, 0x9918,
+ 0x9919, 0x9990, 0x9991, 0x9992, 0x9993, 0x9994, 0x9995, 0x9996, 0x9997, 0x9998, 0x9999, 0x999a, 0x999b,
+ 0x999c, 0x999d, 0x99a0, 0x99a2, 0x99a4
+};
+
+// The size we use for our synchronization surface.
+// 16x16 is the size recommended by Microsoft (in the D3D9ExDXGISharedSurf sample) that works
+// best to avoid driver bugs.
+static const uint32_t kSyncSurfaceSize = 16;
+
+namespace mozilla {
+
+using layers::Image;
+using layers::ImageContainer;
+using layers::D3D9SurfaceImage;
+using layers::D3D9RecycleAllocator;
+using layers::D3D11ShareHandleImage;
+using layers::D3D11RecycleAllocator;
+
+class D3D9DXVA2Manager : public DXVA2Manager
+{
+public:
+ D3D9DXVA2Manager();
+ virtual ~D3D9DXVA2Manager();
+
+ HRESULT Init(layers::KnowsCompositor* aKnowsCompositor,
+ nsACString& aFailureReason);
+
+ IUnknown* GetDXVADeviceManager() override;
+
+ // Copies a region (aRegion) of the video frame stored in aVideoSample
+ // into an image which is returned by aOutImage.
+ HRESULT CopyToImage(IMFSample* aVideoSample,
+ const nsIntRect& aRegion,
+ Image** aOutImage) override;
+
+ bool SupportsConfig(IMFMediaType* aType, float aFramerate) override;
+
+private:
+ RefPtr<IDirect3D9Ex> mD3D9;
+ RefPtr<IDirect3DDevice9Ex> mDevice;
+ RefPtr<IDirect3DDeviceManager9> mDeviceManager;
+ RefPtr<D3D9RecycleAllocator> mTextureClientAllocator;
+ RefPtr<IDirectXVideoDecoderService> mDecoderService;
+ RefPtr<IDirect3DSurface9> mSyncSurface;
+ GUID mDecoderGUID;
+ UINT32 mResetToken;
+ bool mFirstFrame;
+ bool mIsAMDPreUVD4;
+};
+
+void GetDXVA2ExtendedFormatFromMFMediaType(IMFMediaType *pType,
+ DXVA2_ExtendedFormat *pFormat)
+{
+ // Get the interlace mode.
+ MFVideoInterlaceMode interlace =
+ (MFVideoInterlaceMode)MFGetAttributeUINT32(pType, MF_MT_INTERLACE_MODE, MFVideoInterlace_Unknown);
+
+ if (interlace == MFVideoInterlace_MixedInterlaceOrProgressive) {
+ pFormat->SampleFormat = DXVA2_SampleFieldInterleavedEvenFirst;
+ } else {
+ pFormat->SampleFormat = (UINT)interlace;
+ }
+
+ pFormat->VideoChromaSubsampling =
+ MFGetAttributeUINT32(pType, MF_MT_VIDEO_CHROMA_SITING, MFVideoChromaSubsampling_Unknown);
+ pFormat->NominalRange =
+ MFGetAttributeUINT32(pType, MF_MT_VIDEO_NOMINAL_RANGE, MFNominalRange_Unknown);
+ pFormat->VideoTransferMatrix =
+ MFGetAttributeUINT32(pType, MF_MT_YUV_MATRIX, MFVideoTransferMatrix_Unknown);
+ pFormat->VideoLighting =
+ MFGetAttributeUINT32(pType, MF_MT_VIDEO_LIGHTING, MFVideoLighting_Unknown);
+ pFormat->VideoPrimaries =
+ MFGetAttributeUINT32(pType, MF_MT_VIDEO_PRIMARIES, MFVideoPrimaries_Unknown);
+ pFormat->VideoTransferFunction =
+ MFGetAttributeUINT32(pType, MF_MT_TRANSFER_FUNCTION, MFVideoTransFunc_Unknown);
+}
+
+HRESULT ConvertMFTypeToDXVAType(IMFMediaType *pType, DXVA2_VideoDesc *pDesc)
+{
+ ZeroMemory(pDesc, sizeof(*pDesc));
+
+ // The D3D format is the first DWORD of the subtype GUID.
+ GUID subtype = GUID_NULL;
+ HRESULT hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+ pDesc->Format = (D3DFORMAT)subtype.Data1;
+
+ UINT32 width = 0;
+ UINT32 height = 0;
+ hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+ NS_ENSURE_TRUE(width <= MAX_VIDEO_WIDTH, E_FAIL);
+ NS_ENSURE_TRUE(height <= MAX_VIDEO_HEIGHT, E_FAIL);
+ pDesc->SampleWidth = width;
+ pDesc->SampleHeight = height;
+
+ UINT32 fpsNumerator = 0;
+ UINT32 fpsDenominator = 0;
+ hr = MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, &fpsNumerator, &fpsDenominator);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+ pDesc->InputSampleFreq.Numerator = fpsNumerator;
+ pDesc->InputSampleFreq.Denominator = fpsDenominator;
+
+ GetDXVA2ExtendedFormatFromMFMediaType(pType, &pDesc->SampleFormat);
+ pDesc->OutputFrameFreq = pDesc->InputSampleFreq;
+ if ((pDesc->SampleFormat.SampleFormat == DXVA2_SampleFieldInterleavedEvenFirst) ||
+ (pDesc->SampleFormat.SampleFormat == DXVA2_SampleFieldInterleavedOddFirst)) {
+ pDesc->OutputFrameFreq.Numerator *= 2;
+ }
+
+ return S_OK;
+}
+
+static const GUID DXVA2_ModeH264_E = {
+ 0x1b81be68, 0xa0c7, 0x11d3, { 0xb9, 0x84, 0x00, 0xc0, 0x4f, 0x2e, 0x73, 0xc5 }
+};
+
+static const GUID DXVA2_Intel_ModeH264_E = {
+ 0x604F8E68, 0x4951, 0x4c54, { 0x88, 0xFE, 0xAB, 0xD2, 0x5C, 0x15, 0xB3, 0xD6 }
+};
+
+// This tests if a DXVA video decoder can be created for the given media type/resolution.
+// It uses the same decoder device (DXVA2_ModeH264_E - DXVA2_ModeH264_VLD_NoFGT) as the H264
+// decoder MFT provided by windows (CLSID_CMSH264DecoderMFT) uses, so we can use it to determine
+// if the MFT will use software fallback or not.
+bool
+D3D9DXVA2Manager::SupportsConfig(IMFMediaType* aType, float aFramerate)
+{
+ MOZ_ASSERT(NS_IsMainThread());
+ gfx::D3D9VideoCrashGuard crashGuard;
+ if (crashGuard.Crashed()) {
+ NS_WARNING("DXVA2D3D9 crash detected");
+ return false;
+ }
+
+ DXVA2_VideoDesc desc;
+ HRESULT hr = ConvertMFTypeToDXVAType(aType, &desc);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ // AMD cards with UVD3 or earlier perform poorly trying to decode 1080p60 in hardware,
+ // so use software instead. Pick 45 as an arbitrary upper bound for the framerate we can
+ // handle.
+ if (mIsAMDPreUVD4 &&
+ (desc.SampleWidth >= 1920 || desc.SampleHeight >= 1088) &&
+ aFramerate > 45) {
+ return false;
+ }
+
+ UINT configCount;
+ DXVA2_ConfigPictureDecode* configs = nullptr;
+ hr = mDecoderService->GetDecoderConfigurations(mDecoderGUID, &desc, nullptr, &configCount, &configs);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ RefPtr<IDirect3DSurface9> surface;
+ hr = mDecoderService->CreateSurface(desc.SampleWidth, desc.SampleHeight, 0, (D3DFORMAT)MAKEFOURCC('N', 'V', '1', '2'),
+ D3DPOOL_DEFAULT, 0, DXVA2_VideoDecoderRenderTarget,
+ surface.StartAssignment(), NULL);
+ if (!SUCCEEDED(hr)) {
+ CoTaskMemFree(configs);
+ return false;
+ }
+
+ for (UINT i = 0; i < configCount; i++) {
+ RefPtr<IDirectXVideoDecoder> decoder;
+ IDirect3DSurface9* surfaces = surface;
+ hr = mDecoderService->CreateVideoDecoder(mDecoderGUID, &desc, &configs[i], &surfaces, 1, decoder.StartAssignment());
+ if (SUCCEEDED(hr) && decoder) {
+ CoTaskMemFree(configs);
+ return true;
+ }
+ }
+ CoTaskMemFree(configs);
+ return false;
+}
+
+D3D9DXVA2Manager::D3D9DXVA2Manager()
+ : mResetToken(0)
+ , mFirstFrame(true)
+ , mIsAMDPreUVD4(false)
+{
+ MOZ_COUNT_CTOR(D3D9DXVA2Manager);
+ MOZ_ASSERT(NS_IsMainThread());
+}
+
+D3D9DXVA2Manager::~D3D9DXVA2Manager()
+{
+ MOZ_COUNT_DTOR(D3D9DXVA2Manager);
+ MOZ_ASSERT(NS_IsMainThread());
+}
+
+IUnknown*
+D3D9DXVA2Manager::GetDXVADeviceManager()
+{
+ MutexAutoLock lock(mLock);
+ return mDeviceManager;
+}
+
+HRESULT
+D3D9DXVA2Manager::Init(layers::KnowsCompositor* aKnowsCompositor,
+ nsACString& aFailureReason)
+{
+ MOZ_ASSERT(NS_IsMainThread());
+
+ ScopedGfxFeatureReporter reporter("DXVA2D3D9");
+
+ gfx::D3D9VideoCrashGuard crashGuard;
+ if (crashGuard.Crashed()) {
+ NS_WARNING("DXVA2D3D9 crash detected");
+ aFailureReason.AssignLiteral("DXVA2D3D9 crashes detected in the past");
+ return E_FAIL;
+ }
+
+ // Create D3D9Ex.
+ HMODULE d3d9lib = LoadLibraryW(L"d3d9.dll");
+ NS_ENSURE_TRUE(d3d9lib, E_FAIL);
+ decltype(Direct3DCreate9Ex)* d3d9Create =
+ (decltype(Direct3DCreate9Ex)*) GetProcAddress(d3d9lib, "Direct3DCreate9Ex");
+ if (!d3d9Create) {
+ NS_WARNING("Couldn't find Direct3DCreate9Ex symbol in d3d9.dll");
+ aFailureReason.AssignLiteral("Couldn't find Direct3DCreate9Ex symbol in d3d9.dll");
+ return E_FAIL;
+ }
+ RefPtr<IDirect3D9Ex> d3d9Ex;
+ HRESULT hr = d3d9Create(D3D_SDK_VERSION, getter_AddRefs(d3d9Ex));
+ if (!d3d9Ex) {
+ NS_WARNING("Direct3DCreate9 failed");
+ aFailureReason.AssignLiteral("Direct3DCreate9 failed");
+ return E_FAIL;
+ }
+
+ // Ensure we can do the YCbCr->RGB conversion in StretchRect.
+ // Fail if we can't.
+ hr = d3d9Ex->CheckDeviceFormatConversion(D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ (D3DFORMAT)MAKEFOURCC('N','V','1','2'),
+ D3DFMT_X8R8G8B8);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("CheckDeviceFormatConversion failed with error %X", hr);
+ return hr;
+ }
+
+ // Create D3D9DeviceEx. We pass null HWNDs here even though the documentation
+ // suggests that one of them should not be. At this point in time Chromium
+ // does the same thing for video acceleration.
+ D3DPRESENT_PARAMETERS params = {0};
+ params.BackBufferWidth = 1;
+ params.BackBufferHeight = 1;
+ params.BackBufferFormat = D3DFMT_A8R8G8B8;
+ params.BackBufferCount = 1;
+ params.SwapEffect = D3DSWAPEFFECT_DISCARD;
+ params.hDeviceWindow = nullptr;
+ params.Windowed = TRUE;
+ params.Flags = D3DPRESENTFLAG_VIDEO;
+
+ RefPtr<IDirect3DDevice9Ex> device;
+ hr = d3d9Ex->CreateDeviceEx(D3DADAPTER_DEFAULT,
+ D3DDEVTYPE_HAL,
+ nullptr,
+ D3DCREATE_FPU_PRESERVE |
+ D3DCREATE_MULTITHREADED |
+ D3DCREATE_MIXED_VERTEXPROCESSING,
+ &params,
+ nullptr,
+ getter_AddRefs(device));
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("CreateDeviceEx failed with error %X", hr);
+ return hr;
+ }
+
+ // Ensure we can create queries to synchronize operations between devices.
+ // Without this, when we make a copy of the frame in order to share it with
+ // another device, we can't be sure that the copy has finished before the
+ // other device starts using it.
+ RefPtr<IDirect3DQuery9> query;
+
+ hr = device->CreateQuery(D3DQUERYTYPE_EVENT, getter_AddRefs(query));
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("CreateQuery failed with error %X", hr);
+ return hr;
+ }
+
+ // Create and initialize IDirect3DDeviceManager9.
+ UINT resetToken = 0;
+ RefPtr<IDirect3DDeviceManager9> deviceManager;
+
+ hr = wmf::DXVA2CreateDirect3DDeviceManager9(&resetToken,
+ getter_AddRefs(deviceManager));
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("DXVA2CreateDirect3DDeviceManager9 failed with error %X", hr);
+ return hr;
+ }
+ hr = deviceManager->ResetDevice(device, resetToken);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("IDirect3DDeviceManager9::ResetDevice failed with error %X", hr);
+ return hr;
+ }
+
+ HANDLE deviceHandle;
+ RefPtr<IDirectXVideoDecoderService> decoderService;
+ hr = deviceManager->OpenDeviceHandle(&deviceHandle);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("IDirect3DDeviceManager9::OpenDeviceHandle failed with error %X", hr);
+ return hr;
+ }
+
+ hr = deviceManager->GetVideoService(deviceHandle, IID_PPV_ARGS(decoderService.StartAssignment()));
+ deviceManager->CloseDeviceHandle(deviceHandle);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("IDirectXVideoDecoderServer::GetVideoService failed with error %X", hr);
+ return hr;
+ }
+
+ UINT deviceCount;
+ GUID* decoderDevices = nullptr;
+ hr = decoderService->GetDecoderDeviceGuids(&deviceCount, &decoderDevices);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("IDirectXVideoDecoderServer::GetDecoderDeviceGuids failed with error %X", hr);
+ return hr;
+ }
+
+ bool found = false;
+ for (UINT i = 0; i < deviceCount; i++) {
+ if (decoderDevices[i] == DXVA2_ModeH264_E ||
+ decoderDevices[i] == DXVA2_Intel_ModeH264_E) {
+ mDecoderGUID = decoderDevices[i];
+ found = true;
+ break;
+ }
+ }
+ CoTaskMemFree(decoderDevices);
+
+ if (!found) {
+ aFailureReason.AssignLiteral("Failed to find an appropriate decoder GUID");
+ return E_FAIL;
+ }
+
+ D3DADAPTER_IDENTIFIER9 adapter;
+ hr = d3d9Ex->GetAdapterIdentifier(D3DADAPTER_DEFAULT, 0, &adapter);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("IDirect3D9Ex::GetAdapterIdentifier failed with error %X", hr);
+ return hr;
+ }
+
+ if (adapter.VendorId == 0x1022 && !gfxPrefs::PDMWMFSkipBlacklist()) {
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(sAMDPreUVD4); i++) {
+ if (adapter.DeviceId == sAMDPreUVD4[i]) {
+ mIsAMDPreUVD4 = true;
+ break;
+ }
+ }
+ }
+
+ RefPtr<IDirect3DSurface9> syncSurf;
+ hr = device->CreateRenderTarget(kSyncSurfaceSize, kSyncSurfaceSize,
+ D3DFMT_X8R8G8B8, D3DMULTISAMPLE_NONE,
+ 0, TRUE, getter_AddRefs(syncSurf), NULL);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ mDecoderService = decoderService;
+
+ mResetToken = resetToken;
+ mD3D9 = d3d9Ex;
+ mDevice = device;
+ mDeviceManager = deviceManager;
+ mSyncSurface = syncSurf;
+
+ if (layers::ImageBridgeChild::GetSingleton()) {
+ // There's no proper KnowsCompositor for ImageBridge currently (and it
+ // implements the interface), so just use that if it's available.
+ mTextureClientAllocator = new D3D9RecycleAllocator(layers::ImageBridgeChild::GetSingleton().get(),
+ mDevice);
+ } else {
+ mTextureClientAllocator = new D3D9RecycleAllocator(aKnowsCompositor,
+ mDevice);
+ }
+ mTextureClientAllocator->SetMaxPoolSize(5);
+
+ Telemetry::Accumulate(Telemetry::MEDIA_DECODER_BACKEND_USED,
+ uint32_t(media::MediaDecoderBackend::WMFDXVA2D3D9));
+
+ reporter.SetSuccessful();
+
+ return S_OK;
+}
+
+HRESULT
+D3D9DXVA2Manager::CopyToImage(IMFSample* aSample,
+ const nsIntRect& aRegion,
+ Image** aOutImage)
+{
+ RefPtr<IMFMediaBuffer> buffer;
+ HRESULT hr = aSample->GetBufferByIndex(0, getter_AddRefs(buffer));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ RefPtr<IDirect3DSurface9> surface;
+ hr = wmf::MFGetService(buffer,
+ MR_BUFFER_SERVICE,
+ IID_IDirect3DSurface9,
+ getter_AddRefs(surface));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ RefPtr<D3D9SurfaceImage> image = new D3D9SurfaceImage();
+ hr = image->AllocateAndCopy(mTextureClientAllocator, surface, aRegion);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ RefPtr<IDirect3DSurface9> sourceSurf = image->GetD3D9Surface();
+
+ // Copy a small rect into our sync surface, and then map it
+ // to block until decoding/color conversion completes.
+ RECT copyRect = { 0, 0, kSyncSurfaceSize, kSyncSurfaceSize };
+ hr = mDevice->StretchRect(sourceSurf, &copyRect, mSyncSurface, &copyRect, D3DTEXF_NONE);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ D3DLOCKED_RECT lockedRect;
+ hr = mSyncSurface->LockRect(&lockedRect, NULL, D3DLOCK_READONLY);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = mSyncSurface->UnlockRect();
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ image.forget(aOutImage);
+ return S_OK;
+}
+
+// Count of the number of DXVAManager's we've created. This is also the
+// number of videos we're decoding with DXVA. Use on main thread only.
+static uint32_t sDXVAVideosCount = 0;
+
+/* static */
+DXVA2Manager*
+DXVA2Manager::CreateD3D9DXVA(layers::KnowsCompositor* aKnowsCompositor,
+ nsACString& aFailureReason)
+{
+ MOZ_ASSERT(NS_IsMainThread());
+ HRESULT hr;
+
+ // DXVA processing takes up a lot of GPU resources, so limit the number of
+ // videos we use DXVA with at any one time.
+ uint32_t dxvaLimit = gfxPrefs::PDMWMFMaxDXVAVideos();
+
+ if (sDXVAVideosCount == dxvaLimit) {
+ aFailureReason.AssignLiteral("Too many DXVA videos playing");
+ return nullptr;
+ }
+
+ nsAutoPtr<D3D9DXVA2Manager> d3d9Manager(new D3D9DXVA2Manager());
+ hr = d3d9Manager->Init(aKnowsCompositor, aFailureReason);
+ if (SUCCEEDED(hr)) {
+ return d3d9Manager.forget();
+ }
+
+ // No hardware accelerated video decoding. :(
+ return nullptr;
+}
+
+class D3D11DXVA2Manager : public DXVA2Manager
+{
+public:
+ D3D11DXVA2Manager();
+ virtual ~D3D11DXVA2Manager();
+
+ HRESULT Init(layers::KnowsCompositor* aKnowsCompositor,
+ nsACString& aFailureReason);
+
+ IUnknown* GetDXVADeviceManager() override;
+
+ // Copies a region (aRegion) of the video frame stored in aVideoSample
+ // into an image which is returned by aOutImage.
+ HRESULT CopyToImage(IMFSample* aVideoSample,
+ const nsIntRect& aRegion,
+ Image** aOutImage) override;
+
+ HRESULT ConfigureForSize(uint32_t aWidth, uint32_t aHeight) override;
+
+ bool IsD3D11() override { return true; }
+
+ bool SupportsConfig(IMFMediaType* aType, float aFramerate) override;
+
+private:
+ HRESULT CreateFormatConverter();
+
+ HRESULT CreateOutputSample(RefPtr<IMFSample>& aSample,
+ ID3D11Texture2D* aTexture);
+
+ RefPtr<ID3D11Device> mDevice;
+ RefPtr<ID3D11DeviceContext> mContext;
+ RefPtr<IMFDXGIDeviceManager> mDXGIDeviceManager;
+ RefPtr<MFTDecoder> mTransform;
+ RefPtr<D3D11RecycleAllocator> mTextureClientAllocator;
+ RefPtr<ID3D11Texture2D> mSyncSurface;
+ GUID mDecoderGUID;
+ uint32_t mWidth;
+ uint32_t mHeight;
+ UINT mDeviceManagerToken;
+ bool mIsAMDPreUVD4;
+};
+
+bool
+D3D11DXVA2Manager::SupportsConfig(IMFMediaType* aType, float aFramerate)
+{
+ MOZ_ASSERT(NS_IsMainThread());
+ gfx::D3D11VideoCrashGuard crashGuard;
+ if (crashGuard.Crashed()) {
+ NS_WARNING("DXVA2D3D9 crash detected");
+ return false;
+ }
+
+ RefPtr<ID3D11VideoDevice> videoDevice;
+ HRESULT hr = mDevice->QueryInterface(static_cast<ID3D11VideoDevice**>(getter_AddRefs(videoDevice)));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ D3D11_VIDEO_DECODER_DESC desc;
+ desc.Guid = mDecoderGUID;
+
+ UINT32 width = 0;
+ UINT32 height = 0;
+ hr = MFGetAttributeSize(aType, MF_MT_FRAME_SIZE, &width, &height);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+ NS_ENSURE_TRUE(width <= MAX_VIDEO_WIDTH, false);
+ NS_ENSURE_TRUE(height <= MAX_VIDEO_HEIGHT, false);
+ desc.SampleWidth = width;
+ desc.SampleHeight = height;
+
+ desc.OutputFormat = DXGI_FORMAT_NV12;
+
+ // AMD cards with UVD3 or earlier perform poorly trying to decode 1080p60 in hardware,
+ // so use software instead. Pick 45 as an arbitrary upper bound for the framerate we can
+ // handle.
+ if (mIsAMDPreUVD4 &&
+ (desc.SampleWidth >= 1920 || desc.SampleHeight >= 1088) &&
+ aFramerate > 45) {
+ return false;
+ }
+
+ UINT configCount = 0;
+ hr = videoDevice->GetVideoDecoderConfigCount(&desc, &configCount);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ for (UINT i = 0; i < configCount; i++) {
+ D3D11_VIDEO_DECODER_CONFIG config;
+ hr = videoDevice->GetVideoDecoderConfig(&desc, i, &config);
+ if (SUCCEEDED(hr)) {
+ RefPtr<ID3D11VideoDecoder> decoder;
+ hr = videoDevice->CreateVideoDecoder(&desc, &config, decoder.StartAssignment());
+ if (SUCCEEDED(hr) && decoder) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+D3D11DXVA2Manager::D3D11DXVA2Manager()
+ : mWidth(0)
+ , mHeight(0)
+ , mDeviceManagerToken(0)
+ , mIsAMDPreUVD4(false)
+{
+}
+
+D3D11DXVA2Manager::~D3D11DXVA2Manager()
+{
+}
+
+IUnknown*
+D3D11DXVA2Manager::GetDXVADeviceManager()
+{
+ MutexAutoLock lock(mLock);
+ return mDXGIDeviceManager;
+}
+
+HRESULT
+D3D11DXVA2Manager::Init(layers::KnowsCompositor* aKnowsCompositor,
+ nsACString& aFailureReason)
+{
+ HRESULT hr;
+
+ ScopedGfxFeatureReporter reporter("DXVA2D3D11");
+
+ gfx::D3D11VideoCrashGuard crashGuard;
+ if (crashGuard.Crashed()) {
+ NS_WARNING("DXVA2D3D11 crash detected");
+ aFailureReason.AssignLiteral("DXVA2D3D11 crashes detected in the past");
+ return E_FAIL;
+ }
+
+ mDevice = gfx::DeviceManagerDx::Get()->CreateDecoderDevice();
+ if (!mDevice) {
+ aFailureReason.AssignLiteral("Failed to create D3D11 device for decoder");
+ return E_FAIL;
+ }
+
+ mDevice->GetImmediateContext(getter_AddRefs(mContext));
+ if (!mContext) {
+ aFailureReason.AssignLiteral("Failed to get immediate context for d3d11 device");
+ return E_FAIL;
+ }
+
+ hr = wmf::MFCreateDXGIDeviceManager(&mDeviceManagerToken, getter_AddRefs(mDXGIDeviceManager));
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("MFCreateDXGIDeviceManager failed with code %X", hr);
+ return hr;
+ }
+
+ hr = mDXGIDeviceManager->ResetDevice(mDevice, mDeviceManagerToken);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("IMFDXGIDeviceManager::ResetDevice failed with code %X", hr);
+ return hr;
+ }
+
+ mTransform = new MFTDecoder();
+ hr = mTransform->Create(CLSID_VideoProcessorMFT);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("MFTDecoder::Create(CLSID_VideoProcessorMFT) failed with code %X", hr);
+ return hr;
+ }
+
+ hr = mTransform->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, ULONG_PTR(mDXGIDeviceManager.get()));
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("MFTDecoder::SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER) failed with code %X", hr);
+ return hr;
+ }
+
+ RefPtr<ID3D11VideoDevice> videoDevice;
+ hr = mDevice->QueryInterface(static_cast<ID3D11VideoDevice**>(getter_AddRefs(videoDevice)));
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("QI to ID3D11VideoDevice failed with code %X", hr);
+ return hr;
+ }
+
+ bool found = false;
+ UINT profileCount = videoDevice->GetVideoDecoderProfileCount();
+ for (UINT i = 0; i < profileCount; i++) {
+ GUID id;
+ hr = videoDevice->GetVideoDecoderProfile(i, &id);
+ if (SUCCEEDED(hr) && (id == DXVA2_ModeH264_E || id == DXVA2_Intel_ModeH264_E)) {
+ mDecoderGUID = id;
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ aFailureReason.AssignLiteral("Failed to find an appropriate decoder GUID");
+ return E_FAIL;
+ }
+
+ BOOL nv12Support = false;
+ hr = videoDevice->CheckVideoDecoderFormat(&mDecoderGUID, DXGI_FORMAT_NV12, &nv12Support);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("CheckVideoDecoderFormat failed with code %X", hr);
+ return hr;
+ }
+ if (!nv12Support) {
+ aFailureReason.AssignLiteral("Decoder doesn't support NV12 surfaces");
+ return E_FAIL;
+ }
+
+ RefPtr<IDXGIDevice> dxgiDevice;
+ hr = mDevice->QueryInterface(static_cast<IDXGIDevice**>(getter_AddRefs(dxgiDevice)));
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("QI to IDXGIDevice failed with code %X", hr);
+ return hr;
+ }
+
+ RefPtr<IDXGIAdapter> adapter;
+ hr = dxgiDevice->GetAdapter(adapter.StartAssignment());
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("IDXGIDevice::GetAdapter failed with code %X", hr);
+ return hr;
+ }
+
+ DXGI_ADAPTER_DESC adapterDesc;
+ hr = adapter->GetDesc(&adapterDesc);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("IDXGIAdapter::GetDesc failed with code %X", hr);
+ return hr;
+ }
+
+ if (adapterDesc.VendorId == 0x1022 && !gfxPrefs::PDMWMFSkipBlacklist()) {
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(sAMDPreUVD4); i++) {
+ if (adapterDesc.DeviceId == sAMDPreUVD4[i]) {
+ mIsAMDPreUVD4 = true;
+ break;
+ }
+ }
+ }
+
+ D3D11_TEXTURE2D_DESC desc;
+ desc.Width = kSyncSurfaceSize;
+ desc.Height = kSyncSurfaceSize;
+ desc.MipLevels = 1;
+ desc.ArraySize = 1;
+ desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
+ desc.SampleDesc.Count = 1;
+ desc.SampleDesc.Quality = 0;
+ desc.Usage = D3D11_USAGE_STAGING;
+ desc.BindFlags = 0;
+ desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
+ desc.MiscFlags = 0;
+
+ hr = mDevice->CreateTexture2D(&desc, NULL, getter_AddRefs(mSyncSurface));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ if (layers::ImageBridgeChild::GetSingleton()) {
+ // There's no proper KnowsCompositor for ImageBridge currently (and it
+ // implements the interface), so just use that if it's available.
+ mTextureClientAllocator = new D3D11RecycleAllocator(layers::ImageBridgeChild::GetSingleton().get(),
+ mDevice);
+ } else {
+ mTextureClientAllocator = new D3D11RecycleAllocator(aKnowsCompositor,
+ mDevice);
+ }
+ mTextureClientAllocator->SetMaxPoolSize(5);
+
+ Telemetry::Accumulate(Telemetry::MEDIA_DECODER_BACKEND_USED,
+ uint32_t(media::MediaDecoderBackend::WMFDXVA2D3D11));
+
+ reporter.SetSuccessful();
+
+ return S_OK;
+}
+
+HRESULT
+D3D11DXVA2Manager::CreateOutputSample(RefPtr<IMFSample>& aSample, ID3D11Texture2D* aTexture)
+{
+ RefPtr<IMFSample> sample;
+ HRESULT hr = wmf::MFCreateSample(getter_AddRefs(sample));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ RefPtr<IMFMediaBuffer> buffer;
+ hr = wmf::MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), aTexture, 0, FALSE, getter_AddRefs(buffer));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ sample->AddBuffer(buffer);
+
+ aSample = sample;
+ return S_OK;
+}
+
+HRESULT
+D3D11DXVA2Manager::CopyToImage(IMFSample* aVideoSample,
+ const nsIntRect& aRegion,
+ Image** aOutImage)
+{
+ NS_ENSURE_TRUE(aVideoSample, E_POINTER);
+ NS_ENSURE_TRUE(aOutImage, E_POINTER);
+
+ // Our video frame is stored in a non-sharable ID3D11Texture2D. We need
+ // to create a copy of that frame as a sharable resource, save its share
+ // handle, and put that handle into the rendering pipeline.
+
+ RefPtr<D3D11ShareHandleImage> image =
+ new D3D11ShareHandleImage(gfx::IntSize(mWidth, mHeight), aRegion);
+ bool ok = image->AllocateTexture(mTextureClientAllocator, mDevice);
+ NS_ENSURE_TRUE(ok, E_FAIL);
+
+ HRESULT hr = mTransform->Input(aVideoSample);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ RefPtr<IMFSample> sample;
+ RefPtr<ID3D11Texture2D> texture = image->GetTexture();
+ hr = CreateOutputSample(sample, texture);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = mTransform->Output(&sample);
+
+ RefPtr<ID3D11DeviceContext> ctx;
+ mDevice->GetImmediateContext(getter_AddRefs(ctx));
+
+ // Copy a small rect into our sync surface, and then map it
+ // to block until decoding/color conversion completes.
+ D3D11_BOX rect = { 0, 0, 0, kSyncSurfaceSize, kSyncSurfaceSize, 1 };
+ ctx->CopySubresourceRegion(mSyncSurface, 0, 0, 0, 0, texture, 0, &rect);
+
+ D3D11_MAPPED_SUBRESOURCE mapped;
+ hr = ctx->Map(mSyncSurface, 0, D3D11_MAP_READ, 0, &mapped);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ ctx->Unmap(mSyncSurface, 0);
+
+ image.forget(aOutImage);
+
+ return S_OK;
+}
+
+HRESULT ConfigureOutput(IMFMediaType* aOutput, void* aData)
+{
+ HRESULT hr = aOutput->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = aOutput->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ gfx::IntSize* size = reinterpret_cast<gfx::IntSize*>(aData);
+ hr = MFSetAttributeSize(aOutput, MF_MT_FRAME_SIZE, size->width, size->height);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ return S_OK;
+}
+
+HRESULT
+D3D11DXVA2Manager::ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
+{
+ mWidth = aWidth;
+ mHeight = aHeight;
+
+ RefPtr<IMFMediaType> inputType;
+ HRESULT hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = inputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = inputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ RefPtr<IMFAttributes> attr = mTransform->GetAttributes();
+
+ hr = attr->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = attr->SetUINT32(MF_LOW_LATENCY, FALSE);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, aWidth, aHeight);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ RefPtr<IMFMediaType> outputType;
+ hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = outputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_ARGB32);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ gfx::IntSize size(mWidth, mHeight);
+ hr = mTransform->SetMediaTypes(inputType, outputType, ConfigureOutput, &size);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ return S_OK;
+}
+
+/* static */
+DXVA2Manager*
+DXVA2Manager::CreateD3D11DXVA(layers::KnowsCompositor* aKnowsCompositor,
+ nsACString& aFailureReason)
+{
+ // DXVA processing takes up a lot of GPU resources, so limit the number of
+ // videos we use DXVA with at any one time.
+ uint32_t dxvaLimit = gfxPrefs::PDMWMFMaxDXVAVideos();
+
+ if (sDXVAVideosCount == dxvaLimit) {
+ aFailureReason.AssignLiteral("Too many DXVA videos playing");
+ return nullptr;
+ }
+
+ nsAutoPtr<D3D11DXVA2Manager> manager(new D3D11DXVA2Manager());
+ HRESULT hr = manager->Init(aKnowsCompositor, aFailureReason);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+ return manager.forget();
+}
+
+DXVA2Manager::DXVA2Manager()
+ : mLock("DXVA2Manager")
+{
+ MOZ_ASSERT(NS_IsMainThread());
+ ++sDXVAVideosCount;
+}
+
+DXVA2Manager::~DXVA2Manager()
+{
+ MOZ_ASSERT(NS_IsMainThread());
+ --sDXVAVideosCount;
+}
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/DXVA2Manager.h b/dom/media/platforms/wmf/DXVA2Manager.h
new file mode 100644
index 0000000000..0bdc02dd40
--- /dev/null
+++ b/dom/media/platforms/wmf/DXVA2Manager.h
@@ -0,0 +1,56 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+#if !defined(DXVA2Manager_h_)
+#define DXVA2Manager_h_
+
+#include "WMF.h"
+#include "nsAutoPtr.h"
+#include "mozilla/Mutex.h"
+#include "nsRect.h"
+
+namespace mozilla {
+
+namespace layers {
+class Image;
+class ImageContainer;
+class KnowsCompositor;
+}
+
+class DXVA2Manager {
+public:
+
+  // Creates and initializes a DXVA2Manager. We can use DXVA2 via either
+  // D3D9Ex or D3D11.
+  static DXVA2Manager* CreateD3D9DXVA(layers::KnowsCompositor* aKnowsCompositor, nsACString& aFailureReason);
+  static DXVA2Manager* CreateD3D11DXVA(layers::KnowsCompositor* aKnowsCompositor, nsACString& aFailureReason);
+
+  // Returns a pointer to the D3D device manager responsible for managing the
+  // device we're using for hardware accelerated video decoding. If we're using
+  // D3D9Ex, this is an IDirect3DDeviceManager9. For D3D11 this is an
+  // IMFDXGIDeviceManager. It is safe to call this on any thread.
+  virtual IUnknown* GetDXVADeviceManager() = 0;
+
+  // Creates an Image for the video frame stored in aVideoSample.
+  virtual HRESULT CopyToImage(IMFSample* aVideoSample,
+                              const nsIntRect& aRegion,
+                              layers::Image** aOutImage) = 0;
+
+  // Notifies the manager of the video frame dimensions so it can
+  // (re)configure size-dependent resources. Base implementation is a no-op;
+  // overridden by the D3D11 backend.
+  virtual HRESULT ConfigureForSize(uint32_t aWidth, uint32_t aHeight) { return S_OK; }
+
+  // True for the D3D11 backend; the base/D3D9 path returns false.
+  virtual bool IsD3D11() { return false; }
+
+  virtual ~DXVA2Manager();
+
+  // Returns whether the backend can hardware-decode a stream with the given
+  // media type at the given framerate.
+  virtual bool SupportsConfig(IMFMediaType* aType, float aFramerate) = 0;
+
+protected:
+  // Available to subclasses for guarding state shared across threads.
+  Mutex mLock;
+  DXVA2Manager();
+};
+
+} // namespace mozilla
+
+#endif // DXVA2Manager_h_
diff --git a/dom/media/platforms/wmf/MFTDecoder.cpp b/dom/media/platforms/wmf/MFTDecoder.cpp
new file mode 100644
index 0000000000..e634fcff94
--- /dev/null
+++ b/dom/media/platforms/wmf/MFTDecoder.cpp
@@ -0,0 +1,305 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFTDecoder.h"
+#include "nsThreadUtils.h"
+#include "WMFUtils.h"
+#include "mozilla/Logging.h"
+
+#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
+
+namespace mozilla {
+
+MFTDecoder::MFTDecoder()
+  : mMFTProvidesOutputSamples(false)
+  , mDiscontinuity(true)
+{
+  // Zero the stream-info structs; they are filled in by SetMediaTypes()
+  // once the MFT reports its input/output buffer requirements.
+  memset(&mInputStreamInfo, 0, sizeof(MFT_INPUT_STREAM_INFO));
+  memset(&mOutputStreamInfo, 0, sizeof(MFT_OUTPUT_STREAM_INFO));
+}
+
+MFTDecoder::~MFTDecoder()
+{
+  // RefPtr members release the COM transform and media types automatically.
+}
+
+HRESULT
+MFTDecoder::Create(const GUID& aMFTClsID)
+{
+  // Instantiate, via COM, the IMFTransform that performs the actual
+  // decoding. The double cast routes getter_AddRefs' IMFTransform** out
+  // parameter through CoCreateInstance's void** signature.
+  const HRESULT hr =
+    CoCreateInstance(aMFTClsID,
+                     nullptr,
+                     CLSCTX_INPROC_SERVER,
+                     IID_IMFTransform,
+                     reinterpret_cast<void**>(
+                       static_cast<IMFTransform**>(getter_AddRefs(mDecoder))));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  return S_OK;
+}
+
+HRESULT
+MFTDecoder::SetMediaTypes(IMFMediaType* aInputType,
+                          IMFMediaType* aOutputType,
+                          ConfigureOutputCallback aCallback,
+                          void* aData)
+{
+  // Configures the MFT's input and output types and transitions it into
+  // streaming state. aOutputType is kept so SetDecoderOutputType() can
+  // match against the MFT's available output types (now, and again after a
+  // stream change).
+  mOutputType = aOutputType;
+
+  // Set the input type to the one the caller gave us...
+  HRESULT hr = mDecoder->SetInputType(0, aInputType, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = SetDecoderOutputType(aCallback, aData);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Cache the input buffer size/alignment requirements for CreateInputSample().
+  hr = mDecoder->GetInputStreamInfo(0, &mInputStreamInfo);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Tell the MFT we're about to start streaming, then that the stream starts.
+  hr = SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  return S_OK;
+}
+
+already_AddRefed<IMFAttributes>
+MFTDecoder::GetAttributes()
+{
+  // Hand back the transform's attribute store, or null on failure.
+  RefPtr<IMFAttributes> attributes;
+  const HRESULT hr = mDecoder->GetAttributes(getter_AddRefs(attributes));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+  return attributes.forget();
+}
+
+HRESULT
+MFTDecoder::SetDecoderOutputType(ConfigureOutputCallback aCallback, void* aData)
+{
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+
+  // Walk the MFT's available output types and commit to the first one whose
+  // attributes are compatible with the output type requested via
+  // SetMediaTypes().
+  for (UINT32 index = 0; ; ++index) {
+    RefPtr<IMFMediaType> candidate;
+    if (FAILED(mDecoder->GetOutputAvailableType(0, index,
+                                                getter_AddRefs(candidate)))) {
+      // No more types to enumerate; nothing matched.
+      break;
+    }
+
+    BOOL typesMatch = FALSE;
+    HRESULT hr = mOutputType->Compare(candidate,
+                                      MF_ATTRIBUTES_MATCH_OUR_ITEMS,
+                                      &typesMatch);
+    if (FAILED(hr) || !typesMatch) {
+      continue;
+    }
+
+    // Let the caller tweak the chosen type before we commit to it.
+    if (aCallback) {
+      hr = aCallback(candidate, aData);
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+    }
+
+    hr = mDecoder->SetOutputType(0, candidate, 0);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = mDecoder->GetOutputStreamInfo(0, &mOutputStreamInfo);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    // Remember whether the MFT allocates its own output samples (typical
+    // for DXVA), so Output() knows whether to create one itself.
+    mMFTProvidesOutputSamples =
+      IsFlagSet(mOutputStreamInfo.dwFlags, MFT_OUTPUT_STREAM_PROVIDES_SAMPLES);
+
+    return S_OK;
+  }
+  return E_FAIL;
+}
+
+HRESULT
+MFTDecoder::SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData)
+{
+  // Thin wrapper over IMFTransform::ProcessMessage with null-decoder and
+  // failure checks.
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+  HRESULT hr = mDecoder->ProcessMessage(aMsg, aData);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  return S_OK;
+}
+
+HRESULT
+MFTDecoder::CreateInputSample(const uint8_t* aData,
+                              uint32_t aDataSize,
+                              int64_t aTimestamp,
+                              RefPtr<IMFSample>* aOutSample)
+{
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+
+  // Wrap a copy of the compressed data in an IMFSample backed by a buffer
+  // that satisfies the MFT's input stream size/alignment requirements.
+  RefPtr<IMFSample> inputSample;
+  HRESULT hr = wmf::MFCreateSample(getter_AddRefs(inputSample));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // The buffer must be at least as large as the MFT demands (cbSize), and
+  // MFCreateAlignedMemoryBuffer takes alignment-minus-one as its flag.
+  const int32_t allocSize =
+    std::max<uint32_t>(uint32_t(mInputStreamInfo.cbSize), aDataSize);
+  const UINT32 alignArg =
+    (mInputStreamInfo.cbAlignment > 1) ? mInputStreamInfo.cbAlignment - 1 : 0;
+  RefPtr<IMFMediaBuffer> mediaBuffer;
+  hr = wmf::MFCreateAlignedMemoryBuffer(allocSize, alignArg,
+                                        getter_AddRefs(mediaBuffer));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  BYTE* destination = nullptr;
+  DWORD destMaxLength = 0;
+  DWORD destCurrentLength = 0;
+  hr = mediaBuffer->Lock(&destination, &destMaxLength, &destCurrentLength);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Copy the payload into the sample's buffer.
+  memcpy(destination, aData, aDataSize);
+
+  hr = mediaBuffer->Unlock();
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = mediaBuffer->SetCurrentLength(aDataSize);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = inputSample->AddBuffer(mediaBuffer);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // The caller supplies microseconds; IMFSample wants 100ns units.
+  hr = inputSample->SetSampleTime(UsecsToHNs(aTimestamp));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  *aOutSample = inputSample.forget();
+
+  return S_OK;
+}
+
+HRESULT
+MFTDecoder::CreateOutputSample(RefPtr<IMFSample>* aOutSample)
+{
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+
+  // Allocate an empty sample/buffer pair sized and aligned per the MFT's
+  // declared output stream requirements; used when the MFT doesn't provide
+  // its own output samples.
+  RefPtr<IMFSample> outputSample;
+  HRESULT hr = wmf::MFCreateSample(getter_AddRefs(outputSample));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  const int32_t allocSize = mOutputStreamInfo.cbSize;
+  const UINT32 alignArg =
+    (mOutputStreamInfo.cbAlignment > 1) ? mOutputStreamInfo.cbAlignment - 1 : 0;
+  RefPtr<IMFMediaBuffer> mediaBuffer;
+  hr = wmf::MFCreateAlignedMemoryBuffer(allocSize, alignArg,
+                                        getter_AddRefs(mediaBuffer));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = outputSample->AddBuffer(mediaBuffer);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  *aOutSample = outputSample.forget();
+
+  return S_OK;
+}
+
+HRESULT
+MFTDecoder::Output(RefPtr<IMFSample>* aOutput)
+{
+  // Pulls one decoded sample out of the MFT. aOutput is in/out: if the
+  // caller passes a sample in, the MFT decodes into it; otherwise one is
+  // allocated here (unless the MFT provides its own samples).
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+
+  HRESULT hr;
+
+  MFT_OUTPUT_DATA_BUFFER output = {0};
+
+  bool providedSample = false;
+  RefPtr<IMFSample> sample;
+  if (*aOutput) {
+    output.pSample = *aOutput;
+    providedSample = true;
+  } else if (!mMFTProvidesOutputSamples) {
+    hr = CreateOutputSample(&sample);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+    output.pSample = sample;
+  }
+
+  DWORD status = 0;
+  hr = mDecoder->ProcessOutput(0, 1, &output, &status);
+  if (output.pEvents) {
+    // We must release this, as per the IMFTransform::ProcessOutput()
+    // MSDN documentation.
+    output.pEvents->Release();
+    output.pEvents = nullptr;
+  }
+
+  if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
+    // Type change, probably geometric aperture change.
+    // Reconfigure decoder output type, so that GetOutputMediaType()
+    // returns the new type, and return the error code to caller.
+    // This is an expected failure, so don't warn on encountering it.
+    hr = SetDecoderOutputType(nullptr, nullptr);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+    // Return the error, so that the caller knows to retry.
+    return MF_E_TRANSFORM_STREAM_CHANGE;
+  }
+
+  if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+    // Not enough input to produce output. This is an expected failure,
+    // so don't warn on encountering it.
+    return hr;
+  }
+  // Treat other errors as unexpected, and warn.
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Success but no sample produced; nothing to hand back.
+  if (!output.pSample) {
+    return S_OK;
+  }
+
+  // Propagate a pending discontinuity (set after Flush()) onto the first
+  // sample that follows it.
+  if (mDiscontinuity) {
+    output.pSample->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
+    mDiscontinuity = false;
+  }
+
+  *aOutput = output.pSample; // AddRefs
+  if (mMFTProvidesOutputSamples && !providedSample) {
+    // If the MFT is providing samples, we must release the sample here.
+    // Typically only the H.264 MFT provides samples when using DXVA,
+    // and it always re-uses the same sample, so if we don't release it
+    // MFT::ProcessOutput() deadlocks waiting for the sample to be released.
+    output.pSample->Release();
+    output.pSample = nullptr;
+  }
+
+  return S_OK;
+}
+
+HRESULT
+MFTDecoder::Input(const uint8_t* aData,
+                  uint32_t aDataSize,
+                  int64_t aTimestamp)
+{
+  // Convenience overload: package the raw bytes into an IMFSample, then
+  // feed it through the IMFSample overload.
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+
+  RefPtr<IMFSample> inputSample;
+  const HRESULT hr =
+    CreateInputSample(aData, aDataSize, aTimestamp, &inputSample);
+  NS_ENSURE_TRUE(SUCCEEDED(hr) && inputSample != nullptr, hr);
+
+  return Input(inputSample);
+}
+
+HRESULT
+MFTDecoder::Input(IMFSample* aSample)
+{
+  // Feeds one sample to the MFT. MF_E_NOTACCEPTING is part of the normal
+  // protocol (drain with Output() first), so it's returned without warning.
+  HRESULT hr = mDecoder->ProcessInput(0, aSample, 0);
+  if (hr == MF_E_NOTACCEPTING) {
+    // MFT *already* has enough data to produce a sample. Retrieve it.
+    return MF_E_NOTACCEPTING;
+  }
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  return S_OK;
+}
+
+HRESULT
+MFTDecoder::Flush()
+{
+  // Discards all queued input/output in the MFT (used before seeking), and
+  // arranges for the next output sample to be flagged as a discontinuity.
+  HRESULT hr = SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  mDiscontinuity = true;
+
+  return S_OK;
+}
+
+HRESULT
+MFTDecoder::GetOutputMediaType(RefPtr<IMFMediaType>& aMediaType)
+{
+  // Returns the MFT's currently selected output type; may change after a
+  // MF_E_TRANSFORM_STREAM_CHANGE.
+  NS_ENSURE_TRUE(mDecoder, E_POINTER);
+  return mDecoder->GetOutputCurrentType(0, getter_AddRefs(aMediaType));
+}
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFTDecoder.h b/dom/media/platforms/wmf/MFTDecoder.h
new file mode 100644
index 0000000000..91c18f18c0
--- /dev/null
+++ b/dom/media/platforms/wmf/MFTDecoder.h
@@ -0,0 +1,111 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(MFTDecoder_h_)
+#define MFTDecoder_h_
+
+#include "WMF.h"
+#include "mozilla/RefPtr.h"
+#include "mozilla/ReentrantMonitor.h"
+#include "nsIThread.h"
+
+namespace mozilla {
+
+class MFTDecoder final {
+  ~MFTDecoder();
+
+public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MFTDecoder)
+
+  MFTDecoder();
+
+  // Creates the MFT. First thing to do as part of setup.
+  //
+  // Params:
+  //  - aMFTClsID the clsid used by CoCreateInstance to instantiate the
+  //    decoder MFT.
+  HRESULT Create(const GUID& aMFTClsID);
+
+  // Sets the input and output media types. Call after Init().
+  //
+  // Params:
+  //  - aInputType needs at least major and minor types set.
+  //  - aOutputType needs at least major and minor types set.
+  //    This is used to select the matching output type out
+  //    of all the available output types of the MFT.
+  typedef HRESULT (*ConfigureOutputCallback)(IMFMediaType* aOutputType, void* aData);
+  HRESULT SetMediaTypes(IMFMediaType* aInputType,
+                        IMFMediaType* aOutputType,
+                        ConfigureOutputCallback aCallback = nullptr,
+                        void* aData = nullptr);
+
+  // Returns the MFT's IMFAttributes object.
+  already_AddRefed<IMFAttributes> GetAttributes();
+
+  // Retrieves the media type being output. This may not be valid until
+  // the first sample is decoded.
+  HRESULT GetOutputMediaType(RefPtr<IMFMediaType>& aMediaType);
+
+  // Submits data into the MFT for processing.
+  //
+  // Returns:
+  //  - MF_E_NOTACCEPTING if the decoder can't accept input. The data
+  //    must be resubmitted after Output() stops producing output.
+  HRESULT Input(const uint8_t* aData,
+                uint32_t aDataSize,
+                int64_t aTimestampUsecs);
+  HRESULT Input(IMFSample* aSample);
+
+  // Builds an IMFSample wrapping a copy of aData with its timestamp set,
+  // sized/aligned per the MFT's input stream requirements.
+  HRESULT CreateInputSample(const uint8_t* aData,
+                            uint32_t aDataSize,
+                            int64_t aTimestampUsecs,
+                            RefPtr<IMFSample>* aOutSample);
+
+  // Retrieves output from the MFT. Call this once Input() returns
+  // MF_E_NOTACCEPTING. Some MFTs with hardware acceleration (the H.264
+  // decoder MFT in particular) can't handle it if clients hold onto
+  // references to the output IMFSample, so don't do that.
+  //
+  // Returns:
+  //  - MF_E_TRANSFORM_STREAM_CHANGE if the underlying stream output
+  //    type changed. Retrieve the output media type and reconfig client,
+  //    else you may misinterpret the MFT's output.
+  //  - MF_E_TRANSFORM_NEED_MORE_INPUT if no output can be produced
+  //    due to lack of input.
+  //  - S_OK if an output frame is produced.
+  HRESULT Output(RefPtr<IMFSample>* aOutput);
+
+  // Sends a flush message to the MFT. This causes it to discard all
+  // input data. Use before seeking.
+  HRESULT Flush();
+
+  // Sends a message to the MFT.
+  HRESULT SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData);
+
+  // Selects (or re-selects, after a stream change) an output type
+  // compatible with the one passed to SetMediaTypes(), optionally letting
+  // aCallback adjust it before it is committed.
+  HRESULT SetDecoderOutputType(ConfigureOutputCallback aCallback, void* aData);
+private:
+
+  // Allocates an empty sample/buffer pair per the MFT's output stream
+  // requirements; used when the MFT doesn't provide its own samples.
+  HRESULT CreateOutputSample(RefPtr<IMFSample>* aOutSample);
+
+  // Buffer size/alignment requirements reported by the MFT.
+  MFT_INPUT_STREAM_INFO mInputStreamInfo;
+  MFT_OUTPUT_STREAM_INFO mOutputStreamInfo;
+
+  // The wrapped Windows Media Foundation transform.
+  RefPtr<IMFTransform> mDecoder;
+
+  // The output type requested by the caller; matched against the MFT's
+  // available output types in SetDecoderOutputType().
+  RefPtr<IMFMediaType> mOutputType;
+
+  // True if the IMFTransform allocates the samples that it returns.
+  bool mMFTProvidesOutputSamples;
+
+  // True if we need to mark the next sample as a discontinuity.
+  bool mDiscontinuity;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/platforms/wmf/WMF.h b/dom/media/platforms/wmf/WMF.h
new file mode 100644
index 0000000000..5ede0d361d
--- /dev/null
+++ b/dom/media/platforms/wmf/WMF.h
@@ -0,0 +1,104 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef WMF_H_
+#define WMF_H_
+
+#if WINVER < _WIN32_WINNT_WIN7
+#error \
+You must include WMF.h before including mozilla headers, \
+otherwise mozconfig.h will be included \
+and that sets WINVER to WinXP, \
+which makes Windows Media Foundation unavailable.
+#endif
+
+#pragma push_macro("WINVER")
+#undef WINVER
+#define WINVER _WIN32_WINNT_WIN7
+
+#include <windows.h>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <mfreadwrite.h>
+#include <mfobjects.h>
+#include <ks.h>
+#include <stdio.h>
+#include <mferror.h>
+#include <propvarutil.h>
+#include <wmcodecdsp.h>
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <wmcodecdsp.h>
+#include <codecapi.h>
+
+// The Windows headers helpfully declare min and max macros, which don't
+// compile in the presence of std::min and std::max and unified builds.
+// So undef them here.
+#ifdef min
+#undef min
+#endif
+#ifdef max
+#undef max
+#endif
+
+// Some SDK versions don't define the AAC decoder CLSID.
+#ifndef CLSID_CMSAACDecMFT
+extern "C" const CLSID CLSID_CMSAACDecMFT;
+#define WMF_MUST_DEFINE_AAC_MFT_CLSID
+#endif
+
+namespace mozilla {
+namespace wmf {
+
+// If successful, loads all required WMF DLLs and calls the WMF MFStartup()
+// function.
+HRESULT MFStartup();
+
+// Calls the WMF MFShutdown() function. Call this once for every time
+// wmf::MFStartup() succeeds. Note: does not unload the WMF DLLs loaded by
+// MFStartup(); leaves them in memory to save I/O at next MFStartup() call.
+HRESULT MFShutdown();
+
+// All functions below are wrappers around the corresponding WMF function,
+// and automatically locate and call the corresponding function in the WMF DLLs.
+
+HRESULT MFCreateMediaType(IMFMediaType **aOutMFType);
+
+HRESULT MFGetStrideForBitmapInfoHeader(DWORD aFormat,
+                                       DWORD aWidth,
+                                       LONG *aOutStride);
+
+HRESULT MFGetService(IUnknown *punkObject,
+                     REFGUID guidService,
+                     REFIID riid,
+                     LPVOID *ppvObject);
+
+HRESULT DXVA2CreateDirect3DDeviceManager9(UINT *pResetToken,
+                                          IDirect3DDeviceManager9 **ppDXVAManager);
+
+HRESULT MFCreateDXGIDeviceManager(UINT *pResetToken, IMFDXGIDeviceManager **ppDXVAManager);
+
+HRESULT MFCreateSample(IMFSample **ppIMFSample);
+
+HRESULT MFCreateAlignedMemoryBuffer(DWORD cbMaxLength,
+                                    DWORD fAlignmentFlags,
+                                    IMFMediaBuffer **ppBuffer);
+
+// Parameter name fixed from the misspelt "fButtomUpWhenLinear"; declaration
+// parameter names are documentation only, so this does not affect linkage.
+HRESULT MFCreateDXGISurfaceBuffer(REFIID riid,
+                                  IUnknown *punkSurface,
+                                  UINT uSubresourceIndex,
+                                  BOOL fBottomUpWhenLinear,
+                                  IMFMediaBuffer **ppBuffer);
+
+} // end namespace wmf
+} // end namespace mozilla
+
+
+
+#pragma pop_macro("WINVER")
+
+#endif
diff --git a/dom/media/platforms/wmf/WMFAudioMFTManager.cpp b/dom/media/platforms/wmf/WMFAudioMFTManager.cpp
new file mode 100644
index 0000000000..69b62da517
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFAudioMFTManager.cpp
@@ -0,0 +1,358 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WMFAudioMFTManager.h"
+#include "MediaInfo.h"
+#include "VideoUtils.h"
+#include "WMFUtils.h"
+#include "nsTArray.h"
+#include "TimeUnits.h"
+#include "mozilla/Telemetry.h"
+#include "mozilla/Logging.h"
+
+#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
+
+namespace mozilla {
+
+static void
+AACAudioSpecificConfigToUserData(uint8_t aAACProfileLevelIndication,
+                                 const uint8_t* aAudioSpecConfig,
+                                 uint32_t aConfigLength,
+                                 nsTArray<BYTE>& aOutUserData)
+{
+  MOZ_ASSERT(aOutUserData.IsEmpty());
+
+  // The MF_MT_USER_DATA for AAC is defined here:
+  // http://msdn.microsoft.com/en-us/library/windows/desktop/dd742784%28v=vs.85%29.aspx
+  //
+  // For MFAudioFormat_AAC, MF_MT_USER_DATA is the part of the HEAACWAVEINFO
+  // structure that follows its embedded WAVEFORMATEX (i.e. everything after
+  // the wfx member), immediately followed by the AudioSpecificConfig() bytes
+  // as defined by ISO/IEC 14496-3. The AudioSpecificConfig() is 2 bytes for
+  // AAC-LC or HE-AAC with implicit SBR/PS signaling, and longer for HE-AAC
+  // with explicit signaling. The audioObjectType must be 2 (AAC-LC); the
+  // extensionAudioObjectType must be 5 for SBR or 29 for PS.
+  //
+  // Fields after wfx in HEAACWAVEINFO:
+  //   WORD wPayloadType; WORD wAudioProfileLevelIndication;
+  //   WORD wStructType; WORD wReserved1; DWORD dwReserved2;
+  const UINT32 heaacInfoSize = 4 * sizeof(WORD) + sizeof(DWORD);
+
+  // Only payload type and profile need real values; everything else is 0.
+  BYTE heaacInfo[heaacInfoSize] = {0};
+  WORD* words = (WORD*)heaacInfo;
+  words[0] = 0x0; // wPayloadType: raw AAC packets.
+  words[1] = aAACProfileLevelIndication;
+
+  aOutUserData.AppendElements(heaacInfo, heaacInfoSize);
+
+  if (aAACProfileLevelIndication == 2 && aConfigLength > 2) {
+    // The AudioSpecificConfig bit layout is TTTTTFFF|FCCCCGGG
+    // (T=ObjectType, F=Frequency, C=Channel, G=GASpecificConfig).
+    // A frequency index of 0xf means the rate is spelled out in a further
+    // 24 bits.
+    const int8_t frequencyIndex =
+      (aAudioSpecConfig[0] & 0x7) << 1 | (aAudioSpecConfig[1] & 0x80) >> 7;
+    const int8_t channelConfig = (aAudioSpecConfig[1] & 0x78) >> 3;
+    const int8_t gaSpecific = aAudioSpecConfig[1] & 0x7;
+    if (frequencyIndex != 0xf && channelConfig && !gaSpecific) {
+      // The AudioSpecificConfig should theoretically be exactly 2 bytes
+      // here, but isn't. The WMF AAC decoder errors out on unknown
+      // extensions, so truncate them away.
+      aConfigLength = 2;
+    }
+  }
+  aOutUserData.AppendElements(aAudioSpecConfig, aConfigLength);
+}
+
+WMFAudioMFTManager::WMFAudioMFTManager(
+  const AudioInfo& aConfig)
+  : mAudioChannels(aConfig.mChannels)
+  , mAudioRate(aConfig.mRate)
+  , mAudioFrameSum(0)
+  , mMustRecaptureAudioPosition(true)
+{
+  MOZ_COUNT_CTOR(WMFAudioMFTManager);
+
+  // Map the MIME type onto the stream type used to pick the MFT CLSID and
+  // media subtype. For AAC, pre-build the MF_MT_USER_DATA blob the WMF
+  // decoder needs from the container's AudioSpecificConfig.
+  if (aConfig.mMimeType.EqualsLiteral("audio/mpeg")) {
+    mStreamType = MP3;
+  } else if (aConfig.mMimeType.EqualsLiteral("audio/mp4a-latm")) {
+    mStreamType = AAC;
+    AACAudioSpecificConfigToUserData(aConfig.mExtendedProfile,
+                                     aConfig.mCodecSpecificConfig->Elements(),
+                                     aConfig.mCodecSpecificConfig->Length(),
+                                     mUserData);
+  } else {
+    // Unsupported MIME type; Init() will fail on Unknown.
+    mStreamType = Unknown;
+  }
+}
+
+WMFAudioMFTManager::~WMFAudioMFTManager()
+{
+  // Leak-counting bookkeeping only; mDecoder is released via RefPtr.
+  MOZ_COUNT_DTOR(WMFAudioMFTManager);
+}
+
+const GUID&
+WMFAudioMFTManager::GetMFTGUID()
+{
+  // CLSID of the Windows decoder MFT matching our stream type.
+  MOZ_ASSERT(mStreamType != Unknown);
+  if (mStreamType == AAC) {
+    return CLSID_CMSAACDecMFT;
+  }
+  if (mStreamType == MP3) {
+    return CLSID_CMP3DecMediaObject;
+  }
+  return GUID_NULL;
+}
+
+const GUID&
+WMFAudioMFTManager::GetMediaSubtypeGUID()
+{
+  // Media Foundation audio subtype matching our stream type.
+  MOZ_ASSERT(mStreamType != Unknown);
+  if (mStreamType == AAC) {
+    return MFAudioFormat_AAC;
+  }
+  if (mStreamType == MP3) {
+    return MFAudioFormat_MP3;
+  }
+  return GUID_NULL;
+}
+
+bool
+WMFAudioMFTManager::Init()
+{
+  // Creates the decoder MFT and configures its input (AAC or MP3) and
+  // output (16-bit PCM) media types. Returns false on any failure.
+  NS_ENSURE_TRUE(mStreamType != Unknown, false);
+
+  RefPtr<MFTDecoder> decoder(new MFTDecoder());
+
+  HRESULT hr = decoder->Create(GetMFTGUID());
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  // Setup input/output media types
+  RefPtr<IMFMediaType> inputType;
+
+  hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  hr = inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID());
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  hr = inputType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, mAudioRate);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  hr = inputType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, mAudioChannels);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  if (mStreamType == AAC) {
+    // We feed raw AAC packets (no ADTS headers), and pass the
+    // HEAACWAVEINFO-tail + AudioSpecificConfig blob built in the ctor.
+    hr = inputType->SetUINT32(MF_MT_AAC_PAYLOAD_TYPE, 0x0); // Raw AAC packet
+    NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+    hr = inputType->SetBlob(MF_MT_USER_DATA,
+                            mUserData.Elements(),
+                            mUserData.Length());
+    NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+  }
+
+  RefPtr<IMFMediaType> outputType;
+  hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  hr = outputType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  // Output() assumes 16-bit PCM when computing sample counts.
+  hr = outputType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, 16);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  hr = decoder->SetMediaTypes(inputType, outputType);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  mDecoder = decoder;
+
+  return true;
+}
+
+HRESULT
+WMFAudioMFTManager::Input(MediaRawData* aSample)
+{
+  // Forward the compressed payload and its presentation time (usecs) to
+  // the underlying MFT.
+  return mDecoder->Input(aSample->Data(),
+                         uint32_t(aSample->Size()),
+                         aSample->mTime);
+}
+
+HRESULT
+WMFAudioMFTManager::UpdateOutputType()
+{
+  // Re-reads rate and channel count from the MFT's current output type.
+  // Called after a stream change (or at a discontinuity) since the decoded
+  // format can differ from what the container advertised.
+  HRESULT hr;
+
+  RefPtr<IMFMediaType> type;
+  hr = mDecoder->GetOutputMediaType(type);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = type->GetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, &mAudioRate);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = type->GetUINT32(MF_MT_AUDIO_NUM_CHANNELS, &mAudioChannels);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Reject channel counts we can't map to a valid layout.
+  AudioConfig::ChannelLayout layout(mAudioChannels);
+  if (!layout.IsValid()) {
+    return E_FAIL;
+  }
+
+  return S_OK;
+}
+
+HRESULT
+WMFAudioMFTManager::Output(int64_t aStreamOffset,
+                           RefPtr<MediaData>& aOutData)
+{
+  // Pulls one decoded PCM block from the MFT and converts it to an
+  // AudioData. Fix over the original: every return path reached after
+  // IMFMediaBuffer::Lock() now calls Unlock(), so error/early-out paths no
+  // longer leave the buffer locked.
+  aOutData = nullptr;
+  RefPtr<IMFSample> sample;
+  HRESULT hr;
+  int typeChangeCount = 0;
+  while (true) {
+    hr = mDecoder->Output(&sample);
+    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+      return hr;
+    }
+    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
+      hr = UpdateOutputType();
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+      // Catch infinite loops, but some decoders perform at least 2 stream
+      // changes on consecutive calls, so be permissive.
+      // 100 is arbitrarily > 2.
+      NS_ENSURE_TRUE(typeChangeCount < 100, MF_E_TRANSFORM_STREAM_CHANGE);
+      ++typeChangeCount;
+      continue;
+    }
+    break;
+  }
+
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  if (!sample) {
+    LOG("Audio MFTDecoder returned success but null output.");
+    nsCOMPtr<nsIRunnable> task = NS_NewRunnableFunction([]() -> void {
+      LOG("Reporting telemetry AUDIO_MFT_OUTPUT_NULL_SAMPLES");
+      Telemetry::Accumulate(Telemetry::ID::AUDIO_MFT_OUTPUT_NULL_SAMPLES, 1);
+    });
+    AbstractThread::MainThread()->Dispatch(task.forget());
+    return E_FAIL;
+  }
+
+  RefPtr<IMFMediaBuffer> buffer;
+  hr = sample->ConvertToContiguousBuffer(getter_AddRefs(buffer));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  BYTE* data = nullptr; // Note: *data will be owned by the IMFMediaBuffer, we don't need to free it.
+  DWORD maxLength = 0, currentLength = 0;
+  hr = buffer->Lock(&data, &maxLength, &currentLength);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Sometimes when starting decoding, the AAC decoder gives us samples
+  // with a negative timestamp. AAC does usually have preroll (or encoder
+  // delay) encoded into its bitstream, but the amount encoded to the stream
+  // is variable, and it is not signalled in-bitstream. There is sometimes
+  // signalling in the MP4 container of the preroll amount, but it's
+  // inconsistent. It looks like WMF's AAC encoder may take this into
+  // account, so strip off samples with a negative timestamp to get us
+  // to a 0-timestamp start. This seems to maintain A/V sync, so we can run
+  // with this until someone complains...
+
+  // We calculate the timestamp and the duration based on the number of audio
+  // frames we've already played. We don't trust the timestamp stored on the
+  // IMFSample, as sometimes it's wrong, possibly due to buggy encoders?
+
+  // If this sample block comes after a discontinuity (i.e. a gap or seek)
+  // reset the frame counters, and capture the timestamp. Future timestamps
+  // will be offset from this block's timestamp.
+  UINT32 discontinuity = false;
+  sample->GetUINT32(MFSampleExtension_Discontinuity, &discontinuity);
+  if (mMustRecaptureAudioPosition || discontinuity) {
+    // Update the output type, in case this segment has a different
+    // rate. This also triggers on the first sample, which can have a
+    // different rate than is advertised in the container, and sometimes we
+    // don't get a MF_E_TRANSFORM_STREAM_CHANGE when the rate changes.
+    hr = UpdateOutputType();
+    if (FAILED(hr)) {
+      buffer->Unlock(); // Don't leave the buffer locked on error.
+      return hr;
+    }
+
+    mAudioFrameSum = 0;
+    LONGLONG timestampHns = 0;
+    hr = sample->GetSampleTime(&timestampHns);
+    if (FAILED(hr)) {
+      buffer->Unlock(); // Don't leave the buffer locked on error.
+      return hr;
+    }
+    mAudioTimeOffset = media::TimeUnit::FromMicroseconds(timestampHns / 10);
+    mMustRecaptureAudioPosition = false;
+  }
+  // We can assume PCM 16 output.
+  int32_t numSamples = currentLength / 2;
+  int32_t numFrames = numSamples / mAudioChannels;
+  MOZ_ASSERT(numFrames >= 0);
+  MOZ_ASSERT(numSamples >= 0);
+  if (numFrames == 0) {
+    // All data from this chunk stripped, loop back and try to output the next
+    // frame, if possible.
+    buffer->Unlock(); // Balance the Lock() above before returning.
+    return S_OK;
+  }
+
+  AlignedAudioBuffer audioData(numSamples);
+  if (!audioData) {
+    buffer->Unlock(); // Balance the Lock() above before returning.
+    return E_OUTOFMEMORY;
+  }
+
+  // Convert 16-bit PCM to float samples.
+  int16_t* pcm = (int16_t*)data;
+  for (int32_t i = 0; i < numSamples; ++i) {
+    audioData[i] = AudioSampleToFloat(pcm[i]);
+  }
+
+  buffer->Unlock();
+
+  media::TimeUnit timestamp =
+    mAudioTimeOffset + FramesToTimeUnit(mAudioFrameSum, mAudioRate);
+  NS_ENSURE_TRUE(timestamp.IsValid(), E_FAIL);
+
+  mAudioFrameSum += numFrames;
+
+  media::TimeUnit duration = FramesToTimeUnit(numFrames, mAudioRate);
+  NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
+
+  aOutData = new AudioData(aStreamOffset,
+                           timestamp.ToMicroseconds(),
+                           duration.ToMicroseconds(),
+                           numFrames,
+                           Move(audioData),
+                           mAudioChannels,
+                           mAudioRate);
+
+  #ifdef LOG_SAMPLE_DECODE
+  LOG("Decoded audio sample! timestamp=%lld duration=%lld currentLength=%u",
+      timestamp.ToMicroseconds(), duration.ToMicroseconds(), currentLength);
+  #endif
+
+  return S_OK;
+}
+
+void
+WMFAudioMFTManager::Shutdown()
+{
+  // Drop our reference to the MFT; its COM resources are released when the
+  // last reference goes away.
+  mDecoder = nullptr;
+}
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/WMFAudioMFTManager.h b/dom/media/platforms/wmf/WMFAudioMFTManager.h
new file mode 100644
index 0000000000..5bbbc6108a
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFAudioMFTManager.h
@@ -0,0 +1,78 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(WMFAudioOutputSource_h_)
+#define WMFAudioOutputSource_h_
+
+#include "WMF.h"
+#include "MFTDecoder.h"
+#include "mozilla/RefPtr.h"
+#include "WMFMediaDataDecoder.h"
+
+extern const GUID CLSID_WebmMfVpxDec;
+
+namespace mozilla {
+
+class WMFAudioMFTManager : public MFTManager {
+public:
+  WMFAudioMFTManager(const AudioInfo& aConfig);
+  ~WMFAudioMFTManager();
+
+  // Creates the decoder MFT and configures its input/output media types.
+  // Must succeed before Input()/Output() are used.
+  bool Init();
+
+  HRESULT Input(MediaRawData* aSample) override;
+
+  // Note WMF's AAC decoder sometimes output negatively timestamped samples,
+  // presumably they're the preroll samples, and we strip them. We may return
+  // a null aOutput in this case.
+  HRESULT Output(int64_t aStreamOffset,
+                 RefPtr<MediaData>& aOutput) override;
+
+  void Shutdown() override;
+
+  TrackInfo::TrackType GetType() override {
+    return TrackInfo::kAudioTrack;
+  }
+
+  const char* GetDescriptionName() const override
+  {
+    return "wmf audio decoder";
+  }
+
+private:
+
+  // Re-reads rate/channels from the MFT's current output type.
+  HRESULT UpdateOutputType();
+
+  uint32_t mAudioChannels;
+  uint32_t mAudioRate;
+  // MF_MT_USER_DATA blob for AAC (HEAACWAVEINFO tail + AudioSpecificConfig).
+  nsTArray<BYTE> mUserData;
+
+  // The offset, at which playback started since the
+  // last discontinuity.
+  media::TimeUnit mAudioTimeOffset;
+  // The number of audio frames that we've played since the last
+  // discontinuity.
+  int64_t mAudioFrameSum;
+
+  // Codec carried by this manager; selects the MFT CLSID and subtype.
+  enum StreamType {
+    Unknown,
+    AAC,
+    MP3
+  };
+  StreamType mStreamType;
+
+  const GUID& GetMFTGUID();
+  const GUID& GetMediaSubtypeGUID();
+
+  // True if we need to re-initialize mAudioTimeOffset and mAudioFrameSum
+  // from the next audio packet we decode. This happens after a seek, since
+  // WMF doesn't mark a stream as having a discontinuity after a seek(0).
+  bool mMustRecaptureAudioPosition;
+};
+
+} // namespace mozilla
+
+#endif // WMFAudioOutputSource_h_
diff --git a/dom/media/platforms/wmf/WMFDecoderModule.cpp b/dom/media/platforms/wmf/WMFDecoderModule.cpp
new file mode 100644
index 0000000000..06bf49fa6a
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFDecoderModule.cpp
@@ -0,0 +1,257 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WMF.h"
+#include "WMFDecoderModule.h"
+#include "WMFVideoMFTManager.h"
+#include "WMFAudioMFTManager.h"
+#include "MFTDecoder.h"
+#include "mozilla/DebugOnly.h"
+#include "mozilla/Services.h"
+#include "WMFMediaDataDecoder.h"
+#include "nsAutoPtr.h"
+#include "nsIWindowsRegKey.h"
+#include "nsComponentManagerUtils.h"
+#include "nsServiceManagerUtils.h"
+#include "nsIGfxInfo.h"
+#include "nsWindowsHelpers.h"
+#include "GfxDriverInfo.h"
+#include "mozilla/gfx/gfxVars.h"
+#include "MediaInfo.h"
+#include "MediaPrefs.h"
+#include "prsystem.h"
+#include "mozilla/Maybe.h"
+#include "mozilla/StaticMutex.h"
+#include "mozilla/WindowsVersion.h"
+#include "MP4Decoder.h"
+#include "VPXDecoder.h"
+
+namespace mozilla {
+
+static Atomic<bool> sDXVAEnabled(false);
+
+WMFDecoderModule::WMFDecoderModule()
+ : mWMFInitialized(false)
+{
+}
+
+WMFDecoderModule::~WMFDecoderModule()
+{
+ // Balance the wmf::MFStartup() performed in Startup().
+ if (mWMFInitialized) {
+ DebugOnly<HRESULT> hr = wmf::MFShutdown();
+ NS_ASSERTION(SUCCEEDED(hr), "MFShutdown failed");
+ }
+}
+
+/* static */
+void
+WMFDecoderModule::Init()
+{
+ // Cache whether hardware (DXVA) video decoding may be used; read later
+ // by CreateVideoDecoder() and by Supports() for VP8/VP9.
+ sDXVAEnabled = gfx::gfxVars::CanUseHardwareVideoDecoding();
+}
+
+/* static */
+int
+WMFDecoderModule::GetNumDecoderThreads()
+{
+ int32_t numCores = PR_GetNumberOfProcessors();
+
+ // If we have more than 4 cores, let the decoder decide how many threads.
+ // On an 8 core machine, WMF chooses 4 decoder threads.
+ const int WMF_DECODER_DEFAULT = -1;
+ int32_t prefThreadCount = WMF_DECODER_DEFAULT;
+ // Only consult the thread-count pref outside the GPU process.
+ if (XRE_GetProcessType() != GeckoProcessType_GPU) {
+ prefThreadCount = MediaPrefs::PDMWMFThreadCount();
+ }
+ if (prefThreadCount != WMF_DECODER_DEFAULT) {
+ // An explicit pref wins, clamped to at least one thread.
+ return std::max(prefThreadCount, 1);
+ } else if (numCores > 4) {
+ return WMF_DECODER_DEFAULT;
+ }
+ // On small machines, leave one core free for the rest of the browser.
+ return std::max(numCores - 1, 1);
+}
+
+nsresult
+WMFDecoderModule::Startup()
+{
+ // Load the WMF DLLs and call MFStartup(); the matching MFShutdown()
+ // happens in our destructor.
+ mWMFInitialized = SUCCEEDED(wmf::MFStartup());
+ return mWMFInitialized ? NS_OK : NS_ERROR_FAILURE;
+}
+
+already_AddRefed<MediaDataDecoder>
+WMFDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
+{
+ // The manager owns the MFT; it is handed off to a WMFMediaDataDecoder
+ // which drives it on the supplied task queue.
+ nsAutoPtr<WMFVideoMFTManager> manager(
+ new WMFVideoMFTManager(aParams.VideoConfig(),
+ aParams.mKnowsCompositor,
+ aParams.mImageContainer,
+ sDXVAEnabled));
+
+ if (!manager->Init()) {
+ return nullptr;
+ }
+
+ RefPtr<MediaDataDecoder> decoder =
+ new WMFMediaDataDecoder(manager.forget(), aParams.mTaskQueue, aParams.mCallback);
+
+ return decoder.forget();
+}
+
+already_AddRefed<MediaDataDecoder>
+WMFDecoderModule::CreateAudioDecoder(const CreateDecoderParams& aParams)
+{
+ nsAutoPtr<WMFAudioMFTManager> manager(new WMFAudioMFTManager(aParams.AudioConfig()));
+
+ if (!manager->Init()) {
+ return nullptr;
+ }
+
+ RefPtr<MediaDataDecoder> decoder =
+ new WMFMediaDataDecoder(manager.forget(), aParams.mTaskQueue, aParams.mCallback);
+ return decoder.forget();
+}
+
+// Returns true if an MFT with the given CLSID can be instantiated.
+// Starts up (and shuts down) WMF just for the probe, so this is not cheap;
+// see the memoizing wrapper below.
+static bool
+CanCreateMFTDecoder(const GUID& aGuid)
+{
+ if (FAILED(wmf::MFStartup())) {
+ return false;
+ }
+ bool hasdecoder = false;
+ {
+ // Scope the decoder so it is destroyed before MFShutdown().
+ RefPtr<MFTDecoder> decoder(new MFTDecoder());
+ hasdecoder = SUCCEEDED(decoder->Create(aGuid));
+ }
+ wmf::MFShutdown();
+ return hasdecoder;
+}
+
+// Memoizing wrapper around CanCreateMFTDecoder: probes once per GUID
+// (one static cache per template instantiation, guarded by a mutex) and
+// caches the answer for the lifetime of the process.
+template<const GUID& aGuid>
+static bool
+CanCreateWMFDecoder()
+{
+ static StaticMutex sMutex;
+ StaticMutexAutoLock lock(sMutex);
+ static Maybe<bool> result;
+ if (result.isNothing()) {
+ result.emplace(CanCreateMFTDecoder(aGuid));
+ }
+ return result.value();
+}
+
+// Returns true when the system's msmpeg2vdec.dll is one of the H.264
+// decoder versions known to crash (bug 1242343). Only active when
+// BLACKLIST_CRASHY_H264_DECODERS is defined; otherwise always false.
+static bool
+IsH264DecoderBlacklisted()
+{
+#ifdef BLACKLIST_CRASHY_H264_DECODERS
+ WCHAR systemPath[MAX_PATH + 1];
+ if (!ConstructSystem32Path(L"msmpeg2vdec.dll", systemPath, MAX_PATH + 1)) {
+ // Cannot build path -> Assume it's not the blacklisted DLL.
+ return false;
+ }
+
+ DWORD zero;
+ DWORD infoSize = GetFileVersionInfoSizeW(systemPath, &zero);
+ if (infoSize == 0) {
+ // Can't get file info -> Assume we don't have the blacklisted DLL.
+ return false;
+ }
+ auto infoData = MakeUnique<unsigned char[]>(infoSize);
+ VS_FIXEDFILEINFO *vInfo;
+ UINT vInfoLen;
+ if (GetFileVersionInfoW(systemPath, 0, infoSize, infoData.get()) &&
+ VerQueryValueW(infoData.get(), L"\\", (LPVOID*)&vInfo, &vInfoLen))
+ {
+ // Match version 12.0.9200.16426 or 12.0.9200.17037 exactly
+ // (dwFileVersionMS/LS pack major.minor / build.revision).
+ if ((vInfo->dwFileVersionMS == ((12u << 16) | 0u))
+ && ((vInfo->dwFileVersionLS == ((9200u << 16) | 16426u))
+ || (vInfo->dwFileVersionLS == ((9200u << 16) | 17037u)))) {
+ // 12.0.9200.16426 & .17037 are blacklisted on Win64, see bug 1242343.
+ return true;
+ }
+ }
+#endif // BLACKLIST_CRASHY_H264_DECODERS
+ return false;
+}
+
+/* static */ bool
+WMFDecoderModule::HasH264()
+{
+ // H.264 is usable only if the system MFT exists and isn't blacklisted.
+ if (IsH264DecoderBlacklisted()) {
+ return false;
+ }
+ return CanCreateWMFDecoder<CLSID_CMSH264DecoderMFT>();
+}
+
+/* static */ bool
+WMFDecoderModule::HasAAC()
+{
+ return CanCreateWMFDecoder<CLSID_CMSAACDecMFT>();
+}
+
+bool
+WMFDecoderModule::SupportsMimeType(const nsACString& aMimeType,
+ DecoderDoctorDiagnostics* aDiagnostics) const
+{
+ // Map the MIME type onto a TrackInfo and defer to Supports().
+ UniquePtr<TrackInfo> trackInfo = CreateTrackInfoWithMIMEType(aMimeType);
+ if (!trackInfo) {
+ return false;
+ }
+ return Supports(*trackInfo, aDiagnostics);
+}
+
+bool
+WMFDecoderModule::Supports(const TrackInfo& aTrackInfo,
+ DecoderDoctorDiagnostics* aDiagnostics) const
+{
+ // AAC, under either MIME spelling.
+ if ((aTrackInfo.mMimeType.EqualsLiteral("audio/mp4a-latm") ||
+ aTrackInfo.mMimeType.EqualsLiteral("audio/mp4")) &&
+ WMFDecoderModule::HasAAC()) {
+ return true;
+ }
+ if (MP4Decoder::IsH264(aTrackInfo.mMimeType) && WMFDecoderModule::HasH264()) {
+ const VideoInfo* videoInfo = aTrackInfo.GetAsVideoInfo();
+ MOZ_ASSERT(videoInfo);
+ // Check Windows format constraints, based on:
+ // https://msdn.microsoft.com/en-us/library/windows/desktop/dd797815(v=vs.85).aspx
+ if (IsWin8OrLater()) {
+ // Windows >7 supports at most 4096x2304.
+ if (videoInfo->mImage.width > 4096 || videoInfo->mImage.height > 2304) {
+ return false;
+ }
+ } else {
+ // Windows <=7 supports at most 1920x1088.
+ if (videoInfo->mImage.width > 1920 || videoInfo->mImage.height > 1088) {
+ return false;
+ }
+ }
+ return true;
+ }
+ // MP3 via the CMP3 decoder media object.
+ if (aTrackInfo.mMimeType.EqualsLiteral("audio/mpeg") &&
+ CanCreateWMFDecoder<CLSID_CMP3DecMediaObject>()) {
+ return true;
+ }
+ // VP8/VP9 via the WebM VPx MFT, only when the pref is enabled and DXVA
+ // (hardware decoding) is available.
+ if (MediaPrefs::PDMWMFVP9DecoderEnabled() && sDXVAEnabled) {
+ if ((VPXDecoder::IsVP8(aTrackInfo.mMimeType) ||
+ VPXDecoder::IsVP9(aTrackInfo.mMimeType)) &&
+ CanCreateWMFDecoder<CLSID_WebmMfVpxDec>()) {
+ return true;
+ }
+ }
+
+ // Some unsupported codec.
+ return false;
+}
+
+PlatformDecoderModule::ConversionRequired
+WMFDecoderModule::DecoderNeedsConversion(const TrackInfo& aConfig) const
+{
+ // WMF's H.264 decoder takes samples in Annex B format.
+ if (aConfig.IsVideo() && MP4Decoder::IsH264(aConfig.mMimeType)) {
+ return ConversionRequired::kNeedAnnexB;
+ } else {
+ return ConversionRequired::kNeedNone;
+ }
+}
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/WMFDecoderModule.h b/dom/media/platforms/wmf/WMFDecoderModule.h
new file mode 100644
index 0000000000..cd7b8c6609
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFDecoderModule.h
@@ -0,0 +1,56 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(WMFPlatformDecoderModule_h_)
+#define WMFPlatformDecoderModule_h_
+
+#include "PlatformDecoderModule.h"
+
+namespace mozilla {
+
+// PlatformDecoderModule backed by Windows Media Foundation MFTs.
+class WMFDecoderModule : public PlatformDecoderModule {
+public:
+ WMFDecoderModule();
+ virtual ~WMFDecoderModule();
+
+ // Initializes the module, loads required dynamic libraries, etc.
+ nsresult Startup() override;
+
+ already_AddRefed<MediaDataDecoder>
+ CreateVideoDecoder(const CreateDecoderParams& aParams) override;
+
+ already_AddRefed<MediaDataDecoder>
+ CreateAudioDecoder(const CreateDecoderParams& aParams) override;
+
+ bool SupportsMimeType(const nsACString& aMimeType,
+ DecoderDoctorDiagnostics* aDiagnostics) const override;
+ bool Supports(const TrackInfo& aTrackInfo,
+ DecoderDoctorDiagnostics* aDiagnostics) const override;
+
+ ConversionRequired
+ DecoderNeedsConversion(const TrackInfo& aConfig) const override;
+
+ // Called on main thread.
+ static void Init();
+
+ // Called from any thread; Init() must have been called first.
+ static int GetNumDecoderThreads();
+
+ // Accessors that report whether we have the required MFTs available
+ // on the system to play various codecs. Windows Vista doesn't have the
+ // H.264/AAC decoders if the "Platform Update Supplement for Windows Vista"
+ // is not installed, and Windows N and KN variants also require a "Media
+ // Feature Pack" to be installed. Windows XP doesn't have WMF.
+ static bool HasAAC();
+ static bool HasH264();
+
+private:
+ // True after Startup() successfully initialized WMF; checked in the
+ // destructor to decide whether MFShutdown() must be called.
+ bool mWMFInitialized;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/platforms/wmf/WMFMediaDataDecoder.cpp b/dom/media/platforms/wmf/WMFMediaDataDecoder.cpp
new file mode 100644
index 0000000000..d2c13eac75
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFMediaDataDecoder.cpp
@@ -0,0 +1,227 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WMFMediaDataDecoder.h"
+#include "VideoUtils.h"
+#include "WMFUtils.h"
+#include "nsTArray.h"
+#include "mozilla/Telemetry.h"
+
+#include "mozilla/Logging.h"
+#include "mozilla/SyncRunnable.h"
+
+#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
+
+namespace mozilla {
+
+// Takes ownership of aMFTManager; callbacks are delivered via aCallback.
+WMFMediaDataDecoder::WMFMediaDataDecoder(MFTManager* aMFTManager,
+ TaskQueue* aTaskQueue,
+ MediaDataDecoderCallback* aCallback)
+ : mTaskQueue(aTaskQueue)
+ , mCallback(aCallback)
+ , mMFTManager(aMFTManager)
+ , mIsFlushing(false)
+ , mIsShutDown(false)
+{
+}
+
+WMFMediaDataDecoder::~WMFMediaDataDecoder()
+{
+}
+
+RefPtr<MediaDataDecoder::InitPromise>
+WMFMediaDataDecoder::Init()
+{
+ MOZ_ASSERT(!mIsShutDown);
+ // The MFTManager was already initialized by the decoder module before we
+ // were constructed, so initialization always resolves immediately.
+ return InitPromise::CreateAndResolve(mMFTManager->GetType(), __func__);
+}
+
+// A single telemetry sample is reported for each MediaDataDecoder object
+// that has detected error or produced output successfully.
+static void
+SendTelemetry(unsigned long hr)
+{
+ // Collapse the error codes into a range of 0-0xff that can be viewed in
+ // telemetry histograms. For most MF_E_* errors, unique samples are used,
+ // retaining the least significant 7 or 8 bits. Other error codes are
+ // bucketed.
+ uint32_t sample;
+ if (SUCCEEDED(hr)) {
+ sample = 0;
+ } else if (hr < 0xc00d36b0) {
+ sample = 1; // low bucket
+ } else if (hr < 0xc00d3700) {
+ sample = hr & 0xffU; // MF_E_*
+ } else if (hr <= 0xc00d3705) {
+ sample = 0x80 + (hr & 0xfU); // more MF_E_*
+ } else if (hr < 0xc00d6d60) {
+ sample = 2; // mid bucket
+ } else if (hr <= 0xc00d6d78) {
+ sample = hr & 0xffU; // MF_E_TRANSFORM_*
+ } else {
+ sample = 3; // high bucket
+ }
+
+ // Telemetry must be accumulated on the main thread.
+ nsCOMPtr<nsIRunnable> runnable = NS_NewRunnableFunction(
+ [sample] {
+ Telemetry::Accumulate(Telemetry::MEDIA_WMF_DECODE_ERROR, sample);
+ });
+ NS_DispatchToMainThread(runnable);
+}
+
+void
+WMFMediaDataDecoder::Shutdown()
+{
+ MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown);
+
+ if (mTaskQueue) {
+ mTaskQueue->Dispatch(NewRunnableMethod(this, &WMFMediaDataDecoder::ProcessShutdown));
+ } else {
+ ProcessShutdown();
+ }
+ // Marked shut down immediately on the calling thread; the actual
+ // teardown may still be pending on mTaskQueue.
+ mIsShutDown = true;
+}
+
+void
+WMFMediaDataDecoder::ProcessShutdown()
+{
+ if (mMFTManager) {
+ mMFTManager->Shutdown();
+ mMFTManager = nullptr;
+ // Report one success sample if we produced output and never errored.
+ if (!mRecordedError && mHasSuccessfulOutput) {
+ SendTelemetry(S_OK);
+ }
+ }
+}
+
+// Inserts data into the decoder's pipeline.
+void
+WMFMediaDataDecoder::Input(MediaRawData* aSample)
+{
+ MOZ_ASSERT(mCallback->OnReaderTaskQueue());
+ MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown);
+
+ // Decode off-thread; the runnable keeps a strong reference to the sample.
+ nsCOMPtr<nsIRunnable> runnable =
+ NewRunnableMethod<RefPtr<MediaRawData>>(
+ this,
+ &WMFMediaDataDecoder::ProcessDecode,
+ RefPtr<MediaRawData>(aSample));
+ mTaskQueue->Dispatch(runnable.forget());
+}
+
+// Runs on mTaskQueue: feeds one sample to the MFT and pulls any output.
+void
+WMFMediaDataDecoder::ProcessDecode(MediaRawData* aSample)
+{
+ if (mIsFlushing) {
+ // Skip sample, to be released by runnable.
+ return;
+ }
+
+ HRESULT hr = mMFTManager->Input(aSample);
+ if (FAILED(hr)) {
+ NS_WARNING("MFTManager rejected sample");
+ mCallback->Error(MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
+ RESULT_DETAIL("MFTManager::Input:%x", hr)));
+ if (!mRecordedError) {
+ SendTelemetry(hr);
+ mRecordedError = true;
+ }
+ return;
+ }
+
+ // Remember how far into the resource we've fed data; passed to
+ // MFTManager::Output() in ProcessOutput().
+ mLastStreamOffset = aSample->mOffset;
+
+ ProcessOutput();
+}
+
+void
+WMFMediaDataDecoder::ProcessOutput()
+{
+ RefPtr<MediaData> output;
+ HRESULT hr = S_OK;
+ // Drain every sample the MFT has ready, forwarding each to the callback.
+ while (SUCCEEDED(hr = mMFTManager->Output(mLastStreamOffset, output)) &&
+ output) {
+ mHasSuccessfulOutput = true;
+ mCallback->Output(output);
+ }
+ if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+ // Decoder is starved; ask the reader for more input.
+ mCallback->InputExhausted();
+ } else if (FAILED(hr)) {
+ NS_WARNING("WMFMediaDataDecoder failed to output data");
+ mCallback->Error(MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
+ RESULT_DETAIL("MFTManager::Output:%x", hr)));
+ if (!mRecordedError) {
+ SendTelemetry(hr);
+ mRecordedError = true;
+ }
+ }
+}
+
+// Runs on mTaskQueue; see Flush() for the synchronization protocol.
+void
+WMFMediaDataDecoder::ProcessFlush()
+{
+ if (mMFTManager) {
+ mMFTManager->Flush();
+ }
+}
+
+void
+WMFMediaDataDecoder::Flush()
+{
+ MOZ_ASSERT(mCallback->OnReaderTaskQueue());
+ MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown);
+
+ // Raise mIsFlushing so queued ProcessDecode() runnables become no-ops,
+ // then synchronously wait for the MFT flush to complete.
+ mIsFlushing = true;
+ nsCOMPtr<nsIRunnable> runnable =
+ NewRunnableMethod(this, &WMFMediaDataDecoder::ProcessFlush);
+ SyncRunnable::DispatchToThread(mTaskQueue, runnable);
+ mIsFlushing = false;
+}
+
+void
+WMFMediaDataDecoder::ProcessDrain()
+{
+ if (!mIsFlushing && mMFTManager) {
+ // Order the decoder to drain...
+ mMFTManager->Drain();
+ // Then extract all available output.
+ ProcessOutput();
+ }
+ mCallback->DrainComplete();
+}
+
+void
+WMFMediaDataDecoder::Drain()
+{
+ MOZ_ASSERT(mCallback->OnReaderTaskQueue());
+ MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown);
+
+ mTaskQueue->Dispatch(NewRunnableMethod(this, &WMFMediaDataDecoder::ProcessDrain));
+}
+
+bool
+WMFMediaDataDecoder::IsHardwareAccelerated(nsACString& aFailureReason) const {
+ MOZ_ASSERT(!mIsShutDown);
+
+ return mMFTManager && mMFTManager->IsHardwareAccelerated(aFailureReason);
+}
+
+void
+WMFMediaDataDecoder::SetSeekThreshold(const media::TimeUnit& aTime)
+{
+ MOZ_ASSERT(mCallback->OnReaderTaskQueue());
+ MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown);
+
+ // Forward the threshold to the manager on the task queue; |self| keeps
+ // this decoder alive for the duration of the dispatched lambda.
+ RefPtr<WMFMediaDataDecoder> self = this;
+ nsCOMPtr<nsIRunnable> runnable =
+ NS_NewRunnableFunction([self, aTime]() {
+ media::TimeUnit threshold = aTime;
+ self->mMFTManager->SetSeekThreshold(threshold);
+ });
+ mTaskQueue->Dispatch(runnable.forget());
+}
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/WMFMediaDataDecoder.h b/dom/media/platforms/wmf/WMFMediaDataDecoder.h
new file mode 100644
index 0000000000..a4dd49f56f
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFMediaDataDecoder.h
@@ -0,0 +1,147 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(WMFMediaDataDecoder_h_)
+#define WMFMediaDataDecoder_h_
+
+
+#include "WMF.h"
+#include "MFTDecoder.h"
+#include "mozilla/RefPtr.h"
+#include "nsAutoPtr.h"
+#include "PlatformDecoderModule.h"
+
+namespace mozilla {
+
+// Encapsulates the initialization of the MFTDecoder appropriate for decoding
+// a given stream, and the process of converting the IMFSample produced
+// by the MFT into a MediaData object.
+class MFTManager {
+public:
+ virtual ~MFTManager() {}
+
+ // Submit a compressed sample for decoding.
+ // This should forward to the MFTDecoder after performing
+ // any required sample formatting.
+ virtual HRESULT Input(MediaRawData* aSample) = 0;
+
+ // Produces decoded output, if possible. Blocks until output can be produced,
+ // or until no more is able to be produced.
+ // Returns S_OK on success, or MF_E_TRANSFORM_NEED_MORE_INPUT if there's not
+ // enough data to produce more output. If this returns a failure code other
+ // than MF_E_TRANSFORM_NEED_MORE_INPUT, an error will be reported to the
+ // MP4Reader.
+ virtual HRESULT Output(int64_t aStreamOffset,
+ RefPtr<MediaData>& aOutput) = 0;
+
+ // Flushes the MFT and forgets any pending seek threshold.
+ virtual void Flush()
+ {
+ mDecoder->Flush();
+ mSeekTargetThreshold.reset();
+ }
+
+ // Asks the MFT to emit any buffered output; does not extract it -- the
+ // caller follows up with Output() calls.
+ virtual void Drain()
+ {
+ if (FAILED(mDecoder->SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0))) {
+ NS_WARNING("Failed to send DRAIN command to MFT");
+ }
+ }
+
+ // Destroys all resources.
+ virtual void Shutdown() = 0;
+
+ virtual bool IsHardwareAccelerated(nsACString& aFailureReason) const { return false; }
+
+ virtual TrackInfo::TrackType GetType() = 0;
+
+ virtual const char* GetDescriptionName() const = 0;
+
+ // Records the seek target -- presumably used by subclasses to discard
+ // output prior to this time; confirm in WMFVideoMFTManager.
+ virtual void SetSeekThreshold(const media::TimeUnit& aTime) {
+ mSeekTargetThreshold = Some(aTime);
+ }
+
+protected:
+ // IMFTransform wrapper that performs the decoding.
+ RefPtr<MFTDecoder> mDecoder;
+
+ // Set by SetSeekThreshold(); cleared by Flush().
+ Maybe<media::TimeUnit> mSeekTargetThreshold;
+};
+
+// Decodes audio and video using Windows Media Foundation. Samples are decoded
+// using the MFTDecoder created by the MFTManager. This class implements
+// the higher-level logic that drives mapping the MFT to the async
+// MediaDataDecoder interface. The specifics of decoding the exact stream
+// type are handled by MFTManager and the MFTDecoder it creates.
+class WMFMediaDataDecoder : public MediaDataDecoder {
+public:
+ // Takes ownership of aOutputSource (the .cpp definition names this
+ // parameter aMFTManager).
+ WMFMediaDataDecoder(MFTManager* aOutputSource,
+ TaskQueue* aTaskQueue,
+ MediaDataDecoderCallback* aCallback);
+ ~WMFMediaDataDecoder();
+
+ RefPtr<MediaDataDecoder::InitPromise> Init() override;
+
+ // NOTE(review): lacks 'override' unlike the neighbouring declarations;
+ // confirm it matches the MediaDataDecoder interface.
+ void Input(MediaRawData* aSample);
+
+ void Flush() override;
+
+ void Drain() override;
+
+ void Shutdown() override;
+
+ bool IsHardwareAccelerated(nsACString& aFailureReason) const override;
+
+ const char* GetDescriptionName() const override
+ {
+ return mMFTManager ? mMFTManager->GetDescriptionName() : "";
+ }
+
+ virtual void SetSeekThreshold(const media::TimeUnit& aTime) override;
+
+private:
+
+ // Called on the task queue. Inserts the sample into the decoder, and
+ // extracts output if available.
+ void ProcessDecode(MediaRawData* aSample);
+
+ // Called on the task queue. Extracts output if available, and delivers
+ // it to the reader. Called after ProcessDecode() and ProcessDrain().
+ void ProcessOutput();
+
+ // Called on the task queue. Orders the MFT to flush. There is no output to
+ // extract.
+ void ProcessFlush();
+
+ // Called on the task queue. Orders the MFT to drain, and then extracts
+ // all available output.
+ void ProcessDrain();
+
+ void ProcessShutdown();
+
+ const RefPtr<TaskQueue> mTaskQueue;
+ MediaDataDecoderCallback* mCallback;
+
+ nsAutoPtr<MFTManager> mMFTManager;
+
+ // The last offset into the media resource that was passed into Input().
+ // This is used to approximate the decoder's position in the media resource.
+ int64_t mLastStreamOffset;
+
+ // Set on reader/decode thread calling Flush() to indicate that output is
+ // not required and so input samples on mTaskQueue need not be processed.
+ // Cleared on mTaskQueue.
+ Atomic<bool> mIsFlushing;
+
+ // Set at the end of Shutdown(); guards against use after shutdown.
+ bool mIsShutDown;
+
+ // For telemetry
+ bool mHasSuccessfulOutput = false;
+ bool mRecordedError = false;
+};
+
+} // namespace mozilla
+
+#endif // WMFMediaDataDecoder_h_
diff --git a/dom/media/platforms/wmf/WMFUtils.cpp b/dom/media/platforms/wmf/WMFUtils.cpp
new file mode 100644
index 0000000000..8aec8a8af4
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFUtils.cpp
@@ -0,0 +1,311 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WMFUtils.h"
+#include <stdint.h>
+#include "mozilla/ArrayUtils.h"
+#include "mozilla/RefPtr.h"
+#include "mozilla/WindowsVersion.h"
+#include "mozilla/Logging.h"
+#include "nsThreadUtils.h"
+#include "nsWindowsHelpers.h"
+#include "mozilla/CheckedInt.h"
+#include "VideoUtils.h"
+#include <initguid.h>
+#include "nsTArray.h"
+
+#ifdef WMF_MUST_DEFINE_AAC_MFT_CLSID
+// Some SDK versions don't define the AAC decoder CLSID.
+// {32D186A7-218F-4C75-8876-DD77273A8999}
+DEFINE_GUID(CLSID_CMSAACDecMFT, 0x32D186A7, 0x218F, 0x4C75, 0x88, 0x76, 0xDD, 0x77, 0x27, 0x3A, 0x89, 0x99);
+#endif
+
+namespace mozilla {
+
+// Converts aHNs (hundreds of nanoseconds) to a count of audio frames at
+// aRate frames per second. Returns E_FAIL on arithmetic overflow.
+HRESULT
+HNsToFrames(int64_t aHNs, uint32_t aRate, int64_t* aOutFrames)
+{
+ MOZ_ASSERT(aOutFrames);
+ const int64_t HNS_PER_S = USECS_PER_S * 10;
+ CheckedInt<int64_t> i = aHNs;
+ i *= aRate;
+ i /= HNS_PER_S;
+ NS_ENSURE_TRUE(i.isValid(), E_FAIL);
+ *aOutFrames = i.value();
+ return S_OK;
+}
+
+// Returns the image stride for aType: prefers the MF_MT_DEFAULT_STRIDE
+// attribute, otherwise computes it from the subtype FOURCC and width.
+HRESULT
+GetDefaultStride(IMFMediaType *aType, uint32_t aWidth, uint32_t* aOutStride)
+{
+ // Try to get the default stride from the media type.
+ HRESULT hr = aType->GetUINT32(MF_MT_DEFAULT_STRIDE, aOutStride);
+ if (SUCCEEDED(hr)) {
+ return S_OK;
+ }
+
+ // Stride attribute not set, calculate it.
+ GUID subtype = GUID_NULL;
+
+ hr = aType->GetGUID(MF_MT_SUBTYPE, &subtype);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ // subtype.Data1 holds the FOURCC for video formats.
+ hr = wmf::MFGetStrideForBitmapInfoHeader(subtype.Data1, aWidth, (LONG*)(aOutStride));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ return hr;
+}
+
+// Converts an MFOffset (16.16 fixed-point) to the nearest int32.
+int32_t
+MFOffsetToInt32(const MFOffset& aOffset)
+{
+ return int32_t(aOffset.value + (aOffset.fract / 65536.0f));
+}
+
+media::TimeUnit
+GetSampleDuration(IMFSample* aSample)
+{
+ NS_ENSURE_TRUE(aSample, media::TimeUnit::Invalid());
+ int64_t duration = 0;
+ // NOTE(review): GetSampleDuration's HRESULT is ignored here, so on
+ // failure this returns 0 rather than Invalid(), unlike GetSampleTime
+ // below -- confirm this asymmetry is intended.
+ aSample->GetSampleDuration(&duration);
+ return media::TimeUnit::FromMicroseconds(HNsToUsecs(duration));
+}
+
+media::TimeUnit
+GetSampleTime(IMFSample* aSample)
+{
+ NS_ENSURE_TRUE(aSample, media::TimeUnit::Invalid());
+ LONGLONG timestampHns = 0;
+ HRESULT hr = aSample->GetSampleTime(&timestampHns);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), media::TimeUnit::Invalid());
+ return media::TimeUnit::FromMicroseconds(HNsToUsecs(timestampHns));
+}
+
+// Gets the sub-region of the video frame that should be displayed.
+// See: http://msdn.microsoft.com/en-us/library/windows/desktop/bb530115(v=vs.85).aspx
+HRESULT
+GetPictureRegion(IMFMediaType* aMediaType, nsIntRect& aOutPictureRegion)
+{
+ // Determine if "pan and scan" is enabled for this media. If it is, we
+ // only display a region of the video frame, not the entire frame.
+ BOOL panScan = MFGetAttributeUINT32(aMediaType, MF_MT_PAN_SCAN_ENABLED, FALSE);
+
+ // If pan and scan mode is enabled, try to get the display region.
+ HRESULT hr = E_FAIL;
+ MFVideoArea videoArea;
+ memset(&videoArea, 0, sizeof(MFVideoArea));
+ if (panScan) {
+ hr = aMediaType->GetBlob(MF_MT_PAN_SCAN_APERTURE,
+ (UINT8*)&videoArea,
+ sizeof(MFVideoArea),
+ nullptr);
+ }
+
+ // If we're not in pan-and-scan mode, or the pan-and-scan region is not set,
+ // check for a minimum display aperture.
+ if (!panScan || hr == MF_E_ATTRIBUTENOTFOUND) {
+ hr = aMediaType->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE,
+ (UINT8*)&videoArea,
+ sizeof(MFVideoArea),
+ nullptr);
+ }
+
+ if (hr == MF_E_ATTRIBUTENOTFOUND) {
+ // Minimum display aperture is not set, for "backward compatibility with
+ // some components", check for a geometric aperture.
+ hr = aMediaType->GetBlob(MF_MT_GEOMETRIC_APERTURE,
+ (UINT8*)&videoArea,
+ sizeof(MFVideoArea),
+ nullptr);
+ }
+
+ if (SUCCEEDED(hr)) {
+ // The media specified a picture region, return it.
+ aOutPictureRegion = nsIntRect(MFOffsetToInt32(videoArea.OffsetX),
+ MFOffsetToInt32(videoArea.OffsetY),
+ videoArea.Area.cx,
+ videoArea.Area.cy);
+ return S_OK;
+ }
+
+ // No picture region defined, fall back to using the entire video area,
+ // sanity-checked against our maximum supported dimensions.
+ UINT32 width = 0, height = 0;
+ hr = MFGetAttributeSize(aMediaType, MF_MT_FRAME_SIZE, &width, &height);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+ NS_ENSURE_TRUE(width <= MAX_VIDEO_WIDTH, E_FAIL);
+ NS_ENSURE_TRUE(height <= MAX_VIDEO_HEIGHT, E_FAIL);
+
+ aOutPictureRegion = nsIntRect(0, 0, width, height);
+ return S_OK;
+}
+
+namespace wmf {
+
+// DLLs the WMF backend requires; loaded all-or-nothing by LoadDLLs().
+static const wchar_t* sDLLs[] = {
+ L"mfplat.dll",
+ L"mf.dll",
+ L"dxva2.dll",
+ L"evr.dll",
+};
+
+// Loads every DLL in sDLLs from the system directory. All-or-nothing: if
+// any fails to load, the ones already loaded are freed and E_FAIL is
+// returned; the outcome is cached in function-local statics so later calls
+// return immediately without retrying. The statics are unsynchronized, so
+// concurrent first calls would race -- presumably callers serialize this;
+// confirm.
+HRESULT
+LoadDLLs()
+{
+ static bool sDLLsLoaded = false;
+ static bool sFailedToLoadDlls = false;
+
+ if (sDLLsLoaded) {
+ return S_OK;
+ }
+ if (sFailedToLoadDlls) {
+ return E_FAIL;
+ }
+
+ // Try to load all the required DLLs. If we fail to load any dll,
+ // unload the dlls we succeeded in loading.
+ nsTArray<const wchar_t*> loadedDlls;
+ for (const wchar_t* dll : sDLLs) {
+ if (!LoadLibrarySystem32(dll)) {
+ NS_WARNING("Failed to load WMF DLLs");
+ for (const wchar_t* loadedDll : loadedDlls) {
+ FreeLibrary(GetModuleHandleW(loadedDll));
+ }
+ sFailedToLoadDlls = true;
+ return E_FAIL;
+ }
+ loadedDlls.AppendElement(dll);
+ }
+ sDLLsLoaded = true;
+
+ return S_OK;
+}
+
+// Resolves FunctionName from DLL on first use and caches the pointer in a
+// function-local static. Expands to a `return E_FAIL` on failure, so it may
+// only be used inside functions that return HRESULT.
+#define ENSURE_FUNCTION_PTR_HELPER(FunctionType, FunctionName, DLL) \
+ static FunctionType FunctionName##Ptr = nullptr; \
+ if (!FunctionName##Ptr) { \
+ FunctionName##Ptr = (FunctionType) GetProcAddress(GetModuleHandleW(L ## #DLL), #FunctionName); \
+ if (!FunctionName##Ptr) { \
+ NS_WARNING("Failed to get GetProcAddress of " #FunctionName " from " #DLL); \
+ return E_FAIL; \
+ } \
+ }
+
+// Variant that deduces the pointer type from the SDK declaration.
+#define ENSURE_FUNCTION_PTR(FunctionName, DLL) \
+ ENSURE_FUNCTION_PTR_HELPER(decltype(::FunctionName)*, FunctionName, DLL) \
+
+// Variant using a FunctionName##Ptr_t typedef declared with
+// DECL_FUNCTION_PTR below (for functions where decltype can't be used).
+#define ENSURE_FUNCTION_PTR_(FunctionName, DLL) \
+ ENSURE_FUNCTION_PTR_HELPER(FunctionName##Ptr_t, FunctionName, DLL) \
+
+#define DECL_FUNCTION_PTR(FunctionName, ...) \
+ typedef HRESULT (STDMETHODCALLTYPE * FunctionName##Ptr_t)(__VA_ARGS__)
+
+HRESULT
+MFStartup()
+{
+ if (!IsVistaOrLater() || IsWin7AndPre2000Compatible()) {
+ // *Only* use WMF on Vista and later, as if Firefox is run in Windows 95
+ // compatibility mode on Windows 7 (it does happen!) we may crash trying
+ // to startup WMF. So we need to detect the OS version here, as in
+ // compatibility mode IsVistaOrLater() and friends behave as if we're on
+ // the emulated version of Windows. See bug 1279171.
+ // Using GetVersionEx API which takes compatibility mode into account.
+ return E_FAIL;
+ }
+
+ HRESULT hr = LoadDLLs();
+ if (FAILED(hr)) {
+ return hr;
+ }
+
+ // Windows 7 takes a different MF version constant than Vista.
+ const int MF_VISTA_VERSION = (0x0001 << 16 | MF_API_VERSION);
+ const int MF_WIN7_VERSION = (0x0002 << 16 | MF_API_VERSION);
+
+ // decltype is unusable for functions having default parameters
+ DECL_FUNCTION_PTR(MFStartup, ULONG, DWORD);
+ ENSURE_FUNCTION_PTR_(MFStartup, Mfplat.dll)
+ if (!IsWin7OrLater())
+ return MFStartupPtr(MF_VISTA_VERSION, MFSTARTUP_FULL);
+ else
+ return MFStartupPtr(MF_WIN7_VERSION, MFSTARTUP_FULL);
+}
+
+HRESULT
+MFShutdown()
+{
+ ENSURE_FUNCTION_PTR(MFShutdown, Mfplat.dll)
+ return (MFShutdownPtr)();
+}
+
+// The wrappers below forward to WMF/DXVA entry points resolved at runtime
+// via ENSURE_FUNCTION_PTR (see above); each returns E_FAIL if the symbol
+// cannot be resolved from its DLL.
+HRESULT
+MFCreateMediaType(IMFMediaType **aOutMFType)
+{
+ ENSURE_FUNCTION_PTR(MFCreateMediaType, Mfplat.dll)
+ return (MFCreateMediaTypePtr)(aOutMFType);
+}
+
+
+HRESULT
+MFGetStrideForBitmapInfoHeader(DWORD aFormat,
+ DWORD aWidth,
+ LONG *aOutStride)
+{
+ ENSURE_FUNCTION_PTR(MFGetStrideForBitmapInfoHeader, evr.dll)
+ return (MFGetStrideForBitmapInfoHeaderPtr)(aFormat, aWidth, aOutStride);
+}
+
+HRESULT MFGetService(IUnknown *punkObject,
+ REFGUID guidService,
+ REFIID riid,
+ LPVOID *ppvObject)
+{
+ ENSURE_FUNCTION_PTR(MFGetService, mf.dll)
+ return (MFGetServicePtr)(punkObject, guidService, riid, ppvObject);
+}
+
+HRESULT
+DXVA2CreateDirect3DDeviceManager9(UINT *pResetToken,
+ IDirect3DDeviceManager9 **ppDXVAManager)
+{
+ ENSURE_FUNCTION_PTR(DXVA2CreateDirect3DDeviceManager9, dxva2.dll)
+ return (DXVA2CreateDirect3DDeviceManager9Ptr)(pResetToken, ppDXVAManager);
+}
+
+HRESULT
+MFCreateSample(IMFSample **ppIMFSample)
+{
+ ENSURE_FUNCTION_PTR(MFCreateSample, mfplat.dll)
+ return (MFCreateSamplePtr)(ppIMFSample);
+}
+
+HRESULT
+MFCreateAlignedMemoryBuffer(DWORD cbMaxLength,
+ DWORD fAlignmentFlags,
+ IMFMediaBuffer **ppBuffer)
+{
+ ENSURE_FUNCTION_PTR(MFCreateAlignedMemoryBuffer, mfplat.dll)
+ return (MFCreateAlignedMemoryBufferPtr)(cbMaxLength, fAlignmentFlags, ppBuffer);
+}
+
+HRESULT
+MFCreateDXGIDeviceManager(UINT *pResetToken, IMFDXGIDeviceManager **ppDXVAManager)
+{
+ ENSURE_FUNCTION_PTR(MFCreateDXGIDeviceManager, mfplat.dll)
+ return (MFCreateDXGIDeviceManagerPtr)(pResetToken, ppDXVAManager);
+}
+
+// NOTE(review): the parameter name "fButtomUpWhenLinear" is a typo for the
+// SDK's fBottomUpWhenLinear; harmless (names don't affect the call) but
+// worth fixing upstream.
+HRESULT
+MFCreateDXGISurfaceBuffer(REFIID riid,
+ IUnknown *punkSurface,
+ UINT uSubresourceIndex,
+ BOOL fButtomUpWhenLinear,
+ IMFMediaBuffer **ppBuffer)
+{
+ ENSURE_FUNCTION_PTR(MFCreateDXGISurfaceBuffer, mfplat.dll)
+ return (MFCreateDXGISurfaceBufferPtr)(riid, punkSurface, uSubresourceIndex, fButtomUpWhenLinear, ppBuffer);
+}
+
+} // end namespace wmf
+} // end namespace mozilla
diff --git a/dom/media/platforms/wmf/WMFUtils.h b/dom/media/platforms/wmf/WMFUtils.h
new file mode 100644
index 0000000000..ac87797748
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFUtils.h
@@ -0,0 +1,67 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef WMFUtils_h
+#define WMFUtils_h
+
+#include "WMF.h"
+#include "nsString.h"
+#include "nsRect.h"
+#include "TimeUnits.h"
+#include "VideoUtils.h"
+
+// Various utilities shared by WMF backend files.
+
+namespace mozilla {
+
+// Converts from microseconds to hundreds of nanoseconds.
+// We use microseconds for our timestamps, whereas WMF uses
+// hundreds of nanoseconds.
+inline int64_t
+UsecsToHNs(int64_t aUsecs) {
+ return aUsecs * 10;
+}
+
+// Converts from hundreds of nanoseconds to microseconds.
+// We use microseconds for our timestamps, whereas WMF uses
+// hundreds of nanoseconds.
+inline int64_t
+HNsToUsecs(int64_t hNanoSecs) {
+ return hNanoSecs / 10;
+}
+
+// Converts hundreds-of-nanoseconds to a frame count at aRate frames/sec;
+// returns E_FAIL on overflow.
+HRESULT
+HNsToFrames(int64_t aHNs, uint32_t aRate, int64_t* aOutFrames);
+
+// Returns the image stride for aType, preferring MF_MT_DEFAULT_STRIDE and
+// otherwise computing it from the subtype and width.
+HRESULT
+GetDefaultStride(IMFMediaType *aType, uint32_t aWidth, uint32_t* aOutStride);
+
+// Converts an MFOffset (16.16 fixed-point) to the nearest int32.
+int32_t
+MFOffsetToInt32(const MFOffset& aOffset);
+
+// Gets the sub-region of the video frame that should be displayed.
+// See: http://msdn.microsoft.com/en-us/library/windows/desktop/bb530115(v=vs.85).aspx
+HRESULT
+GetPictureRegion(IMFMediaType* aMediaType, nsIntRect& aOutPictureRegion);
+
+// Returns the duration of a IMFSample in TimeUnit.
+// Returns media::TimeUnit::Invalid() on failure.
+media::TimeUnit
+GetSampleDuration(IMFSample* aSample);
+
+// Returns the presentation time of a IMFSample in TimeUnit.
+// Returns media::TimeUnit::Invalid() on failure.
+media::TimeUnit
+GetSampleTime(IMFSample* aSample);
+
+// Returns true iff every bit set in |pattern| is also set in |flags|.
+inline bool
+IsFlagSet(DWORD flags, DWORD pattern) {
+ return (flags & pattern) == pattern;
+}
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
new file mode 100644
index 0000000000..291bc5b74f
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
@@ -0,0 +1,1016 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <algorithm>
+#include <winsdkver.h>
+#include <psapi.h>
+#include "WMFVideoMFTManager.h"
+#include "MediaDecoderReader.h"
+#include "gfxPrefs.h"
+#include "WMFUtils.h"
+#include "ImageContainer.h"
+#include "VideoUtils.h"
+#include "DXVA2Manager.h"
+#include "nsThreadUtils.h"
+#include "Layers.h"
+#include "mozilla/ClearOnShutdown.h"
+#include "mozilla/layers/LayersTypes.h"
+#include "MediaInfo.h"
+#include "mozilla/Logging.h"
+#include "nsWindowsHelpers.h"
+#include "gfx2DGlue.h"
+#include "gfxWindowsPlatform.h"
+#include "IMFYCbCrImage.h"
+#include "mozilla/WindowsVersion.h"
+#include "mozilla/Telemetry.h"
+#include "nsPrintfCString.h"
+#include "MediaTelemetryConstants.h"
+#include "GMPUtils.h" // For SplitAt. TODO: Move SplitAt to a central place.
+#include "MP4Decoder.h"
+#include "VPXDecoder.h"
+#include "mozilla/SyncRunnable.h"
+
+#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
+
+using mozilla::layers::Image;
+using mozilla::layers::IMFYCbCrImage;
+using mozilla::layers::LayerManager;
+using mozilla::layers::LayersBackend;
+
+#if WINVER_MAXVER < 0x0A00
+// Windows 10+ SDK has VP80 and VP90 defines
+const GUID MFVideoFormat_VP80 =
+{
+ 0x30385056,
+ 0x0000,
+ 0x0010,
+ {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}
+};
+
+const GUID MFVideoFormat_VP90 =
+{
+ 0x30395056,
+ 0x0000,
+ 0x0010,
+ {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}
+};
+#endif
+
+const CLSID CLSID_WebmMfVpxDec =
+{
+ 0xe3aaf548,
+ 0xc9a4,
+ 0x4c6e,
+ { 0x23, 0x4d, 0x5a, 0xda, 0x37, 0x4b, 0x00, 0x00 }
+};
+
+namespace mozilla {
+
+LayersBackend
+GetCompositorBackendType(layers::KnowsCompositor* aKnowsCompositor)
+{
+  // Without a compositor to ask, report no layers backend.
+  if (!aKnowsCompositor) {
+    return LayersBackend::LAYERS_NONE;
+  }
+  return aKnowsCompositor->GetCompositorBackendType();
+}
+
+WMFVideoMFTManager::WMFVideoMFTManager(
+  const VideoInfo& aConfig,
+  layers::KnowsCompositor* aKnowsCompositor,
+  layers::ImageContainer* aImageContainer,
+  bool aDXVAEnabled)
+  : mVideoInfo(aConfig)
+  , mVideoStride(0)
+  , mImageSize(aConfig.mImage)
+  , mImageContainer(aImageContainer)
+  , mDXVAEnabled(aDXVAEnabled)
+  , mKnowsCompositor(aKnowsCompositor)
+  , mNullOutputCount(0)
+  , mGotValidOutputAfterNullOutput(false)
+  , mGotExcessiveNullOutput(false)
+  , mIsValid(true)
+  // mVideoStride, mVideoWidth, mVideoHeight, mUseHwAccel are initialized in
+  // Init().
+{
+  MOZ_COUNT_CTOR(WMFVideoMFTManager);
+
+  // Map the MIME type onto the stream type used by GetMFTGUID() and
+  // GetMediaSubtypeGUID() to pick the decoder CLSID and media subtype.
+  // An unrecognized type yields Unknown (GUID_NULL from those getters).
+  // Need additional checks/params to check vp8/vp9
+  if (MP4Decoder::IsH264(aConfig.mMimeType)) {
+    mStreamType = H264;
+  } else if (VPXDecoder::IsVP8(aConfig.mMimeType)) {
+    mStreamType = VP8;
+  } else if (VPXDecoder::IsVP9(aConfig.mMimeType)) {
+    mStreamType = VP9;
+  } else {
+    mStreamType = Unknown;
+  }
+}
+
+WMFVideoMFTManager::~WMFVideoMFTManager()
+{
+  MOZ_COUNT_DTOR(WMFVideoMFTManager);
+  // Ensure DXVA/D3D9 related objects are released on the main thread.
+  if (mDXVA2Manager) {
+    DeleteOnMainThread(mDXVA2Manager);
+  }
+
+  // Record whether the video decoder successfully decoded, or output null
+  // samples but did/didn't recover.
+  // Buckets: 0 = no null outputs; 1 = excessive nulls but later recovered;
+  // 2 = excessive nulls, gave up; 3 = some nulls then recovered;
+  // 4 = some nulls, never recovered.
+  uint32_t telemetry = (mNullOutputCount == 0) ? 0 :
+    (mGotValidOutputAfterNullOutput && mGotExcessiveNullOutput) ? 1 :
+    mGotExcessiveNullOutput ? 2 :
+    mGotValidOutputAfterNullOutput ? 3 :
+    4;
+
+  // Accumulation is dispatched to the main thread; the lambda captures the
+  // computed bucket by value since |this| is going away.
+  nsCOMPtr<nsIRunnable> task = NS_NewRunnableFunction([=]() -> void {
+    LOG(nsPrintfCString("Reporting telemetry VIDEO_MFT_OUTPUT_NULL_SAMPLES=%d", telemetry).get());
+    Telemetry::Accumulate(Telemetry::ID::VIDEO_MFT_OUTPUT_NULL_SAMPLES, telemetry);
+  });
+  AbstractThread::MainThread()->Dispatch(task.forget());
+}
+
+const GUID&
+WMFVideoMFTManager::GetMFTGUID()
+{
+  // Pick the decoder MFT CLSID for the stream type; both VP8 and VP9 are
+  // handled by the same WebM MFT.
+  MOZ_ASSERT(mStreamType != Unknown);
+  switch (mStreamType) {
+    case H264:
+      return CLSID_CMSH264DecoderMFT;
+    case VP8:
+    case VP9:
+      return CLSID_WebmMfVpxDec;
+    default:
+      return GUID_NULL;
+  }
+}
+
+const GUID&
+WMFVideoMFTManager::GetMediaSubtypeGUID()
+{
+  // Media Foundation input subtype matching the stream type.
+  MOZ_ASSERT(mStreamType != Unknown);
+  if (mStreamType == H264) {
+    return MFVideoFormat_H264;
+  }
+  if (mStreamType == VP8) {
+    return MFVideoFormat_VP80;
+  }
+  if (mStreamType == VP9) {
+    return MFVideoFormat_VP90;
+  }
+  return GUID_NULL;
+}
+
+// Per-pref cache of the last blacklist string checked and its result, so
+// loaded modules are only re-scanned when the pref value actually changes.
+struct D3DDLLBlacklistingCache
+{
+  // Blacklist pref value last seen.
+  nsCString mBlacklistPref;
+  // Non-empty if a blacklisted DLL was found.
+  nsCString mBlacklistedDLL;
+};
+// One cache per blacklist pref (D3D11 and D3D9); created lazily on first use
+// by FindDXVABlacklistedDLL() and cleared on shutdown.
+StaticAutoPtr<D3DDLLBlacklistingCache> sD3D11BlacklistingCache;
+StaticAutoPtr<D3DDLLBlacklistingCache> sD3D9BlacklistingCache;
+
+// If a blacklisted DLL is found, return its information, otherwise "".
+static const nsCString&
+FindDXVABlacklistedDLL(StaticAutoPtr<D3DDLLBlacklistingCache>& aDLLBlacklistingCache,
+                       const nsCString& aBlacklist,
+                       const char* aDLLBlacklistPrefName)
+{
+  NS_ASSERTION(NS_IsMainThread(), "Must be on main thread.");
+
+  if (!aDLLBlacklistingCache) {
+    // First time here, create persistent data that will be reused in all
+    // D3D11-blacklisting checks.
+    aDLLBlacklistingCache = new D3DDLLBlacklistingCache();
+    ClearOnShutdown(&aDLLBlacklistingCache);
+  }
+
+  if (aBlacklist.IsEmpty()) {
+    // Empty blacklist -> No blacklisting.
+    aDLLBlacklistingCache->mBlacklistPref.SetLength(0);
+    aDLLBlacklistingCache->mBlacklistedDLL.SetLength(0);
+    return aDLLBlacklistingCache->mBlacklistedDLL;
+  }
+
+  // Detect changes in pref.
+  if (aDLLBlacklistingCache->mBlacklistPref.Equals(aBlacklist)) {
+    // Same blacklist -> Return same result (i.e., don't check DLLs again).
+    return aDLLBlacklistingCache->mBlacklistedDLL;
+  }
+  // Adopt new pref now, so we don't work on it again.
+  aDLLBlacklistingCache->mBlacklistPref = aBlacklist;
+
+  // Enumerate the modules loaded in this process so each blacklist entry can
+  // be matched against what is actually loaded.
+  // NOTE(review): the module list can change between the sizing call and the
+  // filling call below; a module loaded in between would be missed — confirm
+  // this best-effort enumeration is acceptable.
+  HANDLE hProcess = GetCurrentProcess();
+  mozilla::UniquePtr<HMODULE[]> hMods;
+  unsigned int modulesNum = 0;
+  if (hProcess != NULL) {
+    DWORD modulesSize;
+    EnumProcessModules(hProcess, nullptr, 0, &modulesSize);
+    modulesNum = modulesSize / sizeof(HMODULE);
+    hMods = mozilla::MakeUnique<HMODULE[]>(modulesNum);
+    EnumProcessModules(hProcess, hMods.get(), modulesNum * sizeof(HMODULE), &modulesSize);
+  }
+
+  // media.wmf.disable-d3d*-for-dlls format: (whitespace is trimmed)
+  // "dll1.dll: 1.2.3.4[, more versions...][; more dlls...]"
+  nsTArray<nsCString> dlls;
+  SplitAt(";", aBlacklist, dlls);
+  for (const auto& dll : dlls) {
+    nsTArray<nsCString> nameAndVersions;
+    SplitAt(":", dll, nameAndVersions);
+    if (nameAndVersions.Length() != 2) {
+      NS_WARNING(nsPrintfCString("Skipping incorrect '%s' dll:versions format",
+                                 aDLLBlacklistPrefName).get());
+      continue;
+    }
+
+    nameAndVersions[0].CompressWhitespace();
+    NS_ConvertUTF8toUTF16 name(nameAndVersions[0]);
+
+    // Deliberately iterates one past the last module: the i == modulesNum
+    // pass (else branch below) checks the DLL at its System32 path even if
+    // it isn't loaded in this process.
+    for (unsigned int i = 0; i <= modulesNum; i++) {
+      WCHAR dllPath[MAX_PATH + 1];
+
+      if (i < modulesNum) {
+        if (!GetModuleFileNameEx(hProcess, hMods[i], dllPath, sizeof(dllPath) / sizeof(WCHAR))) {
+          continue;
+        }
+
+        nsCOMPtr<nsIFile> file;
+        if (NS_WARN_IF(NS_FAILED(NS_NewLocalFile(nsDependentString(dllPath), false, getter_AddRefs(file))))) {
+          continue;
+        }
+
+        nsAutoString leafName;
+        if (NS_WARN_IF(NS_FAILED(file->GetLeafName(leafName)))) {
+          continue;
+        }
+
+        // Case-insensitive match of the module's leaf name against the
+        // blacklisted DLL name.
+        if (_wcsicmp(leafName.get(), name.get())) {
+          continue;
+        }
+      } else {
+        if (!ConstructSystem32Path(name.get(), dllPath, MAX_PATH + 1)) {
+          // Cannot build path -> Assume it's not the blacklisted DLL.
+          continue;
+        }
+      }
+
+      DWORD zero;
+      DWORD infoSize = GetFileVersionInfoSizeW(dllPath, &zero);
+      if (infoSize == 0) {
+        // Can't get file info -> Assume we don't have the blacklisted DLL.
+        continue;
+      }
+      // vInfo is a pointer into infoData, that's why we keep it outside of the loop.
+      auto infoData = MakeUnique<unsigned char[]>(infoSize);
+      VS_FIXEDFILEINFO *vInfo;
+      UINT vInfoLen;
+      if (!GetFileVersionInfoW(dllPath, 0, infoSize, infoData.get())
+          || !VerQueryValueW(infoData.get(), L"\\", (LPVOID*)&vInfo, &vInfoLen)
+          || !vInfo) {
+        // Can't find version -> Assume it's not blacklisted.
+        continue;
+      }
+
+      nsTArray<nsCString> versions;
+      SplitAt(",", nameAndVersions[1], versions);
+      for (const auto& version : versions) {
+        nsTArray<nsCString> numberStrings;
+        SplitAt(".", version, numberStrings);
+        if (numberStrings.Length() != 4) {
+          NS_WARNING(nsPrintfCString("Skipping incorrect '%s' a.b.c.d version format",
+                                     aDLLBlacklistPrefName).get());
+          continue;
+        }
+        DWORD numbers[4];
+        nsresult errorCode = NS_OK;
+        for (int i = 0; i < 4; ++i) {
+          numberStrings[i].CompressWhitespace();
+          numbers[i] = DWORD(numberStrings[i].ToInteger(&errorCode));
+          if (NS_FAILED(errorCode)) {
+            break;
+          }
+          // Each component must fit in 16 bits so two of them can be packed
+          // into each 32-bit VS_FIXEDFILEINFO version field below.
+          if (numbers[i] > UINT16_MAX) {
+            errorCode = NS_ERROR_FAILURE;
+            break;
+          }
+        }
+
+        if (NS_FAILED(errorCode)) {
+          NS_WARNING(nsPrintfCString("Skipping incorrect '%s' a.b.c.d version format",
+                                     aDLLBlacklistPrefName).get());
+          continue;
+        }
+
+        // Exact a.b.c.d version match -> blacklisted.
+        if (vInfo->dwFileVersionMS == ((numbers[0] << 16) | numbers[1])
+            && vInfo->dwFileVersionLS == ((numbers[2] << 16) | numbers[3])) {
+          // Blacklisted! Record bad DLL.
+          aDLLBlacklistingCache->mBlacklistedDLL.SetLength(0);
+          aDLLBlacklistingCache->mBlacklistedDLL.AppendPrintf(
+            "%s (%lu.%lu.%lu.%lu)",
+            nameAndVersions[0].get(), numbers[0], numbers[1], numbers[2], numbers[3]);
+          return aDLLBlacklistingCache->mBlacklistedDLL;
+        }
+      }
+    }
+  }
+
+  // No blacklisted DLL.
+  aDLLBlacklistingCache->mBlacklistedDLL.SetLength(0);
+  return aDLLBlacklistingCache->mBlacklistedDLL;
+}
+
+// Check the process' DLLs against the D3D11 DLL blacklist pref.
+static const nsCString&
+FindD3D11BlacklistedDLL() {
+  return FindDXVABlacklistedDLL(
+    sD3D11BlacklistingCache,
+    gfx::gfxVars::PDMWMFDisableD3D11Dlls(),
+    "media.wmf.disable-d3d11-for-dlls");
+}
+
+// Check the process' DLLs against the D3D9 DLL blacklist pref.
+static const nsCString&
+FindD3D9BlacklistedDLL() {
+  return FindDXVABlacklistedDLL(
+    sD3D9BlacklistingCache,
+    gfx::gfxVars::PDMWMFDisableD3D9Dlls(),
+    "media.wmf.disable-d3d9-for-dlls");
+}
+
+// Runnable that creates the DXVA2Manager on the main thread, trying D3D11
+// first (when allowed) and falling back to D3D9.  Failure reasons from both
+// attempts are accumulated into the caller-owned string.
+class CreateDXVAManagerEvent : public Runnable {
+public:
+  CreateDXVAManagerEvent(LayersBackend aBackend,
+                         layers::KnowsCompositor* aKnowsCompositor,
+                         nsCString& aFailureReason)
+    : mBackend(aBackend)
+    , mKnowsCompositor(aKnowsCompositor)
+    , mFailureReason(aFailureReason)
+  {}
+
+  NS_IMETHOD Run() override {
+    NS_ASSERTION(NS_IsMainThread(), "Must be on main thread.");
+    nsACString* failureReason = &mFailureReason;
+    nsCString secondFailureReason;
+    if (mBackend == LayersBackend::LAYERS_D3D11 &&
+        gfxPrefs::PDMWMFAllowD3D11() && IsWin8OrLater()) {
+      const nsCString& blacklistedDLL = FindD3D11BlacklistedDLL();
+      if (!blacklistedDLL.IsEmpty()) {
+        failureReason->AppendPrintf("D3D11 blacklisted with DLL %s",
+                                    blacklistedDLL.get());
+      } else {
+        mDXVA2Manager = DXVA2Manager::CreateD3D11DXVA(mKnowsCompositor, *failureReason);
+        if (mDXVA2Manager) {
+          return NS_OK;
+        }
+      }
+      // Try again with d3d9, but record the failure reason
+      // into a new var to avoid overwriting the d3d11 failure.
+      failureReason = &secondFailureReason;
+      mFailureReason.Append(NS_LITERAL_CSTRING("; "));
+    }
+
+    const nsCString& blacklistedDLL = FindD3D9BlacklistedDLL();
+    if (!blacklistedDLL.IsEmpty()) {
+      mFailureReason.AppendPrintf("D3D9 blacklisted with DLL %s",
+                                  blacklistedDLL.get());
+    } else {
+      mDXVA2Manager = DXVA2Manager::CreateD3D9DXVA(mKnowsCompositor, *failureReason);
+      // Make sure we include the messages from both attempts (if applicable).
+      mFailureReason.Append(secondFailureReason);
+    }
+    return NS_OK;
+  }
+  // Created manager, or null on failure; taken by the dispatching caller
+  // (InitializeDXVA) via nsAutoPtr assignment after Run() completes.
+  nsAutoPtr<DXVA2Manager> mDXVA2Manager;
+  layers::LayersBackend mBackend;
+  KnowsCompositor* mKnowsCompositor;
+  nsACString& mFailureReason;
+};
+
+bool
+WMFVideoMFTManager::InitializeDXVA(bool aForceD3D9)
+{
+  // If we use DXVA but aren't running with a D3D layer manager then the
+  // readback of decoded video frames from GPU to CPU memory grinds painting
+  // to a halt, and makes playback performance *worse*.
+  if (!mDXVAEnabled) {
+    mDXVAFailureReason.AssignLiteral("Hardware video decoding disabled or blacklisted");
+    return false;
+  }
+  MOZ_ASSERT(!mDXVA2Manager);
+  LayersBackend backend = GetCompositorBackendType(mKnowsCompositor);
+  if (backend != LayersBackend::LAYERS_D3D9 &&
+      backend != LayersBackend::LAYERS_D3D11) {
+    mDXVAFailureReason.AssignLiteral("Unsupported layers backend");
+    return false;
+  }
+
+  // The DXVA manager must be created on the main thread.
+  RefPtr<CreateDXVAManagerEvent> event =
+    new CreateDXVAManagerEvent(aForceD3D9 ? LayersBackend::LAYERS_D3D9
+                                          : backend,
+                               mKnowsCompositor,
+                               mDXVAFailureReason);
+
+  if (NS_IsMainThread()) {
+    event->Run();
+  } else {
+    // This logic needs to run on the main thread
+    nsCOMPtr<nsIThread> mainThread = do_GetMainThread();
+    mozilla::SyncRunnable::DispatchToThread(mainThread, event);
+  }
+  // Take ownership of the created manager (null on failure).
+  mDXVA2Manager = event->mDXVA2Manager;
+
+  return mDXVA2Manager != nullptr;
+}
+
+bool
+WMFVideoMFTManager::ValidateVideoInfo()
+{
+  // The WMF H.264 decoder is documented to have a minimum resolution
+  // 48x48 pixels. We've observed the decoder working for output smaller than
+  // that, but on some output it hangs in IMFTransform::ProcessOutput(), so
+  // we just reject streams which are less than the documented minimum.
+  // https://msdn.microsoft.com/en-us/library/windows/desktop/dd797815(v=vs.85).aspx
+  static const int32_t MIN_H264_FRAME_DIMENSION = 48;
+  if (mStreamType == H264 &&
+      (mVideoInfo.mImage.width < MIN_H264_FRAME_DIMENSION ||
+       mVideoInfo.mImage.height < MIN_H264_FRAME_DIMENSION)) {
+    LogToBrowserConsole(NS_LITERAL_STRING(
+      "Can't decode H.264 stream with width or height less than 48 pixels."));
+    mIsValid = false;
+  }
+
+  // Once marked invalid the manager stays invalid; Input() then rejects all
+  // samples with E_FAIL.
+  return mIsValid;
+}
+
+bool
+WMFVideoMFTManager::Init()
+{
+  // Top-level initialization: validate the stream config, then create and
+  // configure the MFT.  The first attempt does not force D3D9; InitInternal
+  // may still end up in the software path.
+  if (!ValidateVideoInfo()) {
+    return false;
+  }
+
+  bool success = InitInternal(/* aForceD3D9 = */ false);
+
+  if (success && mDXVA2Manager) {
+    // If we had some failures but eventually made it work,
+    // make sure we preserve the messages.
+    if (mDXVA2Manager->IsD3D11()) {
+      mDXVAFailureReason.Append(NS_LITERAL_CSTRING("Using D3D11 API"));
+    } else {
+      mDXVAFailureReason.Append(NS_LITERAL_CSTRING("Using D3D9 API"));
+    }
+  }
+
+  return success;
+}
+
+bool
+WMFVideoMFTManager::InitInternal(bool aForceD3D9)
+{
+  // Creates the MFT, tries to attach the DXVA device manager (hardware
+  // path), and otherwise falls back to software decoding.  Returns false
+  // for VP8/VP9 when hardware acceleration is unavailable.
+  mUseHwAccel = false; // default value; changed if D3D setup succeeds.
+  bool useDxva = InitializeDXVA(aForceD3D9);
+
+  RefPtr<MFTDecoder> decoder(new MFTDecoder());
+
+  HRESULT hr = decoder->Create(GetMFTGUID());
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  RefPtr<IMFAttributes> attr(decoder->GetAttributes());
+  UINT32 aware = 0;
+  if (attr) {
+    // MF_SA_D3D_AWARE tells us whether this MFT can accept a D3D device
+    // manager at all.
+    attr->GetUINT32(MF_SA_D3D_AWARE, &aware);
+    attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
+                    WMFDecoderModule::GetNumDecoderThreads());
+    if (gfxPrefs::PDMWMFLowLatencyEnabled()) {
+      hr = attr->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
+      if (SUCCEEDED(hr)) {
+        LOG("Enabling Low Latency Mode");
+      } else {
+        LOG("Couldn't enable Low Latency Mode");
+      }
+    }
+  }
+
+  if (useDxva) {
+    if (aware) {
+      // TODO: Test if I need this anywhere... Maybe on Vista?
+      //hr = attr->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
+      //NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+      MOZ_ASSERT(mDXVA2Manager);
+      ULONG_PTR manager = ULONG_PTR(mDXVA2Manager->GetDXVADeviceManager());
+      hr = decoder->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, manager);
+      if (SUCCEEDED(hr)) {
+        mUseHwAccel = true;
+      } else {
+        // The MFT rejected our device manager; drop the DXVA manager and
+        // decode in software.
+        DeleteOnMainThread(mDXVA2Manager);
+        mDXVAFailureReason = nsPrintfCString("MFT_MESSAGE_SET_D3D_MANAGER failed with code %X", hr);
+      }
+    }
+    else {
+      mDXVAFailureReason.AssignLiteral("Decoder returned false for MF_SA_D3D_AWARE");
+    }
+  }
+
+  if (!mUseHwAccel) {
+    // Use VP8/9 MFT only if HW acceleration is available
+    if (mStreamType == VP9 || mStreamType == VP8) {
+      return false;
+    }
+    Telemetry::Accumulate(Telemetry::MEDIA_DECODER_BACKEND_USED,
+                          uint32_t(media::MediaDecoderBackend::WMFSoftware));
+  }
+
+  mDecoder = decoder;
+  hr = SetDecoderMediaTypes();
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  LOG("Video Decoder initialized, Using DXVA: %s", (mUseHwAccel ? "Yes" : "No"));
+
+  return true;
+}
+
+HRESULT
+WMFVideoMFTManager::SetDecoderMediaTypes()
+{
+  // Setup the input/output media types.
+  RefPtr<IMFMediaType> inputType;
+  HRESULT hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID());
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // MSFT MFT needs this frame size set for VP9?
+  // Note: for VP8/VP9 this overrides the interlace mode set just above
+  // with Progressive.
+  if (mStreamType == VP9 || mStreamType == VP8) {
+    hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, mVideoInfo.ImageRect().width, mVideoInfo.ImageRect().height);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  }
+
+  RefPtr<IMFMediaType> outputType;
+  hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // NV12 when hardware-accelerated (DXVA2's preferred format), YV12 for
+  // the software path; see ConfigureVideoFrameGeometry() for the rationale.
+  GUID outputSubType = mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12;
+  hr = outputType->SetGUID(MF_MT_SUBTYPE, outputSubType);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  return mDecoder->SetMediaTypes(inputType, outputType);
+}
+
+HRESULT
+WMFVideoMFTManager::Input(MediaRawData* aSample)
+{
+  // Feed one compressed sample to the MFT.  Records the sample's time and
+  // duration so Output() can patch up timestamps, and keeps the created
+  // IMFSample in mLastInput for resubmission after a stream change.
+  if (!mIsValid) {
+    return E_FAIL;
+  }
+
+  if (!mDecoder) {
+    // This can happen during shutdown.
+    return E_FAIL;
+  }
+
+  HRESULT hr = mDecoder->CreateInputSample(aSample->Data(),
+                                           uint32_t(aSample->Size()),
+                                           aSample->mTime,
+                                           &mLastInput);
+  NS_ENSURE_TRUE(SUCCEEDED(hr) && mLastInput != nullptr, hr);
+
+  mLastDuration = aSample->mDuration;
+  mLastTime = aSample->mTime;
+  mSamplesCount++;
+
+  // Forward sample data to the decoder.
+  return mDecoder->Input(mLastInput);
+}
+
+// Runnable that queries DXVA2Manager::SupportsConfig() on the main thread,
+// where a crash guard protects the check (see CanUseDXVA()).
+class SupportsConfigEvent : public Runnable {
+public:
+  SupportsConfigEvent(DXVA2Manager* aDXVA2Manager, IMFMediaType* aMediaType, float aFramerate)
+    : mDXVA2Manager(aDXVA2Manager)
+    , mMediaType(aMediaType)
+    , mFramerate(aFramerate)
+    , mSupportsConfig(false)
+  {}
+
+  // Marked 'override' for consistency with CreateDXVAManagerEvent::Run and
+  // so the compiler verifies the Runnable signature.
+  NS_IMETHOD Run() override {
+    MOZ_ASSERT(NS_IsMainThread(), "Must be on main thread.");
+    mSupportsConfig = mDXVA2Manager->SupportsConfig(mMediaType, mFramerate);
+    return NS_OK;
+  }
+  // Non-owning pointers: the dispatching caller keeps these alive across
+  // the synchronous dispatch.
+  DXVA2Manager* mDXVA2Manager;
+  IMFMediaType* mMediaType;
+  float mFramerate;
+  // Out-param: result of the SupportsConfig() query.
+  bool mSupportsConfig;
+};
+
+// The MFTransform we use for decoding h264 video will silently fall
+// back to software decoding (even if we've negotiated DXVA) if the GPU
+// doesn't support decoding the given resolution. It will then upload
+// the software decoded frames into d3d textures to preserve behaviour.
+//
+// Unfortunately this seems to cause corruption (see bug 1193547) and is
+// slow because the upload is done into a non-shareable texture and requires
+// us to copy it.
+//
+// This code tests if the given resolution can be supported directly on the GPU,
+// and makes sure we only ask the MFT for DXVA if it can be supported properly.
+//
+// Ideally we'd know the framerate during initialization and would also ensure
+// that new decoders are created if the resolution changes. Then we could move
+// this check into Init and consolidate the main thread blocking code.
+bool
+WMFVideoMFTManager::CanUseDXVA(IMFMediaType* aType)
+{
+  MOZ_ASSERT(mDXVA2Manager);
+  // SupportsConfig only checks for valid h264 decoders currently.
+  if (mStreamType != H264) {
+    return true;
+  }
+
+  // Assume the current samples duration is representative for the
+  // entire video.
+  // NOTE(review): a zero mLastDuration would make this infinite — confirm
+  // Input() always runs with a non-zero duration before we get here.
+  float framerate = 1000000.0 / mLastDuration;
+
+  // The supports config check must be done on the main thread since we have
+  // a crash guard protecting it.
+  RefPtr<SupportsConfigEvent> event =
+    new SupportsConfigEvent(mDXVA2Manager, aType, framerate);
+
+  if (NS_IsMainThread()) {
+    event->Run();
+  } else {
+    // This logic needs to run on the main thread
+    nsCOMPtr<nsIThread> mainThread = do_GetMainThread();
+    mozilla::SyncRunnable::DispatchToThread(mainThread, event);
+  }
+
+  return event->mSupportsConfig;
+}
+
+HRESULT
+WMFVideoMFTManager::ConfigureVideoFrameGeometry()
+{
+  // Called on MF_E_TRANSFORM_STREAM_CHANGE: re-reads the output media type
+  // and refreshes image size, stride, and the DXVA configuration.
+  RefPtr<IMFMediaType> mediaType;
+  HRESULT hr = mDecoder->GetOutputMediaType(mediaType);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // If we enabled/disabled DXVA in response to a resolution
+  // change then we need to renegotiate our media types,
+  // and resubmit our previous frame (since the MFT appears
+  // to lose it otherwise).
+  if (mUseHwAccel && !CanUseDXVA(mediaType)) {
+    mDXVAEnabled = false;
+    if (!Init()) {
+      return E_FAIL;
+    }
+
+    mDecoder->Input(mLastInput);
+    return S_OK;
+  }
+
+  // Verify that the video subtype is what we expect it to be.
+  // When using hardware acceleration/DXVA2 the video format should
+  // be NV12, which is DXVA2's preferred format. For software decoding
+  // we use YV12, as that's easier for us to stick into our rendering
+  // pipeline than NV12. NV12 has interleaved UV samples, whereas YV12
+  // is a planar format.
+  GUID videoFormat;
+  hr = mediaType->GetGUID(MF_MT_SUBTYPE, &videoFormat);
+  // Check hr before inspecting videoFormat: on failure the GUID is left
+  // uninitialized and the comparisons below would read garbage.
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  NS_ENSURE_TRUE(videoFormat == MFVideoFormat_NV12 || !mUseHwAccel, E_FAIL);
+  NS_ENSURE_TRUE(videoFormat == MFVideoFormat_YV12 || mUseHwAccel, E_FAIL);
+
+  nsIntRect pictureRegion;
+  hr = GetPictureRegion(mediaType, pictureRegion);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  UINT32 width = pictureRegion.width;
+  UINT32 height = pictureRegion.height;
+  mImageSize = nsIntSize(width, height);
+  // Calculate and validate the picture region and frame dimensions after
+  // scaling by the pixel aspect ratio.
+  pictureRegion = mVideoInfo.ScaledImageRect(width, height);
+  if (!IsValidVideoRegion(mImageSize, pictureRegion, mVideoInfo.mDisplay)) {
+    // Video track's frame sizes will overflow. Ignore the video track.
+    return E_FAIL;
+  }
+
+  if (mDXVA2Manager) {
+    hr = mDXVA2Manager->ConfigureForSize(width, height);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  }
+
+  // Success! Save state.
+  // Propagate failure here: otherwise mVideoStride silently keeps a stale
+  // (or zero) value and CreateBasicVideoFrame() would use a bogus stride.
+  hr = GetDefaultStride(mediaType, width, &mVideoStride);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  LOG("WMFVideoMFTManager frame geometry frame=(%u,%u) stride=%u picture=(%d, %d, %d, %d) display=(%d,%d)",
+      width, height,
+      mVideoStride,
+      pictureRegion.x, pictureRegion.y, pictureRegion.width, pictureRegion.height,
+      mVideoInfo.mDisplay.width, mVideoInfo.mDisplay.height);
+
+  return S_OK;
+}
+
+HRESULT
+WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
+                                          int64_t aStreamOffset,
+                                          VideoData** aOutVideoData)
+{
+  // Wraps a software-decoded YV12 sample into a VideoData: copies the data
+  // out for non-D3D compositors, or wraps the locked buffer in an
+  // IMFYCbCrImage for D3D compositors.
+  NS_ENSURE_TRUE(aSample, E_POINTER);
+  NS_ENSURE_TRUE(aOutVideoData, E_POINTER);
+
+  *aOutVideoData = nullptr;
+
+  HRESULT hr;
+  RefPtr<IMFMediaBuffer> buffer;
+
+  // Must convert to contiguous buffer to use IMD2DBuffer interface.
+  hr = aSample->ConvertToContiguousBuffer(getter_AddRefs(buffer));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Try and use the IMF2DBuffer interface if available, otherwise fallback
+  // to the IMFMediaBuffer interface. Apparently IMF2DBuffer is more efficient,
+  // but only some systems (Windows 8?) support it.
+  BYTE* data = nullptr;
+  LONG stride = 0;
+  RefPtr<IMF2DBuffer> twoDBuffer;
+  hr = buffer->QueryInterface(static_cast<IMF2DBuffer**>(getter_AddRefs(twoDBuffer)));
+  if (SUCCEEDED(hr)) {
+    hr = twoDBuffer->Lock2D(&data, &stride);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  } else {
+    hr = buffer->Lock(&data, nullptr, nullptr);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+    // No 2D buffer: fall back to the stride computed in
+    // ConfigureVideoFrameGeometry().
+    stride = mVideoStride;
+  }
+
+  // YV12, planar format: [YYYY....][VVVV....][UUUU....]
+  // i.e., Y, then V, then U.
+  VideoData::YCbCrBuffer b;
+
+  uint32_t videoWidth = mImageSize.width;
+  uint32_t videoHeight = mImageSize.height;
+
+  // Y (Y') plane
+  b.mPlanes[0].mData = data;
+  b.mPlanes[0].mStride = stride;
+  b.mPlanes[0].mHeight = videoHeight;
+  b.mPlanes[0].mWidth = videoWidth;
+  b.mPlanes[0].mOffset = 0;
+  b.mPlanes[0].mSkip = 0;
+
+  // The V and U planes are stored 16-row-aligned, so we need to add padding
+  // to the row heights to ensure the Y'CbCr planes are referenced properly.
+  uint32_t padding = 0;
+  if (videoHeight % 16 != 0) {
+    padding = 16 - (videoHeight % 16);
+  }
+  uint32_t y_size = stride * (videoHeight + padding);
+  uint32_t v_size = stride * (videoHeight + padding) / 4;
+  uint32_t halfStride = (stride + 1) / 2;
+  uint32_t halfHeight = (videoHeight + 1) / 2;
+  uint32_t halfWidth = (videoWidth + 1) / 2;
+
+  // U plane (Cb)
+  b.mPlanes[1].mData = data + y_size + v_size;
+  b.mPlanes[1].mStride = halfStride;
+  b.mPlanes[1].mHeight = halfHeight;
+  b.mPlanes[1].mWidth = halfWidth;
+  b.mPlanes[1].mOffset = 0;
+  b.mPlanes[1].mSkip = 0;
+
+  // V plane (Cr)
+  b.mPlanes[2].mData = data + y_size;
+  b.mPlanes[2].mStride = halfStride;
+  b.mPlanes[2].mHeight = halfHeight;
+  b.mPlanes[2].mWidth = halfWidth;
+  b.mPlanes[2].mOffset = 0;
+  b.mPlanes[2].mSkip = 0;
+
+  media::TimeUnit pts = GetSampleTime(aSample);
+  NS_ENSURE_TRUE(pts.IsValid(), E_FAIL);
+  media::TimeUnit duration = GetSampleDuration(aSample);
+  NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
+  nsIntRect pictureRegion = mVideoInfo.ScaledImageRect(videoWidth, videoHeight);
+
+  LayersBackend backend = GetCompositorBackendType(mKnowsCompositor);
+  if (backend != LayersBackend::LAYERS_D3D9 &&
+      backend != LayersBackend::LAYERS_D3D11) {
+    // Non-D3D compositor: copy the pixel data out, then unlock the buffer.
+    RefPtr<VideoData> v =
+      VideoData::CreateAndCopyData(mVideoInfo,
+                                   mImageContainer,
+                                   aStreamOffset,
+                                   pts.ToMicroseconds(),
+                                   duration.ToMicroseconds(),
+                                   b,
+                                   false,
+                                   -1,
+                                   pictureRegion);
+    if (twoDBuffer) {
+      twoDBuffer->Unlock2D();
+    } else {
+      buffer->Unlock();
+    }
+    v.forget(aOutVideoData);
+    return S_OK;
+  }
+
+  // D3D compositor: hand the still-locked buffer to an IMFYCbCrImage.
+  // NOTE(review): the buffer is not unlocked on this path — presumably
+  // IMFYCbCrImage takes over the lock's lifetime; confirm.
+  RefPtr<layers::PlanarYCbCrImage> image =
+    new IMFYCbCrImage(buffer, twoDBuffer);
+
+  VideoData::SetVideoDataToImage(image,
+                                 mVideoInfo,
+                                 b,
+                                 pictureRegion,
+                                 false);
+
+  RefPtr<VideoData> v =
+    VideoData::CreateFromImage(mVideoInfo,
+                               aStreamOffset,
+                               pts.ToMicroseconds(),
+                               duration.ToMicroseconds(),
+                               image.forget(),
+                               false,
+                               -1,
+                               pictureRegion);
+
+  v.forget(aOutVideoData);
+  return S_OK;
+}
+
+HRESULT
+WMFVideoMFTManager::CreateD3DVideoFrame(IMFSample* aSample,
+                                        int64_t aStreamOffset,
+                                        VideoData** aOutVideoData)
+{
+  // Wraps a DXVA-decoded sample: DXVA2Manager copies the decoded surface
+  // into a layers::Image, which is then wrapped in a VideoData.
+  NS_ENSURE_TRUE(aSample, E_POINTER);
+  NS_ENSURE_TRUE(aOutVideoData, E_POINTER);
+  NS_ENSURE_TRUE(mDXVA2Manager, E_ABORT);
+  NS_ENSURE_TRUE(mUseHwAccel, E_ABORT);
+
+  *aOutVideoData = nullptr;
+  HRESULT hr;
+
+  nsIntRect pictureRegion =
+    mVideoInfo.ScaledImageRect(mImageSize.width, mImageSize.height);
+  RefPtr<Image> image;
+  hr = mDXVA2Manager->CopyToImage(aSample,
+                                  pictureRegion,
+                                  getter_AddRefs(image));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  NS_ENSURE_TRUE(image, E_FAIL);
+
+  media::TimeUnit pts = GetSampleTime(aSample);
+  NS_ENSURE_TRUE(pts.IsValid(), E_FAIL);
+  media::TimeUnit duration = GetSampleDuration(aSample);
+  NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
+  RefPtr<VideoData> v = VideoData::CreateFromImage(mVideoInfo,
+                                                   aStreamOffset,
+                                                   pts.ToMicroseconds(),
+                                                   duration.ToMicroseconds(),
+                                                   image.forget(),
+                                                   false,
+                                                   -1,
+                                                   pictureRegion);
+
+  NS_ENSURE_TRUE(v, E_FAIL);
+  v.forget(aOutVideoData);
+
+  return S_OK;
+}
+
+// Blocks until decoded sample is produced by the decoder.
+HRESULT
+WMFVideoMFTManager::Output(int64_t aStreamOffset,
+                           RefPtr<MediaData>& aOutData)
+{
+  RefPtr<IMFSample> sample;
+  HRESULT hr;
+  aOutData = nullptr;
+  int typeChangeCount = 0;
+  // Snapshot and reset drain state: the single-sample pts/duration fixup
+  // below only applies to the first Output() after a Drain().
+  bool wasDraining = mDraining;
+  int64_t sampleCount = mSamplesCount;
+  if (wasDraining) {
+    mSamplesCount = 0;
+    mDraining = false;
+  }
+
+  media::TimeUnit pts;
+  media::TimeUnit duration;
+
+  // Loop until we decode a sample, or an unexpected error that we can't
+  // handle occurs.
+  while (true) {
+    hr = mDecoder->Output(&sample);
+    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+      return MF_E_TRANSFORM_NEED_MORE_INPUT;
+    }
+    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
+      // Video stream output type change. Probably a geometric aperture
+      // change. Reconfigure the video geometry, so that we output the
+      // correct size frames.
+      MOZ_ASSERT(!sample);
+      hr = ConfigureVideoFrameGeometry();
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+      // Catch infinite loops, but some decoders perform at least 2 stream
+      // changes on consecutive calls, so be permissive.
+      // 100 is arbitrarily > 2.
+      NS_ENSURE_TRUE(typeChangeCount < 100, MF_E_TRANSFORM_STREAM_CHANGE);
+      // Loop back and try decoding again...
+      ++typeChangeCount;
+      continue;
+    }
+    if (SUCCEEDED(hr)) {
+      if (!sample) {
+        LOG("Video MFTDecoder returned success but no output!");
+        // On some machines/input the MFT returns success but doesn't output
+        // a video frame. If we detect this, try again, but only up to a
+        // point; after 250 failures, give up. Note we count all failures
+        // over the life of the decoder, as we may end up exiting with a
+        // NEED_MORE_INPUT and coming back to hit the same error. So just
+        // counting with a local variable (like typeChangeCount does) may
+        // not work in this situation.
+        ++mNullOutputCount;
+        if (mNullOutputCount > 250) {
+          LOG("Excessive Video MFTDecoder returning success but no output; giving up");
+          mGotExcessiveNullOutput = true;
+          return E_FAIL;
+        }
+        continue;
+      }
+      pts = GetSampleTime(sample);
+      duration = GetSampleDuration(sample);
+      if (!pts.IsValid() || !duration.IsValid()) {
+        return E_FAIL;
+      }
+      if (wasDraining && sampleCount == 1 && pts == media::TimeUnit()) {
+        // WMF is unable to calculate a duration if only a single sample
+        // was parsed. Additionally, the pts always comes out at 0 under those
+        // circumstances.
+        // Seeing that we've only fed the decoder a single frame, the pts
+        // and duration are known, it's of the last sample.
+        pts = media::TimeUnit::FromMicroseconds(mLastTime);
+        duration = media::TimeUnit::FromMicroseconds(mLastDuration);
+      }
+      if (mSeekTargetThreshold.isSome()) {
+        if ((pts + duration) < mSeekTargetThreshold.ref()) {
+          LOG("Dropping video frame which pts is smaller than seek target.");
+          // It is necessary to clear the pointer to release the previous output
+          // buffer.
+          sample = nullptr;
+          continue;
+        }
+        mSeekTargetThreshold.reset();
+      }
+      break;
+    }
+    // Else unexpected error, assert, and bail.
+    NS_WARNING("WMFVideoMFTManager::Output() unexpected error");
+    return hr;
+  }
+
+  // Wrap the decoded sample in a VideoData via the hardware or software path.
+  RefPtr<VideoData> frame;
+  if (mUseHwAccel) {
+    hr = CreateD3DVideoFrame(sample, aStreamOffset, getter_AddRefs(frame));
+  } else {
+    hr = CreateBasicVideoFrame(sample, aStreamOffset, getter_AddRefs(frame));
+  }
+  // Frame should be non null only when we succeeded.
+  MOZ_ASSERT((frame != nullptr) == SUCCEEDED(hr));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  NS_ENSURE_TRUE(frame, E_FAIL);
+
+  aOutData = frame;
+  // Set the potentially corrected pts and duration.
+  aOutData->mTime = pts.ToMicroseconds();
+  aOutData->mDuration = duration.ToMicroseconds();
+
+  if (mNullOutputCount) {
+    // Telemetry: the decoder recovered after emitting null outputs.
+    mGotValidOutputAfterNullOutput = true;
+  }
+
+  return S_OK;
+}
+
+void
+WMFVideoMFTManager::Shutdown()
+{
+  // Release the MFT; DXVA/D3D9 objects are released on the main thread.
+  mDecoder = nullptr;
+  DeleteOnMainThread(mDXVA2Manager);
+}
+
+bool
+WMFVideoMFTManager::IsHardwareAccelerated(nsACString& aFailureReason) const
+{
+  // Always report the recorded failure/status string; hardware acceleration
+  // counts only while a decoder exists and DXVA is actually attached.
+  aFailureReason = mDXVAFailureReason;
+  const bool haveDecoder = !!mDecoder;
+  return haveDecoder && mUseHwAccel;
+}
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/WMFVideoMFTManager.h b/dom/media/platforms/wmf/WMFVideoMFTManager.h
new file mode 100644
index 0000000000..b8dfa63365
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.h
@@ -0,0 +1,125 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(WMFVideoMFTManager_h_)
+#define WMFVideoMFTManager_h_
+
+#include "WMF.h"
+#include "MFTDecoder.h"
+#include "nsAutoPtr.h"
+#include "nsRect.h"
+#include "WMFMediaDataDecoder.h"
+#include "mozilla/RefPtr.h"
+
+namespace mozilla {
+
+class DXVA2Manager;
+
+// Manages a Windows Media Foundation Transform (MFT) that decodes H264, VP8
+// or VP9 video, optionally using DXVA hardware acceleration. Owned and driven
+// by WMFMediaDataDecoder via the MFTManager interface.
+//
+// Fix: members other than mLastTime previously had no default initializer,
+// so a path that read e.g. mLastDuration or mUseHwAccel before the
+// constructor/Input() assigned them would read an indeterminate value. All
+// scalar members now carry in-class initializers (the constructor's own
+// assignments, if any, still take precedence).
+class WMFVideoMFTManager : public MFTManager {
+public:
+  WMFVideoMFTManager(const VideoInfo& aConfig,
+                     layers::KnowsCompositor* aKnowsCompositor,
+                     layers::ImageContainer* aImageContainer,
+                     bool aDXVAEnabled);
+  ~WMFVideoMFTManager();
+
+  // Creates and configures the decoder MFT; must succeed before use.
+  bool Init();
+
+  // Feeds one compressed sample to the decoder.
+  HRESULT Input(MediaRawData* aSample) override;
+
+  // Attempts to pull one decoded frame; returns MF_E_TRANSFORM_NEED_MORE_INPUT
+  // when the decoder needs more data (see WMFMediaDataDecoder).
+  HRESULT Output(int64_t aStreamOffset, RefPtr<MediaData>& aOutput) override;
+
+  void Shutdown() override;
+
+  bool IsHardwareAccelerated(nsACString& aFailureReason) const override;
+
+  TrackInfo::TrackType GetType() override {
+    return TrackInfo::kVideoTrack;
+  }
+
+  const char* GetDescriptionName() const override
+  {
+    nsCString failureReason;
+    return IsHardwareAccelerated(failureReason)
+      ? "wmf hardware video decoder" : "wmf software video decoder";
+  }
+
+  // Resets decode state accumulated since the last flush/seek.
+  void Flush() override
+  {
+    MFTManager::Flush();
+    mDraining = false;
+    mSamplesCount = 0;
+  }
+
+  void Drain() override
+  {
+    MFTManager::Drain();
+    mDraining = true;
+  }
+
+private:
+
+  bool ValidateVideoInfo();
+
+  bool InitializeDXVA(bool aForceD3D9);
+
+  bool InitInternal(bool aForceD3D9);
+
+  HRESULT ConfigureVideoFrameGeometry();
+
+  HRESULT CreateBasicVideoFrame(IMFSample* aSample,
+                                int64_t aStreamOffset,
+                                VideoData** aOutVideoData);
+
+  HRESULT CreateD3DVideoFrame(IMFSample* aSample,
+                              int64_t aStreamOffset,
+                              VideoData** aOutVideoData);
+
+  HRESULT SetDecoderMediaTypes();
+
+  bool CanUseDXVA(IMFMediaType* aType);
+
+  // Video frame geometry.
+  VideoInfo mVideoInfo;
+  uint32_t mVideoStride = 0;
+  nsIntSize mImageSize;
+
+  RefPtr<layers::ImageContainer> mImageContainer;
+  RefPtr<layers::KnowsCompositor> mKnowsCompositor;
+  nsAutoPtr<DXVA2Manager> mDXVA2Manager;
+
+  // Most recent input sample; used to re-feed the decoder after a format
+  // change.
+  RefPtr<IMFSample> mLastInput;
+  // Duration (microseconds) of the last input sample. NOTE(review): stored
+  // as float, so very long durations lose precision — consider int64_t.
+  float mLastDuration = 0.0f;
+  // Presentation time (microseconds) of the last input sample; used to
+  // correct the pts when WMF reports 0 for a single-sample stream.
+  int64_t mLastTime = 0;
+  bool mDraining = false;
+  // Count of samples fed since the last flush.
+  int64_t mSamplesCount = 0;
+
+  // Whether the caller allows DXVA at all (mUseHwAccel records whether we
+  // actually managed to enable it).
+  bool mDXVAEnabled = false;
+  bool mUseHwAccel = false;
+
+  // Human-readable reason DXVA could not be used; surfaced via
+  // IsHardwareAccelerated().
+  nsCString mDXVAFailureReason;
+
+  enum StreamType {
+    Unknown,
+    H264,
+    VP8,
+    VP9
+  };
+
+  StreamType mStreamType = Unknown;
+
+  // Decoder CLSID / media subtype GUID corresponding to mStreamType.
+  const GUID& GetMFTGUID();
+  const GUID& GetMediaSubtypeGUID();
+
+  // Number of consecutive null outputs observed, and whether we ever
+  // recovered (or gave up) after them — used for decoder health telemetry.
+  uint32_t mNullOutputCount = 0;
+  bool mGotValidOutputAfterNullOutput = false;
+  bool mGotExcessiveNullOutput = false;
+  bool mIsValid = false;
+};
+
+} // namespace mozilla
+
+#endif // WMFVideoMFTManager_h_
diff --git a/dom/media/platforms/wmf/moz.build b/dom/media/platforms/wmf/moz.build
new file mode 100644
index 0000000000..fa966bea23
--- /dev/null
+++ b/dom/media/platforms/wmf/moz.build
@@ -0,0 +1,34 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Headers exported for use by the rest of the tree.
+EXPORTS += [
+    'DXVA2Manager.h',
+    'MFTDecoder.h',
+    'WMF.h',
+    'WMFAudioMFTManager.h',
+    'WMFDecoderModule.h',
+    'WMFMediaDataDecoder.h',
+    'WMFUtils.h',
+    'WMFVideoMFTManager.h',
+]
+# Compiled together in unified-build translation units.
+UNIFIED_SOURCES += [
+    'DXVA2Manager.cpp',
+    'MFTDecoder.cpp',
+    'WMFAudioMFTManager.cpp',
+    'WMFDecoderModule.cpp',
+    'WMFMediaDataDecoder.cpp',
+    'WMFVideoMFTManager.cpp',
+]
+
+# WMFUtils.cpp is deliberately kept out of UNIFIED_SOURCES — presumably it
+# conflicts with another unified source (e.g. Windows header macros); confirm
+# before folding it in.
+SOURCES += [
+    'WMFUtils.cpp',
+]
+
+include('/ipc/chromium/chromium-config.mozbuild')
+
+FINAL_LIBRARY = 'xul'
+
+# Needed because some of these sources include cairo headers.
+CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS']