From 18849ef0aec6a6738dfb05f3000bb5f052ee6c5d Mon Sep 17 00:00:00 2001
From: Narcis Beleuzu
Date: Sun, 7 Oct 2018 02:03:46 +0300
Subject: [PATCH] Backed out 6 changesets (bug 1495025) for webgl2 failures on
 test_2_conformance2__textures__misc__npot-video-sizing.html

Backed out changeset 24d67618f6b9 (bug 1495025)
Backed out changeset 68efa7588ba8 (bug 1495025)
Backed out changeset 9f59a50dcc6d (bug 1495025)
Backed out changeset 7fd1f6103294 (bug 1495025)
Backed out changeset f1afe7e2a9e3 (bug 1495025)
Backed out changeset c62823871aca (bug 1495025)
---
 dom/media/MediaData.cpp                  |  14 +-
 dom/media/platforms/wmf/DXVA2Manager.cpp | 154 ++++++++----------
 dom/media/platforms/wmf/DXVA2Manager.h   |   5 +-
 dom/media/platforms/wmf/MFTDecoder.cpp   |  60 +++----
 dom/media/platforms/wmf/MFTDecoder.h     |  19 +--
 .../platforms/wmf/WMFAudioMFTManager.cpp |   5 +-
 .../platforms/wmf/WMFVideoMFTManager.cpp |  99 ++++-------
 gfx/2d/Tools.h                           |   9 -
 gfx/2d/Types.h                           |   1 -
 gfx/layers/D3D11ShareHandleImage.cpp     |  30 ++--
 gfx/layers/D3D11ShareHandleImage.h       |   7 +-
 gfx/layers/client/TextureClient.cpp      |   7 +
 gfx/webrender_bindings/WebRenderTypes.h  |   2 -
 13 files changed, 156 insertions(+), 256 deletions(-)

diff --git a/dom/media/MediaData.cpp b/dom/media/MediaData.cpp
index 95d6f3860207..ef393760fff7 100644
--- a/dom/media/MediaData.cpp
+++ b/dom/media/MediaData.cpp
@@ -323,18 +323,14 @@ VideoData::CreateAndCopyData(const VideoInfo& aInfo,
 #if XP_WIN
   // We disable this code path on Windows version earlier of Windows 8 due to
   // intermittent crashes with old drivers. See bug 1405110.
-  // D3D11YCbCrImage can only handle YCbCr images using 3 non-interleaved planes
-  // non-zero mSkip value indicates that one of the plane would be interleaved.
-  if (IsWin8OrLater() && !XRE_IsParentProcess() && aAllocator &&
-      aAllocator->SupportsD3D11() && aBuffer.mPlanes[0].mSkip == 0 &&
-      aBuffer.mPlanes[1].mSkip == 0 && aBuffer.mPlanes[2].mSkip == 0) {
+  if (IsWin8OrLater() && !XRE_IsParentProcess() &&
+      aAllocator && aAllocator->SupportsD3D11()) {
     RefPtr<layers::D3D11YCbCrImage> d3d11Image = new layers::D3D11YCbCrImage();
     PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);
     if (d3d11Image->SetData(layers::ImageBridgeChild::GetSingleton()
-                              ? layers::ImageBridgeChild::GetSingleton().get()
-                              : aAllocator,
-                            aContainer,
-                            data)) {
+                            ? layers::ImageBridgeChild::GetSingleton().get()
+                            : aAllocator,
+                            aContainer, data)) {
       v->mImage = d3d11Image;
       return v.forget();
     }
diff --git a/dom/media/platforms/wmf/DXVA2Manager.cpp b/dom/media/platforms/wmf/DXVA2Manager.cpp
index d9376ce4d7a1..9750f59a1d48 100644
--- a/dom/media/platforms/wmf/DXVA2Manager.cpp
+++ b/dom/media/platforms/wmf/DXVA2Manager.cpp
@@ -620,11 +620,10 @@ public:
                        const gfx::IntRect& aRegion,
                        Image** aOutImage) override;
 
-  HRESULT CopyToBGRATexture(ID3D11Texture2D* aInTexture,
-                            const GUID& aSubType,
-                            ID3D11Texture2D** aOutTexture) override;
+  virtual HRESULT CopyToBGRATexture(ID3D11Texture2D *aInTexture,
+                                    ID3D11Texture2D** aOutTexture);
 
-  HRESULT ConfigureForSize(IMFMediaType* aInputType) override;
+  HRESULT ConfigureForSize(uint32_t aWidth, uint32_t aHeight) override;
 
   bool IsD3D11() override { return true; }
 
@@ -653,8 +652,7 @@ private:
   uint32_t mWidth = 0;
   uint32_t mHeight = 0;
   UINT mDeviceManagerToken = 0;
-  RefPtr<IMFMediaType> mInputType;
-  GUID mInputSubType;
+  bool mConfiguredForSize = false;
 };
 
 bool
@@ -921,37 +919,18 @@ D3D11DXVA2Manager::CopyToImage(IMFSample* aVideoSample,
   NS_ENSURE_TRUE(aOutImage, E_POINTER);
   MOZ_ASSERT(mTextureClientAllocator);
 
-  RefPtr<D3D11ShareHandleImage> image = new D3D11ShareHandleImage(
-    gfx::IntSize(mWidth, mHeight), aRegion, mInputSubType);
-
-  // Retrieve the DXGI_FORMAT for the current video sample.
-  RefPtr<IMFMediaBuffer> buffer;
-  HRESULT hr = aVideoSample->GetBufferByIndex(0, getter_AddRefs(buffer));
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-  RefPtr<IMFDXGIBuffer> dxgiBuf;
-  hr = buffer->QueryInterface((IMFDXGIBuffer**)getter_AddRefs(dxgiBuf));
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-  RefPtr<ID3D11Texture2D> tex;
-  hr = dxgiBuf->GetResource(__uuidof(ID3D11Texture2D), getter_AddRefs(tex));
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-  D3D11_TEXTURE2D_DESC inDesc;
-  tex->GetDesc(&inDesc);
-
+  RefPtr<D3D11ShareHandleImage> image =
+    new D3D11ShareHandleImage(gfx::IntSize(mWidth, mHeight), aRegion);
   bool ok = image->AllocateTexture(mTextureClientAllocator, mDevice);
   NS_ENSURE_TRUE(ok, E_FAIL);
 
-  RefPtr<TextureClient> client =
-    image->GetTextureClient(ImageBridgeChild::GetSingleton().get());
+  RefPtr<TextureClient> client = image->GetTextureClient(ImageBridgeChild::GetSingleton().get());
  NS_ENSURE_TRUE(client, E_FAIL);
 
-  RefPtr<ID3D11Texture2D> texture = image->GetTexture();
-  D3D11_TEXTURE2D_DESC outDesc;
-  texture->GetDesc(&outDesc);
-  RefPtr<IDXGIKeyedMutex> mutex;
+  HRESULT hr = S_OK;
+  RefPtr<ID3D11Texture2D> texture = image->GetTexture();
+
+  RefPtr<IDXGIKeyedMutex> mutex;
   texture->QueryInterface((IDXGIKeyedMutex**)getter_AddRefs(mutex));
 
   {
@@ -964,15 +943,28 @@ D3D11DXVA2Manager::CopyToImage(IMFSample* aVideoSample,
     NS_ENSURE_TRUE(mSyncObject, E_FAIL);
   }
 
-  if (outDesc.Format == inDesc.Format) {
+  if (client && client->GetFormat() == SurfaceFormat::NV12) {
     // Our video frame is stored in a non-sharable ID3D11Texture2D. We need
     // to create a copy of that frame as a sharable resource, save its share
     // handle, and put that handle into the rendering pipeline.
+    RefPtr<IMFMediaBuffer> buffer;
+    hr = aVideoSample->GetBufferByIndex(0, getter_AddRefs(buffer));
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    RefPtr<IMFDXGIBuffer> dxgiBuf;
+    hr = buffer->QueryInterface((IMFDXGIBuffer**)getter_AddRefs(dxgiBuf));
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    RefPtr<ID3D11Texture2D> tex;
+    hr = dxgiBuf->GetResource(__uuidof(ID3D11Texture2D), getter_AddRefs(tex));
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
     UINT index;
     dxgiBuf->GetSubresourceIndex(&index);
     mContext->CopySubresourceRegion(texture, 0, 0, 0, 0, tex, index, nullptr);
   } else {
+    // Our video sample is in NV12 format but our output texture is in BGRA.
     // Use MFT to do color conversion.
     hr = E_FAIL;
     mozilla::mscom::EnsureMTA(
@@ -1005,8 +997,7 @@ D3D11DXVA2Manager::CopyToImage(IMFSample* aVideoSample,
 }
 
 HRESULT
-D3D11DXVA2Manager::CopyToBGRATexture(ID3D11Texture2D* aInTexture,
-                                     const GUID& aSubType,
+D3D11DXVA2Manager::CopyToBGRATexture(ID3D11Texture2D *aInTexture,
                                      ID3D11Texture2D** aOutTexture)
 {
   NS_ENSURE_TRUE(aInTexture, E_POINTER);
@@ -1019,32 +1010,8 @@ D3D11DXVA2Manager::CopyToBGRATexture(ID3D11Texture2D* aInTexture,
   CD3D11_TEXTURE2D_DESC desc;
   aInTexture->GetDesc(&desc);
-
-  if (!mInputType || desc.Width != mWidth || desc.Height != mHeight) {
-    RefPtr<IMFMediaType> inputType;
-    hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
-    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-    hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
-    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-    hr = inputType->SetGUID(MF_MT_SUBTYPE, aSubType);
-    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-    hr =
-      MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, desc.Width, desc.Height);
-    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-    hr =
-      inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
-    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-    hr = inputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
-    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-    hr = ConfigureForSize(inputType);
-    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-  }
+  hr = ConfigureForSize(desc.Width, desc.Height);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
   RefPtr<IDXGIKeyedMutex> mutex;
   inTexture->QueryInterface((IDXGIKeyedMutex**)getter_AddRefs(mutex));
@@ -1106,23 +1073,47 @@ D3D11DXVA2Manager::CopyToBGRATexture(ID3D11Texture2D* aInTexture,
   return S_OK;
 }
 
-HRESULT
-D3D11DXVA2Manager::ConfigureForSize(IMFMediaType* aInputType)
+HRESULT ConfigureOutput(IMFMediaType* aOutput, void* aData)
 {
+  HRESULT hr =
+    aOutput->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-  if (aInputType == mInputType) {
-    // If the media type hasn't changed, don't reconfigure.
+  hr = aOutput->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  gfx::IntSize* size = reinterpret_cast<gfx::IntSize*>(aData);
+  hr = MFSetAttributeSize(aOutput, MF_MT_FRAME_SIZE, size->width, size->height);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  return S_OK;
+}
+
+HRESULT
+D3D11DXVA2Manager::ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
+{
+  if (mConfiguredForSize && aWidth == mWidth && aHeight == mHeight) {
+    // If the size hasn't changed, don't reconfigure.
     return S_OK;
   }
 
-  UINT32 width = 0, height = 0;
-  HRESULT hr = MFGetAttributeSize(aInputType, MF_MT_FRAME_SIZE, &width, &height);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-  mWidth = width;
-  mHeight = height;
+  mWidth = aWidth;
+  mHeight = aHeight;
 
-  GUID subType = { 0 };
-  hr = aInputType->GetGUID(MF_MT_SUBTYPE, &subType);
+  RefPtr<IMFMediaType> inputType;
+  HRESULT hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = inputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = inputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
   RefPtr<IMFAttributes> attr;
@@ -1136,6 +1127,9 @@ D3D11DXVA2Manager::ConfigureForSize(IMFMediaType* aInputType)
   hr = attr->SetUINT32(MF_LOW_LATENCY, FALSE);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
+  hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, aWidth, aHeight);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
   RefPtr<IMFMediaType> outputType;
   hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
@@ -1146,25 +1140,15 @@ D3D11DXVA2Manager::ConfigureForSize(IMFMediaType* aInputType)
   hr = outputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_ARGB32);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
+  gfx::IntSize size(mWidth, mHeight);
   hr = E_FAIL;
   mozilla::mscom::EnsureMTA([&]() -> void {
-    hr = mTransform->SetMediaTypes(
-      aInputType, outputType, [this](IMFMediaType* aOutput) {
-        HRESULT hr = aOutput->SetUINT32(MF_MT_INTERLACE_MODE,
-                                        MFVideoInterlace_Progressive);
-        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-        hr = aOutput->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
-        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-        hr = MFSetAttributeSize(aOutput, MF_MT_FRAME_SIZE, mWidth, mHeight);
-        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-        return S_OK;
-      });
+    hr =
+      mTransform->SetMediaTypes(inputType, outputType, ConfigureOutput, &size);
   });
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-  mInputType = aInputType;
-  mInputSubType = subType;
+  mConfiguredForSize = true;
 
   return S_OK;
 }
diff --git a/dom/media/platforms/wmf/DXVA2Manager.h b/dom/media/platforms/wmf/DXVA2Manager.h
index ba207a15ae3f..41eea5066671 100644
--- a/dom/media/platforms/wmf/DXVA2Manager.h
+++ b/dom/media/platforms/wmf/DXVA2Manager.h
@@ -46,8 +46,7 @@ public:
                              const gfx::IntRect& aRegion,
                              layers::Image** aOutImage) = 0;
 
-  virtual HRESULT CopyToBGRATexture(ID3D11Texture2D* aInTexture,
-                                    const GUID& aSubType,
+  virtual HRESULT CopyToBGRATexture(ID3D11Texture2D *aInTexture,
                                     ID3D11Texture2D** aOutTexture)
   {
     // Not implemented!
@@ -55,7 +54,7 @@ public:
     return E_FAIL;
   }
 
-  virtual HRESULT ConfigureForSize(IMFMediaType* aInputType)
+  virtual HRESULT ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
   {
     return S_OK;
   }
diff --git a/dom/media/platforms/wmf/MFTDecoder.cpp b/dom/media/platforms/wmf/MFTDecoder.cpp
index 934c3cc1560a..b383a6edd275 100644
--- a/dom/media/platforms/wmf/MFTDecoder.cpp
+++ b/dom/media/platforms/wmf/MFTDecoder.cpp
@@ -85,21 +85,17 @@ MFTDecoder::Create(HMODULE aDecoderDLL, const GUID& aMFTClsID)
 HRESULT
 MFTDecoder::SetMediaTypes(IMFMediaType* aInputType,
                           IMFMediaType* aOutputType,
-                          std::function<HRESULT(IMFMediaType*)>&& aCallback)
+                          ConfigureOutputCallback aCallback,
+                          void* aData)
 {
   MOZ_ASSERT(mscom::IsCurrentThreadMTA());
 
+  mOutputType = aOutputType;
   // Set the input type to the one the caller gave us...
   HRESULT hr = mDecoder->SetInputType(0, aInputType, 0);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-  GUID currentSubtype = {0};
-  hr = aOutputType->GetGUID(MF_MT_SUBTYPE, &currentSubtype);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-  hr = SetDecoderOutputType(currentSubtype,
-                            aOutputType,
-                            std::move(aCallback));
+  hr = SetDecoderOutputType(true /* match all attributes */, aCallback, aData);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
   hr = mDecoder->GetInputStreamInfo(0, &mInputStreamInfo);
@@ -125,33 +121,16 @@ MFTDecoder::GetAttributes()
 }
 
 HRESULT
-MFTDecoder::FindDecoderOutputType()
-{
-  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
-  MOZ_ASSERT(mOutputType, "SetDecoderTypes must have been called once");
-
-  return FindDecoderOutputTypeWithSubtype(mOutputSubType);
-}
-
-HRESULT
-MFTDecoder::FindDecoderOutputTypeWithSubtype(const GUID& aSubType)
-{
-  return SetDecoderOutputType(
-    aSubType, nullptr, [](IMFMediaType*) { return S_OK; });
-}
-
-HRESULT
-MFTDecoder::SetDecoderOutputType(
-  const GUID& aSubType,
-  IMFMediaType* aTypeToUse,
-  std::function<HRESULT(IMFMediaType*)>&& aCallback)
+MFTDecoder::SetDecoderOutputType(bool aMatchAllAttributes,
+                                 ConfigureOutputCallback aCallback,
+                                 void* aData)
 {
   MOZ_ASSERT(mscom::IsCurrentThreadMTA());
   NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
 
-  if (!aTypeToUse) {
-    aTypeToUse = mOutputType;
-  }
+  GUID currentSubtype = {0};
+  HRESULT hr = mOutputType->GetGUID(MF_MT_SUBTYPE, &currentSubtype);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
   // Iterate the enumerate the output types, until we find one compatible
   // with what we need.
@@ -160,13 +139,21 @@ MFTDecoder::SetDecoderOutputType(
   while (SUCCEEDED(mDecoder->GetOutputAvailableType(
     0, typeIndex++, getter_AddRefs(outputType)))) {
     GUID outSubtype = {0};
-    HRESULT hr = outputType->GetGUID(MF_MT_SUBTYPE, &outSubtype);
+    hr = outputType->GetGUID(MF_MT_SUBTYPE, &outSubtype);
     NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-    if (aSubType == outSubtype) {
-      hr = aCallback(outputType);
-      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+    BOOL resultMatch = currentSubtype == outSubtype;
+    if (resultMatch && aMatchAllAttributes) {
+      hr = mOutputType->Compare(outputType, MF_ATTRIBUTES_MATCH_OUR_ITEMS,
+                                &resultMatch);
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+    }
+    if (resultMatch == TRUE) {
+      if (aCallback) {
+        hr = aCallback(outputType, aData);
+        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+      }
       hr = mDecoder->SetOutputType(0, outputType, 0);
       NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
@@ -176,9 +163,6 @@ MFTDecoder::SetDecoderOutputType(
       mMFTProvidesOutputSamples =
        IsFlagSet(mOutputStreamInfo.dwFlags, MFT_OUTPUT_STREAM_PROVIDES_SAMPLES);
 
-      mOutputType = outputType;
-      mOutputSubType = outSubtype;
-
       return S_OK;
     }
     outputType = nullptr;
diff --git a/dom/media/platforms/wmf/MFTDecoder.h b/dom/media/platforms/wmf/MFTDecoder.h
index cf1e69f693cc..fb79cede6e7e 100644
--- a/dom/media/platforms/wmf/MFTDecoder.h
+++ b/dom/media/platforms/wmf/MFTDecoder.h
@@ -38,10 +38,12 @@ public:
   //  - aOutputType needs at least major and minor types set.
   //    This is used to select the matching output type out
   //    of all the available output types of the MFT.
+  typedef HRESULT (*ConfigureOutputCallback)(IMFMediaType* aOutputType,
+                                             void* aData);
   HRESULT SetMediaTypes(IMFMediaType* aInputType,
                         IMFMediaType* aOutputType,
-                        std::function<HRESULT(IMFMediaType*)>&& aCallback =
-                          [](IMFMediaType* aOutput) { return S_OK; });
+                        ConfigureOutputCallback aCallback = nullptr,
+                        void* aData = nullptr);
 
   // Returns the MFT's IMFAttributes object.
   already_AddRefed<IMFAttributes> GetAttributes();
@@ -49,7 +51,6 @@ public:
   // Retrieves the media type being output. This may not be valid until
   // the first sample is decoded.
   HRESULT GetOutputMediaType(RefPtr<IMFMediaType>& aMediaType);
-  const GUID& GetOutputMediaSubType() const { return mOutputSubType; }
 
   // Submits data into the MFT for processing.
   //
@@ -87,15 +88,10 @@ public:
   // Sends a message to the MFT.
   HRESULT SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData);
 
-  HRESULT FindDecoderOutputTypeWithSubtype(const GUID& aSubType);
-  HRESULT FindDecoderOutputType();
+  HRESULT SetDecoderOutputType(bool aMatchAllAttributes,
+                               ConfigureOutputCallback aCallback,
+                               void* aData);
 
 private:
-  // Will search a suitable MediaType using aTypeToUse if set, if not will
-  // use the current mOutputType.
-  HRESULT SetDecoderOutputType(
-    const GUID& aSubType,
-    IMFMediaType* aTypeToUse,
-    std::function<HRESULT(IMFMediaType*)>&& aCallback);
   HRESULT CreateOutputSample(RefPtr<IMFSample>* aOutSample);
 
   MFT_INPUT_STREAM_INFO mInputStreamInfo;
@@ -104,7 +100,6 @@ private:
 
   RefPtr<IMFTransform> mDecoder;
 
   RefPtr<IMFMediaType> mOutputType;
-  GUID mOutputSubType;
 
   // True if the IMFTransform allocates the samples that it returns.
   bool mMFTProvidesOutputSamples = false;
diff --git a/dom/media/platforms/wmf/WMFAudioMFTManager.cpp b/dom/media/platforms/wmf/WMFAudioMFTManager.cpp
index 95ed388b9f7e..f47c5a3a5961 100644
--- a/dom/media/platforms/wmf/WMFAudioMFTManager.cpp
+++ b/dom/media/platforms/wmf/WMFAudioMFTManager.cpp
@@ -237,8 +237,9 @@ WMFAudioMFTManager::Output(int64_t aStreamOffset,
     return hr;
   }
   if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
-    hr = mDecoder->FindDecoderOutputType();
-    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+    hr = mDecoder->SetDecoderOutputType(true /* check all attribute */,
+                                        nullptr,
+                                        nullptr);
     hr = UpdateOutputType();
     NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
     // Catch infinite loops, but some decoders perform at least 2 stream
diff --git a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
index 5302061c47ac..2fabf75eaee6 100644
--- a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
@@ -686,7 +686,8 @@ WMFVideoMFTManager::InitInternal()
                     (mUseHwAccel ? "Yes" : "No"));
 
   if (mUseHwAccel) {
-    hr = mDXVA2Manager->ConfigureForSize(outputType);
+    hr = mDXVA2Manager->ConfigureForSize(mVideoInfo.ImageRect().width,
+                                         mVideoInfo.ImageRect().height);
     NS_ENSURE_TRUE(SUCCEEDED(hr),
                    MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                RESULT_DETAIL("Fail to configure image size for "
@@ -899,18 +900,8 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
     stride = mVideoStride;
   }
 
-  const GUID& subType = mDecoder->GetOutputMediaSubType();
-  MOZ_DIAGNOSTIC_ASSERT(subType == MFVideoFormat_YV12 ||
-                        subType == MFVideoFormat_P010 ||
-                        subType == MFVideoFormat_P016);
-  const gfx::ColorDepth colorDepth = subType == MFVideoFormat_YV12
-                                       ? gfx::ColorDepth::COLOR_8
-                                       : gfx::ColorDepth::COLOR_16;
-
-  // YV12, planar format (3 planes): [YYYY....][VVVV....][UUUU....]
+  // YV12, planar format: [YYYY....][VVVV....][UUUU....]
   //   i.e., Y, then V, then U.
-  // P010, P016 planar format (2 planes) [YYYY....][UVUV...]
-  //   See https://docs.microsoft.com/en-us/windows/desktop/medfound/10-bit-and-16-bit-yuv-video-formats
   VideoData::YCbCrBuffer b;
 
   uint32_t videoWidth = mImageSize.width;
@@ -932,43 +923,24 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
   uint32_t halfHeight = (videoHeight + 1) / 2;
   uint32_t halfWidth = (videoWidth + 1) / 2;
 
-  if (subType == MFVideoFormat_YV12) {
-    // U plane (Cb)
-    b.mPlanes[1].mData = data + y_size + v_size;
-    b.mPlanes[1].mStride = halfStride;
-    b.mPlanes[1].mHeight = halfHeight;
-    b.mPlanes[1].mWidth = halfWidth;
-    b.mPlanes[1].mOffset = 0;
-    b.mPlanes[1].mSkip = 0;
+  // U plane (Cb)
+  b.mPlanes[1].mData = data + y_size + v_size;
+  b.mPlanes[1].mStride = halfStride;
+  b.mPlanes[1].mHeight = halfHeight;
+  b.mPlanes[1].mWidth = halfWidth;
+  b.mPlanes[1].mOffset = 0;
+  b.mPlanes[1].mSkip = 0;
 
-    // V plane (Cr)
-    b.mPlanes[2].mData = data + y_size;
-    b.mPlanes[2].mStride = halfStride;
-    b.mPlanes[2].mHeight = halfHeight;
-    b.mPlanes[2].mWidth = halfWidth;
-    b.mPlanes[2].mOffset = 0;
-    b.mPlanes[2].mSkip = 0;
-  } else {
-    // U plane (Cb)
-    b.mPlanes[1].mData = data + y_size;
-    b.mPlanes[1].mStride = stride;
-    b.mPlanes[1].mHeight = halfHeight;
-    b.mPlanes[1].mWidth = halfWidth;
-    b.mPlanes[1].mOffset = 0;
-    b.mPlanes[1].mSkip = 1;
-
-    // V plane (Cr)
-    b.mPlanes[2].mData = data + y_size + sizeof(short);
-    b.mPlanes[2].mStride = stride;
-    b.mPlanes[2].mHeight = halfHeight;
-    b.mPlanes[2].mWidth = halfWidth;
-    b.mPlanes[2].mOffset = 0;
-    b.mPlanes[2].mSkip = 1;
-  }
+  // V plane (Cr)
+  b.mPlanes[2].mData = data + y_size;
+  b.mPlanes[2].mStride = halfStride;
+  b.mPlanes[2].mHeight = halfHeight;
+  b.mPlanes[2].mWidth = halfWidth;
+  b.mPlanes[2].mOffset = 0;
+  b.mPlanes[2].mSkip = 0;
 
   // YuvColorSpace
   b.mYUVColorSpace = mYUVColorSpace;
-  b.mColorDepth = colorDepth;
 
   TimeUnit pts = GetSampleTime(aSample);
   NS_ENSURE_TRUE(pts.IsValid(), E_FAIL);
@@ -977,8 +949,7 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
   gfx::IntRect pictureRegion =
     mVideoInfo.ScaledImageRect(videoWidth, videoHeight);
 
-  if (colorDepth != gfx::ColorDepth::COLOR_8 || !mKnowsCompositor ||
-      !mKnowsCompositor->SupportsD3D11() || !mIMFUsable) {
+  if (!mKnowsCompositor || !mKnowsCompositor->SupportsD3D11() || !mIMFUsable) {
     RefPtr<VideoData> v =
       VideoData::CreateAndCopyData(mVideoInfo,
                                    mImageContainer,
@@ -1089,35 +1060,21 @@ WMFVideoMFTManager::Output(int64_t aStreamOffset,
 
   if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
     MOZ_ASSERT(!sample);
-    // Video stream output type change, probably geometric aperture change or
-    // pixel type.
+    // Video stream output type change, probably geometric aperture change.
     // We must reconfigure the decoder output type.
-
-    // Attempt to find an appropriate OutputType, trying in order:
-    // if HW accelerated: NV12, P010, P016
-    // if SW: YV12, P010, P016
-    if (FAILED((hr = (mDecoder->FindDecoderOutputTypeWithSubtype(
-           mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12)))) &&
-        FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype(
-           MFVideoFormat_P010))) &&
-        FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype(
-           MFVideoFormat_P016)))) {
-      LOG("No suitable output format found");
-      return hr;
-    }
-
-    RefPtr<IMFMediaType> outputType;
-    hr = mDecoder->GetOutputMediaType(outputType);
+    hr = mDecoder->SetDecoderOutputType(false /* check all attribute */,
+                                        nullptr,
+                                        nullptr);
     NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-    if (mUseHwAccel) {
-      hr = mDXVA2Manager->ConfigureForSize(outputType);
-      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-    } else {
+    if (!mUseHwAccel) {
       // The stride may have changed, recheck for it.
+      RefPtr<IMFMediaType> outputType;
+      hr = mDecoder->GetOutputMediaType(outputType);
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
       mYUVColorSpace = GetYUVColorSpace(outputType);
-      hr = GetDefaultStride(
-        outputType, mVideoInfo.ImageRect().width, &mVideoStride);
+      hr = GetDefaultStride(outputType, mVideoInfo.ImageRect().width,
+                            &mVideoStride);
       NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
       UINT32 width = 0, height = 0;
diff --git a/gfx/2d/Tools.h b/gfx/2d/Tools.h
index cdd2ff08ae45..5fbb07646a2d 100644
--- a/gfx/2d/Tools.h
+++ b/gfx/2d/Tools.h
@@ -118,7 +118,6 @@ SurfaceFormatForColorDepth(ColorDepth aColorDepth)
       break;
     case ColorDepth::COLOR_10:
     case ColorDepth::COLOR_12:
-    case ColorDepth::COLOR_16:
       format = SurfaceFormat::A16;
       break;
     case ColorDepth::UNKNOWN:
@@ -140,9 +139,6 @@ BitDepthForColorDepth(ColorDepth aColorDepth)
     case ColorDepth::COLOR_12:
      depth = 12;
      break;
-    case ColorDepth::COLOR_16:
-      depth = 16;
-      break;
     case ColorDepth::UNKNOWN:
       MOZ_ASSERT_UNREACHABLE("invalid color depth value");
   }
@@ -162,9 +158,6 @@ ColorDepthForBitDepth(uint8_t aBitDepth)
     case 12:
      depth = ColorDepth::COLOR_12;
      break;
-    case 16:
-      depth = ColorDepth::COLOR_16;
-      break;
     default:
       MOZ_ASSERT_UNREACHABLE("invalid color depth value");
   }
@@ -186,8 +179,6 @@ RescalingFactorForColorDepth(ColorDepth aColorDepth)
     case ColorDepth::COLOR_12:
       factor = 16;
       break;
-    case ColorDepth::COLOR_16:
-      break;
     case ColorDepth::UNKNOWN:
       MOZ_ASSERT_UNREACHABLE("invalid color depth value");
   }
diff --git a/gfx/2d/Types.h b/gfx/2d/Types.h
index 6c791def8bde..1ace3d81820c 100644
--- a/gfx/2d/Types.h
+++ b/gfx/2d/Types.h
@@ -106,7 +106,6 @@ enum class ColorDepth : uint8_t {
   COLOR_8,
   COLOR_10,
   COLOR_12,
-  COLOR_16,
   UNKNOWN
 };
 
diff --git a/gfx/layers/D3D11ShareHandleImage.cpp b/gfx/layers/D3D11ShareHandleImage.cpp
index 7db10fc47089..29195fc98c4c 100644
--- a/gfx/layers/D3D11ShareHandleImage.cpp
+++ b/gfx/layers/D3D11ShareHandleImage.cpp
@@ -24,29 +24,22 @@ namespace layers {
 using namespace gfx;
 
 D3D11ShareHandleImage::D3D11ShareHandleImage(const gfx::IntSize& aSize,
-                                             const gfx::IntRect& aRect,
-                                             const GUID& aSourceFormat)
-  : Image(nullptr, ImageFormat::D3D11_SHARE_HANDLE_TEXTURE)
-  , mSize(aSize)
-  , mPictureRect(aRect)
-  , mSourceFormat(aSourceFormat)
-
+                                             const gfx::IntRect& aRect)
+  : Image(nullptr, ImageFormat::D3D11_SHARE_HANDLE_TEXTURE),
+    mSize(aSize),
+    mPictureRect(aRect)
 {
 }
 
 bool
-D3D11ShareHandleImage::AllocateTexture(D3D11RecycleAllocator* aAllocator,
-                                       ID3D11Device* aDevice)
+D3D11ShareHandleImage::AllocateTexture(D3D11RecycleAllocator* aAllocator, ID3D11Device* aDevice)
 {
   if (aAllocator) {
-    if (mSourceFormat == MFVideoFormat_NV12 &&
-        gfxPrefs::PDMWMFUseNV12Format() &&
+    if (gfxPrefs::PDMWMFUseNV12Format() &&
         gfx::DeviceManagerDx::Get()->CanUseNV12()) {
-      mTextureClient =
-        aAllocator->CreateOrRecycleClient(gfx::SurfaceFormat::NV12, mSize);
+      mTextureClient = aAllocator->CreateOrRecycleClient(gfx::SurfaceFormat::NV12, mSize);
     } else {
-      mTextureClient =
-        aAllocator->CreateOrRecycleClient(gfx::SurfaceFormat::B8G8R8A8, mSize);
+      mTextureClient = aAllocator->CreateOrRecycleClient(gfx::SurfaceFormat::B8G8R8A8, mSize);
     }
     if (mTextureClient) {
       mTexture = static_cast<D3D11TextureData*>(mTextureClient->GetInternalData())->GetD3D11Texture();
       return true;
     }
@@ -94,7 +87,7 @@ D3D11ShareHandleImage::GetAsSourceSurface()
 
   HRESULT hr;
 
-  if (desc.Format != DXGI_FORMAT_B8G8R8A8_UNORM) {
+  if (desc.Format == DXGI_FORMAT_NV12) {
     nsAutoCString error;
     std::unique_ptr<DXVA2Manager> manager(DXVA2Manager::CreateD3D11DXVA(nullptr, error, device));
 
@@ -105,11 +98,10 @@ D3D11ShareHandleImage::GetAsSourceSurface()
 
     RefPtr<ID3D11Texture2D> outTexture;
 
-    hr = manager->CopyToBGRATexture(
-      texture, mSourceFormat, getter_AddRefs(outTexture));
+    hr = manager->CopyToBGRATexture(texture, getter_AddRefs(outTexture));
 
     if (FAILED(hr)) {
-      gfxWarning() << "Failed to copy to BGRA texture.";
+      gfxWarning() << "Failed to copy NV12 to BGRA texture.";
       return nullptr;
     }
 
diff --git a/gfx/layers/D3D11ShareHandleImage.h b/gfx/layers/D3D11ShareHandleImage.h
index 13120e84b57a..8d399c62054e 100644
--- a/gfx/layers/D3D11ShareHandleImage.h
+++ b/gfx/layers/D3D11ShareHandleImage.h
@@ -52,12 +52,10 @@ protected:
 class D3D11ShareHandleImage final : public Image {
 public:
   D3D11ShareHandleImage(const gfx::IntSize& aSize,
-                        const gfx::IntRect& aRect,
-                        const GUID& aSourceFormat);
+                        const gfx::IntRect& aRect);
   virtual ~D3D11ShareHandleImage() {}
 
-  bool AllocateTexture(D3D11RecycleAllocator* aAllocator,
-                       ID3D11Device* aDevice);
+  bool AllocateTexture(D3D11RecycleAllocator* aAllocator, ID3D11Device* aDevice);
 
   gfx::IntSize GetSize() const override;
   already_AddRefed<gfx::SourceSurface> GetAsSourceSurface() override;
@@ -69,7 +67,6 @@ public:
 private:
   gfx::IntSize mSize;
   gfx::IntRect mPictureRect;
-  const GUID mSourceFormat;
   RefPtr<TextureClient> mTextureClient;
   RefPtr<ID3D11Texture2D> mTexture;
 };
 
diff --git a/gfx/layers/client/TextureClient.cpp b/gfx/layers/client/TextureClient.cpp
index 7b29a829c1ad..5b16b64b672c 100644
--- a/gfx/layers/client/TextureClient.cpp
+++ b/gfx/layers/client/TextureClient.cpp
@@ -1895,6 +1895,13 @@ MappedYCbCrChannelData::CopyInto(MappedYCbCrChannelData& aDst)
   if (bytesPerPixel == 1) {
     copyData(aDst.data, aDst, data, *this);
   } else if (bytesPerPixel == 2) {
+    if (skip != 0) {
+      // The skip value definition doesn't specify if it's in bytes, or in
+      // "pixels". We will assume the later. There are currently no decoders
+      // returning HDR content with a skip value different than zero anyway.
+      NS_WARNING("skip value non zero for HDR content, please verify code "
+                 "(see bug 1421187)");
+    }
     copyData(reinterpret_cast<uint16_t*>(aDst.data),
              aDst,
             reinterpret_cast<uint16_t*>(data),
diff --git a/gfx/webrender_bindings/WebRenderTypes.h b/gfx/webrender_bindings/WebRenderTypes.h
index 7cf82eb539c8..32eb6b1b8b93 100644
--- a/gfx/webrender_bindings/WebRenderTypes.h
+++ b/gfx/webrender_bindings/WebRenderTypes.h
@@ -864,8 +864,6 @@ static inline wr::WrColorDepth ToWrColorDepth(gfx::ColorDepth aColorDepth) {
       return wr::WrColorDepth::Color10;
     case gfx::ColorDepth::COLOR_12:
       return wr::WrColorDepth::Color12;
-    case gfx::ColorDepth::COLOR_16:
-      return wr::WrColorDepth::Color16;
     default:
       MOZ_ASSERT_UNREACHABLE("Tried to convert invalid color depth value.");
   }