/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"
|
|
|
|
#include <windows.h>
|
|
|
|
#define COBJMACROS
|
|
|
|
#include <initguid.h>
|
|
#include <d3d11.h>
|
|
#include <dxgi1_2.h>
|
|
|
|
#if HAVE_DXGIDEBUG_H
|
|
#include <dxgidebug.h>
|
|
#endif
|
|
|
|
#include "avassert.h"
|
|
#include "common.h"
|
|
#include "hwcontext.h"
|
|
#include "hwcontext_d3d11va.h"
|
|
#include "hwcontext_internal.h"
|
|
#include "imgutils.h"
|
|
#include "pixdesc.h"
|
|
#include "pixfmt.h"
|
|
#include "thread.h"
|
|
#include "compat/w32dlfcn.h"
|
|
|
|
typedef HRESULT(WINAPI *PFN_CREATE_DXGI_FACTORY)(REFIID riid, void **ppFactory);

static AVOnce functions_loaded = AV_ONCE_INIT;

static PFN_CREATE_DXGI_FACTORY mCreateDXGIFactory;
static PFN_D3D11_CREATE_DEVICE mD3D11CreateDevice;

static av_cold void load_functions(void)
{
#if !HAVE_UWP
    // We let these "leak" - this is fine, as unloading has no great benefit, and
    // Windows will mark a DLL as loaded forever if its internal refcount overflows
    // from too many LoadLibrary calls.
    HANDLE d3dlib, dxgilib;

    d3dlib  = dlopen("d3d11.dll", 0);
    dxgilib = dlopen("dxgi.dll", 0);
    if (!d3dlib || !dxgilib)
        return;

    mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) GetProcAddress(d3dlib, "D3D11CreateDevice");
    mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) GetProcAddress(dxgilib, "CreateDXGIFactory");
#else
    // In UWP (which lacks LoadLibrary), CreateDXGIFactory isn't available,
    // only CreateDXGIFactory1
    mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) D3D11CreateDevice;
    mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) CreateDXGIFactory1;
#endif
}

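// Per-frames-context private state (not exposed through the public API).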
typedef struct D3D11VAFramesContext {
    int nb_surfaces;
    int nb_surfaces_used;

    DXGI_FORMAT format;

    ID3D11Texture2D *staging_texture;
} D3D11VAFramesContext;

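// Mapping between DXGI texture formats and the corresponding FFmpeg
// software pixel formats.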
static const struct {
    DXGI_FORMAT d3d_format;
    enum AVPixelFormat pix_fmt;
} supported_formats[] = {
    { DXGI_FORMAT_NV12,       AV_PIX_FMT_NV12 },
    { DXGI_FORMAT_P010,       AV_PIX_FMT_P010 },
    // Special opaque formats. The pix_fmt is merely a place holder, as the
    // opaque format cannot be accessed directly.
    { DXGI_FORMAT_420_OPAQUE, AV_PIX_FMT_YUV420P },
};

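// Default lock/unlock callbacks, used when the caller does not install its
// own; the lock context is the Win32 mutex created in d3d11va_device_init().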
static void d3d11va_default_lock(void *ctx)
{
    WaitForSingleObjectEx(ctx, INFINITE, FALSE);
}

static void d3d11va_default_unlock(void *ctx)
{
    ReleaseMutex(ctx);
}

static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
{
    AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
    D3D11VAFramesContext *s = ctx->internal->priv;

    if (frames_hwctx->texture)
        ID3D11Texture2D_Release(frames_hwctx->texture);
    frames_hwctx->texture = NULL;

    if (s->staging_texture)
        ID3D11Texture2D_Release(s->staging_texture);
    s->staging_texture = NULL;

    av_freep(&frames_hwctx->texture_infos);
}

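// Report the supported hw/sw format combinations, probing each candidate
// DXGI format with CheckFormatSupport().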
static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx,
                                          const void *hwconfig,
                                          AVHWFramesConstraints *constraints)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
    int nb_sw_formats = 0;
    HRESULT hr;
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_formats) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
        UINT format_support = 0;
        hr = ID3D11Device_CheckFormatSupport(device_hwctx->device, supported_formats[i].d3d_format, &format_support);
        if (SUCCEEDED(hr) && (format_support & D3D11_FORMAT_SUPPORT_TEXTURE2D))
            constraints->valid_sw_formats[nb_sw_formats++] = supported_formats[i].pix_fmt;
    }
    constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_D3D11;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}

static void free_texture(void *opaque, uint8_t *data)
{
    ID3D11Texture2D_Release((ID3D11Texture2D *)opaque);
    av_free(data);
}

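// Wrap a texture (plus its array slice index) into an AVBufferRef and record
// it in the public texture_infos array.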
static AVBufferRef *wrap_texture_buf(AVHWFramesContext *ctx, ID3D11Texture2D *tex, int index)
{
    AVBufferRef *buf;
    AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));
    D3D11VAFramesContext *s = ctx->internal->priv;
    AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
    if (!desc) {
        ID3D11Texture2D_Release(tex);
        return NULL;
    }

    if (s->nb_surfaces <= s->nb_surfaces_used) {
        frames_hwctx->texture_infos = av_realloc_f(frames_hwctx->texture_infos,
                                                   s->nb_surfaces_used + 1,
                                                   sizeof(*frames_hwctx->texture_infos));
        if (!frames_hwctx->texture_infos) {
            ID3D11Texture2D_Release(tex);
            return NULL;
        }
        s->nb_surfaces = s->nb_surfaces_used + 1;
    }

    frames_hwctx->texture_infos[s->nb_surfaces_used].texture = tex;
    frames_hwctx->texture_infos[s->nb_surfaces_used].index   = index;
    s->nb_surfaces_used++;

    desc->texture = tex;
    desc->index   = index;

    buf = av_buffer_create((uint8_t *)desc, sizeof(desc), free_texture, tex, 0);
    if (!buf) {
        ID3D11Texture2D_Release(tex);
        av_free(desc);
        return NULL;
    }

    return buf;
}

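// Allocate a single, standalone texture; used when no fixed-size array
// texture backs the pool.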
static AVBufferRef *d3d11va_alloc_single(AVHWFramesContext *ctx)
{
    D3D11VAFramesContext *s = ctx->internal->priv;
    AVD3D11VAFramesContext *hwctx = ctx->hwctx;
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    HRESULT hr;
    ID3D11Texture2D *tex;
    D3D11_TEXTURE2D_DESC texDesc = {
        .Width      = ctx->width,
        .Height     = ctx->height,
        .MipLevels  = 1,
        .Format     = s->format,
        .SampleDesc = { .Count = 1 },
        .ArraySize  = 1,
        .Usage      = D3D11_USAGE_DEFAULT,
        .BindFlags  = hwctx->BindFlags,
        .MiscFlags  = hwctx->MiscFlags,
    };

    hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &tex);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
        return NULL;
    }

    return wrap_texture_buf(ctx, tex, 0);
}

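// Buffer pool allocator: hands out slices of the shared array texture, or
// falls back to per-frame textures if there is none.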
static AVBufferRef *d3d11va_pool_alloc(void *opaque, size_t size)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
    D3D11VAFramesContext *s = ctx->internal->priv;
    AVD3D11VAFramesContext *hwctx = ctx->hwctx;
    D3D11_TEXTURE2D_DESC texDesc;

    if (!hwctx->texture)
        return d3d11va_alloc_single(ctx);

    ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc);

    if (s->nb_surfaces_used >= texDesc.ArraySize) {
        av_log(ctx, AV_LOG_ERROR, "Static surface pool size exceeded.\n");
        return NULL;
    }

    ID3D11Texture2D_AddRef(hwctx->texture);
    return wrap_texture_buf(ctx, hwctx->texture, s->nb_surfaces_used);
}

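// Validate the sw_format, create the (optional) array texture and the
// texture_infos bookkeeping, and set up the internal buffer pool.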
static int d3d11va_frames_init(AVHWFramesContext *ctx)
{
    AVD3D11VAFramesContext *hwctx        = ctx->hwctx;
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext   *s            = ctx->internal->priv;

    int i;
    HRESULT hr;
    D3D11_TEXTURE2D_DESC texDesc;

    for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
        if (ctx->sw_format == supported_formats[i].pix_fmt) {
            s->format = supported_formats[i].d3d_format;
            break;
        }
    }
    if (i == FF_ARRAY_ELEMS(supported_formats)) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(EINVAL);
    }

    texDesc = (D3D11_TEXTURE2D_DESC){
        .Width      = ctx->width,
        .Height     = ctx->height,
        .MipLevels  = 1,
        .Format     = s->format,
        .SampleDesc = { .Count = 1 },
        .ArraySize  = ctx->initial_pool_size,
        .Usage      = D3D11_USAGE_DEFAULT,
        .BindFlags  = hwctx->BindFlags,
        .MiscFlags  = hwctx->MiscFlags,
    };

    if (hwctx->texture) {
        D3D11_TEXTURE2D_DESC texDesc2;
        ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc2);

        if (texDesc.Width != texDesc2.Width ||
            texDesc.Height != texDesc2.Height ||
            texDesc.Format != texDesc2.Format) {
            av_log(ctx, AV_LOG_ERROR, "User-provided texture has mismatching parameters\n");
            return AVERROR(EINVAL);
        }
    } else if (!(texDesc.BindFlags & D3D11_BIND_RENDER_TARGET) && texDesc.ArraySize > 0) {
        hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &hwctx->texture);
        if (FAILED(hr)) {
            av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
            return AVERROR_UNKNOWN;
        }
    }

    hwctx->texture_infos = av_realloc_f(NULL, ctx->initial_pool_size, sizeof(*hwctx->texture_infos));
    if (!hwctx->texture_infos)
        return AVERROR(ENOMEM);
    s->nb_surfaces = ctx->initial_pool_size;

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(AVD3D11FrameDescriptor),
                                                        ctx, d3d11va_pool_alloc, NULL);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    return 0;
}

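// Export a pooled surface as an AVFrame: data[0] is the ID3D11Texture2D,
// data[1] the texture array index.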
static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    AVD3D11FrameDescriptor *desc;

    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    desc = (AVD3D11FrameDescriptor *)frame->buf[0]->data;

    frame->data[0] = (uint8_t *)desc->texture;
    frame->data[1] = (uint8_t *)desc->index;
    frame->format  = AV_PIX_FMT_D3D11;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}

static int d3d11va_transfer_get_formats(AVHWFramesContext *ctx,
                                        enum AVHWFrameTransferDirection dir,
                                        enum AVPixelFormat **formats)
{
    D3D11VAFramesContext *s = ctx->internal->priv;
    enum AVPixelFormat *fmts;

    fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = ctx->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    // Don't signal support for opaque formats. Actual access would fail.
    if (s->format == DXGI_FORMAT_420_OPAQUE)
        fmts[0] = AV_PIX_FMT_NONE;

    *formats = fmts;

    return 0;
}

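// Create the CPU-accessible staging texture used for uploads and downloads.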
static int d3d11va_create_staging_texture(AVHWFramesContext *ctx, DXGI_FORMAT format)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext *s = ctx->internal->priv;
    HRESULT hr;
    D3D11_TEXTURE2D_DESC texDesc = {
        .Width          = ctx->width,
        .Height         = ctx->height,
        .MipLevels      = 1,
        .Format         = format,
        .SampleDesc     = { .Count = 1 },
        .ArraySize      = 1,
        .Usage          = D3D11_USAGE_STAGING,
        .CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE,
    };

    hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &s->staging_texture);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Could not create the staging texture (%lx)\n", (long)hr);
        return AVERROR_UNKNOWN;
    }

    return 0;
}

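// Derive per-plane pointers and linesizes from a mapped staging texture.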
static void fill_texture_ptrs(uint8_t *data[4], int linesize[4],
                              AVHWFramesContext *ctx,
                              D3D11_TEXTURE2D_DESC *desc,
                              D3D11_MAPPED_SUBRESOURCE *map)
{
    int i;

    for (i = 0; i < 4; i++)
        linesize[i] = map->RowPitch;

    av_image_fill_pointers(data, ctx->sw_format, desc->Height,
                           (uint8_t*)map->pData, linesize);
}

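// Upload/download between system memory and a D3D11 surface, going through
// the staging texture (GPU copy + Map on the immediate context).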
static int d3d11va_transfer_data(AVHWFramesContext *ctx, AVFrame *dst,
                                 const AVFrame *src)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext *s = ctx->internal->priv;
    int download = src->format == AV_PIX_FMT_D3D11;
    const AVFrame *frame = download ? src : dst;
    const AVFrame *other = download ? dst : src;
    // (The interface types are compatible.)
    ID3D11Resource *texture = (ID3D11Resource *)(ID3D11Texture2D *)frame->data[0];
    int index = (intptr_t)frame->data[1];
    ID3D11Resource *staging;
    int w = FFMIN(dst->width,  src->width);
    int h = FFMIN(dst->height, src->height);
    uint8_t *map_data[4];
    int map_linesize[4];
    D3D11_TEXTURE2D_DESC desc;
    D3D11_MAPPED_SUBRESOURCE map;
    HRESULT hr;

    if (frame->hw_frames_ctx->data != (uint8_t *)ctx || other->format != ctx->sw_format)
        return AVERROR(EINVAL);

    device_hwctx->lock(device_hwctx->lock_ctx);

    if (!s->staging_texture) {
        ID3D11Texture2D_GetDesc((ID3D11Texture2D *)texture, &desc);
        int res = d3d11va_create_staging_texture(ctx, desc.Format);
        if (res < 0) {
            // Don't leave the device lock held on the error path.
            device_hwctx->unlock(device_hwctx->lock_ctx);
            return res;
        }
    }

    staging = (ID3D11Resource *)s->staging_texture;

    ID3D11Texture2D_GetDesc(s->staging_texture, &desc);

    if (download) {
        ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
                                                  staging, 0, 0, 0, 0,
                                                  texture, index, NULL);

        hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
                                     staging, 0, D3D11_MAP_READ, 0, &map);
        if (FAILED(hr))
            goto map_failed;

        fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);

        av_image_copy(dst->data, dst->linesize, (const uint8_t **)map_data, map_linesize,
                      ctx->sw_format, w, h);

        ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);
    } else {
        hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
                                     staging, 0, D3D11_MAP_WRITE, 0, &map);
        if (FAILED(hr))
            goto map_failed;

        fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);

        av_image_copy(map_data, map_linesize, (const uint8_t **)src->data, src->linesize,
                      ctx->sw_format, w, h);

        ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);

        ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
                                                  texture, index, 0, 0, 0,
                                                  staging, 0, NULL);
    }

    device_hwctx->unlock(device_hwctx->lock_ctx);
    return 0;

map_failed:
    av_log(ctx, AV_LOG_ERROR, "Unable to lock D3D11VA surface (%lx)\n", (long)hr);
    device_hwctx->unlock(device_hwctx->lock_ctx);
    return AVERROR_UNKNOWN;
}

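// Finish device setup: default locking, the immediate device context, and
// the ID3D11VideoDevice/ID3D11VideoContext interfaces.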
static int d3d11va_device_init(AVHWDeviceContext *hwdev)
{
    AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;
    HRESULT hr;

    if (!device_hwctx->lock) {
        device_hwctx->lock_ctx = CreateMutex(NULL, 0, NULL);
        if (device_hwctx->lock_ctx == INVALID_HANDLE_VALUE) {
            av_log(NULL, AV_LOG_ERROR, "Failed to create a mutex\n");
            return AVERROR(EINVAL);
        }
        device_hwctx->lock   = d3d11va_default_lock;
        device_hwctx->unlock = d3d11va_default_unlock;
    }

    if (!device_hwctx->device_context) {
        ID3D11Device_GetImmediateContext(device_hwctx->device, &device_hwctx->device_context);
        if (!device_hwctx->device_context)
            return AVERROR_UNKNOWN;
    }

    if (!device_hwctx->video_device) {
        hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device, &IID_ID3D11VideoDevice,
                                                (void **)&device_hwctx->video_device);
        if (FAILED(hr))
            return AVERROR_UNKNOWN;
    }

    if (!device_hwctx->video_context) {
        hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device_context, &IID_ID3D11VideoContext,
                                                (void **)&device_hwctx->video_context);
        if (FAILED(hr))
            return AVERROR_UNKNOWN;
    }

    return 0;
}

static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
{
    AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;

    if (device_hwctx->device) {
        ID3D11Device_Release(device_hwctx->device);
        device_hwctx->device = NULL;
    }

    if (device_hwctx->device_context) {
        ID3D11DeviceContext_Release(device_hwctx->device_context);
        device_hwctx->device_context = NULL;
    }

    if (device_hwctx->video_device) {
        ID3D11VideoDevice_Release(device_hwctx->video_device);
        device_hwctx->video_device = NULL;
    }

    if (device_hwctx->video_context) {
        ID3D11VideoContext_Release(device_hwctx->video_context);
        device_hwctx->video_context = NULL;
    }

    if (device_hwctx->lock == d3d11va_default_lock) {
        CloseHandle(device_hwctx->lock_ctx);
        device_hwctx->lock_ctx = INVALID_HANDLE_VALUE;
        device_hwctx->lock = NULL;
    }
}

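// Create a new D3D11 device, optionally selecting a DXGI adapter by index
// (the "device" string is parsed with atoi) and enabling the debug layer via
// the "debug" option.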
static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
                                 AVDictionary *opts, int flags)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;

    HRESULT hr;
    IDXGIAdapter       *pAdapter = NULL;
    ID3D10Multithread  *pMultithread;
    UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
    int is_debug       = !!av_dict_get(opts, "debug", NULL, 0);
    int ret;

    // (On UWP we can't check this.)
#if !HAVE_UWP
    if (!LoadLibrary("d3d11_1sdklayers.dll"))
        is_debug = 0;
#endif

    if (is_debug)
        creationFlags |= D3D11_CREATE_DEVICE_DEBUG;

    if ((ret = ff_thread_once(&functions_loaded, load_functions)) != 0)
        return AVERROR_UNKNOWN;
    if (!mD3D11CreateDevice || !mCreateDXGIFactory) {
        av_log(ctx, AV_LOG_ERROR, "Failed to load D3D11 library or its functions\n");
        return AVERROR_UNKNOWN;
    }

    if (device) {
        IDXGIFactory2 *pDXGIFactory;
        hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&pDXGIFactory);
        if (SUCCEEDED(hr)) {
            int adapter = atoi(device);
            if (FAILED(IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter, &pAdapter)))
                pAdapter = NULL;
            IDXGIFactory2_Release(pDXGIFactory);
        }
    }

    if (pAdapter) {
        DXGI_ADAPTER_DESC desc;
        hr = IDXGIAdapter2_GetDesc(pAdapter, &desc);
        if (!FAILED(hr)) {
            av_log(ctx, AV_LOG_INFO, "Using device %04x:%04x (%ls).\n",
                   desc.VendorId, desc.DeviceId, desc.Description);
        }
    }

    hr = mD3D11CreateDevice(pAdapter, pAdapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE, NULL, creationFlags, NULL, 0,
                            D3D11_SDK_VERSION, &device_hwctx->device, NULL, NULL);
    if (pAdapter)
        IDXGIAdapter_Release(pAdapter);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create Direct3D device (%lx)\n", (long)hr);
        return AVERROR_UNKNOWN;
    }

    hr = ID3D11Device_QueryInterface(device_hwctx->device, &IID_ID3D10Multithread, (void **)&pMultithread);
    if (SUCCEEDED(hr)) {
        ID3D10Multithread_SetMultithreadProtected(pMultithread, TRUE);
        ID3D10Multithread_Release(pMultithread);
    }

#if !HAVE_UWP && HAVE_DXGIDEBUG_H
    if (is_debug) {
        HANDLE dxgidebug_dll = LoadLibrary("dxgidebug.dll");
        if (dxgidebug_dll) {
            HRESULT (WINAPI * pf_DXGIGetDebugInterface)(const GUID *riid, void **ppDebug)
                = (void *)GetProcAddress(dxgidebug_dll, "DXGIGetDebugInterface");
            if (pf_DXGIGetDebugInterface) {
                IDXGIDebug *dxgi_debug = NULL;
                hr = pf_DXGIGetDebugInterface(&IID_IDXGIDebug, (void**)&dxgi_debug);
                if (SUCCEEDED(hr) && dxgi_debug)
                    IDXGIDebug_ReportLiveObjects(dxgi_debug, DXGI_DEBUG_ALL, DXGI_DEBUG_RLO_ALL);
            }
        }
    }
#endif

    return 0;
}

const HWContextType ff_hwcontext_type_d3d11va = {
    .type                   = AV_HWDEVICE_TYPE_D3D11VA,
    .name                   = "D3D11VA",

    .device_hwctx_size      = sizeof(AVD3D11VADeviceContext),
    .frames_hwctx_size      = sizeof(AVD3D11VAFramesContext),
    .frames_priv_size       = sizeof(D3D11VAFramesContext),

    .device_create          = d3d11va_device_create,
    .device_init            = d3d11va_device_init,
    .device_uninit          = d3d11va_device_uninit,
    .frames_get_constraints = d3d11va_frames_get_constraints,
    .frames_init            = d3d11va_frames_init,
    .frames_uninit          = d3d11va_frames_uninit,
    .frames_get_buffer      = d3d11va_get_buffer,
    .transfer_get_formats   = d3d11va_transfer_get_formats,
    .transfer_data_to       = d3d11va_transfer_data,
    .transfer_data_from     = d3d11va_transfer_data,

    .pix_fmts               = (const enum AVPixelFormat[]){ AV_PIX_FMT_D3D11, AV_PIX_FMT_NONE },
};