video: remove d3d11 video processor use from OpenGL interop

We now have a video filter that uses the d3d11 video processor, so it
makes no sense to have one in the VO interop code. The VO uses it for
formats not directly supported by ANGLE (so the video data is converted
to a RGB texture, which ANGLE can take in).

Change this so that the video filter is automatically inserted if
needed. Move the code that maps RGB surfaces to its own interop backend.
Add a bunch of new image formats, which are used to enforce the new
constraints, and to automatically insert the filter only when needed.

The added vf mechanism to auto-insert the d3d11vpp filter is very dumb
and primitive, and will work only for this specific purpose. The format
negotiation mechanism in the filter chain is generally not very pretty,
and mostly broken as well. (libavfilter has a different mechanism, and
these mechanisms don't match well, so vf_lavfi uses some sort of hack.
It only works because hwaccel and non-hwaccel formats are strictly
separated.)

The RGB interop is now only used with older ANGLE versions. The only
reason I'm keeping it is because it's relatively isolated (uses only
existing mechanisms and adds no new concepts), and because I want to be
able to compare the behavior of the old code with the new one for
testing. It will be removed eventually.

If ANGLE has NV12 interop, P010 is now handled by converting to NV12
with the video processor, instead of converting it to RGB and using the
old mechanism to import that as a texture.
This commit is contained in:
wm4 2016-05-29 17:13:22 +02:00
parent 49f73eaf7b
commit 0348cd080f
11 changed files with 482 additions and 445 deletions

View File

@ -2295,7 +2295,8 @@ static int probe_deint_filters(struct MPContext *mpctx)
if (check_output_format(mpctx, IMGFMT_VAAPI) &&
probe_deint_filter(mpctx, "vavpp"))
return 0;
if (check_output_format(mpctx, IMGFMT_D3D11VA) &&
if ((check_output_format(mpctx, IMGFMT_D3D11VA) ||
check_output_format(mpctx, IMGFMT_D3D11NV12)) &&
probe_deint_filter(mpctx, "d3d11vpp"))
return 0;
if (probe_deint_filter(mpctx, "yadif"))

View File

@ -195,6 +195,9 @@ static struct mp_image *d3d11va_update_image_attribs(struct lavc_ctx *s,
}
}
if (img->params.hw_subfmt == IMGFMT_NV12)
mp_image_setfmt(img, IMGFMT_D3D11NV12);
return img;
}

View File

@ -518,7 +518,23 @@ static void query_formats(uint8_t *fmts, struct vf_instance *vf)
static bool is_conv_filter(struct vf_instance *vf)
{
return vf && strcmp(vf->info->name, "scale") == 0;
return vf && (strcmp(vf->info->name, "scale") == 0 || vf->autoinserted);
}
// Pick a filter capable of converting into a format the downstream
// filter accepts.
// fmts_out: acceptance map of the downstream filter, indexed by
//           (imgfmt - IMGFMT_START); nonzero means the format is accepted.
// Returns the name of the first registered filter whose test_conversion
// callback claims it can produce one of the accepted output formats;
// falls back to "scale" (the default conversion filter) otherwise.
// NOTE(review): the input format 'a' is not checked against anything here —
// presumably test_conversion() itself rejects unrelated input formats;
// confirm against the caller in update_formats().
static const char *find_conv_filter(uint8_t *fmts_out)
{
    for (int n = 0; filter_list[n]; n++) {
        if (filter_list[n]->test_conversion) {
            // Scan all (input, output) format pairs for a claimed conversion.
            for (int a = IMGFMT_START; a < IMGFMT_END; a++) {
                for (int b = IMGFMT_START; b < IMGFMT_END; b++) {
                    if (fmts_out[b - IMGFMT_START] &&
                        filter_list[n]->test_conversion(a, b))
                        return filter_list[n]->name;
                }
            }
        }
    }
    return "scale";
}
static void update_formats(struct vf_chain *c, struct vf_instance *vf,
@ -539,7 +555,8 @@ static void update_formats(struct vf_chain *c, struct vf_instance *vf,
// filters after vf work, but vf can't output any format the filters
// after it accept), try to insert a conversion filter.
MP_INFO(c, "Using conversion filter.\n");
struct vf_instance *conv = vf_open(c, "scale", NULL);
const char *filter = find_conv_filter(vf->last_outfmts);
struct vf_instance *conv = vf_open(c, filter, NULL);
if (conv) {
conv->autoinserted = true;
conv->next = vf->next;

View File

@ -37,6 +37,7 @@ typedef struct vf_info {
const void *priv_defaults;
const struct m_option *options;
void (*print_help)(struct mp_log *log);
bool (*test_conversion)(int in, int out);
} vf_info_t;
typedef struct vf_instance {

View File

@ -42,7 +42,13 @@ struct vf_priv_s {
ID3D11VideoProcessorEnumerator *vp_enum;
D3D11_VIDEO_FRAME_FORMAT d3d_frame_format;
struct mp_image_params params;
DXGI_FORMAT out_format;
bool out_shared;
bool out_rgb;
bool require_filtering;
struct mp_image_params params, out_params;
int c_w, c_h;
struct mp_image_pool *pool;
@ -53,12 +59,6 @@ struct vf_priv_s {
int interlaced_only;
};
struct d3d11va_surface {
ID3D11Texture2D *texture;
int subindex;
ID3D11VideoDecoderOutputView *surface;
};
static void release_tex(void *arg)
{
ID3D11Texture2D *texture = arg;
@ -66,24 +66,25 @@ static void release_tex(void *arg)
ID3D11Texture2D_Release(texture);
}
static struct mp_image *alloc_surface(ID3D11Device *dev, DXGI_FORMAT format,
int hw_subfmt, int w, int h, bool shared)
static struct mp_image *alloc_pool(void *pctx, int fmt, int w, int h)
{
struct vf_instance *vf = pctx;
struct vf_priv_s *p = vf->priv;
HRESULT hr;
ID3D11Texture2D *texture = NULL;
D3D11_TEXTURE2D_DESC texdesc = {
.Width = w,
.Height = h,
.Format = format,
.Format = p->out_format,
.MipLevels = 1,
.ArraySize = 1,
.SampleDesc = { .Count = 1 },
.Usage = D3D11_USAGE_DEFAULT,
.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE,
.MiscFlags = shared ? D3D11_RESOURCE_MISC_SHARED : 0,
.MiscFlags = p->out_shared ? D3D11_RESOURCE_MISC_SHARED : 0,
};
hr = ID3D11Device_CreateTexture2D(dev, &texdesc, NULL, &texture);
hr = ID3D11Device_CreateTexture2D(p->vo_dev, &texdesc, NULL, &texture);
if (FAILED(hr))
return NULL;
@ -91,9 +92,9 @@ static struct mp_image *alloc_surface(ID3D11Device *dev, DXGI_FORMAT format,
if (!mpi)
abort();
mp_image_setfmt(mpi, IMGFMT_D3D11VA);
mp_image_setfmt(mpi, p->out_params.imgfmt);
mp_image_set_size(mpi, w, h);
mpi->params.hw_subfmt = hw_subfmt;
mpi->params.hw_subfmt = p->out_params.hw_subfmt;
mpi->planes[1] = (void *)texture;
mpi->planes[2] = (void *)(intptr_t)0;
@ -101,14 +102,6 @@ static struct mp_image *alloc_surface(ID3D11Device *dev, DXGI_FORMAT format,
return mpi;
}
static struct mp_image *alloc_pool_nv12(void *pctx, int fmt, int w, int h)
{
ID3D11Device *dev = pctx;
assert(fmt == IMGFMT_D3D11VA);
return alloc_surface(dev, DXGI_FORMAT_NV12, IMGFMT_NV12, w, h, false);
}
static void flush_frames(struct vf_instance *vf)
{
struct vf_priv_s *p = vf->priv;
@ -149,7 +142,6 @@ static int recreate_video_proc(struct vf_instance *vf)
destroy_video_proc(vf);
// Note: we skip any deinterlacing considerations for now.
D3D11_VIDEO_PROCESSOR_CONTENT_DESC vpdesc = {
.InputFrameFormat = p->d3d_frame_format,
.InputWidth = p->c_w,
@ -221,9 +213,21 @@ static int recreate_video_proc(struct vf_instance *vf)
ID3D11VideoContext_VideoProcessorSetStreamColorSpace(p->video_ctx,
p->video_proc,
0, &csp);
ID3D11VideoContext_VideoProcessorSetOutputColorSpace(p->video_ctx,
p->video_proc,
&csp);
if (p->out_rgb) {
if (p->params.colorspace != MP_CSP_BT_601 &&
p->params.colorspace != MP_CSP_BT_709)
{
MP_WARN(vf, "Unsupported video colorspace (%s/%s). Consider "
"disabling hardware decoding, or using "
"--hwdec=d3d11va-copy to get correct output.\n",
m_opt_choice_str(mp_csp_names, p->params.colorspace),
m_opt_choice_str(mp_csp_levels_names, p->params.colorlevels));
}
} else {
ID3D11VideoContext_VideoProcessorSetOutputColorSpace(p->video_ctx,
p->video_proc,
&csp);
}
return 0;
fail:
@ -239,7 +243,7 @@ static int render(struct vf_instance *vf)
ID3D11VideoProcessorInputView *in_view = NULL;
ID3D11VideoProcessorOutputView *out_view = NULL;
struct mp_image *in = NULL, *out = NULL;
out = mp_image_pool_get(p->pool, IMGFMT_D3D11VA, p->params.w, p->params.h);
out = mp_image_pool_get(p->pool, p->out_params.imgfmt, p->params.w, p->params.h);
if (!out)
goto cleanup;
@ -323,6 +327,10 @@ static int render(struct vf_instance *vf)
goto cleanup;
}
// Make sure the texture is updated correctly on the shared context.
// (I'm not sure if this is correct, though it won't harm.)
ID3D11DeviceContext_Flush(p->device_ctx);
res = 0;
cleanup:
if (in_view)
@ -338,7 +346,6 @@ cleanup:
return res;
}
static int filter_out(struct vf_instance *vf)
{
struct vf_priv_s *p = vf->priv;
@ -347,7 +354,7 @@ static int filter_out(struct vf_instance *vf)
return 0;
// no filtering
if (!mp_refqueue_should_deint(p->queue)) {
if (!mp_refqueue_should_deint(p->queue) && !p->require_filtering) {
struct mp_image *in = mp_refqueue_get(p->queue, 0);
vf_add_output_frame(vf, mp_image_new_ref(in));
mp_refqueue_next(p->queue);
@ -368,16 +375,34 @@ static int reconfig(struct vf_instance *vf, struct mp_image_params *in,
destroy_video_proc(vf);
*out = *in;
if (vf_next_query_format(vf, IMGFMT_D3D11VA) ||
vf_next_query_format(vf, IMGFMT_D3D11NV12))
{
out->imgfmt = vf_next_query_format(vf, IMGFMT_D3D11VA)
? IMGFMT_D3D11VA : IMGFMT_D3D11NV12;
out->hw_subfmt = IMGFMT_NV12;
p->out_format = DXGI_FORMAT_NV12;
p->out_shared = false;
p->out_rgb = false;
} else {
out->imgfmt = IMGFMT_D3D11RGB;
out->hw_subfmt = IMGFMT_RGB0;
p->out_format = DXGI_FORMAT_B8G8R8A8_UNORM;
p->out_shared = true;
p->out_rgb = true;
}
p->require_filtering = in->hw_subfmt != out->hw_subfmt;
p->params = *in;
p->out_params = *out;
p->pool = mp_image_pool_new(20);
mp_image_pool_set_allocator(p->pool, alloc_pool_nv12, p->vo_dev);
mp_image_pool_set_allocator(p->pool, alloc_pool, vf);
mp_image_pool_set_lru(p->pool);
*out = *in;
out->imgfmt = IMGFMT_D3D11VA;
out->hw_subfmt = IMGFMT_NV12;
return 0;
}
@ -406,11 +431,27 @@ static void uninit(struct vf_instance *vf)
static int query_format(struct vf_instance *vf, unsigned int imgfmt)
{
if (imgfmt == IMGFMT_D3D11VA)
return vf_next_query_format(vf, IMGFMT_D3D11VA);
if (imgfmt == IMGFMT_D3D11VA ||
imgfmt == IMGFMT_D3D11NV12 ||
imgfmt == IMGFMT_D3D11RGB)
{
return vf_next_query_format(vf, IMGFMT_D3D11VA) ||
vf_next_query_format(vf, IMGFMT_D3D11NV12) ||
vf_next_query_format(vf, IMGFMT_D3D11RGB);
}
return 0;
}
// Report whether the d3d11vpp filter can convert format "in" to format
// "out". True exactly when both formats are D3D11 hwaccel formats.
static bool test_conversion(int in, int out)
{
    static const int d3d11_fmts[] = {
        IMGFMT_D3D11VA, IMGFMT_D3D11NV12, IMGFMT_D3D11RGB,
    };
    bool in_ok = false, out_ok = false;
    for (int i = 0; i < 3; i++) {
        in_ok |= in == d3d11_fmts[i];
        out_ok |= out == d3d11_fmts[i];
    }
    return in_ok && out_ok;
}
static int control(struct vf_instance *vf, int request, void* data)
{
struct vf_priv_s *p = vf->priv;
@ -480,6 +521,7 @@ static const m_option_t vf_opts_fields[] = {
const vf_info_t vf_info_d3d11vpp = {
.description = "D3D11 Video Post-Process Filter",
.name = "d3d11vpp",
.test_conversion = test_conversion,
.open = vf_open,
.priv_size = sizeof(struct vf_priv_s),
.priv_defaults = &(const struct vf_priv_s) {

View File

@ -36,6 +36,8 @@ struct mp_imgfmt_entry {
static const struct mp_imgfmt_entry mp_imgfmt_list[] = {
// not in ffmpeg
{"vdpau_output", IMGFMT_VDPAU_OUTPUT},
{"d3d11_nv12", IMGFMT_D3D11NV12},
{"d3d11_rgb", IMGFMT_D3D11RGB},
// FFmpeg names have an annoying "_vld" suffix
{"videotoolbox", IMGFMT_VIDEOTOOLBOX},
{"vaapi", IMGFMT_VAAPI},
@ -120,12 +122,20 @@ static struct mp_imgfmt_desc mp_only_imgfmt_desc(int mpfmt)
{
switch (mpfmt) {
case IMGFMT_VDPAU_OUTPUT:
case IMGFMT_D3D11RGB:
return (struct mp_imgfmt_desc) {
.id = mpfmt,
.avformat = AV_PIX_FMT_NONE,
.flags = MP_IMGFLAG_BE | MP_IMGFLAG_LE | MP_IMGFLAG_RGB |
MP_IMGFLAG_HWACCEL,
};
case IMGFMT_D3D11NV12:
return (struct mp_imgfmt_desc) {
.id = mpfmt,
.avformat = AV_PIX_FMT_NONE,
.flags = MP_IMGFLAG_BE | MP_IMGFLAG_LE | MP_IMGFLAG_YUV |
MP_IMGFLAG_HWACCEL,
};
}
return (struct mp_imgfmt_desc) {0};
}

View File

@ -198,7 +198,17 @@ enum mp_imgfmt {
IMGFMT_VDPAU, // VdpVideoSurface
IMGFMT_VDPAU_OUTPUT, // VdpOutputSurface
IMGFMT_VAAPI,
IMGFMT_D3D11VA, // ID3D11VideoDecoderOutputView (NV12/P010/P016)
// NV12/P010/P016
// plane 1: ID3D11Texture2D
// plane 2: slice index casted to pointer
// plane 3: ID3D11VideoDecoderOutputView (can be absent in filters/VO)
IMGFMT_D3D11VA,
// Like IMGFMT_D3D11VA, but format is restricted to NV12.
IMGFMT_D3D11NV12,
// Like IMGFMT_D3D11VA, but format is restricted to a certain RGB format.
// Also, it must have a share handle, have been flushed, and not be a
// texture array slice.
IMGFMT_D3D11RGB,
IMGFMT_DXVA2, // IDirect3DSurface9 (NV12/P010/P016)
IMGFMT_MMAL, // MMAL_BUFFER_HEADER_T
IMGFMT_VIDEOTOOLBOX, // CVPixelBufferRef

View File

@ -30,6 +30,7 @@ extern const struct gl_hwdec_driver gl_hwdec_videotoolbox;
extern const struct gl_hwdec_driver gl_hwdec_vdpau;
extern const struct gl_hwdec_driver gl_hwdec_dxva2egl;
extern const struct gl_hwdec_driver gl_hwdec_d3d11egl;
extern const struct gl_hwdec_driver gl_hwdec_d3d11eglrgb;
extern const struct gl_hwdec_driver gl_hwdec_dxva2gldx;
extern const struct gl_hwdec_driver gl_hwdec_dxva2;
@ -49,6 +50,7 @@ static const struct gl_hwdec_driver *const mpgl_hwdec_drivers[] = {
#if HAVE_D3D_HWACCEL
#if HAVE_EGL_ANGLE
&gl_hwdec_d3d11egl,
&gl_hwdec_d3d11eglrgb,
&gl_hwdec_dxva2egl,
#endif
#if HAVE_GL_DXINTEROP

View File

@ -39,24 +39,9 @@ struct priv {
struct mp_hwdec_ctx hwctx;
ID3D11Device *d3d11_device;
ID3D11DeviceContext *device_ctx;
ID3D11VideoDevice *video_dev;
ID3D11VideoContext *video_ctx;
EGLDisplay egl_display;
EGLConfig egl_config;
EGLSurface egl_surface;
ID3D11Texture2D *texture;
ID3D11VideoProcessor *video_proc;
ID3D11VideoProcessorEnumerator *vp_enum;
ID3D11VideoProcessorOutputView *out_view;
struct mp_image_params image_params;
int c_w, c_h;
EGLStreamKHR egl_stream;
GLuint gl_textures[3];
// EGL_KHR_stream
@ -83,23 +68,6 @@ struct priv {
const EGLAttrib *attrib_list);
};
static void destroy_video_proc(struct gl_hwdec *hw)
{
struct priv *p = hw->priv;
if (p->out_view)
ID3D11VideoProcessorOutputView_Release(p->out_view);
p->out_view = NULL;
if (p->video_proc)
ID3D11VideoProcessor_Release(p->video_proc);
p->video_proc = NULL;
if (p->vp_enum)
ID3D11VideoProcessorEnumerator_Release(p->vp_enum);
p->vp_enum = NULL;
}
static void destroy_objects(struct gl_hwdec *hw)
{
struct priv *p = hw->priv;
@ -113,18 +81,6 @@ static void destroy_objects(struct gl_hwdec *hw)
gl->DeleteTextures(1, &p->gl_textures[n]);
p->gl_textures[n] = 0;
}
if (p->egl_display && p->egl_surface) {
eglReleaseTexImage(p->egl_display, p->egl_surface, EGL_BACK_BUFFER);
eglDestroySurface(p->egl_display, p->egl_surface);
}
p->egl_surface = NULL;
if (p->texture)
ID3D11Texture2D_Release(p->texture);
p->texture = NULL;
destroy_video_proc(hw);
}
static void destroy(struct gl_hwdec *hw)
@ -135,18 +91,6 @@ static void destroy(struct gl_hwdec *hw)
hwdec_devices_remove(hw->devs, &p->hwctx);
if (p->video_ctx)
ID3D11VideoContext_Release(p->video_ctx);
p->video_ctx = NULL;
if (p->video_dev)
ID3D11VideoDevice_Release(p->video_dev);
p->video_dev = NULL;
if (p->device_ctx)
ID3D11DeviceContext_Release(p->device_ctx);
p->device_ctx = NULL;
if (p->d3d11_device)
ID3D11Device_Release(p->d3d11_device);
p->d3d11_device = NULL;
@ -165,99 +109,70 @@ static int create(struct gl_hwdec *hw)
return -1;
const char *exts = eglQueryString(egl_display, EGL_EXTENSIONS);
if (!exts || !strstr(exts, "EGL_ANGLE_d3d_share_handle_client_buffer"))
if (!exts || !strstr(exts, "EGL_ANGLE_d3d_share_handle_client_buffer") ||
!strstr(exts, "EGL_ANGLE_stream_producer_d3d_texture_nv12") ||
!(strstr(hw->gl->extensions, "GL_OES_EGL_image_external_essl3") ||
hw->gl->es == 200) ||
!strstr(exts, "EGL_EXT_device_query"))
return -1;
bool use_native_device = !!strstr(exts, "EGL_EXT_device_query");
HRESULT hr;
struct priv *p = talloc_zero(hw, struct priv);
hw->priv = p;
p->egl_display = egl_display;
// Optional EGLStream stuff for working without video processor.
if (strstr(exts, "EGL_ANGLE_stream_producer_d3d_texture_nv12") &&
use_native_device &&
(strstr(hw->gl->extensions, "GL_OES_EGL_image_external_essl3") ||
hw->gl->es == 200))
p->CreateStreamKHR = (void *)eglGetProcAddress("eglCreateStreamKHR");
p->DestroyStreamKHR = (void *)eglGetProcAddress("eglDestroyStreamKHR");
p->StreamConsumerAcquireKHR =
(void *)eglGetProcAddress("eglStreamConsumerAcquireKHR");
p->StreamConsumerReleaseKHR =
(void *)eglGetProcAddress("eglStreamConsumerReleaseKHR");
p->StreamConsumerGLTextureExternalAttribsNV =
(void *)eglGetProcAddress("eglStreamConsumerGLTextureExternalAttribsNV");
p->CreateStreamProducerD3DTextureNV12ANGLE =
(void *)eglGetProcAddress("eglCreateStreamProducerD3DTextureNV12ANGLE");
p->StreamPostD3DTextureNV12ANGLE =
(void *)eglGetProcAddress("eglStreamPostD3DTextureNV12ANGLE");
if (!p->CreateStreamKHR || !p->DestroyStreamKHR ||
!p->StreamConsumerAcquireKHR || !p->StreamConsumerReleaseKHR ||
!p->StreamConsumerGLTextureExternalAttribsNV ||
!p->CreateStreamProducerD3DTextureNV12ANGLE ||
!p->StreamPostD3DTextureNV12ANGLE)
{
MP_VERBOSE(hw, "Loading EGL_ANGLE_stream_producer_d3d_texture_nv12\n");
p->CreateStreamKHR = (void *)eglGetProcAddress("eglCreateStreamKHR");
p->DestroyStreamKHR = (void *)eglGetProcAddress("eglDestroyStreamKHR");
p->StreamConsumerAcquireKHR =
(void *)eglGetProcAddress("eglStreamConsumerAcquireKHR");
p->StreamConsumerReleaseKHR =
(void *)eglGetProcAddress("eglStreamConsumerReleaseKHR");
p->StreamConsumerGLTextureExternalAttribsNV =
(void *)eglGetProcAddress("eglStreamConsumerGLTextureExternalAttribsNV");
p->CreateStreamProducerD3DTextureNV12ANGLE =
(void *)eglGetProcAddress("eglCreateStreamProducerD3DTextureNV12ANGLE");
p->StreamPostD3DTextureNV12ANGLE =
(void *)eglGetProcAddress("eglStreamPostD3DTextureNV12ANGLE");
if (!p->CreateStreamKHR || !p->DestroyStreamKHR ||
!p->StreamConsumerAcquireKHR || !p->StreamConsumerReleaseKHR ||
!p->StreamConsumerGLTextureExternalAttribsNV ||
!p->CreateStreamProducerD3DTextureNV12ANGLE ||
!p->StreamPostD3DTextureNV12ANGLE)
{
MP_ERR(hw, "Failed to load some EGLStream functions.\n");
goto fail;
}
static const char *es2_exts[] = {"GL_NV_EGL_stream_consumer_external", 0};
static const char *es3_exts[] = {"GL_NV_EGL_stream_consumer_external",
"GL_OES_EGL_image_external_essl3", 0};
hw->glsl_extensions = hw->gl->es == 200 ? es2_exts : es3_exts;
MP_ERR(hw, "Failed to load some EGLStream functions.\n");
goto fail;
}
if (use_native_device) {
PFNEGLQUERYDISPLAYATTRIBEXTPROC p_eglQueryDisplayAttribEXT =
(void *)eglGetProcAddress("eglQueryDisplayAttribEXT");
PFNEGLQUERYDEVICEATTRIBEXTPROC p_eglQueryDeviceAttribEXT =
(void *)eglGetProcAddress("eglQueryDeviceAttribEXT");
if (!p_eglQueryDisplayAttribEXT || !p_eglQueryDeviceAttribEXT)
goto fail;
static const char *es2_exts[] = {"GL_NV_EGL_stream_consumer_external", 0};
static const char *es3_exts[] = {"GL_NV_EGL_stream_consumer_external",
"GL_OES_EGL_image_external_essl3", 0};
hw->glsl_extensions = hw->gl->es == 200 ? es2_exts : es3_exts;
EGLAttrib device = 0;
if (!p_eglQueryDisplayAttribEXT(egl_display, EGL_DEVICE_EXT, &device))
goto fail;
EGLAttrib d3d_device = 0;
if (!p_eglQueryDeviceAttribEXT((EGLDeviceEXT)device,
EGL_D3D11_DEVICE_ANGLE, &d3d_device))
{
MP_ERR(hw, "Could not get EGL_D3D11_DEVICE_ANGLE from ANGLE.\n");
goto fail;
}
PFNEGLQUERYDISPLAYATTRIBEXTPROC p_eglQueryDisplayAttribEXT =
(void *)eglGetProcAddress("eglQueryDisplayAttribEXT");
PFNEGLQUERYDEVICEATTRIBEXTPROC p_eglQueryDeviceAttribEXT =
(void *)eglGetProcAddress("eglQueryDeviceAttribEXT");
if (!p_eglQueryDisplayAttribEXT || !p_eglQueryDeviceAttribEXT)
goto fail;
p->d3d11_device = (ID3D11Device *)d3d_device;
if (!p->d3d11_device)
goto fail;
ID3D11Device_AddRef(p->d3d11_device);
} else {
HANDLE d3d11_dll = GetModuleHandleW(L"d3d11.dll");
if (!d3d11_dll) {
MP_ERR(hw, "Failed to load D3D11 library\n");
goto fail;
}
PFN_D3D11_CREATE_DEVICE CreateDevice =
(void *)GetProcAddress(d3d11_dll, "D3D11CreateDevice");
if (!CreateDevice)
goto fail;
hr = CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL,
D3D11_CREATE_DEVICE_VIDEO_SUPPORT, NULL, 0,
D3D11_SDK_VERSION, &p->d3d11_device, NULL, NULL);
if (FAILED(hr)) {
MP_ERR(hw, "Failed to create D3D11 Device: %s\n",
mp_HRESULT_to_str(hr));
goto fail;
}
EGLAttrib device = 0;
if (!p_eglQueryDisplayAttribEXT(egl_display, EGL_DEVICE_EXT, &device))
goto fail;
EGLAttrib d3d_device = 0;
if (!p_eglQueryDeviceAttribEXT((EGLDeviceEXT)device,
EGL_D3D11_DEVICE_ANGLE, &d3d_device))
{
MP_ERR(hw, "Could not get EGL_D3D11_DEVICE_ANGLE from ANGLE.\n");
goto fail;
}
p->d3d11_device = (ID3D11Device *)d3d_device;
if (!p->d3d11_device)
goto fail;
ID3D11Device_AddRef(p->d3d11_device);
ID3D10Multithread *multithread;
hr = ID3D11Device_QueryInterface(p->d3d11_device, &IID_ID3D10Multithread,
(void **)&multithread);
@ -270,36 +185,6 @@ static int create(struct gl_hwdec *hw)
ID3D10Multithread_SetMultithreadProtected(multithread, TRUE);
ID3D10Multithread_Release(multithread);
hr = ID3D11Device_QueryInterface(p->d3d11_device, &IID_ID3D11VideoDevice,
(void **)&p->video_dev);
if (FAILED(hr))
goto fail;
ID3D11Device_GetImmediateContext(p->d3d11_device, &p->device_ctx);
if (!p->device_ctx)
goto fail;
hr = ID3D11DeviceContext_QueryInterface(p->device_ctx, &IID_ID3D11VideoContext,
(void **)&p->video_ctx);
if (FAILED(hr))
goto fail;
EGLint attrs[] = {
EGL_BUFFER_SIZE, 32,
EGL_RED_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_BLUE_SIZE, 8,
EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
EGL_ALPHA_SIZE, 8,
EGL_BIND_TO_TEXTURE_RGBA, EGL_TRUE,
EGL_NONE
};
EGLint count;
if (!eglChooseConfig(p->egl_display, attrs, &p->egl_config, 1, &count) ||
!count) {
MP_ERR(hw, "Failed to get EGL surface configuration\n");
goto fail;
}
p->hwctx = (struct mp_hwdec_ctx){
.type = HWDEC_D3D11VA,
.driver_name = hw->driver->name,
@ -313,18 +198,17 @@ fail:
return -1;
}
static int create_egl_stream(struct gl_hwdec *hw, struct mp_image_params *params)
static int reinit(struct gl_hwdec *hw, struct mp_image_params *params)
{
struct priv *p = hw->priv;
GL *gl = hw->gl;
if (params->hw_subfmt != IMGFMT_NV12)
destroy_objects(hw);
if (params->hw_subfmt != IMGFMT_NV12) {
MP_FATAL(hw, "Format not supported.\n");
return -1;
if (!p->CreateStreamKHR)
return -1; // extensions not available
MP_VERBOSE(hw, "Using EGL_KHR_stream path.\n");
}
// Hope that the given texture unit range is not "in use" by anything.
// The texture units need to be bound during init only, and are free for
@ -380,227 +264,18 @@ fail:
return -1;
}
static int reinit(struct gl_hwdec *hw, struct mp_image_params *params)
static int map_frame(struct gl_hwdec *hw, struct mp_image *hw_image,
struct gl_hwdec_frame *out_frame)
{
struct priv *p = hw->priv;
GL *gl = hw->gl;
HRESULT hr;
destroy_objects(hw);
p->image_params = *params;
// If this does not work, use the video process instead.
if (create_egl_stream(hw, params) >= 0)
return 0;
MP_VERBOSE(hw, "Using ID3D11VideoProcessor path.\n");
D3D11_TEXTURE2D_DESC texdesc = {
.Width = params->w,
.Height = params->h,
.Format = DXGI_FORMAT_B8G8R8A8_UNORM,
.MipLevels = 1,
.ArraySize = 1,
.SampleDesc = { .Count = 1 },
.Usage = D3D11_USAGE_DEFAULT,
.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE,
.MiscFlags = D3D11_RESOURCE_MISC_SHARED,
};
hr = ID3D11Device_CreateTexture2D(p->d3d11_device, &texdesc, NULL, &p->texture);
if (FAILED(hr)) {
MP_ERR(hw, "Failed to create texture: %s\n", mp_HRESULT_to_str(hr));
goto fail;
}
HANDLE share_handle = NULL;
IDXGIResource *res;
hr = IUnknown_QueryInterface(p->texture, &IID_IDXGIResource, (void **)&res);
if (FAILED(hr))
goto fail;
hr = IDXGIResource_GetSharedHandle(res, &share_handle);
if (FAILED(hr))
share_handle = NULL;
IDXGIResource_Release(res);
if (!share_handle)
goto fail;
EGLint attrib_list[] = {
EGL_WIDTH, params->w,
EGL_HEIGHT, params->h,
EGL_TEXTURE_FORMAT, EGL_TEXTURE_RGBA,
EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
EGL_NONE
};
p->egl_surface = eglCreatePbufferFromClientBuffer(
p->egl_display, EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE,
share_handle, p->egl_config, attrib_list);
if (p->egl_surface == EGL_NO_SURFACE) {
MP_ERR(hw, "Failed to create EGL surface\n");
goto fail;
}
gl->GenTextures(1, &p->gl_textures[0]);
gl->BindTexture(GL_TEXTURE_2D, p->gl_textures[0]);
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
eglBindTexImage(p->egl_display, p->egl_surface, EGL_BACK_BUFFER);
gl->BindTexture(GL_TEXTURE_2D, 0);
params->imgfmt = IMGFMT_RGB0;
return 0;
fail:
destroy_objects(hw);
return -1;
}
static int create_video_proc(struct gl_hwdec *hw, struct mp_image_params *params)
{
struct priv *p = hw->priv;
HRESULT hr;
destroy_video_proc(hw);
// Note: we skip any deinterlacing considerations for now.
D3D11_VIDEO_PROCESSOR_CONTENT_DESC vpdesc = {
.InputWidth = p->c_w,
.InputHeight = p->c_h,
.OutputWidth = params->w,
.OutputHeight = params->h,
};
hr = ID3D11VideoDevice_CreateVideoProcessorEnumerator(p->video_dev, &vpdesc,
&p->vp_enum);
if (FAILED(hr))
goto fail;
// Assume RateConversionIndex==0 always works fine for us.
hr = ID3D11VideoDevice_CreateVideoProcessor(p->video_dev, p->vp_enum, 0,
&p->video_proc);
if (FAILED(hr)) {
MP_ERR(hw, "Failed to create D3D11 video processor.\n");
goto fail;
}
D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outdesc = {
.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D,
};
hr = ID3D11VideoDevice_CreateVideoProcessorOutputView(p->video_dev,
(ID3D11Resource *)p->texture,
p->vp_enum, &outdesc,
&p->out_view);
if (FAILED(hr))
goto fail;
// Note: libavcodec does not support cropping left/top with hwaccel.
RECT src_rc = {
.right = params->w,
.bottom = params->h,
};
ID3D11VideoContext_VideoProcessorSetStreamSourceRect(p->video_ctx,
p->video_proc,
0, TRUE, &src_rc);
// This is supposed to stop drivers from fucking up the video quality.
ID3D11VideoContext_VideoProcessorSetStreamAutoProcessingMode(p->video_ctx,
p->video_proc,
0, FALSE);
if (params->colorspace != MP_CSP_BT_601 &&
params->colorspace != MP_CSP_BT_709)
{
MP_WARN(hw, "Unsupported video colorspace (%s/%s). Consider disabling "
"hardware decoding, or using --hwdec=d3d11va-copy to get "
"correct output.\n",
m_opt_choice_str(mp_csp_names, params->colorspace),
m_opt_choice_str(mp_csp_levels_names, params->colorlevels));
}
D3D11_VIDEO_PROCESSOR_COLOR_SPACE csp = {
.YCbCr_Matrix = params->colorspace != MP_CSP_BT_601,
.Nominal_Range = params->colorlevels == MP_CSP_LEVELS_TV ? 1 : 2,
};
ID3D11VideoContext_VideoProcessorSetStreamColorSpace(p->video_ctx,
p->video_proc,
0, &csp);
return 0;
fail:
destroy_video_proc(hw);
return -1;
}
static int map_frame_video_proc(struct gl_hwdec *hw, ID3D11Texture2D *d3d_tex,
int d3d_subindex, struct gl_hwdec_frame *out_frame)
{
struct priv *p = hw->priv;
HRESULT hr;
ID3D11VideoProcessorInputView *in_view = NULL;
D3D11_TEXTURE2D_DESC texdesc;
ID3D11Texture2D_GetDesc(d3d_tex, &texdesc);
if (!p->video_proc || p->c_w != texdesc.Width || p->c_h != texdesc.Height) {
p->c_w = texdesc.Width;
p->c_h = texdesc.Height;
if (create_video_proc(hw, &p->image_params) < 0)
return -1;
}
D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC indesc = {
.FourCC = 0, // huh?
.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D,
.Texture2D = {
.ArraySlice = d3d_subindex,
},
};
hr = ID3D11VideoDevice_CreateVideoProcessorInputView(p->video_dev,
(ID3D11Resource *)d3d_tex,
p->vp_enum, &indesc,
&in_view);
if (FAILED(hr)) {
MP_ERR(hw, "Could not create ID3D11VideoProcessorInputView\n");
if (!p->gl_textures[0])
return -1;
}
D3D11_VIDEO_PROCESSOR_STREAM stream = {
.Enable = TRUE,
.pInputSurface = in_view,
};
hr = ID3D11VideoContext_VideoProcessorBlt(p->video_ctx, p->video_proc,
p->out_view, 0, 1, &stream);
ID3D11VideoProcessorInputView_Release(in_view);
if (FAILED(hr)) {
MP_ERR(hw, "VideoProcessorBlt failed.\n");
ID3D11Texture2D *d3d_tex = (void *)hw_image->planes[1];
int d3d_subindex = (intptr_t)hw_image->planes[2];
if (!d3d_tex)
return -1;
}
// Make sure the texture is updated correctly on the shared context.
// I'm not sure if this is needed if the shared context is the same
// context. (ANGLE API does not allow not using sharing.)
ID3D11DeviceContext_Flush(p->device_ctx);
*out_frame = (struct gl_hwdec_frame){
.planes = {
{
.gl_texture = p->gl_textures[0],
.gl_target = GL_TEXTURE_2D,
.tex_w = p->image_params.w,
.tex_h = p->image_params.h,
},
},
};
return 0;
}
static int map_frame_egl_stream(struct gl_hwdec *hw, ID3D11Texture2D *d3d_tex,
int d3d_subindex, struct gl_hwdec_frame *out_frame)
{
struct priv *p = hw->priv;
EGLAttrib attrs[] = {
EGL_D3D_TEXTURE_SUBRESOURCE_ID_ANGLE, d3d_subindex,
@ -636,24 +311,6 @@ static int map_frame_egl_stream(struct gl_hwdec *hw, ID3D11Texture2D *d3d_tex,
return 0;
}
static int map_frame(struct gl_hwdec *hw, struct mp_image *hw_image,
struct gl_hwdec_frame *out_frame)
{
struct priv *p = hw->priv;
if (!p->gl_textures[0])
return -1;
ID3D11Texture2D *d3d_tex = (void *)hw_image->planes[1];
int d3d_subindex = (intptr_t)hw_image->planes[2];
if (!d3d_tex)
return -1;
return p->egl_stream
? map_frame_egl_stream(hw, d3d_tex, d3d_subindex, out_frame)
: map_frame_video_proc(hw, d3d_tex, d3d_subindex, out_frame);
}
static void unmap(struct gl_hwdec *hw)
{
struct priv *p = hw->priv;
@ -664,7 +321,7 @@ static void unmap(struct gl_hwdec *hw)
const struct gl_hwdec_driver gl_hwdec_d3d11egl = {
.name = "d3d11-egl",
.api = HWDEC_D3D11VA,
.imgfmt = IMGFMT_D3D11VA,
.imgfmt = IMGFMT_D3D11NV12,
.create = create,
.reinit = reinit,
.map_frame = map_frame,

View File

@ -0,0 +1,293 @@
/*
* This file is part of mpv.
*
* mpv is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* mpv is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with mpv. If not, see <http://www.gnu.org/licenses/>.
*/
#include <initguid.h>
#include <assert.h>
#include <windows.h>
#include <d3d11.h>
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include "angle_dynamic.h"
#include "common/common.h"
#include "osdep/timer.h"
#include "osdep/windows_utils.h"
#include "hwdec.h"
#include "video/hwdec.h"
#ifndef EGL_D3D_TEXTURE_SUBRESOURCE_ID_ANGLE
#define EGL_D3D_TEXTURE_SUBRESOURCE_ID_ANGLE 0x3AAB
#endif
// Private state of the d3d11-egl-rgb interop backend.
struct priv {
    struct mp_hwdec_ctx hwctx;          // device entry advertised to the decoder

    // Our own D3D11 device, created in create() via D3D11CreateDevice
    // (separate from ANGLE's device; frames cross over via share handles).
    ID3D11Device *d3d11_device;
    ID3D11DeviceContext *device_ctx;
    ID3D11VideoDevice *video_dev;
    ID3D11VideoContext *video_ctx;

    EGLDisplay egl_display;
    EGLConfig egl_config;               // RGBA8 pbuffer config picked in create()
    EGLSurface egl_surface;             // pbuffer wrapping the current frame; NULL when unmapped

    // NOTE(review): the following four fields appear unused in this file —
    // possibly leftovers from the video-processor code path; confirm.
    ID3D11Texture2D *texture;
    ID3D11VideoProcessor *video_proc;
    ID3D11VideoProcessorEnumerator *vp_enum;
    ID3D11VideoProcessorOutputView *out_view;

    GLuint gl_texture;                  // GL texture the pbuffer gets bound to
};
static void unmap(struct gl_hwdec *hw)
{
struct priv *p = hw->priv;
if (p->egl_surface) {
eglReleaseTexImage(p->egl_display, p->egl_surface, EGL_BACK_BUFFER);
eglDestroySurface(p->egl_display, p->egl_surface);
}
p->egl_surface = NULL;
}
// Drop all per-video GL/EGL state: release the EGL surface first, then
// delete the GL texture it was bound to.
static void destroy_objects(struct gl_hwdec *hw)
{
    struct priv *p = hw->priv;

    unmap(hw);

    GL *gl = hw->gl;
    gl->DeleteTextures(1, &p->gl_texture);
    p->gl_texture = 0;
}
// Tear down the whole backend: per-video GL/EGL objects first, then the
// D3D11 COM interfaces acquired in create(). Also used as the create()
// failure path, so every release is guarded against NULL.
static void destroy(struct gl_hwdec *hw)
{
    struct priv *p = hw->priv;

    destroy_objects(hw);

    // Unregister the device entry advertised to the decoder.
    hwdec_devices_remove(hw->devs, &p->hwctx);

    // Release in reverse order of acquisition.
    if (p->video_ctx)
        ID3D11VideoContext_Release(p->video_ctx);
    p->video_ctx = NULL;

    if (p->video_dev)
        ID3D11VideoDevice_Release(p->video_dev);
    p->video_dev = NULL;

    if (p->device_ctx)
        ID3D11DeviceContext_Release(p->device_ctx);
    p->device_ctx = NULL;

    if (p->d3d11_device)
        ID3D11Device_Release(p->d3d11_device);
    p->d3d11_device = NULL;
}
// Initialize the d3d11-egl-rgb interop backend: check for the required
// ANGLE extension, create our own D3D11 device (frames are shared with
// ANGLE via DXGI share handles), and pick an RGBA8 pbuffer EGLConfig
// used later by map_frame().
// Returns 0 on success, -1 on failure (backend unusable).
static int create(struct gl_hwdec *hw)
{
    if (!angle_load())
        return -1;

    EGLDisplay egl_display = eglGetCurrentDisplay();
    if (!egl_display)
        return -1;

    if (!eglGetCurrentContext())
        return -1;

    // Mapping works by importing share-handle textures as pbuffers;
    // without this extension the backend cannot work at all.
    const char *exts = eglQueryString(egl_display, EGL_EXTENSIONS);
    if (!exts || !strstr(exts, "EGL_ANGLE_d3d_share_handle_client_buffer"))
        return -1;

    HRESULT hr;
    struct priv *p = talloc_zero(hw, struct priv);
    hw->priv = p;

    p->egl_display = egl_display;

    HANDLE d3d11_dll = GetModuleHandleW(L"d3d11.dll");
    if (!d3d11_dll) {
        MP_ERR(hw, "Failed to load D3D11 library\n");
        goto fail;
    }

    PFN_D3D11_CREATE_DEVICE CreateDevice =
        (void *)GetProcAddress(d3d11_dll, "D3D11CreateDevice");
    if (!CreateDevice)
        goto fail;

    hr = CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL,
                      D3D11_CREATE_DEVICE_VIDEO_SUPPORT, NULL, 0,
                      D3D11_SDK_VERSION, &p->d3d11_device, NULL, NULL);
    if (FAILED(hr)) {
        MP_ERR(hw, "Failed to create D3D11 Device: %s\n",
               mp_HRESULT_to_str(hr));
        goto fail;
    }

    // Decoding happens on a separate thread; enable D3D's internal locking.
    ID3D10Multithread *multithread;
    hr = ID3D11Device_QueryInterface(p->d3d11_device, &IID_ID3D10Multithread,
                                     (void **)&multithread);
    if (FAILED(hr)) {
        // Bug fix: do not call Release() here. On failure QueryInterface
        // sets the out pointer to NULL (COM contract), so releasing it
        // would dereference NULL through the vtable.
        MP_ERR(hw, "Failed to get Multithread interface: %s\n",
               mp_HRESULT_to_str(hr));
        goto fail;
    }
    ID3D10Multithread_SetMultithreadProtected(multithread, TRUE);
    ID3D10Multithread_Release(multithread);

    hr = ID3D11Device_QueryInterface(p->d3d11_device, &IID_ID3D11VideoDevice,
                                     (void **)&p->video_dev);
    if (FAILED(hr))
        goto fail;

    ID3D11Device_GetImmediateContext(p->d3d11_device, &p->device_ctx);
    if (!p->device_ctx)
        goto fail;

    hr = ID3D11DeviceContext_QueryInterface(p->device_ctx, &IID_ID3D11VideoContext,
                                            (void **)&p->video_ctx);
    if (FAILED(hr))
        goto fail;

    // Config for the pbuffers created per-frame in map_frame(): 32 bit
    // RGBA, bindable as a GL texture.
    EGLint attrs[] = {
        EGL_BUFFER_SIZE, 32,
        EGL_RED_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_BLUE_SIZE, 8,
        EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
        EGL_ALPHA_SIZE, 8,
        EGL_BIND_TO_TEXTURE_RGBA, EGL_TRUE,
        EGL_NONE
    };
    EGLint count;
    if (!eglChooseConfig(p->egl_display, attrs, &p->egl_config, 1, &count) ||
        !count) {
        MP_ERR(hw, "Failed to get EGL surface configuration\n");
        goto fail;
    }

    // Advertise our device so the decoder allocates surfaces on it.
    p->hwctx = (struct mp_hwdec_ctx){
        .type = HWDEC_D3D11VA,
        .driver_name = hw->driver->name,
        .ctx = p->d3d11_device,
    };
    hwdec_devices_add(hw->devs, &p->hwctx);

    return 0;
fail:
    destroy(hw);
    return -1;
}
static int reinit(struct gl_hwdec *hw, struct mp_image_params *params)
{
struct priv *p = hw->priv;
GL *gl = hw->gl;
destroy_objects(hw);
gl->GenTextures(1, &p->gl_texture);
gl->BindTexture(GL_TEXTURE_2D, p->gl_texture);
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
gl->BindTexture(GL_TEXTURE_2D, 0);
params->imgfmt = IMGFMT_RGB0;
return 0;
}
// Export the frame's D3D11 texture through its DXGI shared handle, wrap
// the handle in an EGL pbuffer, and bind that pbuffer to our GL texture.
// Requires the texture to have been created with
// D3D11_RESOURCE_MISC_SHARED and not be a texture array slice (the
// IMGFMT_D3D11RGB contract).
// NOTE(review): assumes unmap() ran for the previous frame — a second
// map_frame without an intervening unmap would leak the old egl_surface;
// confirm the VO always pairs map/unmap.
static int map_frame(struct gl_hwdec *hw, struct mp_image *hw_image,
                     struct gl_hwdec_frame *out_frame)
{
    struct priv *p = hw->priv;
    GL *gl = hw->gl;
    HRESULT hr;

    if (!p->gl_texture)
        return -1;

    // By convention, plane 1 of a D3D11 hw image carries the texture.
    ID3D11Texture2D *d3d_tex = (void *)hw_image->planes[1];
    if (!d3d_tex)
        return -1;

    IDXGIResource *res;
    hr = IUnknown_QueryInterface(d3d_tex, &IID_IDXGIResource, (void **)&res);
    if (FAILED(hr))
        return -1;

    HANDLE share_handle = NULL;
    hr = IDXGIResource_GetSharedHandle(res, &share_handle);
    if (FAILED(hr))
        share_handle = NULL;

    IDXGIResource_Release(res);

    if (!share_handle)
        return -1;

    D3D11_TEXTURE2D_DESC texdesc;
    ID3D11Texture2D_GetDesc(d3d_tex, &texdesc);

    // Wrap the shared texture in a pbuffer of matching size.
    EGLint attrib_list[] = {
        EGL_WIDTH, texdesc.Width,
        EGL_HEIGHT, texdesc.Height,
        EGL_TEXTURE_FORMAT, EGL_TEXTURE_RGBA,
        EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
        EGL_NONE
    };
    p->egl_surface = eglCreatePbufferFromClientBuffer(
        p->egl_display, EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE,
        share_handle, p->egl_config, attrib_list);
    if (p->egl_surface == EGL_NO_SURFACE) {
        MP_ERR(hw, "Failed to create EGL surface\n");
        return -1;
    }

    // Attach the pbuffer's contents to our GL texture.
    gl->BindTexture(GL_TEXTURE_2D, p->gl_texture);
    eglBindTexImage(p->egl_display, p->egl_surface, EGL_BACK_BUFFER);
    gl->BindTexture(GL_TEXTURE_2D, 0);

    *out_frame = (struct gl_hwdec_frame){
        .planes = {
            {
                .gl_texture = p->gl_texture,
                .gl_target = GL_TEXTURE_2D,
                .tex_w = texdesc.Width,
                .tex_h = texdesc.Height,
            },
        },
    };
    return 0;
}
// OpenGL interop backend importing D3D11 RGB surfaces into ANGLE via the
// EGL_ANGLE_d3d_share_handle_client_buffer extension (pbuffer binding).
// Only accepts IMGFMT_D3D11RGB, i.e. shared, non-array RGB textures.
const struct gl_hwdec_driver gl_hwdec_d3d11eglrgb = {
    .name = "d3d11-egl-rgb",
    .api = HWDEC_D3D11VA,
    .imgfmt = IMGFMT_D3D11RGB,
    .create = create,
    .reinit = reinit,
    .map_frame = map_frame,
    .unmap = unmap,
    .destroy = destroy,
};

View File

@ -346,6 +346,7 @@ def build(ctx):
( "video/out/opengl/formats.c", "gl" ),
( "video/out/opengl/hwdec.c", "gl" ),
( "video/out/opengl/hwdec_d3d11egl.c", "egl-angle" ),
( "video/out/opengl/hwdec_d3d11eglrgb.c","egl-angle" ),
( "video/out/opengl/hwdec_dxva2.c", "gl-win32" ),
( "video/out/opengl/hwdec_dxva2gldx.c", "gl-dxinterop" ),
( "video/out/opengl/hwdec_dxva2egl.c", "egl-angle" ),