mpv/video/out/opengl/hwdec_d3d11egl.c

/*
 * This file is part of mpv.
 *
 * mpv is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * mpv is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with mpv. If not, see <http://www.gnu.org/licenses/>.
 */
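
// ANGLE (OpenGL ES on D3D11) interop: decoded ID3D11Texture2D surfaces are
// posted to an EGLStream (producer side) and consumed as GL external
// textures, so the GL renderer can sample video frames directly.
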
#include <assert.h>
#include <windows.h>
#include <d3d11.h>
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include "angle_dynamic.h"
#include "common/common.h"
#include "osdep/timer.h"
#include "osdep/windows_utils.h"
#include "video/out/gpu/hwdec.h"
#include "ra_gl.h"
#include "video/hwdec.h"
#include "video/d3d.h"
#ifndef EGL_D3D_TEXTURE_SUBRESOURCE_ID_ANGLE
#define EGL_D3D_TEXTURE_SUBRESOURCE_ID_ANGLE 0x33AB
#endif

struct priv_owner {
    struct mp_hwdec_ctx hwctx;
    ID3D11Device *d3d11_device;
    EGLDisplay egl_display;

    // EGL_KHR_stream
    EGLStreamKHR (EGLAPIENTRY *CreateStreamKHR)(EGLDisplay dpy,
                                                const EGLint *attrib_list);
    EGLBoolean (EGLAPIENTRY *DestroyStreamKHR)(EGLDisplay dpy,
                                               EGLStreamKHR stream);

    // EGL_KHR_stream_consumer_gltexture
    EGLBoolean (EGLAPIENTRY *StreamConsumerAcquireKHR)
        (EGLDisplay dpy, EGLStreamKHR stream);
    EGLBoolean (EGLAPIENTRY *StreamConsumerReleaseKHR)
        (EGLDisplay dpy, EGLStreamKHR stream);

    // EGL_NV_stream_consumer_gltexture_yuv
    EGLBoolean (EGLAPIENTRY *StreamConsumerGLTextureExternalAttribsNV)
        (EGLDisplay dpy, EGLStreamKHR stream, EGLAttrib *attrib_list);

    // EGL_ANGLE_stream_producer_d3d_texture
    EGLBoolean (EGLAPIENTRY *CreateStreamProducerD3DTextureANGLE)
        (EGLDisplay dpy, EGLStreamKHR stream, const EGLAttrib *attrib_list);
    EGLBoolean (EGLAPIENTRY *StreamPostD3DTextureANGLE)
        (EGLDisplay dpy, EGLStreamKHR stream, void *texture,
         const EGLAttrib *attrib_list);
};
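
// Per-mapper state: the EGLStream used for the interop and one GL consumer
// texture per plane (luma + chroma for NV12/P010).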
struct priv {
    EGLStreamKHR egl_stream;
    GLuint gl_textures[2];
};
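
// Unregister the hwdec device and drop the D3D11 device reference taken in
// init().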
static void uninit(struct ra_hwdec *hw)
{
    struct priv_owner *p = hw->priv;

    hwdec_devices_remove(hw->devs, &p->hwctx);
    if (p->d3d11_device)
        ID3D11Device_Release(p->d3d11_device);
}
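
// Check for the required ANGLE/EGL extensions, resolve the EGLStream entry
// points, and fetch the ID3D11Device ANGLE renders with, so decoded frames
// can be shared with the GL context.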
static int init(struct ra_hwdec *hw)
{
    struct priv_owner *p = hw->priv;
    HRESULT hr;

    if (!ra_is_gl(hw->ra))
        return -1;
    if (!angle_load())
        return -1;

    EGLDisplay egl_display = eglGetCurrentDisplay();
    if (!egl_display)
        return -1;

    if (!eglGetCurrentContext())
        return -1;

    GL *gl = ra_gl_get(hw->ra);

    const char *exts = eglQueryString(egl_display, EGL_EXTENSIONS);
    if (!exts || !strstr(exts, "EGL_ANGLE_d3d_share_handle_client_buffer") ||
        !gl_check_extension(exts, "EGL_ANGLE_stream_producer_d3d_texture") ||
        !(strstr(gl->extensions, "GL_OES_EGL_image_external_essl3") ||
          gl->es == 200) ||
        !strstr(exts, "EGL_EXT_device_query") ||
        !(gl->mpgl_caps & MPGL_CAP_TEX_RG))
        return -1;

    p->egl_display = egl_display;

    p->CreateStreamKHR = (void *)eglGetProcAddress("eglCreateStreamKHR");
    p->DestroyStreamKHR = (void *)eglGetProcAddress("eglDestroyStreamKHR");
    p->StreamConsumerAcquireKHR =
        (void *)eglGetProcAddress("eglStreamConsumerAcquireKHR");
    p->StreamConsumerReleaseKHR =
        (void *)eglGetProcAddress("eglStreamConsumerReleaseKHR");
    p->StreamConsumerGLTextureExternalAttribsNV =
        (void *)eglGetProcAddress("eglStreamConsumerGLTextureExternalAttribsNV");
    p->CreateStreamProducerD3DTextureANGLE =
        (void *)eglGetProcAddress("eglCreateStreamProducerD3DTextureANGLE");
    p->StreamPostD3DTextureANGLE =
        (void *)eglGetProcAddress("eglStreamPostD3DTextureANGLE");

    if (!p->CreateStreamKHR || !p->DestroyStreamKHR ||
        !p->StreamConsumerAcquireKHR || !p->StreamConsumerReleaseKHR ||
        !p->StreamConsumerGLTextureExternalAttribsNV ||
        !p->CreateStreamProducerD3DTextureANGLE ||
        !p->StreamPostD3DTextureANGLE)
    {
        MP_ERR(hw, "Failed to load some EGLStream functions.\n");
        goto fail;
    }

    static const char *es2_exts[] = {"GL_NV_EGL_stream_consumer_external", 0};
    static const char *es3_exts[] = {"GL_NV_EGL_stream_consumer_external",
                                     "GL_OES_EGL_image_external_essl3", 0};
    hw->glsl_extensions = gl->es == 200 ? es2_exts : es3_exts;

    PFNEGLQUERYDISPLAYATTRIBEXTPROC p_eglQueryDisplayAttribEXT =
        (void *)eglGetProcAddress("eglQueryDisplayAttribEXT");
    PFNEGLQUERYDEVICEATTRIBEXTPROC p_eglQueryDeviceAttribEXT =
        (void *)eglGetProcAddress("eglQueryDeviceAttribEXT");
    if (!p_eglQueryDisplayAttribEXT || !p_eglQueryDeviceAttribEXT)
        goto fail;

    EGLAttrib device = 0;
    if (!p_eglQueryDisplayAttribEXT(egl_display, EGL_DEVICE_EXT, &device))
        goto fail;
    EGLAttrib d3d_device = 0;
    if (!p_eglQueryDeviceAttribEXT((EGLDeviceEXT)device,
                                   EGL_D3D11_DEVICE_ANGLE, &d3d_device))
    {
        MP_ERR(hw, "Could not get EGL_D3D11_DEVICE_ANGLE from ANGLE.\n");
        goto fail;
    }

    p->d3d11_device = (ID3D11Device *)d3d_device;
    if (!p->d3d11_device)
        goto fail;
    ID3D11Device_AddRef(p->d3d11_device);

    if (!d3d11_check_decoding(p->d3d11_device)) {
        MP_VERBOSE(hw, "D3D11 video decoding not supported on this system.\n");
        goto fail;
    }
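
    // Enable thread-safe access to the device; the video decoder may use it
    // from other threads.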
    ID3D10Multithread *multithread;
    hr = ID3D11Device_QueryInterface(p->d3d11_device, &IID_ID3D10Multithread,
                                     (void **)&multithread);
    if (FAILED(hr)) {
        MP_ERR(hw, "Failed to get Multithread interface: %s\n",
               mp_HRESULT_to_str(hr));
        goto fail;
    }
    ID3D10Multithread_SetMultithreadProtected(multithread, TRUE);
    ID3D10Multithread_Release(multithread);

    static const int subfmts[] = {IMGFMT_NV12, IMGFMT_P010, 0};
    p->hwctx = (struct mp_hwdec_ctx){
        .driver_name = hw->driver->name,
        .av_device_ref = d3d11_wrap_device_ref(p->d3d11_device),
        .supported_formats = subfmts,
        .hw_imgfmt = IMGFMT_D3D11,
    };
    hwdec_devices_add(hw->devs, &p->hwctx);

    return 0;
fail:
    return -1;
}
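
// Free the per-mapper EGLStream and the GL consumer textures.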
static void mapper_uninit(struct ra_hwdec_mapper *mapper)
{
    struct priv_owner *o = mapper->owner->priv;
    struct priv *p = mapper->priv;
    GL *gl = ra_gl_get(mapper->ra);

    if (p->egl_stream)
        o->DestroyStreamKHR(o->egl_display, p->egl_stream);
    p->egl_stream = 0;

    gl->DeleteTextures(2, p->gl_textures);
}
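
// Set up the interop for one stream of frames: create an EGLStream, attach a
// GL external texture consumer for each plane, and attach a D3D11 texture
// producer that mapper_map() will post decoder surfaces to.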
static int mapper_init(struct ra_hwdec_mapper *mapper)
{
    struct priv_owner *o = mapper->owner->priv;
    struct priv *p = mapper->priv;
    GL *gl = ra_gl_get(mapper->ra);

    struct ra_imgfmt_desc desc = {0};
    ra_get_imgfmt_desc(mapper->ra, mapper->src_params.hw_subfmt, &desc);

    // ANGLE hardcodes the list of accepted formats. This is a subset.
    if ((mapper->src_params.hw_subfmt != IMGFMT_NV12 &&
         mapper->src_params.hw_subfmt != IMGFMT_P010) ||
        desc.num_planes < 1 || desc.num_planes > 2)
    {
        MP_FATAL(mapper, "Format not supported.\n");
        return -1;
    }

    mapper->dst_params = mapper->src_params;
    mapper->dst_params.imgfmt = mapper->src_params.hw_subfmt;
    mapper->dst_params.hw_subfmt = 0;

    // The texture units need to be bound during init only, and are free for
    // use again after the initialization here is done.
    int texunits = 0; // [texunits, texunits + num_planes)
    int num_planes = desc.num_planes;
    int gl_target = GL_TEXTURE_EXTERNAL_OES;
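
    // Create the stream, then describe the consumer side: a YUV buffer with
    // num_planes planes, each bound to its own texture unit via a
    // (EGL_YUV_PLANE<n>_TEXTURE_UNIT_NV, unit) pair, terminated by EGL_NONE.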
    p->egl_stream = o->CreateStreamKHR(o->egl_display, (EGLint[]){EGL_NONE});
    if (!p->egl_stream)
        goto fail;

    EGLAttrib attrs[(2 + 2 + 1) * 2] = {
        EGL_COLOR_BUFFER_TYPE, EGL_YUV_BUFFER_EXT,
        EGL_YUV_NUMBER_OF_PLANES_EXT, num_planes,
    };

    for (int n = 0; n < num_planes; n++) {
        gl->ActiveTexture(GL_TEXTURE0 + texunits + n);
        gl->GenTextures(1, &p->gl_textures[n]);
        gl->BindTexture(gl_target, p->gl_textures[n]);
        gl->TexParameteri(gl_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        gl->TexParameteri(gl_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        gl->TexParameteri(gl_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        gl->TexParameteri(gl_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        attrs[(2 + n) * 2 + 0] = EGL_YUV_PLANE0_TEXTURE_UNIT_NV + n;
        attrs[(2 + n) * 2 + 1] = texunits + n;
    }
    attrs[(2 + num_planes) * 2 + 0] = EGL_NONE;

    if (!o->StreamConsumerGLTextureExternalAttribsNV(o->egl_display, p->egl_stream,
                                                     attrs))
        goto fail;

    if (!o->CreateStreamProducerD3DTextureANGLE(o->egl_display, p->egl_stream,
                                                (EGLAttrib[]){EGL_NONE}))
        goto fail;

    for (int n = 0; n < num_planes; n++) {
        gl->ActiveTexture(GL_TEXTURE0 + texunits + n);
        gl->BindTexture(gl_target, 0);
    }
    gl->ActiveTexture(GL_TEXTURE0);

    return 0;
fail:
    gl->ActiveTexture(GL_TEXTURE0);
    MP_ERR(mapper, "Failed to create EGLStream\n");
    return -1;
}
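
// Post the decoder's D3D11 texture (and array slice index) to the stream and
// acquire it into the GL consumer textures, then expose those as ra_tex.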
static int mapper_map(struct ra_hwdec_mapper *mapper)
{
    struct priv_owner *o = mapper->owner->priv;
    struct priv *p = mapper->priv;
    ID3D11Texture2D *d3d_tex = (void *)mapper->src->planes[0];
    int d3d_subindex = (intptr_t)mapper->src->planes[1];

    if (!d3d_tex)
        return -1;
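
    // The decoder typically outputs slices of a texture array; the
    // subresource ID tells ANGLE which slice of d3d_tex to bind.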
    EGLAttrib attrs[] = {
        EGL_D3D_TEXTURE_SUBRESOURCE_ID_ANGLE, d3d_subindex,
        EGL_NONE,
    };
    if (!o->StreamPostD3DTextureANGLE(o->egl_display, p->egl_stream,
                                      (void *)d3d_tex, attrs))
    {
        // ANGLE changed the enum ID of this without warning at one point.
        attrs[0] = attrs[0] == 0x33AB ? 0x3AAB : 0x33AB;
        if (!o->StreamPostD3DTextureANGLE(o->egl_display, p->egl_stream,
                                          (void *)d3d_tex, attrs))
            return -1;
    }

    if (!o->StreamConsumerAcquireKHR(o->egl_display, p->egl_stream))
        return -1;

    D3D11_TEXTURE2D_DESC texdesc;
    ID3D11Texture2D_GetDesc(d3d_tex, &texdesc);
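
    // Wrap the consumer textures as ra_tex. Plane 1 is the NV12/P010 chroma
    // plane: subsampled by 2 in both dimensions and with 2 components (RG)
    // per texel vs. 1 (R) for luma.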
    for (int n = 0; n < 2; n++) {
        struct ra_tex_params params = {
            .dimensions = 2,
            .w = texdesc.Width / (n ? 2 : 1),
            .h = texdesc.Height / (n ? 2 : 1),
            .d = 1,
            .format = ra_find_unorm_format(mapper->ra, 1, n ? 2 : 1),
            .render_src = true,
            .src_linear = true,
            .external_oes = true,
        };
        if (!params.format)
            return -1;

        mapper->tex[n] = ra_create_wrapped_tex(mapper->ra, &params,
                                               p->gl_textures[n]);
        if (!mapper->tex[n])
            return -1;
    }

    return 0;
}
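
// Drop the wrapped ra_tex objects and release the frame back to the stream.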
static void mapper_unmap(struct ra_hwdec_mapper *mapper)
{
    struct priv_owner *o = mapper->owner->priv;
    struct priv *p = mapper->priv;

    for (int n = 0; n < 2; n++)
        ra_tex_free(mapper->ra, &mapper->tex[n]);
    if (p->egl_stream)
        o->StreamConsumerReleaseKHR(o->egl_display, p->egl_stream);
}
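
// hwdec driver entry: accepts IMGFMT_D3D11 frames and maps them through
// ANGLE's EGLStream D3D11 texture interop.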
const struct ra_hwdec_driver ra_hwdec_d3d11egl = {
    .name = "d3d11-egl",
    .priv_size = sizeof(struct priv_owner),
    .imgfmts = {IMGFMT_D3D11, 0},
    .init = init,
    .uninit = uninit,
    .mapper = &(const struct ra_hwdec_mapper_driver){
        .priv_size = sizeof(struct priv),
        .init = mapper_init,
        .uninit = mapper_uninit,
        .map = mapper_map,
        .unmap = mapper_unmap,
    },
};