avconv_vdpau: use the hwcontext API to simplify code

This commit is contained in:
Anton Khirnov 2016-02-02 11:21:36 +01:00
parent a001ce31bc
commit bd49be885e

View File

@@ -30,30 +30,32 @@
#include "libavutil/avassert.h"
#include "libavutil/buffer.h"
#include "libavutil/frame.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_vdpau.h"
#include "libavutil/pixfmt.h"
// Per-stream VDPAU decoding state, stored in InputStream.hwaccel_ctx.
// NOTE(review): this span renders both sides of the diff merged together —
// the raw X11/VDPAU handles and resolved callback pointers are the fields
// this commit removes, while hw_frames_ctx/tmp_frame are the fields it
// keeps; verify against the upstream commit before relying on the layout.
typedef struct VDPAUContext {
// X11 connection the VDPAU device was created on (old-side field).
Display *dpy;
VdpDevice device;
VdpDecoder decoder;
VdpGetProcAddress *get_proc_address;
// VDPAU entry points resolved via get_proc_address (old-side fields,
// superseded by the hwcontext API).
VdpGetErrorString *get_error_string;
VdpGetInformationString *get_information_string;
VdpDeviceDestroy *device_destroy;
VdpVideoSurfaceCreate *video_surface_create;
VdpVideoSurfaceDestroy *video_surface_destroy;
VdpVideoSurfaceGetBitsYCbCr *video_surface_get_bits;
VdpVideoSurfaceGetParameters *video_surface_get_parameters;
VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *video_surface_query;
// New-side field: frames context that owns the VdpVideoSurface pool.
AVBufferRef *hw_frames_ctx;
// Scratch frame used when downloading surface data to system memory.
AVFrame *tmp_frame;
// Software pixel format / VDPAU YCbCr format chosen for readback
// (old-side fields — format negotiation moves into hwcontext).
enum AVPixelFormat pix_fmt;
VdpYCbCrFormat vdpau_format;
} VDPAUContext;
// Per-device private data hung off AVHWDeviceContext.user_opaque so that
// device_free() can tear down what vdpau_alloc() created.
typedef struct VDPAUHWDevicePriv {
// Destructor for the VdpDevice, resolved via get_proc_address; may be
// NULL if init failed before it was resolved.
VdpDeviceDestroy *device_destroy;
// X11 display the device was created on; closed when the device context
// is freed.
Display *dpy;
} VDPAUHWDevicePriv;
// AVHWDeviceContext.free callback: destroys the VdpDevice, closes the X11
// display, and releases the private struct allocated in vdpau_alloc().
static void device_free(AVHWDeviceContext *ctx)
{
AVVDPAUDeviceContext *hwctx = ctx->hwctx;
VDPAUHWDevicePriv *priv = ctx->user_opaque;
// Guarded because init can fail before the destroy callback is resolved.
if (priv->device_destroy)
priv->device_destroy(hwctx->device);
if (priv->dpy)
XCloseDisplay(priv->dpy);
av_freep(&priv);
}
// Hwaccel teardown hook for this input stream; the function body continues
// past the elided diff hunk below.
static void vdpau_uninit(AVCodecContext *s)
{
InputStream *ist = s->opaque;
@@ -63,128 +65,43 @@ static void vdpau_uninit(AVCodecContext *s)
// Continuation of vdpau_uninit() after the hunk header: detach the hwaccel
// callbacks and free the per-stream state. `ctx` is declared in lines
// elided from this view.
ist->hwaccel_get_buffer = NULL;
ist->hwaccel_retrieve_data = NULL;
// NOTE(review): merged diff — the next four lines are the old-side manual
// device/display teardown this commit deletes in favour of device_free().
if (ctx->device_destroy)
ctx->device_destroy(ctx->device);
if (ctx->dpy)
XCloseDisplay(ctx->dpy);
// New-side cleanup: dropping hw_frames_ctx releases the surface pool and,
// transitively, the device reference.
av_buffer_unref(&ctx->hw_frames_ctx);
av_frame_free(&ctx->tmp_frame);
av_freep(&ist->hwaccel_ctx);
av_freep(&s->hwaccel_context);
}
// AVBuffer free callback for manually-allocated surfaces (old-side code,
// removed by this commit — surface lifetime is now managed by the frames
// context).
static void vdpau_release_buffer(void *opaque, uint8_t *data)
{
VdpVideoSurface surface = *(VdpVideoSurface*)data;
VDPAUContext *ctx = opaque;
ctx->video_surface_destroy(surface);
// Frees the heap-allocated VdpVideoSurface handle itself.
av_freep(&data);
}
// hwaccel get_buffer callback: allocates an AV_PIX_FMT_VDPAU frame for the
// decoder. NOTE(review): this span merges both diff sides — the manual
// surface allocation below is the old-side body this commit deletes; the
// single av_hwframe_get_buffer() call at the end is the new-side body. As
// rendered, the code after the first `return 0;` is unreachable.
static int vdpau_get_buffer(AVCodecContext *s, AVFrame *frame, int flags)
{
InputStream *ist = s->opaque;
VDPAUContext *ctx = ist->hwaccel_ctx;
VdpVideoSurface *surface;
VdpStatus err;
VdpChromaType chroma;
uint32_t width, height;
av_assert0(frame->format == AV_PIX_FMT_VDPAU);
if (av_vdpau_get_surface_parameters(s, &chroma, &width, &height))
return AVERROR(ENOSYS);
// Old-side path: heap-allocate a surface handle and wrap it in an
// AVBuffer whose free callback (vdpau_release_buffer) destroys it.
surface = av_malloc(sizeof(*surface));
if (!surface)
return AVERROR(ENOMEM);
frame->buf[0] = av_buffer_create((uint8_t*)surface, sizeof(*surface),
vdpau_release_buffer, ctx,
AV_BUFFER_FLAG_READONLY);
if (!frame->buf[0]) {
av_freep(&surface);
return AVERROR(ENOMEM);
}
// properly we should keep a pool of surfaces instead of creating
// them anew for each frame, but since we don't care about speed
// much in this code, we don't bother
err = ctx->video_surface_create(ctx->device, chroma, width, height,
surface);
if (err != VDP_STATUS_OK) {
av_log(NULL, AV_LOG_ERROR, "Error allocating a VDPAU video surface: %s\n",
ctx->get_error_string(err));
av_buffer_unref(&frame->buf[0]);
return AVERROR_UNKNOWN;
}
// The surface handle travels in data[3], as the VDPAU pixfmt requires.
frame->data[3] = (uint8_t*)(uintptr_t)*surface;
return 0;
// New-side body: one call into the frames-context pool replaces all of
// the above.
return av_hwframe_get_buffer(ctx->hw_frames_ctx, frame, 0);
}
// hwaccel retrieve_data callback: downloads the decoded VDPAU surface into
// ctx->tmp_frame in system memory, then moves the result into `frame`.
// NOTE(review): merged diff span — the manual video_surface_get_bits path
// is the old-side body this commit deletes; av_hwframe_transfer_data() is
// the new-side replacement. The duplicate `ret` declarations are a diff
// artifact (old `int ret, chroma_type;` vs new `int ret;`).
static int vdpau_retrieve_data(AVCodecContext *s, AVFrame *frame)
{
VdpVideoSurface surface = (VdpVideoSurface)(uintptr_t)frame->data[3];
InputStream *ist = s->opaque;
VDPAUContext *ctx = ist->hwaccel_ctx;
VdpStatus err;
int ret, chroma_type;
int ret;
// Old-side: query the surface for its chroma type and dimensions.
err = ctx->video_surface_get_parameters(surface, &chroma_type,
&ctx->tmp_frame->width,
&ctx->tmp_frame->height);
if (err != VDP_STATUS_OK) {
av_log(NULL, AV_LOG_ERROR, "Error getting surface parameters: %s\n",
ctx->get_error_string(err));
return AVERROR_UNKNOWN;
}
ctx->tmp_frame->format = ctx->pix_fmt;
ret = av_frame_get_buffer(ctx->tmp_frame, 32);
// New-side: hwcontext handles the surface download in one call.
ret = av_hwframe_transfer_data(ctx->tmp_frame, frame, 0);
if (ret < 0)
return ret;
ctx->tmp_frame->width = frame->width;
ctx->tmp_frame->height = frame->height;
// Old-side: read the surface bits directly into tmp_frame's planes.
err = ctx->video_surface_get_bits(surface, ctx->vdpau_format,
(void * const *)ctx->tmp_frame->data,
ctx->tmp_frame->linesize);
if (err != VDP_STATUS_OK) {
av_log(NULL, AV_LOG_ERROR, "Error retrieving frame data from VDPAU: %s\n",
ctx->get_error_string(err));
ret = AVERROR_UNKNOWN;
goto fail;
}
// YV12 stores the planes in V,U order; swap to match AV_PIX_FMT_YUV420P.
if (ctx->vdpau_format == VDP_YCBCR_FORMAT_YV12)
FFSWAP(uint8_t*, ctx->tmp_frame->data[1], ctx->tmp_frame->data[2]);
// Preserve timestamps/metadata from the hardware frame.
ret = av_frame_copy_props(ctx->tmp_frame, frame);
if (ret < 0)
goto fail;
if (ret < 0) {
av_frame_unref(ctx->tmp_frame);
return ret;
}
// Replace the hardware frame with the downloaded software frame.
av_frame_unref(frame);
av_frame_move_ref(frame, ctx->tmp_frame);
return 0;
fail:
av_frame_unref(ctx->tmp_frame);
return ret;
}
// Candidate VDPAU readback formats paired with the matching software pixel
// format, tried in order (old-side table, removed by this commit — format
// selection now happens inside the hwcontext code).
static const int vdpau_formats[][2] = {
{ VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
{ VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12 },
{ VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
{ VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
};
// Set up VDPAU decoding for this stream: open the X11 display, create the
// VdpDevice, wrap it in an AVHWDeviceContext/AVHWFramesContext, and bind
// the codec context. Body continues past the elided diff hunks below.
static int vdpau_alloc(AVCodecContext *s)
{
InputStream *ist = s->opaque;
@@ -192,12 +109,26 @@ static int vdpau_alloc(AVCodecContext *s)
// Locals and initial allocations for vdpau_alloc(). NOTE(review): merged
// diff span — `int i` belongs to the old-side format loop; `ret`, the raw
// device/get_proc_address temporaries and the hwcontext pointers are the
// new side.
VDPAUContext *ctx;
const char *display, *vendor;
VdpStatus err;
int i;
int ret;
VdpDevice device;
VdpGetProcAddress *get_proc_address;
VdpGetInformationString *get_information_string;
VDPAUHWDevicePriv *device_priv = NULL;
AVBufferRef *device_ref = NULL;
AVHWDeviceContext *device_ctx;
AVVDPAUDeviceContext *device_hwctx;
AVHWFramesContext *frames_ctx;
ctx = av_mallocz(sizeof(*ctx));
if (!ctx)
return AVERROR(ENOMEM);
// Per-device private data, eventually owned and freed by device_free().
device_priv = av_mallocz(sizeof(*device_priv));
if (!device_priv)
goto fail;
// Install the hwaccel hooks on the input stream.
ist->hwaccel_ctx = ctx;
ist->hwaccel_uninit = vdpau_uninit;
ist->hwaccel_get_buffer = vdpau_get_buffer;
@@ -207,16 +138,16 @@ static int vdpau_alloc(AVCodecContext *s)
// Continuation after the hunk header: check the tmp_frame allocation,
// open the X11 display and create the VDPAU device on it.
if (!ctx->tmp_frame)
goto fail;
// NOTE(review): merged diff — the ctx->dpy / ctx->device lines are the
// old side; the device_priv->dpy / local-variable duplicates immediately
// following each are the new-side replacements.
ctx->dpy = XOpenDisplay(ist->hwaccel_device);
if (!ctx->dpy) {
device_priv->dpy = XOpenDisplay(ist->hwaccel_device);
if (!device_priv->dpy) {
av_log(NULL, loglevel, "Cannot open the X11 display %s.\n",
XDisplayName(ist->hwaccel_device));
goto fail;
}
display = XDisplayString(ctx->dpy);
display = XDisplayString(device_priv->dpy);
err = vdp_device_create_x11(ctx->dpy, XDefaultScreen(ctx->dpy), &ctx->device,
&ctx->get_proc_address);
err = vdp_device_create_x11(device_priv->dpy, XDefaultScreen(device_priv->dpy),
&device, &get_proc_address);
if (err != VDP_STATUS_OK) {
av_log(NULL, loglevel, "VDPAU device creation on X11 display %s failed.\n",
display);
@@ -226,49 +157,52 @@ static int vdpau_alloc(AVCodecContext *s)
// Continuation: resolve VDPAU entry points via get_proc_address, then
// (new side) build the AVHWDeviceContext/AVHWFramesContext and bind the
// decoder. NOTE(review): merged diff span — the macro contains both the
// old-side `ctx->...` lines and their new-side local-variable
// replacements.
#define GET_CALLBACK(id, result) \
do { \
void *tmp; \
err = ctx->get_proc_address(ctx->device, id, &tmp); \
err = get_proc_address(device, id, &tmp); \
if (err != VDP_STATUS_OK) { \
av_log(NULL, loglevel, "Error getting the " #id " callback.\n"); \
goto fail; \
} \
ctx->result = tmp; \
result = tmp; \
} while (0)
// Old-side: resolve the full set of surface/query callbacks into ctx.
GET_CALLBACK(VDP_FUNC_ID_GET_ERROR_STRING, get_error_string);
GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY, device_destroy);
GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE, video_surface_create);
GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY, video_surface_destroy);
GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, video_surface_get_bits);
GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_PARAMETERS, video_surface_get_parameters);
GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
video_surface_query);
// New-side: only two callbacks are still needed here.
GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY, device_priv->device_destroy);
// Old-side: probe which readback format the device supports for 4:2:0.
for (i = 0; i < FF_ARRAY_ELEMS(vdpau_formats); i++) {
VdpBool supported;
err = ctx->video_surface_query(ctx->device, VDP_CHROMA_TYPE_420,
vdpau_formats[i][0], &supported);
if (err != VDP_STATUS_OK) {
av_log(NULL, loglevel,
"Error querying VDPAU surface capabilities: %s\n",
ctx->get_error_string(err));
goto fail;
}
if (supported)
break;
}
if (i == FF_ARRAY_ELEMS(vdpau_formats)) {
av_log(NULL, loglevel,
"No supported VDPAU format for retrieving the data.\n");
return AVERROR(EINVAL);
}
ctx->vdpau_format = vdpau_formats[i][0];
ctx->pix_fmt = vdpau_formats[i][1];
// New-side: wrap the raw device in an AVHWDeviceContext, attaching the
// private teardown data via user_opaque/free.
device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VDPAU);
if (!device_ref)
goto fail;
device_ctx = (AVHWDeviceContext*)device_ref->data;
device_hwctx = device_ctx->hwctx;
device_ctx->user_opaque = device_priv;
device_ctx->free = device_free;
device_hwctx->device = device;
device_hwctx->get_proc_address = get_proc_address;
if (av_vdpau_bind_context(s, ctx->device, ctx->get_proc_address, 0))
// Ownership of device_priv has passed to the device context.
device_priv = NULL;
ret = av_hwdevice_ctx_init(device_ref);
if (ret < 0)
goto fail;
ctx->get_information_string(&vendor);
// New-side: allocate the frames context (surface pool) for decoding.
ctx->hw_frames_ctx = av_hwframe_ctx_alloc(device_ref);
if (!ctx->hw_frames_ctx)
goto fail;
// The frames context holds its own device reference; drop ours.
av_buffer_unref(&device_ref);
frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
frames_ctx->format = AV_PIX_FMT_VDPAU;
frames_ctx->sw_format = s->sw_pix_fmt;
frames_ctx->width = s->coded_width;
frames_ctx->height = s->coded_height;
ret = av_hwframe_ctx_init(ctx->hw_frames_ctx);
if (ret < 0)
goto fail;
// Hand the device and proc-address resolver to the lavc VDPAU hwaccel.
if (av_vdpau_bind_context(s, device, get_proc_address, 0))
goto fail;
get_information_string(&vendor);
av_log(NULL, AV_LOG_VERBOSE, "Using VDPAU -- %s -- on X11 display %s, "
"to decode input stream #%d:%d.\n", vendor,
display, ist->file_index, ist->st->index);
@@ -278,6 +212,14 @@ do {
// Error path for vdpau_alloc(): log, tear down anything partially created,
// and return an error so the caller falls back to software decoding.
fail:
av_log(NULL, loglevel, "VDPAU init failed for stream #%d:%d.\n",
ist->file_index, ist->st->index);
// If device_priv is still non-NULL, the AVHWDeviceContext never took
// ownership, so destroy the device and display manually here.
if (device_priv) {
if (device_priv->device_destroy)
device_priv->device_destroy(device);
if (device_priv->dpy)
XCloseDisplay(device_priv->dpy);
}
av_freep(&device_priv);
av_buffer_unref(&device_ref);
vdpau_uninit(s);
return AVERROR(EINVAL);
}