libavcodec/qsvdec: Add more pixel format support to qsvdec

The QSV decoder can only output NV12 and P010 directly to system memory.
For other formats, the frame has to be downloaded from the QSV format to
system memory. Add the remaining supported formats to qsvdec so they can
be output directly as well.

Signed-off-by: Wenbin Chen <wenbin.chen@intel.com>
Author:    Wenbin Chen  2022-04-06 16:48:03 +08:00
Committer: Haihao Xiang
parent 0a0847dbac
commit e0ae810da3
3 changed files with 56 additions and 6 deletions
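Note (an aside, not part of the patch): the "download" the commit message refers to is the generic hwframe copy in libavutil. Before this change, an application wanting these formats in system memory had to decode to an AV_PIX_FMT_QSV surface and copy it out itself, roughly along these lines (helper name illustrative, error handling minimal):

#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

/* Copy a decoded AV_PIX_FMT_QSV frame into a newly allocated software frame.
 * The destination pixel format is taken from the hw frames context
 * (e.g. NV12, P010 or YUYV422, depending on the stream). */
static int download_qsv_frame(const AVFrame *hw_frame, AVFrame **out)
{
    AVFrame *sw_frame = av_frame_alloc();
    int ret;

    if (!sw_frame)
        return AVERROR(ENOMEM);

    ret = av_hwframe_transfer_data(sw_frame, hw_frame, 0);
    if (ret < 0) {
        av_frame_free(&sw_frame);
        return ret;
    }

    *out = sw_frame;
    return 0;
}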

libavcodec/qsv.c

@@ -244,6 +244,42 @@ int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc)
     }
 }
 
+int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
+{
+    switch (frame->format) {
+    case AV_PIX_FMT_NV12:
+    case AV_PIX_FMT_P010:
+        surface->Data.Y  = frame->data[0];
+        surface->Data.UV = frame->data[1];
+        /* The SDK checks Data.V when using system memory for VP9 encoding */
+        surface->Data.V = surface->Data.UV + 1;
+        break;
+    case AV_PIX_FMT_X2RGB10LE:
+    case AV_PIX_FMT_BGRA:
+        surface->Data.B = frame->data[0];
+        surface->Data.G = frame->data[0] + 1;
+        surface->Data.R = frame->data[0] + 2;
+        surface->Data.A = frame->data[0] + 3;
+        break;
+    case AV_PIX_FMT_YUYV422:
+        surface->Data.Y = frame->data[0];
+        surface->Data.U = frame->data[0] + 1;
+        surface->Data.V = frame->data[0] + 3;
+        break;
+    case AV_PIX_FMT_Y210:
+        surface->Data.Y16 = (mfxU16 *)frame->data[0];
+        surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
+        surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
+        break;
+    default:
+        return AVERROR(ENOSYS);
+    }
+    surface->Data.PitchLow = frame->linesize[0];
+
+    return 0;
+}
+
 int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
 {
     int i;

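Aside (not from the patch): the offsets in the YUYV422 and Y210 cases above follow the packed component order Y0 U Y1 V per pixel pair; Y210 uses the same order with 16-bit samples, hence the mfxU16 pointer arithmetic. A tiny standalone sketch of that byte layout:

#include <stdint.h>
#include <stdio.h>

/* One YUYV422 pixel pair is stored as Y0 U Y1 V, so U sits 1 byte and V
 * sits 3 bytes after the first luma sample. */
int main(void)
{
    uint8_t line[4] = { 0x10, 0x80, 0x20, 0x90 };   /* Y0 U Y1 V */
    const uint8_t *y = line;       /* what Data.Y would point at */
    const uint8_t *u = line + 1;   /* what Data.U would point at */
    const uint8_t *v = line + 3;   /* what Data.V would point at */

    /* Advancing by 4 bytes (2 luma samples) reaches the next pixel pair. */
    printf("Y0=%#x U=%#x Y1=%#x V=%#x\n", y[0], u[0], y[2], v[0]);
    return 0;
}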
libavcodec/qsv_internal.h

@@ -147,4 +147,7 @@ int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame);
 void ff_qsv_frame_add_ext_param(AVCodecContext *avctx, QSVFrame *frame,
                                 mfxExtBuffer *param);
 
+int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface);
+
 #endif /* AVCODEC_QSV_INTERNAL_H */

libavcodec/qsvdec.c

@@ -131,21 +131,28 @@ static int qsv_get_continuous_buffer(AVCodecContext *avctx, AVFrame *frame,
         frame->linesize[0] = FFALIGN(avctx->width, 128);
         break;
     case AV_PIX_FMT_P010:
+    case AV_PIX_FMT_YUYV422:
         frame->linesize[0] = 2 * FFALIGN(avctx->width, 128);
         break;
+    case AV_PIX_FMT_Y210:
+        frame->linesize[0] = 4 * FFALIGN(avctx->width, 128);
+        break;
     default:
         av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format.\n");
         return AVERROR(EINVAL);
     }
 
-    frame->linesize[1] = frame->linesize[0];
     frame->buf[0] = av_buffer_pool_get(pool);
     if (!frame->buf[0])
         return AVERROR(ENOMEM);
 
     frame->data[0] = frame->buf[0]->data;
-    frame->data[1] = frame->data[0] +
-        frame->linesize[0] * FFALIGN(avctx->height, 64);
+    if (avctx->pix_fmt == AV_PIX_FMT_NV12 ||
+        avctx->pix_fmt == AV_PIX_FMT_P010) {
+        frame->linesize[1] = frame->linesize[0];
+        frame->data[1] = frame->data[0] +
+            frame->linesize[0] * FFALIGN(avctx->height, 64);
+    }
 
     ret = ff_attach_decode_data(frame);
     if (ret < 0)
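Aside (not from the patch): with the alignment used above, NV12 and P010 keep a second, interleaved chroma plane at data[1], while YUYV422 and Y210 are single packed planes with no data[1] at all. A small worked example of the resulting sizes for a hypothetical 1920x1080 stream:

#include <stdio.h>

/* Same rounding as FFALIGN(x, a) in libavutil. */
#define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
    int w = 1920, h = 1080;

    /* P010: two planes, 2 bytes per sample. */
    int p010_pitch     = 2 * ALIGN(w, 128);          /* 3840 */
    int p010_data1_off = p010_pitch * ALIGN(h, 64);  /* 3840 * 1088 = 4177920 */

    /* Y210: one packed plane, 4 bytes per pixel, no data[1]. */
    int y210_pitch = 4 * ALIGN(w, 128);              /* 7680 */

    printf("P010: linesize[0]=%d, data[1] offset=%d\n", p010_pitch, p010_data1_off);
    printf("Y210: linesize[0]=%d (single plane)\n", y210_pitch);
    return 0;
}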
@@ -425,9 +432,11 @@ static int alloc_frame(AVCodecContext *avctx, QSVContext *q, QSVFrame *frame)
     if (frame->frame->format == AV_PIX_FMT_QSV) {
         frame->surface = *(mfxFrameSurface1*)frame->frame->data[3];
     } else {
-        frame->surface.Data.PitchLow = frame->frame->linesize[0];
-        frame->surface.Data.Y = frame->frame->data[0];
-        frame->surface.Data.UV = frame->frame->data[1];
+        ret = ff_qsv_map_frame_to_surface(frame->frame, &frame->surface);
+        if (ret < 0) {
+            av_log(avctx, AV_LOG_ERROR, "map frame to surface failed.\n");
+            return ret;
+        }
     }
 
     frame->surface.Info = q->frame_info;
@@ -1010,6 +1019,8 @@ const FFCodec ff_##x##_qsv_decoder = { \
     .p.priv_class = &x##_qsv_class, \
     .p.pix_fmts   = (const enum AVPixelFormat[]){ AV_PIX_FMT_NV12, \
                                                   AV_PIX_FMT_P010, \
+                                                  AV_PIX_FMT_YUYV422, \
+                                                  AV_PIX_FMT_Y210, \
                                                   AV_PIX_FMT_QSV, \
                                                   AV_PIX_FMT_NONE }, \
     .hw_configs = qsv_hw_configs, \
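
Note (an aside, not part of the patch): with YUYV422 and Y210 listed here, a caller that wants frames delivered straight to system memory can simply return the software format its get_format callback is offered, instead of selecting AV_PIX_FMT_QSV and downloading afterwards. A minimal sketch (callback name illustrative):

#include <libavcodec/avcodec.h>

/* Pick the first non-hardware format the decoder offers, e.g. NV12, P010,
 * YUYV422 or Y210, so decoded frames land directly in system memory. */
static enum AVPixelFormat prefer_system_memory(AVCodecContext *avctx,
                                               const enum AVPixelFormat *fmts)
{
    for (const enum AVPixelFormat *p = fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p != AV_PIX_FMT_QSV)
            return *p;
    }
    return fmts[0];
}

/* usage: avctx->get_format = prefer_system_memory;  (before avcodec_open2) */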