cuvid: Pass bit depth information to decoder

Although cuvid can only output 8bit, it can consume HEVC Main10 if
the bit depth is set properly. In cases where >8bit is not supported,
this change is still beneficial as the decoder will fail to be
created instead of plowing through and decoding as 8bit.
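As a rough sketch of what the change amounts to on the API side (not the FFmpeg code: the create_decoder_for helper, the fixed surface count and the plain nvcuvid.h include are assumptions; the struct fields and the cuvidCreateDecoder call come from the NVIDIA Video Codec SDK):

#include <string.h>
#include <nvcuvid.h>

/* Minimal sketch: forward the parser-reported bit depth into the decoder
 * creation parameters so an unsupported depth fails at creation time. */
static CUresult create_decoder_for(const CUVIDEOFORMAT *format,
                                   CUvideodecoder *out_decoder)
{
    CUVIDDECODECREATEINFO cuinfo;

    memset(&cuinfo, 0, sizeof(cuinfo));
    cuinfo.CodecType           = format->codec;
    cuinfo.ChromaFormat        = format->chroma_format;
    cuinfo.OutputFormat        = cudaVideoSurfaceFormat_NV12; /* cuvid still outputs 8bit */
    cuinfo.ulWidth             = format->coded_width;
    cuinfo.ulHeight            = format->coded_height;
    cuinfo.ulTargetWidth       = cuinfo.ulWidth;
    cuinfo.ulTargetHeight      = cuinfo.ulHeight;
    cuinfo.ulNumDecodeSurfaces = 25; /* stand-in for the file's MAX_FRAME_COUNT */
    cuinfo.ulNumOutputSurfaces = 1;
    cuinfo.ulCreationFlags     = cudaVideoCreate_PreferCUVID;
    cuinfo.DeinterlaceMode     = cudaVideoDeinterlaceMode_Weave;

    /* The point of this commit: report the stream's real bit depth
     * (2 for HEVC Main10). On hardware without >8bit support,
     * cuvidCreateDecoder() then fails here instead of silently
     * decoding the stream as 8bit. */
    cuinfo.bitDepthMinus8 = format->bit_depth_luma_minus8;

    return cuvidCreateDecoder(out_decoder, &cuinfo);
}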
Philip Langdale 2016-09-03 12:52:52 -07:00
parent 843aff3cf7
commit 289a6bb8b1
1 changed file with 2 additions and 0 deletions

@@ -181,6 +181,7 @@ static int CUDAAPI cuvid_handle_video_sequence(void *opaque, CUVIDEOFORMAT* format)
     cuinfo.ulNumDecodeSurfaces = MAX_FRAME_COUNT;
     cuinfo.ulNumOutputSurfaces = 1;
     cuinfo.ulCreationFlags = cudaVideoCreate_PreferCUVID;
+    cuinfo.bitDepthMinus8 = format->bit_depth_luma_minus8;
 
     if (format->progressive_sequence) {
         ctx->deint_mode = cuinfo.DeinterlaceMode = cudaVideoDeinterlaceMode_Weave;
@@ -573,6 +574,7 @@ static int cuvid_test_dummy_decoder(AVCodecContext *avctx, CUVIDPARSERPARAMS *cu
     cuinfo.ulNumDecodeSurfaces = MAX_FRAME_COUNT;
     cuinfo.ulNumOutputSurfaces = 1;
     cuinfo.ulCreationFlags = cudaVideoCreate_PreferCUVID;
+    cuinfo.bitDepthMinus8 = 0;
 
     cuinfo.DeinterlaceMode = cudaVideoDeinterlaceMode_Weave;
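The second hunk keeps bitDepthMinus8 at 0 because the dummy decoder is only a capability probe. A compact sketch of that probe-and-destroy pattern (probe_cuvid_support, the 1280x720 size and the surface count are assumptions, not the FFmpeg code; the nvcuvid calls are from the Video Codec SDK):

#include <string.h>
#include <nvcuvid.h>

/* Create a throwaway decoder with conservative 8bit parameters, then
 * destroy it immediately; success means the codec is usable at all. */
static int probe_cuvid_support(cudaVideoCodec codec)
{
    CUVIDDECODECREATEINFO cuinfo;
    CUvideodecoder test_dec = NULL;

    memset(&cuinfo, 0, sizeof(cuinfo));
    cuinfo.CodecType           = codec;
    cuinfo.ChromaFormat        = cudaVideoChromaFormat_420;
    cuinfo.OutputFormat        = cudaVideoSurfaceFormat_NV12;
    cuinfo.ulWidth             = 1280;
    cuinfo.ulHeight            = 720;
    cuinfo.ulTargetWidth       = cuinfo.ulWidth;
    cuinfo.ulTargetHeight      = cuinfo.ulHeight;
    cuinfo.ulNumDecodeSurfaces = 25;
    cuinfo.ulNumOutputSurfaces = 1;
    cuinfo.ulCreationFlags     = cudaVideoCreate_PreferCUVID;
    cuinfo.bitDepthMinus8      = 0; /* probe only needs plain 8bit decoding */
    cuinfo.DeinterlaceMode     = cudaVideoDeinterlaceMode_Weave;

    if (cuvidCreateDecoder(&test_dec, &cuinfo) != CUDA_SUCCESS)
        return -1;                 /* codec not usable on this GPU/driver */

    cuvidDestroyDecoder(test_dec);
    return 0;
}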