lavc/options_table: set min to -1 for timecode_frame_start

timecode_frame_start is set to -1 in avcodec_get_context_defaults3(), which is
below the option's declared minimum of 0, so the AVOptions API complains about
the out-of-range value.
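
For illustration, a minimal sketch of one way to trigger the complaint (an
assumption based on the generic AVOptions range check in libavutil/opt.c,
which rejects out-of-range values with AVERROR(ERANGE); the snippet is
illustrative, not part of the patch):

    #include <libavcodec/avcodec.h>
    #include <libavutil/opt.h>

    int main(void)
    {
        /* avcodec_alloc_context3() applies the context defaults, which
         * leave timecode_frame_start at -1. */
        AVCodecContext *ctx = avcodec_alloc_context3(NULL);
        if (!ctx)
            return 1;

        /* With min = 0 in the options table, handing that same -1 back
         * through the AVOptions API fails the range check and returns
         * AVERROR(ERANGE); with min = -1 (this patch) it succeeds. */
        int ret = av_opt_set_int(ctx, "timecode_frame_start", -1, 0);

        avcodec_free_context(&ctx);
        return ret < 0;
    }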

Signed-off-by: Lukasz Marek <lukasz.m.luki2@gmail.com>
Lukasz Marek 2014-11-11 08:15:17 +01:00
parent 08ee02deca
commit c727006616
1 changed file with 1 addition and 1 deletion

@@ -398,7 +398,7 @@ static const AVOption avcodec_options[] = {
 {"compression_level", NULL, OFFSET(compression_level), AV_OPT_TYPE_INT, {.i64 = FF_COMPRESSION_DEFAULT }, INT_MIN, INT_MAX, V|A|E},
 {"min_prediction_order", NULL, OFFSET(min_prediction_order), AV_OPT_TYPE_INT, {.i64 = -1 }, INT_MIN, INT_MAX, A|E},
 {"max_prediction_order", NULL, OFFSET(max_prediction_order), AV_OPT_TYPE_INT, {.i64 = -1 }, INT_MIN, INT_MAX, A|E},
-{"timecode_frame_start", "GOP timecode frame start number, in non-drop-frame format", OFFSET(timecode_frame_start), AV_OPT_TYPE_INT64, {.i64 = 0 }, 0, INT64_MAX, V|E},
+{"timecode_frame_start", "GOP timecode frame start number, in non-drop-frame format", OFFSET(timecode_frame_start), AV_OPT_TYPE_INT64, {.i64 = 0 }, -1, INT64_MAX, V|E},
 #if FF_API_REQUEST_CHANNELS
 {"request_channels", "set desired number of audio channels", OFFSET(request_channels), AV_OPT_TYPE_INT, {.i64 = DEFAULT }, 0, INT_MAX, A|D},
 #endif