ffmpeg: explicitly handle sub2video subpicture initialization

Each time the sub2video structure is initialized, the sub2video
subpicture is initialized together with the first received heartbeat.
The heartbeat's PTS is utilized as the subpicture start time.

Additionally, add some documentation on the stages.
Jan Ekström 2019-02-23 01:48:08 +02:00
parent 39fbd06314
commit 7ba3ea212f
3 changed files with 24 additions and 9 deletions
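
The core of the change is the PTS selection in sub2video_update()'s
"no subtitle yet" branch. Below is a minimal, self-contained sketch of
that rule; the Sub2VideoState struct and blank_subpicture_start() helper
are illustrative stand-ins rather than the actual fftools code, and only
the initialize/end_pts logic mirrors this commit.

/* Simplified model of the new initialization behaviour. */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

typedef struct Sub2VideoState {
    int64_t end_pts;     /* end time of the currently shown subpicture */
    unsigned initialize; /* set when the sub2video structure was (re-)initialized */
} Sub2VideoState;

/* Mirrors the pts selection: while initializing, the heartbeat PTS becomes
   the blank subpicture's start time; afterwards the previous subpicture's
   end time is used as the fall-back value. */
static int64_t blank_subpicture_start(Sub2VideoState *s, int64_t heartbeat_pts)
{
    int64_t pts   = s->initialize ? heartbeat_pts : s->end_pts;
    s->end_pts    = INT64_MAX; /* show until the next real subpicture arrives */
    s->initialize = 0;         /* the first update clears the flag */
    return pts;
}

int main(void)
{
    Sub2VideoState s = { .end_pts = INT64_MIN, .initialize = 1 };

    /* First heartbeat after (re-)initialization: its PTS is used directly
       instead of the meaningless INT64_MIN left over from initialization. */
    printf("first heartbeat -> start pts %" PRId64 "\n",
           blank_subpicture_start(&s, 1000));

    /* Later heartbeats fall back to the previous subpicture's end time. */
    s.end_pts = 2000;
    printf("later heartbeat -> start pts %" PRId64 "\n",
           blank_subpicture_start(&s, 9999));
    return 0;
}

Using an explicit initialize flag keeps the "first update" case separate from
the old heuristic, which inferred it from frame->data[0] and end_pts in the
heartbeat path.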

fftools/ffmpeg.c

@@ -237,7 +237,7 @@ static void sub2video_push_ref(InputStream *ist, int64_t pts)
     }
 }
 
-void sub2video_update(InputStream *ist, AVSubtitle *sub)
+void sub2video_update(InputStream *ist, int64_t heartbeat_pts, AVSubtitle *sub)
 {
     AVFrame *frame = ist->sub2video.frame;
     int8_t *dst;
@@ -254,7 +254,12 @@ void sub2video_update(InputStream *ist, AVSubtitle *sub)
                                  AV_TIME_BASE_Q, ist->st->time_base);
         num_rects = sub->num_rects;
     } else {
-        pts       = ist->sub2video.end_pts;
+        /* If we are initializing the system, utilize current heartbeat
+           PTS as the start time, and show until the following subpicture
+           is received. Otherwise, utilize the previous subpicture's end time
+           as the fall-back value. */
+        pts       = ist->sub2video.initialize ?
+                    heartbeat_pts : ist->sub2video.end_pts;
         end_pts   = INT64_MAX;
         num_rects = 0;
     }
@@ -269,6 +274,7 @@ void sub2video_update(InputStream *ist, AVSubtitle *sub)
         sub2video_copy_rect(dst, dst_linesize, frame->width, frame->height, sub->rects[i]);
     sub2video_push_ref(ist, pts);
     ist->sub2video.end_pts = end_pts;
+    ist->sub2video.initialize = 0;
 }
 
 static void sub2video_heartbeat(InputStream *ist, int64_t pts)
@@ -291,9 +297,11 @@ static void sub2video_heartbeat(InputStream *ist, int64_t pts)
         /* do not send the heartbeat frame if the subtitle is already ahead */
         if (pts2 <= ist2->sub2video.last_pts)
             continue;
-        if (pts2 >= ist2->sub2video.end_pts ||
-            (!ist2->sub2video.frame->data[0] && ist2->sub2video.end_pts < INT64_MAX))
-            sub2video_update(ist2, NULL);
+        if (pts2 >= ist2->sub2video.end_pts || ist2->sub2video.initialize)
+            /* if we have hit the end of the current displayed subpicture,
+               or if we need to initialize the system, update the
+               overlayed subpicture and its start/end times */
+            sub2video_update(ist2, pts2 + 1, NULL);
         for (j = 0, nb_reqs = 0; j < ist2->nb_filters; j++)
             nb_reqs += av_buffersrc_get_nb_failed_requests(ist2->filters[j]->filter);
         if (nb_reqs)
@@ -307,7 +315,7 @@ static void sub2video_flush(InputStream *ist)
     int ret;
 
     if (ist->sub2video.end_pts < INT64_MAX)
-        sub2video_update(ist, NULL);
+        sub2video_update(ist, INT64_MAX, NULL);
     for (i = 0; i < ist->nb_filters; i++) {
         ret = av_buffersrc_add_frame(ist->filters[i]->filter, NULL);
         if (ret != AVERROR_EOF && ret < 0)
@@ -2507,7 +2515,7 @@ static int transcode_subtitles(InputStream *ist, AVPacket *pkt, int *got_output,
         return ret;
 
     if (ist->sub2video.frame) {
-        sub2video_update(ist, &subtitle);
+        sub2video_update(ist, INT64_MIN, &subtitle);
     } else if (ist->nb_filters) {
         if (!ist->sub2video.sub_queue)
             ist->sub2video.sub_queue = av_fifo_alloc(8 * sizeof(AVSubtitle));

fftools/ffmpeg.h

@@ -348,6 +348,7 @@ typedef struct InputStream {
         AVFifoBuffer *sub_queue;    ///< queue of AVSubtitle* before filter init
         AVFrame *frame;
         int w, h;
+        unsigned int initialize; ///< marks if sub2video_update should force an initialization
     } sub2video;
 
     int dr1;
@@ -645,7 +646,7 @@ int filtergraph_is_simple(FilterGraph *fg);
 int init_simple_filtergraph(InputStream *ist, OutputStream *ost);
 int init_complex_filtergraph(FilterGraph *fg);
 
-void sub2video_update(InputStream *ist, AVSubtitle *sub);
+void sub2video_update(InputStream *ist, int64_t heartbeat_pts, AVSubtitle *sub);
 
 int ifilter_parameters_from_frame(InputFilter *ifilter, const AVFrame *frame);

fftools/ffmpeg_filter.c

@@ -740,6 +740,12 @@ static int sub2video_prepare(InputStream *ist, InputFilter *ifilter)
         return AVERROR(ENOMEM);
     ist->sub2video.last_pts = INT64_MIN;
     ist->sub2video.end_pts  = INT64_MIN;
+
+    /* sub2video structure has been (re-)initialized.
+       Mark it as such so that the system will be
+       initialized with the first received heartbeat. */
+    ist->sub2video.initialize = 1;
+
     return 0;
 }
@@ -1168,7 +1174,7 @@ int configure_filtergraph(FilterGraph *fg)
             while (av_fifo_size(ist->sub2video.sub_queue)) {
                 AVSubtitle tmp;
                 av_fifo_generic_read(ist->sub2video.sub_queue, &tmp, sizeof(tmp), NULL);
-                sub2video_update(ist, &tmp);
+                sub2video_update(ist, INT64_MIN, &tmp);
                 avsubtitle_free(&tmp);
             }
         }