revert 12156

Log:
Make timestamp interpolation work with mpeg2 field pictures.
Cleaner/simpler solutions are welcome.
----
An IMHO cleaner solution has been implemented.

Originally committed as revision 12162 to svn://svn.ffmpeg.org/ffmpeg/trunk
Michael Niedermayer 2008-02-20 20:24:38 +00:00
parent bdd745da60
commit 635fbcb17f
4 changed files with 11 additions and 25 deletions

libavcodec/avcodec.h

@@ -2788,7 +2788,6 @@ typedef struct AVCodecParserContext {
     /* video info */
     int pict_type; /* XXX: Put it back in AVCodecContext. */
     int repeat_pict; /* XXX: Put it back in AVCodecContext. */
-    int parity;
     int64_t pts;     /* pts of the current frame */
     int64_t dts;     /* dts of the current frame */

libavcodec/mpegvideo_parser.c

@@ -36,7 +36,6 @@ static void mpegvideo_extract_headers(AVCodecParserContext *s,
     int horiz_size_ext, vert_size_ext, bit_rate_ext;
 //FIXME replace the crap with get_bits()
     s->repeat_pict = 0;
-    s->parity = 0;
     buf_end = buf + buf_size;
     while (buf < buf_end) {
         start_code= -1;

libavformat/avformat.h

@@ -337,8 +337,8 @@ typedef struct AVStream {
     struct AVCodecParserContext *parser;
     int64_t cur_dts;
-    int last_IP_duration[2];
-    int64_t last_IP_pts[2];
+    int last_IP_duration;
+    int64_t last_IP_pts;
     /* av_seek_frame() support */
     AVIndexEntry *index_entries; /**< only used if the format does not
                                       support seeking natively */

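As a side-by-side illustration of the avformat.h change (this is not FFmpeg code; the struct names are made up for the comparison), the per-stream state kept for timestamp interpolation amounts to:

/* r12156: one slot per field parity, so an mpeg2 frame coded as two
 * field pictures could track separate timestamps for each field */
struct per_field_state {
    int     last_ip_duration[2];
    int64_t last_ip_pts[2];
};

/* after this revert: a single slot for the last I/P frame */
struct per_frame_state {
    int     last_ip_duration;
    int64_t last_ip_pts;
};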
libavformat/utils.c

@@ -703,30 +703,21 @@ static void compute_pkt_fields(AVFormatContext *s, AVStream *st,
     /* interpolate PTS and DTS if they are not present */
     if(delay <=1){
         if (presentation_delayed) {
-            int fields= 2 + (pc ? pc->repeat_pict : 0);
-            int field_duration= pkt->duration / fields;
-            int parity= pc ? pc->parity : 0;
             /* DTS = decompression timestamp */
             /* PTS = presentation timestamp */
             if (pkt->dts == AV_NOPTS_VALUE)
-                pkt->dts = st->last_IP_pts[parity];
+                pkt->dts = st->last_IP_pts;
             update_initial_timestamps(s, pkt->stream_index, pkt->dts, pkt->pts);
             if (pkt->dts == AV_NOPTS_VALUE)
                 pkt->dts = st->cur_dts;
             /* this is tricky: the dts must be incremented by the duration
                of the frame we are displaying, i.e. the last I- or P-frame */
-            st->cur_dts = pkt->dts;
-            for(i=0; i<fields; i++){
-                int p= (parity + i)&1;
-                if(!st->last_IP_duration[p])
-                    st->last_IP_duration[p]= field_duration;
-                st->cur_dts += st->last_IP_duration[p];
-                st->last_IP_pts[p]= pkt->pts;
-                if(pkt->pts != AV_NOPTS_VALUE)
-                    st->last_IP_pts[p] += i*field_duration;
-                st->last_IP_duration[p]= field_duration;
-            }
+            if (st->last_IP_duration == 0)
+                st->last_IP_duration = pkt->duration;
+            st->cur_dts = pkt->dts + st->last_IP_duration;
+            st->last_IP_duration = pkt->duration;
+            st->last_IP_pts= pkt->pts;
             /* cannot compute PTS if not present (we can compute it only
                by knowing the future */
         } else if(pkt->pts != AV_NOPTS_VALUE || pkt->dts != AV_NOPTS_VALUE || pkt->duration){
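For readers following the hunk above, here is a minimal, self-contained sketch of the frame-level DTS interpolation this revert restores. It is not FFmpeg code and omits update_initial_timestamps() and the delay/presentation_delayed bookkeeping; the names demo_stream_state, interpolate_dts and NOPTS are illustrative stand-ins for AVStream, compute_pkt_fields() and AV_NOPTS_VALUE.

#include <stdint.h>
#include <stdio.h>

#define NOPTS INT64_MIN  /* stand-in for AV_NOPTS_VALUE */

/* per-stream state, mirroring the scalar last_IP_* fields restored above */
typedef struct demo_stream_state {
    int64_t cur_dts;
    int64_t last_ip_pts;
    int     last_ip_duration;
} demo_stream_state;

/* Fill in a missing DTS from the previous I/P frame's PTS, then advance
 * cur_dts by that frame's duration, one whole frame at a time. */
static int64_t interpolate_dts(demo_stream_state *st,
                               int64_t dts, int64_t pts, int duration)
{
    if (dts == NOPTS)
        dts = st->last_ip_pts;          /* borrow the previous I/P frame's PTS */
    if (dts == NOPTS)
        dts = st->cur_dts;              /* still unknown: fall back to cur_dts */

    if (st->last_ip_duration == 0)      /* first packet: no previous duration yet */
        st->last_ip_duration = duration;
    st->cur_dts = dts + st->last_ip_duration;
    st->last_ip_duration = duration;
    st->last_ip_pts = pts;
    return dts;
}

int main(void)
{
    /* 90 kHz time base, 25 fps frames -> 3600 ticks per frame */
    demo_stream_state st = { 0, NOPTS, 0 };
    printf("%lld\n", (long long)interpolate_dts(&st, 0,     3600,  3600)); /* 0    */
    printf("%lld\n", (long long)interpolate_dts(&st, NOPTS, 7200,  3600)); /* 3600 */
    printf("%lld\n", (long long)interpolate_dts(&st, NOPTS, 10800, 3600)); /* 7200 */
    return 0;
}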
@@ -1024,8 +1015,7 @@ static void av_read_frame_flush(AVFormatContext *s)
             av_parser_close(st->parser);
             st->parser = NULL;
         }
-        st->last_IP_pts[0] =
-        st->last_IP_pts[1] = AV_NOPTS_VALUE;
+        st->last_IP_pts = AV_NOPTS_VALUE;
         st->cur_dts = AV_NOPTS_VALUE; /* we set the current DTS to an unspecified origin */
     }
 }
@@ -1633,8 +1623,7 @@ static void av_estimate_timings_from_pts(AVFormatContext *ic, offset_t old_offset)
     for(i=0; i<ic->nb_streams; i++){
         st= ic->streams[i];
         st->cur_dts= st->first_dts;
-        st->last_IP_pts[0] =
-        st->last_IP_pts[1] = AV_NOPTS_VALUE;
+        st->last_IP_pts = AV_NOPTS_VALUE;
     }
 }
@@ -2193,8 +2182,7 @@ AVStream *av_new_stream(AVFormatContext *s, int id)
     /* default pts setting is MPEG-like */
     av_set_pts_info(st, 33, 1, 90000);
-    st->last_IP_pts[0] =
-    st->last_IP_pts[1] = AV_NOPTS_VALUE;
+    st->last_IP_pts = AV_NOPTS_VALUE;
     for(i=0; i<MAX_REORDER_DELAY+1; i++)
        st->pts_buffer[i]= AV_NOPTS_VALUE;
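For context on the "33, 1, 90000" default in the last hunk: av_set_pts_info(st, 33, 1, 90000) gives the stream a 1/90000 s time base with timestamps wrapping at 33 bits, the MPEG convention noted in the source comment. A small standalone sketch (plain C, no FFmpeg headers; only the numbers from the diff are used) of what those values imply:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
    const int64_t tb_den    = 90000;  /* ticks per second (1/90000 s time base) */
    const int     wrap_bits = 33;     /* MPEG PTS/DTS counter width */

    /* one 25 fps frame lasts 90000/25 = 3600 ticks */
    printf("ticks per 25fps frame: %lld\n", (long long)(tb_den / 25));

    /* a 33-bit counter at 90 kHz wraps after roughly 26.5 hours */
    double wrap_seconds = (double)((int64_t)1 << wrap_bits) / tb_den;
    printf("wrap time: %.1f hours\n", wrap_seconds / 3600.0);
    return 0;
}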