/* NOTE(review): this is a patch/diff fragment (+/- line markers), not
 * compilable C; context lines between hunks are elided. The hunk adds a
 * per-picture duration field alongside the existing timestamp. */
typedef struct VideoPicture {
double pts; // presentation timestamp for this picture
+ double duration; // estimated duration based on frame rate
int64_t pos; // byte position in file
SDL_Overlay *bmp; // SDL YUV overlay holding the picture for display
int width, height; /* source height & width */
} // NOTE(review): struct appears truncated here — typedef name, semicolon, and any remaining fields are not visible in this fragment
} // NOTE(review): stray closing brace; belongs to elided surrounding context
/* queue_picture: copies a decoded frame's metadata into the next VideoPicture
 * queue slot. This hunk widens the signature to also carry the estimated
 * frame duration so the display loop can use it (body largely elided). */
-static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, int64_t pos, int serial)
+static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, double duration, int64_t pos, int serial)
{
VideoPicture *vp; // NOTE(review): assignment of vp (queue-slot lookup) is in elided context before use
SDL_UnlockYUVOverlay(vp->bmp); // pixel copy into the overlay happens in elided lines above
vp->pts = pts; // presentation timestamp for this picture
+ vp->duration = duration; // store the new per-picture duration alongside pts
vp->pos = pos; // byte position of the packet in the input file
vp->serial = serial; // packet-queue serial, used to detect stale pictures after a seek
/* video_thread fragment: decode loop locals and the per-frame duration
 * computation added by this patch. Large spans (decode call, filter-graph
 * (re)configuration, error handling) are elided between these lines. */
VideoState *is = arg; // thread argument is the player state
AVFrame *frame = av_frame_alloc();
double pts;
+ double duration; // estimated duration of the current frame, in seconds
int ret;
int serial = 0;
AVRational tb = is->video_st->time_base; // timebase used to convert frame->pts to seconds
+ AVRational frame_rate = av_guess_frame_rate(is->ic, is->video_st, NULL); // container/stream frame-rate guess; may be 0/0 if unknown
#if CONFIG_AVFILTER
AVFilterGraph *graph = avfilter_graph_alloc();
last_h = frame->height;
last_format = frame->format;
last_serial = serial;
+ frame_rate = filt_out->inputs[0]->frame_rate; // prefer the filter graph's output frame rate once configured
}
ret = av_buffersrc_add_frame(filt_in, frame);
is->frame_last_filter_delay = 0;
tb = filt_out->inputs[0]->time_base; // filters may change the timebase; use the output side's
#endif
+ duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0); // duration = 1/frame_rate; 0 when the rate is unknown
pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb); // NAN marks "no timestamp" for the sync code
- ret = queue_picture(is, frame, pts, av_frame_get_pkt_pos(frame), serial);
+ ret = queue_picture(is, frame, pts, duration, av_frame_get_pkt_pos(frame), serial); // pass the new duration through to the picture queue
av_frame_unref(frame);
#if CONFIG_AVFILTER
}