}
#define DURATION_MAX_READ_SIZE 250000LL
-#define DURATION_MAX_RETRY 4
+#define DURATION_MAX_RETRY 6
/* only usable for MPEG-PS streams */
static void estimate_timings_from_pts(AVFormatContext *ic, int64_t old_offset)
AVFrame *frame = av_frame_alloc();
AVSubtitle subtitle;
AVPacket pkt = *avpkt;
+ int do_skip_frame = 0;
+ enum AVDiscard skip_frame;
if (!frame)
return AVERROR(ENOMEM);
goto fail;
}
+ if (st->codec->codec->caps_internal & FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM) {
+ do_skip_frame = 1;
+ skip_frame = st->codec->skip_frame;
+ st->codec->skip_frame = AVDISCARD_ALL;
+ }
+
while ((pkt.size > 0 || (!pkt.data && got_picture)) &&
ret >= 0 &&
(!has_codec_parameters(st, NULL) || !has_decode_delay_been_guessed(st) ||
ret = -1;
fail:
+ if (do_skip_frame) {
+ st->codec->skip_frame = skip_frame;
+ }
+
av_frame_free(&frame);
return ret;
}
st->codec->time_base = st->time_base;
}
// only for the split stuff
- if (!st->parser && !(ic->flags & AVFMT_FLAG_NOPARSE)) {
+ if (!st->parser && !(ic->flags & AVFMT_FLAG_NOPARSE) && st->request_probe <= 0) {
st->parser = av_parser_init(st->codec->codec_id);
if (st->parser) {
if (st->need_parsing == AVSTREAM_PARSE_HEADERS) {
}
if (probesize)
- estimate_timings(ic, old_offset);
+ estimate_timings(ic, old_offset);
av_opt_set(ic, "skip_clear", "0", AV_OPT_SEARCH_CHILDREN);
return AVERROR(ENOSYS);
}
-void ff_free_stream(AVFormatContext *s, AVStream *st) {
-    int j;
-    av_assert0(s->nb_streams>0);
-    av_assert0(s->streams[ s->nb_streams - 1 ] == st);
+/* Free every allocation owned by an AVStream, then the stream struct itself.
+ * Takes a pointer-to-pointer so the caller's slot is cleared via av_freep().
+ * A NULL *pst is a no-op, making this safe on partially-constructed streams
+ * (e.g. the avformat_new_stream() failure path). */
+static void free_stream(AVStream **pst)
+{
+    AVStream *st = *pst;
+    int i;
-    for (j = 0; j < st->nb_side_data; j++)
-        av_freep(&st->side_data[j].data);
+    if (!st)
+        return;
+
+    for (i = 0; i < st->nb_side_data; i++)
+        av_freep(&st->side_data[i].data);
    av_freep(&st->side_data);
-    st->nb_side_data = 0;
-    if (st->parser) {
+    if (st->parser)
        av_parser_close(st->parser);
-    }
+
    if (st->attached_pic.data)
        av_packet_unref(&st->attached_pic);
+
+    /* new in this change: streams now own an AVStreamInternal */
+    av_freep(&st->internal);
+
    av_dict_free(&st->metadata);
    av_freep(&st->probe_data.buf);
    av_freep(&st->index_entries);
    av_freep(&st->info);
    av_freep(&st->recommended_encoder_configuration);
    av_freep(&st->priv_pts);
-    av_freep(&s->streams[ --s->nb_streams ]);
+
+    av_freep(pst);
+}
+
+/* Detach and free the last stream of the context. The asserts enforce the
+ * existing contract: st must be the most recently added stream; actual
+ * teardown is delegated to free_stream() above. */
+void ff_free_stream(AVFormatContext *s, AVStream *st)
+{
+    av_assert0(s->nb_streams>0);
+    av_assert0(s->streams[ s->nb_streams - 1 ] == st);
+
+    free_stream(&s->streams[ --s->nb_streams ]);
+}
void avformat_free_context(AVFormatContext *s)
if (s->oformat && s->oformat->priv_class && s->priv_data)
av_opt_free(s->priv_data);
- for (i = s->nb_streams - 1; i >= 0; i--) {
+ for (i = s->nb_streams - 1; i >= 0; i--)
ff_free_stream(s, s->streams[i]);
- }
+
+
for (i = s->nb_programs - 1; i >= 0; i--) {
av_dict_free(&s->programs[i]->metadata);
av_freep(&s->programs[i]->stream_index);
av_free(st);
return NULL;
}
+
+ st->internal = av_mallocz(sizeof(*st->internal));
+ if (!st->internal)
+ goto fail;
+
if (s->iformat) {
/* no default bitrate if decoding */
st->codec->bit_rate = 0;
/* default pts setting is MPEG-like */
avpriv_set_pts_info(st, 33, 1, 90000);
+ /* we set the current DTS to 0 so that formats without any timestamps
+ * but durations get some timestamps, formats with some unknown
+ * timestamps have their first few packets buffered and the
+ * timestamps corrected before they are returned to the user */
+ st->cur_dts = RELATIVE_TS_BASE;
+ } else {
+ st->cur_dts = AV_NOPTS_VALUE;
}
st->index = s->nb_streams;
st->start_time = AV_NOPTS_VALUE;
st->duration = AV_NOPTS_VALUE;
- /* we set the current DTS to 0 so that formats without any timestamps
- * but durations get some timestamps, formats with some unknown
- * timestamps have their first few packets buffered and the
- * timestamps corrected before they are returned to the user */
- st->cur_dts = s->iformat ? RELATIVE_TS_BASE : 0;
st->first_dts = AV_NOPTS_VALUE;
st->probe_packets = MAX_PROBE_PACKETS;
st->pts_wrap_reference = AV_NOPTS_VALUE;
s->streams[s->nb_streams++] = st;
return st;
+fail:
+ free_stream(&st);
+ return NULL;
}
AVProgram *av_new_program(AVFormatContext *ac, int id)
return NULL;
}
-uint8_t *ff_stream_new_side_data(AVStream *st, enum AVPacketSideDataType type,
+uint8_t *av_stream_new_side_data(AVStream *st, enum AVPacketSideDataType type,
int size)
{
AVPacketSideData *sd, *tmp;