for (j = 0; j < ost->nb_bitstream_filters; j++)
av_bsf_free(&ost->bsf_ctx[j]);
av_freep(&ost->bsf_ctx);
- av_freep(&ost->bsf_extradata_updated);
av_frame_free(&ost->filtered_frame);
av_frame_free(&ost->last_frame);
}
}
-static void output_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost)
+/*
+ * Send a single packet to the output, applying any bitstream filters
+ * associated with the output stream. This may result in any number
+ * of packets actually being written, depending on what bitstream
+ * filters are applied. The supplied packet is consumed and will be
+ * blank (as if newly-allocated) when this function returns.
+ *
+ * If eof is set, instead indicate EOF to all bitstream filters and
+ * therefore flush any delayed packets to the output. A blank packet
+ * must be supplied in this case.
+ */
+static void output_packet(OutputFile *of, AVPacket *pkt,
+ OutputStream *ost, int eof)
{
int ret = 0;
if (ost->nb_bitstream_filters) {
int idx;
- ret = av_bsf_send_packet(ost->bsf_ctx[0], pkt);
+ ret = av_bsf_send_packet(ost->bsf_ctx[0], eof ? NULL : pkt);
if (ret < 0)
goto finish;
+ eof = 0;
idx = 1;
while (idx) {
/* get a packet from the previous filter up the chain */
ret = 0;
idx--;
continue;
+ } else if (ret == AVERROR_EOF) {
+ eof = 1;
} else if (ret < 0)
goto finish;
- /* HACK! - aac_adtstoasc updates extradata after filtering the first frame when
- * the api states this shouldn't happen after init(). Propagate it here to the
- * muxer and to the next filters in the chain to workaround this.
- * TODO/FIXME - Make aac_adtstoasc use new packet side data instead of changing
- * par_out->extradata and adapt muxers accordingly to get rid of this. */
- if (!(ost->bsf_extradata_updated[idx - 1] & 1)) {
- ret = avcodec_parameters_copy(ost->st->codecpar, ost->bsf_ctx[idx - 1]->par_out);
- if (ret < 0)
- goto finish;
- ost->bsf_extradata_updated[idx - 1] |= 1;
- }
/* send it to the next filter down the chain or to the muxer */
if (idx < ost->nb_bitstream_filters) {
- /* HACK/FIXME! - See above */
- if (!(ost->bsf_extradata_updated[idx] & 2)) {
- ret = avcodec_parameters_copy(ost->bsf_ctx[idx]->par_out, ost->bsf_ctx[idx - 1]->par_out);
- if (ret < 0)
- goto finish;
- ost->bsf_extradata_updated[idx] |= 2;
- }
- ret = av_bsf_send_packet(ost->bsf_ctx[idx], pkt);
+ ret = av_bsf_send_packet(ost->bsf_ctx[idx], eof ? NULL : pkt);
if (ret < 0)
goto finish;
idx++;
- } else
+ eof = 0;
+ } else if (eof)
+ goto finish;
+ else
write_packet(of, pkt, ost, 0);
}
- } else
+ } else if (!eof)
write_packet(of, pkt, ost, 0);
finish:
av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &enc->time_base));
}
- output_packet(of, &pkt, ost);
+ output_packet(of, &pkt, ost, 0);
}
return;
pkt.pts += av_rescale_q(sub->end_display_time, (AVRational){ 1, 1000 }, ost->mux_timebase);
}
pkt.dts = pkt.pts;
- output_packet(of, &pkt, ost);
+ output_packet(of, &pkt, ost, 0);
}
}
pkt.pts = av_rescale_q(in_picture->pts, enc->time_base, ost->mux_timebase);
pkt.flags |= AV_PKT_FLAG_KEY;
- output_packet(of, &pkt, ost);
+ output_packet(of, &pkt, ost, 0);
} else
#endif
{
}
frame_size = pkt.size;
- output_packet(of, &pkt, ost);
+ output_packet(of, &pkt, ost, 0);
/* if two pass, output log */
if (ost->logfile && enc->stats_out) {
fprintf(ost->logfile, "%s", enc->stats_out);
}
if (ret == AVERROR_EOF) {
+ output_packet(of, &pkt, ost, 1);
break;
}
if (ost->finished & MUXER_FINISHED) {
}
av_packet_rescale_ts(&pkt, enc->time_base, ost->mux_timebase);
pkt_size = pkt.size;
- output_packet(of, &pkt, ost);
+ output_packet(of, &pkt, ost, 0);
if (ost->enc_ctx->codec_type == AVMEDIA_TYPE_VIDEO && vstats_filename) {
do_video_stats(ost, pkt_size);
}
}
#endif
- output_packet(of, &opkt, ost);
+ output_packet(of, &opkt, ost, 0);
}
int guess_input_channel_layout(InputStream *ist)
if (!av_dict_get(ist->decoder_opts, "threads", NULL, 0))
av_dict_set(&ist->decoder_opts, "threads", "auto", 0);
+
+ ret = hw_device_setup_for_decode(ist);
+ if (ret < 0) {
+ snprintf(error, error_len, "Device setup failed for "
+ "decoder on input stream #%d:%d : %s",
+ ist->file_index, ist->st->index, av_err2str(ret));
+ return ret;
+ }
+
if ((ret = avcodec_open2(ist->dec_ctx, codec, &ist->decoder_opts)) < 0) {
if (ret == AVERROR_EXPERIMENTAL)
abort_codec_experimental(codec, 0);
ost->forced_kf_pts = pts;
}
+/*
+ * Choose the encoder timebase for an output stream.
+ *
+ * Resolution order:
+ *   1. A user-requested timebase (ost->enc_timebase.num > 0) is used as-is.
+ *   2. A negative num requests copying the corresponding input stream's
+ *      timebase; if no input stream is associated with this output, a
+ *      warning is logged and we fall through to the default.
+ *   3. Otherwise the caller-supplied default_time_base is used
+ *      (callers pass 1/sample_rate for audio, 1/frame_rate for video).
+ *
+ * NOTE(review): presumably ost->enc_timebase is populated from a
+ * command-line option (-enc_time_base or similar) — not visible in this
+ * hunk, confirm against the option-parsing code.
+ */
+static void init_encoder_time_base(OutputStream *ost, AVRational default_time_base)
+{
+ InputStream *ist = get_input_stream(ost);
+ AVCodecContext *enc_ctx = ost->enc_ctx;
+ AVFormatContext *oc;
+
+ /* case 1: explicitly requested timebase wins unconditionally */
+ if (ost->enc_timebase.num > 0) {
+ enc_ctx->time_base = ost->enc_timebase;
+ return;
+ }
+
+ /* case 2: negative num means "mirror the input stream's timebase" */
+ if (ost->enc_timebase.num < 0) {
+ if (ist) {
+ enc_ctx->time_base = ist->st->time_base;
+ return;
+ }
+
+ /* no matching input stream (e.g. filter-only output): warn, use default */
+ oc = output_files[ost->file_index]->ctx;
+ av_log(oc, AV_LOG_WARNING, "Input stream data not available, using default time base\n");
+ }
+
+ /* case 3: fall back to the media-type-specific default */
+ enc_ctx->time_base = default_time_base;
+}
+
static int init_output_stream_encode(OutputStream *ost)
{
InputStream *ist = get_input_stream(ost);
enc_ctx->sample_rate = av_buffersink_get_sample_rate(ost->filter->filter);
enc_ctx->channel_layout = av_buffersink_get_channel_layout(ost->filter->filter);
enc_ctx->channels = av_buffersink_get_channels(ost->filter->filter);
- enc_ctx->time_base = (AVRational){ 1, enc_ctx->sample_rate };
+
+ init_encoder_time_base(ost, av_make_q(1, enc_ctx->sample_rate));
break;
+
case AVMEDIA_TYPE_VIDEO:
- enc_ctx->time_base = av_inv_q(ost->frame_rate);
+ init_encoder_time_base(ost, av_inv_q(ost->frame_rate));
+
if (!(enc_ctx->time_base.num && enc_ctx->time_base.den))
enc_ctx->time_base = av_buffersink_get_time_base(ost->filter->filter);
if ( av_q2d(enc_ctx->time_base) < 0.001 && video_sync_method != VSYNC_PASSTHROUGH
ost->frame_aspect_ratio.num ? // overridden by the -aspect cli option
av_mul_q(ost->frame_aspect_ratio, (AVRational){ enc_ctx->height, enc_ctx->width }) :
av_buffersink_get_sample_aspect_ratio(ost->filter->filter);
- if (!strncmp(ost->enc->name, "libx264", 7) &&
- enc_ctx->pix_fmt == AV_PIX_FMT_NONE &&
- av_buffersink_get_format(ost->filter->filter) != AV_PIX_FMT_YUV420P)
- av_log(NULL, AV_LOG_WARNING,
- "No pixel format specified, %s for H.264 encoding chosen.\n"
- "Use -pix_fmt yuv420p for compatibility with outdated media players.\n",
- av_get_pix_fmt_name(av_buffersink_get_format(ost->filter->filter)));
- if (!strncmp(ost->enc->name, "mpeg2video", 10) &&
- enc_ctx->pix_fmt == AV_PIX_FMT_NONE &&
- av_buffersink_get_format(ost->filter->filter) != AV_PIX_FMT_YUV420P)
- av_log(NULL, AV_LOG_WARNING,
- "No pixel format specified, %s for MPEG-2 encoding chosen.\n"
- "Use -pix_fmt yuv420p for compatibility with outdated media players.\n",
- av_get_pix_fmt_name(av_buffersink_get_format(ost->filter->filter)));
+
enc_ctx->pix_fmt = av_buffersink_get_format(ost->filter->filter);
if (dec_ctx)
enc_ctx->bits_per_raw_sample = FFMIN(dec_ctx->bits_per_raw_sample,
ost->enc_ctx->hw_frames_ctx = av_buffer_ref(av_buffersink_get_hw_frames_ctx(ost->filter->filter));
if (!ost->enc_ctx->hw_frames_ctx)
return AVERROR(ENOMEM);
+ } else {
+ ret = hw_device_setup_for_encode(ost);
+ if (ret < 0) {
+ snprintf(error, error_len, "Device setup failed for "
+ "encoder on output stream #%d:%d : %s",
+ ost->file_index, ost->index, av_err2str(ret));
+ return ret;
+ }
}
if ((ret = avcodec_open2(ost->enc_ctx, codec, &ost->encoder_opts)) < 0) {
}
av_buffer_unref(&hw_device_ctx);
+ hw_device_free_all();
/* finished ! */
ret = 0;