/*
-* Copyright (c) 2011 Sveriges Television AB <info@casparcg.com>
+* Copyright 2013 Sveriges Television AB http://casparcg.com/
*
* This file is part of CasparCG (www.casparcg.com).
*
* Author: Robert Nagy, ronag89@gmail.com
*/
-#include "../../stdafx.h"
+#include "../../StdAfx.h"
#include "filter.h"
-#include "parallel_yadif.h"
-
#include "../../ffmpeg_error.h"
+#include "../../ffmpeg.h"
+#include "../util/util.h"
+#include <common/assert.h>
#include <common/except.h>
-#include <boost/assign.hpp>
-#include <boost/range/iterator_range.hpp>
-#include <boost/range/adaptors.hpp>
-#include <boost/assign.hpp>
#include <boost/algorithm/string.hpp>
-#include <boost/foreach.hpp>
+#include <boost/thread.hpp>
+#include <boost/format.hpp>
+#include <boost/rational.hpp>
#include <cstdio>
#include <sstream>
+#include <string>
+#include <queue>
#if defined(_MSC_VER)
#pragma warning (push)
#pragma warning (disable : 4244)
#endif
-extern "C"
+extern "C"
{
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>
+ #include <libavutil/opt.h>
#include <libavfilter/avfilter.h>
#include <libavfilter/avcodec.h>
- #include <libavfilter/avfiltergraph.h>
#include <libavfilter/buffersink.h>
- #include <libavfilter/vsrc_buffer.h>
+ #include <libavfilter/buffersrc.h>
}
#if defined(_MSC_VER)
#pragma warning (pop)
#endif
-
namespace caspar { namespace ffmpeg {
-
-static int query_formats_444(AVFilterContext *ctx)
-{
- static const int pix_fmts[] = {PIX_FMT_YUV444P, PIX_FMT_NONE};
- avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(pix_fmts));
- return 0;
-}
-
-static int query_formats_422(AVFilterContext *ctx)
-{
- static const int pix_fmts[] = {PIX_FMT_YUV422P, PIX_FMT_NONE};
- avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(pix_fmts));
- return 0;
-}
-
-static int query_formats_420(AVFilterContext *ctx)
+struct filter::implementation
{
- static const int pix_fmts[] = {PIX_FMT_YUV420P, PIX_FMT_NONE};
- avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(pix_fmts));
- return 0;
-}
+ std::string filtergraph_;
-static int query_formats_420a(AVFilterContext *ctx)
-{
- static const int pix_fmts[] = {PIX_FMT_YUVA420P, PIX_FMT_NONE};
- avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(pix_fmts));
- return 0;
-}
+ std::shared_ptr<AVFilterGraph> video_graph_;
+ AVFilterContext* video_graph_in_;
+ AVFilterContext* video_graph_out_;
-static int query_formats_411(AVFilterContext *ctx)
-{
- static const int pix_fmts[] = {PIX_FMT_YUV411P, PIX_FMT_NONE};
- avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(pix_fmts));
- return 0;
-}
+ std::queue<std::shared_ptr<AVFrame>> fast_path_;
-static int query_formats_410(AVFilterContext *ctx)
-{
- static const int pix_fmts[] = {PIX_FMT_YUV410P, PIX_FMT_NONE};
- avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(pix_fmts));
- return 0;
-}
-
-struct filter::impl
-{
- std::wstring filters_;
- std::shared_ptr<AVFilterGraph> graph_;
- AVFilterContext* buffersink_ctx_;
- AVFilterContext* buffersrc_ctx_;
- std::shared_ptr<void> parallel_yadif_ctx_;
- std::vector<PixelFormat> pix_fmts_;
- std::queue<spl::shared_ptr<AVFrame>> bypass_;
-
- impl(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts)
- : filters_(filters)
- , parallel_yadif_ctx_(nullptr)
- , pix_fmts_(pix_fmts)
+ implementation(
+ int in_width,
+ int in_height,
+ boost::rational<int> in_time_base,
+ boost::rational<int> in_frame_rate,
+ boost::rational<int> in_sample_aspect_ratio,
+ AVPixelFormat in_pix_fmt,
+ std::vector<AVPixelFormat> out_pix_fmts,
+ const std::string& filtergraph,
+ bool multithreaded)
+ : filtergraph_(boost::to_lower_copy(filtergraph))
{
- if(pix_fmts_.empty())
+ if(out_pix_fmts.empty())
{
- pix_fmts_ = boost::assign::list_of
- (PIX_FMT_YUVA420P)
- (PIX_FMT_YUV444P)
- (PIX_FMT_YUV422P)
- (PIX_FMT_YUV420P)
- (PIX_FMT_YUV411P)
- (PIX_FMT_BGRA)
- (PIX_FMT_ARGB)
- (PIX_FMT_RGBA)
- (PIX_FMT_ABGR)
- (PIX_FMT_GRAY8);
+ out_pix_fmts = {
+ AV_PIX_FMT_YUVA420P,
+ AV_PIX_FMT_YUV444P,
+ AV_PIX_FMT_YUV422P,
+ AV_PIX_FMT_YUV420P,
+ AV_PIX_FMT_YUV411P,
+ AV_PIX_FMT_BGRA,
+ AV_PIX_FMT_ARGB,
+ AV_PIX_FMT_RGBA,
+ AV_PIX_FMT_ABGR,
+ AV_PIX_FMT_GRAY8
+ };
}
-
- pix_fmts_.push_back(PIX_FMT_NONE);
- }
-
- void push(const std::shared_ptr<AVFrame>& frame)
- {
- if(!frame)
- return;
- if(frame->data[0] == nullptr || frame->width < 1)
- CASPAR_THROW_EXCEPTION(invalid_argument());
+ out_pix_fmts.push_back(AV_PIX_FMT_NONE);
+
+ video_graph_.reset(
+ avfilter_graph_alloc(),
+ [](AVFilterGraph* p)
+ {
+ avfilter_graph_free(&p);
+ });
- if(filters_.empty())
+ if (multithreaded)
{
- bypass_.push(spl::make_shared_ptr(frame));
- return;
+ video_graph_->nb_threads = 0;
+ video_graph_->thread_type = AVFILTER_THREAD_SLICE;
}
-
- try
+ else
{
- if(!graph_)
- {
- try
- {
- graph_.reset(avfilter_graph_alloc(), [](AVFilterGraph* p){avfilter_graph_free(&p);});
-
- // Input
- std::stringstream args;
- args << frame->width << ":" << frame->height << ":" << frame->format << ":" << 0 << ":" << 0 << ":" << 0 << ":" << 0; // don't care about pts and aspect_ratio
- THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersrc_ctx_, avfilter_get_by_name("buffer"), "src", args.str().c_str(), NULL, graph_.get()), "[filter]");
-
- #if FF_API_OLD_VSINK_API
- THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersink_ctx_, avfilter_get_by_name("buffersink"), "out", NULL, pix_fmts_.data(), graph_.get()), "[filter]");
- #else
- spl::shared_ptr<AVBufferSinkParams> buffersink_params(av_buffersink_params_alloc(), av_free);
- buffersink_params->pixel_fmts = pix_fmts_.data();
- THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersink_ctx_, avfilter_get_by_name("buffersink"), "out", NULL, buffersink_params.get(), graph_.get()), "[filter]");
- #endif
- AVFilterInOut* inputs = avfilter_inout_alloc();
- AVFilterInOut* outputs = avfilter_inout_alloc();
-
- outputs->name = av_strdup("in");
- outputs->filter_ctx = buffersrc_ctx_;
- outputs->pad_idx = 0;
- outputs->next = nullptr;
-
- inputs->name = av_strdup("out");
- inputs->filter_ctx = buffersink_ctx_;
- inputs->pad_idx = 0;
- inputs->next = nullptr;
-
- std::string filters = boost::to_lower_copy(u8(filters_));
- THROW_ON_ERROR2(avfilter_graph_parse(graph_.get(), filters.c_str(), &inputs, &outputs, NULL), "[filter]");
-
- auto yadif_filter = boost::adaptors::filtered([&](AVFilterContext* p){return strstr(p->name, "yadif") != 0;});
-
- BOOST_FOREACH(auto filter_ctx, boost::make_iterator_range(graph_->filters, graph_->filters + graph_->filter_count) | yadif_filter)
- {
- // Don't trust that libavfilter chooses optimal format.
- filter_ctx->filter->query_formats = [&]() -> int (*)(AVFilterContext*)
- {
- switch(frame->format)
- {
- case PIX_FMT_YUV444P16:
- case PIX_FMT_YUV444P10:
- case PIX_FMT_YUV444P9:
- case PIX_FMT_YUV444P:
- case PIX_FMT_BGR24:
- case PIX_FMT_RGB24:
- return query_formats_444;
- case PIX_FMT_YUV422P16:
- case PIX_FMT_YUV422P10:
- case PIX_FMT_YUV422P9:
- case PIX_FMT_YUV422P:
- case PIX_FMT_UYVY422:
- case PIX_FMT_YUYV422:
- return query_formats_422;
- case PIX_FMT_YUV420P16:
- case PIX_FMT_YUV420P10:
- case PIX_FMT_YUV420P9:
- case PIX_FMT_YUV420P:
- return query_formats_420;
- case PIX_FMT_YUVA420P:
- case PIX_FMT_BGRA:
- case PIX_FMT_RGBA:
- case PIX_FMT_ABGR:
- case PIX_FMT_ARGB:
- return query_formats_420a;
- case PIX_FMT_UYYVYY411:
- case PIX_FMT_YUV411P:
- return query_formats_411;
- case PIX_FMT_YUV410P:
- return query_formats_410;
- default:
- return filter_ctx->filter->query_formats;
- }
- }();
- }
-
- THROW_ON_ERROR2(avfilter_graph_config(graph_.get(), NULL), "[filter]");
-
- BOOST_FOREACH(auto filter_ctx, boost::make_iterator_range(graph_->filters, graph_->filters + graph_->filter_count) | yadif_filter)
- parallel_yadif_ctx_ = make_parallel_yadif(filter_ctx);
- }
- catch(...)
- {
- graph_ = nullptr;
- throw;
- }
- }
-
- THROW_ON_ERROR2(av_vsrc_buffer_add_frame(buffersrc_ctx_, frame.get(), 0), "[filter]");
+ video_graph_->nb_threads = 1;
}
- catch(ffmpeg_error&)
+
+ const auto vsrc_options = (boost::format("video_size=%1%x%2%:pix_fmt=%3%:time_base=%4%/%5%:pixel_aspect=%6%/%7%:frame_rate=%8%/%9%")
+ % in_width % in_height
+ % in_pix_fmt
+ % in_time_base.numerator() % in_time_base.denominator()
+ % in_sample_aspect_ratio.numerator() % in_sample_aspect_ratio.denominator()
+ % in_frame_rate.numerator() % in_frame_rate.denominator()).str();
+
+ AVFilterContext* filt_vsrc = nullptr;
+ FF(avfilter_graph_create_filter(
+ &filt_vsrc,
+ avfilter_get_by_name("buffer"),
+ "filter_buffer",
+ vsrc_options.c_str(),
+ nullptr,
+ video_graph_.get()));
+
+ AVFilterContext* filt_vsink = nullptr;
+ FF(avfilter_graph_create_filter(
+ &filt_vsink,
+ avfilter_get_by_name("buffersink"),
+ "filter_buffersink",
+ nullptr,
+ nullptr,
+ video_graph_.get()));
+
+#if defined(_MSC_VER)
+#pragma warning (push)
+#pragma warning (disable : 4245)
+#endif
+
+	FF(av_opt_set_int_list(
+		filt_vsink,
+		"pix_fmts",
+		out_pix_fmts.data(),
+		-1,
+		AV_OPT_SEARCH_CHILDREN));
+
+#if defined(_MSC_VER)
+#pragma warning (pop)
+#endif
+
+ configure_filtergraph(
+ *video_graph_,
+ filtergraph_,
+ *filt_vsrc,
+ *filt_vsink);
+
+ video_graph_in_ = filt_vsrc;
+ video_graph_out_ = filt_vsink;
+
+		// avfilter_graph_dump() returns an av_malloc()ed string that the
+		// caller must release with av_free() -- hold it in a smart pointer
+		// so it is freed after logging instead of leaking per construction.
+		const std::shared_ptr<char> graph_dump(
+			avfilter_graph_dump(video_graph_.get(), nullptr),
+			[](char* p) { av_free(p); });
+
+		if (is_logging_quiet_for_thread())
+			CASPAR_LOG(trace)
+				<< u16(std::string("\n") + graph_dump.get());
+		else
+			CASPAR_LOG(debug)
+				<< u16(std::string("\n") + graph_dump.get());
+ }
+
+ void configure_filtergraph(
+ AVFilterGraph& graph,
+ const std::string& filtergraph,
+ AVFilterContext& source_ctx,
+ AVFilterContext& sink_ctx)
+ {
+ if (!filtergraph.empty())
{
- throw;
+ auto outputs = avfilter_inout_alloc();
+ auto inputs = avfilter_inout_alloc();
+
+ CASPAR_VERIFY(outputs && inputs);
+
+ outputs->name = av_strdup("in");
+ outputs->filter_ctx = &source_ctx;
+ outputs->pad_idx = 0;
+ outputs->next = nullptr;
+
+ inputs->name = av_strdup("out");
+ inputs->filter_ctx = &sink_ctx;
+ inputs->pad_idx = 0;
+ inputs->next = nullptr;
+
+ FF(avfilter_graph_parse(
+ &graph,
+ filtergraph.c_str(),
+ inputs,
+ outputs,
+ nullptr));
}
- catch(...)
+ else
{
- CASPAR_THROW_EXCEPTION(ffmpeg_error() << boost::errinfo_nested_exception(boost::current_exception()));
+ FF(avfilter_link(
+ &source_ctx,
+ 0,
+ &sink_ctx,
+ 0));
}
+
+ FF(avfilter_graph_config(&graph, nullptr));
+ }
+
+ bool fast_path() const
+ {
+ return filtergraph_.empty();
+ }
+
+	void push(const std::shared_ptr<AVFrame>& src_av_frame)
+	{
+		// NOTE(review): unlike the removed implementation, this no longer
+		// rejects null/empty frames; av_buffersrc_add_frame(ctx, nullptr)
+		// signals EOF on the source -- confirm callers never pass a null
+		// frame unintentionally.
+		if (fast_path())
+			fast_path_.push(src_av_frame);
+		else
+			FF(av_buffersrc_add_frame(
+				video_graph_in_,
+				src_av_frame.get()));
}
std::shared_ptr<AVFrame> poll()
{
- if(filters_.empty())
+ if (fast_path())
{
- if(bypass_.empty())
+ if (fast_path_.empty())
return nullptr;
- auto frame = bypass_.front();
- bypass_.pop();
- return frame;
+
+ auto result = fast_path_.front();
+ fast_path_.pop();
+ return result;
}
- if(!graph_)
- return nullptr;
-
- try
- {
- if(avfilter_poll_frame(buffersink_ctx_->inputs[0]))
- {
- AVFilterBufferRef *picref;
- THROW_ON_ERROR2(av_buffersink_get_buffer_ref(buffersink_ctx_, &picref, 0), "[filter]");
-
- if (!picref)
- return nullptr;
-
- spl::shared_ptr<AVFrame> frame(avcodec_alloc_frame(), [=](AVFrame* p)
- {
- av_free(p);
- avfilter_unref_buffer(picref);
- });
-
- avcodec_get_frame_defaults(frame.get());
-
- memcpy(frame->data, picref->data, sizeof(frame->data));
- memcpy(frame->linesize, picref->linesize, sizeof(frame->linesize));
- frame->format = picref->format;
- frame->width = picref->video->w;
- frame->height = picref->video->h;
- frame->pkt_pos = picref->pos;
- frame->interlaced_frame = picref->video->interlaced;
- frame->top_field_first = picref->video->top_field_first;
- frame->key_frame = picref->video->key_frame;
- frame->pict_type = picref->video->pict_type;
- frame->sample_aspect_ratio = picref->video->sample_aspect_ratio;
-
- return frame;
- }
+ auto filt_frame = create_frame();
+
+ const auto ret = av_buffersink_get_frame(
+ video_graph_out_,
+ filt_frame.get());
+ if(ret == AVERROR_EOF || ret == AVERROR(EAGAIN))
return nullptr;
- }
- catch(ffmpeg_error&)
- {
- throw;
- }
- catch(...)
- {
- CASPAR_THROW_EXCEPTION(ffmpeg_error() << boost::errinfo_nested_exception(boost::current_exception()));
- }
+
+ FF_RET(ret, "poll");
+
+ return filt_frame;
}
};
-filter::filter(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts) : impl_(new impl(filters, pix_fmts)){}
+filter::filter(
+ int in_width,
+ int in_height,
+ boost::rational<int> in_time_base,
+ boost::rational<int> in_frame_rate,
+ boost::rational<int> in_sample_aspect_ratio,
+ AVPixelFormat in_pix_fmt,
+ std::vector<AVPixelFormat> out_pix_fmts,
+ const std::string& filtergraph,
+ bool multithreaded)
+ : impl_(new implementation(
+ in_width,
+ in_height,
+ in_time_base,
+ in_frame_rate,
+ in_sample_aspect_ratio,
+ in_pix_fmt,
+ out_pix_fmts,
+ filtergraph,
+ multithreaded)){}
filter::filter(filter&& other) : impl_(std::move(other.impl_)){}
filter& filter::operator=(filter&& other){impl_ = std::move(other.impl_); return *this;}
void filter::push(const std::shared_ptr<AVFrame>& frame){impl_->push(frame);}
std::shared_ptr<AVFrame> filter::poll(){return impl_->poll();}
-std::wstring filter::filter_str() const{return impl_->filters_;}
+std::wstring filter::filter_str() const{return u16(impl_->filtergraph_);}
std::vector<spl::shared_ptr<AVFrame>> filter::poll_all()
-{
+{
std::vector<spl::shared_ptr<AVFrame>> frames;
for(auto frame = poll(); frame; frame = poll())
frames.push_back(spl::make_shared_ptr(frame));
return frames;
}
-
-}}
\ No newline at end of file
+}}