\r
#include "filter.h"\r
\r
-#include "../../ffmpeg_error.h"\r
+#include "parallel_yadif.h"\r
\r
-#include <common/exception/exceptions.h>\r
-#include <core/producer/frame/basic_frame.h>\r
-#include <core/producer/frame/frame_factory.h>\r
-#include <core/mixer/write_frame.h>\r
+#include "../../ffmpeg_error.h"\r
\r
-#include <boost/circular_buffer.hpp>\r
+#include <boost/assign.hpp>\r
\r
#include <cstdio>\r
#include <sstream>\r
\r
+#if defined(_MSC_VER)\r
+#pragma warning (push)\r
+#pragma warning (disable : 4244)\r
+#endif\r
extern "C" \r
{\r
- #define __STDC_CONSTANT_MACROS\r
- #define __STDC_LIMIT_MACROS\r
#include <libavutil/avutil.h>\r
#include <libavutil/imgutils.h>\r
#include <libavfilter/avfilter.h>\r
#include <libavfilter/avcodec.h>\r
- #include <libavfilter/vsrc_buffer.h>\r
#include <libavfilter/avfiltergraph.h>\r
+ #include <libavfilter/buffersink.h>\r
+ #include <libavfilter/vsrc_buffer.h>\r
}\r
+#if defined(_MSC_VER)\r
+#pragma warning (pop)\r
+#endif\r
\r
-namespace caspar {\r
+namespace caspar { namespace ffmpeg {\r
\r
struct filter::implementation\r
{\r
- std::string filters_;\r
- std::shared_ptr<AVFilterGraph> graph_;\r
- AVFilterContext* video_in_filter_;\r
- AVFilterContext* video_out_filter_;\r
-\r
- boost::circular_buffer<std::shared_ptr<AVFilterBufferRef>> buffers_;\r
+ std::string filters_;\r
+ std::shared_ptr<AVFilterGraph> graph_; \r
+ AVFilterContext* buffersink_ctx_;\r
+ AVFilterContext* buffersrc_ctx_;\r
+ std::shared_ptr<void> parallel_yadif_ctx_;\r
+ std::vector<PixelFormat> pix_fmts_;\r
\r
- implementation(const std::string& filters) \r
- : filters_(filters)\r
+ implementation(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts) \r
+ : filters_(narrow(filters))\r
+ , parallel_yadif_ctx_(nullptr)\r
+ , pix_fmts_(pix_fmts)\r
{\r
+ if(pix_fmts_.empty())\r
+ {\r
+ pix_fmts_.push_back(PIX_FMT_YUV420P);\r
+ pix_fmts_.push_back(PIX_FMT_YUVA420P);\r
+ pix_fmts_.push_back(PIX_FMT_YUV422P);\r
+ pix_fmts_.push_back(PIX_FMT_YUV444P);\r
+ pix_fmts_.push_back(PIX_FMT_YUV411P);\r
+ pix_fmts_.push_back(PIX_FMT_ARGB);\r
+ pix_fmts_.push_back(PIX_FMT_RGBA);\r
+ pix_fmts_.push_back(PIX_FMT_ABGR);\r
+ pix_fmts_.push_back(PIX_FMT_GRAY8);\r
+ pix_fmts_.push_back(PIX_FMT_NONE);\r
+ }\r
+ else\r
+ pix_fmts_.push_back(PIX_FMT_NONE);\r
+\r
std::transform(filters_.begin(), filters_.end(), filters_.begin(), ::tolower);\r
+ }\r
+ \r
+ std::vector<safe_ptr<AVFrame>> execute(const std::shared_ptr<AVFrame>& frame)\r
+ {\r
+ if(!frame)\r
+ return std::vector<safe_ptr<AVFrame>>();\r
+\r
+ if(filters_.empty())\r
+ return boost::assign::list_of(frame);\r
\r
- buffers_.set_capacity(3);\r
+ push(frame);\r
+ return poll();\r
}\r
\r
- void push(const safe_ptr<AVFrame>& frame)\r
+ void push(const std::shared_ptr<AVFrame>& frame)\r
{ \r
- int errn = 0; \r
-\r
if(!graph_)\r
{\r
graph_.reset(avfilter_graph_alloc(), [](AVFilterGraph* p){avfilter_graph_free(&p);});\r
- \r
+ \r
// Input\r
- std::stringstream buffer_ss;\r
- buffer_ss << frame->width << ":" << frame->height << ":" << frame->format << ":" << 0 << ":" << 0 << ":" << 0 << ":" << 0; // don't care about pts and aspect_ratio\r
- errn = avfilter_graph_create_filter(&video_in_filter_, avfilter_get_by_name("buffer"), "src", buffer_ss.str().c_str(), NULL, graph_.get());\r
- if(errn < 0 || !video_in_filter_)\r
- {\r
- BOOST_THROW_EXCEPTION(caspar_exception() << msg_info(av_error_str(errn)) <<\r
- boost::errinfo_api_function("avfilter_graph_create_filter") << boost::errinfo_errno(AVUNERROR(errn)));\r
- }\r
-\r
- // Output\r
- errn = avfilter_graph_create_filter(&video_out_filter_, avfilter_get_by_name("nullsink"), "out", NULL, NULL, graph_.get());\r
- if(errn < 0 || !video_out_filter_)\r
- {\r
- BOOST_THROW_EXCEPTION(caspar_exception() << msg_info(av_error_str(errn)) <<\r
- boost::errinfo_api_function("avfilter_graph_create_filter") << boost::errinfo_errno(AVUNERROR(errn)));\r
- }\r
+ std::stringstream args;\r
+ args << frame->width << ":" << frame->height << ":" << frame->format << ":" << 0 << ":" << 0 << ":" << 0 << ":" << 0; // don't care about pts and aspect_ratio\r
+ THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersrc_ctx_, avfilter_get_by_name("buffer"), "src", args.str().c_str(), NULL, graph_.get()), "[filter]");\r
+\r
+	// Output
+ AVBufferSinkParams *buffersink_params = av_buffersink_params_alloc();\r
+ buffersink_params->pixel_fmts = pix_fmts_.data();\r
+ THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersink_ctx_, avfilter_get_by_name("buffersink"), "out", NULL, buffersink_params, graph_.get()), "[filter]");\r
+ \r
+ AVFilterInOut* outputs = avfilter_inout_alloc();\r
+ AVFilterInOut* inputs = avfilter_inout_alloc();\r
\r
- AVFilterInOut* outputs = reinterpret_cast<AVFilterInOut*>(av_malloc(sizeof(AVFilterInOut)));\r
- AVFilterInOut* inputs = reinterpret_cast<AVFilterInOut*>(av_malloc(sizeof(AVFilterInOut)));\r
-\r
outputs->name = av_strdup("in");\r
- outputs->filter_ctx = video_in_filter_;\r
+ outputs->filter_ctx = buffersrc_ctx_;\r
outputs->pad_idx = 0;\r
outputs->next = NULL;\r
\r
inputs->name = av_strdup("out");\r
- inputs->filter_ctx = video_out_filter_;\r
+ inputs->filter_ctx = buffersink_ctx_;\r
inputs->pad_idx = 0;\r
inputs->next = NULL;\r
\r
- errn = avfilter_graph_parse(graph_.get(), filters_.c_str(), inputs, outputs, NULL);\r
- if(errn < 0)\r
- {\r
- BOOST_THROW_EXCEPTION(caspar_exception() << msg_info(av_error_str(errn)) <<\r
- boost::errinfo_api_function("avfilter_graph_parse") << boost::errinfo_errno(AVUNERROR(errn)));\r
- }\r
+ THROW_ON_ERROR2(avfilter_graph_parse(graph_.get(), filters_.c_str(), &inputs, &outputs, NULL), "[filter]");\r
+ \r
+ avfilter_inout_free(&inputs);\r
+ avfilter_inout_free(&outputs);\r
\r
-// av_free(outputs);\r
-// av_free(inputs);\r
+ THROW_ON_ERROR2(avfilter_graph_config(graph_.get(), NULL), "[filter]"); \r
\r
- errn = avfilter_graph_config(graph_.get(), NULL);\r
- if(errn < 0)\r
+ for(size_t n = 0; n < graph_->filter_count; ++n)\r
{\r
- BOOST_THROW_EXCEPTION(caspar_exception() << msg_info(av_error_str(errn)) \r
- << boost::errinfo_api_function("avfilter_graph_config") << boost::errinfo_errno(AVUNERROR(errn)));\r
+ auto filter_name = graph_->filters[n]->name;\r
+ if(strstr(filter_name, "yadif") != 0)\r
+ parallel_yadif_ctx_ = make_parallel_yadif(graph_->filters[n]);\r
}\r
}\r
\r
- errn = av_vsrc_buffer_add_frame(video_in_filter_, frame.get(), AV_VSRC_BUF_FLAG_OVERWRITE);\r
- if(errn < 0)\r
- {\r
- BOOST_THROW_EXCEPTION(caspar_exception() << msg_info(av_error_str(errn)) <<\r
- boost::errinfo_api_function("av_vsrc_buffer_add_frame") << boost::errinfo_errno(AVUNERROR(errn)));\r
- }\r
+ THROW_ON_ERROR2(av_vsrc_buffer_add_frame(buffersrc_ctx_, frame.get(), 0), "[filter]");\r
}\r
\r
std::vector<safe_ptr<AVFrame>> poll()\r
{\r
- int errn = avfilter_poll_frame(video_out_filter_->inputs[0]);\r
- if(errn < 0)\r
- {\r
- BOOST_THROW_EXCEPTION(caspar_exception() << msg_info(av_error_str(errn)) <<\r
- boost::errinfo_api_function("avfilter_poll_frame") << boost::errinfo_errno(AVUNERROR(errn)));\r
- }\r
-\r
std::vector<safe_ptr<AVFrame>> result;\r
\r
- std::generate_n(std::back_inserter(result), errn, [&]{return request_frame();});\r
-\r
- return result;\r
- }\r
- \r
- safe_ptr<AVFrame> request_frame()\r
- { \r
- auto link = video_out_filter_->inputs[0];\r
- \r
- int errn = avfilter_request_frame(link); \r
- if(errn < 0)\r
- {\r
- BOOST_THROW_EXCEPTION(caspar_exception() << msg_info(av_error_str(errn)) <<\r
- boost::errinfo_api_function("avfilter_request_frame") << boost::errinfo_errno(AVUNERROR(errn)));\r
- }\r
- \r
- auto pic = reinterpret_cast<AVPicture*>(link->cur_buf->buf);\r
+ if(!graph_)\r
+ return result;\r
\r
- safe_ptr<AVFrame> frame(avcodec_alloc_frame(), av_free);\r
- avcodec_get_frame_defaults(frame.get()); \r
-\r
- for(size_t n = 0; n < 4; ++n)\r
+ while (avfilter_poll_frame(buffersink_ctx_->inputs[0])) \r
{\r
- frame->data[n] = pic->data[n];\r
- frame->linesize[n] = pic->linesize[n];\r
- }\r
-\r
- // FIXME\r
- frame->width = link->cur_buf->video->w;\r
- frame->height = link->cur_buf->video->h;\r
- frame->format = link->cur_buf->format;\r
- frame->interlaced_frame = link->cur_buf->video->interlaced;\r
- frame->top_field_first = link->cur_buf->video->top_field_first;\r
- frame->key_frame = link->cur_buf->video->key_frame;\r
-\r
- buffers_.push_back(std::shared_ptr<AVFilterBufferRef>(link->cur_buf, avfilter_unref_buffer));\r
+ AVFilterBufferRef *picref;\r
+ THROW_ON_ERROR2(av_buffersink_get_buffer_ref(buffersink_ctx_, &picref, 0), "[filter]");\r
+\r
+ if (picref) \r
+ { \r
+ safe_ptr<AVFrame> frame(avcodec_alloc_frame(), [=](AVFrame* p)\r
+ {\r
+ av_free(p);\r
+ avfilter_unref_buffer(picref);\r
+ });\r
+\r
+ avcodec_get_frame_defaults(frame.get()); \r
+\r
+ memcpy(frame->data, picref->data, sizeof(frame->data));\r
+ memcpy(frame->linesize, picref->linesize, sizeof(frame->linesize));\r
+ frame->format = picref->format;\r
+ frame->width = picref->video->w;\r
+ frame->height = picref->video->h;\r
+ frame->pkt_pos = picref->pos;\r
+ frame->interlaced_frame = picref->video->interlaced;\r
+ frame->top_field_first = picref->video->top_field_first;\r
+ frame->key_frame = picref->video->key_frame;\r
+ frame->pict_type = picref->video->pict_type;\r
+ frame->sample_aspect_ratio = picref->video->sample_aspect_ratio;\r
+\r
+ result.push_back(frame);\r
+ }\r
+ }\r
\r
- return frame;\r
- }\r
-\r
- void skip()\r
- {\r
- int errn = avfilter_poll_frame(video_out_filter_->inputs[0]);\r
- if(errn < 0)\r
- {\r
- BOOST_THROW_EXCEPTION(caspar_exception() << msg_info(av_error_str(errn)) <<\r
- boost::errinfo_api_function("avfilter_poll_frame") << boost::errinfo_errno(AVUNERROR(errn)));\r
- }\r
+ return result;\r
}\r
};\r
\r
-filter::filter(const std::string& filters) : impl_(new implementation(filters)){}\r
-void filter::push(const safe_ptr<AVFrame>& frame) {impl_->push(frame);}\r
-std::vector<safe_ptr<AVFrame>> filter::poll() {return impl_->poll();}\r
-void filter::skip() {impl_->skip();}\r
+filter::filter(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts) : impl_(new implementation(filters, pix_fmts)){}\r
+filter::filter(filter&& other) : impl_(std::move(other.impl_)){}\r
+filter& filter::operator=(filter&& other){impl_ = std::move(other.impl_); return *this;}\r
+std::vector<safe_ptr<AVFrame>> filter::execute(const std::shared_ptr<AVFrame>& frame) {return impl_->execute(frame);}\r
\r
-}
\ No newline at end of file
+}}
\ No newline at end of file