#include "../../stdafx.h"

#include "filter.h"	// NOTE(review): declaration of caspar::ffmpeg::filter — appears to have been dropped in the paste; confirm path.

#include "parallel_yadif.h"

#include "../../ffmpeg_error.h"

#include <boost/assign.hpp>

#include <algorithm>
#include <cstring>
#include <memory>
#include <queue>
#include <sstream>
#include <string>
#include <vector>

#if defined(_MSC_VER)
#pragma warning (push)
#pragma warning (disable : 4244)
#endif
extern "C"
{
	#include <libavutil/avutil.h>
	#include <libavutil/imgutils.h>
	#include <libavfilter/avfilter.h>
	#include <libavfilter/avcodec.h>
	#include <libavfilter/avfiltergraph.h>
	#include <libavfilter/buffersink.h>
	#include <libavfilter/vsrc_buffer.h>
}
#if defined(_MSC_VER)
#pragma warning (pop)
#endif
32 namespace caspar { namespace ffmpeg {
\r
34 struct filter::implementation
\r
36 std::string filters_;
\r
37 std::shared_ptr<AVFilterGraph> graph_;
\r
38 AVFilterContext* buffersink_ctx_;
\r
39 AVFilterContext* buffersrc_ctx_;
\r
40 std::shared_ptr<void> parallel_yadif_ctx_;
\r
41 std::vector<PixelFormat> pix_fmts_;
\r
42 std::queue<std::shared_ptr<AVFrame>> bypass_;
\r
44 implementation(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts)
\r
45 : filters_(narrow(filters))
\r
46 , parallel_yadif_ctx_(nullptr)
\r
47 , pix_fmts_(pix_fmts)
\r
49 if(pix_fmts_.empty())
\r
51 pix_fmts_.push_back(PIX_FMT_YUV420P);
\r
52 pix_fmts_.push_back(PIX_FMT_YUVA420P);
\r
53 pix_fmts_.push_back(PIX_FMT_YUV422P);
\r
54 pix_fmts_.push_back(PIX_FMT_YUV444P);
\r
55 pix_fmts_.push_back(PIX_FMT_YUV411P);
\r
56 pix_fmts_.push_back(PIX_FMT_ARGB);
\r
57 pix_fmts_.push_back(PIX_FMT_RGBA);
\r
58 pix_fmts_.push_back(PIX_FMT_ABGR);
\r
59 pix_fmts_.push_back(PIX_FMT_GRAY8);
\r
60 pix_fmts_.push_back(PIX_FMT_NONE);
\r
63 pix_fmts_.push_back(PIX_FMT_NONE);
\r
65 std::transform(filters_.begin(), filters_.end(), filters_.begin(), ::tolower);
\r
68 void push(const std::shared_ptr<AVFrame>& frame)
\r
73 if(filters_.empty())
\r
75 bypass_.push(frame);
\r
81 graph_.reset(avfilter_graph_alloc(), [](AVFilterGraph* p){avfilter_graph_free(&p);});
\r
84 std::stringstream args;
\r
85 args << frame->width << ":" << frame->height << ":" << frame->format << ":" << 0 << ":" << 0 << ":" << 0 << ":" << 0; // don't care about pts and aspect_ratio
\r
86 THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersrc_ctx_, avfilter_get_by_name("buffer"), "src", args.str().c_str(), NULL, graph_.get()), "[filter]");
\r
88 // OPIX_FMT_BGRAutput
\r
89 AVBufferSinkParams *buffersink_params = av_buffersink_params_alloc();
\r
90 buffersink_params->pixel_fmts = pix_fmts_.data();
\r
91 THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersink_ctx_, avfilter_get_by_name("buffersink"), "out", NULL, buffersink_params, graph_.get()), "[filter]");
\r
93 AVFilterInOut* outputs = avfilter_inout_alloc();
\r
94 AVFilterInOut* inputs = avfilter_inout_alloc();
\r
96 outputs->name = av_strdup("in");
\r
97 outputs->filter_ctx = buffersrc_ctx_;
\r
98 outputs->pad_idx = 0;
\r
99 outputs->next = NULL;
\r
101 inputs->name = av_strdup("out");
\r
102 inputs->filter_ctx = buffersink_ctx_;
\r
103 inputs->pad_idx = 0;
\r
104 inputs->next = NULL;
\r
106 THROW_ON_ERROR2(avfilter_graph_parse(graph_.get(), filters_.c_str(), &inputs, &outputs, NULL), "[filter]");
\r
108 avfilter_inout_free(&inputs);
\r
109 avfilter_inout_free(&outputs);
\r
111 THROW_ON_ERROR2(avfilter_graph_config(graph_.get(), NULL), "[filter]");
\r
113 for(size_t n = 0; n < graph_->filter_count; ++n)
\r
115 auto filter_name = graph_->filters[n]->name;
\r
116 if(strstr(filter_name, "yadif") != 0)
\r
117 parallel_yadif_ctx_ = make_parallel_yadif(graph_->filters[n]);
\r
121 THROW_ON_ERROR2(av_vsrc_buffer_add_frame(buffersrc_ctx_, frame.get(), 0), "[filter]");
\r
124 std::shared_ptr<AVFrame> poll()
\r
126 if(filters_.empty())
\r
128 if(bypass_.empty())
\r
130 auto frame = bypass_.front();
\r
138 if(avfilter_poll_frame(buffersink_ctx_->inputs[0]))
\r
140 AVFilterBufferRef *picref;
\r
141 THROW_ON_ERROR2(av_buffersink_get_buffer_ref(buffersink_ctx_, &picref, 0), "[filter]");
\r
145 safe_ptr<AVFrame> frame(avcodec_alloc_frame(), [=](AVFrame* p)
\r
148 avfilter_unref_buffer(picref);
\r
151 avcodec_get_frame_defaults(frame.get());
\r
153 memcpy(frame->data, picref->data, sizeof(frame->data));
\r
154 memcpy(frame->linesize, picref->linesize, sizeof(frame->linesize));
\r
155 frame->format = picref->format;
\r
156 frame->width = picref->video->w;
\r
157 frame->height = picref->video->h;
\r
158 frame->pkt_pos = picref->pos;
\r
159 frame->interlaced_frame = picref->video->interlaced;
\r
160 frame->top_field_first = picref->video->top_field_first;
\r
161 frame->key_frame = picref->video->key_frame;
\r
162 frame->pict_type = picref->video->pict_type;
\r
163 frame->sample_aspect_ratio = picref->video->sample_aspect_ratio;
\r
173 filter::filter(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts) : impl_(new implementation(filters, pix_fmts)){}
\r
174 filter::filter(filter&& other) : impl_(std::move(other.impl_)){}
\r
175 filter& filter::operator=(filter&& other){impl_ = std::move(other.impl_); return *this;}
\r
176 void filter::push(const std::shared_ptr<AVFrame>& frame){impl_->push(frame);}
\r
177 std::shared_ptr<AVFrame> filter::poll(){return impl_->poll();}
\r