[casparcg] / modules / ffmpeg / producer / filter / filter.cpp
#include "../../stdafx.h"

#include "filter.h"

#include "parallel_yadif.h"

#include "../../ffmpeg_error.h"

#include <common/exception/exceptions.h>

#include <boost/assign.hpp>

#include <algorithm>
#include <cstdio>
#include <cstring>
#include <memory>
#include <queue>
#include <sstream>
#include <vector>

#if defined(_MSC_VER)
#pragma warning (push)
#pragma warning (disable : 4244)
#endif
extern "C"
{
    #include <libavutil/avutil.h>
    #include <libavutil/imgutils.h>
    #include <libavfilter/avfilter.h>
    #include <libavfilter/avcodec.h>
    #include <libavfilter/avfiltergraph.h>
    #include <libavfilter/buffersink.h>
    #include <libavfilter/vsrc_buffer.h>
}
#if defined(_MSC_VER)
#pragma warning (pop)
#endif
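
// Note: these headers belong to the old libavfilter API (vsrc_buffer.h, AVFilterBufferRef,
// av_buffersink_get_buffer_ref). Later FFmpeg releases removed them in favour of the
// av_buffersrc_*/av_buffersink_get_frame() interface, so the calls below assume a
// contemporary (pre-1.0) FFmpeg build.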

namespace caspar { namespace ffmpeg {

struct filter::implementation
{
    std::string                          filters_;             // lower-cased libavfilter graph description
    std::shared_ptr<AVFilterGraph>       graph_;
    AVFilterContext*                     buffersink_ctx_;
    AVFilterContext*                     buffersrc_ctx_;
    std::shared_ptr<void>                parallel_yadif_ctx_;
    std::vector<PixelFormat>             pix_fmts_;            // accepted output formats, terminated by PIX_FMT_NONE
    std::queue<std::shared_ptr<AVFrame>> bypass_;              // pass-through queue used when no filter string is set

    implementation(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts)
        : filters_(narrow(filters))
        , parallel_yadif_ctx_(nullptr)
        , pix_fmts_(pix_fmts)
    {
        // The buffersink expects its list of accepted pixel formats to be terminated
        // by PIX_FMT_NONE; fall back to a default set when none was supplied.
        if(pix_fmts_.empty())
        {
            pix_fmts_.push_back(PIX_FMT_YUV420P);
            pix_fmts_.push_back(PIX_FMT_YUVA420P);
            pix_fmts_.push_back(PIX_FMT_YUV422P);
            pix_fmts_.push_back(PIX_FMT_YUV444P);
            pix_fmts_.push_back(PIX_FMT_YUV411P);
            pix_fmts_.push_back(PIX_FMT_ARGB);
            pix_fmts_.push_back(PIX_FMT_RGBA);
            pix_fmts_.push_back(PIX_FMT_ABGR);
            pix_fmts_.push_back(PIX_FMT_GRAY8);
        }

        pix_fmts_.push_back(PIX_FMT_NONE);

        std::transform(filters_.begin(), filters_.end(), filters_.begin(), ::tolower);
    }

    void push(const std::shared_ptr<AVFrame>& frame)
    {
        if(!frame)
            return;

        if(frame->data[0] == nullptr || frame->width < 1)
            BOOST_THROW_EXCEPTION(invalid_argument());

        // Without a filter string the frame is simply queued for pass-through.
        if(filters_.empty())
        {
            bypass_.push(frame);
            return;
        }

        try
        {
            // The graph is built lazily on the first frame, since the buffer source
            // needs the frame's dimensions and pixel format.
            if(!graph_)
            {
                try
                {
                    graph_.reset(avfilter_graph_alloc(), [](AVFilterGraph* p){avfilter_graph_free(&p);});

                    // Input (buffer source)
                    std::stringstream args;
                    args << frame->width << ":" << frame->height << ":" << frame->format << ":" << 0 << ":" << 0 << ":" << 0 << ":" << 0; // don't care about pts and aspect_ratio
                    THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersrc_ctx_, avfilter_get_by_name("buffer"), "src", args.str().c_str(), NULL, graph_.get()), "[filter]");

                    // Output (buffer sink)
                    AVBufferSinkParams* buffersink_params = av_buffersink_params_alloc();
                    buffersink_params->pixel_fmts = pix_fmts_.data();
                    THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersink_ctx_, avfilter_get_by_name("buffersink"), "out", NULL, buffersink_params, graph_.get()), "[filter]");
                    av_free(buffersink_params); // only needed during filter creation

                    AVFilterInOut* outputs = avfilter_inout_alloc();
                    AVFilterInOut* inputs  = avfilter_inout_alloc();

                    outputs->name       = av_strdup("in");
                    outputs->filter_ctx = buffersrc_ctx_;
                    outputs->pad_idx    = 0;
                    outputs->next       = NULL;

                    inputs->name        = av_strdup("out");
                    inputs->filter_ctx  = buffersink_ctx_;
                    inputs->pad_idx     = 0;
                    inputs->next        = NULL;

                    THROW_ON_ERROR2(avfilter_graph_parse(graph_.get(), filters_.c_str(), &inputs, &outputs, NULL), "[filter]");

                    avfilter_inout_free(&inputs);
                    avfilter_inout_free(&outputs);

                    THROW_ON_ERROR2(avfilter_graph_config(graph_.get(), NULL), "[filter]");

                    // Hand any yadif instances over to the parallelized yadif implementation.
                    for(size_t n = 0; n < graph_->filter_count; ++n)
                    {
                        auto filter_name = graph_->filters[n]->name;
                        if(strstr(filter_name, "yadif") != nullptr)
                            parallel_yadif_ctx_ = make_parallel_yadif(graph_->filters[n]);
                    }
                }
                catch(...)
                {
                    graph_ = nullptr;
                    throw;
                }
            }

            THROW_ON_ERROR2(av_vsrc_buffer_add_frame(buffersrc_ctx_, frame.get(), 0), "[filter]");
        }
        catch(ffmpeg_error&)
        {
            throw;
        }
        catch(...)
        {
            BOOST_THROW_EXCEPTION(ffmpeg_error() << boost::errinfo_nested_exception(boost::current_exception()));
        }
    }

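    // For reference, the graph assembled above has the topology (a sketch; the exact
    // chain depends on the user-supplied filter string, e.g. "yadif=1:-1"):
    //
    //   [buffer "src"] -> <parsed filter string> -> [buffersink "out"]
    //
    // push() feeds the "src" end via av_vsrc_buffer_add_frame() and poll() drains the
    // "out" end via av_buffersink_get_buffer_ref().
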
    std::shared_ptr<AVFrame> poll()
    {
        // Pass-through mode: return queued frames in order.
        if(filters_.empty())
        {
            if(bypass_.empty())
                return nullptr;
            auto frame = bypass_.front();
            bypass_.pop();
            return frame;
        }

        if(!graph_)
            return nullptr;

        try
        {
            if(avfilter_poll_frame(buffersink_ctx_->inputs[0]))
            {
                AVFilterBufferRef* picref;
                THROW_ON_ERROR2(av_buffersink_get_buffer_ref(buffersink_ctx_, &picref, 0), "[filter]");

                if(picref)
                {
                    // Wrap the buffer reference in an AVFrame; the custom deleter releases
                    // both the frame and the underlying buffer reference together.
                    safe_ptr<AVFrame> frame(avcodec_alloc_frame(), [=](AVFrame* p)
                    {
                        av_free(p);
                        avfilter_unref_buffer(picref);
                    });

                    avcodec_get_frame_defaults(frame.get());

                    memcpy(frame->data,     picref->data,     sizeof(frame->data));
                    memcpy(frame->linesize, picref->linesize, sizeof(frame->linesize));
                    frame->format              = picref->format;
                    frame->width               = picref->video->w;
                    frame->height              = picref->video->h;
                    frame->pkt_pos             = picref->pos;
                    frame->interlaced_frame    = picref->video->interlaced;
                    frame->top_field_first     = picref->video->top_field_first;
                    frame->key_frame           = picref->video->key_frame;
                    frame->pict_type           = picref->video->pict_type;
                    frame->sample_aspect_ratio = picref->video->sample_aspect_ratio;

                    return frame;
                }
            }
        }
        catch(ffmpeg_error&)
        {
            throw;
        }
        catch(...)
        {
            BOOST_THROW_EXCEPTION(ffmpeg_error() << boost::errinfo_nested_exception(boost::current_exception()));
        }

        return nullptr;
    }
};

filter::filter(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts) : impl_(new implementation(filters, pix_fmts)){}
filter::filter(filter&& other) : impl_(std::move(other.impl_)){}
filter& filter::operator=(filter&& other){impl_ = std::move(other.impl_); return *this;}
void filter::push(const std::shared_ptr<AVFrame>& frame){impl_->push(frame);}
std::shared_ptr<AVFrame> filter::poll(){return impl_->poll();}
std::string filter::filter_str() const{return impl_->filters_;}
std::vector<safe_ptr<AVFrame>> filter::poll_all()
{
    std::vector<safe_ptr<AVFrame>> frames;
    for(auto frame = poll(); frame; frame = poll())
        frames.push_back(make_safe_ptr(frame));
    return frames;
}

}}
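
// Usage sketch (illustrative only and excluded from compilation): one way a caller
// might drive the push/poll interface with a yadif deinterlacing graph. The
// decode_next_frame() and consume() calls are hypothetical stand-ins for the caller's
// decoder and downstream consumer; they are not part of this module.
#if 0
void example_deinterlace()
{
    caspar::ffmpeg::filter deinterlace(L"yadif=1:-1", std::vector<PixelFormat>());

    while(auto decoded = decode_next_frame())       // hypothetical decoder call
    {
        deinterlace.push(decoded);                  // feed the graph's buffer source

        auto filtered = deinterlace.poll_all();     // drain the buffer sink
        for(size_t n = 0; n < filtered.size(); ++n)
            consume(filtered[n]);                   // hypothetical downstream consumer
    }
}
#endif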