modules/ffmpeg/producer/filter/filter.cpp
#include "../../stdafx.h"

#include "filter.h"

#include "parallel_yadif.h"

#include "../../ffmpeg_error.h"

#include <boost/assign.hpp>

#include <algorithm>
#include <cstdio>
#include <queue>
#include <sstream>

#if defined(_MSC_VER)
#pragma warning (push)
#pragma warning (disable : 4244)
#endif
extern "C" 
{
	#include <libavutil/avutil.h>
	#include <libavutil/imgutils.h>
	#include <libavfilter/avfilter.h>
	#include <libavfilter/avcodec.h>
	#include <libavfilter/avfiltergraph.h>
	#include <libavfilter/buffersink.h>
	#include <libavfilter/vsrc_buffer.h>
}
#if defined(_MSC_VER)
#pragma warning (pop)
#endif

namespace caspar { namespace ffmpeg {
	
struct filter::implementation
{
	std::string filters_;
	std::shared_ptr<AVFilterGraph> graph_;
	AVFilterContext* buffersink_ctx_;
	AVFilterContext* buffersrc_ctx_;
	std::shared_ptr<void> parallel_yadif_ctx_;
	std::vector<PixelFormat> pix_fmts_;
	std::queue<std::shared_ptr<AVFrame>> bypass_;

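	// When no pixel formats are requested, accept a set of common formats.
	// The list handed to buffersink must be terminated with PIX_FMT_NONE.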
	implementation(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts) 
		: filters_(narrow(filters))
		, parallel_yadif_ctx_(nullptr)
		, pix_fmts_(pix_fmts)
	{
		if(pix_fmts_.empty())
		{
			pix_fmts_.push_back(PIX_FMT_YUV420P);
			pix_fmts_.push_back(PIX_FMT_YUVA420P);
			pix_fmts_.push_back(PIX_FMT_YUV422P);
			pix_fmts_.push_back(PIX_FMT_YUV444P);
			pix_fmts_.push_back(PIX_FMT_YUV411P);
			pix_fmts_.push_back(PIX_FMT_ARGB);
			pix_fmts_.push_back(PIX_FMT_RGBA);
			pix_fmts_.push_back(PIX_FMT_ABGR);
			pix_fmts_.push_back(PIX_FMT_GRAY8);
			pix_fmts_.push_back(PIX_FMT_NONE);
		}
		else
			pix_fmts_.push_back(PIX_FMT_NONE);

		std::transform(filters_.begin(), filters_.end(), filters_.begin(), ::tolower);
	}

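	// Queue a frame for filtering. When no filter string is set, the frame is
	// passed through untouched via the bypass queue.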
	void push(const std::shared_ptr<AVFrame>& frame)
	{
		if(!frame)
			return;

		if(filters_.empty())
		{
			bypass_.push(frame);
			return;
		}

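		// The filter graph is built lazily from the first frame, since the
		// source width, height and pixel format are only known at this point.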
		if(!graph_)
		{
			graph_.reset(avfilter_graph_alloc(), [](AVFilterGraph* p){avfilter_graph_free(&p);});

			// Input
			std::stringstream args;
			args << frame->width << ":" << frame->height << ":" << frame->format << ":" << 0 << ":" << 0 << ":" << 0 << ":" << 0; // don't care about pts and aspect_ratio
			THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersrc_ctx_, avfilter_get_by_name("buffer"), "src", args.str().c_str(), NULL, graph_.get()), "[filter]");

			// Output
			AVBufferSinkParams* buffersink_params = av_buffersink_params_alloc();
			buffersink_params->pixel_fmts = pix_fmts_.data();
			THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersink_ctx_, avfilter_get_by_name("buffersink"), "out", NULL, buffersink_params, graph_.get()), "[filter]");
			
			AVFilterInOut* outputs = avfilter_inout_alloc();
			AVFilterInOut* inputs  = avfilter_inout_alloc();
			
			outputs->name       = av_strdup("in");
			outputs->filter_ctx = buffersrc_ctx_;
			outputs->pad_idx    = 0;
			outputs->next       = NULL;

			inputs->name       = av_strdup("out");
			inputs->filter_ctx = buffersink_ctx_;
			inputs->pad_idx    = 0;
			inputs->next       = NULL;
			
			THROW_ON_ERROR2(avfilter_graph_parse(graph_.get(), filters_.c_str(), &inputs, &outputs, NULL), "[filter]");
			
			avfilter_inout_free(&inputs);
			avfilter_inout_free(&outputs);

			THROW_ON_ERROR2(avfilter_graph_config(graph_.get(), NULL), "[filter]");

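			// Hand any yadif filter in the graph over to the parallelized
			// yadif wrapper so deinterlacing work is spread across threads.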
			for(size_t n = 0; n < graph_->filter_count; ++n)
			{
				auto filter_name = graph_->filters[n]->name;
				if(strstr(filter_name, "yadif") != 0)
					parallel_yadif_ctx_ = make_parallel_yadif(graph_->filters[n]);
			}
		}

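		// Feed the decoded frame into the buffer source filter.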
		THROW_ON_ERROR2(av_vsrc_buffer_add_frame(buffersrc_ctx_, frame.get(), 0), "[filter]");
	}

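	// Return the next filtered frame if one is ready, otherwise nullptr.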
	std::shared_ptr<AVFrame> poll()
	{
		if(filters_.empty())
		{
			if(bypass_.empty())
				return nullptr;
			auto frame = bypass_.front();
			bypass_.pop();
			return frame;
		}

		if(!graph_)
			return nullptr;
		
		if(avfilter_poll_frame(buffersink_ctx_->inputs[0]))
		{
			AVFilterBufferRef* picref;
			THROW_ON_ERROR2(av_buffersink_get_buffer_ref(buffersink_ctx_, &picref, 0), "[filter]");

			if(picref)
			{
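				// Wrap the buffer reference in an AVFrame without copying the
				// pixel data; the custom deleter releases the reference when
				// the frame goes out of scope.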
				safe_ptr<AVFrame> frame(avcodec_alloc_frame(), [=](AVFrame* p)
				{
					av_free(p);
					avfilter_unref_buffer(picref);
				});

				avcodec_get_frame_defaults(frame.get());

				memcpy(frame->data,     picref->data,     sizeof(frame->data));
				memcpy(frame->linesize, picref->linesize, sizeof(frame->linesize));
				frame->format              = picref->format;
				frame->width               = picref->video->w;
				frame->height              = picref->video->h;
				frame->pkt_pos             = picref->pos;
				frame->interlaced_frame    = picref->video->interlaced;
				frame->top_field_first     = picref->video->top_field_first;
				frame->key_frame           = picref->video->key_frame;
				frame->pict_type           = picref->video->pict_type;
				frame->sample_aspect_ratio = picref->video->sample_aspect_ratio;

				return frame;
			}
		}

		return nullptr;
	}
};

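// Illustrative usage (the filter string below is only an example; any graph
// description accepted by avfilter_graph_parse can be used, and decoded_frame
// stands for a std::shared_ptr<AVFrame> obtained from the decoder):
//
//   caspar::ffmpeg::filter deinterlacer(L"yadif=0:-1");
//   deinterlacer.push(decoded_frame);
//   auto frames = deinterlacer.poll_all(); // zero or more filtered frames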
filter::filter(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts) : impl_(new implementation(filters, pix_fmts)){}
filter::filter(filter&& other) : impl_(std::move(other.impl_)){}
filter& filter::operator=(filter&& other){impl_ = std::move(other.impl_); return *this;}
void filter::push(const std::shared_ptr<AVFrame>& frame){impl_->push(frame);}
std::shared_ptr<AVFrame> filter::poll(){return impl_->poll();}
std::vector<safe_ptr<AVFrame>> filter::poll_all()
{
	std::vector<safe_ptr<AVFrame>> frames;
	while(true)
	{
		auto frame = poll();
		if(!frame)
			break;
		frames.push_back(make_safe_ptr(frame));
	}
	return frames;
}

}}