// modules/ffmpeg/producer/filter/filter.cpp
#include "../../stdafx.h"

#include "filter.h"

#include "parallel_yadif.h"

#include "../../ffmpeg_error.h"

#include <boost/assign.hpp>

#include <algorithm> // std::transform
#include <cstdio>
#include <sstream>

#if defined(_MSC_VER)
#pragma warning (push)
#pragma warning (disable : 4244)
#endif
extern "C"
{
	#include <libavutil/avutil.h>
	#include <libavutil/imgutils.h>
	#include <libavfilter/avfilter.h>
	#include <libavfilter/avcodec.h>
	#include <libavfilter/avfiltergraph.h>
	#include <libavfilter/vsink_buffer.h>
	#include <libavfilter/vsrc_buffer.h>
}
#if defined(_MSC_VER)
#pragma warning (pop)
#endif
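// NOTE: vsink_buffer.h and vsrc_buffer.h belong to the older libavfilter API
// targeted by this file; later FFmpeg releases expose buffersrc.h/buffersink.h
// instead, so a matching FFmpeg version is assumed here.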

namespace caspar {

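// Pixel formats the filter graph is allowed to produce; this array is handed
// to the buffersink below and must be terminated by PIX_FMT_NONE.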
PixelFormat pix_fmts[] =
{
	PIX_FMT_YUV420P,
	PIX_FMT_YUVA420P,
	PIX_FMT_YUV422P,
	PIX_FMT_YUV444P,
	PIX_FMT_YUV411P,
	PIX_FMT_ARGB,
	PIX_FMT_RGBA,
	PIX_FMT_ABGR,
	PIX_FMT_GRAY8,
	PIX_FMT_NONE
};

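// Wraps a lazily constructed libavfilter graph: frames are pushed into a
// buffer source, run through the user supplied filter chain and collected
// from a buffer sink.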
struct filter::implementation
{
	std::string						filters_;
	std::shared_ptr<AVFilterGraph>	graph_;
	AVFilterContext*				buffersink_ctx_;
	AVFilterContext*				buffersrc_ctx_;
	std::shared_ptr<void>			parallel_yadif_ctx_;

	implementation(const std::wstring& filters)
		: filters_(narrow(filters))
		, parallel_yadif_ctx_(nullptr)
	{
		std::transform(filters_.begin(), filters_.end(), filters_.begin(), ::tolower);
	}

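	// Runs a frame through the filter graph. A null frame yields an empty
	// result and an empty filter string passes the frame through untouched.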
	std::vector<safe_ptr<AVFrame>> execute(const std::shared_ptr<AVFrame>& frame)
	{
		if(!frame)
			return std::vector<safe_ptr<AVFrame>>();

		if(filters_.empty())
			return boost::assign::list_of(frame);

		push(frame);
		return poll();
	}

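	// Builds the filter graph on first use (buffer source -> user filters ->
	// buffer sink) and queues the frame in the buffer source. Any yadif filter
	// found in the configured graph is wrapped for parallel execution.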
	void push(const std::shared_ptr<AVFrame>& frame)
	{
		if(!graph_)
		{
			graph_.reset(avfilter_graph_alloc(), [](AVFilterGraph* p){avfilter_graph_free(&p);});

			// Input
			std::stringstream args;
			args << frame->width << ":" << frame->height << ":" << frame->format << ":" << 0 << ":" << 0 << ":" << 0 << ":" << 0; // time_base and sample aspect ratio are not needed here
			THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersrc_ctx_, avfilter_get_by_name("buffer"), "src", args.str().c_str(), NULL, graph_.get()), "[filter]");

			// Output
			THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersink_ctx_, avfilter_get_by_name("buffersink"), "out", NULL, pix_fmts, graph_.get()), "[filter]");

			AVFilterInOut* outputs = avfilter_inout_alloc();
			AVFilterInOut* inputs  = avfilter_inout_alloc();

			outputs->name		= av_strdup("in");
			outputs->filter_ctx	= buffersrc_ctx_;
			outputs->pad_idx	= 0;
			outputs->next		= NULL;

			inputs->name		= av_strdup("out");
			inputs->filter_ctx	= buffersink_ctx_;
			inputs->pad_idx		= 0;
			inputs->next		= NULL;

			THROW_ON_ERROR2(avfilter_graph_parse(graph_.get(), filters_.c_str(), &inputs, &outputs, NULL), "[filter]");

			avfilter_inout_free(&inputs);
			avfilter_inout_free(&outputs);

			THROW_ON_ERROR2(avfilter_graph_config(graph_.get(), NULL), "[filter]");

			for(size_t n = 0; n < graph_->filter_count; ++n)
			{
				auto filter_name = graph_->filters[n]->name;
				if(strstr(filter_name, "yadif") != 0)
					parallel_yadif_ctx_ = make_parallel_yadif(graph_->filters[n]);
			}
		}

		THROW_ON_ERROR2(av_vsrc_buffer_add_frame(buffersrc_ctx_, frame.get(), 0), "[filter]");
	}

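	// Drains every frame currently queued in the buffer sink. Each returned
	// AVFrame borrows its data from the underlying filter buffer reference,
	// which is released together with the frame.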
	std::vector<safe_ptr<AVFrame>> poll()
	{
		std::vector<safe_ptr<AVFrame>> result;

		if(!graph_)
			return result;

		while(avfilter_poll_frame(buffersink_ctx_->inputs[0]))
		{
			AVFilterBufferRef* picref;
			THROW_ON_ERROR2(av_vsink_buffer_get_video_buffer_ref(buffersink_ctx_, &picref, 0), "[filter]");

			if(picref)
			{
				// Free the AVFrame and the filter buffer reference it points into together.
				safe_ptr<AVFrame> frame(avcodec_alloc_frame(), [=](AVFrame* p)
				{
					av_free(p);
					avfilter_unref_buffer(picref);
				});

				avcodec_get_frame_defaults(frame.get());

				memcpy(frame->data,     picref->data,     sizeof(frame->data));
				memcpy(frame->linesize, picref->linesize, sizeof(frame->linesize));
				frame->format				= picref->format;
				frame->width				= picref->video->w;
				frame->height				= picref->video->h;
				frame->pkt_pos				= picref->pos;
				frame->interlaced_frame		= picref->video->interlaced;
				frame->top_field_first		= picref->video->top_field_first;
				frame->key_frame			= picref->video->key_frame;
				frame->pict_type			= picref->video->pict_type;
				frame->sample_aspect_ratio	= picref->video->sample_aspect_ratio;

				result.push_back(frame);
			}
		}

		return result;
	}
};

filter::filter(const std::wstring& filters) : impl_(new implementation(filters)){}
filter::filter(filter&& other) : impl_(std::move(other.impl_)){}
filter& filter::operator=(filter&& other){impl_ = std::move(other.impl_); return *this;}
std::vector<safe_ptr<AVFrame>> filter::execute(const std::shared_ptr<AVFrame>& frame){return impl_->execute(frame);}
}
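
// Usage sketch (hypothetical, not part of this file's build): the filter string
// uses ordinary libavfilter graph syntax, e.g. "yadif=1:-1" to deinterlace, and
// execute() may return zero, one or several frames per input frame.
//
//   caspar::filter deinterlacer(L"yadif=1:-1");
//
//   std::shared_ptr<AVFrame> decoded = ...; // frame obtained from the decoder
//   auto filtered = deinterlacer.execute(decoded);
//   for(size_t n = 0; n < filtered.size(); ++n)
//       render(filtered[n]);                // hypothetical downstream consumer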