]> git.sesse.net Git - casparcg/blob - modules/ffmpeg/producer/filter/filter.cpp
d2c3bd41fee7a994e9c7b7f65167ed6bf1aa61a4
[casparcg] / modules / ffmpeg / producer / filter / filter.cpp
1 #include "../../stdafx.h"\r
2 \r
3 #include "filter.h"\r
4 \r
5 #include "parallel_yadif.h"\r
6 \r
7 #include "../../ffmpeg_error.h"\r
8 \r
9 #include <boost/assign.hpp>\r
10 \r
11 #include <cstdio>\r
12 #include <sstream>\r
13 \r
14 #if defined(_MSC_VER)\r
15 #pragma warning (push)\r
16 #pragma warning (disable : 4244)\r
17 #endif\r
18 extern "C" \r
19 {\r
20         #include <libavutil/avutil.h>\r
21         #include <libavutil/imgutils.h>\r
22         #include <libavfilter/avfilter.h>\r
23         #include <libavfilter/avcodec.h>\r
24         #include <libavfilter/avfiltergraph.h>\r
25         #include <libavfilter/vsink_buffer.h>\r
26         #include <libavfilter/vsrc_buffer.h>\r
27 }\r
28 #if defined(_MSC_VER)\r
29 #pragma warning (pop)\r
30 #endif\r
31 \r
32 namespace caspar {\r
33         \r
// Pixel formats the filter graph's "buffersink" output is permitted to produce
// (passed as the opaque init parameter when the sink is created in
// filter::implementation::push). The list is PIX_FMT_NONE-terminated, as
// libavfilter requires. Deliberately non-const: avfilter_graph_create_filter
// takes the parameter as a non-const void*.
PixelFormat pix_fmts[] = 
{
	PIX_FMT_YUV420P,
	PIX_FMT_YUVA420P,
	PIX_FMT_YUV422P,
	PIX_FMT_YUV444P,
	PIX_FMT_YUV411P,
	PIX_FMT_ARGB, 
	PIX_FMT_RGBA,
	PIX_FMT_ABGR,
	PIX_FMT_GRAY8,
	PIX_FMT_NONE
};	
47 \r
48 struct filter::implementation\r
49 {\r
50         std::string                                             filters_;\r
51         std::shared_ptr<AVFilterGraph>  graph_; \r
52         AVFilterContext*                                buffersink_ctx_;\r
53         AVFilterContext*                                buffersrc_ctx_;\r
54         std::shared_ptr<void>                   parallel_yadif_ctx_;\r
55                 \r
56         implementation(const std::wstring& filters) \r
57                 : filters_(narrow(filters))\r
58                 , parallel_yadif_ctx_(nullptr)\r
59         {\r
60                 std::transform(filters_.begin(), filters_.end(), filters_.begin(), ::tolower);\r
61         }\r
62         \r
63         std::vector<safe_ptr<AVFrame>> execute(const std::shared_ptr<AVFrame>& frame)\r
64         {\r
65                 if(!frame)\r
66                         return std::vector<safe_ptr<AVFrame>>();\r
67 \r
68                 if(filters_.empty())\r
69                         return boost::assign::list_of(frame);\r
70 \r
71                 push(frame);\r
72                 return poll();\r
73         }\r
74 \r
75         void push(const std::shared_ptr<AVFrame>& frame)\r
76         {               \r
77 \r
78                 if(!graph_)\r
79                 {\r
80                         graph_.reset(avfilter_graph_alloc(), [](AVFilterGraph* p){avfilter_graph_free(&p);});\r
81                                                                 \r
82                         // Input\r
83                         std::stringstream args;\r
84                         args << frame->width << ":" << frame->height << ":" << frame->format << ":" << 0 << ":" << 0 << ":" << 0 << ":" << 0; // don't care about pts and aspect_ratio\r
85                         THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersrc_ctx_, avfilter_get_by_name("buffer"), "src", args.str().c_str(), NULL, graph_.get()), "[filter]");\r
86 \r
87                         // OPIX_FMT_BGRAutput\r
88                         THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersink_ctx_, avfilter_get_by_name("buffersink"), "out", NULL, pix_fmts, graph_.get()), "[filter]");\r
89                         \r
90                         AVFilterInOut* outputs = avfilter_inout_alloc();\r
91                         AVFilterInOut* inputs  = avfilter_inout_alloc();\r
92                         \r
93                         outputs->name                   = av_strdup("in");\r
94                         outputs->filter_ctx             = buffersrc_ctx_;\r
95                         outputs->pad_idx                = 0;\r
96                         outputs->next                   = NULL;\r
97 \r
98                         inputs->name                    = av_strdup("out");\r
99                         inputs->filter_ctx              = buffersink_ctx_;\r
100                         inputs->pad_idx                 = 0;\r
101                         inputs->next                    = NULL;\r
102                         \r
103                         THROW_ON_ERROR2(avfilter_graph_parse(graph_.get(), filters_.c_str(), &inputs, &outputs, NULL), "[filter]");\r
104                         \r
105                         avfilter_inout_free(&inputs);\r
106                         avfilter_inout_free(&outputs);\r
107 \r
108                         THROW_ON_ERROR2(avfilter_graph_config(graph_.get(), NULL), "[filter]");                 \r
109 \r
110                         for(size_t n = 0; n < graph_->filter_count; ++n)\r
111                         {\r
112                                 auto filter_name = graph_->filters[n]->name;\r
113                                 if(strstr(filter_name, "yadif") != 0)\r
114                                         parallel_yadif_ctx_ = make_parallel_yadif(graph_->filters[n]);\r
115                         }\r
116                 }\r
117         \r
118                 THROW_ON_ERROR2(av_vsrc_buffer_add_frame(buffersrc_ctx_, frame.get(), 0), "[filter]");\r
119         }\r
120 \r
121         std::vector<safe_ptr<AVFrame>> poll()\r
122         {\r
123                 std::vector<safe_ptr<AVFrame>> result;\r
124 \r
125                 if(!graph_)\r
126                         return result;\r
127                 \r
128                 while (avfilter_poll_frame(buffersink_ctx_->inputs[0])) \r
129                 {\r
130                         AVFilterBufferRef *picref;\r
131                         THROW_ON_ERROR2(av_vsink_buffer_get_video_buffer_ref(buffersink_ctx_, &picref, 0), "[filter]");\r
132 \r
133             if (picref) \r
134                         {               \r
135                                 safe_ptr<AVFrame> frame(avcodec_alloc_frame(), [=](AVFrame* p)\r
136                                 {\r
137                                         av_free(p);\r
138                                         avfilter_unref_buffer(picref);\r
139                                 });\r
140 \r
141                                 avcodec_get_frame_defaults(frame.get());        \r
142 \r
143                                 memcpy(frame->data,     picref->data,     sizeof(frame->data));\r
144                                 memcpy(frame->linesize, picref->linesize, sizeof(frame->linesize));\r
145                                 frame->format                           = picref->format;\r
146                                 frame->width                            = picref->video->w;\r
147                                 frame->height                           = picref->video->h;\r
148                                 frame->pkt_pos                          = picref->pos;\r
149                                 frame->interlaced_frame         = picref->video->interlaced;\r
150                                 frame->top_field_first          = picref->video->top_field_first;\r
151                                 frame->key_frame                        = picref->video->key_frame;\r
152                                 frame->pict_type                        = picref->video->pict_type;\r
153                                 frame->sample_aspect_ratio      = picref->video->sample_aspect_ratio;\r
154 \r
155                                 result.push_back(frame);\r
156             }\r
157         }\r
158 \r
159                 return result;\r
160         }\r
161 };\r
162 \r
163 filter::filter(const std::wstring& filters) : impl_(new implementation(filters)){}\r
164 filter::filter(filter&& other) : impl_(std::move(other.impl_)){}\r
165 filter& filter::operator=(filter&& other){impl_ = std::move(other.impl_); return *this;}\r
166 std::vector<safe_ptr<AVFrame>> filter::execute(const std::shared_ptr<AVFrame>& frame) {return impl_->execute(frame);}\r
167 }