2.0.2: ffmpeg/filter: Added alpha support for yadif filter and (RGB)A formats.
author    ronag <ronag@362d55ac-95cf-4e76-9f9a-cbaa9c17b72d>
          Sun, 4 Dec 2011 15:18:30 +0000 (15:18 +0000)
committer ronag <ronag@362d55ac-95cf-4e76-9f9a-cbaa9c17b72d>
          Sun, 4 Dec 2011 15:18:30 +0000 (15:18 +0000)
git-svn-id: https://casparcg.svn.sourceforge.net/svnroot/casparcg/server/branches/2.0.2@1783 362d55ac-95cf-4e76-9f9a-cbaa9c17b72d

modules/ffmpeg/StdAfx.h
modules/ffmpeg/producer/filter/filter.cpp
modules/ffmpeg/producer/input/input.cpp

modules/ffmpeg/StdAfx.h
index e723026cd94936cc0f881a01d74f59fc864d43df..f7e56ad990a43d50d21b4d23b502bd363f4eafc6 100644
 #include <tbb/parallel_for.h>\r
 \r
 #include <boost/assign.hpp>\r
+#include <boost/algorithm/string.hpp>\r
 #include <boost/filesystem.hpp>\r
 #include <boost/foreach.hpp>\r
 #include <boost/range/algorithm.hpp>\r
 #include <boost/property_tree/ptree.hpp>\r
+#include <boost/assign.hpp>\r
 \r
 #include <common/utility/string.h>\r
 #include <common/utility/assert.h>\r

modules/ffmpeg/producer/filter/filter.cpp
index a6e8e2aad434f07cc32f7163eabd862ef418383e..e9c998e856e3c0d382a35a32290f58cce66dc1bd 100644
 #include <common/exception/exceptions.h>\r
 \r
 #include <boost/assign.hpp>\r
+#include <boost/range/iterator_range.hpp>\r
+#include <boost/range/adaptors.hpp>\r
+#include <boost/assign.hpp>\r
+#include <boost/algorithm/string.hpp>\r
+#include <boost/foreach.hpp>\r
 \r
 #include <cstdio>\r
 #include <sstream>\r
@@ -52,6 +57,7 @@ extern "C"
 #pragma warning (pop)\r
 #endif\r
 \r
+\r
 namespace caspar { namespace ffmpeg {\r
 \r
 static int query_formats_444(AVFilterContext *ctx)\r
@@ -75,6 +81,13 @@ static int query_formats_420(AVFilterContext *ctx)
     return 0;\r
 }\r
 \r
+static int query_formats_420a(AVFilterContext *ctx)\r
+{\r
+    static const int pix_fmts[] = {PIX_FMT_YUVA420P, PIX_FMT_NONE};\r
+    avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(pix_fmts));\r
+    return 0;\r
+}\r
+\r
 static int query_formats_411(AVFilterContext *ctx)\r
 {\r
     static const int pix_fmts[] = {PIX_FMT_YUV411P, PIX_FMT_NONE};\r
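
The new query_formats_420a callback above pins yadif to PIX_FMT_YUVA420P, so sources that carry an alpha channel (the BGRA/RGBA/ABGR/ARGB cases routed to it further down) keep their alpha plane through deinterlacing instead of dropping it. As a rough companion sketch only, not code from this commit, and assuming the old libavutil API where av_pix_fmt_descriptors can be indexed directly by pixel format, the choice between the plain and the alpha-aware callback could equally be derived from the format descriptor:

    #include <libavutil/pixdesc.h>

    typedef int (*query_formats_fn)(AVFilterContext*);

    // Pick the alpha-aware 4:2:0 callback (defined above) when the source
    // format has a fourth, alpha component; otherwise plain 4:2:0 is enough.
    static query_formats_fn pick_query_formats_420(PixelFormat fmt)
    {
        const AVPixFmtDescriptor& desc = av_pix_fmt_descriptors[fmt];
        return desc.nb_components == 4 ? query_formats_420a : query_formats_420;
    }

The commit itself keeps an explicit per-format switch (see the hunk further down), which also lets 9/10-bit and 4:2:2/4:4:4 inputs map onto their own callbacks.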
@@ -84,7 +97,7 @@ static int query_formats_411(AVFilterContext *ctx)
 \r
 struct filter::implementation\r
 {\r
-       std::string                                             filters_;\r
+       std::wstring                                    filters_;\r
        std::shared_ptr<AVFilterGraph>  graph_; \r
        AVFilterContext*                                buffersink_ctx_;\r
        AVFilterContext*                                buffersrc_ctx_;\r
@@ -93,28 +106,26 @@ struct filter::implementation
        std::queue<safe_ptr<AVFrame>>   bypass_;\r
                \r
        implementation(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts) \r
-               : filters_(narrow(filters))\r
+               : filters_(filters)\r
                , parallel_yadif_ctx_(nullptr)\r
                , pix_fmts_(pix_fmts)\r
        {\r
                if(pix_fmts_.empty())\r
                {\r
-                       pix_fmts_.push_back(PIX_FMT_YUVA420P);\r
-                       pix_fmts_.push_back(PIX_FMT_YUV444P);\r
-                       pix_fmts_.push_back(PIX_FMT_YUV422P);\r
-                       pix_fmts_.push_back(PIX_FMT_YUV420P);\r
-                       pix_fmts_.push_back(PIX_FMT_YUV411P);\r
-                       pix_fmts_.push_back(PIX_FMT_BGRA);\r
-                       pix_fmts_.push_back(PIX_FMT_ARGB);\r
-                       pix_fmts_.push_back(PIX_FMT_RGBA);\r
-                       pix_fmts_.push_back(PIX_FMT_ABGR);\r
-                       pix_fmts_.push_back(PIX_FMT_GRAY8);\r
-                       pix_fmts_.push_back(PIX_FMT_NONE);\r
+                       pix_fmts_ = boost::assign::list_of\r
+                               (PIX_FMT_YUVA420P)\r
+                               (PIX_FMT_YUV444P)\r
+                               (PIX_FMT_YUV422P)\r
+                               (PIX_FMT_YUV420P)\r
+                               (PIX_FMT_YUV411P)\r
+                               (PIX_FMT_BGRA)\r
+                               (PIX_FMT_ARGB)\r
+                               (PIX_FMT_RGBA)\r
+                               (PIX_FMT_ABGR)\r
+                               (PIX_FMT_GRAY8);\r
                }\r
-               else\r
-                       pix_fmts_.push_back(PIX_FMT_NONE);\r
-\r
-               std::transform(filters_.begin(), filters_.end(), filters_.begin(), ::tolower);\r
+               \r
+               pix_fmts_.push_back(PIX_FMT_NONE);\r
        }\r
        \r
        void push(const std::shared_ptr<AVFrame>& frame)\r
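
The default pixel-format list in the constructor is now built with boost::assign::list_of instead of a chain of push_back calls, and the PIX_FMT_NONE terminator is appended once, unconditionally, so both the default list and a caller-supplied pix_fmts end up terminated. A minimal, self-contained illustration of the list_of idiom (the values are arbitrary placeholders, not from the commit):

    #include <boost/assign/list_of.hpp>
    #include <vector>
    #include <cassert>

    int main()
    {
        // list_of builds the container inline; the result converts to std::vector.
        std::vector<int> fmts = boost::assign::list_of(0)(2)(4);
        fmts.push_back(-1);   // sentinel appended afterwards, like PIX_FMT_NONE above
        assert(fmts.size() == 4);
        return 0;
    }

Note that the std::transform(... ::tolower ...) call is gone from the constructor; lower-casing now happens when the graph is parsed (next hunk).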
@@ -151,52 +162,54 @@ struct filter::implementation
                                        buffersink_params->pixel_fmts = pix_fmts_.data();\r
                                        THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersink_ctx_, avfilter_get_by_name("buffersink"), "out", NULL, buffersink_params.get(), graph_.get()), "[filter]");\r
 #endif\r
-                                       AVFilterInOut* outputs = avfilter_inout_alloc();\r
                                        AVFilterInOut* inputs  = avfilter_inout_alloc();\r
-                       \r
+                                       AVFilterInOut* outputs = avfilter_inout_alloc();\r
+                                                               \r
                                        outputs->name                   = av_strdup("in");\r
                                        outputs->filter_ctx             = buffersrc_ctx_;\r
                                        outputs->pad_idx                = 0;\r
-                                       outputs->next                   = NULL;\r
+                                       outputs->next                   = nullptr;\r
 \r
                                        inputs->name                    = av_strdup("out");\r
                                        inputs->filter_ctx              = buffersink_ctx_;\r
                                        inputs->pad_idx                 = 0;\r
-                                       inputs->next                    = NULL;\r
+                                       inputs->next                    = nullptr;\r
                        \r
-                                       THROW_ON_ERROR2(avfilter_graph_parse(graph_.get(), filters_.c_str(), &inputs, &outputs, NULL), "[filter]");\r
+                                       std::string filters = boost::to_lower_copy(narrow(filters_));\r
+                                       THROW_ON_ERROR2(avfilter_graph_parse(graph_.get(), filters.c_str(), &inputs, &outputs, NULL), "[filter]");\r
                        \r
-                                       for(size_t n = 0; n < graph_->filter_count; ++n)\r
-                                       {\r
-                                               auto filter_name = graph_->filters[n]->name;\r
-                                               if(strstr(filter_name, "yadif") != 0)\r
-                                               {\r
-                                                       if(frame->format == PIX_FMT_UYVY422)\r
-                                                               graph_->filters[n]->filter->query_formats = query_formats_422;\r
-                                                       if(frame->format == PIX_FMT_YUYV422)\r
-                                                               graph_->filters[n]->filter->query_formats = query_formats_422;\r
-                                                       if(frame->format == PIX_FMT_UYYVYY411)\r
-                                                               graph_->filters[n]->filter->query_formats = query_formats_411;\r
-                                                       else if(frame->format == PIX_FMT_YUV420P10)\r
-                                                               graph_->filters[n]->filter->query_formats = query_formats_420;\r
-                                                       else if(frame->format == PIX_FMT_YUV422P10)\r
-                                                               graph_->filters[n]->filter->query_formats = query_formats_422;\r
-                                                       else if(frame->format == PIX_FMT_YUV444P10)\r
-                                                               graph_->filters[n]->filter->query_formats = query_formats_444;\r
-                                               }\r
-                                       }\r
-\r
-                                       avfilter_inout_free(&inputs);\r
-                                       avfilter_inout_free(&outputs);\r
-\r
-                                       THROW_ON_ERROR2(avfilter_graph_config(graph_.get(), NULL), "[filter]");                 \r
+                                       auto yadif_filter = boost::adaptors::filtered([&](AVFilterContext* p){return strstr(p->name, "yadif") != 0;});\r
 \r
-                                       for(size_t n = 0; n < graph_->filter_count; ++n)\r
+                                       BOOST_FOREACH(auto filter_ctx, boost::make_iterator_range(graph_->filters, graph_->filters + graph_->filter_count) | yadif_filter)\r
                                        {\r
-                                               auto filter_name = graph_->filters[n]->name;\r
-                                               if(strstr(filter_name, "yadif") != 0)                                           \r
-                                                       parallel_yadif_ctx_ = make_parallel_yadif(graph_->filters[n]);                                          \r
+                                               filter_ctx->filter->query_formats = [&]() -> int (*)(AVFilterContext*)\r
+                                               {\r
+                                                       switch(frame->format)\r
+                                                       {\r
+                                                       case PIX_FMT_UYVY422:   return query_formats_422;\r
+                                                       case PIX_FMT_YUYV422:   return query_formats_422;\r
+                                                       case PIX_FMT_UYYVYY411: return query_formats_411;\r
+                                                       case PIX_FMT_YUV420P10: return query_formats_420;\r
+                                                       case PIX_FMT_YUV422P10: return query_formats_422;\r
+                                                       case PIX_FMT_YUV444P10: return query_formats_444;\r
+                                                       case PIX_FMT_YUV420P9:  return query_formats_420;\r
+                                                       case PIX_FMT_YUV422P9:  return query_formats_422;\r
+                                                       case PIX_FMT_YUV444P9:  return query_formats_444;\r
+                                                       case PIX_FMT_BGR24:             return query_formats_444;\r
+                                                       case PIX_FMT_RGB24:             return query_formats_444;\r
+                                                       case PIX_FMT_BGRA:              return query_formats_420a;\r
+                                                       case PIX_FMT_RGBA:              return query_formats_420a;\r
+                                                       case PIX_FMT_ABGR:              return query_formats_420a;\r
+                                                       case PIX_FMT_ARGB:              return query_formats_420a;\r
+                                                       default:                                return filter_ctx->filter->query_formats;\r
+                                                       }\r
+                                               }();\r
                                        }\r
+                                       \r
+                                       THROW_ON_ERROR2(avfilter_graph_config(graph_.get(), NULL), "[filter]"); \r
+                                       \r
+                                       BOOST_FOREACH(auto filter_ctx, boost::make_iterator_range(graph_->filters, graph_->filters + graph_->filter_count) | yadif_filter)                                              \r
+                                               parallel_yadif_ctx_ = make_parallel_yadif(filter_ctx);                                          \r
                                }\r
                                catch(...)\r
                                {\r
@@ -238,31 +251,33 @@ struct filter::implementation
                                AVFilterBufferRef *picref;\r
                                THROW_ON_ERROR2(av_buffersink_get_buffer_ref(buffersink_ctx_, &picref, 0), "[filter]");\r
 \r
-                               if (picref) \r
-                               {               \r
-                                       safe_ptr<AVFrame> frame(avcodec_alloc_frame(), [=](AVFrame* p)\r
-                                       {\r
-                                               av_free(p);\r
-                                               avfilter_unref_buffer(picref);\r
-                                       });\r
+                               if (!picref) \r
+                                       return nullptr;\r
+                               \r
+                               safe_ptr<AVFrame> frame(avcodec_alloc_frame(), [=](AVFrame* p)\r
+                               {\r
+                                       av_free(p);\r
+                                       avfilter_unref_buffer(picref);\r
+                               });\r
 \r
-                                       avcodec_get_frame_defaults(frame.get());        \r
+                               avcodec_get_frame_defaults(frame.get());        \r
 \r
-                                       memcpy(frame->data,     picref->data,     sizeof(frame->data));\r
-                                       memcpy(frame->linesize, picref->linesize, sizeof(frame->linesize));\r
-                                       frame->format                           = picref->format;\r
-                                       frame->width                            = picref->video->w;\r
-                                       frame->height                           = picref->video->h;\r
-                                       frame->pkt_pos                          = picref->pos;\r
-                                       frame->interlaced_frame         = picref->video->interlaced;\r
-                                       frame->top_field_first          = picref->video->top_field_first;\r
-                                       frame->key_frame                        = picref->video->key_frame;\r
-                                       frame->pict_type                        = picref->video->pict_type;\r
-                                       frame->sample_aspect_ratio      = picref->video->sample_aspect_ratio;\r
+                               memcpy(frame->data,     picref->data,     sizeof(frame->data));\r
+                               memcpy(frame->linesize, picref->linesize, sizeof(frame->linesize));\r
+                               frame->format                           = picref->format;\r
+                               frame->width                            = picref->video->w;\r
+                               frame->height                           = picref->video->h;\r
+                               frame->pkt_pos                          = picref->pos;\r
+                               frame->interlaced_frame         = picref->video->interlaced;\r
+                               frame->top_field_first          = picref->video->top_field_first;\r
+                               frame->key_frame                        = picref->video->key_frame;\r
+                               frame->pict_type                        = picref->video->pict_type;\r
+                               frame->sample_aspect_ratio      = picref->video->sample_aspect_ratio;\r
                                        \r
-                                       return frame;\r
-                               }\r
+                               return frame;                           \r
                        }\r
+\r
+                       return nullptr;\r
                }\r
                catch(ffmpeg_error&)\r
                {\r
@@ -272,8 +287,6 @@ struct filter::implementation
                {\r
                        BOOST_THROW_EXCEPTION(ffmpeg_error() << boost::errinfo_nested_exception(boost::current_exception()));\r
                }\r
-\r
-               return nullptr;\r
        }\r
 };\r
 \r
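
poll() is also restructured around early returns: if av_buffersink_get_buffer_ref yields no picref the function bails out with nullptr, otherwise the freshly allocated AVFrame is wrapped in a safe_ptr whose deleter both frees the frame and unrefs the buffer reference, so the plane pointers copied with memcpy stay valid for as long as the frame is alive; the now-redundant trailing `return nullptr;` after the catch blocks is dropped in the hunk just above. A generic illustration of that lambda-deleter idea (plain standard library, not CasparCG code):

    #include <memory>
    #include <cstdio>

    // The returned handle owns two coupled resources: the FILE* it points to
    // and a scratch buffer captured by the deleter; both are released together,
    // the same shape as the AVFrame/picref pairing in poll() above.
    std::shared_ptr<FILE> open_log(const char* path)
    {
        FILE* f = std::fopen(path, "w");
        if (!f)
            return nullptr;                  // early return, as in poll()

        char* scratch = new char[4096];
        return std::shared_ptr<FILE>(f, [scratch](FILE* p)
        {
            std::fclose(p);
            delete[] scratch;                // freed only when the last owner goes away
        });
    }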
@@ -282,7 +295,7 @@ filter::filter(filter&& other) : impl_(std::move(other.impl_)){}
 filter& filter::operator=(filter&& other){impl_ = std::move(other.impl_); return *this;}\r
 void filter::push(const std::shared_ptr<AVFrame>& frame){impl_->push(frame);}\r
 std::shared_ptr<AVFrame> filter::poll(){return impl_->poll();}\r
-std::wstring filter::filter_str() const{return widen(impl_->filters_);}\r
+std::wstring filter::filter_str() const{return impl_->filters_;}\r
 std::vector<safe_ptr<AVFrame>> filter::poll_all()\r
 {      \r
        std::vector<safe_ptr<AVFrame>> frames;\r
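
filter_str() now returns the stored wide string untouched; lower-casing (and narrowing) happens only at graph-parse time via boost::to_lower_copy(narrow(filters_)). A small sketch of that normalization step, using a plain std::string in place of the result of CasparCG's narrow():

    #include <boost/algorithm/string.hpp>
    #include <string>
    #include <cassert>

    int main()
    {
        std::string filters = "YADIF=1:-1";                   // hypothetical user-typed filter string
        std::string parsed  = boost::to_lower_copy(filters);  // what avfilter_graph_parse would see
        assert(parsed == "yadif=1:-1");
        return 0;
    }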

modules/ffmpeg/producer/input/input.cpp
index f45dd67edc113ff3f4e03e56be72a09a56885dbc..093bc7ac401a82d89f4d9f5df482ebf0f2a9fce4 100644
@@ -178,7 +178,7 @@ struct input::implementation : boost::noncopyable
                        {\r
                                do_seek(start_);\r
                                graph_->add_tag("seek");                \r
-                               CASPAR_LOG(debug) << print() << " Looping.";                    \r
+                               CASPAR_LOG(trace) << print() << " Looping.";                    \r
                        }                                       \r
                }\r
                else\r
@@ -264,7 +264,7 @@ struct input::implementation : boost::noncopyable
                if(ret == AVERROR(EIO))\r
                        CASPAR_LOG(trace) << print() << " Received EIO, assuming EOF. ";\r
                if(ret == AVERROR_EOF)\r
-                       CASPAR_LOG(debug) << print() << " Received EOF. ";\r
+                       CASPAR_LOG(trace) << print() << " Received EOF. ";\r
 \r
                return ret == AVERROR_EOF || ret == AVERROR(EIO) || frame_number_ >= length_; // av_read_frame doesn't always correctly return AVERROR_EOF;\r
        }\r