\r
namespace caspar {\r
\r
-PixelFormat pix_fmts[] = \r
-{\r
- PIX_FMT_YUV420P,\r
- PIX_FMT_YUVA420P,\r
- PIX_FMT_YUV422P,\r
- PIX_FMT_YUV444P,\r
- PIX_FMT_YUV411P,\r
- PIX_FMT_ARGB, \r
- PIX_FMT_RGBA,\r
- PIX_FMT_ABGR,\r
- PIX_FMT_GRAY8,\r
- PIX_FMT_NONE\r
-}; \r
-\r
struct filter::implementation\r
{\r
std::string filters_;\r
AVFilterContext* buffersink_ctx_;\r
AVFilterContext* buffersrc_ctx_;\r
std::shared_ptr<void> parallel_yadif_ctx_;\r
+ std::vector<PixelFormat> pix_fmts_;\r
\r
- implementation(const std::wstring& filters) \r
+ implementation(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts) \r
: filters_(narrow(filters))\r
, parallel_yadif_ctx_(nullptr)\r
+ , pix_fmts_(pix_fmts)\r
{\r
+ if(pix_fmts_.empty())\r
+ {\r
+ pix_fmts_.push_back(PIX_FMT_YUV420P);\r
+ pix_fmts_.push_back(PIX_FMT_YUVA420P);\r
+ pix_fmts_.push_back(PIX_FMT_YUV422P);\r
+ pix_fmts_.push_back(PIX_FMT_YUV444P);\r
+ pix_fmts_.push_back(PIX_FMT_YUV411P);\r
+ pix_fmts_.push_back(PIX_FMT_ARGB);\r
+ pix_fmts_.push_back(PIX_FMT_RGBA);\r
+ pix_fmts_.push_back(PIX_FMT_ABGR);\r
+ pix_fmts_.push_back(PIX_FMT_GRAY8);\r
+ pix_fmts_.push_back(PIX_FMT_NONE);\r
+ }\r
+ else\r
+ pix_fmts_.push_back(PIX_FMT_NONE);\r
+\r
std::transform(filters_.begin(), filters_.end(), filters_.begin(), ::tolower);\r
}\r
\r
THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersrc_ctx_, avfilter_get_by_name("buffer"), "src", args.str().c_str(), NULL, graph_.get()), "[filter]");\r
\r
	// Output
- THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersink_ctx_, avfilter_get_by_name("buffersink"), "out", NULL, pix_fmts, graph_.get()), "[filter]");\r
+ THROW_ON_ERROR2(avfilter_graph_create_filter(&buffersink_ctx_, avfilter_get_by_name("buffersink"), "out", NULL, pix_fmts_.data(), graph_.get()), "[filter]");\r
\r
AVFilterInOut* outputs = avfilter_inout_alloc();\r
AVFilterInOut* inputs = avfilter_inout_alloc();\r
}\r
};\r
\r
-filter::filter(const std::wstring& filters) : impl_(new implementation(filters)){}\r
+filter::filter(const std::wstring& filters, const std::vector<PixelFormat>& pix_fmts) : impl_(new implementation(filters, pix_fmts)){}\r
filter::filter(filter&& other) : impl_(std::move(other.impl_)){}\r
filter& filter::operator=(filter&& other){impl_ = std::move(other.impl_); return *this;}\r
std::vector<safe_ptr<AVFrame>> filter::execute(const std::shared_ptr<AVFrame>& frame) {return impl_->execute(frame);}\r
#include <common/utility/timer.h>\r
#include <common/utility/string.h>\r
\r
+#include <ffmpeg/producer/filter/filter.h>\r
+\r
#include <core/video_format.h>\r
#include <core/mixer/read_frame.h>\r
#include <core/consumer/frame_consumer.h>\r
#include <tbb/atomic.h>\r
#include <tbb/concurrent_queue.h>\r
\r
+#include <boost/assign.hpp>\r
+\r
#include <algorithm>\r
#include <vector>\r
\r
+#if defined(_MSC_VER)\r
+#pragma warning (push)\r
+#pragma warning (disable : 4244)\r
+#endif\r
+extern "C" \r
+{\r
+ #define __STDC_CONSTANT_MACROS\r
+ #define __STDC_LIMIT_MACROS\r
+ #include <libavcodec/avcodec.h>\r
+ #include <libavutil/imgutils.h>\r
+}\r
+#if defined(_MSC_VER)\r
+#pragma warning (pop)\r
+#endif\r
+\r
namespace caspar {\r
\r
enum stretch\r
boost::thread thread_;\r
tbb::atomic<bool> is_running_;\r
\r
+ \r
+ filter filter_;\r
+\r
const bool key_only_;\r
public:\r
ogl_consumer(unsigned int screen_index, stretch stretch, bool windowed, const core::video_format_desc& format_desc, bool key_only) \r
, graph_(diagnostics::create_graph(narrow(print())))\r
, input_buffer_(core::consumer_buffer_depth()-1)\r
, key_only_(key_only)\r
+ , filter_(format_desc.field_mode == core::field_mode::progressive ? L"" : L"YADIF=0:-1", boost::assign::list_of(PIX_FMT_BGRA))\r
{ \r
+ if(format_desc.field_mode != core::field_mode::progressive)\r
+ CASPAR_LOG(info) << print() << L" Deinterlacer enabled.";\r
+\r
frame_buffer_.set_capacity(2);\r
\r
graph_->add_guide("frame-time", 0.5);\r
return format_desc_;\r
}\r
\r
+	// Builds an AVFrame whose metadata describes this consumer's output:
+	// BGRA pixels at the video format's width/height, with interlacing flags
+	// taken from the format's field mode. No pixel buffer is allocated here —
+	// data[0] is left unset; the caller is expected to point it at an existing
+	// image buffer before use (NOTE(review): confirm all callers do so).
+	safe_ptr<AVFrame> get_av_frame()
+	{ 
+		// av_free is the matching deleter for avcodec_alloc_frame storage.
+		safe_ptr<AVFrame> av_frame(avcodec_alloc_frame(), av_free); 
+		avcodec_get_frame_defaults(av_frame.get());
+		
+		av_frame->linesize[0] = format_desc_.width*4; // 4 bytes/pixel (BGRA), no row padding
+		av_frame->format = PIX_FMT_BGRA;
+		av_frame->width = format_desc_.width;
+		av_frame->height = format_desc_.height;
+		// Interlace flags; presumably consumed by the filter graph (e.g. yadif) — verify.
+		av_frame->interlaced_frame = format_desc_.field_mode != core::field_mode::progressive;
+		av_frame->top_field_first = format_desc_.field_mode == core::field_mode::upper ? 1 : 0;
+
+		return av_frame;
+	}
+\r
void render(const safe_ptr<core::read_frame>& frame)\r
{ \r
if(frame->image_data().empty())\r
return;\r
+ \r
+ auto av_frame = get_av_frame();\r
+ av_frame->data[0] = const_cast<uint8_t*>(frame->image_data().begin());\r
+\r
+ auto frames = filter_.execute(av_frame);\r
+\r
+ if(frames.empty())\r
+ return;\r
+\r
+ av_frame = frames[0];\r
+\r
+ if(av_frame->linesize[0] != static_cast<int>(format_desc_.width*4))\r
+ {\r
+ const uint8_t *src_data[4] = {0};\r
+ memcpy(const_cast<uint8_t**>(&src_data[0]), av_frame->data, 4);\r
+ const int src_linesizes[4] = {0};\r
+ memcpy(const_cast<int*>(&src_linesizes[0]), av_frame->linesize, 4);\r
+\r
+ auto av_frame2 = get_av_frame();\r
+ av_image_alloc(av_frame2->data, av_frame2->linesize, av_frame2->width, av_frame2->height, PIX_FMT_BGRA, 16);\r
+ av_image_copy(av_frame2->data, av_frame2->linesize, src_data, src_linesizes, PIX_FMT_BGRA, av_frame2->width, av_frame2->height);\r
+ av_frame = safe_ptr<AVFrame>(av_frame2.get(), [=](AVFrame*)\r
+ {\r
+ av_freep(&av_frame2->data[0]);\r
+ });\r
+ }\r
\r
glBindTexture(GL_TEXTURE_2D, texture_);\r
\r
if(ptr)\r
{\r
if(key_only_)\r
- fast_memshfl(reinterpret_cast<char*>(ptr), frame->image_data().begin(), frame->image_data().size(), 0x0F0F0F0F, 0x0B0B0B0B, 0x07070707, 0x03030303);\r
+ fast_memshfl(reinterpret_cast<char*>(ptr), av_frame->data[0], frame->image_data().size(), 0x0F0F0F0F, 0x0B0B0B0B, 0x07070707, 0x03030303);\r
else\r
- fast_memcpy(reinterpret_cast<char*>(ptr), frame->image_data().begin(), frame->image_data().size());\r
+ fast_memcpy(reinterpret_cast<char*>(ptr), av_frame->data[0], frame->image_data().size());\r
\r
glUnmapBuffer(GL_PIXEL_UNPACK_BUFFER); // release the mapped buffer\r
}\r
<IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(ProjectDir)tmp\$(Configuration)\</IntDir>\r
<IntDir Condition="'$(Configuration)|$(Platform)'=='Profile|Win32'">$(ProjectDir)tmp\$(Configuration)\</IntDir>\r
<IntDir Condition="'$(Configuration)|$(Platform)'=='Develop|Win32'">$(ProjectDir)tmp\$(Configuration)\</IntDir>\r
- <IncludePath Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">..\..\;..\..\dependencies\boost_1_44_0\;..\..\dependencies\glew-1.6.0\include;..\..\dependencies\tbb\include\;..\..\dependencies\SFML-1.6\include;$(IncludePath)</IncludePath>\r
+ <IncludePath Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">..\..\;..\..\dependencies\boost_1_44_0\;..\..\dependencies\glew-1.6.0\include;..\..\dependencies\tbb\include\;..\..\dependencies\SFML-1.6\include;..\..\dependencies\ffmpeg 0.8\include;$(IncludePath)</IncludePath>\r
<IncludePath Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">..\..\;..\..\dependencies\boost_1_44_0\;..\..\dependencies\glew-1.6.0\include;..\..\dependencies\tbb\include\;..\..\dependencies\SFML-1.6\include;$(IncludePath)</IncludePath>\r
<IncludePath Condition="'$(Configuration)|$(Platform)'=='Profile|Win32'">..\..\;..\..\dependencies\boost_1_44_0\;..\..\dependencies\glew-1.6.0\include;..\..\dependencies\tbb\include\;..\..\dependencies\SFML-1.6\include;$(IncludePath)</IncludePath>\r
<IncludePath Condition="'$(Configuration)|$(Platform)'=='Develop|Win32'">..\..\;..\..\dependencies\boost_1_44_0\;..\..\dependencies\glew-1.6.0\include;..\..\dependencies\tbb\include\;..\..\dependencies\SFML-1.6\include;$(IncludePath)</IncludePath>\r
<ProjectReference Include="..\..\core\core.vcxproj">\r
<Project>{79388c20-6499-4bf6-b8b9-d8c33d7d4ddd}</Project>\r
</ProjectReference>\r
+ <ProjectReference Include="..\ffmpeg\ffmpeg.vcxproj">\r
+ <Project>{f6223af3-be0b-4b61-8406-98922ce521c2}</Project>\r
+ </ProjectReference>\r
</ItemGroup>\r
<ItemGroup>\r
<ClCompile Include="consumer\ogl_consumer.cpp">\r