struct graph::implementation : public drawable\r
{\r
std::map<std::string, diagnostics::line> lines_;\r
- const printer parent_printer_;\r
std::string name_;\r
-\r
- int counter_;\r
-\r
+ std::string text_;\r
+ \r
implementation(const std::string& name) \r
: name_(name)\r
- , counter_(0){}\r
-\r
- implementation(const printer& parent_printer) \r
- : parent_printer_(parent_printer)\r
- , name_("")\r
- , counter_(0){}\r
+ , text_(name_){}\r
+ \r
+ void update_text(const std::string& value)\r
+ {\r
+ text_ = value;\r
+ }\r
\r
void update(const std::string& name, double value)\r
{\r
private:\r
void render(sf::RenderTarget& target)\r
{\r
- if(counter_++ > 25) // Don't update name too often since print can be implemented with locks.\r
- {\r
- counter_ = 0;\r
- if(parent_printer_)\r
- name_ = narrow(parent_printer_());\r
- }\r
const size_t text_size = 15;\r
const size_t text_margin = 2;\r
const size_t text_offset = (text_size+text_margin*2)*2;\r
\r
- sf::String text(name_.c_str(), sf::Font::GetDefaultFont(), text_size);\r
+ sf::String text(text_.c_str(), sf::Font::GetDefaultFont(), text_size);\r
text.SetStyle(sf::String::Italic);\r
text.Move(text_margin, text_margin);\r
\r
graph::start();\r
}\r
\r
-graph::graph(const printer& parent_printer, bool start) : impl_(env::properties().get("configuration.diagnostics.graphs", true) ? new implementation(parent_printer) : nullptr)\r
-{\r
- if(start)\r
- graph::start();\r
-}\r
-\r
void graph::start()\r
{ \r
if(impl_)\r
context::register_drawable(impl_);\r
}\r
\r
+void graph::update_text(const std::string& value)\r
+{\r
+ if(impl_)\r
+ { \r
+ auto p = impl_;\r
+ context::begin_invoke([=]\r
+ { \r
+ p->update_text(value);\r
+ });\r
+ }\r
+}\r
+\r
void graph::update_value(const std::string& name, double value)\r
{\r
if(impl_)\r
{\r
return safe_ptr<graph>(new graph(name, start));\r
}\r
-safe_ptr<graph> create_graph(const printer& parent_printer, bool start)\r
-{\r
- return safe_ptr<graph>(new graph(parent_printer, start));\r
-}\r
-\r
\r
//namespace v2\r
//{ \r
class graph\r
{\r
friend safe_ptr<graph> create_graph(const std::string& name, bool start);\r
- friend safe_ptr<graph> create_graph(const printer& parent_printer, bool start);\r
graph(const std::string& name, bool start = true);\r
graph(const printer& parent_printer, bool start = true);\r
public:\r
void start();\r
+ void update_text(const std::string& value);\r
void update_value(const std::string& name, double value);\r
void set_value(const std::string& name, double value);\r
void set_color(const std::string& name, color c);\r
};\r
\r
safe_ptr<graph> create_graph(const std::string& name, bool start = true);\r
-safe_ptr<graph> create_graph(const printer& parent_printer, bool start = true);\r
\r
//namespace v2\r
//{\r
\r
#include <cstring>\r
\r
+#include <ppl.h>\r
+\r
namespace caspar {\r
\r
+namespace internal {\r
+\r
static void* fast_memclr(void* dest, size_t count)\r
{\r
- if(count < 2048)\r
- return memset(dest, 0, count);\r
-\r
assert(dest != nullptr);\r
\r
- size_t rest = count % 128;\r
+ size_t rest = count % 2048;\r
count -= rest;\r
\r
- __asm \r
- { \r
- mov edi, dest; \r
- mov ebx, count; \r
- shr ebx, 7;\r
- pxor xmm0, xmm0; \r
-\r
- clr: \r
- movntdq [edi+00h], xmm0;\r
- movntdq [edi+10h], xmm0;\r
- movntdq [edi+20h], xmm0; \r
- movntdq [edi+30h], xmm0;\r
+ if(count > 0)\r
+ {\r
+ __asm \r
+ { \r
+ mov edi, dest; \r
+ mov ebx, count; \r
+ shr ebx, 7;\r
+ pxor xmm0, xmm0; \r
+\r
+ clr: \r
+ movntdq [edi+00h], xmm0;\r
+ movntdq [edi+10h], xmm0;\r
+ movntdq [edi+20h], xmm0; \r
+ movntdq [edi+30h], xmm0;\r
\r
- movntdq [edi+40h], xmm0; \r
- movntdq [edi+50h], xmm0; \r
- movntdq [edi+60h], xmm0; \r
- movntdq [edi+70h], xmm0; \r
+ movntdq [edi+40h], xmm0; \r
+ movntdq [edi+50h], xmm0; \r
+ movntdq [edi+60h], xmm0; \r
+ movntdq [edi+70h], xmm0; \r
\r
- lea edi, [edi+80h]; \r
+ lea edi, [edi+80h]; \r
+\r
+ dec ebx; \r
+ jnz clr; \r
+ } \r
+ }\r
\r
- dec ebx; \r
- jnz clr; \r
- } \r
return memset(reinterpret_cast<char*>(dest)+count, 0, rest);\r
}\r
\r
+}\r
+\r
+static void* fast_memclr(void* dest, size_t count)\r
+{ \r
+ size_t rest = count % 8192;\r
+ count -= rest;\r
+\r
+ Concurrency::parallel_for<int>(0, count / 8192, [&](size_t n)\r
+ { \r
+ internal::fast_memclr(reinterpret_cast<char*>(dest) + n*8192, 8192); \r
+ });\r
+\r
+ return internal::fast_memclr(reinterpret_cast<char*>(dest)+count, rest);\r
+}\r
+\r
}
\ No newline at end of file
\r
#include <assert.h>\r
\r
-#include <tbb/parallel_for.h>\r
+#include <ppl.h>\r
\r
namespace caspar {\r
\r
\r
static void* fast_memcpy(void* dest, const void* source, size_t count)\r
{ \r
- if(count < 2048)\r
- return memcpy(dest, source, count);\r
-\r
- size_t rest = count % 128;\r
+ size_t rest = count % 2048;\r
count -= rest;\r
\r
- tbb::affinity_partitioner ap;\r
- tbb::parallel_for(tbb::blocked_range<size_t>(0, count/128), [&](const tbb::blocked_range<size_t>& r)\r
+ Concurrency::parallel_for<int>(0, count / 2048, [&](size_t n)\r
{ \r
- internal::fast_memcpy(reinterpret_cast<char*>(dest) + r.begin()*128, reinterpret_cast<const char*>(source) + r.begin()*128, r.size()*128); \r
- }, ap);\r
+ internal::fast_memcpy(reinterpret_cast<char*>(dest) + n*2048, reinterpret_cast<const char*>(source) + n*2048, 2048); \r
+ });\r
\r
return memcpy(reinterpret_cast<char*>(dest)+count, reinterpret_cast<const char*>(source)+count, rest);\r
}\r
\r
#include <assert.h>\r
\r
-#include <tbb/parallel_for.h>\r
+#include <ppl.h>\r
\r
namespace caspar {\r
\r
\r
static void* fast_memshfl(void* dest, const void* source, size_t count, int m1, int m2, int m3, int m4)\r
{ \r
- tbb::affinity_partitioner ap;\r
- tbb::parallel_for(tbb::blocked_range<size_t>(0, count/128), [&](const tbb::blocked_range<size_t>& r)\r
+ size_t rest = count % 2048;\r
+ count -= rest;\r
+\r
+ Concurrency::parallel_for<int>(0, count, 2048, [&](int n)\r
{ \r
- internal::fast_memshfl(reinterpret_cast<char*>(dest) + r.begin()*128, reinterpret_cast<const char*>(source) + r.begin()*128, r.size()*128, m1, m2, m3, m4); \r
- }, ap);\r
+ internal::fast_memshfl(reinterpret_cast<char*>(dest) + n*2048, reinterpret_cast<const char*>(source) + n*2048, 2048, m1, m2, m3, m4); \r
+ });\r
\r
- return dest;\r
+ return internal::fast_memshfl(reinterpret_cast<char*>(dest) + count, reinterpret_cast<const char*>(source) + count, rest, m1, m2, m3, m4); \r
}\r
\r
\r
#include <core/mixer/write_frame.h>\r
#include <core/producer/frame/frame_transform.h>\r
\r
-#include <tbb/parallel_for.h>\r
+#include <ppl.h>\r
\r
#include <safeint.h>\r
\r
\r
std::map<const void*, core::frame_transform> next_frame_transforms;\r
\r
- tbb::affinity_partitioner ap;\r
-\r
BOOST_FOREACH(auto& item, items)\r
{ \r
const auto next = item.transform;\r
const float next_volume = static_cast<float>(next.volume);\r
const float delta = 1.0f/static_cast<float>(format_desc_.audio_samples_per_frame/format_desc_.audio_channels);\r
\r
- tbb::parallel_for\r
- (\r
- tbb::blocked_range<size_t>(0, format_desc_.audio_samples_per_frame/4),\r
- [&](const tbb::blocked_range<size_t>& r)\r
- { \r
- auto alpha_ps = _mm_setr_ps(delta, delta, 0.0f, 0.0f);\r
- auto delta2_ps = _mm_set_ps1(delta*2.0f);\r
- auto prev_ps = _mm_set_ps1(prev_volume);\r
- auto next_ps = _mm_set_ps1(next_volume); \r
-\r
- for(size_t n = r.begin(); n < r.end(); ++n)\r
- { \r
- auto next2_ps = _mm_mul_ps(next_ps, alpha_ps);\r
- auto prev2_ps = _mm_sub_ps(prev_ps, _mm_mul_ps(prev_ps, alpha_ps));\r
- auto volume_ps = _mm_add_ps(next2_ps, prev2_ps);\r
-\r
- auto sample_ps = _mm_cvtepi32_ps(_mm_load_si128(reinterpret_cast<__m128i*>(&item.audio_data[n*4])));\r
- auto res_sample_ps = _mm_load_ps(&intermediate[n*4]); \r
- sample_ps = _mm_mul_ps(sample_ps, volume_ps); \r
- res_sample_ps = _mm_add_ps(sample_ps, res_sample_ps); \r
-\r
- alpha_ps = _mm_add_ps(alpha_ps, delta2_ps);\r
-\r
- _mm_store_ps(&intermediate[n*4], res_sample_ps);\r
- }\r
- }\r
- , ap);\r
+ auto alpha_ps = _mm_setr_ps(delta, delta, 0.0f, 0.0f);\r
+ auto delta2_ps = _mm_set_ps1(delta*2.0f);\r
+ auto prev_ps = _mm_set_ps1(prev_volume);\r
+ auto next_ps = _mm_set_ps1(next_volume); \r
+\r
+ Concurrency::parallel_for<int>(0, format_desc_.audio_samples_per_frame/4,\r
+ [&](int n)\r
+ { \r
+ auto next2_ps = _mm_mul_ps(next_ps, alpha_ps);\r
+ auto prev2_ps = _mm_sub_ps(prev_ps, _mm_mul_ps(prev_ps, alpha_ps));\r
+ auto volume_ps = _mm_add_ps(next2_ps, prev2_ps);\r
+\r
+ auto sample_ps = _mm_cvtepi32_ps(_mm_load_si128(reinterpret_cast<__m128i*>(&item.audio_data[n*4])));\r
+ auto res_sample_ps = _mm_load_ps(&intermediate[n*4]); \r
+ sample_ps = _mm_mul_ps(sample_ps, volume_ps); \r
+ res_sample_ps = _mm_add_ps(sample_ps, res_sample_ps); \r
+\r
+ alpha_ps = _mm_add_ps(alpha_ps, delta2_ps);\r
+\r
+ _mm_store_ps(&intermediate[n*4], res_sample_ps);\r
+ });\r
}\r
\r
auto result = audio_buffer(format_desc_.audio_samples_per_frame+128, 0); \r
\r
- tbb::parallel_for\r
- (\r
- tbb::blocked_range<size_t>(0, format_desc_.audio_samples_per_frame/32),\r
- [&](const tbb::blocked_range<size_t>& r)\r
- { \r
- auto intermediate_128 = reinterpret_cast<__m128i*>(intermediate.data()+r.begin()*32);\r
- auto result_128 = reinterpret_cast<__m128i*>(result.data()+r.begin()*32);\r
+ Concurrency::parallel_for<int>(0, format_desc_.audio_samples_per_frame/32, \r
+ [&](int n)\r
+ { \r
+ auto intermediate_128 = reinterpret_cast<__m128i*>(intermediate.data()+n*32);\r
+ auto result_128 = reinterpret_cast<__m128i*>(result.data()+n*32);\r
\r
- for(size_t n = r.begin(); n < r.end(); ++n)\r
- { \r
- auto xmm0 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
- auto xmm1 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
- auto xmm2 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
- auto xmm3 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
- auto xmm4 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
- auto xmm5 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
- auto xmm6 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
- auto xmm7 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
+ auto xmm0 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
+ auto xmm1 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
+ auto xmm2 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
+ auto xmm3 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
+ auto xmm4 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
+ auto xmm5 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
+ auto xmm6 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
+ auto xmm7 = _mm_load_ps(reinterpret_cast<float*>(intermediate_128++));\r
\r
- _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm0));\r
- _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm1));\r
- _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm2));\r
- _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm3));\r
- _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm4));\r
- _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm5));\r
- _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm6));\r
- _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm7));\r
- }\r
- }\r
- , ap);\r
+ _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm0));\r
+ _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm1));\r
+ _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm2));\r
+ _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm3));\r
+ _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm4));\r
+ _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm5));\r
+ _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm6));\r
+ _mm_stream_si128(result_128++, _mm_cvtps_epi32(xmm7));\r
+ });\r
\r
items.clear();\r
prev_frame_transforms_ = std::move(next_frame_transforms); \r
#include <boost/fusion/include/at_key.hpp>\r
#include <boost/foreach.hpp>\r
\r
-#include <tbb/parallel_invoke.h>\r
-\r
#include <unordered_map>\r
\r
namespace caspar { namespace core {\r
<PrecompiledHeader>Use</PrecompiledHeader>\r
<BrowseInformation>true</BrowseInformation>\r
<WarningLevel>Level4</WarningLevel>\r
- <DebugInformationFormat>EditAndContinue</DebugInformationFormat>\r
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>\r
<PreprocessorDefinitions>TBB_USE_DEBUG;_CRT_SECURE_NO_WARNINGS;_SCL_SECURE_NO_WARNINGS;TBB_USE_CAPTURED_EXCEPTION=0;TBB_USE_ASSERT=1;_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>\r
<TreatWarningAsError>true</TreatWarningAsError>\r
<MultiProcessorCompilation>true</MultiProcessorCompilation>\r
<PrecompiledHeader>Use</PrecompiledHeader>\r
<BrowseInformation>true</BrowseInformation>\r
<WarningLevel>Level4</WarningLevel>\r
- <DebugInformationFormat>EditAndContinue</DebugInformationFormat>\r
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>\r
<PreprocessorDefinitions>TBB_USE_DEBUG;TBB_USE_CAPTURED_EXCEPTION=0;TBB_USE_ASSERT=1;_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>\r
<TreatWarningAsError>true</TreatWarningAsError>\r
<MultiProcessorCompilation>true</MultiProcessorCompilation>\r
\r
\r
/* File created by MIDL compiler version 7.00.0555 */\r
-/* at Tue Oct 18 16:33:45 2011\r
+/* at Sat Oct 22 04:24:45 2011\r
*/\r
/* Compiler settings for interop\DeckLinkAPI.idl:\r
Oicf, W1, Zp8, env=Win32 (32b run), target_arch=X86 7.00.0555 \r
\r
\r
/* File created by MIDL compiler version 7.00.0555 */\r
-/* at Tue Oct 18 16:33:45 2011\r
+/* at Sat Oct 22 04:24:45 2011\r
*/\r
/* Compiler settings for interop\DeckLinkAPI.idl:\r
Oicf, W1, Zp8, env=Win32 (32b run), target_arch=X86 7.00.0555 \r
const core::video_format_desc format_desc_;\r
const size_t device_index_;\r
\r
- std::shared_ptr<diagnostics::graph> graph_;\r
+ safe_ptr<diagnostics::graph> graph_;\r
boost::timer tick_timer_;\r
boost::timer frame_timer_;\r
\r
, model_name_(get_model_name(decklink_))\r
, format_desc_(format_desc)\r
, device_index_(device_index)\r
+ , graph_ (diagnostics::create_graph("", false))\r
{ \r
- graph_ = diagnostics::create_graph(boost::bind(&decklink_producer::print, this));\r
graph_->add_guide("tick-time", 0.5);\r
graph_->set_color("tick-time", diagnostics::color(0.0f, 0.6f, 0.9f)); \r
graph_->set_color("late-frame", diagnostics::color(0.6f, 0.3f, 0.3f));\r
graph_->set_color("frame-time", diagnostics::color(1.0f, 0.0f, 0.0f));\r
graph_->set_color("dropped-frame", diagnostics::color(0.3f, 0.6f, 0.3f));\r
graph_->set_color("output-buffer", diagnostics::color(0.0f, 1.0f, 0.0f));\r
+ graph_->update_text(narrow(print()));\r
\r
auto display_mode = get_display_mode(input_, format_desc_.format, bmdFormat8BitYUV, bmdVideoInputFlagDefault);\r
\r
<< boost::errinfo_api_function("StartStreams"));\r
\r
CASPAR_LOG(info) << print() << L" Successfully Initialized.";\r
+\r
+ graph_->start();\r
}\r
\r
~decklink_producer()\r
}\r
}\r
\r
-\r
virtual HRESULT STDMETHODCALLTYPE QueryInterface (REFIID, LPVOID*) {return E_NOINTERFACE;}\r
virtual ULONG STDMETHODCALLTYPE AddRef () {return 1;}\r
virtual ULONG STDMETHODCALLTYPE Release () {return 1;}\r
\r
class decklink_producer_proxy : public Concurrency::agent, public core::frame_producer\r
{ \r
- std::shared_ptr<Concurrency::bounded_buffer<std::shared_ptr<AVFrame>>> video_frames_;\r
- std::shared_ptr<Concurrency::bounded_buffer<std::shared_ptr<core::audio_buffer>>> audio_buffers_;\r
- Concurrency::bounded_buffer<std::shared_ptr<core::basic_frame>> muxed_frames_;\r
+ Concurrency::bounded_buffer<std::shared_ptr<AVFrame>> video_frames_;\r
+ Concurrency::bounded_buffer<std::shared_ptr<core::audio_buffer>> audio_buffers_;\r
+ Concurrency::bounded_buffer<safe_ptr<core::basic_frame>> muxed_frames_;\r
\r
const core::video_format_desc format_desc_;\r
const size_t device_index_;\r
public:\r
\r
explicit decklink_producer_proxy(const safe_ptr<core::frame_factory>& frame_factory, const core::video_format_desc& format_desc, size_t device_index, const std::wstring& filter_str, int64_t length)\r
- : video_frames_(std::make_shared<Concurrency::bounded_buffer<std::shared_ptr<AVFrame>>>(1))\r
- , audio_buffers_(std::make_shared<Concurrency::bounded_buffer<std::shared_ptr<core::audio_buffer>>>(1))\r
+ : video_frames_(1)\r
+ , audio_buffers_(1)\r
, muxed_frames_(1)\r
, format_desc_(format_desc)\r
, device_index_(device_index)\r
, last_frame_(core::basic_frame::empty())\r
, length_(length)\r
, filter_(filter_str)\r
- , muxer_(video_frames_, audio_buffers_, muxed_frames_, ffmpeg::double_rate(filter_str) ? format_desc.fps * 2.0 : format_desc.fps, frame_factory)\r
+ , muxer_(&video_frames_, &audio_buffers_, muxed_frames_, ffmpeg::double_rate(filter_str) ? format_desc.fps * 2.0 : format_desc.fps, frame_factory)\r
, is_running_(true)\r
{\r
agent::start();\r
{\r
try\r
{\r
- CoInitializeEx(NULL, COINIT_MULTITHREADED);\r
+ struct co_init\r
+ {\r
+ co_init() {CoInitialize(NULL);}\r
+ ~co_init() {CoUninitialize();}\r
+ } init;\r
\r
Concurrency::bounded_buffer<frame_packet> input_buffer(2);\r
\r
auto frame = filter_.poll();\r
if(!frame)\r
break;\r
- Concurrency::send<std::shared_ptr<AVFrame>>(*video_frames_, frame);\r
+ Concurrency::send(video_frames_, frame);\r
}\r
},\r
[&]\r
{\r
auto sample_frame_count = audio->GetSampleFrameCount();\r
auto audio_data = reinterpret_cast<int32_t*>(bytes);\r
- Concurrency::send(*audio_buffers_, std::make_shared<core::audio_buffer>(audio_data, audio_data + sample_frame_count*format_desc_.audio_channels));\r
+ Concurrency::send(audio_buffers_, std::make_shared<core::audio_buffer>(audio_data, audio_data + sample_frame_count*format_desc_.audio_channels));\r
}\r
else\r
- Concurrency::send(*audio_buffers_, ffmpeg::empty_audio()); \r
+ Concurrency::send(audio_buffers_, ffmpeg::empty_audio()); \r
});\r
}\r
\r
{\r
CASPAR_LOG_CURRENT_EXCEPTION();\r
}\r
-\r
- CoUninitialize();\r
-\r
+ \r
CASPAR_LOG(info) << print() << L" Successfully Uninitialized."; \r
\r
done();\r
<PrecompiledHeader>Use</PrecompiledHeader>\r
<BrowseInformation>true</BrowseInformation>\r
<WarningLevel>Level4</WarningLevel>\r
- <DebugInformationFormat>EditAndContinue</DebugInformationFormat>\r
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>\r
<PreprocessorDefinitions>TBB_USE_DEBUG;TBB_USE_CAPTURED_EXCEPTION=0;TBB_USE_ASSERT=1;_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>\r
<TreatWarningAsError>true</TreatWarningAsError>\r
<MultiProcessorCompilation>true</MultiProcessorCompilation>\r
#pragma once\r
\r
#include <common/exception/exceptions.h>\r
+#include <common/utility/string.h>\r
\r
#include <string>\r
\r
boost::errinfo_errno(AVUNERROR(ret))); \\r
} \\r
return ret; \\r
- }();\r
+ }()\r
#define THROW_ON_ERROR2(call, source) THROW_ON_ERROR3(call, source, ffmpeg_error)\r
\r
}}
\ No newline at end of file
#pragma warning (pop)\r
#endif\r
\r
+#include <connect.h>\r
+#include <semaphore.h>\r
+\r
namespace caspar { namespace ffmpeg {\r
\r
-struct audio_decoder::implementation : public Concurrency::agent, boost::noncopyable\r
-{\r
- audio_decoder::source_t& source_;\r
- audio_decoder::target_t& target_;\r
-\r
- std::shared_ptr<AVCodecContext> codec_context_; \r
- const core::video_format_desc format_desc_;\r
+struct audio_decoder::implementation : boost::noncopyable\r
+{ \r
int index_;\r
- std::unique_ptr<audio_resampler> resampler_;\r
-\r
- std::vector<int8_t, tbb::cache_aligned_allocator<int8_t>> buffer1_;\r
+ std::shared_ptr<AVCodecContext> codec_context_; \r
+ \r
+ audio_resampler resampler_;\r
\r
- int64_t nb_frames_;\r
+ std::vector<int8_t, tbb::cache_aligned_allocator<int8_t>> buffer1_;\r
\r
+ Concurrency::transformer<std::shared_ptr<AVPacket>, std::shared_ptr<core::audio_buffer>> transformer_;\r
+ \r
public:\r
- explicit implementation(audio_decoder::source_t& source,\r
- audio_decoder::target_t& target,\r
- const safe_ptr<AVFormatContext>& context, \r
- const core::video_format_desc& format_desc) \r
- : source_(source)\r
- , target_(target)\r
- , format_desc_(format_desc) \r
- , nb_frames_(0)\r
+ explicit implementation(audio_decoder::source_t& source, audio_decoder::target_t& target, AVFormatContext& context, const core::video_format_desc& format_desc) \r
+ : codec_context_(open_codec(context, AVMEDIA_TYPE_AUDIO, index_))\r
+ , resampler_(format_desc.audio_channels, codec_context_->channels,\r
+ format_desc.audio_sample_rate, codec_context_->sample_rate,\r
+ AV_SAMPLE_FMT_S32, codec_context_->sample_fmt)\r
+ , buffer1_(AVCODEC_MAX_AUDIO_FRAME_SIZE*2)\r
+ , transformer_(std::bind(&implementation::decode, this, std::placeholders::_1), &target, [this](const std::shared_ptr<AVPacket>& packet)\r
+ {\r
+ return packet && packet->stream_index == index_;\r
+ })\r
{ \r
- \r
- AVCodec* dec;\r
- index_ = THROW_ON_ERROR3(av_find_best_stream(context.get(), AVMEDIA_TYPE_AUDIO, -1, -1, &dec, 0), "[audio_decoder]", ffmpeg_stream_not_found);\r
- CASPAR_VERIFY(index_ > -1, ffmpeg_error());\r
+ CASPAR_LOG(debug) << "[audio_decoder] " << context.streams[index_]->codec->codec->long_name;\r
\r
- THROW_ON_ERROR2(avcodec_open(context->streams[index_]->codec, dec), "[audio_decoder]");\r
- CASPAR_VERIFY(context->streams[index_]->codec, ffmpeg_error());\r
- \r
- codec_context_.reset(context->streams[index_]->codec, avcodec_close);\r
-\r
- buffer1_.resize(AVCODEC_MAX_AUDIO_FRAME_SIZE*2);\r
+ Concurrency::connect(source, transformer_);\r
+ }\r
\r
- resampler_.reset(new audio_resampler(format_desc_.audio_channels, codec_context_->channels,\r
- format_desc_.audio_sample_rate, codec_context_->sample_rate,\r
- AV_SAMPLE_FMT_S32, codec_context_->sample_fmt)); \r
- CASPAR_VERIFY(resampler_, ffmpeg_error());\r
+ std::shared_ptr<core::audio_buffer> decode(const std::shared_ptr<AVPacket>& packet)\r
+ { \r
+ if(!packet)\r
+ return nullptr;\r
\r
- start();\r
- }\r
+ if(packet == loop_packet(index_))\r
+ return loop_audio();\r
\r
- ~implementation()\r
- {\r
- agent::wait(this);\r
- } \r
+ if(packet == eof_packet(index_))\r
+ return eof_audio();\r
\r
- virtual void run()\r
- {\r
- try\r
- {\r
- while(true)\r
- {\r
- auto packet = Concurrency::receive(source_, [this](const std::shared_ptr<AVPacket>& packet)\r
- {\r
- return packet && packet->stream_index == index_;\r
- });\r
+ auto result = make_safe<core::audio_buffer>();\r
\r
- if(packet == eof_packet(index_)) \r
- break;\r
- \r
- if(packet == loop_packet(index_)) \r
- { \r
- avcodec_flush_buffers(codec_context_.get());\r
- Concurrency::send(target_, loop_audio());\r
- } \r
- else if(packet->stream_index == index_)\r
- {\r
- Concurrency::send(target_, decode(*packet)); \r
- Concurrency::wait(0);\r
- }\r
- }\r
- }\r
- catch(...)\r
+ while(packet->size > 0)\r
{\r
- CASPAR_LOG_CURRENT_EXCEPTION();\r
- }\r
- \r
- Concurrency::send(target_, eof_audio());\r
-\r
- done();\r
- }\r
- \r
- std::shared_ptr<core::audio_buffer> decode(AVPacket& pkt)\r
- { \r
- buffer1_.resize(AVCODEC_MAX_AUDIO_FRAME_SIZE*2);\r
- int written_bytes = buffer1_.size() - FF_INPUT_BUFFER_PADDING_SIZE;\r
+ buffer1_.resize(AVCODEC_MAX_AUDIO_FRAME_SIZE*2);\r
+ int written_bytes = buffer1_.size() - FF_INPUT_BUFFER_PADDING_SIZE;\r
\r
- int ret = THROW_ON_ERROR2(avcodec_decode_audio3(codec_context_.get(), reinterpret_cast<int16_t*>(buffer1_.data()), &written_bytes, &pkt), "[audio_decoder]");\r
+ int ret = THROW_ON_ERROR2(avcodec_decode_audio3(codec_context_.get(), reinterpret_cast<int16_t*>(buffer1_.data()), &written_bytes, packet.get()), "[audio_decoder]");\r
\r
- // There might be several frames in one packet.\r
- pkt.size -= ret;\r
- pkt.data += ret;\r
+ // There might be several frames in one packet.\r
+ packet->size -= ret;\r
+ packet->data += ret;\r
\r
- buffer1_.resize(written_bytes);\r
+ buffer1_.resize(written_bytes);\r
\r
- buffer1_ = resampler_->resample(std::move(buffer1_));\r
+ buffer1_ = resampler_.resample(std::move(buffer1_));\r
\r
- const auto n_samples = buffer1_.size() / av_get_bytes_per_sample(AV_SAMPLE_FMT_S32);\r
- const auto samples = reinterpret_cast<int32_t*>(buffer1_.data());\r
+ const auto n_samples = buffer1_.size() / av_get_bytes_per_sample(AV_SAMPLE_FMT_S32);\r
+ const auto samples = reinterpret_cast<int32_t*>(buffer1_.data());\r
\r
- return n_samples > 0 ? std::make_shared<core::audio_buffer>(samples, samples + n_samples) : nullptr;\r
+ result->insert(result->end(), samples, samples + n_samples);\r
+ }\r
+ \r
+ return result;\r
}\r
};\r
\r
-audio_decoder::audio_decoder(source_t& source,\r
- target_t& target,\r
- const safe_ptr<AVFormatContext>& context, \r
- const core::video_format_desc& format_desc)\r
+audio_decoder::audio_decoder(audio_decoder::source_t& source, audio_decoder::target_t& target, AVFormatContext& context, const core::video_format_desc& format_desc)\r
: impl_(new implementation(source, target, context, format_desc))\r
{\r
}\r
-int64_t audio_decoder::nb_frames() const{return impl_->nb_frames_;}\r
+\r
+int64_t audio_decoder::nb_frames() const\r
+{\r
+ return 0;\r
+}\r
\r
}}
\ No newline at end of file
{\r
public:\r
\r
- typedef Concurrency::ISource<std::shared_ptr<AVPacket>> source_t;\r
- typedef Concurrency::ITarget<std::shared_ptr<core::audio_buffer>> target_t;\r
-\r
- explicit audio_decoder(source_t& source,\r
- target_t& target,\r
- const safe_ptr<AVFormatContext>& context, \r
- const core::video_format_desc& format_desc);\r
+ typedef Concurrency::ISource<std::shared_ptr<AVPacket>>& source_t;\r
+ typedef Concurrency::ITarget<std::shared_ptr<core::audio_buffer>>& target_t;\r
+ \r
+ explicit audio_decoder(source_t& source, target_t& target, AVFormatContext& context, const core::video_format_desc& format_desc);\r
\r
int64_t nb_frames() const;\r
\r
\r
#include <agents.h>\r
#include <agents_extras.h>\r
+#include <connect.h>\r
+#include <concrt.h>\r
#include <ppl.h>\r
\r
using namespace Concurrency;\r
const bool loop_;\r
const size_t length_;\r
\r
- Concurrency::unbounded_buffer<std::shared_ptr<AVPacket>> packet_stream_;\r
- std::shared_ptr<Concurrency::ISource<std::shared_ptr<AVFrame>>> video_stream_;\r
- std::shared_ptr<Concurrency::ISource<std::shared_ptr<core::audio_buffer>>> audio_stream_;\r
- Concurrency::bounded_buffer<std::shared_ptr<core::basic_frame>> frame_stream_;\r
+ Concurrency::unbounded_buffer<std::shared_ptr<AVPacket>> packets_;\r
+ Concurrency::unbounded_buffer<std::shared_ptr<AVFrame>> video_;\r
+ Concurrency::unbounded_buffer<std::shared_ptr<core::audio_buffer>> audio_;\r
+ Concurrency::bounded_buffer<safe_ptr<core::basic_frame>> frames_;\r
\r
const safe_ptr<diagnostics::graph> graph_;\r
\r
- input input_; \r
- std::unique_ptr<video_decoder> video_decoder_;\r
- std::unique_ptr<audio_decoder> audio_decoder_; \r
- std::unique_ptr<frame_muxer2> muxer_;\r
+ input input_; \r
+ std::shared_ptr<video_decoder> video_decoder_;\r
+ std::shared_ptr<audio_decoder> audio_decoder_; \r
+ Concurrency::call<std::shared_ptr<AVPacket>> throw_away_;\r
+ std::unique_ptr<frame_muxer2> muxer_;\r
\r
safe_ptr<core::basic_frame> last_frame_;\r
\r
, start_(start)\r
, loop_(loop)\r
, length_(length)\r
- , frame_stream_(2)\r
- , graph_(diagnostics::create_graph([this]{return print();}, false))\r
- , input_(packet_stream_, graph_, filename_, loop, start, length)\r
+ , throw_away_([](const std::shared_ptr<AVPacket>&){})\r
+ , frames_(2)\r
+ , graph_(diagnostics::create_graph("", false))\r
+ , input_(packets_, graph_, filename_, loop, start, length)\r
, last_frame_(core::basic_frame::empty())\r
{\r
+ frame_muxer2::video_source_t* video_source = nullptr;\r
+ frame_muxer2::audio_source_t* audio_source = nullptr;\r
+\r
try\r
{\r
- auto target = std::make_shared<Concurrency::bounded_buffer<std::shared_ptr<AVFrame>>>(2);\r
- video_decoder_.reset(new video_decoder(packet_stream_, *target, input_.context(), frame_factory->get_video_format_desc().fps));\r
- video_stream_ = target;\r
+ video_decoder_.reset(new video_decoder(packets_, video_, *input_.context()));\r
+ video_source = &video_;\r
}\r
catch(ffmpeg_stream_not_found&)\r
{\r
\r
try\r
{\r
- auto target = std::make_shared<Concurrency::bounded_buffer<std::shared_ptr<core::audio_buffer>>>(25);\r
- audio_decoder_.reset(new audio_decoder(packet_stream_, *target, input_.context(), frame_factory->get_video_format_desc()));\r
- audio_stream_ = target;\r
+ audio_decoder_.reset(new audio_decoder(packets_, audio_, *input_.context(), frame_factory->get_video_format_desc()));\r
+ audio_source = &audio_;\r
}\r
catch(ffmpeg_stream_not_found&)\r
{\r
CASPAR_LOG_CURRENT_EXCEPTION();\r
CASPAR_LOG(warning) << "Failed to open audio-stream. Running without audio."; \r
}\r
+ \r
+ Concurrency::connect(packets_, throw_away_);\r
\r
CASPAR_VERIFY(video_decoder_ || audio_decoder_, ffmpeg_error());\r
\r
- muxer_.reset(new frame_muxer2(video_stream_, audio_stream_, frame_stream_, video_decoder_ ? video_decoder_->fps() : frame_factory->get_video_format_desc().fps, frame_factory));\r
+ muxer_.reset(new frame_muxer2(video_source, audio_source, frames_, video_decoder_ ? video_decoder_->fps() : frame_factory->get_video_format_desc().fps, frame_factory));\r
\r
graph_->set_color("underflow", diagnostics::color(0.6f, 0.3f, 0.9f)); \r
graph_->start();\r
~ffmpeg_producer()\r
{\r
input_.stop(); \r
- while(Concurrency::receive(frame_stream_) != core::basic_frame::eof())\r
+ while(Concurrency::receive(frames_) != core::basic_frame::eof())\r
{\r
}\r
}\r
\r
try\r
{ \r
- frame = last_frame_ = safe_ptr<core::basic_frame>(Concurrency::receive(frame_stream_, 10));\r
+ frame = last_frame_ = safe_ptr<core::basic_frame>(Concurrency::receive(frames_, 10));\r
+ graph_->update_text(narrow(print()));\r
}\r
catch(Concurrency::operation_timed_out&)\r
{ \r
filter& filter::operator=(filter&& other){impl_ = std::move(other.impl_); return *this;}\r
void filter::push(const std::shared_ptr<AVFrame>& frame){impl_->push(frame);}\r
std::shared_ptr<AVFrame> filter::poll(){return impl_->poll();}\r
+std::string filter::filter_str() const{return impl_->filters_;}\r
std::vector<safe_ptr<AVFrame>> filter::poll_all()\r
{ \r
std::vector<safe_ptr<AVFrame>> frames;\r
void push(const std::shared_ptr<AVFrame>& frame);\r
std::shared_ptr<AVFrame> poll();\r
std::vector<safe_ptr<AVFrame>> poll_all();\r
+\r
+ std::string filter_str() const;\r
\r
private:\r
struct implementation;\r
\r
#include <boost/foreach.hpp>\r
#include <boost/range/algorithm_ext/push_back.hpp>\r
+#include <boost/assign.hpp>\r
\r
-#include <agents.h>\r
-#include <ppl.h>\r
+#include <agents_extras.h>\r
\r
#include <deque>\r
+#include <tuple>\r
#include <queue>\r
#include <vector>\r
\r
{\r
switch(value)\r
{\r
- case simple:\r
- return L"simple";\r
- case duplicate:\r
- return L"duplicate";\r
- case half:\r
- return L"half";\r
- case interlace:\r
- return L"interlace";\r
- case deinterlace_bob:\r
- return L"deinterlace_bob";\r
- case deinterlace_bob_reinterlace:\r
- return L"deinterlace_bob_reinterlace";\r
- case deinterlace:\r
- return L"deinterlace";\r
- default:\r
- return L"invalid";\r
+ case simple: return L"simple";\r
+ case duplicate: return L"duplicate";\r
+ case half: return L"half";\r
+ case interlace: return L"interlace";\r
+ case deinterlace_bob: return L"deinterlace_bob";\r
+ case deinterlace_bob_reinterlace: return L"deinterlace_bob_reinterlace";\r
+ case deinterlace: return L"deinterlace";\r
+ default: return L"invalid";\r
}\r
}\r
};\r
return display_mode::invalid;\r
}\r
\r
-struct frame_muxer2::implementation : public Concurrency::agent, boost::noncopyable\r
-{ \r
- const std::shared_ptr<frame_muxer2::video_source_t>& video_source_;\r
- const std::shared_ptr<frame_muxer2::audio_source_t>& audio_source_;\r
- frame_muxer2::target_t& target_;\r
-\r
- std::deque<std::queue<safe_ptr<write_frame>>> video_streams_;\r
- std::deque<core::audio_buffer> audio_streams_;\r
- std::deque<safe_ptr<basic_frame>> frame_buffer_;\r
- display_mode::type display_mode_;\r
- const double in_fps_;\r
- const video_format_desc format_desc_;\r
- bool auto_transcode_;\r
-\r
- size_t audio_sample_count_;\r
- size_t video_frame_count_;\r
- \r
- size_t processed_audio_sample_count_;\r
- size_t processed_video_frame_count_;\r
\r
- filter filter_;\r
- safe_ptr<core::frame_factory> frame_factory_;\r
- \r
- implementation(const std::shared_ptr<frame_muxer2::video_source_t>& video_source,\r
- const std::shared_ptr<frame_muxer2::audio_source_t>& audio_source,\r
+struct frame_muxer2::implementation : boost::noncopyable\r
+{ \r
+ display_mode::type display_mode_;\r
+ const double in_fps_;\r
+ const video_format_desc format_desc_;\r
+ bool auto_transcode_;\r
+ \r
+ filter filter_;\r
+ safe_ptr<core::frame_factory> frame_factory_;\r
+ \r
+ Concurrency::call<std::shared_ptr<AVFrame>> push_video_;\r
+ Concurrency::call<std::shared_ptr<core::audio_buffer>> push_audio_;\r
+ \r
+ Concurrency::transformer<safe_ptr<AVFrame>, std::shared_ptr<core::write_frame>> video_;\r
+ Concurrency::unbounded_buffer<std::shared_ptr<core::audio_buffer>> audio_;\r
+\r
+ typedef std::tuple<std::shared_ptr<core::write_frame>, std::shared_ptr<core::audio_buffer>> join_element_t;\r
+ \r
+ Concurrency::transformer<join_element_t, safe_ptr<core::basic_frame>> merge_;\r
+ safe_ptr<Concurrency::ISource<join_element_t>> join_;\r
+\r
+ core::audio_buffer audio_data_;\r
+ std::queue<safe_ptr<AVFrame>> video_frames_;\r
+ \r
+ implementation(frame_muxer2::video_source_t* video_source,\r
+ frame_muxer2::audio_source_t* audio_source,\r
frame_muxer2::target_t& target,\r
double in_fps, \r
const safe_ptr<core::frame_factory>& frame_factory)\r
- : video_source_(video_source)\r
- , audio_source_(audio_source)\r
- , target_(target)\r
- , video_streams_(1)\r
- , audio_streams_(1)\r
- , display_mode_(display_mode::invalid)\r
+ : display_mode_(display_mode::invalid)\r
, in_fps_(in_fps)\r
, format_desc_(frame_factory->get_video_format_desc())\r
, auto_transcode_(env::properties().get("configuration.producers.auto-transcode", false))\r
- , audio_sample_count_(0)\r
- , video_frame_count_(0)\r
, frame_factory_(make_safe<core::concrt_frame_factory>(frame_factory))\r
+ , video_(std::bind(&make_write_frame, this, std::placeholders::_1, frame_factory, 0))\r
+ , push_video_(std::bind(&implementation::push_video, this, std::placeholders::_1))\r
+ , push_audio_(std::bind(&implementation::push_audio, this, std::placeholders::_1))\r
+ , merge_(std::bind(&implementation::merge, this, std::placeholders::_1), &target)\r
+ , join_(make_join(&video_, &audio_))\r
{\r
- start();\r
- }\r
+ if(video_source)\r
+ video_source->link_target(&push_video_);\r
+ if(audio_source)\r
+ audio_source->link_target(&push_audio_);\r
\r
- ~implementation()\r
- {\r
- agent::wait(this);\r
+ join_->link_target(&merge_);\r
}\r
\r
- virtual void run()\r
+ safe_ptr<core::basic_frame> merge(const join_element_t& element)\r
{\r
- try\r
- {\r
- while(true)\r
- {\r
- Concurrency::parallel_invoke(\r
- [&]\r
- {\r
- while(!video_ready())\r
- {\r
- if(video_source_)\r
- {\r
- auto video = Concurrency::receive(*video_source_);\r
- if(video == eof_video())\r
- break;\r
- push(video, 0); \r
- }\r
- else\r
- push(empty_video(), 0); \r
- }\r
- },\r
- [&]\r
- {\r
- while(!audio_ready())\r
- {\r
- if(audio_source_)\r
- {\r
- auto audio = Concurrency::receive(*audio_source_);\r
- if(audio == eof_audio())\r
- break;\r
- push(audio); \r
- }\r
- else\r
- push(empty_audio()); \r
- } \r
- });\r
-\r
- if((video_source_ && !video_ready()) || (audio_source_ && !audio_ready())) \r
- break; \r
-\r
- commit();\r
- \r
- if(!frame_buffer_.empty())\r
- {\r
- Concurrency::send(target_, std::shared_ptr<core::basic_frame>(frame_buffer_.front()));\r
- frame_buffer_.pop_front(); \r
- }\r
- }\r
- }\r
- catch(...)\r
- {\r
- CASPAR_LOG_CURRENT_EXCEPTION();\r
- }\r
- \r
- Concurrency::send(target_, std::shared_ptr<core::basic_frame>(core::basic_frame::eof()));\r
- \r
- done();\r
+ //if(std::get<0>(element) == eof_video() || std::get<1>(element) == eof_audio())\r
+ // return core::basic_frame::eof();\r
+ auto frame = std::get<0>(element);\r
+ frame->audio_data() = std::move(*std::get<1>(element));\r
+ return make_safe_ptr(frame);\r
}\r
-\r
- void push(const std::shared_ptr<AVFrame>& video_frame, int hints)\r
+ \r
+ void push_video(const std::shared_ptr<AVFrame>& video_frame)\r
{ \r
if(!video_frame)\r
return;\r
\r
- if(video_frame == loop_video())\r
- { \r
- CASPAR_LOG(debug) << L"video-frame-count: " << static_cast<float>(video_frame_count_);\r
- video_frame_count_ = 0;\r
- video_streams_.push_back(std::queue<safe_ptr<write_frame>>());\r
- return;\r
- }\r
-\r
+ if(video_frame == loop_video()) \r
+ return; \r
+ \r
if(video_frame == empty_video())\r
{\r
- video_streams_.back().push(make_safe<core::write_frame>(this));\r
- ++video_frame_count_;\r
- display_mode_ = display_mode::simple;\r
+ Concurrency::send(video_, make_safe_ptr(empty_video()));\r
return;\r
}\r
-\r
+ \r
if(display_mode_ == display_mode::invalid)\r
{\r
if(auto_transcode_)\r
display_mode_ = display_mode::simple;\r
}\r
\r
+		// Fall back to a "copy" filter so filtered frames are duplicated and the original AVFrames can be released.
+ if(display_mode_ != display_mode::simple && filter_.filter_str().empty())\r
+ filter_ = filter(L"copy"); \r
+\r
CASPAR_LOG(info) << "[frame_muxer] " << display_mode::print(display_mode_);\r
}\r
-\r
- \r
- if(hints & core::frame_producer::ALPHA_HINT)\r
- video_frame->format = make_alpha_format(video_frame->format);\r
+ \r
+ //if(hints & core::frame_producer::ALPHA_HINT)\r
+ // video_frame->format = make_alpha_format(video_frame->format);\r
\r
auto format = video_frame->format;\r
if(video_frame->format == CASPAR_PIX_FMT_LUMA) // CASPAR_PIX_FMT_LUMA is not valid for filter, change it to GRAY8\r
\r
filter_.push(video_frame);\r
\r
- while(true)\r
- {\r
- auto av_frame = filter_.poll();\r
- if(!av_frame)\r
- break;\r
- \r
+ BOOST_FOREACH(auto av_frame, filter_.poll_all())\r
+ { \r
av_frame->format = format;\r
+ \r
+ video_frames_.push(av_frame);\r
\r
- auto frame = make_write_frame(this, make_safe_ptr(av_frame), frame_factory_, hints);\r
+ switch(display_mode_)\r
+ {\r
+ case display_mode::simple: \r
+ case display_mode::deinterlace:\r
+ case display_mode::deinterlace_bob:\r
+ {\r
+ Concurrency::send(video_, video_frames_.front());\r
+ video_frames_.pop();\r
+ break;\r
+ }\r
+ case display_mode::duplicate: \r
+ {\r
+ if(video_frames_.size() < 2)\r
+ return;\r
+\r
+ Concurrency::send(video_, video_frames_.front());\r
+ video_frames_.pop();\r
+ Concurrency::send(video_, video_frames_.front());\r
+ video_frames_.pop();\r
+ break;\r
+ }\r
+ case display_mode::half: \r
+ {\r
+ if(video_frames_.size() < 2)\r
+ break;\r
+ \r
+ Concurrency::send(video_, video_frames_.front());\r
+ video_frames_.pop();\r
+ video_frames_.pop();\r
+\r
+ break;\r
+ }\r
+ case display_mode::deinterlace_bob_reinterlace:\r
+ case display_mode::interlace: \r
+ {\r
+ if(video_frames_.size() < 2)\r
+ break;\r
\r
- // Fix field-order if needed\r
- if(frame->get_type() == core::field_mode::lower && format_desc_.field_mode == core::field_mode::upper)\r
- frame->get_frame_transform().fill_translation[1] += 1.0/static_cast<double>(format_desc_.height);\r
- else if(frame->get_type() == core::field_mode::upper && format_desc_.field_mode == core::field_mode::lower)\r
- frame->get_frame_transform().fill_translation[1] -= 1.0/static_cast<double>(format_desc_.height);\r
+ auto frame1 = video_frames_.front();\r
+ video_frames_.pop();\r
+ auto frame2 = video_frames_.front();\r
+ video_frames_.pop();\r
+ \r
+ //Concurrency::send(video_, video_, core::basic_frame::interlace(frame1, frame2, format_desc_.field_mode));\r
\r
- video_streams_.back().push(frame);\r
- ++video_frame_count_;\r
+ break;\r
+ }\r
+ default: \r
+ BOOST_THROW_EXCEPTION(invalid_operation() << msg_info("invalid display-mode"));\r
+ }\r
}\r
-\r
- if(video_streams_.back().size() > 8)\r
- BOOST_THROW_EXCEPTION(invalid_operation() << source_info("frame_muxer") << msg_info("video-stream overflow. This can be caused by incorrect frame-rate. Check clip meta-data."));\r
}\r
\r
- void push(std::shared_ptr<core::audio_buffer> audio_samples)\r
+ void push_audio(const std::shared_ptr<core::audio_buffer>& audio_samples)\r
{\r
if(!audio_samples)\r
return;\r
\r
- if(audio_samples == loop_audio()) \r
- {\r
- CASPAR_LOG(debug) << L"audio-chunk-count: " << audio_sample_count_/format_desc_.audio_samples_per_frame;\r
- audio_streams_.push_back(core::audio_buffer());\r
- audio_sample_count_ = 0;\r
- return;\r
- }\r
+ if(audio_samples == loop_audio()) \r
+ return; \r
\r
if(audio_samples == empty_audio()) \r
- audio_samples = std::make_shared<core::audio_buffer>(format_desc_.audio_samples_per_frame); \r
-\r
- audio_sample_count_ += audio_samples->size();\r
-\r
- boost::range::push_back(audio_streams_.back(), *audio_samples);\r
-\r
- if(audio_streams_.back().size() > 8*format_desc_.audio_samples_per_frame)\r
- BOOST_THROW_EXCEPTION(invalid_operation() << source_info("frame_muxer") << msg_info("audio-stream overflow. This can be caused by incorrect frame-rate. Check clip meta-data."));\r
- }\r
- \r
- safe_ptr<core::write_frame> pop_video()\r
- {\r
- auto frame = video_streams_.front().front();\r
- video_streams_.front().pop();\r
- \r
- return frame;\r
- }\r
-\r
- core::audio_buffer pop_audio()\r
- {\r
- CASPAR_ASSERT(audio_streams_.front().size() >= format_desc_.audio_samples_per_frame);\r
-\r
- auto begin = audio_streams_.front().begin();\r
- auto end = begin + format_desc_.audio_samples_per_frame;\r
-\r
- auto samples = core::audio_buffer(begin, end);\r
- audio_streams_.front().erase(begin, end);\r
-\r
- return samples;\r
- }\r
- \r
- bool video_ready() const\r
- { \r
- return video_streams_.size() > 1 || (video_streams_.size() >= audio_streams_.size() && video_ready2());\r
- }\r
- \r
- bool audio_ready() const\r
- {\r
- return audio_streams_.size() > 1 || (audio_streams_.size() >= video_streams_.size() && audio_ready2());\r
- }\r
-\r
- bool video_ready2() const\r
- { \r
- switch(display_mode_)\r
- {\r
- case display_mode::deinterlace_bob_reinterlace: \r
- case display_mode::interlace: \r
- return video_streams_.front().size() >= 2;\r
- default: \r
- return !video_streams_.front().empty();\r
- }\r
- }\r
- \r
- bool audio_ready2() const\r
- {\r
- switch(display_mode_)\r
- {\r
- case display_mode::duplicate: \r
- return audio_streams_.front().size()/2 >= format_desc_.audio_samples_per_frame;\r
- default: \r
- return audio_streams_.front().size() >= format_desc_.audio_samples_per_frame;\r
- }\r
- }\r
- \r
- void commit()\r
- {\r
- if(video_streams_.size() > 1 && audio_streams_.size() > 1 && (!video_ready2() || !audio_ready2()))\r
- {\r
- if(!video_streams_.front().empty() || !audio_streams_.front().empty())\r
- CASPAR_LOG(debug) << "Truncating: " << video_streams_.front().size() << L" video-frames, " << audio_streams_.front().size() << L" audio-samples.";\r
+ Concurrency::send(audio_, std::make_shared<core::audio_buffer>(format_desc_.audio_samples_per_frame, 0)); \r
\r
- video_streams_.pop_front();\r
- audio_streams_.pop_front();\r
- }\r
-\r
- if(!video_ready2() || !audio_ready2())\r
- return;\r
+ audio_data_.insert(audio_data_.end(), audio_samples->begin(), audio_samples->end());\r
\r
- switch(display_mode_)\r
+ while(audio_data_.size() >= format_desc_.audio_samples_per_frame)\r
{\r
- case display_mode::simple: return simple(frame_buffer_);\r
- case display_mode::duplicate: return duplicate(frame_buffer_);\r
- case display_mode::half: return half(frame_buffer_);\r
- case display_mode::interlace: return interlace(frame_buffer_);\r
- case display_mode::deinterlace_bob: return simple(frame_buffer_);\r
- case display_mode::deinterlace_bob_reinterlace: return interlace(frame_buffer_);\r
- case display_mode::deinterlace: return simple(frame_buffer_);\r
- default: BOOST_THROW_EXCEPTION(invalid_operation() << msg_info("invalid display-mode"));\r
+ auto begin = audio_data_.begin(); \r
+ auto end = begin + format_desc_.audio_samples_per_frame;\r
+ \r
+ Concurrency::send(audio_, std::make_shared<core::audio_buffer>(begin, end));\r
+ audio_data_.erase(begin, end);\r
}\r
}\r
- \r
- void simple(std::deque<safe_ptr<basic_frame>>& dest)\r
- { \r
- auto frame1 = pop_video();\r
- frame1->audio_data() = pop_audio();\r
-\r
- dest.push_back(frame1); \r
- }\r
-\r
- void duplicate(std::deque<safe_ptr<basic_frame>>& dest)\r
- { \r
- auto frame = pop_video();\r
-\r
- auto frame1 = make_safe<core::write_frame>(*frame); // make a copy\r
- frame1->audio_data() = pop_audio();\r
-\r
- auto frame2 = frame;\r
- frame2->audio_data() = pop_audio();\r
-\r
- dest.push_back(frame1);\r
- dest.push_back(frame2);\r
- }\r
-\r
- void half(std::deque<safe_ptr<basic_frame>>& dest)\r
- { \r
- auto frame1 = pop_video();\r
- frame1->audio_data() = pop_audio();\r
- \r
- video_streams_.front().pop(); // Throw away\r
-\r
- dest.push_back(frame1);\r
- }\r
- \r
- void interlace(std::deque<safe_ptr<basic_frame>>& dest)\r
- { \r
- auto frame1 = pop_video();\r
- frame1->audio_data() = pop_audio();\r
- \r
- auto frame2 = pop_video();\r
-\r
- dest.push_back(core::basic_frame::interlace(frame1, frame2, format_desc_.field_mode)); \r
- }\r
- \r
+ \r
int64_t calc_nb_frames(int64_t nb_frames) const\r
{\r
switch(display_mode_)\r
}\r
};\r
\r
-frame_muxer2::frame_muxer2(const std::shared_ptr<video_source_t>& video_source, \r
- const std::shared_ptr<audio_source_t>& audio_source,\r
+frame_muxer2::frame_muxer2(video_source_t* video_source, \r
+ audio_source_t* audio_source,\r
target_t& target,\r
double in_fps, \r
const safe_ptr<core::frame_factory>& frame_factory)\r
: impl_(new implementation(video_source, audio_source, target, in_fps, frame_factory))\r
{\r
}\r
+\r
int64_t frame_muxer2::calc_nb_frames(int64_t nb_frames) const\r
{\r
return impl_->calc_nb_frames(nb_frames);\r
#include <boost/noncopyable.hpp>\r
\r
#include <agents.h>\r
+#include <semaphore.h>\r
+\r
#include <vector>\r
\r
struct AVFrame;\r
\r
typedef Concurrency::ISource<std::shared_ptr<AVFrame>> video_source_t;\r
typedef Concurrency::ISource<std::shared_ptr<core::audio_buffer>> audio_source_t;\r
- typedef Concurrency::ITarget<std::shared_ptr<core::basic_frame>> target_t;\r
+ typedef Concurrency::ITarget<safe_ptr<core::basic_frame>> target_t;\r
\r
- frame_muxer2(const std::shared_ptr<video_source_t>& video_source,\r
- const std::shared_ptr<audio_source_t>& audio_source, \r
+ frame_muxer2(video_source_t* video_source,\r
+ audio_source_t* audio_source, \r
target_t& target,\r
double in_fps, \r
const safe_ptr<core::frame_factory>& frame_factory);\r
\r
#include <agents.h>\r
#include <concrt_extras.h>\r
+#include <semaphore.h>\r
\r
#if defined(_MSC_VER)\r
#pragma warning (push)\r
\r
namespace caspar { namespace ffmpeg {\r
\r
-static const size_t MAX_BUFFER_COUNT = 16;\r
+static const size_t MAX_BUFFER_COUNT = 32;\r
\r
struct input::implementation : public Concurrency::agent, boost::noncopyable\r
-{ \r
- std::shared_ptr<AVFormatContext> format_context_; // Destroy this last\r
- int default_stream_index_;\r
-\r
- safe_ptr<diagnostics::graph> graph_;\r
- \r
- const std::wstring filename_;\r
- const bool loop_;\r
- const size_t start_; \r
- const size_t length_;\r
- size_t frame_number_;\r
- \r
- input::target_t& target_;\r
- \r
- tbb::atomic<size_t> nb_frames_;\r
- tbb::atomic<size_t> nb_loops_;\r
- \r
- tbb::atomic<size_t> packets_count_;\r
-\r
- bool eof_;\r
- bool stop_;\r
+{\r
+ input::target_t& target_;\r
\r
- Concurrency::unbounded_buffer<int> tickets_;\r
+ const std::wstring filename_;\r
+ const safe_ptr<AVFormatContext> format_context_; // Destroy this last\r
+ int default_stream_index_;\r
+ const boost::iterator_range<AVStream**> streams_;\r
\r
- boost::iterator_range<AVStream**> streams_;\r
+ safe_ptr<diagnostics::graph> graph_;\r
+ \r
+ const bool loop_;\r
+ const size_t start_; \r
+ const size_t length_;\r
+ size_t frame_number_;\r
+ \r
+ tbb::atomic<size_t> nb_frames_;\r
+ tbb::atomic<size_t> nb_loops_; \r
+ tbb::atomic<size_t> packets_count_;\r
\r
+ bool stop_;\r
+ \r
public:\r
explicit implementation(input::target_t& target,\r
const safe_ptr<diagnostics::graph>& graph, \r
size_t start,\r
size_t length)\r
: target_(target)\r
+ , filename_(filename)\r
+ , format_context_(open_input(filename)) \r
+ , default_stream_index_(av_find_default_stream_index(format_context_.get()))\r
+ , streams_(format_context_->streams, format_context_->streams + format_context_->nb_streams)\r
, graph_(graph)\r
, loop_(loop)\r
- , filename_(filename)\r
, start_(start)\r
, length_(length)\r
, frame_number_(0)\r
- , eof_(false)\r
, stop_(false)\r
- { \r
- packets_count_ = 0;\r
- nb_frames_ = 0;\r
- nb_loops_ = 0;\r
- \r
- AVFormatContext* weak_format_context_ = nullptr;\r
- THROW_ON_ERROR2(avformat_open_input(&weak_format_context_, narrow(filename).c_str(), nullptr, nullptr), print());\r
- CASPAR_VERIFY(weak_format_context_, ffmpeg_error());\r
-\r
- format_context_.reset(weak_format_context_, av_close_input_file);\r
- \r
- av_dump_format(weak_format_context_, 0, narrow(filename).c_str(), 0);\r
- \r
- THROW_ON_ERROR2(avformat_find_stream_info(format_context_.get(), nullptr), print());\r
- \r
- streams_ = boost::make_iterator_range(format_context_->streams, format_context_->streams + format_context_->nb_streams);\r
- default_stream_index_ = THROW_ON_ERROR2(av_find_default_stream_index(format_context_.get()), print());\r
- \r
- for(int n = 0; n < MAX_BUFFER_COUNT; ++n)\r
- tickets_.enqueue(0);\r
-\r
+ { \r
+ packets_count_ = 0;\r
+ nb_frames_ = 0;\r
+ nb_loops_ = 0;\r
+ \r
+ av_dump_format(format_context_.get(), 0, narrow(filename).c_str(), 0);\r
+ \r
if(start_ > 0) \r
seek_frame(start_);\r
\r
graph_->set_color("seek", diagnostics::color(1.0f, 0.5f, 0.0f));\r
graph_->set_color("buffer-count", diagnostics::color(0.7f, 0.4f, 0.4f));\r
graph_->set_color("buffer-size", diagnostics::color(1.0f, 1.0f, 0.0f)); \r
-\r
- CASPAR_VERIFY(default_stream_index_ > -1, ffmpeg_error());\r
}\r
\r
~implementation()\r
{\r
try\r
{\r
- while(!stop_)\r
+ while(!stop_ && read_next_packet())\r
{\r
- read_next_packet();\r
- \r
- graph_->update_value("buffer-count", (static_cast<double>(packets_count_)+0.001)/MAX_BUFFER_COUNT); \r
- } \r
+ }\r
}\r
catch(...)\r
{\r
CASPAR_LOG_CURRENT_EXCEPTION();\r
} \r
\r
- std::for_each(streams_.begin(), streams_.end(), [this](const AVStream* stream)\r
- {\r
+ BOOST_FOREACH(auto stream, streams_)\r
Concurrency::send(target_, eof_packet(stream->index)); \r
- });\r
- \r
+ \r
done();\r
}\r
\r
- void read_next_packet()\r
+ bool read_next_packet()\r
{ \r
- if(eof_)\r
- return;\r
-\r
- int ret = 0;\r
-\r
auto packet = create_packet();\r
-\r
+ \r
+ int ret = [&]() -> int\r
{\r
Concurrency::scoped_oversubcription_token oversubscribe;\r
- ret = av_read_frame(format_context_.get(), packet.get()); // packet is only valid until next call of av_read_frame. Use av_dup_packet to extend its life. \r
- }\r
+ return av_read_frame(format_context_.get(), packet.get()); // packet is only valid until next call of av_read_frame. Use av_dup_packet to extend its life. \r
+ }();\r
\r
if(is_eof(ret)) \r
{\r
\r
if(loop_)\r
{\r
- int flags = AVSEEK_FLAG_BACKWARD;\r
-\r
- int vid_stream_index = av_find_best_stream(format_context_.get(), AVMEDIA_TYPE_VIDEO, -1, -1, 0, 0);\r
- if(vid_stream_index >= 0)\r
- {\r
- auto codec_id = format_context_->streams[vid_stream_index]->codec->codec_id;\r
- if(codec_id == CODEC_ID_VP6A || codec_id == CODEC_ID_VP6F || codec_id == CODEC_ID_VP6)\r
- flags |= AVSEEK_FLAG_BYTE;\r
- }\r
-\r
- seek_frame(start_, flags);\r
- graph_->add_tag("seek"); \r
- CASPAR_LOG(trace) << print() << " Looping."; \r
+ CASPAR_LOG(trace) << print() << " Looping.";\r
+ seek_frame(start_, AVSEEK_FLAG_BACKWARD); \r
} \r
else\r
{\r
CASPAR_LOG(trace) << print() << " Stopping.";\r
- eof_ = true;\r
+ return false;\r
}\r
}\r
else\r
{\r
packet->size = size;\r
packet->data = data;\r
- tickets_.enqueue(0);\r
--packets_count_;\r
+ graph_->update_value("buffer-count", (static_cast<double>(packets_count_)+0.001)/MAX_BUFFER_COUNT); \r
});\r
\r
- tickets_.dequeue();\r
++packets_count_;\r
\r
- Concurrency::send(target_, packet);\r
+ Concurrency::asend(target_, packet);\r
\r
graph_->update_value("buffer-count", (static_cast<double>(packets_count_)+0.001)/MAX_BUFFER_COUNT);\r
} \r
+\r
+ return true;\r
}\r
\r
void seek_frame(int64_t frame, int flags = 0)\r
- { \r
+ { \r
+ if(flags == AVSEEK_FLAG_BACKWARD)\r
+ {\r
+ // Fix VP6 seeking\r
+ int vid_stream_index = av_find_best_stream(format_context_.get(), AVMEDIA_TYPE_VIDEO, -1, -1, 0, 0);\r
+ if(vid_stream_index >= 0)\r
+ {\r
+ auto codec_id = format_context_->streams[vid_stream_index]->codec->codec_id;\r
+ if(codec_id == CODEC_ID_VP6A || codec_id == CODEC_ID_VP6F || codec_id == CODEC_ID_VP6)\r
+ flags |= AVSEEK_FLAG_BYTE;\r
+ }\r
+ }\r
+\r
THROW_ON_ERROR2(av_seek_frame(format_context_.get(), default_stream_index_, frame, flags), print()); \r
auto packet = create_packet();\r
- packet->size = 0;\r
+ packet->size = 0; \r
\r
- std::for_each(streams_.begin(), streams_.end(), [this](const AVStream* stream)\r
- {\r
+ BOOST_FOREACH(auto stream, streams_)\r
Concurrency::send(target_, loop_packet(stream->index)); \r
- });\r
+\r
+ graph_->add_tag("seek"); \r
} \r
\r
bool is_eof(int ret)\r
}\r
};\r
\r
-input::input(target_t& target, \r
- const safe_ptr<diagnostics::graph>& graph, \r
+input::input(input::target_t& target,\r
+ const safe_ptr<diagnostics::graph>& graph, \r
const std::wstring& filename, \r
bool loop, \r
size_t start, \r
#include <common/memory/safe_ptr.h>\r
\r
#include <agents.h>\r
+#include <concrt.h>\r
+\r
#include <memory>\r
#include <string>\r
\r
\r
typedef Concurrency::ITarget<std::shared_ptr<AVPacket>> target_t;\r
\r
- explicit input(target_t& target, \r
+ explicit input(target_t& target, \r
const safe_ptr<diagnostics::graph>& graph, \r
const std::wstring& filename, bool loop, \r
size_t start = 0, \r
\r
#include "util.h"\r
\r
+#include "../ffmpeg_error.h"\r
#include "format/flv.h"\r
\r
#include <concurrent_unordered_map.h>\r
#include <core/mixer/write_frame.h>\r
\r
#include <common/exception/exceptions.h>\r
+#include <common/utility/assert.h>\r
\r
#include <ppl.h>\r
\r
{ \r
static Concurrency::concurrent_unordered_map<size_t, Concurrency::concurrent_queue<std::shared_ptr<SwsContext>>> sws_contexts_;\r
\r
+ if(decoded_frame->width < 1 || decoded_frame->height < 1)\r
+ return make_safe<core::write_frame>(tag);\r
+\r
const auto width = decoded_frame->width;\r
const auto height = decoded_frame->height;\r
auto desc = get_pixel_format_desc(static_cast<PixelFormat>(decoded_frame->format), width, height);\r
if(hints & core::frame_producer::ALPHA_HINT)\r
desc = get_pixel_format_desc(static_cast<PixelFormat>(make_alpha_format(decoded_frame->format)), width, height);\r
\r
+ std::shared_ptr<core::write_frame> write;\r
+\r
if(desc.pix_fmt == core::pixel_format::invalid)\r
{\r
auto pix_fmt = static_cast<PixelFormat>(decoded_frame->format);\r
\r
- auto write = frame_factory->create_frame(tag, get_pixel_format_desc(PIX_FMT_BGRA, width, height));\r
+ write = frame_factory->create_frame(tag, get_pixel_format_desc(PIX_FMT_BGRA, width, height));\r
write->set_type(get_mode(*decoded_frame));\r
\r
std::shared_ptr<SwsContext> sws_context;\r
pool.push(sws_context);\r
\r
write->commit();\r
-\r
- return write;\r
}\r
else\r
{\r
- auto write = frame_factory->create_frame(tag, desc);\r
+ write = frame_factory->create_frame(tag, desc);\r
write->set_type(get_mode(*decoded_frame));\r
\r
for(int n = 0; n < static_cast<int>(desc.planes.size()); ++n)\r
\r
write->commit(n);\r
}\r
- \r
- return write;\r
}\r
+ \r
+ // Fix field-order if needed\r
+ if(write->get_type() == core::field_mode::lower && frame_factory->get_video_format_desc().field_mode == core::field_mode::upper)\r
+ write->get_frame_transform().fill_translation[1] += 1.0/static_cast<double>(frame_factory->get_video_format_desc().height);\r
+ else if(write->get_type() == core::field_mode::upper && frame_factory->get_video_format_desc().field_mode == core::field_mode::lower)\r
+ write->get_frame_transform().fill_translation[1] -= 1.0/static_cast<double>(frame_factory->get_video_format_desc().height);\r
+\r
+ return make_safe_ptr(write);\r
}\r
\r
bool is_sane_fps(AVRational time_base)\r
return audio2;\r
}\r
\r
+safe_ptr<AVCodecContext> open_codec(AVFormatContext& context, enum AVMediaType type, int& index)\r
+{ \r
+ AVCodec* decoder;\r
+ index = THROW_ON_ERROR3(av_find_best_stream(&context, type, -1, -1, &decoder, 0), "", ffmpeg_stream_not_found);\r
+ THROW_ON_ERROR2(avcodec_open(context.streams[index]->codec, decoder), "");\r
+ return safe_ptr<AVCodecContext>(context.streams[index]->codec, avcodec_close);\r
+}\r
+\r
+safe_ptr<AVFormatContext> open_input(const std::wstring& filename)\r
+{\r
+ AVFormatContext* weak_context = nullptr;\r
+ THROW_ON_ERROR2(avformat_open_input(&weak_context, narrow(filename).c_str(), nullptr, nullptr), filename);\r
+ safe_ptr<AVFormatContext> context(weak_context, av_close_input_file); \r
+ THROW_ON_ERROR2(avformat_find_stream_info(weak_context, nullptr), filename);\r
+ fix_meta_data(*context);\r
+ return context;\r
+}\r
+\r
}}
\ No newline at end of file
#pragma warning (pop)\r
#endif\r
\r
+#include <agents.h>\r
+\r
struct AVFrame;\r
struct AVFormatContext;\r
struct AVPacket;\r
}\r
\r
namespace ffmpeg {\r
-\r
+ \r
static const PixelFormat CASPAR_PIX_FMT_LUMA = PIX_FMT_MONOBLACK; // Just hijack some unusual pixel format.
\r
core::field_mode::type get_mode(AVFrame& frame);\r
const std::shared_ptr<core::audio_buffer>& empty_audio();\r
const std::shared_ptr<core::audio_buffer>& eof_audio();\r
\r
+safe_ptr<AVCodecContext> open_codec(AVFormatContext& context, enum AVMediaType type, int& index);\r
+safe_ptr<AVFormatContext> open_input(const std::wstring& filename);\r
+\r
}}
\ No newline at end of file
#pragma warning (pop)\r
#endif\r
\r
+#include <connect.h>\r
+#include <semaphore.h>\r
+\r
namespace caspar { namespace ffmpeg {\r
\r
-struct video_decoder::implementation : public Concurrency::agent, boost::noncopyable\r
-{\r
- video_decoder::source_t& source_;\r
- video_decoder::target_t& target_;\r
-\r
- std::shared_ptr<AVCodecContext> codec_context_;\r
+struct video_decoder::implementation : boost::noncopyable\r
+{ \r
int index_;\r
+ std::shared_ptr<AVCodecContext> codec_context_;\r
\r
double fps_;\r
int64_t nb_frames_;\r
size_t width_;\r
size_t height_;\r
bool is_progressive_;\r
-\r
- Concurrency::event event_;\r
+ \r
+ Concurrency::transformer<std::shared_ptr<AVPacket>, std::shared_ptr<AVFrame>> transformer_;\r
+ \r
+ Concurrency::semaphore semaphore_;\r
\r
public:\r
- explicit implementation(video_decoder::source_t& source,\r
- video_decoder::target_t& target,\r
- const safe_ptr<AVFormatContext>& context,\r
- double fps) \r
- : source_(source)\r
- , target_(target)\r
- , fps_(fps)\r
- , nb_frames_(0)\r
- , width_(0)\r
- , height_(0)\r
+ explicit implementation(video_decoder::source_t& source, video_decoder::target_t& target, AVFormatContext& context) \r
+ : codec_context_(open_codec(context, AVMEDIA_TYPE_VIDEO, index_))\r
+ , fps_(static_cast<double>(codec_context_->time_base.den) / static_cast<double>(codec_context_->time_base.num))\r
+ , nb_frames_(context.streams[index_]->nb_frames)\r
+ , width_(codec_context_->width)\r
+ , height_(codec_context_->height)\r
, is_progressive_(true)\r
- {\r
- event_.set();\r
-\r
- AVCodec* dec;\r
- index_ = THROW_ON_ERROR3(av_find_best_stream(context.get(), AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0), "[video_decoder]", ffmpeg_stream_not_found);\r
- CASPAR_VERIFY(index_ > -1, ffmpeg_error());\r
- \r
- THROW_ON_ERROR2(avcodec_open(context->streams[index_]->codec, dec), "[video_decoder]");\r
- CASPAR_VERIFY(context->streams[index_]->codec, ffmpeg_error())\r
- \r
- codec_context_.reset(context->streams[index_]->codec, avcodec_close);\r
+ , transformer_(std::bind(&implementation::decode, this, std::placeholders::_1), &target, [this](const std::shared_ptr<AVPacket>& packet)\r
+ {\r
+ return packet && packet->stream_index == index_;\r
+ })\r
+ , semaphore_(1)\r
+ { \r
+ CASPAR_LOG(debug) << "[video_decoder] " << context.streams[index_]->codec->codec->long_name;\r
\r
- CASPAR_LOG(debug) << "[video_decoder] " << context->streams[index_]->codec->codec->long_name;\r
-\r
- // Some files give an invalid time_base numerator, try to fix it.\r
-\r
- fix_meta_data(*context);\r
- \r
- fps_ = static_cast<double>(codec_context_->time_base.den) / static_cast<double>(codec_context_->time_base.num);\r
- nb_frames_ = context->streams[index_]->nb_frames;\r
- \r
- width_ = codec_context_->width;\r
- height_ = codec_context_->height;\r
-\r
CASPAR_VERIFY(width_ > 0, ffmpeg_error());\r
CASPAR_VERIFY(height_ > 0, ffmpeg_error());\r
\r
- start();\r
+ Concurrency::connect(source, transformer_);\r
}\r
- \r
- ~implementation()\r
- {\r
- agent::wait(this);\r
- }\r
- \r
- virtual void run()\r
- {\r
- try\r
- {\r
- while(true)\r
- {\r
- auto packet = Concurrency::receive(source_, [this](const std::shared_ptr<AVPacket>& packet)\r
- {\r
- return packet && packet->stream_index == index_;\r
- });\r
-\r
- //if(packet == eof_packet(index_) || packet == loop_packet(index_))\r
- //{\r
- // if(codec_context_->codec->capabilities & CODEC_CAP_DELAY)\r
- // {\r
- // AVPacket pkt;\r
- // av_init_packet(&pkt);\r
- // pkt.data = nullptr;\r
- // pkt.size = 0;\r
- //\r
- // while(decode(pkt)){}\r
- // }\r
- //}\r
- \r
- if(packet == eof_packet(index_)) \r
- break;\r
- \r
- if(packet == loop_packet(index_))\r
- { \r
- avcodec_flush_buffers(codec_context_.get()); \r
- Concurrency::send(target_, loop_video()); \r
- }\r
- else if(packet->stream_index == index_)\r
- { \r
- Concurrency::send(target_, decode(*packet)); \r
- }\r
- }\r
- }\r
- catch(...)\r
- {\r
- CASPAR_LOG_CURRENT_EXCEPTION();\r
- }\r
\r
- Concurrency::send(target_, eof_video());\r
- \r
- done();\r
- }\r
-\r
- std::shared_ptr<AVFrame> decode(AVPacket& pkt)\r
+ std::shared_ptr<AVFrame> decode(const std::shared_ptr<AVPacket>& packet)\r
{\r
- event_.wait();\r
+ if(!packet)\r
+ return nullptr;\r
+\r
+ if(packet == loop_packet(index_))\r
+ return loop_video();\r
\r
+ if(packet == eof_packet(index_))\r
+ return eof_video();\r
+ \r
std::shared_ptr<AVFrame> decoded_frame(avcodec_alloc_frame(), [this](AVFrame* frame)\r
{\r
av_free(frame);\r
- event_.set();\r
+ semaphore_.release();\r
});\r
+ semaphore_.acquire();\r
\r
int frame_finished = 0;\r
- THROW_ON_ERROR2(avcodec_decode_video2(codec_context_.get(), decoded_frame.get(), &frame_finished, &pkt), "[video_decocer]");\r
- event_.reset();\r
+ THROW_ON_ERROR2(avcodec_decode_video2(codec_context_.get(), decoded_frame.get(), &frame_finished, packet.get()), "[video_decocer]");\r
\r
+ // 1 packet <=> 1 frame.\r
	// If a decoder consumes less than the whole packet then something is wrong
	// that might be just harmless padding at the end, or a problem with the
	// AVParser or demuxer which put more than one frame in an AVPacket.
- pkt.data = nullptr;\r
- pkt.size = 0;\r
\r
if(frame_finished == 0) \r
return nullptr;\r
}\r
};\r
\r
-video_decoder::video_decoder(source_t& source,\r
- target_t& target,\r
- const safe_ptr<AVFormatContext>& context, \r
- double fps) \r
- : impl_(new implementation(source, target, context, fps))\r
+video_decoder::video_decoder(video_decoder::source_t& source, video_decoder::target_t& target, AVFormatContext& context) \r
+ : impl_(new implementation(source, target, context))\r
{\r
}\r
\r
#include <boost/noncopyable.hpp>\r
\r
#include <agents.h>\r
+\r
#include <vector>\r
\r
struct AVFormatContext;\r
class video_decoder : boost::noncopyable\r
{\r
public:\r
-\r
+ \r
typedef Concurrency::ISource<std::shared_ptr<AVPacket>> source_t;\r
- typedef Concurrency::ITarget<std::shared_ptr<AVFrame>> target_t;\r
-\r
- explicit video_decoder(source_t& source,\r
- target_t& target,\r
- const safe_ptr<AVFormatContext>& context, \r
- double fps); \r
+ typedef Concurrency::ITarget<std::shared_ptr<AVFrame>> target_t;\r
+ \r
+ explicit video_decoder(source_t& source, target_t& target, AVFormatContext& context); \r
\r
size_t width() const;\r
size_t height() const;\r
<PrecompiledHeader>Use</PrecompiledHeader>\r
<BrowseInformation>true</BrowseInformation>\r
<WarningLevel>Level4</WarningLevel>\r
- <DebugInformationFormat>EditAndContinue</DebugInformationFormat>\r
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>\r
<PreprocessorDefinitions>TBB_USE_DEBUG;TBB_USE_CAPTURED_EXCEPTION=0;TBB_USE_ASSERT=1;_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>\r
<TreatWarningAsError>true</TreatWarningAsError>\r
<MultiProcessorCompilation>true</MultiProcessorCompilation>\r
\r
#include <functional>\r
\r
-#include <tbb/spin_mutex.h>\r
+#include <agents.h>\r
+#include <agents_extras.h>\r
+#include <concrt_extras.h>\r
\r
namespace caspar { namespace flash {\r
\r
{ \r
const std::wstring filename_;\r
\r
- const std::shared_ptr<core::frame_factory> frame_factory_;\r
+ const safe_ptr<core::frame_factory> frame_factory_;\r
\r
CComObject<caspar::flash::FlashAxContainer>* ax_;\r
safe_ptr<core::basic_frame> head_;\r
const size_t height_;\r
\r
public:\r
- flash_renderer(const safe_ptr<diagnostics::graph>& graph, const std::shared_ptr<core::frame_factory>& frame_factory, const std::wstring& filename, int width, int height) \r
+ flash_renderer(const safe_ptr<diagnostics::graph>& graph, const safe_ptr<core::frame_factory>& frame_factory, const std::wstring& filename, int width, int height) \r
: graph_(graph)\r
, filename_(filename)\r
- , frame_factory_(frame_factory)\r
+ , frame_factory_(make_safe<core::concrt_frame_factory>(frame_factory))\r
, ax_(nullptr)\r
, head_(core::basic_frame::empty())\r
, bmp_(width, height)\r
CASPAR_LOG(info) << print() << L" Thread ended.";\r
}\r
\r
+ void make_write_frame(const std::shared_ptr<bitmap>& bmp)\r
+ {\r
+\r
+ }\r
+\r
void param(const std::wstring& param)\r
{ \r
if(!ax_->FlashCall(param))\r
if(ax_->IsEmpty())\r
return core::basic_frame::empty(); \r
\r
- if(!has_underflow) \r
+ if(!has_underflow) \r
+ {\r
+ Concurrency::scoped_oversubcription_token oversubscribe;\r
timer_.tick(frame_time); // This will block the thread.\r
+ //Concurrency::wait(std::max<int>(0, frame_time-3));\r
+ }\r
else\r
graph_->add_tag("skip-sync");\r
\r
}\r
};\r
\r
-struct flash_producer : public core::frame_producer\r
+struct flash_producer : public Concurrency::agent, public core::frame_producer\r
{ \r
- const std::wstring filename_; \r
- const safe_ptr<core::frame_factory> frame_factory_;\r
-\r
- tbb::atomic<int> fps_;\r
-\r
- std::shared_ptr<diagnostics::graph> graph_;\r
+ Concurrency::unbounded_buffer<std::wstring> params_;\r
+ Concurrency::bounded_buffer<safe_ptr<core::basic_frame>> frames_;\r
\r
- tbb::concurrent_bounded_queue<safe_ptr<core::basic_frame>> frame_buffer_;\r
+ tbb::atomic<bool> is_running_;\r
+ \r
+ const safe_ptr<core::frame_factory> frame_factory_;\r
+ const std::wstring filename_; \r
+ tbb::atomic<int> fps_;\r
+ const int width_;\r
+ const int height_;\r
+ \r
+ mutable Concurrency::overwrite_buffer<safe_ptr<core::basic_frame>> last_frame_;\r
\r
- mutable tbb::spin_mutex last_frame_mutex_;\r
- safe_ptr<core::basic_frame> last_frame_;\r
+ safe_ptr<diagnostics::graph> graph_;\r
\r
- com_context<flash_renderer> context_; \r
-\r
- int width_;\r
- int height_;\r
public:\r
flash_producer(const safe_ptr<core::frame_factory>& frame_factory, const std::wstring& filename, size_t width, size_t height) \r
- : filename_(filename) \r
+ : frames_(frame_factory->get_video_format_desc().fps > 30.0 ? 2 : 1)\r
, frame_factory_(frame_factory)\r
- , context_(L"flash_producer")\r
- , last_frame_(core::basic_frame::empty())\r
+ , filename_(filename) \r
, width_(width > 0 ? width : frame_factory->get_video_format_desc().width)\r
, height_(height > 0 ? height : frame_factory->get_video_format_desc().height)\r
+ , graph_(diagnostics::create_graph("", false))\r
{ \r
if(!boost::filesystem::exists(filename))\r
BOOST_THROW_EXCEPTION(file_not_found() << boost::errinfo_file_name(narrow(filename))); \r
-\r
- fps_ = 0;\r
-\r
- graph_ = diagnostics::create_graph([this]{return print();});\r
- graph_->set_color("output-buffer-count", diagnostics::color(1.0f, 1.0f, 0.0f)); \r
+ \r
graph_->set_color("underflow", diagnostics::color(0.6f, 0.3f, 0.9f)); \r
+\r
+ Concurrency::send(last_frame_, core::basic_frame::empty());\r
\r
- frame_buffer_.set_capacity(frame_factory_->get_video_format_desc().fps > 30.0 ? 2 : 1);\r
+ fps_ = 0;\r
+ is_running_ = true;\r
\r
- initialize(); \r
+ graph_->start();\r
+ start();\r
}\r
\r
~flash_producer()\r
{\r
- frame_buffer_.clear();\r
+ is_running_ = false;\r
+ auto frame = core::basic_frame::empty();\r
+ while(Concurrency::try_receive(frames_, frame)){}\r
+ agent::wait(this);\r
}\r
+ \r
+ virtual void run()\r
+ { \r
+ try\r
+ {\r
+ struct co_init\r
+ {\r
+ co_init() {CoInitialize(NULL);}\r
+ ~co_init() {CoUninitialize();}\r
+ } init;\r
+\r
+ flash_renderer renderer(safe_ptr<diagnostics::graph>(graph_), frame_factory_, filename_, width_, height_);\r
\r
+ is_running_ = true;\r
+ while(is_running_)\r
+ {\r
+ std::wstring param;\r
+ while(is_running_ && Concurrency::try_receive(params_, param))\r
+ renderer.param(param);\r
+ \r
+ const auto& format_desc = frame_factory_->get_video_format_desc();\r
+\r
+ if(abs(renderer.fps()/2.0 - format_desc.fps) < 2.0) // flash == 2 * format -> interlace\r
+ {\r
+ auto frame1 = renderer.render_frame(false);\r
+ auto frame2 = renderer.render_frame(false);\r
+ Concurrency::send(last_frame_, frame2);\r
+ Concurrency::send(frames_, core::basic_frame::interlace(frame1, frame2, format_desc.field_mode));\r
+ }\r
+ else if(abs(renderer.fps()- format_desc.fps/2.0) < 2.0) // format == 2 * flash -> duplicate\r
+ {\r
+ auto frame = renderer.render_frame(false);\r
+ Concurrency::send(last_frame_, frame);\r
+ Concurrency::send(frames_, frame);\r
+ Concurrency::send(frames_, frame);\r
+ }\r
+ else //if(abs(renderer_->fps() - format_desc_.fps) < 0.1) // format == flash -> simple\r
+ {\r
+ auto frame = renderer.render_frame(false);\r
+ Concurrency::send(last_frame_, frame);\r
+ Concurrency::send(frames_, frame);\r
+ }\r
+\r
+ fps_ = static_cast<int>(renderer.fps()*100.0);\r
+ graph_->update_text(narrow(print()));\r
+ }\r
+ }\r
+ catch(...)\r
+ {\r
+ CASPAR_LOG_CURRENT_EXCEPTION();\r
+ }\r
+ \r
+ is_running_ = false;\r
+ done();\r
+ }\r
+ \r
// frame_producer\r
\r
virtual safe_ptr<core::basic_frame> receive(int)\r
- { \r
- graph_->set_value("output-buffer-count", static_cast<float>(frame_buffer_.size())/static_cast<float>(frame_buffer_.capacity()));\r
-\r
+ { \r
auto frame = core::basic_frame::late();\r
- if(!frame_buffer_.try_pop(frame))\r
+\r
+ try\r
+ {\r
+ frame = Concurrency::receive(frames_, 5);\r
+ }\r
+ catch(Concurrency::operation_timed_out&)\r
+ { \r
graph_->add_tag("underflow");\r
+ }\r
\r
return frame;\r
}\r
\r
virtual safe_ptr<core::basic_frame> last_frame() const\r
{\r
- tbb::spin_mutex::scoped_lock lock(last_frame_mutex_);\r
- return last_frame_;\r
+ return last_frame_.value();\r
} \r
\r
virtual void param(const std::wstring& param) \r
{ \r
- context_.begin_invoke([=]\r
+ if(!is_running_.fetch_and_store(true))\r
{\r
- if(!context_)\r
- initialize();\r
-\r
- try\r
- {\r
- context_->param(param); \r
-\r
- //const auto& format_desc = frame_factory_->get_video_format_desc();\r
- //if(abs(context_->fps() - format_desc.fps) > 0.01 && abs(context_->fps()/2.0 - format_desc.fps) > 0.01)\r
- // CASPAR_LOG(warning) << print() << " Invalid frame-rate: " << context_->fps() << L". Should be either " << format_desc.fps << L" or " << format_desc.fps*2.0 << L".";\r
- }\r
- catch(...)\r
- {\r
- CASPAR_LOG_CURRENT_EXCEPTION();\r
- context_.reset(nullptr);\r
- frame_buffer_.push(core::basic_frame::empty());\r
- }\r
- });\r
+ agent::wait(this);\r
+ start();\r
+ }\r
+ Concurrency::asend(params_, param);\r
}\r
\r
virtual std::wstring print() const\r
{ \r
return L"flash[" + boost::filesystem::wpath(filename_).filename() + L"|" + boost::lexical_cast<std::wstring>(fps_) + L"]"; \r
} \r
-\r
- // flash_producer\r
-\r
- void initialize()\r
- {\r
- context_.reset([&]{return new flash_renderer(safe_ptr<diagnostics::graph>(graph_), frame_factory_, filename_, width_, height_);});\r
- while(frame_buffer_.try_push(core::basic_frame::empty())){} \r
- render(context_.get());\r
- }\r
-\r
- safe_ptr<core::basic_frame> render_frame()\r
- {\r
- auto frame = context_->render_frame(frame_buffer_.size() < frame_buffer_.capacity()); \r
- tbb::spin_mutex::scoped_lock lock(last_frame_mutex_);\r
- last_frame_ = make_safe<core::basic_frame>(frame);\r
- return frame;\r
- }\r
-\r
- void render(const flash_renderer* renderer)\r
- { \r
- context_.begin_invoke([=]\r
- {\r
- if(context_.get() != renderer) // Since initialize will start a new recursive call make sure the recursive calls are only for a specific instance.\r
- return;\r
-\r
- try\r
- { \r
- const auto& format_desc = frame_factory_->get_video_format_desc();\r
-\r
- if(abs(context_->fps()/2.0 - format_desc.fps) < 2.0) // flash == 2 * format -> interlace\r
- {\r
- auto frame1 = render_frame();\r
- auto frame2 = render_frame();\r
- frame_buffer_.push(core::basic_frame::interlace(frame1, frame2, format_desc.field_mode));\r
- }\r
- else if(abs(context_->fps()- format_desc.fps/2.0) < 2.0) // format == 2 * flash -> duplicate\r
- {\r
- auto frame = render_frame();\r
- frame_buffer_.push(frame);\r
- frame_buffer_.push(frame);\r
- }\r
- else //if(abs(renderer_->fps() - format_desc_.fps) < 0.1) // format == flash -> simple\r
- {\r
- auto frame = render_frame();\r
- frame_buffer_.push(frame);\r
- }\r
-\r
- graph_->set_value("output-buffer-count", static_cast<float>(frame_buffer_.size())/static_cast<float>(frame_buffer_.capacity())); \r
- fps_.fetch_and_store(static_cast<int>(context_->fps()*100.0));\r
-\r
- render(renderer);\r
- }\r
- catch(...)\r
- {\r
- CASPAR_LOG_CURRENT_EXCEPTION();\r
- context_.reset(nullptr);\r
- frame_buffer_.push(core::basic_frame::empty());\r
- }\r
- });\r
- }\r
};\r
\r
safe_ptr<core::frame_producer> create_producer(const safe_ptr<core::frame_factory>& frame_factory, const std::vector<std::wstring>& params)\r
<PrecompiledHeader>NotUsing</PrecompiledHeader>\r
<BrowseInformation>true</BrowseInformation>\r
<WarningLevel>Level4</WarningLevel>\r
- <DebugInformationFormat>EditAndContinue</DebugInformationFormat>\r
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>\r
<PreprocessorDefinitions>TBB_USE_DEBUG;_SCL_SECURE_NO_WARNINGS;TBB_USE_CAPTURED_EXCEPTION=0;TBB_USE_ASSERT=1;_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>\r
<TreatWarningAsError>true</TreatWarningAsError>\r
<MultiProcessorCompilation>true</MultiProcessorCompilation>\r
<PrecompiledHeader>Use</PrecompiledHeader>\r
<BrowseInformation>true</BrowseInformation>\r
<WarningLevel>Level4</WarningLevel>\r
- <DebugInformationFormat>EditAndContinue</DebugInformationFormat>\r
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>\r
<PreprocessorDefinitions>TBB_USE_DEBUG;TBB_USE_CAPTURED_EXCEPTION=0;TBB_USE_ASSERT=1;_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>\r
<TreatWarningAsError>true</TreatWarningAsError>\r
<MultiProcessorCompilation>true</MultiProcessorCompilation>\r
<PrecompiledHeader>Use</PrecompiledHeader>\r
<BrowseInformation>true</BrowseInformation>\r
<WarningLevel>Level4</WarningLevel>\r
- <DebugInformationFormat>EditAndContinue</DebugInformationFormat>\r
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>\r
<PreprocessorDefinitions>TBB_USE_DEBUG;_SCL_SECURE_NO_WARNINGS;TBB_USE_CAPTURED_EXCEPTION=0;TBB_USE_ASSERT=1;_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>\r
<TreatWarningAsError>true</TreatWarningAsError>\r
<MultiProcessorCompilation>true</MultiProcessorCompilation>\r
<< L"BACKGROUND:" << status.background << L"\r\n"\r
<< L"STATUS:" << (status.is_paused ? L"PAUSED" : L"PLAYING") << L"\r\n"\r
<< L"TOTAL FRAMES:" << (status.total_frames == std::numeric_limits<int64_t>::max() ? 0 : status.total_frames) << L"\r\n"\r
- << L"CURRENT FRAME:" << status.current_frame << L"\r\n";\r
+ << L"CURRENT FRAME:" << status.current_frame << L"\r\n\r\n";\r
\r
SetReplyString(status_text.str());\r
return true;\r
<buffer-depth>1</buffer-depth>\r
<auto-transcode>true</auto-transcode>\r
<template-hosts>\r
+ <template-host>\r
+ <video-mode>PAL</video-mode>\r
+ <filename>cg.fth.18.pal</filename>\r
+ </template-host>\r
<template-host>\r
<video-mode>1080i5994</video-mode>\r
<filename>cg.fth.18.6000</filename>\r
<width>1280</width>\r
<height>720</height>\r
</template-host>\r
+ <template-host>\r
+ <video-mode>1080i5000</video-mode>\r
+ <filename>cg.fth.18</filename>\r
+ <width>1280</width>\r
+ <height>720</height>\r
+ </template-host>\r
</template-hosts>\r
</producers>\r
<channels>\r
<device>1</device>\r
<embedded-audio>true</embedded-audio>\r
</decklink>\r
+ <audio></audio>\r
</consumers>\r
</channel>\r
</channels>\r
\r
#include <core/mixer/gpu/ogl_device.h>\r
\r
-#include <tbb/task_scheduler_init.h>\r
-#include <tbb/task_scheduler_observer.h>\r
+#include <agents.h>\r
\r
#include <boost/property_tree/detail/file_parser_error.hpp>\r
\r
inc_prec(){timeBeginPeriod(1);}\r
~inc_prec(){timeEndPeriod(1);}\r
} inc_prec; \r
-\r
- // Install unstructured exception handlers into all tbb threads.\r
- struct tbb_thread_installer : public tbb::task_scheduler_observer\r
- {\r
- tbb_thread_installer(){observe(true);}\r
- void on_scheduler_entry(bool is_worker)\r
- {\r
- //caspar::detail::SetThreadName(GetCurrentThreadId(), "tbb-worker-thread");\r
- caspar::win32_exception::install_handler();\r
- }\r
- } tbb_thread_installer;\r
-\r
- tbb::task_scheduler_init init;\r
- \r
+ \r
try \r
{\r
// Configure environment properties from configuration.\r