#include "../util/util.h"\r
#include "../util/memory.h"\r
\r
-#include <core/consumer/frame/read_frame.h>\r
+#include <core/mixer/read_frame.h>\r
\r
#include <common/concurrency/executor.h>\r
#include <common/diagnostics/graph.h>\r
#include <common/memory/memcpy.h>\r
+#include <common/memory/memclr.h>\r
#include <common/utility/timer.h>\r
\r
#include <tbb/concurrent_queue.h>\r
blue_velvet_initialize();\r
blue_hanc_initialize();\r
}\r
+\r
+// Resolves the Bluefish driver entry points and attaches to the card with the
+// given 1-based index. Throws caspar_exception if the driver DLL was not
+// loaded, bluefish_exception if the attach itself fails.
+safe_ptr<CBlueVelvet4> create_blue(size_t device_index)
+{
+	// NOTE: the original tested !encode_hanc_frame twice (copy-paste bug);
+	// a single check of each resolved entry point is sufficient.
+	if(!BlueVelvetFactory4 || !encode_hanc_frame)
+		BOOST_THROW_EXCEPTION(caspar_exception() << msg_info("Bluefish drivers not found."));
+
+	// Factory-allocated SDK object, wrapped so it is released automatically.
+	auto blue = safe_ptr<CBlueVelvet4>(BlueVelvetFactory4());
+	
+	if(BLUE_FAIL(blue->device_attach(device_index, FALSE))) 
+		BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info("Failed to attach device."));
+
+	return blue;
+}
+\r
+// Maps the CasparCG format to the card's matching video mode by scanning the
+// modes the card reports (mode indices are 1-based in the Bluefish SDK).
+// Throws bluefish_exception (carrying the format name) if no mode matches.
+EVideoMode get_video_mode(CBlueVelvet4& blue, const core::video_format_desc& format_desc)
+{
+	EVideoMode vid_fmt = VID_FMT_INVALID;
+	auto desiredVideoFormat = vid_fmt_from_video_format(format_desc.format);
+	int videoModeCount = blue.count_video_mode();
+	for(int videoModeIndex = 1; videoModeIndex <= videoModeCount; ++videoModeIndex) 
+	{
+		EVideoMode videoMode = blue.enum_video_mode(videoModeIndex);
+		if(videoMode == desiredVideoFormat) 
+		{
+			vid_fmt = videoMode; 
+			break; // found the matching mode; no need to scan the rest
+		}
+	}
+	if(vid_fmt == VID_FMT_INVALID)
+		BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info("Failed to get video mode.") << arg_value_info(narrow(format_desc.name)));
+
+	return vid_fmt;
+}
\r
-struct bluefish_consumer::implementation : boost::noncopyable
+// Bluefish SDI playout consumer. Copies each rendered frame into a DMA
+// buffer ring and schedules output on a dedicated executor thread; the
+// executor's bounded capacity provides the output buffering/preroll depth.
+struct bluefish_consumer : boost::noncopyable
{
-	std::wstring model_name_;
-	const unsigned int device_index_;
+	safe_ptr<CBlueVelvet4> blue_;
+	const unsigned int	device_index_;
+	const core::video_format_desc	format_desc_;

-	std::shared_ptr<diagnostics::graph> graph_;
-	boost::timer perf_timer_;
+	const std::wstring model_name_;

-	boost::unique_future<void> active_;
-	
-	std::shared_ptr<CBlueVelvet4> blue_;
-	
-	core::video_format_desc format_desc_;
+	std::shared_ptr<diagnostics::graph> graph_;
+	boost::timer frame_timer_;
+	boost::timer tick_timer_;
+	boost::timer sync_timer_;	

-	unsigned long mem_fmt_;
-	unsigned long upd_fmt_;
-	EVideoMode vid_fmt_; 
-	unsigned long res_fmt_; 
-	unsigned long engine_mode_;
+	const EVideoMode vid_fmt_; 
+	const EMemoryFormat mem_fmt_;
+	const EUpdateMethod upd_fmt_;
+	const EResoFormat res_fmt_; 
+	EEngineMode engine_mode_;

-	std::array<blue_dma_buffer_ptr, 3> reserved_frames_;	
+	// DMA buffer ring; rotated once per frame so the card can scan out one
+	// buffer while the next frame is copied into another.
+	std::array<blue_dma_buffer_ptr, 4> reserved_frames_;	
+	tbb::concurrent_bounded_queue<std::shared_ptr<const core::read_frame>> frame_buffer_;

-	const bool embed_audio_;
+	int preroll_count_;

+	const bool embedded_audio_;
+	
	executor executor_;
public:
-	implementation::implementation(unsigned int device_index, bool embed_audio) 
-		: model_name_(L"BLUEFISH")
-		, device_index_(device_index) 
+	// Attaches to the card, configures video mode/colour matrix and engine,
+	// and allocates the DMA buffers. Throws bluefish_exception on failure.
+	bluefish_consumer(const core::video_format_desc& format_desc, unsigned int device_index, bool embedded_audio) 
+		: blue_(create_blue(device_index))
+		, device_index_(device_index)
+		, format_desc_(format_desc) 
+		, model_name_(get_card_desc(blue_->has_video_cardtype()))
+		, vid_fmt_(get_video_mode(*blue_, format_desc)) 
		, mem_fmt_(MEM_FMT_ARGB_PC)
		, upd_fmt_(UPD_FMT_FRAME)
-		, vid_fmt_(VID_FMT_INVALID) 
		, res_fmt_(RES_FMT_NORMAL) 
-		, engine_mode_(VIDEO_ENGINE_FRAMESTORE)	
-		, embed_audio_(embed_audio)
+		, engine_mode_(VIDEO_ENGINE_FRAMESTORE)	
+		, preroll_count_(0)
+		, embedded_audio_(embedded_audio)
		, executor_(print())
	{
-		if(!BlueVelvetFactory4 || (embed_audio_ && (!encode_hanc_frame || !encode_hanc_frame)))
-			BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info("Bluefish drivers not found."));
-	}
-
-	~implementation()
-	{
-		if(executor_.is_running())
-		{
-			executor_.invoke([&]
-			{
-				disable_video_output();
-
-				if(blue_)
-					blue_->device_detach();	
-			});
-		}
-		
-		CASPAR_LOG(info) << print() << L" Shutting down.";	
-	}
-
-	void initialize(const core::video_format_desc& format_desc)
-	{	
-		format_desc_ = format_desc;
-
-		blue_.reset(BlueVelvetFactory4());
-
-		if(BLUE_FAIL(blue_->device_attach(device_index_, FALSE))) 
-			BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info(narrow(print()) + " Failed to attach device."));
-	
-		int videoCardType = blue_->has_video_cardtype();
-		model_name_ = get_card_desc(videoCardType);
+		// Bound the queue so send() back-pressures instead of growing unboundedly.
+		executor_.set_capacity(CONSUMER_BUFFER_DEPTH);

		graph_ = diagnostics::create_graph(narrow(print()));
		graph_->add_guide("tick-time", 0.5);
		graph_->set_color("tick-time", diagnostics::color(0.1f, 0.7f, 0.8f));
+		graph_->add_guide("frame-time", 0.5f);	
+		graph_->set_color("frame-time", diagnostics::color(1.0f, 0.0f, 0.0f));
+		graph_->set_color("sync-time", diagnostics::color(0.5f, 1.0f, 0.2f));
+		graph_->set_color("input-buffer", diagnostics::color(1.0f, 1.0f, 0.0f));

-		//void* pBlueDevice = blue_attach_to_device(1);
-		//EBlueConnectorPropertySetting video_routing[1];
-		//auto channel = BLUE_VIDEO_OUTPUT_CHANNEL_A;
-		//video_routing[0].channel = channel;	
-		//video_routing[0].propType = BLUE_CONNECTOR_PROP_SINGLE_LINK;
-		//video_routing[0].connector = channel == BLUE_VIDEO_OUTPUT_CHANNEL_A ? BLUE_CONNECTOR_SDI_OUTPUT_A : BLUE_CONNECTOR_SDI_OUTPUT_B;
-		//blue_set_connector_property(pBlueDevice, 1, video_routing);
-		//blue_detach_from_device(&pBlueDevice);
-		
-		auto desiredVideoFormat = vid_fmt_from_video_format(format_desc_.format);
-		int videoModeCount = blue_->count_video_mode();
-		for(int videoModeIndex = 1; videoModeIndex <= videoModeCount; ++videoModeIndex) 
-		{
-			EVideoMode videoMode = blue_->enum_video_mode(videoModeIndex);
-			if(videoMode == desiredVideoFormat) 
-				vid_fmt_ = videoMode; 
-		}
-		if(vid_fmt_ == VID_FMT_INVALID)
-			BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info(narrow(print()) + " Failed to set videomode."));
-		
-		// Set default video output channel
-		//if(BLUE_FAIL(set_card_property(blue_, DEFAULT_VIDEO_OUTPUT_CHANNEL, channel)))
-		//	CASPAR_LOG(error) << TEXT("BLUECARD ERROR: Failed to set default channel. (device ") << device_index_ << TEXT(")");
-
		//Setting output Video mode
		if(BLUE_FAIL(set_card_property(blue_, VIDEO_MODE, vid_fmt_))) 
			BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info(narrow(print()) + " Failed to set videomode."));
		if(BLUE_FAIL(set_card_property(blue_, VIDEO_PREDEFINED_COLOR_MATRIX, vid_fmt_ == VID_FMT_PAL ? MATRIX_601_CGR : MATRIX_709_CGR)))
			CASPAR_LOG(warning) << print() << TEXT(" Failed to set colormatrix to ") << (vid_fmt_ == VID_FMT_PAL ? TEXT("601 CGR") : TEXT("709 CGR")) << TEXT(".");

-		if(!embed_audio_)
+		if(!embedded_audio_)
		{
			if(BLUE_FAIL(set_card_property(blue_, EMBEDEDDED_AUDIO_OUTPUT, 0))) 
				CASPAR_LOG(warning) << TEXT("BLUECARD ERROR: Failed to disable embedded audio.");	
		if(blue_->GetHDCardType(device_index_) != CRD_HD_INVALID) 
			blue_->Set_DownConverterSignalType(vid_fmt_ == VID_FMT_PAL ? SD_SDI : HD_SDI);	

-		if(BLUE_FAIL(blue_->set_video_engine(engine_mode_)))
+		// NOTE(review): reinterpret_cast assumes EEngineMode and unsigned long
+		// have identical representation — presumably true for this SDK; confirm.
+		if(BLUE_FAIL(blue_->set_video_engine(*reinterpret_cast<unsigned long*>(&engine_mode_))))
			BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info(narrow(print()) + " Failed to set video engine."));

		enable_video_output();

		for(size_t n = 0; n < reserved_frames_.size(); ++n)
-			reserved_frames_[n] = std::make_shared<blue_dma_buffer>(format_desc_.size, n);	
-				
-		executor_.start();
-		active_ = executor_.begin_invoke([]{});
-
-		CASPAR_LOG(info) << print() << TEXT(" Successfully initialized for ") << format_desc_ << TEXT(".");
+			reserved_frames_[n] = std::make_shared<blue_dma_buffer>(format_desc_.size, n);	
+			
+		CASPAR_LOG(info) << print() << L" Successfully Initialized.";
	}
+
+	// Tears down on the executor thread so detach never races a queued frame.
+	~bluefish_consumer()
+	{
+		executor_.invoke([&]
+		{
+			disable_video_output();
+			blue_->device_detach();	
+		});

+		CASPAR_LOG(info) << print() << L" Shutting down.";	
+	}
+	
+	const core::video_format_desc& get_video_format_desc() const
+	{
+		return format_desc_;
+	}
+
	void enable_video_output()
	{
		if(!BLUE_PASS(set_card_property(blue_, VIDEO_BLACKGENERATOR, 0)))
		if(!BLUE_PASS(set_card_property(blue_, VIDEO_BLACKGENERATOR, 1)))
			CASPAR_LOG(error)<< print() << TEXT(" Failed to disable video output.");	
	}
-
-	virtual void send(const safe_ptr<const core::read_frame>& frame)
-	{			
-		static std::vector<short> silence(MAX_HANC_BUFFER_SIZE, 0);
+	
+	// Queues one frame for output. On the first call, empty preroll frames
+	// fill the executor queue so playback starts with a steady buffer level.
+	void send(const safe_ptr<const core::read_frame>& frame)
+	{	
+		if(preroll_count_ < executor_.capacity())
+		{
+			while(preroll_count_++ < executor_.capacity())
+				schedule_next_video(make_safe<core::read_frame>());
+		}

-		size_t audio_samples = static_cast<size_t>(48000.0 / format_desc_.fps);
-		size_t audio_nchannels = 2;
+		schedule_next_video(frame);			
+	}
+	
+	void schedule_next_video(const safe_ptr<const core::read_frame>& frame)
+	{
+		static std::vector<short> silence(MAX_HANC_BUFFER_SIZE, 0);

-		active_.get();
-		active_ = executor_.begin_invoke([=]
+		executor_.begin_invoke([=]
		{
			try
			{
-				fast_memcpy(reserved_frames_.front()->image_data(), frame->image_data().begin(), frame->image_data().size());
+				const size_t audio_samples = format_desc_.audio_samples_per_frame;
+				const size_t audio_nchannels = format_desc_.audio_channels;
+
+				frame_timer_.restart();

-				if(embed_audio_)
+				// Empty image => output black (cleared buffer) instead of stale data.
+				if(!frame->image_data().empty())
+					fast_memcpy(reserved_frames_.front()->image_data(), frame->image_data().begin(), frame->image_data().size());
+				else
+					fast_memclr(reserved_frames_.front()->image_data(), reserved_frames_.front()->image_size());
+
+				// Block until the card's vertical sync before handing over the buffer.
+				sync_timer_.restart();
+				unsigned long n_field = 0;
+				blue_->wait_output_video_synch(UPD_FMT_FRAME, n_field);
+				graph_->update_value("sync-time", static_cast<float>(sync_timer_.elapsed()*format_desc_.fps*0.5));
+
+				if(embedded_audio_)
				{		
					auto frame_audio_data = frame->audio_data().empty() ? silence.data() : const_cast<short*>(frame->audio_data().begin());

						CASPAR_LOG(warning) << print() << TEXT(" render_buffer_update failed.");
				}

-				unsigned long n_field = 0;
-				blue_->wait_output_video_synch(UPD_FMT_FRAME, n_field);
-
				std::rotate(reserved_frames_.begin(), reserved_frames_.begin() + 1, reserved_frames_.end());
-				graph_->update_value("tick-time", static_cast<float>(perf_timer_.elapsed()/format_desc_.interval*0.5));
-				perf_timer_.restart();
+				
+				graph_->update_value("frame-time", static_cast<float>(frame_timer_.elapsed()*format_desc_.fps*0.5));
+
+				graph_->update_value("tick-time", static_cast<float>(tick_timer_.elapsed()*format_desc_.fps*0.5));
+				tick_timer_.restart();
			}
			catch(...)
			{
				CASPAR_LOG_CURRENT_EXCEPTION();
			}
+			graph_->set_value("input-buffer", static_cast<double>(executor_.size())/static_cast<double>(executor_.capacity()));
		});
+		// NOTE(review): "input-buffer" is updated both here and inside the task;
+		// presumably intentional (producer and consumer side) — confirm.
+		graph_->set_value("input-buffer", static_cast<double>(executor_.size())/static_cast<double>(executor_.capacity()));
	}

-	virtual size_t buffer_depth() const{return 1;}
-
	void encode_hanc(BLUE_UINT32* hanc_data, void* audio_data, size_t audio_samples, size_t audio_nchannels)
	{	
		auto card_type = blue_->has_video_cardtype();

	std::wstring print() const
	{
-		return model_name_ + L" [" + boost::lexical_cast<std::wstring>(device_index_) + L"]";
+		return model_name_ + L" [" + boost::lexical_cast<std::wstring>(device_index_) + L"|" +  format_desc_.name + L"]";
	}
};
\r
-bluefish_consumer::bluefish_consumer(unsigned int device_index, bool embed_audio) : impl_(new implementation(device_index, embed_audio)){} \r
-bluefish_consumer::bluefish_consumer(bluefish_consumer&& other) : impl_(std::move(other.impl_)){}\r
-void bluefish_consumer::initialize(const core::video_format_desc& format_desc)\r
+struct bluefish_consumer_proxy : public core::frame_consumer\r
{\r
- // TODO: Ugly\r
- impl_.reset(new implementation(impl_->device_index_, impl_->embed_audio_));\r
- impl_->initialize(format_desc);\r
-}\r
-void bluefish_consumer::send(const safe_ptr<const core::read_frame>& frame){impl_->send(frame);}\r
-size_t bluefish_consumer::buffer_depth() const{return impl_->buffer_depth();}\r
-std::wstring bluefish_consumer::print() const {return impl_->print();} \r
+ std::unique_ptr<bluefish_consumer> consumer_;\r
+ const size_t device_index_;\r
+ const bool embedded_audio_;\r
+ bool key_only_;\r
+public:\r
+\r
+ bluefish_consumer_proxy(size_t device_index, bool embedded_audio, bool key_only)\r
+ : device_index_(device_index)\r
+ , embedded_audio_(embedded_audio)\r
+ , key_only_(key_only){}\r
+ \r
+ virtual void initialize(const core::video_format_desc& format_desc)\r
+ {\r
+ consumer_.reset(new bluefish_consumer(format_desc, device_index_, embedded_audio_));\r
+ }\r
+ \r
+ virtual void send(const safe_ptr<const core::read_frame>& frame)\r
+ {\r
+ consumer_->send(frame);\r
+ }\r
+\r
+ virtual const core::video_format_desc& get_video_format_desc() const\r
+ {\r
+ return consumer_->get_video_format_desc();\r
+ }\r
+ \r
+ virtual std::wstring print() const\r
+ {\r
+ return consumer_->print();\r
+ }\r
+\r
+ virtual bool key_only() const\r
+ {\r
+ return key_only_;\r
+ }\r
+}; \r
\r
std::wstring get_bluefish_version()
{

	for(int n = 1; BLUE_PASS(blue->device_attach(n, FALSE)); ++n)
	{	
-		devices.push_back(L"[" + boost::lexical_cast<std::wstring>(n) + L"] " + get_card_desc(blue->has_video_cardtype()));
+		// List entries as "<card name> [<index>]" to match consumer print() format.
+		devices.push_back(std::wstring(get_card_desc(blue->has_video_cardtype())) + L" [" + boost::lexical_cast<std::wstring>(n) + L"]");
		blue->device_detach();	
	}
}
	return core::frame_consumer::empty();

	int device_index = 1;
-	bool embed_audio = false;
-
	if(params.size() > 1)
-		device_index = lexical_cast_or_default<int>(params[2]);
+		device_index = lexical_cast_or_default<int>(params[1], 1);
+	// NOTE(review): old code indexed params[2] under a size() > 1 guard — the
+	// params[1] access with an explicit default fixes that off-by-one.
+
+	// Flag-style options: presence of the token anywhere in params enables it.
+	bool embedded_audio = std::find(params.begin(), params.end(), L"EMBEDDED_AUDIO") != params.end();
+	bool key_only = std::find(params.begin(), params.end(), L"KEY_ONLY") != params.end();
+
+	return make_safe<bluefish_consumer_proxy>(device_index, embedded_audio, key_only);
+}
\r
- if(params.size() > 2) \r
- embed_audio = lexical_cast_or_default<bool>(params[3]);\r
+// Constructs a Bluefish consumer from a configuration property-tree node.
+// Recognized keys: "device" (1-based card index, default 1),
+// "embedded-audio" (default false) and "key-only" (default false).
+safe_ptr<core::frame_consumer> create_bluefish_consumer(const boost::property_tree::ptree& ptree) 
+{	
+	auto device_index	= ptree.get("device", 1);
+	auto embedded_audio	= ptree.get("embedded-audio", false);
+	auto key_only		= ptree.get("key-only", false);	// auto, consistent with siblings

-	return make_safe<bluefish_consumer>(device_index, embed_audio);
+	return make_safe<bluefish_consumer_proxy>(device_index, embedded_audio, key_only);
}
\r
}
\ No newline at end of file