#include "../util/util.h"\r
#include "../util/memory.h"\r
\r
-#include <core/consumer/frame/read_frame.h>\r
+#include <core/mixer/read_frame.h>\r
\r
#include <common/concurrency/executor.h>\r
#include <common/diagnostics/graph.h>\r
+#include <common/memory/memcpy.h>\r
#include <common/utility/timer.h>\r
\r
#include <tbb/concurrent_queue.h>\r
\r
+#include <boost/timer.hpp>\r
+\r
#include <BlueVelvet4.h>\r
#include <BlueHancUtils.h>\r
\r
blue_hanc_initialize();\r
}\r
\r
-struct bluefish_consumer::implementation : boost::noncopyable\r
+struct bluefish_consumer : boost::noncopyable\r
{\r
std::wstring model_name_;\r
const unsigned int device_index_;\r
\r
std::shared_ptr<diagnostics::graph> graph_;\r
- timer perf_timer_;\r
+ boost::timer frame_timer_;\r
+ boost::timer tick_timer_;\r
+ boost::timer sync_timer_;\r
\r
boost::unique_future<void> active_;\r
\r
std::shared_ptr<CBlueVelvet4> blue_;\r
\r
- core::video_format_desc format_desc_;\r
+ const core::video_format_desc format_desc_;\r
\r
- unsigned long mem_fmt_;\r
- unsigned long upd_fmt_;\r
- EVideoMode vid_fmt_; \r
- unsigned long res_fmt_; \r
- unsigned long engine_mode_;\r
+ const unsigned long mem_fmt_;\r
+ const unsigned long upd_fmt_;\r
+ const unsigned long res_fmt_; \r
+ unsigned long engine_mode_;\r
+ EVideoMode vid_fmt_; \r
\r
std::array<blue_dma_buffer_ptr, 3> reserved_frames_; \r
\r
- const bool embed_audio_;\r
+ const bool embedded_audio_;\r
\r
executor executor_;\r
public:\r
- implementation::implementation(unsigned int device_index, bool embed_audio) \r
+ bluefish_consumer(const core::video_format_desc& format_desc, unsigned int device_index, bool embedded_audio) \r
: model_name_(L"BLUEFISH")\r
, device_index_(device_index) \r
+ , format_desc_(format_desc)\r
, mem_fmt_(MEM_FMT_ARGB_PC)\r
, upd_fmt_(UPD_FMT_FRAME)\r
- , vid_fmt_(VID_FMT_INVALID) \r
, res_fmt_(RES_FMT_NORMAL) \r
, engine_mode_(VIDEO_ENGINE_FRAMESTORE) \r
- , embed_audio_(embed_audio)\r
- , executor_(print())\r
- {\r
- if(!BlueVelvetFactory4 || (embed_audio_ && (!encode_hanc_frame || !encode_hanc_frame)))\r
- BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info("Bluefish drivers not found."));\r
- }\r
-\r
- ~implementation()\r
+ , vid_fmt_(VID_FMT_INVALID) \r
+ , embedded_audio_(embedded_audio)\r
+ , executor_(print(), true)\r
{\r
- if(executor_.is_running())\r
- {\r
- executor_.invoke([&]\r
- {\r
- disable_video_output();\r
-\r
- if(blue_)\r
- blue_->device_detach(); \r
- });\r
- }\r
+		if(!BlueVelvetFactory4 || (embedded_audio_ && (!encode_hanc_frame || !encode_hanc_frame_ex)))
+ BOOST_THROW_EXCEPTION(caspar_exception() << msg_info("Bluefish drivers not found."));\r
\r
- CASPAR_LOG(info) << print() << L" Shutting down."; \r
- }\r
-\r
- void initialize(const core::video_format_desc& format_desc)\r
- { \r
- format_desc_ = format_desc;\r
-\r
blue_.reset(BlueVelvetFactory4());\r
\r
if(BLUE_FAIL(blue_->device_attach(device_index_, FALSE))) \r
graph_ = diagnostics::create_graph(narrow(print()));\r
graph_->add_guide("tick-time", 0.5);\r
graph_->set_color("tick-time", diagnostics::color(0.1f, 0.7f, 0.8f));\r
+ graph_->add_guide("frame-time", 0.5f); \r
+ graph_->set_color("frame-time", diagnostics::color(1.0f, 0.0f, 0.0f));\r
+		graph_->add_guide("sync-time", 0.5f);	
+ graph_->set_color("sync-time", diagnostics::color(0.5f, 1.0f, 0.2f));\r
\r
//void* pBlueDevice = blue_attach_to_device(1);\r
//EBlueConnectorPropertySetting video_routing[1];\r
if(BLUE_FAIL(set_card_property(blue_, VIDEO_PREDEFINED_COLOR_MATRIX, vid_fmt_ == VID_FMT_PAL ? MATRIX_601_CGR : MATRIX_709_CGR)))\r
CASPAR_LOG(warning) << print() << TEXT(" Failed to set colormatrix to ") << (vid_fmt_ == VID_FMT_PAL ? TEXT("601 CGR") : TEXT("709 CGR")) << TEXT(".");\r
\r
- if(!embed_audio_)\r
+ if(!embedded_audio_)\r
{\r
if(BLUE_FAIL(set_card_property(blue_, EMBEDEDDED_AUDIO_OUTPUT, 0))) \r
CASPAR_LOG(warning) << TEXT("BLUECARD ERROR: Failed to disable embedded audio."); \r
\r
for(size_t n = 0; n < reserved_frames_.size(); ++n)\r
reserved_frames_[n] = std::make_shared<blue_dma_buffer>(format_desc_.size, n); \r
- \r
- executor_.start();\r
- active_ = executor_.begin_invoke([]{});\r
\r
- CASPAR_LOG(info) << print() << TEXT(" Successfully initialized for ") << format_desc_ << TEXT(".");\r
+ active_ = executor_.begin_invoke([]{});\r
+ \r
+ CASPAR_LOG(info) << print() << L" Successfully Initialized.";\r
}\r
+\r
+ ~bluefish_consumer()\r
+ {\r
+ executor_.clear();\r
+ executor_.invoke([&]\r
+ {\r
+ disable_video_output();\r
+\r
+ if(blue_)\r
+ blue_->device_detach(); \r
+ });\r
\r
+ CASPAR_LOG(info) << print() << L" Shutting down."; \r
+ }\r
+\r
void enable_video_output()\r
{\r
if(!BLUE_PASS(set_card_property(blue_, VIDEO_BLACKGENERATOR, 0)))\r
CASPAR_LOG(error)<< print() << TEXT(" Failed to disable video output."); \r
}\r
\r
- virtual void send(const safe_ptr<const core::read_frame>& frame)\r
+ void send(const safe_ptr<const core::read_frame>& frame)\r
{ \r
static std::vector<short> silence(MAX_HANC_BUFFER_SIZE, 0);\r
- \r
- size_t audio_samples = static_cast<size_t>(48000.0 / format_desc_.fps);\r
- size_t audio_nchannels = 2;\r
- \r
+ \r
active_.get();\r
active_ = executor_.begin_invoke([=]\r
{\r
try\r
{\r
- std::copy_n(frame->image_data().begin(), frame->image_data().size(), reserved_frames_.front()->image_data());\r
+ frame_timer_.restart();\r
+\r
+ const size_t audio_samples = static_cast<size_t>(48000.0 / format_desc_.fps);\r
+ const size_t audio_nchannels = 2;\r
+ \r
+ fast_memcpy(reserved_frames_.front()->image_data(), frame->image_data().begin(), frame->image_data().size());\r
\r
- if(embed_audio_)\r
+ sync_timer_.restart();\r
+ unsigned long n_field = 0;\r
+ blue_->wait_output_video_synch(UPD_FMT_FRAME, n_field);\r
+ graph_->update_value("sync-time", static_cast<float>(sync_timer_.elapsed()*format_desc_.fps*0.5));\r
+\r
+ if(embedded_audio_)\r
{ \r
auto frame_audio_data = frame->audio_data().empty() ? silence.data() : const_cast<short*>(frame->audio_data().begin());\r
\r
CASPAR_LOG(warning) << print() << TEXT(" render_buffer_update failed.");\r
}\r
\r
- unsigned long n_field = 0;\r
- blue_->wait_output_video_synch(UPD_FMT_FRAME, n_field);\r
-\r
std::rotate(reserved_frames_.begin(), reserved_frames_.begin() + 1, reserved_frames_.end());\r
- graph_->update_value("tick-time", static_cast<float>(perf_timer_.elapsed()/format_desc_.interval*0.5));\r
- perf_timer_.reset();\r
+ \r
+ graph_->update_value("frame-time", static_cast<float>(frame_timer_.elapsed()*format_desc_.fps*0.5));\r
+\r
+ graph_->update_value("tick-time", static_cast<float>(tick_timer_.elapsed()*format_desc_.fps*0.5));\r
+ tick_timer_.restart();\r
}\r
catch(...)\r
{\r
});\r
}\r
\r
- virtual size_t buffer_depth() const{return 1;}\r
-\r
void encode_hanc(BLUE_UINT32* hanc_data, void* audio_data, size_t audio_samples, size_t audio_nchannels)\r
{ \r
auto card_type = blue_->has_video_cardtype();\r
\r
std::wstring print() const\r
{\r
- return model_name_ + L" [" + boost::lexical_cast<std::wstring>(device_index_) + L"]";\r
+ return model_name_ + L" [" + boost::lexical_cast<std::wstring>(device_index_) + L"|" + format_desc_.name + L"]";\r
}\r
};\r
\r
-bluefish_consumer::bluefish_consumer(unsigned int device_index, bool embed_audio) : impl_(new implementation(device_index, embed_audio)){} \r
-bluefish_consumer::bluefish_consumer(bluefish_consumer&& other) : impl_(std::move(other.impl_)){}\r
-void bluefish_consumer::initialize(const core::video_format_desc& format_desc)\r
+struct bluefish_consumer_proxy : public core::frame_consumer\r
{\r
- impl_.reset(new implementation(impl_->device_index_, impl_->embed_audio_));\r
- impl_->initialize(format_desc);\r
-}\r
-void bluefish_consumer::send(const safe_ptr<const core::read_frame>& frame){impl_->send(frame);}\r
-size_t bluefish_consumer::buffer_depth() const{return impl_->buffer_depth();}\r
-std::wstring bluefish_consumer::print() const {return impl_->print();} \r
+ std::unique_ptr<bluefish_consumer> consumer_;\r
+ const size_t device_index_;\r
+ const bool embedded_audio_;\r
+ bool key_only_;\r
+public:\r
+\r
+ bluefish_consumer_proxy(size_t device_index, bool embedded_audio, bool key_only)\r
+ : device_index_(device_index)\r
+ , embedded_audio_(embedded_audio)\r
+ , key_only_(key_only){}\r
+ \r
+ virtual void initialize(const core::video_format_desc& format_desc)\r
+ {\r
+ consumer_.reset(new bluefish_consumer(format_desc, device_index_, embedded_audio_));\r
+ }\r
+ \r
+ virtual void send(const safe_ptr<const core::read_frame>& frame)\r
+ {\r
+ consumer_->send(frame);\r
+ }\r
+ \r
+ virtual std::wstring print() const\r
+ {\r
+ return consumer_->print();\r
+ }\r
+\r
+ virtual bool key_only() const\r
+ {\r
+ return key_only_;\r
+ }\r
+}; \r
\r
std::wstring get_bluefish_version()\r
{\r
\r
for(int n = 1; BLUE_PASS(blue->device_attach(n, FALSE)); ++n)\r
{ \r
- devices.push_back(L"[" + boost::lexical_cast<std::wstring>(n) + L"] " + get_card_desc(blue->has_video_cardtype()));\r
+ devices.push_back(std::wstring(get_card_desc(blue->has_video_cardtype())) + L" [" + boost::lexical_cast<std::wstring>(n) + L"]");\r
blue->device_detach(); \r
}\r
}\r
return core::frame_consumer::empty();\r
\r
int device_index = 1;\r
- bool embed_audio = false;\r
-\r
if(params.size() > 1)\r
- device_index = lexical_cast_or_default<int>(params[2]);\r
+ device_index = lexical_cast_or_default<int>(params[1], 1);\r
+\r
+ bool embedded_audio = std::find(params.begin(), params.end(), L"EMBEDDED_AUDIO") != params.end();\r
+ bool key_only = std::find(params.begin(), params.end(), L"KEY_ONLY") != params.end();\r
+\r
+ return make_safe<bluefish_consumer_proxy>(device_index, embedded_audio, key_only);\r
+}\r
\r
- if(params.size() > 2) \r
- embed_audio = lexical_cast_or_default<bool>(params[3]);\r
+safe_ptr<core::frame_consumer> create_bluefish_consumer(const boost::property_tree::ptree& ptree) \r
+{ \r
+	auto device_index = ptree.get("device", 1);
+ auto embedded_audio = ptree.get("embedded-audio", false);\r
+ bool key_only = (ptree.get("output", "fill_and_key") == "key_only");\r
\r
- return make_safe<bluefish_consumer>(device_index, embed_audio);\r
+ return make_safe<bluefish_consumer_proxy>(device_index, embedded_audio, key_only);\r
}\r
\r
}
\ No newline at end of file