/*\r
-* copyright (c) 2010 Sveriges Television AB <info@casparcg.com>\r
+* Copyright (c) 2011 Sveriges Television AB <info@casparcg.com>\r
*\r
-* This file is part of CasparCG.\r
+* This file is part of CasparCG (www.casparcg.com).\r
*\r
-* CasparCG is free software: you can redistribute it and/or modify\r
-* it under the terms of the GNU General Public License as published by\r
-* the Free Software Foundation, either version 3 of the License, or\r
-* (at your option) any later version.\r
+* CasparCG is free software: you can redistribute it and/or modify\r
+* it under the terms of the GNU General Public License as published by\r
+* the Free Software Foundation, either version 3 of the License, or\r
+* (at your option) any later version.\r
*\r
-* CasparCG is distributed in the hope that it will be useful,\r
-* but WITHOUT ANY WARRANTY; without even the implied warranty of\r
-* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\r
-* GNU General Public License for more details.\r
-\r
-* You should have received a copy of the GNU General Public License\r
-* along with CasparCG. If not, see <http://www.gnu.org/licenses/>.\r
+* CasparCG is distributed in the hope that it will be useful,\r
+* but WITHOUT ANY WARRANTY; without even the implied warranty of\r
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\r
+* GNU General Public License for more details.\r
+*\r
+* You should have received a copy of the GNU General Public License\r
+* along with CasparCG. If not, see <http://www.gnu.org/licenses/>.\r
*\r
+* Author: Robert Nagy, ronag89@gmail.com\r
*/\r
\r
#include "../StdAfx.h"\r
\r
#include "bluefish_consumer.h"\r
-#include "../util/util.h"\r
+#include "../util/blue_velvet.h"\r
#include "../util/memory.h"\r
\r
+#include <core/video_format.h>\r
#include <core/mixer/read_frame.h>\r
\r
#include <common/concurrency/executor.h>\r
#include <common/diagnostics/graph.h>\r
-#include <common/memory/memcpy.h>\r
-#include <common/memory/memclr.h>\r
+#include <common/memory/memshfl.h>\r
#include <common/utility/timer.h>\r
+#include <common/utility/assert.h>\r
+\r
+#include <core/consumer/frame_consumer.h>\r
+#include <core/mixer/audio/audio_util.h>\r
\r
#include <tbb/concurrent_queue.h>\r
\r
#include <boost/timer.hpp>\r
+#include <boost/range/algorithm.hpp>\r
+#include <boost/property_tree/ptree.hpp>\r
\r
-#include <BlueVelvet4.h>\r
-#include <BlueHancUtils.h>\r
+#include <asmlib.h>\r
\r
#include <memory>\r
#include <array>\r
\r
-namespace caspar { \r
- \r
-CBlueVelvet4* (*BlueVelvetFactory4)() = nullptr;\r
-const char* (*BlueVelvetVersion)() = nullptr;\r
-BLUE_UINT32 (*encode_hanc_frame)(struct hanc_stream_info_struct * hanc_stream_ptr, void * audio_pcm_ptr,BLUE_UINT32 no_audio_ch,BLUE_UINT32 no_audio_samples,BLUE_UINT32 nTypeOfSample,BLUE_UINT32 emb_audio_flag) = nullptr;\r
-BLUE_UINT32 (*encode_hanc_frame_ex)(BLUE_UINT32 card_type, struct hanc_stream_info_struct * hanc_stream_ptr, void * audio_pcm_ptr, BLUE_UINT32 no_audio_ch, BLUE_UINT32 no_audio_samples, BLUE_UINT32 nTypeOfSample, BLUE_UINT32 emb_audio_flag) = nullptr;\r
-\r
-void blue_velvet_initialize()\r
-{\r
-#ifdef _DEBUG\r
- auto module = LoadLibrary(L"BlueVelvet3_d.dll");\r
-#else\r
- auto module = LoadLibrary(L"BlueVelvet3.dll");\r
-#endif\r
- if(!module)\r
- BOOST_THROW_EXCEPTION(file_not_found() << msg_info("Could not find BlueVelvet3.dll"));\r
- static std::shared_ptr<void> lib(module, FreeLibrary);\r
- BlueVelvetFactory4 = reinterpret_cast<decltype(BlueVelvetFactory4)>(GetProcAddress(module, "BlueVelvetFactory4"));\r
- BlueVelvetVersion = reinterpret_cast<decltype(BlueVelvetVersion)>(GetProcAddress(module, "BlueVelvetVersion"));\r
-}\r
-\r
-void blue_hanc_initialize()\r
-{\r
-#ifdef _DEBUG\r
- auto module = LoadLibrary(L"BlueHancUtils_d.dll");\r
-#else\r
- auto module = LoadLibrary(L"BlueHancUtils.dll");\r
-#endif\r
- if(!module)\r
- BOOST_THROW_EXCEPTION(file_not_found() << msg_info("Could not find BlueHancUtils.dll"));\r
- static std::shared_ptr<void> lib(module, FreeLibrary);\r
- encode_hanc_frame = reinterpret_cast<decltype(encode_hanc_frame)>(GetProcAddress(module, "encode_hanc_frame"));\r
- encode_hanc_frame_ex = reinterpret_cast<decltype(encode_hanc_frame_ex)>(GetProcAddress(module, "encode_hanc_frame_ex"));\r
-}\r
-\r
-void blue_initialize()\r
-{\r
- blue_velvet_initialize();\r
- blue_hanc_initialize();\r
-}\r
-\r
-safe_ptr<CBlueVelvet4> create_blue(size_t device_index)\r
-{\r
- if(!BlueVelvetFactory4 || !encode_hanc_frame || !encode_hanc_frame)\r
- BOOST_THROW_EXCEPTION(caspar_exception() << msg_info("Bluefish drivers not found."));\r
-\r
- auto blue = safe_ptr<CBlueVelvet4>(BlueVelvetFactory4());\r
- \r
- if(BLUE_FAIL(blue->device_attach(device_index, FALSE))) \r
- BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info("Failed to attach device."));\r
-\r
- return blue;\r
-}\r
-\r
-EVideoMode get_video_mode(CBlueVelvet4& blue, const core::video_format_desc& format_desc)\r
-{\r
- EVideoMode vid_fmt = VID_FMT_INVALID;\r
- auto desiredVideoFormat = vid_fmt_from_video_format(format_desc.format);\r
- int videoModeCount = blue.count_video_mode();\r
- for(int videoModeIndex = 1; videoModeIndex <= videoModeCount; ++videoModeIndex) \r
- {\r
- EVideoMode videoMode = blue.enum_video_mode(videoModeIndex);\r
- if(videoMode == desiredVideoFormat) \r
- vid_fmt = videoMode; \r
- }\r
- if(vid_fmt == VID_FMT_INVALID)\r
- BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info("Failed get videomode.") << arg_value_info(narrow(format_desc.name)));\r
-\r
- return vid_fmt;\r
-}\r
- \r
+namespace caspar { namespace bluefish { \r
+ \r
struct bluefish_consumer : boost::noncopyable\r
{\r
safe_ptr<CBlueVelvet4> blue_;\r
const unsigned int device_index_;\r
const core::video_format_desc format_desc_;\r
+ const int channel_index_;\r
\r
const std::wstring model_name_;\r
\r
- std::shared_ptr<diagnostics::graph> graph_;\r
+ safe_ptr<diagnostics::graph> graph_;\r
boost::timer frame_timer_;\r
boost::timer tick_timer_;\r
boost::timer sync_timer_; \r
- \r
- const EVideoMode vid_fmt_; \r
- const EMemoryFormat mem_fmt_;\r
- const EUpdateMethod upd_fmt_;\r
- const EResoFormat res_fmt_; \r
- EEngineMode engine_mode_;\r
- \r
- std::array<blue_dma_buffer_ptr, 4> reserved_frames_; \r
- tbb::concurrent_bounded_queue<std::shared_ptr<const core::read_frame>> frame_buffer_;\r
-\r
- int preroll_count_;\r
+ \r
+ unsigned int vid_fmt_;\r
\r
- const bool embedded_audio_;\r
+ std::array<blue_dma_buffer_ptr, 4> reserved_frames_; \r
+ tbb::concurrent_bounded_queue<std::shared_ptr<core::read_frame>> frame_buffer_;\r
\r
- executor executor_;\r
+ const bool embedded_audio_;\r
+ const bool key_only_;\r
+ \r
+ executor executor_;\r
public:\r
- bluefish_consumer(const core::video_format_desc& format_desc, unsigned int device_index, bool embedded_audio, size_t buffer_depth) \r
+ bluefish_consumer(const core::video_format_desc& format_desc, int device_index, bool embedded_audio, bool key_only, int channel_index) \r
: blue_(create_blue(device_index))\r
, device_index_(device_index)\r
, format_desc_(format_desc) \r
- , model_name_(get_card_desc(blue_->has_video_cardtype()))\r
- , vid_fmt_(get_video_mode(*blue_, format_desc)) \r
- , mem_fmt_(MEM_FMT_ARGB_PC)\r
- , upd_fmt_(UPD_FMT_FRAME)\r
- , res_fmt_(RES_FMT_NORMAL) \r
- , engine_mode_(VIDEO_ENGINE_FRAMESTORE) \r
- , preroll_count_(0)\r
+ , channel_index_(channel_index)\r
+ , model_name_(get_card_desc(*blue_))\r
+ , vid_fmt_(get_video_mode(*blue_, format_desc))\r
, embedded_audio_(embedded_audio)\r
+ , key_only_(key_only)\r
, executor_(print())\r
{\r
- executor_.set_capacity(buffer_depth);\r
+ executor_.set_capacity(1);\r
\r
- graph_ = diagnostics::create_graph(narrow(print()));\r
graph_->add_guide("tick-time", 0.5);\r
- graph_->set_color("tick-time", diagnostics::color(0.1f, 0.7f, 0.8f));\r
+ graph_->set_color("tick-time", diagnostics::color(0.0f, 0.6f, 0.9f)); \r
graph_->add_guide("frame-time", 0.5f); \r
- graph_->set_color("frame-time", diagnostics::color(1.0f, 0.0f, 0.0f));\r
- graph_->set_color("sync-time", diagnostics::color(0.5f, 1.0f, 0.2f));\r
- graph_->set_color("input-buffer", diagnostics::color(1.0f, 1.0f, 0.0f));\r
+ graph_->set_color("sync-time", diagnostics::color(1.0f, 0.0f, 0.0f));\r
+ graph_->set_color("frame-time", diagnostics::color(0.5f, 1.0f, 0.2f));\r
+ graph_->set_text(print());\r
+ diagnostics::register_graph(graph_);\r
\r
//Setting output Video mode\r
if(BLUE_FAIL(set_card_property(blue_, VIDEO_MODE, vid_fmt_))) \r
- BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info(narrow(print()) + " Failed to set videomode."));\r
+ BOOST_THROW_EXCEPTION(caspar_exception() << wmsg_info(print() + L" Failed to set videomode."));\r
\r
//Select Update Mode for output\r
- if(BLUE_FAIL(set_card_property(blue_, VIDEO_UPDATE_TYPE, upd_fmt_))) \r
- BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info(narrow(print()) + " Failed to set update type."));\r
+ if(BLUE_FAIL(set_card_property(blue_, VIDEO_UPDATE_TYPE, UPD_FMT_FRAME))) \r
+ BOOST_THROW_EXCEPTION(caspar_exception() << wmsg_info(print() + L" Failed to set update type."));\r
\r
disable_video_output();\r
\r
//Enable dual link output\r
if(BLUE_FAIL(set_card_property(blue_, VIDEO_DUAL_LINK_OUTPUT, 1)))\r
- BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info(narrow(print()) + " Failed to enable dual link."));\r
+ BOOST_THROW_EXCEPTION(caspar_exception() << wmsg_info(print() + L" Failed to enable dual link."));\r
\r
if(BLUE_FAIL(set_card_property(blue_, VIDEO_DUAL_LINK_OUTPUT_SIGNAL_FORMAT_TYPE, Signal_FormatType_4224)))\r
- BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info(narrow(print()) + " Failed to set dual link format type to 4:2:2:4."));\r
+ BOOST_THROW_EXCEPTION(caspar_exception() << wmsg_info(print() + L" Failed to set dual link format type to 4:2:2:4."));\r
\r
//Select output memory format\r
- if(BLUE_FAIL(set_card_property(blue_, VIDEO_MEMORY_FORMAT, mem_fmt_))) \r
- BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info(narrow(print()) + " Failed to set memory format."));\r
+ if(BLUE_FAIL(set_card_property(blue_, VIDEO_MEMORY_FORMAT, MEM_FMT_ARGB_PC))) \r
+ BOOST_THROW_EXCEPTION(caspar_exception() << wmsg_info(print() + L" Failed to set memory format."));\r
\r
//Select image orientation\r
if(BLUE_FAIL(set_card_property(blue_, VIDEO_IMAGE_ORIENTATION, ImageOrientation_Normal)))\r
- CASPAR_LOG(warning) << print() << TEXT(" Failed to set image orientation to normal."); \r
+ CASPAR_LOG(warning) << print() << L" Failed to set image orientation to normal."; \r
\r
// Select data range\r
if(BLUE_FAIL(set_card_property(blue_, VIDEO_RGB_DATA_RANGE, CGR_RANGE))) \r
- CASPAR_LOG(warning) << print() << TEXT(" Failed to set RGB data range to CGR."); \r
+ CASPAR_LOG(warning) << print() << L" Failed to set RGB data range to CGR."; \r
\r
if(BLUE_FAIL(set_card_property(blue_, VIDEO_PREDEFINED_COLOR_MATRIX, vid_fmt_ == VID_FMT_PAL ? MATRIX_601_CGR : MATRIX_709_CGR)))\r
- CASPAR_LOG(warning) << print() << TEXT(" Failed to set colormatrix to ") << (vid_fmt_ == VID_FMT_PAL ? TEXT("601 CGR") : TEXT("709 CGR")) << TEXT(".");\r
+ CASPAR_LOG(warning) << print() << L" Failed to set colormatrix to " << (vid_fmt_ == VID_FMT_PAL ? L"601 CGR" : L"709 CGR") << L".";\r
\r
if(!embedded_audio_)\r
{\r
if(blue_->GetHDCardType(device_index_) != CRD_HD_INVALID) \r
blue_->Set_DownConverterSignalType(vid_fmt_ == VID_FMT_PAL ? SD_SDI : HD_SDI); \r
\r
- if(BLUE_FAIL(blue_->set_video_engine(*reinterpret_cast<unsigned long*>(&engine_mode_))))\r
- BOOST_THROW_EXCEPTION(bluefish_exception() << msg_info(narrow(print()) + " Failed to set video engine."));\r
-\r
+ if(BLUE_FAIL(set_card_property(blue_, VIDEO_OUTPUT_ENGINE, VIDEO_ENGINE_FRAMESTORE))) \r
+ CASPAR_LOG(warning) << print() << TEXT(" Failed to set video engine."); \r
+ \r
enable_video_output();\r
\r
- for(size_t n = 0; n < reserved_frames_.size(); ++n)\r
- reserved_frames_[n] = std::make_shared<blue_dma_buffer>(format_desc_.size, n); \r
- \r
- CASPAR_LOG(info) << print() << L" Successfully Initialized.";\r
+ int n = 0;\r
+ boost::range::generate(reserved_frames_, [&]{return std::make_shared<blue_dma_buffer>(format_desc_.size, n++);});\r
}\r
\r
~bluefish_consumer()\r
{\r
- executor_.invoke([&]\r
+ try\r
{\r
- disable_video_output();\r
- blue_->device_detach(); \r
- });\r
- \r
- CASPAR_LOG(info) << print() << L" Shutting down."; \r
+ executor_.invoke([&]\r
+ {\r
+ disable_video_output();\r
+ blue_->device_detach(); \r
+ });\r
+ }\r
+ catch(...)\r
+ {\r
+ CASPAR_LOG_CURRENT_EXCEPTION();\r
+ }\r
}\r
\r
- const core::video_format_desc& get_video_format_desc() const\r
- {\r
- return format_desc_;\r
- }\r
-\r
void enable_video_output()\r
{\r
		if(!BLUE_PASS(set_card_property(blue_, VIDEO_BLACKGENERATOR, 0)))
			CASPAR_LOG(error)<< print() << TEXT(" Failed to enable video output.");	
	}
 
void disable_video_output()\r
{\r
+ blue_->video_playback_stop(0,0);\r
if(!BLUE_PASS(set_card_property(blue_, VIDEO_BLACKGENERATOR, 1)))\r
CASPAR_LOG(error)<< print() << TEXT(" Failed to disable video output."); \r
}\r
\r
- void send(const safe_ptr<const core::read_frame>& frame)\r
- { \r
- if(preroll_count_ < executor_.capacity())\r
- {\r
- while(preroll_count_++ < executor_.capacity())\r
- schedule_next_video(core::read_frame::empty());\r
- }\r
- \r
- schedule_next_video(frame); \r
- }\r
- \r
- void schedule_next_video(const safe_ptr<const core::read_frame>& frame)\r
- {\r
- static std::vector<short> silence(MAX_HANC_BUFFER_SIZE, 0);\r
- \r
+ void send(const safe_ptr<core::read_frame>& frame)\r
+ { \r
executor_.begin_invoke([=]\r
{\r
try\r
- {\r
- const size_t audio_samples = format_desc_.audio_samples_per_frame;\r
- const size_t audio_nchannels = format_desc_.audio_channels;\r
-\r
- frame_timer_.restart();\r
- \r
- if(!frame->image_data().empty())\r
- fast_memcpy(reserved_frames_.front()->image_data(), frame->image_data().begin(), frame->image_data().size());\r
- else\r
- fast_memclr(reserved_frames_.front()->image_data(), reserved_frames_.front()->image_size());\r
-\r
- sync_timer_.restart();\r
- unsigned long n_field = 0;\r
- blue_->wait_output_video_synch(UPD_FMT_FRAME, n_field);\r
- graph_->update_value("sync-time", static_cast<float>(sync_timer_.elapsed()*format_desc_.fps*0.5));\r
-\r
- if(embedded_audio_)\r
- { \r
- auto frame_audio_data = frame->audio_data().empty() ? silence.data() : const_cast<short*>(frame->audio_data().begin());\r
-\r
- encode_hanc(reinterpret_cast<BLUE_UINT32*>(reserved_frames_.front()->hanc_data()), frame_audio_data, audio_samples, audio_nchannels);\r
- \r
- blue_->system_buffer_write_async(const_cast<unsigned char*>(reserved_frames_.front()->image_data()), \r
- reserved_frames_.front()->image_size(), \r
- nullptr, \r
- BlueImage_HANC_DMABuffer(reserved_frames_.front()->id(), BLUE_DATA_IMAGE));\r
-\r
- blue_->system_buffer_write_async(reserved_frames_.front()->hanc_data(),\r
- reserved_frames_.front()->hanc_size(), \r
- nullptr, \r
- BlueImage_HANC_DMABuffer(reserved_frames_.front()->id(), BLUE_DATA_HANC));\r
-\r
- if(BLUE_FAIL(blue_->render_buffer_update(BlueBuffer_Image_HANC(reserved_frames_.front()->id()))))\r
- CASPAR_LOG(warning) << print() << TEXT(" render_buffer_update failed.");\r
- }\r
- else\r
- {\r
- blue_->system_buffer_write_async(const_cast<unsigned char*>(reserved_frames_.front()->image_data()),\r
- reserved_frames_.front()->image_size(), \r
- nullptr, \r
- BlueImage_DMABuffer(reserved_frames_.front()->id(), BLUE_DATA_IMAGE));\r
- \r
- if(BLUE_FAIL(blue_->render_buffer_update(BlueBuffer_Image(reserved_frames_.front()->id()))))\r
- CASPAR_LOG(warning) << print() << TEXT(" render_buffer_update failed.");\r
- }\r
-\r
- std::rotate(reserved_frames_.begin(), reserved_frames_.begin() + 1, reserved_frames_.end());\r
- \r
- graph_->update_value("frame-time", static_cast<float>(frame_timer_.elapsed()*format_desc_.fps*0.5));\r
-\r
+ { \r
+ display_frame(frame); \r
graph_->update_value("tick-time", static_cast<float>(tick_timer_.elapsed()*format_desc_.fps*0.5));\r
tick_timer_.restart();\r
			}
			catch(...)
			{
CASPAR_LOG_CURRENT_EXCEPTION();\r
}\r
- graph_->set_value("input-buffer", static_cast<double>(executor_.size())/static_cast<double>(executor_.capacity()));\r
});\r
- graph_->set_value("input-buffer", static_cast<double>(executor_.size())/static_cast<double>(executor_.capacity()));\r
}\r
\r
- void encode_hanc(BLUE_UINT32* hanc_data, void* audio_data, size_t audio_samples, size_t audio_nchannels)\r
+ void display_frame(const safe_ptr<core::read_frame>& frame)\r
+ {\r
+ // Sync\r
+\r
+ sync_timer_.restart();\r
+ unsigned long n_field = 0;\r
+ blue_->wait_output_video_synch(UPD_FMT_FRAME, n_field);\r
+ graph_->update_value("sync-time", sync_timer_.elapsed()*format_desc_.fps*0.5);\r
+ \r
+ frame_timer_.restart(); \r
+\r
+ // Copy to local buffers\r
+ \r
+ if(!frame->image_data().empty())\r
+ {\r
+ if(key_only_) \r
+ aligned_memshfl(reserved_frames_.front()->image_data(), std::begin(frame->image_data()), frame->image_data().size(), 0x0F0F0F0F, 0x0B0B0B0B, 0x07070707, 0x03030303);\r
+ else\r
+ A_memcpy(reserved_frames_.front()->image_data(), std::begin(frame->image_data()), frame->image_data().size());\r
+ }\r
+ else\r
+ A_memset(reserved_frames_.front()->image_data(), 0, reserved_frames_.front()->image_size());\r
+ \r
+\r
+ // Send and display\r
+\r
+ if(embedded_audio_)\r
+ { \r
+ auto frame_audio = core::audio_32_to_24(frame->audio_data()); \r
+ encode_hanc(reinterpret_cast<BLUE_UINT32*>(reserved_frames_.front()->hanc_data()), \r
+ frame_audio.data(), \r
+ static_cast<int>(frame->audio_data().size()/format_desc_.audio_channels), \r
+ format_desc_.audio_channels);\r
+ \r
+ blue_->system_buffer_write_async(const_cast<uint8_t*>(reserved_frames_.front()->image_data()), \r
+ static_cast<unsigned long>(reserved_frames_.front()->image_size()), \r
+ nullptr, \r
+ BlueImage_HANC_DMABuffer(reserved_frames_.front()->id(), BLUE_DATA_IMAGE));\r
+\r
+ blue_->system_buffer_write_async(reserved_frames_.front()->hanc_data(),\r
+ static_cast<unsigned long>(reserved_frames_.front()->hanc_size()), \r
+ nullptr, \r
+ BlueImage_HANC_DMABuffer(reserved_frames_.front()->id(), BLUE_DATA_HANC));\r
+\r
+ if(BLUE_FAIL(blue_->render_buffer_update(BlueBuffer_Image_HANC(reserved_frames_.front()->id()))))\r
+ CASPAR_LOG(warning) << print() << TEXT(" render_buffer_update failed.");\r
+ }\r
+ else\r
+ {\r
+ blue_->system_buffer_write_async(const_cast<uint8_t*>(reserved_frames_.front()->image_data()),\r
+ static_cast<unsigned long>(reserved_frames_.front()->image_size()), \r
+ nullptr, \r
+ BlueImage_DMABuffer(reserved_frames_.front()->id(), BLUE_DATA_IMAGE));\r
+ \r
+ if(BLUE_FAIL(blue_->render_buffer_update(BlueBuffer_Image(reserved_frames_.front()->id()))))\r
+ CASPAR_LOG(warning) << print() << TEXT(" render_buffer_update failed.");\r
+ }\r
+\r
+ boost::range::rotate(reserved_frames_, std::begin(reserved_frames_)+1);\r
+ \r
+ graph_->update_value("frame-time", static_cast<float>(frame_timer_.elapsed()*format_desc_.fps*0.5));\r
+ }\r
+\r
+ void encode_hanc(BLUE_UINT32* hanc_data, void* audio_data, int audio_samples, int audio_nchannels)\r
{ \r
- auto card_type = blue_->has_video_cardtype();\r
- auto sample_type = (AUDIO_CHANNEL_16BIT | AUDIO_CHANNEL_LITTLEENDIAN);\r
+ const auto sample_type = AUDIO_CHANNEL_24BIT | AUDIO_CHANNEL_LITTLEENDIAN;\r
+ const auto emb_audio_flag = blue_emb_audio_enable | blue_emb_audio_group1_enable;\r
\r
hanc_stream_info_struct hanc_stream_info;\r
memset(&hanc_stream_info, 0, sizeof(hanc_stream_info));\r
hanc_stream_info.AudioDBNArray[1] = -1;\r
hanc_stream_info.AudioDBNArray[2] = -1;\r
hanc_stream_info.AudioDBNArray[3] = -1;\r
- hanc_stream_info.hanc_data_ptr = hanc_data;\r
- hanc_stream_info.video_mode = vid_fmt_;\r
+ hanc_stream_info.hanc_data_ptr = hanc_data;\r
+ hanc_stream_info.video_mode = vid_fmt_; \r
\r
- auto emb_audio_flag = (blue_emb_audio_enable | blue_emb_audio_group1_enable);\r
-\r
- if (!is_epoch_card(card_type))\r
+ if (!is_epoch_card(*blue_))\r
encode_hanc_frame(&hanc_stream_info, audio_data, audio_nchannels, audio_samples, sample_type, emb_audio_flag); \r
else\r
- encode_hanc_frame_ex(card_type, &hanc_stream_info, audio_data, audio_nchannels, audio_samples, sample_type, emb_audio_flag);\r
+ encode_hanc_frame_ex(blue_->has_video_cardtype(), &hanc_stream_info, audio_data, audio_nchannels, audio_samples, sample_type, emb_audio_flag);\r
}\r
\r
std::wstring print() const\r
{\r
- return model_name_ + L" [" + boost::lexical_cast<std::wstring>(device_index_) + L"|" + format_desc_.name + L"]";\r
+ return model_name_ + L" [" + boost::lexical_cast<std::wstring>(channel_index_) + L"-" + \r
+ boost::lexical_cast<std::wstring>(device_index_) + L"|" + format_desc_.name + L"]";\r
}\r
};\r
\r
struct bluefish_consumer_proxy : public core::frame_consumer\r
{\r
std::unique_ptr<bluefish_consumer> consumer_;\r
- const size_t device_index_;\r
+ const int device_index_;\r
const bool embedded_audio_;\r
- bool key_only_;\r
- size_t buffer_depth_;\r
+ const bool key_only_;\r
+ std::vector<int> audio_cadence_;\r
public:\r
\r
- bluefish_consumer_proxy(size_t device_index, bool embedded_audio, bool key_only, size_t buffer_depth)\r
+ bluefish_consumer_proxy(int device_index, bool embedded_audio, bool key_only)\r
: device_index_(device_index)\r
, embedded_audio_(embedded_audio)\r
, key_only_(key_only)\r
- , buffer_depth_(buffer_depth){}\r
- \r
- virtual void initialize(const core::video_format_desc& format_desc)\r
{\r
- consumer_.reset(new bluefish_consumer(format_desc, device_index_, embedded_audio_, buffer_depth_));\r
}\r
\r
- virtual void send(const safe_ptr<const core::read_frame>& frame)\r
+ ~bluefish_consumer_proxy()\r
{\r
- consumer_->send(frame);\r
+ if(consumer_)\r
+ {\r
+ auto str = print();\r
+ consumer_.reset();\r
+ CASPAR_LOG(info) << str << L" Successfully Uninitialized."; \r
+ }\r
}\r
\r
- virtual const core::video_format_desc& get_video_format_desc() const\r
+ // frame_consumer\r
+ \r
+ virtual void initialize(const core::video_format_desc& format_desc, int channel_index) override\r
{\r
- return consumer_->get_video_format_desc();\r
+ consumer_.reset(new bluefish_consumer(format_desc, device_index_, embedded_audio_, key_only_, channel_index));\r
+ audio_cadence_ = format_desc.audio_cadence;\r
+ CASPAR_LOG(info) << print() << L" Successfully Initialized."; \r
}\r
\r
- virtual std::wstring print() const\r
+ virtual bool send(const safe_ptr<core::read_frame>& frame) override\r
{\r
- return consumer_->print();\r
- }\r
+ CASPAR_VERIFY(audio_cadence_.front() == static_cast<size_t>(frame->audio_data().size()));\r
+ boost::range::rotate(audio_cadence_, std::begin(audio_cadence_)+1);\r
\r
- virtual bool key_only() const\r
+ consumer_->send(frame);\r
+ return true;\r
+ }\r
+ \r
+ virtual std::wstring print() const override\r
{\r
- return key_only_;\r
+ return consumer_ ? consumer_->print() : L"[bluefish_consumer]";\r
}\r
\r
- virtual size_t buffer_depth() const\r
+ virtual boost::property_tree::wptree info() const override\r
{\r
- return consumer_->executor_.capacity();\r
+ boost::property_tree::wptree info;\r
+ info.add(L"type", L"bluefish-consumer");\r
+ info.add(L"key-only", key_only_);\r
+ info.add(L"device", device_index_);\r
+ info.add(L"embedded-audio", embedded_audio_);\r
+ return info;\r
}\r
-}; \r
\r
-std::wstring get_bluefish_version()\r
-{\r
- try\r
+ int buffer_depth() const override\r
{\r
- blue_initialize();\r
+ return 1;\r
}\r
- catch(...)\r
+ \r
+ virtual int index() const override\r
{\r
- return L"Not found";\r
+ return 400 + device_index_;\r
}\r
- if(!BlueVelvetVersion)\r
- return L"Unknown";\r
-\r
- return widen(std::string(BlueVelvetVersion()));\r
-}\r
-\r
-std::vector<std::wstring> get_bluefish_device_list()\r
-{\r
- std::vector<std::wstring> devices;\r
-\r
- try\r
- { \r
- if(!BlueVelvetFactory4)\r
- return devices;\r
-\r
- std::shared_ptr<CBlueVelvet4> blue(BlueVelvetFactory4());\r
-\r
- for(int n = 1; BLUE_PASS(blue->device_attach(n, FALSE)); ++n)\r
- { \r
- devices.push_back(std::wstring(get_card_desc(blue->has_video_cardtype())) + L" [" + boost::lexical_cast<std::wstring>(n) + L"]");\r
- blue->device_detach(); \r
- }\r
- }\r
- catch(...){}\r
-\r
- return devices;\r
-}\r
+}; \r
\r
-safe_ptr<core::frame_consumer> create_bluefish_consumer(const std::vector<std::wstring>& params)\r
+safe_ptr<core::frame_consumer> create_consumer(const std::vector<std::wstring>& params)\r
{\r
if(params.size() < 1 || params[0] != L"BLUEFISH")\r
return core::frame_consumer::empty();\r
\r
- int device_index = 1;\r
- if(params.size() > 1)\r
- device_index = lexical_cast_or_default<int>(params[1], 1);\r
+ const auto device_index = params.size() > 1 ? lexical_cast_or_default<int>(params[1], 1) : 1;\r
\r
- bool embedded_audio = std::find(params.begin(), params.end(), L"EMBEDDED_AUDIO") != params.end();\r
- bool key_only = std::find(params.begin(), params.end(), L"KEY_ONLY") != params.end();\r
+ const auto embedded_audio = std::find(params.begin(), params.end(), L"EMBEDDED_AUDIO") != params.end();\r
+ const auto key_only = std::find(params.begin(), params.end(), L"KEY_ONLY") != params.end();\r
\r
- return make_safe<bluefish_consumer_proxy>(device_index, embedded_audio, key_only, 3);\r
+ return make_safe<bluefish_consumer_proxy>(device_index, embedded_audio, key_only);\r
}\r
\r
-safe_ptr<core::frame_consumer> create_bluefish_consumer(const boost::property_tree::ptree& ptree) \r
+safe_ptr<core::frame_consumer> create_consumer(const boost::property_tree::wptree& ptree) \r
{ \r
- auto device_index = ptree.get("device", 1);\r
- auto embedded_audio = ptree.get("embedded-audio", false);\r
- bool key_only = ptree.get("key-only", false);\r
- size_t buffer_depth = ptree.get("buffer-depth", 3);\r
+ const auto device_index = ptree.get(L"device", 1);\r
+ const auto embedded_audio = ptree.get(L"embedded-audio", false);\r
+ const auto key_only = ptree.get(L"key-only", false);\r
\r
- return make_safe<bluefish_consumer_proxy>(device_index, embedded_audio, key_only, buffer_depth);\r
+ return make_safe<bluefish_consumer_proxy>(device_index, embedded_audio, key_only);\r
}\r
\r
-}
\ No newline at end of file
+}}
\ No newline at end of file