--- /dev/null
+// The following ifdef block is the standard way of creating macros which make exporting
+// from a DLL simpler. All files within this DLL are compiled with the BLUEFISH_EXPORTS
+// symbol defined on the command line. This symbol should not be defined on any project
+// that uses this DLL. This way any other project whose source files include this file see
+// BLUE_CSDK_API functions as being imported from a DLL, whereas this DLL sees symbols
+// defined with this macro as being exported.
+#ifdef BLUEFISH_EXPORTS\r
+#define BLUE_CSDK_API __declspec(dllexport)\r
+#else\r
+#define BLUE_CSDK_API __declspec(dllimport)\r
+#endif\r
+#include "BlueTypes.h"\r
+#include "BlueDriver_p.h"\r
+\r
+#define IGNORE_SYNC_WAIT_TIMEOUT_VALUE (0xFFFFFFFF)\r
+\r
+\r
+#pragma once\r
+\r
+//\r
+//typedef enum _blue_video_engine_flags\r
+//{\r
+// BLUE_CAPTURE_PREVIEW_ON_OUTPUT_CHANNEL_A=0x1,\r
+// BLUE_CAPTURE_PREVIEW_ON_OUTPUT_CHANNEL_B=0x2,\r
+// BLUE_CAPTURE_PREVIEW_ON_OUTPUT_CHANNEL_C=0x3,\r
+// BLUE_CAPTURE_PREVIEW_ON_OUTPUT_CHANNEL_D=0x4\r
+//}blue_video_engine_flags;\r
+//\r
+/** Direction of a DMA transfer between host memory and the card framestore. */
+typedef enum _EBLUEDMA_DIRECTION
+{
+	BLUEDMA_WRITE=0,   /**< host -> card (playback) */
+	BLUEDMA_READ=1,    /**< card -> host (capture) */
+	BLUEDMA_INVALID=3  /* NOTE(review): value 2 is skipped here — confirm this is intentional */
+}EBLUEDMA_DIRECTION;
+\r
+/** One connector-property setting: identifies the channel/connector/direction
+    and carries the property value in a Windows VARIANT. */
+typedef struct _EBlueConnectorPropertySetting
+{
+	EBlueVideoChannel channel;                    /**< video channel the setting applies to */
+	EBlueConnectorIdentifier connector;           /**< physical connector being configured */
+	EBlueConnectorSignalDirection signaldirection;/**< input or output direction */
+	EBlueConnectorProperty propType;              /**< which property is being set/queried */
+	VARIANT Value;                                /**< property value (type depends on propType) */
+}EBlueConnectorPropertySetting;
+\r
+\r
+/** One card-property setting used by blue_set/get_video_property:
+    selects a channel and property, with the value in a Windows VARIANT. */
+typedef struct _blue_card_property
+{
+	EBlueVideoChannel video_channel; /**< channel the property applies to */
+	EBlueCardProperty prop;          /**< property identifier */
+	VARIANT value;                   /**< property value (type depends on prop) */
+}blue_card_property;
+\r
+\r
+extern "C" {
+/** Returns the number of Bluefish devices present in the system. */
+BLUE_CSDK_API unsigned int blue_device_count();
+/** Attaches to device device_id and returns an opaque device handle (NULL semantics not shown here — TODO confirm failure value). */
+BLUE_CSDK_API void * blue_attach_to_device(int device_id);
+/** Detaches from a device previously attached with blue_attach_to_device; takes the handle by address. */
+BLUE_CSDK_API BERR blue_detach_from_device(void ** device_handle);
+
+//
+//BLUE_CSDK_API BERR blue_set_video_engine(	void * device_handle,
+//											EBlueVideoChannel video_channel,
+//											EEngineMode * video_engine,
+//											BLUE_UINT32 video_engine_flag);
+/** DMA a video frame to/from the card.
+    dma_direction selects read/write, pFrameBuffer is the host buffer,
+    FrameSize is the transfer size in bytes, BufferId selects the card buffer,
+    dma_bytes_transferred receives the byte count, CardFrameOffset is the byte
+    offset into the card frame, and pAsync (Win32 OVERLAPPED) enables
+    asynchronous completion when non-NULL. */
+BLUE_CSDK_API BERR blue_video_dma(	void * device_handle,
+									EBLUEDMA_DIRECTION dma_direction,
+									void * pFrameBuffer,
+									BLUE_UINT32 FrameSize,
+									BLUE_UINT32 BufferId,
+									BLUE_UINT32 * dma_bytes_transferred,
+									BLUE_UINT32 CardFrameOffset,
+									OVERLAPPED	* pAsync
+									);
+
+
+/*
+	functions used to set the card property . This includes the video property 
+	and the video connection/routing property.
+*/
+/** Sets prop_count card properties from the card_prop array. */
+BLUE_CSDK_API BERR blue_set_video_property(	void * device_handle,
+											BLUE_UINT32 prop_count,
+											blue_card_property * card_prop);
+
+/** Queries prop_count card properties; results are written into card_prop. */
+BLUE_CSDK_API BERR blue_get_video_property(	void * device_handle,
+											BLUE_UINT32 prop_count,
+											blue_card_property * card_prop);
+
+/** Queries connector properties for VideoChannel (two-call pattern: pass
+    settings==NULL to get the required element count in *settingsCount). */
+BLUE_CSDK_API BERR blue_get_connector_property(void * device_handle,
+										BLUE_UINT32 VideoChannel,
+										BLUE_UINT32 *settingsCount,	// In: element count of settings array
+																	// Out: if settings is non-NULL, number of valid elements
+																	// Out: if settings is NULL, number of elements required
+										EBlueConnectorPropertySetting *settings	// Caller allocates/frees memory
+										);
+
+/** Applies settingsCount connector-property settings. */
+BLUE_CSDK_API BERR blue_set_connector_property(
+										void * device_handle,
+										BLUE_UINT32 settingsCount,
+										EBlueConnectorPropertySetting *settings
+										);
+
+//BLUE_CSDK_API BERR blue_wait_video_interrupt(void * device_handle,
+//										EBlueVideoChannel video_channel,
+//										EUpdateMethod upd_fmt,
+//										BLUE_UINT32 field_wait_count,
+//										BLUE_UINT32 *field_count
+//										);
+}
\ No newline at end of file
--- /dev/null
+/*\r
+ $Id: BlueDriver_p.h,v 1.62.2.16 2011/10/26 05:33:18 tim Exp $\r
+*/\r
+#pragma once \r
+#define _BLUEDRIVER_P_H__\r
+#define BLUE_UINT32 unsigned int \r
+#define BLUE_INT32 int \r
+#define BLUE_UINT8 unsigned char\r
+#define BLUE_INT8 char\r
+#define BLUE_UINT16 unsigned short\r
+#define BLUE_INT16 short\r
+#pragma once\r
+#ifdef BLUE_LINUX_CODE\r
+#define BLUE_UINT64 unsigned long long \r
+//#include <asm/types.h>\r
+#else\r
+#define BLUE_UINT64 unsigned __int64\r
+#endif \r
+\r
+//#define ORAC_FILMPOST_FIRMWARE_PRODUCTID (0x1)\r
+//#define ORAC_BROADCAST_FIRMWARE_PRODUCTID (0x2)\r
+//#define ORAC_ASI_FIRMWARE_PRODUCTID (0x3)\r
+//#define ORAC_4SDIINPUT_FIRMWARE_PRODUCTID (0x4)\r
+//#define ORAC_4SDIOUTPUT_FIRMWARE_PRODUCTID (0x5)\r
+\r
+\r
+\r
+/**\r
+ * This contains the enumerators that can be used to set the cards \n\r
+ * video output and also to determine the video mode of the incoming \n\r
+ * video signal.\r
+ */ \r
+typedef enum _EVideoMode
+{
+	/* NOTE: VID_FMT_PAL/VID_FMT_NTSC are legacy aliases of the 576I/486I values below. */
+	VID_FMT_PAL=0,
+	VID_FMT_NTSC=1,
+	VID_FMT_576I_5000=0,		/**< 720  x 576  50       Interlaced */
+	VID_FMT_486I_5994=1,		/**< 720  x 486  60/1.001 Interlaced */
+	VID_FMT_720P_5994,			/**< 1280 x 720  60/1.001 Progressive */
+	VID_FMT_720P_6000,			/**< 1280 x 720  60       Progressive */
+	VID_FMT_1080PSF_2397,		/**< 1920 x 1080 24/1.001 Segment Frame */
+	VID_FMT_1080PSF_2400,		/**< 1920 x 1080 24       Segment Frame */
+	VID_FMT_1080P_2397,			/**< 1920 x 1080 24/1.001 Progressive */
+	VID_FMT_1080P_2400,			/**< 1920 x 1080 24       Progressive */
+	VID_FMT_1080I_5000,			/**< 1920 x 1080 50       Interlaced */
+	VID_FMT_1080I_5994,			/**< 1920 x 1080 60/1.001 Interlaced */
+	VID_FMT_1080I_6000,			/**< 1920 x 1080 60       Interlaced */
+	VID_FMT_1080P_2500,			/**< 1920 x 1080 25       Progressive */
+	VID_FMT_1080P_2997,			/**< 1920 x 1080 30/1.001 Progressive */
+	VID_FMT_1080P_3000,			/**< 1920 x 1080 30       Progressive */
+	VID_FMT_HSDL_1498,			/**< 2048 x 1556 15/1.0   Segment Frame */
+	VID_FMT_HSDL_1500,			/**< 2048 x 1556 15       Segment Frame */
+	VID_FMT_720P_5000,			/**< 1280 x 720  50       Progressive */
+	VID_FMT_720P_2398,			/**< 1280 x 720  24/1.001 Progressive */
+	VID_FMT_720P_2400,			/**< 1280 x 720  24       Progressive */
+	VID_FMT_2048_1080PSF_2397=19,	/**< 2048 x 1080 24/1.001 Segment Frame */
+	VID_FMT_2048_1080PSF_2400=20,	/**< 2048 x 1080 24       Segment Frame */
+	VID_FMT_2048_1080P_2397=21,	/**< 2048 x 1080 24/1.001 progressive */ 
+	VID_FMT_2048_1080P_2400=22,	/**< 2048 x 1080 24       progressive */
+	VID_FMT_1080PSF_2500=23,
+	VID_FMT_1080PSF_2997=24,
+	VID_FMT_1080PSF_3000=25,
+	VID_FMT_1080P_5000=26,
+	VID_FMT_1080P_5994=27,
+	VID_FMT_1080P_6000=28,
+	VID_FMT_720P_2500=29,
+	VID_FMT_720P_2997=30,
+	VID_FMT_720P_3000=31,
+	VID_FMT_DVB_ASI=32,
+	VID_FMT_2048_1080PSF_2500=33,
+	VID_FMT_2048_1080PSF_2997=34,
+	VID_FMT_2048_1080PSF_3000=35,
+	VID_FMT_2048_1080P_2500=36,
+	VID_FMT_2048_1080P_2997=37,
+	VID_FMT_2048_1080P_3000=38,
+	VID_FMT_2048_1080P_5000=39,
+	VID_FMT_2048_1080P_5994=40,
+	VID_FMT_2048_1080P_6000=41,
+	VID_FMT_INVALID=42			/**< sentinel — not a valid video mode */
+} EVideoMode;
+\r
+/**\r
+@brief Use these enumerators to set the pixel format \r
+ that should be used by the video input and output \r
+ framestores.\r
+*/\r
+typedef enum _EMemoryFormat
+{
+	MEM_FMT_ARGB=0,				/**< ARGB 4:4:4:4 */
+	MEM_FMT_BV10=1,
+	MEM_FMT_BV8=2,
+	MEM_FMT_YUVS=MEM_FMT_BV8,	/**< alias of MEM_FMT_BV8 */
+	MEM_FMT_V210=3,				// Iridium HD (BAG1)
+	MEM_FMT_RGBA=4,
+	MEM_FMT_CINEON_LITTLE_ENDIAN=5,
+	MEM_FMT_ARGB_PC=6,
+	MEM_FMT_BGRA=MEM_FMT_ARGB_PC,	/**< alias of MEM_FMT_ARGB_PC */
+	MEM_FMT_CINEON=7,
+	MEM_FMT_2VUY=8,
+	MEM_FMT_BGR=9,
+	MEM_FMT_BGR_16_16_16=10,
+	MEM_FMT_BGRA_16_16_16_16=11,
+	MEM_FMT_VUYA_4444=12,
+	MEM_FMT_V216=13,
+	MEM_FMT_Y210=14,
+	MEM_FMT_Y216=15,
+	MEM_FMT_INVALID=16			/**< sentinel — not a valid memory format */
+} EMemoryFormat;
+\r
+/**\r
+@brief Used to control the video update type, \r
+ whether the card should capture/playback a\r
+ video frame or field.\r
+*/\r
+typedef enum _EUpdateMethod
+{
+	UPD_FMT_FIELD=0,				/**< framestore updated per video field */
+	UPD_FMT_FRAME,					/**< framestore updated per video frame */
+	UPD_FMT_FRAME_DISPLAY_FIELD1,
+	UPD_FMT_FRAME_DISPLAY_FIELD2,
+	UPD_FMT_INVALID,
+	/* Flag bit, not an ordinal value — may be OR-ed with the values above. */
+	UPD_FMT_FLAG_RETURN_CURRENT_UNIQUEID=0x80000000,/**< if this flag is used on epoch cards, function would 
+													return the unique id of the current frame as the return value.*/
+} EUpdateMethod;
+\r
+/** Resolution scaling selector: full or half resolution. */
+typedef enum _EResoFormat
+{
+	RES_FMT_NORMAL=0,	/**< full resolution */
+	RES_FMT_HALF,		/**< half resolution */
+	RES_FMT_INVALID		/**< sentinel */
+} EResoFormat;
+\r
+/** Identifies the Bluefish card model. Several names are aliases of the
+    same value (see the '=' entries); ordering defines the implicit values. */
+typedef enum _ECardType
+{
+	CRD_BLUEDEEP_LT=0,		// D64 Lite
+	CRD_BLUEDEEP_SD,		// Iridium SD
+	CRD_BLUEDEEP_AV,		// Iridium AV
+	CRD_BLUEDEEP_IO,		// D64 Full
+	CRD_BLUEWILD_AV,		// D64 AV
+	CRD_IRIDIUM_HD,			// * Iridium HD
+	CRD_BLUEWILD_RT,		// D64 RT
+	CRD_BLUEWILD_HD,		// * BadAss G2
+	CRD_REDDEVIL,			// Iridium Full
+	CRD_BLUEDEEP_HD,		// * BadAss G2 variant, proposed, reserved
+	CRD_BLUE_EPOCH_2K =	CRD_BLUEDEEP_HD,
+	CRD_BLUE_EPOCH_2K_HORIZON=CRD_BLUE_EPOCH_2K,
+	CRD_BLUEDEEP_HDS,		// * BadAss G2 variant, proposed, reserved
+	CRD_BLUE_ENVY,			// Mini Din 
+	CRD_BLUE_PRIDE,			//Mini Din Output 
+	CRD_BLUE_GREED,
+	CRD_BLUE_INGEST,
+	CRD_BLUE_SD_DUALLINK,
+	CRD_BLUE_CATALYST,
+	CRD_BLUE_SD_DUALLINK_PRO,
+	CRD_BLUE_SD_INGEST_PRO,
+	CRD_BLUE_SD_DEEPBLUE_LITE_PRO,
+	CRD_BLUE_SD_SINGLELINK_PRO,
+	CRD_BLUE_SD_IRIDIUM_AV_PRO,
+	CRD_BLUE_SD_FIDELITY,
+	CRD_BLUE_SD_FOCUS,
+	CRD_BLUE_SD_PRIME,
+	CRD_BLUE_EPOCH_2K_CORE,
+	CRD_BLUE_EPOCH_2K_ULTRA,
+	CRD_BLUE_EPOCH_HORIZON,
+	CRD_BLUE_EPOCH_CORE,
+	CRD_BLUE_EPOCH_ULTRA,
+	CRD_BLUE_CREATE_HD,
+	CRD_BLUE_CREATE_2K,
+	CRD_BLUE_CREATE_2K_ULTRA,
+	CRD_BLUE_CREATE_3D = CRD_BLUE_CREATE_2K,
+	CRD_BLUE_CREATE_3D_ULTRA = CRD_BLUE_CREATE_2K_ULTRA,
+	CRD_BLUE_SUPER_NOVA,
+	CRD_INVALID			/**< sentinel — not a valid card type */
+} ECardType;
+\r
+\r
+/** Sub-type discriminator for the HD card families. */
+typedef enum _EHDCardSubType
+{	
+	CRD_HD_FURY=1,
+	CRD_HD_VENGENCE=2,
+	CRD_HD_IRIDIUM_XP=3,
+	CRD_HD_IRIDIUM = 4,
+	CRD_HD_LUST=5,
+	CRD_HD_INVALID		/**< sentinel */
+}EHDCardSubType;
+\r
+/** Firmware product IDs for Epoch/Create/SuperNova cards. */
+enum EEpochFirmwareProductID
+{
+	ORAC_FILMPOST_FIRMWARE_PRODUCTID=(0x1),				//Epoch/Create, standard firmware
+	ORAC_BROADCAST_FIRMWARE_PRODUCTID=(0x2),			//Epoch
+	ORAC_ASI_FIRMWARE_PRODUCTID=(0x3),					//Epoch
+	ORAC_4SDIINPUT_FIRMWARE_PRODUCTID=(0x4),			//SuperNova
+	ORAC_4SDIOUTPUT_FIRMWARE_PRODUCTID=(0x5),			//SuperNova
+	ORAC_2SDIINPUT_2SDIOUTPUT_FIRMWARE_PRODUCTID=(0x6),	//SuperNova
+};
+\r
+/**< @brief Use this enumerator to select the audio channels that should be captured or played back.\r
+*/\r
+/** Bitmask flags selecting individual mono audio channels; OR channel bits
+    together (with MONO_FLAG) to describe a channel set. */
+typedef enum _BlueAudioChannelDesc
+{
+	MONO_FLAG		= 0xC0000000,	/**< marker bits indicating a mono-channel mask */
+	MONO_CHANNEL_1	= 0x00000001,
+	MONO_CHANNEL_2	= 0x00000002,
+	MONO_CHANNEL_3	= 0x00000004,
+	MONO_CHANNEL_4	= 0x00000008,
+	MONO_CHANNEL_5	= 0x00000010,
+	MONO_CHANNEL_6	= 0x00000020,
+	MONO_CHANNEL_7	= 0x00000040,
+	MONO_CHANNEL_8	= 0x00000080,
+	MONO_CHANNEL_9	= 0x00000100,// to be used by analog audio output channels 
+	MONO_CHANNEL_10	= 0x00000200,// to be used by analog audio output channels 
+	MONO_CHANNEL_11	= 0x00000400,//actual channel 9
+	MONO_CHANNEL_12	= 0x00000800,//actual channel 10
+	MONO_CHANNEL_13	= 0x00001000,//actual channel 11
+	MONO_CHANNEL_14	= 0x00002000,//actual channel 12
+	MONO_CHANNEL_15	= 0x00004000,//actual channel 13
+	MONO_CHANNEL_16	= 0x00008000,//actual channel 14
+	MONO_CHANNEL_17	= 0x00010000,//actual channel 15
+	MONO_CHANNEL_18	= 0x00020000 //actual channel 16
+}BlueAudioChannelDesc;
+\r
+\r
+//----------------------------------------------------------------------------\r
+/**<
+Use this enumerator to set the type of pcm audio data.
+*/
+/** Bit flags describing PCM audio data layout (endianness, bit depth).
+    Several entries are deprecated and retained only for compatibility. */
+typedef enum
+{
+	AUDIO_CHANNEL_LOOPING_OFF		= 0x00000000, /**< deprecated not used any more */
+	AUDIO_CHANNEL_LOOPING			= 0x00000001,/**< deprecated not used any more */
+	AUDIO_CHANNEL_LITTLEENDIAN		= 0x00000000, /**< if the audio data is little endian this flag must be set*/
+	AUDIO_CHANNEL_BIGENDIAN			= 0x00000002,/**< if the audio data is big endian this flag must be set*/
+	AUDIO_CHANNEL_OFFSET_IN_BYTES	= 0x00000004,/**< deprecated not used any more */
+	AUDIO_CHANNEL_16BIT				= 0x00000008, /**< if the audio channel bit depth is 16 bits this flag must be set*/
+	AUDIO_CHANNEL_BLIP_PENDING		= 0x00000010,/**< deprecated not used any more */
+	AUDIO_CHANNEL_BLIP_COMPLETE		= 0x00000020,/**< deprecated not used any more */
+	AUDIO_CHANNEL_SELECT_CHANNEL	= 0x00000040,/**< deprecated not used any more */
+	AUDIO_CHANNEL_24BIT				= 0x00000080/**< if the audio channel bit depth is 24 bits this flag must be set*/
+} EAudioFlags;
+\r
+/**\r
+@desc Used to select Audio input source on new generation SD cards \r
+@remarks\r
+This enumerator works only when used with ReadAudioSample function.\r
+*/\r
+typedef enum
+{
+	BLUE_AUDIO_AES=0,		/**< Used to select All 8 channels of Digital Audio using AES/AES3id connector*/
+	BLUE_AUDIO_ANALOG=1,	/**< Used to select Analog audio*/
+	BLUE_AUDIO_SDIA=2,		/**< Used to select Emb audio from SDI A */
+	BLUE_AUDIO_EMBEDDED=BLUE_AUDIO_SDIA,	/**< alias of BLUE_AUDIO_SDIA */
+	BLUE_AUDIO_SDIB=3,		/**< Used to select Emb audio from SDI B */
+	BLUE_AUDIO_AES_PAIR0=4, /**< Used to select stereo pair 0 as audio input source. This is only supported on SD Greed Derivative cards.*/
+	BLUE_AUDIO_AES_PAIR1=5,/**< Used to select stereo pair 1 as audio input source. This is only supported on SD Greed Derivative cards.*/
+	BLUE_AUDIO_AES_PAIR2=6,/**< Used to select stereo pair 2 as audio input source. This is only supported on SD Greed Derivative cards.*/
+	BLUE_AUDIO_AES_PAIR3=7,/**< Used to select stereo pair 3 as audio input source. This is only supported on SD Greed Derivative cards.*/
+	BLUE_AUDIO_SDIC=8,		/**< Used to select Emb audio from SDI C */
+	BLUE_AUDIO_SDID=9,		/**< Used to select Emb audio from SDI D */
+	BLUE_AUDIO_INVALID=10	/**< sentinel */
+} Blue_Audio_Connector_Type;
+\r
+/** Audio sample rates in Hz; the enumerator values are the rates themselves. */
+typedef enum _EAudioRate
+{
+	AUDIO_SAMPLE_RATE_48K=48000,	/**< 48 kHz */
+	AUDIO_SAMPLE_RATE_96K=96000,	/**< 96 kHz */
+	AUDIO_SAMPLE_RATE_UNKNOWN=-1	/**< sentinel for unknown/undetected rate */
+} EAudioRate;
+\r
+/**<\r
+@brief use this enumerator to define the color space of the video signal on the SDI cable\r
+*/\r
+typedef enum _EConnectorSignalColorSpace
+{
+	RGB_ON_CONNECTOR=0x00400000, /**< Use this enumerator if the colorspace of video data on the SDI cable is RGB <br/>
+									When using dual link capture/playback , user can choose the 
+									color space of the data. <br>
+									In single link SDI the color space of the signal is always YUV*/
+	YUV_ON_CONNECTOR=0 /**<Use this enumerator if color space of video data on the SDI cable is YUV.*/
+}EConnectorSignalColorSpace;
+\r
+\r
+/**<\r
+@brief use this enumerator for controlling the dual link functionality.\r
+*/\r
+typedef enum _EDualLinkSignalFormatType
+{
+	Signal_FormatType_4224=0,	/**< sets the card to work in 4:2:2:4 mode*/
+	Signal_FormatType_4444=1,/**< sets the card to work in 4:4:4 10 bit dual link mode*/
+	Signal_FormatType_444_10BitSDI=Signal_FormatType_4444,/**< sets the card to work in 10 bit 4:4:4 dual link mode*/
+	Signal_FormatType_444_12BitSDI=0x4,/**< sets the card to work in 4:4:4 12 bit dual link mode*/
+	Signal_FormatType_Independent_422 = 0x2,	/**< two independent 4:2:2 single links */
+	Signal_FormatType_Key_Key=0x8000/**< not used currently on epoch cards */
+	
+}EDualLinkSignalFormatType;
+\r
+\r
+/** Top-level link operating mode of the card (single/dual link, stereoscopic). */
+enum ECardOperatingMode
+{
+	CardOperatingMode_SingleLink=0x0,
+	CardOperatingMode_Independent_422=CardOperatingMode_SingleLink,	/**< alias of SingleLink */
+	CardOperatingMode_DualLink=0x1,	
+	CardOperatingMode_StereoScopic_422=0x3,	
+	CardOperatingMode_Dependent_422=CardOperatingMode_StereoScopic_422,/**< not used currently on epoch cards */
+};
+\r
+\r
+/** Predefined color-space conversion matrices (RGB<->YUV, 601/709/SMPTE-274,
+    CGR and SMPTE ranges); MATRIX_USER_DEFINED selects a user-supplied matrix. */
+typedef enum _EPreDefinedColorSpaceMatrix
+{
+	UNITY_MATRIX=0,			/**< pass-through, no conversion */
+	MATRIX_709_CGR=1,
+	MATRIX_RGB_TO_YUV_709_CGR=MATRIX_709_CGR,
+	MATRIX_709=2,
+	MATRIX_RGB_TO_YUV_709=MATRIX_709,
+	RGB_FULL_RGB_SMPTE=3,
+	MATRIX_601_CGR=4,
+	MATRIX_RGB_TO_YUV_601_CGR=MATRIX_601_CGR,
+	MATRIX_601=5,
+	MATRIX_RGB_TO_YUV_601=MATRIX_601,
+	MATRIX_SMPTE_274_CGR=6,
+	MATRIX_SMPTE_274=7,
+	MATRIX_VUYA=8,
+	UNITY_MATRIX_INPUT=9,
+	MATRIX_YUV_TO_RGB_709_CGR=10,
+	MATRIX_YUV_TO_RGB_709=11,
+	RGB_SMPTE_RGB_FULL=12,
+	MATRIX_YUV_TO_RGB_601_CGR=13,
+	MATRIX_YUV_TO_RGB_601=14,
+	MATRIX_USER_DEFINED=15,	/**< matrix supplied via VIDEO_USER_DEFINED_COLOR_MATRIX */
+}EPreDefinedColorSpaceMatrix;
+\r
+#ifndef BLUE_LINUX_CODE
+/**< 
+@brief this enumerator contains the status of the driver video/hanc fifo 
+*/
+typedef enum
+{
+	BLUE_FIFO_CLOSED=0,	/**< Fifo has not been initialized*/
+	BLUE_FIFO_STARTING=1,/**< Fifo is starting */
+	BLUE_FIFO_RUNNING=2,/**< Fifo is running */
+	BLUE_FIFO_STOPPING=3,/**< Fifo is in the process of stopping */
+	/* NOTE(review): value 4 is skipped — confirm this is intentional. */
+	BLUE_FIFO_PASSIVE=5,/**< Fifo is currently stopped or not active*/
+}BlueVideoFifoStatus;
+#endif
+\r
+/**<\r
+@brief use this enumerator to define the data range of the RGB video frame data.\r
+*/\r
+typedef enum _ERGBDataRange
+{
+	CGR_RANGE=0,	/**< In this mode RGB data expected by the user (capture) or provided by the user(playback) is 
+						in the range of 0-255(8 bit) or 0-1023(10 bit).<br/>
+						driver uses this information to choose the appropriate YUV conversion matrices.*/
+	SMPTE_RANGE=1	/**< In this mode RGB data expected by the user (capture) or provided by the user(playback) is 
+						in the range of 16-235(8 bit) or 64-940(10 bit).<br/>
+						driver uses this information to choose the appropriate YUV conversion matrices.*/
+}ERGBDataRange;
+\r
+/** Selects SD-SDI or HD-SDI mode on the X-connector. */
+typedef enum _EHD_XCONNECTOR_MODE
+{
+	SD_SDI=1,	/**< standard-definition SDI */
+	HD_SDI=2	/**< high-definition SDI */
+}EHD_XCONNECTOR_MODE;
+\r
+/**< @brief this enumerator can be used to set the image orientation of the frame. 
+*/
+typedef enum _EImageOrientation
+{
+	ImageOrientation_Normal=0,		/**< in this configuration , frame is top to bottom and left to right */
+	ImageOrientation_VerticalFlip=1,	/**< in this configuration frame is bottom to top and left to right*/
+	ImageOrientation_Invalid=2,		/**< sentinel */
+}EImageOrientation;
+\r
+/**< @brief this enumerator defines the reference signal source that can be used with bluefish cards\r
+*/\r
+typedef enum _EBlueGenlockSource
+{
+	BlueGenlockBNC=0,		/**< Genlock is used as reference signal source */
+	BlueSDIBNC=0x10000,		/**< SDI input B is used as reference signal source */
+	BlueSDI_B_BNC=BlueSDIBNC,	/**< alias of BlueSDIBNC */
+	BlueSDI_A_BNC=0x20000,/**< SDI input A is used as reference signal source */
+	BlueAnalog_BNC=0x40000,	/**< Analog input is used as reference signal source */
+	BlueSoftware=0x80000,	/**< free-running / software-timed reference */
+}EBlueGenlockSource;
+\r
+\r
+/** Logical video channels. Channels A/B/G/H map to outputs A-D and C/D/E/F
+    map to inputs A-D via the aliases below. */
+typedef enum _EBlueVideoChannel
+{
+	BLUE_VIDEOCHANNEL_A=0,
+	BLUE_VIDEO_OUTPUT_CHANNEL_A=BLUE_VIDEOCHANNEL_A,
+	
+	BLUE_VIDEOCHANNEL_B=1,
+	BLUE_VIDEO_OUTPUT_CHANNEL_B=BLUE_VIDEOCHANNEL_B,
+	
+	BLUE_VIDEOCHANNEL_C=2,
+	BLUE_VIDEO_INPUT_CHANNEL_A=BLUE_VIDEOCHANNEL_C,
+
+	BLUE_VIDEOCHANNEL_D=3,
+	BLUE_VIDEO_INPUT_CHANNEL_B=BLUE_VIDEOCHANNEL_D,
+
+	BLUE_VIDEOCHANNEL_E=4,
+	BLUE_VIDEO_INPUT_CHANNEL_C=BLUE_VIDEOCHANNEL_E,
+
+	BLUE_VIDEOCHANNEL_F=5,
+	BLUE_VIDEO_INPUT_CHANNEL_D=BLUE_VIDEOCHANNEL_F,
+
+	BLUE_VIDEOCHANNEL_G=6,
+	BLUE_VIDEO_OUTPUT_CHANNEL_C=BLUE_VIDEOCHANNEL_G,
+
+	BLUE_VIDEOCHANNEL_H=7,
+	BLUE_VIDEO_OUTPUT_CHANNEL_D=BLUE_VIDEOCHANNEL_H,
+	
+	BLUE_OUTPUT_MEM_MODULE_A=BLUE_VIDEO_OUTPUT_CHANNEL_A,
+	BLUE_OUTPUT_MEM_MODULE_B=BLUE_VIDEO_OUTPUT_CHANNEL_B,
+	BLUE_INPUT_MEM_MODULE_A=BLUE_VIDEO_INPUT_CHANNEL_A,
+	BLUE_INPUT_MEM_MODULE_B=BLUE_VIDEO_INPUT_CHANNEL_B,
+	//BLUE_JETSTREAM_SCALER_MODULE_0=0x10,
+	//BLUE_JETSTREAM_SCALER_MODULE_1=0x11,
+	//BLUE_JETSTREAM_SCALER_MODULE_2=0x12,
+	//BLUE_JETSTREAM_SCALER_MODULE_3=0x13,
+
+	BLUE_VIDEOCHANNEL_INVALID=30	/**< sentinel */
+}EBlueVideoChannel;
+\r
+/** Physical link selectors used for video routing (SDI A/B and analog,
+    each with two dual-link sub-links). */
+typedef enum _EBlueVideoRouting
+{
+	BLUE_VIDEO_LINK_INVALID=0,	/**< sentinel */
+	BLUE_SDI_A_LINK1=4,
+	BLUE_SDI_A_LINK2=5,
+	BLUE_SDI_B_LINK1=6,
+	BLUE_SDI_B_LINK2=7,
+	BLUE_ANALOG_LINK1=8,
+	BLUE_ANALOG_LINK2=9,
+	BLUE_SDI_A_SINGLE_LINK=BLUE_SDI_A_LINK1,	/**< single-link alias */
+	BLUE_SDI_B_SINGLE_LINK=BLUE_SDI_B_LINK1,	/**< single-link alias */
+	BLUE_ANALOG_SINGLE_LINK=BLUE_ANALOG_LINK1	/**< single-link alias */
+
+}EBlueVideoRouting;
+\r
+/** Bit flags configuring the video fifo behaviour (OR-able). */
+typedef enum
+{
+	BLUE_FIFO_NULL_ATTRIBUTE=0x0,	/**< no special behaviour */
+	BLUE_FIFO_ECHOPORT_ENABLED=0x1,
+	BLUE_FIFO_STEPMODE	= 0x2,
+	BLUE_FIFO_LOOPMODE	= 0x4
+}BlueVideoFifo_Attributes;
+\r
+/** Destination flags for routing PCM audio output (OR-able bit values). */
+typedef enum _BlueAudioOutputDest
+{
+	Blue_AnalogAudio_Output=0x0,	/**< analog audio output */
+	Blue_AES_Output=0x80000000,		/**< AES digital output */
+	Blue_Emb_Output=0x40000000,		/**< SDI embedded audio output */
+}BlueAudioOutputDest;
+\r
+\r
+/**<@brief this enumerator is not used and needs to be removed*/
+typedef enum _BlueAudioInputSource
+{
+	Blue_AES=0x10,
+	Blue_AnalogAudio=0x20,
+	Blue_SDIA_Embed=0x40,
+	Blue_SDIB_Embed=0x80,
+}BlueAudioInputSource;
+\r
+/** Identifies a physical connector on the card shield. Value ranges group the
+    connector families: 0-6 BNC, 100+ analog video, 200+ DVI-D/SDI, 300+ audio. */
+typedef enum _EBlueConnectorIdentifier
+{
+	BLUE_CONNECTOR_INVALID = -1,
+	
+	// BNC connectors in order from top to bottom of shield
+	BLUE_CONNECTOR_BNC_A = 0,     // BNC closest to top of shield
+	BLUE_CONNECTOR_BNC_B,
+	BLUE_CONNECTOR_BNC_C,
+	BLUE_CONNECTOR_BNC_D,
+	BLUE_CONNECTOR_BNC_E,
+	BLUE_CONNECTOR_BNC_F,
+	BLUE_CONNECTOR_GENLOCK,
+	
+	BLUE_CONNECTOR_ANALOG_VIDEO_1 = 100,
+	BLUE_CONNECTOR_ANALOG_VIDEO_2,
+	BLUE_CONNECTOR_ANALOG_VIDEO_3,
+	BLUE_CONNECTOR_ANALOG_VIDEO_4,
+	BLUE_CONNECTOR_ANALOG_VIDEO_5,
+	BLUE_CONNECTOR_ANALOG_VIDEO_6,
+
+	BLUE_CONNECTOR_DVID_1 = 200,
+	BLUE_CONNECTOR_SDI_OUTPUT_A= BLUE_CONNECTOR_DVID_1,
+	BLUE_CONNECTOR_DVID_2,
+	BLUE_CONNECTOR_SDI_OUTPUT_B= BLUE_CONNECTOR_DVID_2,
+	BLUE_CONNECTOR_DVID_3,
+	BLUE_CONNECTOR_SDI_INPUT_A= BLUE_CONNECTOR_DVID_3,
+	BLUE_CONNECTOR_DVID_4,
+	BLUE_CONNECTOR_SDI_INPUT_B= BLUE_CONNECTOR_DVID_4,
+	BLUE_CONNECTOR_DVID_5,
+	BLUE_CONNECTOR_SDI_OUTPUT_C,
+	BLUE_CONNECTOR_SDI_OUTPUT_D,
+
+	BLUE_CONNECTOR_AES = 300,
+	BLUE_CONNECTOR_ANALOG_AUDIO_1,
+	BLUE_CONNECTOR_ANALOG_AUDIO_2,
+
+	/* NOTE(review): DVID_6/DVID_7 are declared after the 300-range audio
+	   entries, so they take values 303/304 rather than continuing the
+	   200-range DVID series — confirm this is intentional. */
+	BLUE_CONNECTOR_DVID_6,
+	BLUE_CONNECTOR_SDI_INPUT_C= BLUE_CONNECTOR_DVID_6,
+	BLUE_CONNECTOR_DVID_7,
+	BLUE_CONNECTOR_SDI_INPUT_D= BLUE_CONNECTOR_DVID_7,
+
+	//BLUE_CONNECTOR_RESOURCE_BLOCK=0x400,
+	//BLUE_CONNECTOR_JETSTREAM_SCALER_0=(BLUE_CONNECTOR_RESOURCE_BLOCK|BLUE_JETSTREAM_SCALER_MODULE_0),
+	//BLUE_CONNECTOR_JETSTREAM_SCALER_1=(BLUE_CONNECTOR_RESOURCE_BLOCK|BLUE_JETSTREAM_SCALER_MODULE_1),
+	//BLUE_CONNECTOR_JETSTREAM_SCALER_2=(BLUE_CONNECTOR_RESOURCE_BLOCK|BLUE_JETSTREAM_SCALER_MODULE_2),
+	//BLUE_CONNECTOR_JETSTREAM_SCALER_3=(BLUE_CONNECTOR_RESOURCE_BLOCK|BLUE_JETSTREAM_SCALER_MODULE_3),
+
+	//BLUE_CONNECTOR_OUTPUT_MEM_MODULE_A=(BLUE_CONNECTOR_RESOURCE_BLOCK|BLUE_OUTPUT_MEM_MODULE_A),
+	//BLUE_CONNECTOR_OUTPUT_MEM_MODULE_B=(BLUE_CONNECTOR_RESOURCE_BLOCK|BLUE_OUTPUT_MEM_MODULE_B),
+	//BLUE_CONNECTOR_INPUT_MEM_MODULE_A=(BLUE_CONNECTOR_RESOURCE_BLOCK|BLUE_INPUT_MEM_MODULE_A),
+	//BLUE_CONNECTOR_INPUT_MEM_MODULE_B=(BLUE_CONNECTOR_RESOURCE_BLOCK|BLUE_INPUT_MEM_MODULE_B),
+	//
+}EBlueConnectorIdentifier;
+\r
+/** Signal direction of a connector: input, output, or invalid. */
+typedef enum _EBlueConnectorSignalDirection
+{
+	BLUE_CONNECTOR_SIGNAL_INVALID=-1,	/**< sentinel */
+	BLUE_CONNECTOR_SIGNAL_INPUT=0,
+	BLUE_CONNECTOR_SIGNAL_OUTPUT=1,
+}EBlueConnectorSignalDirection;
+\r
+/** Properties that can be set/queried per connector. Value ranges group the
+    families: 0-… signal/video, 0x2000+ audio, 0x3000+ link/stereo modes.
+    NOTE(review): BLUE_CONNECTOR_PROP_INPUT_SIGNAL/OUTPUT_SIGNAL share values
+    0/1 with BLUE_CONNECTOR_PROP_SDI/YUV_Y — confirm this overlap is intended. */
+typedef enum _EBlueConnectorProperty
+{
+	BLUE_INVALID_CONNECTOR_PROPERTY = -1,
+
+	//signal property
+	BLUE_CONNECTOR_PROP_INPUT_SIGNAL=0,
+	BLUE_CONNECTOR_PROP_OUTPUT_SIGNAL=1,
+
+	// Video output
+	BLUE_CONNECTOR_PROP_SDI = 0,
+	BLUE_CONNECTOR_PROP_YUV_Y,
+	BLUE_CONNECTOR_PROP_YUV_U,
+	BLUE_CONNECTOR_PROP_YUV_V,
+	BLUE_CONNECTOR_PROP_RGB_R,
+	BLUE_CONNECTOR_PROP_RGB_G,
+	BLUE_CONNECTOR_PROP_RGB_B,
+	BLUE_CONNECTOR_PROP_CVBS,
+	BLUE_CONNECTOR_PROP_SVIDEO_Y,
+	BLUE_CONNECTOR_PROP_SVIDEO_C,
+	
+	// Audio output
+	BLUE_CONNECTOR_PROP_AUDIO_AES = 0x2000,
+	BLUE_CONNECTOR_PROP_AUDIO_EMBEDDED,
+	BLUE_CONNECTOR_PROP_AUDIO_ANALOG,
+
+	
+	BLUE_CONNECTOR_PROP_SINGLE_LINK=0x3000,
+	BLUE_CONNECTOR_PROP_DUALLINK_LINK_1,
+	BLUE_CONNECTOR_PROP_DUALLINK_LINK_2,
+	BLUE_CONNECTOR_PROP_DUALLINK_LINK,
+
+	BLUE_CONNECTOR_PROP_STEREO_MODE_SIDE_BY_SIDE,
+	BLUE_CONNECTOR_PROP_STEREO_MODE_TOP_DOWN,
+	BLUE_CONNECTOR_PROP_STEREO_MODE_LINE_BY_LINE,
+
+}EBlueConnectorProperty;
+\r
+/*\r
+typedef enum _BLUE_AUDIOINPUT_SOURCE\r
+{\r
+ BLUE_AES_AUDIO_INPUT=0x10000,\r
+ BLUE_ANALOG_AUDIO_INPUT=0x20000,\r
+ BLUE_SDIA_AUDIO_INPUT=0x30000,\r
+ BLUE_SDIB_AUDIO_INPUT=0x40000\r
+}BLUE_AUDIOINPUT_SOURCE;\r
+*/\r
+/**\r
+@desc use the values in this enumerator for controlling card property\r
+*/\r
+typedef enum _EBlueCardProperty\r
+{\r
+	VIDEO_DUAL_LINK_OUTPUT=0,/**< Use this property to enable/disable cards dual link output property*/
+	VIDEO_DUAL_LINK_INPUT=1,/**< Use this property to enable/disable cards dual link input property*/
+ VIDEO_DUAL_LINK_OUTPUT_SIGNAL_FORMAT_TYPE=2, /**<Use this property to select signal format type that should be used \r
+ when dual link output is enabled. Possible values this property can\r
+ accept is defined in the enumerator EDualLinkSignalFormatType\r
+ */\r
+ VIDEO_DUAL_LINK_INPUT_SIGNAL_FORMAT_TYPE=3,/**< Use this property to select signal format type that should be used \r
+ when dual link input is enabled. Possible values this property can\r
+ accept is defined in the enumerator EDualLinkSignalFormatType\r
+ */\r
+ VIDEO_OUTPUT_SIGNAL_COLOR_SPACE=4,/**< Use this property to select color space of the signal when dual link output is set to \r
+ use 4:4:4/4:4:4:4 signal format type. Possible values this property can\r
+ accept is defined in the enumerator EConnectorSignalColorSpace\r
+ */\r
+ VIDEO_INPUT_SIGNAL_COLOR_SPACE=5,/**< Use this property to select color space of the signal when dual link input is set to \r
+ use 4:4:4/4:4:4:4 signal format type. Possible values this property can\r
+ accept is defined in the enumerator EConnectorSignalColorSpace\r
+ */\r
+ VIDEO_MEMORY_FORMAT=6, /**<Use this property to ser the pixel format that should be used by \r
+ video output channels. Possible values this property can\r
+ accept is defined in the enumerator EMemoryFormat\r
+ */ \r
+ VIDEO_MODE=7,/**<Use this property to set the video mode that should be used by \r
+ video output channels. Possible values this property can\r
+ accept is defined in the enumerator EVideoMode\r
+ */ //\r
+ VIDEO_UPDATE_TYPE=8,/**<Use this property to set the framestore update type that should be used by \r
+ video output channels. Card can update video framestore at field/frame rate.\r
+ Possible values this property can accept is defined in the enumerator EUpdateMethod\r
+ */ \r
+ VIDEO_ENGINE=9,\r
+ VIDEO_IMAGE_ORIENTATION=10,/**< Use this property to set the image orientation of the video output framestore.\r
+ This property must be set before frame is transferred to on card memory using \r
+ DMA transfer functions(system_buffer_write_async). It is recommended to use \r
+ vertical flipped image orientation only on RGB pixel formats.\r
+ Possible values this property can accept is defined in the enumerator EImageOrientation\r
+ */\r
+ VIDEO_USER_DEFINED_COLOR_MATRIX=11,\r
+ VIDEO_PREDEFINED_COLOR_MATRIX=12,//EPreDefinedColorSpaceMatrix\r
+ VIDEO_RGB_DATA_RANGE=13, /**< Use this property to set the data range of RGB pixel format, user can specify \r
+ whether the RGB data is in either SMPTE or CGR range. Based on this information \r
+ driver is decide which color matrix should be used.\r
+ Possible values this property can accept is defined in the enumerator ERGBDataRange\r
+ For SD cards this property will set the input and the output to the specified value.\r
+ For Epoch/Create/SuperNova cards this property will only set the output to the specified value.\r
+ For setting the input on Epoch/Create/SuperNova cards see EPOCH_VIDEO_INPUT_RGB_DATA_RANGE*/\r
+ VIDEO_KEY_OVER_BLACK=14,/**< this property is deprecated and no longer supported on epoch/create range of cards.*/ \r
+ VIDEO_KEY_OVER_INPUT_SIGNAL=15,\r
+ VIDEO_SET_DOWN_CONVERTER_VIDEO_MODE=16,/**< this property is deprecated and no longer supported on epoch/create range of cards.\r
+ EHD_XCONNECTOR_MODE\r
+ */\r
+ VIDEO_LETTER_BOX=17,\r
+ VIDEO_PILLOR_BOX_LEFT=18,\r
+ VIDEO_PILLOR_BOX_RIGHT=19,\r
+ VIDEO_PILLOR_BOX_TOP=20,\r
+ VIDEO_PILLOR_BOX_BOTTOM=21,\r
+ VIDEO_SAFE_PICTURE=22,\r
+ VIDEO_SAFE_TITLE=23,\r
+ VIDEO_INPUT_SIGNAL_VIDEO_MODE=24,/**< Use this property to retreive the video input signal information on the \r
+ default video input channel used by that SDK object.\r
+ */\r
+ VIDEO_COLOR_MATRIX_MODE=25,\r
+ VIDEO_OUTPUT_MAIN_LUT=26,/**< this property is deprecated and no longer supported on epoch/create range of cards.*/\r
+ VIDEO_OUTPUT_AUX_LUT=27,/**< this property is deprecated and no longer supported on epoch/create range of cards.*/\r
+ VIDEO_LTC=28, /**< this property is deprecated and no longer supported on epoch/create range of cards. To retreive/ outputting \r
+ LTC information you can use the HANC decoding and encoding functions.*/\r
+ VIDEO_GPIO=29, \r
+ VIDEO_PLAYBACK_FIFO_STATUS=30, /**< This property can be used to retreive how many frames are bufferd in the video playback fifo.*/\r
+ RS422_RX_BUFFER_LENGTH=31,\r
+ RS422_RX_BUFFER_FLUSH=32,\r
+ VIDEO_INPUT_UPDATE_TYPE=33,/**< Use this property to set the framestore update type that should be used by \r
+ video input channels. Card can update video framestore at field/frame rate.\r
+ Possible values this property can accept is defined in the enumerator EUpdateMethod\r
+ */ \r
+ VIDEO_INPUT_MEMORY_FORMAT=34,/**<Use this property to set the pixel format that should be used by \r
+ video input channels when it is capturing a frame from video input source. \r
+ Possible values this property can accept is defined in the enumerator EMemoryFormat\r
+ */ \r
+ VIDEO_GENLOCK_SIGNAL=35,/**< Use this property to retrieve video signal of the reference source that is used by the card.\r
+ This can also be used to select the reference signal source that should be used. \r
+ */\r
+\r
+ AUDIO_OUTPUT_PROP=36, /**< this can be used to route PCM audio data onto respective audio output connectors. */\r
+ AUDIO_CHANNEL_ROUTING=AUDIO_OUTPUT_PROP,\r
+ AUDIO_INPUT_PROP=37,/**< Use this property to select audio input source that should be used when doing \r
+ an audio capture.\r
+ Possible values this property can accept is defined in the enumerator Blue_Audio_Connector_Type.\r
+ */\r
+ VIDEO_ENABLE_LETTERBOX=38,\r
+ VIDEO_DUALLINK_OUTPUT_INVERT_KEY_COLOR=39,/**< this property is deprecated and no longer supported on epoch/create range of cards.*/\r
+ VIDEO_DUALLINK_OUTPUT_DEFAULT_KEY_COLOR=40,/**< this property is deprecated and no longer supported on epoch/create range of cards.*/\r
+ VIDEO_BLACKGENERATOR=41, /**< Use this property to control the black generator on the video output channel.\r
+ */\r
+ VIDEO_INPUTFRAMESTORE_IMAGE_ORIENTATION=42,\r
+ VIDEO_INPUT_SOURCE_SELECTION=43, /**< The video input source that should be used by the SDK default video input channel \r
+ can be configured using this property. \r
+ Possible values this property can accept is defined in the enumerator EBlueConnectorIdentifier.\r
+ */\r
+ DEFAULT_VIDEO_OUTPUT_CHANNEL=44,\r
+ DEFAULT_VIDEO_INPUT_CHANNEL=45,\r
+ VIDEO_REFERENCE_SIGNAL_TIMING=46,\r
+ EMBEDEDDED_AUDIO_OUTPUT=47, /**< the embedded audio output property can be configured using this property.\r
+ Possible values this property can accept is defined in the enumerator EBlueEmbAudioOutput.\r
+ */\r
+ EMBEDDED_AUDIO_OUTPUT=EMBEDEDDED_AUDIO_OUTPUT,\r
+ VIDEO_PLAYBACK_FIFO_FREE_STATUS=48, /**< this will return the number of free buffer in the fifo. \r
+ If the video engine is framestore this will give you the number of buffers that the framestore mode \r
+ can you use with that video output channel.*/\r
+ VIDEO_IMAGE_WIDTH=49, /**< only for selective DMA of a smaller image onto video output raster; size in bytes (not pixels) */\r
+ VIDEO_IMAGE_HEIGHT=50, /**< only for selective DMA of a smaller image onto video output raster; number of lines */\r
+ VIDEO_SCALER_MODE=51,\r
+ AVAIL_AUDIO_INPUT_SAMPLE_COUNT=52,\r
+ VIDEO_PLAYBACK_FIFO_ENGINE_STATUS=53, /**< this will return the playback fifo status. The values returned by this property \r
+ are defined in the enumerator BlueVideoFifoStatus.\r
+ */ \r
+ VIDEO_CAPTURE_FIFO_ENGINE_STATUS=54, /**< this will return the capture fifo status. \r
+ The values returned by this property are defined in the enumerator BlueVideoFifoStatus.\r
+ */\r
+ VIDEO_2K_1556_PANSCAN=55,/**< this property is deprecated and no longer supported on epoch/create range of cards.*/\r
+ VIDEO_OUTPUT_ENGINE=56, /**< Use this property to set the video engine of the video output channels.\r
+ Possible values this property can accept is defined in the enumerator EEngineMode \r
+ */\r
+ VIDEO_INPUT_ENGINE=57, /**< Use this property to set the video engine of the video input channels.\r
+ Possible values this property can accept is defined in the enumerator EEngineMode \r
+ */\r
+ BYPASS_RELAY_A_ENABLE=58, /**< use this property to control the bypass relay on SDI A output.*/\r
+ BYPASS_RELAY_B_ENABLE=59, /**< use this property to control the bypass relay on SDI B output.*/\r
+ VIDEO_PREMULTIPLIER=60,\r
+ VIDEO_PLAYBACK_START_TRIGGER_POINT=61, /**< Using this property you can instruct the driver to start the \r
+ video playback fifo on a particular video output field count.\r
+ Normally video playback fifo is started on the next video interrupt after \r
+ the video_playback_start call.*/\r
+ GENLOCK_TIMING=62,\r
+ VIDEO_IMAGE_PITCH=63,\r
+ VIDEO_IMAGE_OFFSET=64,\r
+ VIDEO_INPUT_IMAGE_WIDTH=65,\r
+ VIDEO_INPUT_IMAGE_HEIGHT=66,\r
+ VIDEO_INPUT_IMAGE_PITCH=67,\r
+ VIDEO_INPUT_IMAGE_OFFSET=68,\r
+ TIMECODE_RP188=69, /**< this property is deprecated and no longer supported on epoch/create range of cards.*/\r
+ BOARD_TEMPERATURE=70,/**<This property can be used to retreive the Board temperature, core temperature and \r
+ RPM of the Fan on epoch/create range of cards.<br/>\r
+ Use the macro's EPOCH_CORE_TEMP ,EPOCH_BOARD_TEMP and EPOCH_FAN_SPEED\r
                                                        to retrieve the respective values from the property.<br/> 
+ */ \r
+ MR2_ROUTING=71, /**< Use this property to control the MR2 functionlity on epoch range of cards.\r
+ Use the following macro with this property.<br/>\r
+ 1) EPOCH_SET_ROUTING --> for setting the source, destination and link type of the routing connection,<br/>\r
+ 2) EPOCH_ROUTING_GET_SRC_DATA --> for getting the routing source.<br/>\r
+ The possible source and destination elements supported by the routing matrix are defined in the \r
+ enumerator EEpochRoutingElements.<br/>\r
+ */\r
+ SAVEAS_POWERUP_SETTINGS=72,\r
+ VIDEO_CAPTURE_AVAIL_BUFFER_COUNT=73, /**< This property will return the number of captured frame avail in the fifo at present.\r
+ If the video engine is framestore this will give you the number of buffers that the framestore mode \r
+ can you use with that video input channel */\r
+ EPOCH_APP_WATCHDOG_TIMER=74,/**< Use this property to control the application watchdog timer functionality. \r
+ Possible values this property can accept is defined in the enumerator enum_blue_app_watchdog_timer_prop.\r
+ */ \r
+ EPOCH_RESET_VIDEO_INPUT_FIELDCOUNT=75, /**< Use this property to reset the field count on both the \r
+ video channels of the card. You can pass the value that \r
+ should be used as starting fieldcount after the reset.\r
+ This property can be used to keep track sync between left and right signal \r
+ when you are capturing in stereoscopic mode.\r
+ */\r
+ EPOCH_RS422_PORT_FLAGS=76,/**< Use this property to set the master/slave property of the RS422 ports.\r
+ Possible values this property can accept is defined in the enumerator enum_blue_rs422_port_flags.\r
+ */\r
+ EPOCH_DVB_ASI_INPUT_TIMEOUT=77, /**< Current DVB ASI input firmware does not support this property in hardware,\r
+ this is a future addition.\r
+ Use this property to set the timeout of the DVB ASI input stream. \r
+ timeout is specified in milliseconds.If hardware did not get the required no of \r
+ packets( specified using EPOCH_DVB_ASI_INPUT_LATENCY_PACKET_COUNT)\r
+ within the period specified in the timeout, hardware would generate a video input interrupt\r
+ and it would be safe to read the dvb asi packet from the card.\r
+ */\r
+ EPOCH_DVB_ASI_INPUT_PACKING_FORMAT=78, /**< Use this property to specify the packing method that should be used \r
+ when capturing DVB ASI packets.\r
+ The possible packing methods are defined in the enumerator enum_blue_dvb_asi_packing_format.*/\r
+ EPOCH_DVB_ASI_INPUT_LATENCY_PACKET_COUNT=79, /**< Use this property to set how many asi packets should be captured by the card , before it \r
+ notifies the driver of available data using video input interrupt.<br/>\r
+ */\r
+ VIDEO_PLAYBACK_FIFO_CURRENT_FRAME_UNIQUEID=80, /**< This property can be used to query the current unique id of \r
+ the frame that is being displayed currently by the video output channel. This \r
+ property is only usefull in the context of video fifo.<br/>\r
+ You get a uniqueid when you present a frame using video_playback_present function.\r
+ Alternative ways to get this information are <br/>\r
+ 1) using blue_wait_video_sync_async , the member current_display_frame_uniqueid contains the same information<br/>\r
+ 2) using wait_video_output_sync function on epoch cards, if \r
+ the flag UPD_FMT_FLAG_RETURN_CURRENT_UNIQUEID is appended with \r
+ either UPD_FMT_FRAME or UPD_FMT_FIELD , the return value of \r
+ the function wait_video_output_sync woukd contain the current display\r
+ frames uniqueid.<br/>*/\r
+\r
+ EPOCH_DVB_ASI_INPUT_GET_PACKET_SIZE = 81,/**< use this property to get the size of each asi transport stream packet\r
+ (whether it is 188 or 204.*/\r
+ EPOCH_DVB_ASI_INPUT_PACKET_COUNT = 82,/**< this property would give you the number of packets captured during the last \r
+ interrupt time frame. For ASI interrupt is generated if \r
+ hardware captured the requested number of packets or it hit the \r
+ timeout value\r
+ */\r
+ EPOCH_DVB_ASI_INPUT_LIVE_PACKET_COUNT = 83,/**< this property would give you the number of packets that\r
+ is being captured during the current interrupt time frame. \r
+ For ASI interrupt is generated when has hardware captured the \r
+ requested number of packets specified using \r
+ EPOCH_DVB_ASI_INPUT_LATENCY_PACKET_COUNT property.\r
+ */\r
+ EPOCH_DVB_ASI_INPUT_AVAIL_PACKETS_IN_FIFO = 84,/**< This property would return the number of ASI packets \r
+ that has been captured into card memory , that\r
+ can be retreived.\r
+ This property is only valid when the video input \r
+ channel is being used in FIFO modes.\r
+ */\r
+ EPOCH_ROUTING_SOURCE_VIDEO_MODE=VIDEO_SCALER_MODE,/**< Use this property to change the video mode that scaler should be set to.\r
+ USe the macro SET_EPOCH_SCALER_MODE when using this property, as this macro \r
+ would allow you to select which one of the scaler blocks video mode should be updated.\r
+ */\r
+ EPOCH_AVAIL_VIDEO_SCALER_COUNT=85,/**< This property would return available scaler processing block available on the card.*/\r
+ EPOCH_ENUM_AVAIL_VIDEO_SCALERS_ID=86,/**< You can enumerate the available scaler processing block available on the card using this property.\r
+ You pass in the index value as input parameter to get the scaler id that should be used.\r
+ Applications are recommended to use this property to query the available scaler id's \r
+ rather than hardcoding a scaler id. As the scaler id's that you can use would vary based on\r
+ whether you have VPS0 or VPS1 boards loaded on the base board.\r
+ */\r
+ EPOCH_ALLOCATE_VIDEO_SCALER=87, /**< This is just a helper property for applications who need to use more than one scaler\r
+ and just wants to query the next available scaler from the driver pool, rather than hardcoding \r
+ each thread to use a particular scaler.\r
+ Allocate a free scaler from the available scaler pool.\r
+ User has got the option to specify whether they want to use the scaler for \r
+ use with a single link or dual link stream */\r
+ EPOCH_RELEASE_VIDEO_SCALER=88, /**< Release the previously allocated scaler processing block back to the free pool.\r
+ If the user passes in a value of 0, all the allocated scaler blocks in the driver are released.\r
+ So effectively\r
+ */\r
+ EPOCH_DMA_CARDMEMORY_PITCH=89,\r
+ EPOCH_OUTPUT_CHANNEL_AV_OFFSET=90,\r
+ EPOCH_SCALER_CHANNEL_MUX_MODE=91,\r
+ EPOCH_INPUT_CHANNEL_AV_OFFSET=92,\r
+ EPOCH_AUDIOOUTPUT_MANUAL_UCZV_GENERATION=93,/* ASI firmware only */\r
+ EPOCH_SAMPLE_RATE_CONVERTER_BYPASS=94,\r
+ EPOCH_GET_PRODUCT_ID=95, /* returns the enum for the firmware type EEpochFirmwareProductID */\r
+ EPOCH_GENLOCK_IS_LOCKED=96,\r
+ EPOCH_DVB_ASI_OUTPUT_PACKET_COUNT=97, /* ASI firmware only */\r
+ EPOCH_DVB_ASI_OUTPUT_BIT_RATE=98, /* ASI firmware only */\r
+ EPOCH_DVB_ASI_DUPLICATE_OUTPUT_A=99, /* ASI firmware only */\r
+ EPOCH_DVB_ASI_DUPLICATE_OUTPUT_B=100, /* ASI firmware only */\r
+ EPOCH_SCALER_HORIZONTAL_FLIP=101, /* see SideBySide_3D sample application */\r
+ EPOCH_CONNECTOR_DIRECTION=102, /* see application notes */\r
+ EPOCH_AUDIOOUTPUT_VALIDITY_BITS=103, /* ASI firmware only */\r
+ EPOCH_SIZEOF_DRIVER_ALLOCATED_MEMORY=104, /* video buffer allocated in Kernel space; accessible in userland via system_buffer_map() */\r
+ INVALID_VIDEO_MODE_FLAG=105, /* returns the enum for VID_FMT_INVALID that this SDK/Driver was compiled with;\r
+ it changed between 5.9.x.x and 5.10.x.x driver branch and has to be handled differently for\r
+ each driver if the application wants to use the VID_FMT_INVALID flag and support both driver branches */\r
+ EPOCH_VIDEO_INPUT_VPID=106, /* returns the VPID for the current video input signal */\r
+ EPOCH_LOW_LATENCY_DMA=107, /* not fully supported yet */\r
+ EPOCH_VIDEO_INPUT_RGB_DATA_RANGE=108,\r
+\r
+ VIDEO_CARDPROPERTY_INVALID=1000\r
+}EBlueCardProperty;\r
+\r
+\r
/**< @brief Signal types selectable on the analog video output connector.
     Bit values, set via the ANALOG_VIDEO_OUTPUT_SIGNAL_TYPE property. */
typedef enum _EAnalogConnectorSignalType
{
	ANALOG_OUTPUTSIGNAL_CVBS_Y_C=1,		/**< composite + Y/C (S-Video) output */
	ANALOG_OUTPUTSIGNAL_COMPONENT=2,	/**< component (YUV) output */
	ANALOG_OUTPUTSIGNAL_RGB=4			/**< analog RGB output */
}EAnalogConnectorSignalType;
+\r
+/**<\r
+@brief Use this enumerator to set the analog video signal types and connectors.\r
+*/\r
+typedef enum _EAnalogInputConnectorType \r
+{\r
+/* Composite input */\r
+ ANALOG_VIDEO_INPUT_CVBS_AIN1=0x00, /**<only available on Mini COAX */\r
+ ANALOG_VIDEO_INPUT_CVBS_AIN2=0x01, /**<available on both Mini COAX and Mini DIN*/\r
+ ANALOG_VIDEO_INPUT_CVBS_AIN3=0x02, /**<available on both Mini COAX and Mini DIN*/\r
+ ANALOG_VIDEO_INPUT_CVBS_AIN4=0x03, /**<only available on Mini COAX */\r
+ ANALOG_VIDEO_INPUT_CVBS_AIN5=0x04, /**<only available on Mini COAX */\r
+ ANALOG_VIDEO_INPUT_CVBS_AIN6=0x05, /**<available on both Mini COAX and Mini DIN */\r
+\r
+/*svideo input*/\r
+//Y_C is a synonym for svideo\r
+ ANALOG_VIDEO_INPUT_Y_C_AIN1_AIN4=0x06, /**<only available on Mini COAX*/\r
+ ANALOG_VIDEO_INPUT_Y_C_AIN2_AIN5=0x07, /**<only available on Mini COAX*/\r
+ ANALOG_VIDEO_INPUT_Y_C_AIN3_AIN6=0x08, /**<available on both Mini COAX and Mini DIN*/\r
+\r
+/*YUV input*/\r
+ ANALOG_VIDEO_INPUT_YUV_AIN1_AIN4_AIN5=0x09, /**<only available on Mini COAX*/\r
+ ANALOG_VIDEO_INPUT_YUV_AIN2_AIN3_AIN6=0x0a, /**<available on both Mini COAX and Mini DIN*/\r
+ ANALOG_VIDEO_INPUT_USE_SDI_A=0x6F, \r
+ ANALOG_VIDEO_INPUT_USE_SDI=0x7F,\r
+ GENERIC_ANALOG_VIDEO_SOURCE=0x8F,\r
+ ANALOG_VIDEO_INPUT_USE_SDI_B=ANALOG_VIDEO_INPUT_USE_SDI\r
+}EAnalogInputConnectorType;\r
+\r
+\r
/* Property selectors for the analog card input/output processing stages.
   NOTE: values are implicit (0, 1, 2, ...) — member order is part of the ABI; do not reorder. */
typedef enum {
	ANALOG_VIDEO_INPUT_CONNECTOR,//EAnalogInputConnectorType
	ANALOG_VIDEO_INPUT_PED,
	ANALOG_VIDEO_INPUT_BRIGHTNESS,
	ANALOG_VIDEO_INPUT_HUE,
	ANALOG_VIDEO_INPUT_LUMA_GAIN,
	ANALOG_VIDEO_INPUT_CHROMA_GAIN,
	ANALOG_VIDEO_INPUT_AUTO_GAIN,
	ANALOG_VIDEO_INPUT_LOAD_DEFAULT_SETTING, 
	ANALOG_VIDEO_OUTPUT_PED,
	ANALOG_VIDEO_OUTPUT_BRIGHTNESS,
	ANALOG_VIDEO_OUTPUT_HUE,
	ANALOG_VIDEO_OUTPUT_LUMA_GAIN,
	ANALOG_VIDEO_OUTPUT_CHROMA_GAIN, 
	ANALOG_VIDEO_OUTPUT_SHARPNESS,
	ANALOG_VIDEO_OUTPUT_AUTO_GAIN,
	ANALOG_VIDEO_OUTPUT_LOAD_DEFAULT_SETTING,
	ANALOG_VIDEO_OUTPUT_SIGNAL_TYPE,//_EAnalogConnectorSignalType
	ANALOG_LOAD_BLUEFISH_DEFAULT_SETTING,
	ANALOG_SET_AS_POWERUP_SETTINGS,
	ANALOG_LOAD_POWERUP_SETTINGS,
	ANALOG_CONNECTOR_STATUS
} AnalogCard_Property;
+\r
/* Snapshot of the analog card's input and output processing settings.
   Each field mirrors one AnalogCard_Property selector (see trailing comments). */
typedef struct {
	BLUE_INT32 inputConnector;		//ANALOG_VIDEO_INPUT_CONNECTOR, EAnalogInputConnectorType
	BLUE_INT32 inputPed;			//ANALOG_VIDEO_INPUT_PED,
	BLUE_INT32 inputBrightness;		//ANALOG_VIDEO_INPUT_BRIGHTNESS,
	BLUE_INT32 inputHue;			//ANALOG_VIDEO_INPUT_HUE,
	BLUE_INT32 inputLumaGain;		//ANALOG_VIDEO_INPUT_LUMA_GAIN,
	BLUE_INT32 inputChromaGain;		//ANALOG_VIDEO_INPUT_CHROMA_GAIN,
	BLUE_INT32 inputAutoGain;		//ANALOG_VIDEO_INPUT_AUTO_GAIN,
	BLUE_INT32 outputPed;			//ANALOG_VIDEO_OUTPUT_PED,
	BLUE_INT32 outputBrightness;	//ANALOG_VIDEO_OUTPUT_BRIGHTNESS,
	BLUE_INT32 outputHue;			//ANALOG_VIDEO_OUTPUT_HUE,
	BLUE_INT32 outputYGain;			//ANALOG_VIDEO_OUTPUT_Y_GAIN,
	BLUE_INT32 outputUGain;			//ANALOG_VIDEO_OUTPUT_U_GAIN,
	BLUE_INT32 outputVGain;			//ANALOG_VIDEO_OUTPUT_V_GAIN,
	BLUE_INT32 outputSharpness;		//ANALOG_VIDEO_OUTPUT_SHARPNESS,
	BLUE_INT32 outputAutoGain;		//ANALOG_VIDEO_OUTPUT_AUTO_GAIN,
	BLUE_INT32 outputSignalTypes;	//EAnalogConnectorSignalType
}AnalogCardState;
+\r
+//----------------------------------------------------------------------------------------------------\r
+/**< brief Used to determine how video interrupts are handled*/\r
+typedef enum _EEngineMode\r
+{\r
+ VIDEO_ENGINE_FRAMESTORE=0, /**< framestore engine. In this mode user is responsible for \r
+ schduling a capture or playback after waiting for the \r
+ respective video sync;s*/\r
+ VIDEO_ENGINE_PLAYBACK=1, /**< Playback engine. In this mode there is a driver FIFO, which \r
+ is reponisble for scheudling a frame for playback.\r
+ User just adds video frames into the fifo.*/\r
+ VIDEO_ENGINE_CAPTURE=2, /**< Capture engine In this mode there is a driver FIFO, which \r
+ is reponisble for scheudling a frame for capture.\r
+ User just retreives video frames from the FIFO.*/\r
+ VIDEO_ENGINE_PAGEFLIP=3, /**< not supported any more */\r
+ VIDEO_ENGINE_DUPLEX=4, /**< Full Duplex video. This is a FIFO mode. Use this mode if you want \r
+ to capture and playback at the same time.*/\r
+ VIDEO_ENGINE_INVALID\r
+} EEngineMode;\r
+\r
/**< Use this enumerator for controlling embedded audio output properties with the
     property EMBEDDED_AUDIO_OUTPUT. Values are bit flags and may be OR'ed together.
*/
typedef enum _EBlueEmbAudioOutput
{
	blue_emb_audio_enable=0x1,				// Switches off/on the whole HANC output from connectors associated with the channel
	blue_auto_aes_to_emb_audio_encoder=0x2,	// control whether the auto AES-to-embedded audio thread should be running or not.
	blue_emb_audio_group1_enable=0x4,	/**< enables group1(ch 0- 3) emb audio */
	blue_emb_audio_group2_enable=0x8,	/**< enables group2(ch 4- 7) emb audio */
	blue_emb_audio_group3_enable=0x10,	/**< enables group3(ch 8- 11) emb audio */
	blue_emb_audio_group4_enable=0x20,	/**< enables group4(ch 12- 16) emb audio */
	blue_enable_hanc_timestamp_pkt = 0x40	/**< enables insertion of HANC timestamp packets */
}EBlueEmbAudioOutput;
+\r
+\r
/**< Not used any more — retained only for source compatibility. */
typedef enum _EBufferTarget
{
	BUFFER_TARGET_VIDEO=0,		// Generic R/W DMA
	BUFFER_TARGET_AUDIO,		// Special processing required for audio
	BUFFER_TARGET_VIDEO_8BIT,	// Special R/W DMA utilising 8 bit aperture
	BUFFER_TARGET_VIDEO_HALF,	// Special R/W DMA every second line (currently unused)
	BUFFER_TARGET_VIDEO_OUT,	// Updates video out register on DMA completion for write 
	BUFFER_TARGET_INVALID		// sentinel, not a valid target
} EBufferTarget;
+\r
// Buffer data types; combined with a buffer id when building DMA buffer handles (see Blue_DMABuffer).
#define	BUFFER_TYPE_VIDEO		(0)
#define	BUFFER_TYPE_AUDIO		(1)
#define BUFFER_TYPE_VIDEO_8BIT	(2)	// use this when assigning a buffer to indicate DMA from aperture!
#define BUFFER_TYPE_VIDEO_OUT	(3)	// On DMA start set video output address to DMA target
#define BUFFER_TYPE_VIDEO_HALF	(4)	// DMA every second line...

// Buffer identifiers
#define	BUFFER_ID_AUDIO_IN		(0)
#define	BUFFER_ID_AUDIO_OUT		(1)
#define	BUFFER_ID_VIDEO0		(2)
#define	BUFFER_ID_VIDEO1		(3)
#define	BUFFER_ID_VIDEO2		(4)
#define	BUFFER_ID_VIDEO3		(5)

//#define BUFFER_ID_USER_BASE		(6)



// Border flags; presumably OR'ed with a buffer id to address a border region — usage not shown here.
#define VIDEO_BORDER_TOP		(0x10000000)
#define VIDEO_BORDER_BOTTOM		(0x20000000)
#define VIDEO_BORDER_LEFT		(0x40000000)
#define VIDEO_BORDER_RIGHT		(0x80000000)
+\r
/* Get/set container for a single analog card property (see AnalogCard_Property). */
typedef struct _AnalogCardPropStruct
{
	BLUE_UINT32 VideoChannel;	// video channel the property applies to
	BLUE_INT32  prop;			// property selector (AnalogCard_Property)
	BLUE_INT32  value;			// property value (in for set, out for get)
	BLUE_INT32  minValue;		// minimum accepted value (returned by driver)
	BLUE_INT32  maxValue;		// maximum accepted value (returned by driver)
	BLUE_INT32  bReadFlag;		// nonzero = read (get) operation, zero = write (set)
}AnalogCardPropStruct;
+\r
/* Signal sampling/format type carried on a connector. */
typedef enum _EConnectorSignalFormatType
{
	Signal_Type_4444 =1,	// 4:4:4:4 (dual link)
	Signal_Type_4224 =0,	// 4:2:2:4
	Signal_Type_422=2		// 4:2:2
}EConnectorSignalFormatType;
+\r
/* DMA transfer direction as seen from host memory.
   (Distinct from EBLUEDMA_DIRECTION declared earlier, which uses INVALID=3.) */
typedef enum _EDMADirection
{
	DMA_WRITE=0,	// host -> card
	DMA_READ=1,		// card -> host
	DMA_INVALID=2	// sentinel
}EDMADirection;	
+\r
+\r
/* Column selector for the color matrix (see blue_color_matrix_struct.MatrixColumn). */
typedef enum _MatrixColType
{
	COL_BLUE_PB=0,	// Blue / Pb column
	COL_RED_PR=1,	// Red / Pr column
	COL_GREEN_Y=2,	// Green / Y column
	COL_KEY=3		// Key (alpha) column
}MatrixColType;
+\r
+\r
+\r
+\r
/**< Bits defining supported features that can be used with VideoFeature_struct*/
#define VIDEO_CAPS_INPUT_SDI					(0x00000001)	/**< Capable of input of SDI Video */
#define VIDEO_CAPS_OUTPUT_SDI					(0x00000002)	/**< Capable of output of SDI Video */
#define VIDEO_CAPS_INPUT_COMP					(0x00000004)	/**< Capable of capturing Composite Video input */
#define VIDEO_CAPS_OUTPUT_COMP					(0x00000008)	/**< Capable of capturing Composite Video output */

#define VIDEO_CAPS_INPUT_YUV					(0x00000010)	/**< Capable of capturing Component Video input */
#define VIDEO_CAPS_OUTPUT_YUV					(0x00000020)	/**< Capable of capturing Component Video output */
#define VIDEO_CAPS_INPUT_SVIDEO					(0x00000040)	/**< Capable of capturing SVideo input */
#define VIDEO_CAPS_OUTPUT_SVIDEO				(0x00000080)	/**< Capable of capturing SVideo output */

#define VIDEO_CAPS_GENLOCK						(0x00000100)	/**< Able to adjust Vert & Horiz timing */
#define VIDEO_CAPS_VERTICAL_FLIP				(0x00000200)	/**< Able to flip rasterisation */
#define VIDEO_CAPS_KEY_OUTPUT					(0x00000400)	/**< Video keying output capable */
#define VIDEO_CAPS_4444_OUTPUT					(0x00000800)	/**< Capable of outputting 4444 (dual link) */

#define VIDEO_CAPS_DUALLINK_INPUT				(0x00001000)	/**< Dual Link input */
#define VIDEO_CAPS_INTERNAL_KEYER				(0x00002000)	/**< Has got an internal Keyer */
#define VIDEO_CAPS_RGB_COLORSPACE_SDI_CONN		(0x00004000)	/**< Support RGB colorspace in on an SDI connector */
#define VIDEO_CAPS_HAS_PILLOR_BOX				(0x00008000)	/**< Has got support for pillar box */

#define VIDEO_CAPS_OUTPUT_RGB					(0x00010000)	/**< Has Analog RGB output connector */
#define VIDEO_CAPS_SCALED_RGB					(0x00020000)	/**< Can scale RGB colour space */
#define AUDIO_CAPS_PLAYBACK 					(0x00040000)	/**< Has got audio output */
#define AUDIO_CAPS_CAPTURE						(0x00080000)	/**< Has got audio capture */

#define VIDEO_CAPS_DOWNCONVERTER				(0x00100000)	/**< Has a downconverter */
#define VIDEO_CAPS_DUALOUTPUT_422_IND_STREAM	(0x00200000)	/**< Specifies whether the card supports Dual Independent 422 output streams */
#define VIDEO_CAPS_DUALINPUT_422_IND_STREAM		(0x00400000)	/**< Specifies whether the card supports Dual Independent 422 input streams */

#define VIDEO_CAPS_VBI_OUTPUT					(0x00800000)	/**< Specifies whether the card supports VBI output */
/* NOTE(review): VIDEO_CAPS_VBI_INPUT shares bit 0x04000000 with VIDEO_CAPS_HANC_INPUT below,
   while 0x01000000 is unused — looks like VBI_INPUT was intended to be 0x01000000 (compare the
   legacy commented-out block at the bottom). Confirm against the driver before changing; the
   value must match what the driver reports in VideoFeature_struct.Feature. */
#define VIDEO_CAPS_VBI_INPUT					(0x04000000)	/**< Specifies whether the card supports VBI input */	

#define VIDEO_CAPS_HANC_OUTPUT					(0x02000000)	/**< Specifies whether the card supports HANC output */
#define VIDEO_CAPS_HANC_INPUT					(0x04000000)	/**< Specifies whether the card supports HANC input */

#define VIDEO_CAPS_FOUND_VPS0					VIDEO_CAPS_DOWNCONVERTER	/**< specifies whether the VPS0 scaler board was found on the card */
#define VIDEO_CAPS_FOUND_VPS1					(0x10000000)				/**< specifies whether the VPS1 scaler board was found on the card */
#define VIDEO_CAPS_FOUND_VPIO					(0x20000000)				/**< specifies whether the VPIO(DVI daughter board)board was found on the card */

/* legacy bit assignments, kept for reference:
#define VIDEO_CAPS_DUALOUTPUT_422_IND_STREAM	(0x00100000)	// Specifies whether the card supports Dual Independent 422 output streams
#define VIDEO_CAPS_DUALINPUT_422_IND_STREAM		(0x00200000)	// Specifies whether the card supports Dual Independent 422 input streams

#define VIDEO_CAPS_VBI_OUTPUT					(0x00400000)
#define VIDEO_CAPS_VBI_INPUT					(0x00800000)

#define VIDEO_CAPS_HANC_OUTPUT					(0x01000000)
#define VIDEO_CAPS_HANC_INPUT					(0x02000000)
*/
+\r
/* A DMA buffer handle packs three fields into one word:
     bits  0-11: buffer id
     bits 12-15: card buffer type (EDMACardBufferType)
     bits 16-19: data type (EDMADataType)
     bits 20-23: flags
   Fix: all macro parameters are now fully parenthesized (CERT PRE01-C) so that
   expression arguments such as (a | b) expand with the intended precedence. */
#define BLUE_CARD_BUFFER_TYPE_OFFSET		(12)
#define BLUE_DMA_DATA_TYPE_OFFSET			(16)
#define BLUE_DMA_FLAGS_OFFSET				(20)
#define GetDMACardBufferId(value)			((value) & 0xFFF)
#define GetCardBufferType(value)			(((value) & 0xF000) >> BLUE_CARD_BUFFER_TYPE_OFFSET)
#define GetDMADataType(value)				(((value) & 0xF0000) >> BLUE_DMA_DATA_TYPE_OFFSET)
#define GetDMAFlags(value)					(((value) & 0xF00000) >> (BLUE_DMA_FLAGS_OFFSET))

/* Build a DMA buffer handle from explicit type/id/data-type fields. */
#define Blue_DMABuffer(CardBufferType,BufferId,DataType)	( (((ULONG)(DataType)&0xF)<<(ULONG)BLUE_DMA_DATA_TYPE_OFFSET)| \
															( ((ULONG)(CardBufferType))<<(ULONG)BLUE_CARD_BUFFER_TYPE_OFFSET) | \
															( ((ULONG)(BufferId)&0xFFF)) |0)

/* Convenience builders for the common card buffer types. */
#define BlueImage_VBI_DMABuffer(BufferId,DataType)		( (((ULONG)(DataType)&0xF)<<(ULONG)BLUE_DMA_DATA_TYPE_OFFSET)| \
														( BLUE_CARDBUFFER_IMAGE_VBI<<(ULONG)BLUE_CARD_BUFFER_TYPE_OFFSET) | \
														( ((ULONG)(BufferId)&0xFFF)) |0)

#define BlueImage_DMABuffer(BufferId,DataType)			( (((ULONG)(DataType)&0xF)<<(ULONG)BLUE_DMA_DATA_TYPE_OFFSET)| \
														( BLUE_CARDBUFFER_IMAGE<<(ULONG)BLUE_CARD_BUFFER_TYPE_OFFSET) | \
														( ((ULONG)(BufferId)&0xFFF)) |0)

#define BlueImage_VBI_HANC_DMABuffer(BufferId,DataType)	( (((ULONG)(DataType)&0xF)<<(ULONG)BLUE_DMA_DATA_TYPE_OFFSET)| \
														( BLUE_CARDBUFFER_IMAGE_VBI_HANC<<(ULONG)BLUE_CARD_BUFFER_TYPE_OFFSET) | \
														( ((ULONG)(BufferId)&0xFFF)) |0)

#define BlueImage_HANC_DMABuffer(BufferId,DataType)		( (((ULONG)(DataType)&0xF)<<(ULONG)BLUE_DMA_DATA_TYPE_OFFSET)| \
														( BLUE_CARDBUFFER_IMAGE_HANC<<(ULONG)BLUE_CARD_BUFFER_TYPE_OFFSET) | \
														( ((ULONG)(BufferId)&0xFFF)) |0)


/* Builders without a data-type field (type + id only). */
#define BlueBuffer(CardBufferType,BufferId)	(((CardBufferType)<<BLUE_CARD_BUFFER_TYPE_OFFSET)|(((BufferId)&0xFFF))|0)
#define BlueBuffer_Image_VBI(BufferId)		(((BLUE_CARDBUFFER_IMAGE_VBI)<<BLUE_CARD_BUFFER_TYPE_OFFSET)|(((BufferId)&0xFFF))|0)
#define BlueBuffer_Image(BufferId)			(((BLUE_CARDBUFFER_IMAGE)<<BLUE_CARD_BUFFER_TYPE_OFFSET)|(((BufferId)&0xFFF))|0)
#define BlueBuffer_Image_VBI_HANC(BufferId)	(((BLUE_CARDBUFFER_IMAGE_VBI_HANC)<<BLUE_CARD_BUFFER_TYPE_OFFSET)|(((BufferId)&0xFFF))|0)
#define BlueBuffer_Image_HANC(BufferId)		(((BLUE_CARDBUFFER_IMAGE_HANC)<<BLUE_CARD_BUFFER_TYPE_OFFSET)|(((BufferId)&0xFFF))|0)
#define BlueBuffer_HANC(BufferId)			(((BLUE_CARDBUFFER_HANC)<<BLUE_CARD_BUFFER_TYPE_OFFSET)|(((BufferId)&0xFFF))|0)
+\r
#define BYPASS_RELAY_A				(0x00000001) // enable bypass relay channel a when loading driver , only used in linux 
#define BYPASS_RELAY_B				(0x00000002) // enable bypass relay channel b when loading driver , only used in linux 
/* Card buffer types used in the card-buffer-type field of a DMA buffer handle. */
typedef enum _EDMACardBufferType
{
	BLUE_CARDBUFFER_IMAGE=0,
	BLUE_CARDBUFFER_IMAGE_VBI_HANC=1,
	BLUE_CARDBUFFER_IMAGE_VBI=2,
	BLUE_CARDBUFFER_AUDIO_OUT=3,
	BLUE_CARDBUFFER_AUDIO_IN=4,
	BLUE_CARDBUFFER_HANC=5,
	BLUE_CARDBUFFER_IMAGE_HANC=6,
	/* NOTE(review): INVALID shares value 6 with BLUE_CARDBUFFER_IMAGE_HANC — the sentinel
	   was probably meant to be 7. Kept as-is for binary compatibility; confirm before fixing. */
	BLUE_CARDBUFFER_INVALID=6
}EDMACardBufferType;
+\r
/* Data types used in the data-type field of a DMA buffer handle. */
typedef enum _EDMADataType
{
	BLUE_DATA_FRAME=0,		// whole frame
	BLUE_DATA_IMAGE=0,		// alias of BLUE_DATA_FRAME
	BLUE_DATA_FIELD1=1,		// field 1 only
	BLUE_DATA_FIELD2=2,		// field 2 only
	BLUE_DATA_VBI=3,		// VBI data
	BLUE_DATA_HANC=4,		// HANC (horizontal ancillary) data
	BLUE_DATA_AUDIO_IN=5,
	BLUE_DATA_AUDIO_OUT=6,
	BLUE_DATA_FRAME_RDOM=7,
	BLUE_DATA_FRAME_STEREO_LEFT=BLUE_DATA_FRAME,	// stereoscopic left eye = plain frame
	BLUE_DATA_FRAME_STEREO_RIGHT=8,					// stereoscopic right eye
	BLUE_DMADATA_INVALID=9	// sentinel
}EDMADataType;
+\r
/* Auxiliary per-frame video information. lInfoType is a bitmask of
   EBlueVideoAuxInfoType values indicating which fields below are valid. */
typedef struct _AUXILLARY_VIDEO_INFO
{
	BLUE_UINT32 video_channel_id;	// channel this info belongs to
	BLUE_UINT32 lVideoMode;			// video mode (valid with BLUE_VIDEO_AUX_VIDFMT_CHANGE)
	BLUE_UINT32 lUniqueId;			// unique id of the frame
	BLUE_UINT32 lInfoType;			// bitmask of EBlueVideoAuxInfoType
	BLUE_UINT32 lMemFmt;			// memory format (valid with BLUE_VIDEO_AUX_MEMFMT_CHANGE)
	BLUE_UINT32 lGpio;				// GPIO state (valid with BLUE_VIDEO_AUX_UPDATE_GPIO)
	BLUE_UINT64 lLTC;				// LTC timecode (valid with BLUE_VIDEO_AUX_UPDATE_LTC)
}Auxillary_Video_Info;
+\r
+\r
/* Bit flags describing which members of Auxillary_Video_Info are valid.
   Limited to a maximum of 4 bits. */
typedef enum _EBlueVideoAuxInfoType
{
	BLUE_VIDEO_AUX_MEMFMT_CHANGE=1,	// lMemFmt valid / memory format changed
	BLUE_VIDEO_AUX_UPDATE_LTC=2,	// lLTC valid
	BLUE_VIDEO_AUX_UPDATE_GPIO=4,	// lGpio valid
	BLUE_VIDEO_AUX_VIDFMT_CHANGE=8,	// lVideoMode valid / video format changed

}EBlueVideoAuxInfoType;
// Max of 4 bits 
+\r
/* Audio routing helpers.
   Layout of the audio-output routing word built by SET_AUDIO_OUTPUT_ROUTINGCHANNEL:
     bit  31   : valid flag
     bits 29-30: output type
     bits 23-28: output channel id
     bits 16-22: source channel id
   Fixes: (1<<31) was undefined behavior (left-shifting into the sign bit of a signed
   int, ISO C 6.5.7) — now 1U<<31; all macro parameters are parenthesized (CERT PRE01-C). */
#define GET_ANALOG_AUDIO_LEFT_ROUTINGCHANNEL(value)		((value)&0xFF)
#define GET_ANALOG_AUDIO_RIGHT_ROUTINGCHANNEL(value)	(((value)&0xFF00)>>8)
#define SET_ANALOG_AUDIO_ROUTINGCHANNEL(left,right)		((((right) & 0xFF)<<8)|((left) & 0xFF))
#define SET_AUDIO_OUTPUT_ROUTINGCHANNEL(output_type,src_channel_id,_output_channel_id) ((1U<<31)|(((output_type)&3)<<29)|(((src_channel_id) & 0x7F)<<16)|(((_output_channel_id) &0x3f)<<23))
#define GET_AUDIO_OUTPUT_SRC_CHANNEL_ROUTING(value)		(((value)>>16) & 0x7F)
#define GET_AUDIO_OUTPUT_CHANNEL_ROUTING(value)			(((value)>>23) & 0x3F)
#define GET_AUDIO_OUTPUT_TYPE_ROUTING(value)			(((value) & 0x60000000)>>29)

#define AUDIO_INPUT_SOURCE_SELECT_FLAG					(1<<16)	
#define AUDIO_INPUT_SOURCE_SELECT(SynchCount,AudioInputSource)	(AUDIO_INPUT_SOURCE_SELECT_FLAG|(SynchCount)|((AudioInputSource)<<17))
+\r
/* Describes how a video channel is routed to physical connectors. */
struct blue_video_connection_routing_struct
{
	BLUE_UINT32 video_channel;		// video channel being routed
	BLUE_UINT32 duallink_flag;		// nonzero when the channel uses dual-link (two connectors)
	BLUE_UINT32 link1_connector;	// connector carrying link 1
	BLUE_UINT32 link2_connector;	// connector carrying link 2 (dual-link only)
};
+\r
#pragma pack(push, video_sync_struct, 1)
/* Parameter block for video sync waits (packed, fixed layout shared with the driver). */
typedef struct _blue_video_sync_struct
{
	BLUE_UINT32 sync_wait_type;					// field or frame
	BLUE_UINT32 video_channel;					// which video channel interrupt should the interrupt wait for 
	BLUE_UINT32 timeout_video_msc;				// if the current video msc is equal to this one insert it into the queue.
	BLUE_UINT32 video_msc;						// current video msc
	BLUE_UINT32 current_display_frame_id;		// would give you the current frame id which is being displayed
	BLUE_UINT32 current_display_frame_uniqueid;	// would give you the unique id associated with current frame id which is being displayed
												// this is only valid when using fifo modes.
	BLUE_UINT8	pad[24];						// reserved; keeps the structure size fixed for the driver interface
}blue_video_sync_struct;
#pragma pack(pop,video_sync_struct)
+\r
+\r
/* Identifies one of the per-component 1D lookup tables (main and auxiliary paths). */
typedef enum _EBlueLUTType
{
	BLUE_MAIN_LUT_B_Pb=0,	// main path, Blue/Pb component
	BLUE_MAIN_LUT_G_Y=1,	// main path, Green/Y component
	BLUE_MAIN_LUT_R_Pr=2,	// main path, Red/Pr component
	BLUE_AUX_LUT_B_Pb=3,	// auxiliary path, Blue/Pb component
	BLUE_AUX_LUT_G_Y=4,		// auxiliary path, Green/Y component
	BLUE_AUX_LUT_R_Pr=5,	// auxiliary path, Red/Pr component
}EBlueLUTType;
+\r
#pragma pack(push, video_frame, 1)
/* Describes a card's identity, location and feature set (packed driver interface). */
struct VideoFeature_struct 
{
	BLUE_UINT32	Type;				// Bluefish card type
	BLUE_UINT32	CardSubType;		// card sub-type
	BLUE_UINT32	Bus;				// Which PIC bus (bridge) it is on
	BLUE_UINT32	Slot;				// Which slot card is plugged into
	BLUE_UINT32	Feature;			// Look at the VIDEO_CAPS_* / AUDIO_CAPS_* bit definitions to know what each bit means
	BLUE_UINT32	FirmwareVersion;	// firmware revision loaded on the card
};
+\r
/* Basic per-captured-frame information (see blue_videoframe_info_ex for the extended form). */
struct blue_videoframe_info
{
	BLUE_UINT64 ltcTimeCode;			// LTC timecode of the frame
	unsigned long videochannel;			// channel the frame was captured from
	unsigned long BufferId;				// buffer holding the captured frame
	unsigned long Count;				// total captured frames
	unsigned long DroppedFrameCount;	// dropped frame count
};
+\r
/* Extended per-captured-frame information, including DVB-ASI packet details. */
struct blue_videoframe_info_ex
{
	BLUE_UINT64 ltcTimeCode;			//LTC timecode
	unsigned long videochannel;			//the channel this frame was captured from
	long BufferId;						//this buffer contains the captured frame
	unsigned long Count;				//total captured frames
	unsigned long DroppedFrameCount;	//dropped frame count
	unsigned long nFrameTimeStamp;		//field count the frame was captured at
	unsigned long nVideoSignalType;		//video mode of this frame
	unsigned int nASIPktCount;			//only for DVB-ASI; how many ASI packets are in this frame
	unsigned int nASIPktSize;			//only for DVB-ASI; how many bytes per packet
	unsigned int nAudioValidityBits;	//part of the channels status block for audio
	unsigned char pad[20];				//not used
};
+\r
/* Upload descriptor for a 1D lookup table (see EBlueLUTType for nLUTId values). */
struct blue_1d_lookup_table_struct
{
	BLUE_UINT32 nVideoChannel;		// video channel the LUT applies to
	BLUE_UINT32  nLUTId;			// which LUT to load (EBlueLUTType)
	BLUE_UINT16 * pLUTData;			// caller-owned array of LUT entries
	BLUE_UINT32 nLUTElementCount;	// number of elements in pLUTData
	BLUE_UINT8 pad[256];			// reserved; keeps the structure size fixed for the driver interface
};
#pragma pack(pop, video_frame)
+\r
#pragma pack(push, blue_dma_request, 1)
/* Describes a single DMA transfer between host memory and a card buffer (packed driver interface). */
struct blue_dma_request_struct
{
	unsigned char * pBuffer;			// host buffer to transfer to/from
	BLUE_UINT32 video_channel;			// video channel the transfer belongs to
	BLUE_UINT32	BufferId;				// card buffer id (see BUFFER_ID_* / BlueBuffer macros)
	unsigned int BufferDataType;		// data type within the buffer (EDMADataType)
	unsigned int FrameType;				// frame/field selector
	unsigned int BufferSize;			// size of pBuffer in bytes
	unsigned int Offset;				// byte offset into the card buffer
	unsigned long BytesTransferred;		// out: bytes actually transferred
	unsigned char pad[64];				// reserved; keeps the structure size fixed for the driver interface
};
+\r
/* Bit flags for SerialPort_struct.bFlag selecting the RS422 serial port operation. */
enum SerialPort_struct_flags
{
	SerialPort_Read=1,							// read from the port
	SerialPort_Write=2,							// write to the port
	SerialPort_TX_Queue_Status=4,				// query transmit queue status
	SerialPort_RX_Queue_Status=8,				// query receive queue status
	SerialPort_RX_FlushBuffer=16,				// flush the receive buffer
	SerialPort_RX_IntWait_Return_On_Data=32,	// wait for receive interrupt; return as soon as data arrives
	
};
+\r
/* Parameter block for RS422 serial port I/O. */
struct SerialPort_struct
{
	unsigned char 	Buffer[64];		// data to send / receive destination
	unsigned int	nBufLength;		// number of valid bytes in Buffer
	unsigned int	nSerialPortId;	// which serial port to use
	unsigned int	bFlag;			// SerialPort_struct_flags 
	unsigned short	sTimeOut;		// operation timeout
};
+\r
+\r
/* Filter coefficient set for one scaler filter stage.
   NOTE: the misspelling "ceofficent" is part of the public interface and must be kept. */
struct blue_video_scaler_ceofficent
{
	BLUE_UINT32 ioctl_read_only_flag;		// nonzero = read coefficients, zero = write
	BLUE_UINT32 nScalerId;					// which scaler block (see EPOCH_ENUM_AVAIL_VIDEO_SCALERS_ID)
	BLUE_UINT32 nScalerFilterType;			// which filter stage (EBlueScalerFilterType)
	BLUE_UINT32 nScalerCoefficentWeight[15];// filter tap weights
};
+\r
/* Bit flags for blue_video_scaler_param_struct.nScalerParamFlags. */
enum blue_video_scaler_param_flags
{
	scaler_flags_set_destrect_as_framesize = 0x1,	// use the destination rectangle as the output frame size
};
+\r
/* Configuration for one scaler block: source rectangle, destination rectangle,
   scale factors and output video mode. */
struct blue_video_scaler_param_struct
{
	BLUE_UINT32 ioctl_read_only_flag;	// nonzero = read current settings, zero = write
	BLUE_UINT32 nScalerId;				// which scaler block to configure
	BLUE_UINT32 nSrcVideoHeight;		// source rectangle height
	BLUE_UINT32 nSrcVideoWidth;			// source rectangle width
	BLUE_UINT32 nSrcVideoYPos;			// source rectangle top position
	BLUE_UINT32 nSrcVideoXPos;			// source rectangle left position
	BLUE_UINT32 nDestVideoHeight;		// destination rectangle height
	BLUE_UINT32 nDestVideoWidth;		// destination rectangle width
	BLUE_UINT32 nDestVideoYPos;			// destination rectangle top position
	BLUE_UINT32 nDestVideoXPos;			// destination rectangle left position
	BLUE_UINT32 nHScaleFactor;			// horizontal scale factor
	BLUE_UINT32 nVScaleFactor;			// vertical scale factor
	BLUE_UINT32 nScalerOutputVideoMode;	// video mode produced by the scaler
	BLUE_UINT32 nScalerParamFlags;		// blue_video_scaler_param_flags bits
	BLUE_UINT32 pad[128];				// reserved; keeps the structure size fixed for the driver interface
};
#ifndef EXCLUDE_USERLAND_STRUCT
/* One column of the color matrix: coefficients applied to the B/R/G/K inputs plus a constant.
   Userland-only (uses double); excluded from kernel builds via EXCLUDE_USERLAND_STRUCT. */
struct blue_color_matrix_struct{
	BLUE_UINT32 VideoChannel;	// video channel the matrix applies to
	BLUE_UINT32 MatrixColumn;	//MatrixColType enumerator defines this 
	double Coeff_B;				// weight applied to the Blue/Pb input
	double Coeff_R;				// weight applied to the Red/Pr input
	double Coeff_G;				// weight applied to the Green/Y input
	double Coeff_K;				// weight applied to the Key input
	double const_value;			// constant added to the column output
};
#endif
#pragma pack(pop, blue_dma_request)
+\r
/* ioctl sub-commands for the HANC output FIFO.
   NOTE: value 2 is intentionally unassigned in the current interface. */
typedef enum _blue_output_hanc_ioctl_enum
{
	blue_get_output_hanc_buffer=0,				// acquire a HANC buffer to fill
	blue_put_output_hanc_buffer=1,				// queue a filled HANC buffer for output
	blue_get_valid_silent_hanc_data_status=3,	// query whether valid silent HANC data is present
	blue_set_valid_silent_hanc_data_status=4,	// set the silent HANC data status
	blue_start_output_fifo=5,
	blue_stop_output_fifo=6,
	blue_init_output_fifo=7,
	blue_get_queues_info=8,
	blue_get_output_fifo_info=blue_get_queues_info,	// alias
	blue_get_output_fifo_status=9,

}blue_output_hanc_ioctl_enum;
+\r
/* ioctl sub-commands for the HANC input FIFO.
   NOTE: values 1 and 2 are intentionally unassigned in the current interface. */
typedef enum _blue_input_hanc_ioctl_enum
{
	blue_get_input_hanc_buffer=0,		// acquire a captured HANC buffer
	blue_start_input_fifo=3,
	blue_stop_input_fifo=4,
	blue_init_input_fifo=5,
	blue_playthru_input_fifo=6,			// route captured HANC straight to output
	blue_release_input_hanc_buffer=7,	// return a buffer to the driver
	blue_map_input_hanc_buffer=8,		// map a HANC buffer into user space
	blue_unmap_input_hanc_buffer=9,		// unmap a previously mapped buffer
	blue_get_info_input_hanc_fifo=10,
	blue_get_input_rp188=11,			// read RP188 timecode from the input HANC stream
	blue_get_input_fifo_status=12,
}blue_input_hanc_ioctl_enum;
+\r
+\r
// HANC playback FIFO control flags.
#define HANC_PLAYBACK_INIT		(0x00000001)
#define HANC_PLAYBACK_START		(0x00000002)
#define HANC_PLAYBACK_STOP		(0x00000004)

// HANC capture FIFO control flags.
#define HANC_CAPTURE_INIT		(0x00000010)
#define HANC_CAPTURE_START		(0x00000020)
#define HANC_CAPTURE_STOP		(0x00000040)
#define HANC_CAPTURE_PLAYTHRU	(0x00000080)
+\r
+\r
/* Commands for (re)programming the Orac FPGA configuration flash. */
typedef enum _EOracFPGAConfigCMD
{
	ORAC_FPGA_CONFIG_CMD_ERASE_SECTOR=0,	// erase a flash sector
	ORAC_FPGA_CONFIG_CMD_UNLOCK_SECTOR=1,	// unlock a flash sector for writing
	ORAC_FPGA_CONFIG_CMD_WRITE_DATA=2,		// write configuration data
	ORAC_FPGA_CONFIG_CMD_STATUS=3,			// query programming status
	ORAC_FPGA_CONFIG_CMD_READMODE=4,		// switch flash to read mode
	ORAC_FPGA_CONFIG_RAW_WRITE=5,			// raw write access
	ORAC_FPGA_CONFIG_RAW_READ=6,			// raw read access
	ORAC_FPGA_CONFIG_CMD_READ_DATA=7,		// read back configuration data
	ORAC_FPGA_CONFIG_INIT=8,				// begin a programming session
	ORAC_FPGA_CONFIG_EXIT=9					// end a programming session
}EOracFPGAConfigCMD;
+\r
+\r
// Analog audio is carried on mono channels 9/10 (MONO_CHANNEL_* defined elsewhere in the SDK).
#define ANALOG_CHANNEL_0			MONO_CHANNEL_9
#define ANALOG_CHANNEL_1			MONO_CHANNEL_10

/*Assumes that the data is in stereo pairs not individual samples*/
#define STEREO_PAIR_1				(MONO_CHANNEL_1|MONO_CHANNEL_2) /* Mono Channel 1 & Mono channel 2* together*/
#define STEREO_PAIR_2				(MONO_CHANNEL_3|MONO_CHANNEL_4) /* Mono Channel 3 & Mono Channel 4* together*/
#define STEREO_PAIR_3				(MONO_CHANNEL_5|MONO_CHANNEL_6) /* Mono Channel 5 & Mono Channel 6* together*/
#define STEREO_PAIR_4				(MONO_CHANNEL_7|MONO_CHANNEL_8) /* Mono Channel 7 & Mono Channel 8* together*/

#define ANALOG_AUDIO_PAIR			(ANALOG_CHANNEL_0|ANALOG_CHANNEL_1)

// Byte-order selectors.
#define BLUE_LITTLE_ENDIAN		0
#define BLUE_BIG_ENDIAN			1

// Reserved card buffer ids holding pre-built silent HANC frames.
#define GREED_SILENT_HANC_BUFFER1	250
#define GREED_SILENT_HANC_BUFFER2	251

// Bit flags: bypass the AES sample-rate converter per channel pair.
#define AES_SRC_BYPASS_CHANNEL_1_2	0x1
#define AES_SRC_BYPASS_CHANNEL_3_4	0x2
#define AES_SRC_BYPASS_CHANNEL_5_6	0x4
#define AES_SRC_BYPASS_CHANNEL_7_8	0x8
+\r
/* Source and destination elements of the MR2 routing matrix on Epoch cards.
   Used with the MR2_ROUTING property and the EPOCH_SET_ROUTING macro.
   SRC_* entries are valid as routing sources, DEST_* as destinations,
   SRC_DEST_* as either. Values after the first are implicit (0x2, 0x3, ...) —
   member order is part of the ABI; do not reorder. */
typedef enum _EEpochRoutingElements
{
	EPOCH_SRC_DEST_SCALER_0=0x1,	// scaler blocks (usable as source or destination)
	EPOCH_SRC_DEST_SCALER_1,
	EPOCH_SRC_DEST_SCALER_2,
	EPOCH_SRC_DEST_SCALER_3,

	EPOCH_SRC_SDI_INPUT_A,			// SDI input connectors (sources)
	EPOCH_SRC_SDI_INPUT_B,
	EPOCH_SRC_SDI_INPUT_C,
	EPOCH_SRC_SDI_INPUT_D,

	EPOCH_DEST_SDI_OUTPUT_A,		// SDI output connectors (destinations)
	EPOCH_DEST_SDI_OUTPUT_B,
	EPOCH_DEST_SDI_OUTPUT_C,
	EPOCH_DEST_SDI_OUTPUT_D,

	EPOCH_SRC_OUTPUT_MEM_INTERFACE_CHA,	// playback memory interfaces (sources)
	EPOCH_SRC_OUTPUT_MEM_INTERFACE_CHB,

	EPOCH_DEST_INPUT_MEM_INTERFACE_CHA,	// capture memory interfaces (destinations)
	EPOCH_DEST_INPUT_MEM_INTERFACE_CHB,

	EPOCH_DEST_AES_ANALOG_AUDIO_OUTPUT,	// AES/analog audio output (destination)

	EPOCH_SRC_AV_SIGNAL_GEN,			// internal AV signal generator (source)
	EPOCH_SRC_DEST_VPIO_SCALER_0,		// VPIO daughter-board scalers
	EPOCH_SRC_DEST_VPIO_SCALER_1,

	EPOCH_DEST_VARIVUE_HDMI,			// VariVue HDMI output (destination)

	EPOCH_DEST_INPUT_MEM_INTERFACE_CHC,	// additional capture memory interfaces
	EPOCH_DEST_INPUT_MEM_INTERFACE_CHD,

	EPOCH_SRC_OUTPUT_MEM_INTERFACE_CHC,	// additional playback memory interfaces
	EPOCH_SRC_OUTPUT_MEM_INTERFACE_CHD,

}EEpochRoutingElements;
+\r
+\r
+\r
+#define VPEnableFieldCountTrigger ((BLUE_UINT64)1<<63)\r
+#define VPTriggerGetFieldCount(value) ((BLUE_UINT64)value & 0xFFFFFFFF)\r
+\r
+/* Selects one of the scaler's filter banks (horizontal/vertical,
+   luma Y / chroma C) when reading or writing scaler filter settings. */
+typedef enum _EBlueScalerFilterType
+{
+ BlueScalerHorizontalYFilter=1,
+ BlueScalerHorizontalCFilter=2,
+ BlueScalerVerticalYFilter=3,
+ BlueScalerVerticalCFilter=4,
+}EBlueScalerFilterType;
+\r
+\r
+\r
+#define SET_EPOCH_SCALER_MODE(scaler_id,video_mode) ((scaler_id <<16)|video_mode)\r
+#define GET_EPOCH_SCALER_MODE(value) (value&0xFFFF)\r
+#define GET_EPOCH_SCALER_ID(value) ((value&0xFFFF0000)>>16)\r
+\r
+\r
+// use these macros for retrieving the core/board temperature and fan speed
+// on the Epoch range of cards.
+#define EPOCH_CORE_TEMP(value) (value & 0xFFFF)\r
+#define EPOCH_BOARD_TEMP(value) ((value>>16) & 0xFF)\r
+#define EPOCH_FAN_SPEED(value) ((value>>24) & 0xFF)\r
+\r
+/** \r
+ @desc use these macro for doing the MR2 routing on epoch range of cards.\r
+ MR2 routing can be controlled using the property MR_ROUTING.\r
+*/\r
+#define EPOCH_SET_ROUTING(routing_src,routing_dest,data_link_type) ((routing_src & 0xFF) | ((routing_dest & 0xFF)<<8) | ((data_link_type&0xFFFF)<<16))\r
+#define EPOCH_ROUTING_GET_SRC_DATA(value) (value & 0xFF)\r
+#define EPOCH_ROUTING_GET_DEST_DATA(value) ((value>>8) & 0xFF)\r
+#define EPOCH_ROUTING_GET_LINK_TYPE_DATA(value) ((value>>16) & 0xFFFF)\r
+\r
+#define GPIO_TX_PORT_A (1)\r
+#define GPIO_TX_PORT_B (2)\r
+\r
+#define EPOCH_GPIO_TX(port,value) (port<<16|value) // if want to set each of the GPO \r
+ // ports individually you should use this macro.\r
+ // without the macro it would set both the GPO\r
+ // ports on the card\r
+\r
+/**\r
+ @desc use these macros for controlling epoch application watch dog settings.\r
+ The card property EPOCH_APP_WATCHDOG_TIMER can be used to control \r
+ the watchdog timer functionality.\r
+*/\r
+/* Operation-select bits OR'ed into the EPOCH_APP_WATCHDOG_TIMER card property
+   value (combine with a 16-bit timer value via EPOCH_WATCHDOG_TIMER_SET_MACRO).
+   NOTE(review): (1<<31) left-shifts into the sign bit of int, which is
+   undefined behaviour in standard C/C++ - (1u<<31) would be safer, but the
+   constant is left unchanged here since altering its type/value could affect
+   existing callers; confirm before changing. */
+enum enum_blue_app_watchdog_timer_prop
+{
+ enum_blue_app_watchdog_timer_start_stop=(1<<31), // can be used to enable/disable timer
+ enum_blue_app_watchdog_timer_keepalive=(1<<30), // can be used to reset the timer value
+ enum_blue_app_watchdog_timer_get_present_time=(1<<29), // can query to get the value of the timer
+ enum_blue_app_watchdog_get_timer_activated_status=(1<<28), // can query to get whether the timer has been activated
+ enum_blue_app_watchdog_get_timer_start_stop_status=(1<<27), // can query whether the timer has been set.
+ enum_blue_app_watchdog_enable_gpo_on_active=(1<<26), // using this enumerator you can tell the system that when
+  // application watchdog timer has expired whether a GPO output should be triggered or not.
+  // you can use also use this enumerator to select
+  // which GPO output should be triggered with this. to use GPO port A pass a value of
+  // GPIO_TX_PORT_A when this enumerator is used.
+ enum_blue_hardware_watchdog_enable_gpo=(1<<25) // can be used to enable/disable GPO trigger when hardware watchdog timer has been
+  // triggered
+};
+\r
+#define EPOCH_WATCHDOG_TIMER_SET_MACRO(prop,value) (prop|(value &0xFFFF))\r
+#define EPOCH_WATCHDOG_TIMER_QUERY_MACRO(prop) (prop)\r
+#define EPOCH_WATCHDOG_TIMER_GET_VALUE_MACRO(value) (value&0xFFFF)\r
+\r
+/* Flag bits packed into the RS422 port property value via
+   EPOCH_RS422_PORT_FLAG_SET_MACRO (port id occupies bits 0-1, flags start
+   at bit 3). */
+enum enum_blue_rs422_port_flags
+{
+ enum_blue_rs422_port_set_as_slave =(1<<0) // If set, the RS422 port works in slave mode;
+       // by default the port is set up as master, i.e. it
+       // acts as the master in the transactions.
+};
+#define EPOCH_RS422_PORT_FLAG_SET_MACRO(portid,value) ((portid&0x3)|(value<<3))\r
+#define EPOCH_RS422_PORT_FLAG_GET_FLAG_MACRO(value) ((value>>3)&0xFFFF)\r
+#define EPOCH_RS422_PORT_FLAG_GET_PORTID_MACRO(value) (value&0x3)\r
+\r
+\r
+/* Packing formats for DVB-ASI packet streams. */
+enum enum_blue_dvb_asi_packing_format
+{
+ enum_blue_dvb_asi_packed_format=1,/**< In this packing method the asi packets are stored as 188 or 204 bytes*/
+ enum_blue_dvb_asi_packed_format_with_timestamp=2,/**< In this packing method the asi packets are stored as (8+188) or (8+204) bytes.
+           The timestamp is stored at the beginning of the packet, using 8 bytes*/
+ enum_blue_dvb_asi_256byte_container_format=3,
+ enum_blue_dvb_asi_256byte_container_format_with_timestamp=4
+};
+\r
+\r
+#define RS422_SERIALPORT_FLAG(timeout,port,RxFlushBuffer) (((unsigned long)(timeout)<<16)|(port & 0x3) | (RxFlushBuffer<<15))\r
+// use this macro with Wait_For_SerialPort_InputData,
+// if you want the function to return
+// immediately when it gets a byte in the serial RX port.
+#define RS422_SERIALPORT_FLAG2(timeout,port,RxFlushBuffer,RXIntWaitReturnOnAvailData) (((unsigned long)(timeout)<<16)|(port & 0x3) | (RxFlushBuffer<<15)|(RXIntWaitReturnOnAvailData<<14))\r
+\r
+/* Reported/requested state of the card's black generator on the SDI output. */
+typedef enum _blue_blackgenerator_status
+{
+ ENUM_BLACKGENERATOR_OFF = 0,  //producing normal video output
+ ENUM_BLACKGENERATOR_ON = 1,  //producing black video output
+ ENUM_BLACKGENERATOR_SDI_SYNC_OFF = 2 //no valid SDI signal is coming out of our SDI output connector; only available in Epoch ASI firmware
+}blue_blackgenerator_status;
--- /dev/null
+#pragma once \r
+#include "BlueDriver_p.h"\r
+\r
+#ifdef _WINDOWS\r
+#pragma pack(push,1)\r
+#endif\r
+\r
+#define BLUE_HANC_INVALID_DID (0x0)\r
+\r
+#define BLUE_HANC_AUDIOGROUP1 (0x2FF)\r
+#define BLUE_HANC_AUDIOGROUP2 (0x1FD)\r
+#define BLUE_HANC_AUDIOGROUP3 (0x1FB)\r
+#define BLUE_HANC_AUDIOGROUP4 (0x2F9)\r
+#define BLUE_HANC_RP188 (0x260)\r
+#define BLUE_HANC_AUDIOGROUP1_CONTROL (0x1EF)\r
+#define BLUE_HANC_AUDIOGROUP2_CONTROL (0x2EE)\r
+#define BLUE_HANC_AUDIOGROUP3_CONTROL (0x2ED)\r
+#define BLUE_HANC_AUDIOGROUP4_CONTROL (0x1EC)\r
+#define BLUE_HANC_AUDIOGROUP1_EXTENDED (0x1FE)\r
+#define BLUE_HANC_AUDIOGROUP2_EXTENDED (0x2FC)\r
+#define BLUE_HANC_AUDIOGROUP3_EXTENDED (0x2FA)\r
+#define BLUE_HANC_AUDIOGROUP4_EXTENDED (0x1F8)\r
+\r
+\r
+#define HANC_PACKET_HEADER_CONST (0xBFFFFC00)\r
+\r
+#define BLUE_HANC_START_NEWLINE(line_number) ((0xC0000000)| (line_number << 16))\r
+\r
+#define BLUE_HANC_CONTROL_WORD (0xC0000000)\r
+#define BLUE_HANC_3DATA_PACKET_WORD (0x80000000)\r
+#define BLUE_HANC_2DATA_PACKET_WORD (0x40000000)\r
+#define BLUE_HANC_1DATA_PACKET_WORD (0x00000000)\r
+#define BLUE_HANC_ENDOF_FRAME() ((0xC0000000)| (1 << 15))\r
+\r
+\r
+#define AESAUDIO_DATA_BLOCKSIZE (192)\r
+#define MAX_HANC_BUFFER_SIZE (65536) //256*256\r
+#define MAX_HANC_BUFFER_SIZE_WITHOUT_HEADER (65536 - 0x20/4) //32 bytes = 8 * 4 (8 * UINT32)\r
+#define MAX_HANC_BUFFER_SIZE_BYTES (256*1024)\r
+#define MAX_HANC_BUFFER_SIZE_WITHOUT_HEADER_BYTES (256*1024 - 0x20)\r
+/* \r
+HANC Packet header structure\r
+Contains 2 type of structure , \r
+which makes it easier to parse the data\r
+*/\r
+\r
+/* Three consecutive 10-bit words packed into one 32-bit V210-style word.
+   The bit-field order is flipped between little- and big-endian hosts so the
+   in-memory layout is the same either way. On non-Windows builds the struct
+   is closed with GCC's packed attribute (the #ifndef _WINDOWS tail). */
+struct GenericV210_structure
+{
+#if defined(__LITTLE_ENDIAN__) || defined(_WINDOWS) || defined(BLUE_LINUX_CODE)
+ BLUE_UINT32 first_word:10,second_word:10,third_word:10,unused:2;
+#else
+ BLUE_UINT32 unused:2,third_word:10,second_word:10,first_word:10;
+#endif
+#ifndef _WINDOWS
+}__attribute__((packed));
+#else
+};
+#endif
+\r
+/* Lets a V210 word be accessed either as its three 10-bit fields or as the
+   raw 32-bit word. */
+union GenericV210_union
+{
+ struct GenericV210_structure v210_struct;
+ BLUE_UINT32 v210_word;
+};
+\r
+/* HANC packet header*/\r
+/* HANC packet header: the SMPTE ancillary data flag (ADF) word followed by a
+   packet-info word (DID / DBN / DC), each accessible raw or as 10-bit fields
+   via GenericV210_union. */
+struct HancPacketHeaderStruct
+{
+#if defined(__LITTLE_ENDIAN__) || defined(_WINDOWS) || defined(BLUE_LINUX_CODE)
+ union GenericV210_union ancillary_data_flag; // 0x0,0x3FF,0x3FF, This is a constant defined by smpte
+ union GenericV210_union packet_info;  // first 10 bit word --> Data ID
+       // Commonly used Data ID packet values are
+       // 1) 0x2FF --> Group1 Embedded Audio packet
+       // 2) 0x1FD --> Group2 Embedded Audio Packet
+       // 3) 0x1FB --> Group3 Embedded Audio Packet
+       // 4) 0x2F9 --> Group4 Embedded Audio packet
+       // second 10 bit word --> Data Block Number
+       // This is used for type 1 packets.
+       // third 10 bit word --> Data Count
+       // This 10 bit word specifies the amount of user data
+       // that this hanc will contain.
+#else
+ union GenericV210_union packet_info;
+ union GenericV210_union ancillary_data_flag;
+#endif
+#ifndef _WINDOWS
+}__attribute__((packed));
+#else
+};
+#endif
+\r
+\r
+/* Audio SubFrame Packet */\r
+/* One embedded (AES) audio subframe packed into a 32-bit word; the per-field
+   comments give the bit positions. Bit-field order is reversed on big-endian
+   hosts so the memory layout matches. */
+struct BlueAudioSubFrameStruct
+{ 
+#if defined(__LITTLE_ENDIAN__) || defined(_WINDOWS) || defined(BLUE_LINUX_CODE)
+ BLUE_UINT32 ZBit:1, //bit 0 set to declare start of channel status word
+    Channel:2, //bit 1-2
+    AudioData_0_5:6, //bit 3-8
+    NotBit8:1, //bit 9
+    AudioData_6_14:9, //bit 10-18
+    NotBit18:1, //bit 19 use same value as NotBit8 (bit 9)
+    AudioData_15_19:5, //bit 20-24
+    AESSampleValidityBit:1, //bit 25
+    AESUserBit:1, //bit 26
+    AESAudioChannelStatusBit:1, //bit 27 one bit of the channel status word
+    ParityBit:1, //bit 28 xor of all bits except (NotBit8 (bit 9) and NotBit18 (bit 19))
+    NotBit31:1, //bit 29 not of ParityBit (bit 28)
+    akiraControlBits:2; //bit 30-31
+#else
+ BLUE_UINT32 akiraControlBits:2,
+    NotBit31:1,
+    ParityBit:1,
+    AESAudioChannelStatusBit:1,
+    AESUserBit:1,
+    AESSampleValidityBit:1,
+    AudioData_15_19:5,
+    NotBit18:1,
+    AudioData_6_14:9,
+    NotBit8:1,
+    AudioData_0_5:6,
+    Channel:2,
+    ZBit:1;
+#endif
+
+#ifndef _WINDOWS
+}__attribute__((packed));
+#else
+};
+#endif
+\r
+/* Access an audio subframe as bit-fields, as a raw 32-bit word, or as three
+   generic 10-bit V210 words. */
+union BlueAudioSubFrameHeader
+{
+ struct BlueAudioSubFrameStruct audioSubFrame;
+ BLUE_UINT32 BlueAudioSubFrameWord;
+ struct GenericV210_structure audioSubFrame_v210;
+};
+\r
+#define MAX_AUDIO_SUBFRAMES_IN_A_LINE (64) // 4 samples per audio group and 4 channels for each audio group per sample
+\r
+\r
+/*\r
+Time code structure that the function expects is the same format as LTC time code\r
+bits 0 - 3 :units of frame \r
+bits 4 - 7: binary group1 \r
+bits 8 - 9: tens of frame \r
+bits 10 -11: flags \r
+bits 12 -15: binary group2 \r
+bits 16-19 : units of seconds \r
+bits 20-23 : binary group3 \r
+\r
+bits 24 - 26: tens of seconds\r
+bit 27 : flag \r
+bits 28 - 31: group binary4 \r
+bits 32 -35: units of minutes \r
+\r
+bits 36 - 39 :binary5 \r
+bits 40 - 42: tens of minutes \r
+bit 43 : flag \r
+bits 44 - 47: binary group6 \r
+\r
+bits 48 - 51: units of hours \r
+bits 52 - 55: binary group7 \r
+bits 56 - 57: tens of hours \r
+bits 58 - 59: flag\r
+bits 60 - 63: binary8\r
+\r
+*/\r
+/*
+ 64-bit RP188/LTC time code packed as bit-fields following the LTC bit layout
+ documented above. Both #if branches must expose identical field names so the
+ same client code compiles on either endianness. NOTE: the historical
+ misspelling "unsued_*" of the pad bits is kept deliberately - it is the
+ public field name on the little-endian (Windows/Linux) builds.
+*/
+struct LTC_TimeCode
+{
+#if defined(__LITTLE_ENDIAN__) || defined(_WINDOWS) || defined(BLUE_LINUX_CODE)
+ BLUE_UINT64 unit_frame:4,binary1:4,ten_frame:2,drop_frame_flag:1,color_frame_flag:1,
+    binary2:4,unit_second:4,binary3:4,ten_second:3,unsued_1:1,binary4:4,
+    unit_minute:4,binary5:4,ten_minute:3,unsued_2:1,binary6:4,unit_hours:4,
+    binary7:4,ten_hours:2,unsued_3:2,binary8:4;
+#else
+ /* big-endian: same fields in reverse bit order. Field names fixed to match
+    the little-endian branch (previously spelled "unused_1"/"unused_2", which
+    made code written against the other branch fail to compile). */
+ BLUE_UINT64 binary8:4,unsued_3:2,ten_hours:2,binary7:4,
+    unit_hours:4,binary6:4,unsued_2:1,ten_minute:3,binary5:4,unit_minute:4,
+    binary4:4,unsued_1:1,ten_second:3,binary3:4,unit_second:4,binary2:4,
+    color_frame_flag:1,drop_frame_flag:1,ten_frame:2,binary1:4,unit_frame:4;
+#endif
+
+#ifndef _WINDOWS
+}__attribute__((packed));
+#else
+};
+#endif
+\r
+/* Wrapper exposing an LTC time code either as bit-fields or as the raw
+   64-bit value (anonymous union). */
+struct LTC_TimeCode_union
+{
+ union 
+ {
+  struct LTC_TimeCode struct_ltc;
+  BLUE_UINT64 lt_64_value;
+ };
+};
+\r
+/*\r
+ This is used to unpack the timecode word properly and quickly\r
+ in RP188 each 4 bits of the timecode is put into a 10 bit word.\r
+ So this structure helps in decoding \r
+*/\r
+/* Two 4-bit halves of a byte; used to pick apart time code nibbles (see the
+   RP188 note above). */
+struct nibble_struct
+{
+ BLUE_UINT8 first_half:4,second_half:4;
+
+#ifndef _WINDOWS
+}__attribute__((packed));
+#else
+};
+#endif
+\r
+/* Convenience view of a 64-bit LTC time code: the same storage can be read
+   as bit-fields (struct_ltc), as a raw 64-bit word (ltc), or as eight nibble
+   pairs (ltc_char) for RP188 packing where each 4-bit group is placed in a
+   10-bit word. */
+struct TimeCode
+{
+ union 
+ {
+  struct LTC_TimeCode struct_ltc;
+  BLUE_UINT64 ltc;
+  struct nibble_struct ltc_char[8]; 
+ };
+ 
+#ifndef _WINDOWS
+}__attribute__((packed));
+#else
+};
+#endif
+\r
+/*
+ HANC time code payload: three DBB/ANC groups, each with parity and inverted
+ bit-8, packed into a 32-bit word. Both #if branches must expose identical
+ field names so the same client code compiles on either endianness.
+ NOTE: the historical misspelling "partiy_*" is kept deliberately - it is the
+ public field name on the little-endian (Windows/Linux) builds.
+*/
+struct HANCTimeCodeStruct 
+{ 
+#if defined(__LITTLE_ENDIAN__) || defined(_WINDOWS) || defined(BLUE_LINUX_CODE)
+ BLUE_UINT32 zero_0:3,
+    DBB_0:1,
+    ANC_0:4,
+    partiy_0:1,
+    NotBit8_0:1,
+    zero_1:3,
+    DBB_1:1,
+    ANC_1:4,
+    partiy_1:1,
+    NotBit8_1:1,
+    zero_2:3,
+    DBB_2:1,
+    ANC_2:4,
+    partiy_2:1,
+    NotBit8_2:1,
+    akiraControlBits:2;
+#else
+ /* big-endian: same fields in reverse bit order. Field names fixed to match
+    the little-endian branch (previously "Notbit8_2" and "NotBit81_1", which
+    made code written against the other branch fail to compile). */
+ BLUE_UINT32 akiraControlBits:2,
+    NotBit8_2:1,
+    partiy_2:1,
+    ANC_2:4,
+    DBB_2:1,
+    zero_2:3,
+    NotBit8_1:1,
+    partiy_1:1,
+    ANC_1:4,
+    DBB_1:1,
+    zero_1:3,
+    NotBit8_0:1,
+    partiy_0:1,
+    ANC_0:4,
+    DBB_0:1,
+    zero_0:3;
+#endif
+
+#ifndef _WINDOWS
+}__attribute__((packed));
+#else
+};
+#endif
+\r
+/* Access a HANC time code group either as bit-fields or as the raw
+   32-bit word. */
+union HANCTimeCode
+{
+ struct HANCTimeCodeStruct hanc_struct;
+ BLUE_UINT32 hanc_word;
+};
+\r
+/* One VANC time-code word (DBB/ANC group with parity and inverted bit-8)
+   packed into the low bits of a 16-bit value; bit order flips with host
+   endianness. */
+struct BAG2VancTimeCodeStruct 
+{ 
+#if defined(__LITTLE_ENDIAN__) || defined(_WINDOWS) || defined(BLUE_LINUX_CODE)
+ BLUE_UINT16 zero_0:3,
+    DBB_0:1,
+    ANC_0:4,
+    partiy_0:1,
+    NotBit8_0:1;
+#else
+ BLUE_UINT16 NotBit8_0:1,
+    partiy_0:1,
+    ANC_0:4,
+    DBB_0:1,
+    zero_0:3;
+#endif
+
+#ifndef _WINDOWS
+}__attribute__((packed));
+#else
+};
+#endif
+\r
+/* Access a VANC time-code group either as bit-fields or as the raw
+   16-bit word. */
+union BAG2VancTimeCode
+{
+ struct BAG2VancTimeCodeStruct vanc_struct;
+ BLUE_UINT16 vanc_word;
+};
+\r
+\r
+/*
+ Convert a running frame count into a 64-bit RP188/LTC BCD time code.
+ @param frame_count total number of frames since time zero.
+ @param framePerSec frame rate used for the split (must be non-zero; a zero
+        rate divides by zero, exactly as in the original implementation).
+ @return packed 64-bit time code with BCD hours/minutes/seconds/frames.
+*/
+inline BLUE_UINT64 convert_countto_timecode(BLUE_UINT32 frame_count,BLUE_UINT32 framePerSec)
+{
+ struct TimeCode time_code;
+ unsigned int total_seconds = frame_count / framePerSec;
+ unsigned int frame_units = frame_count % framePerSec;
+ unsigned int hh = total_seconds / (60 * 60);
+ unsigned int mm = (total_seconds / 60) % 60;
+ unsigned int ss = total_seconds % 60;
+
+ time_code.ltc = 0;
+ time_code.struct_ltc.unit_frame = frame_units % 10;
+ time_code.struct_ltc.ten_frame = frame_units / 10;
+ time_code.struct_ltc.unit_second = ss % 10;
+ time_code.struct_ltc.ten_second = ss / 10;
+ time_code.struct_ltc.unit_minute = mm % 10;
+ time_code.struct_ltc.ten_minute = mm / 10;
+ time_code.struct_ltc.unit_hours = hh % 10;
+ time_code.struct_ltc.ten_hours = hh / 10;
+
+ return time_code.ltc;
+}
+\r
+\r
+/*
+ Unpack a 64-bit RP188/LTC time code into its BCD-decoded components.
+ Note: framePerSec is accepted for interface compatibility but is not used,
+ and the function returns the time code it was given, unchanged.
+*/
+inline BLUE_UINT64 convert_timecode_to_count(BLUE_UINT64 timecode,
+       BLUE_UINT32 framePerSec,
+       unsigned int & frames ,
+       unsigned int & second,
+       unsigned int & minutes ,
+       unsigned int & hours)
+{
+ struct TimeCode time_code;
+ time_code.ltc = timecode;
+ hours = 10u * time_code.struct_ltc.ten_hours + time_code.struct_ltc.unit_hours;
+ minutes = 10u * time_code.struct_ltc.ten_minute + time_code.struct_ltc.unit_minute;
+ second = 10u * time_code.struct_ltc.ten_second + time_code.struct_ltc.unit_second;
+ frames = 10u * time_code.struct_ltc.ten_frame + time_code.struct_ltc.unit_frame;
+ return time_code.ltc;
+}
+\r
+// Determine endianess at run-time\r
+// Swap a 32-bit value between big- and little-endian byte order, detecting
+// host endianness at run time: on a big-endian host the bytes are reversed,
+// on a little-endian host the value is returned unchanged.
+// Fixes vs. original: the byte recombination now uses unsigned arithmetic
+// ((int)c1 << 24 overflows signed int when c1 >= 128, which is undefined
+// behaviour), and the helper macro is #undef'ed so it no longer leaks into
+// every translation unit that includes this header.
+inline BLUE_UINT32 Int32SwapBigLittle(const BLUE_UINT32 i)
+{
+ const int endian = 1;
+ // true when the low-order byte of 'endian' is not stored first (big-endian host)
+ #define is_bigendian() ( (*(char*) & endian) == 0 )
+
+ BLUE_UINT32 result = i;
+ if (is_bigendian())
+ {
+  unsigned char c1 = i & 255;
+  unsigned char c2 = (i >> 8) & 255;
+  unsigned char c3 = (i >> 16) & 255;
+  unsigned char c4 = (i >> 24) & 255;
+  result = ((BLUE_UINT32)c1 << 24) + ((BLUE_UINT32)c2 << 16) + ((BLUE_UINT32)c3 << 8) + c4;
+ }
+ #undef is_bigendian
+ return result;
+}
+\r
+#ifdef _WINDOWS\r
+#pragma pack(pop)\r
+#endif
\ No newline at end of file
--- /dev/null
+#pragma once\r
+#ifndef BLUE_LINUX_CODE\r
+#ifndef HANCUTILS_USE_STATIC_LIB\r
+ #ifdef HANCUTILS_EXPORTS\r
+ #define HANCUTILS_API __declspec(dllexport)\r
+ #elif defined(__APPLE__)\r
+ #define HANCUTILS_API\r
+ #define ATLTRACE printf\r
+ #else\r
+ #define HANCUTILS_API __declspec(dllimport)\r
+ #endif\r
+#else\r
+ #define HANCUTILS_API\r
+#endif\r
+#else\r
+ #define HANCUTILS_API\r
+typedef bool BOOL; \r
+#endif \r
+#include "BlueDriver_p.h"\r
+\r
+\r
+\r
+\r
+extern "C"\r
+{\r
+/**\r
+@defgroup hanc_manipilation_function Embedded audio\r
+@{\r
+*/\r
+\r
+#pragma pack(push, hanc_struct, 1)\r
+\r
+/**\r
+@brief The structure is used to extract/insert Embedded audio to and from the HANC stream of Greed and Leon based cards.*/\r
+\r
+struct hanc_stream_info_struct
+{
+ BLUE_INT32 AudioDBNArray[4];  /**< Contains the DBN values that should be used for each of the embedded audio groups*/
+ BLUE_INT32 AudioChannelStatusBlock[4]; /**< channel status block information for each of the embedded audio group*/
+ BLUE_UINT32 flag_valid_time_code;  /**< flag which identifies the validity of the time code member in the #hanc_stream_info_struct*/
+ BLUE_UINT64 time_code;    /**< RP188 time code that was extracted from the HANC buffer or RP188 timecode which should be inserted 
+           into the HANC buffer*/
+ BLUE_UINT32* hanc_data_ptr;   /**< Hanc Buffer which should be used as the source or destination for either extraction or insertion */
+ BLUE_UINT32 video_mode;    /**< video mode which this hanc buffer which be used with. We need this information for do the required audio distribution 
+           especially NTSC */
+ BLUE_UINT64 ltc_time_code;   /**< LTC time code (NOTE(review): assumed to use the same 64 bit packed layout as time_code - confirm) */
+ BLUE_UINT64 sd_vitc_time_code;  /**< SD VITC time code (same layout assumption as the LTC member above) */
+ BLUE_UINT64 rp188_ltc_time_code;  /**< RP188 LTC time code (same layout assumption as the LTC member above) */
+ BLUE_UINT32 pad[126];    /**< reserved (NOTE(review): presumably keeps the struct size stable across SDK revisions - confirm) */
+};
+\r
+#define AUDIO_INPUT_SOURCE_EMB 0\r
+#define AUDIO_INPUT_SOURCE_AES 1\r
+/**
+@brief Parameter block for the HANC decoder functions (hanc_decoder_ex and
+orac_hanc_stream_analyzer). The caller supplies the destination buffers,
+channel mask and sample format; the decoder fills in the extracted PCM audio,
+time codes and any custom (non-audio, non-RP188) ANC packets.
+*/
+struct hanc_decode_struct
+{
+ void* audio_pcm_data_ptr;   // Buffer which would be used to store the extracted PCM
+          // audio data. Must be filled in by app before calling function.
+ BLUE_UINT32 audio_ch_required_mask; // which all audio channels should be extracted from the 
+          // audio frame .Must be filled in by app before calling function.
+ BLUE_UINT32 type_of_sample_required;// type of destination audio channel
+          //ie 16 bit ,24 bit or 32 bit PCM data .
+          //Must be filled in by app before calling function.
+ BLUE_UINT32 no_audio_samples;  // this would contain how many audio samples has been decoded from
+          // the hanc buffer.
+ BLUE_UINT64 timecodes[7];   // Would extract the timecode information from the audio frame.
+ void * raw_custom_anc_pkt_data_ptr; // This buffer would contain the raw ANC packets that was found in the orac hanc buffer.
+          // this would contain any ANC packets that is not of type embedded audio and RP188 TC.
+          //Must be filled in by app before calling function. can be NULL
+ BLUE_UINT32 sizeof_custom_anc_pkt_data_ptr; // size of the ANC buffer array
+          //Must be filled in by app before calling function. can be NULL
+ BLUE_UINT32 avail_custom_anc_pkt_data_bytes;// how many custom ANC packets has been decoded into raw_hanc_pkt_data_ptr
+          //Must be filled in by app before calling function. can be NULL
+ BLUE_UINT32 audio_input_source; // Used to select the audio input source. 
+         // whether it is AES or Embedded.
+         //Must be filled in by app before calling function.
+ BLUE_UINT32 audio_temp_buffer[16]; // this is used to store split audio sample 
+         // which did not contain all its audio channels
+         // in one audio frame
+         //Must be initialised to zero by app before first instantiating the function. 
+ BLUE_UINT32 audio_split_buffer_mask; // The mask would be used to make a note of 
+          // split audio sample information for a frame.
+          //Must be initialised to zero by app before first instantiating the function. 
+ BLUE_UINT32 max_expected_audio_sample_count; // specify the maximum number of audio samples 
+           // that the audio pcm buffer can contain.
+           //Must be filled in by app before calling function.
+ BLUE_UINT32 pad[124];   // reserved (NOTE(review): presumably keeps the struct size stable across SDK revisions - confirm)
+};
+\r
+#pragma pack(pop, hanc_struct)\r
+\r
+HANCUTILS_API BLUE_UINT32 encode_hanc_frame(struct hanc_stream_info_struct* hanc_stream_ptr,\r
+ void* audio_pcm_ptr,\r
+ BLUE_UINT32 no_audio_ch,\r
+ BLUE_UINT32 no_audio_samples,\r
+ BLUE_UINT32 nTypeOfSample,\r
+ BLUE_UINT32 emb_audio_flag);\r
+\r
+HANCUTILS_API BLUE_UINT32 encode_hanc_frame_ex( BLUE_UINT32 card_type,\r
+ struct hanc_stream_info_struct* hanc_stream_ptr,\r
+ void* audio_pcm_ptr,\r
+ BLUE_UINT32 no_audio_ch,\r
+ BLUE_UINT32 no_audio_samples,\r
+ BLUE_UINT32 nTypeOfSample,\r
+ BLUE_UINT32 emb_audio_flag);\r
+\r
+\r
+HANCUTILS_API BLUE_UINT32 encode_hanc_frame_with_ucz( BLUE_UINT32 card_type,\r
+ struct hanc_stream_info_struct* hanc_stream_ptr,\r
+ void* audio_pcm_ptr,\r
+ BLUE_UINT32 no_audio_ch,\r
+ BLUE_UINT32 no_audio_samples,\r
+ BLUE_UINT32 nTypeOfSample,\r
+ BLUE_UINT32 emb_audio_flag,\r
+ BLUE_UINT8* pUCZBuffer);\r
+\r
+HANCUTILS_API BLUE_UINT32 create_embed_audiosample( void* raw_data_ptr,\r
+ BLUE_UINT32* emb_data_ptr,\r
+ BLUE_UINT32 channels_per_audio_sample,\r
+ BLUE_UINT32 bytes_per_ch,\r
+ BLUE_UINT32 no_samples,\r
+ BLUE_UINT32 emb_audio_flags,\r
+ BLUE_UINT8* Audio_Groups_DBN_Array,\r
+ BLUE_UINT8* Audio_Groups_statusblock_Array);\r
+\r
+HANCUTILS_API BLUE_UINT32* get_embed_audio_distribution_array(BLUE_UINT32 video_mode, BLUE_UINT32 sequence_no);\r
+//HANCUTILS_API BLUE_UINT32 * GetAudioFrameSequence(BLUE_UINT32 video_output_standard);\r
+\r
+HANCUTILS_API bool hanc_stream_analyzer(BLUE_UINT32 *src_hanc_buffer,struct hanc_stream_info_struct * hanc_stream_ptr);\r
+HANCUTILS_API bool orac_hanc_stream_analyzer(BLUE_UINT32 card_type,BLUE_UINT32 *src_hanc_buffer,struct hanc_decode_struct * decode_ptr,char * analyzer_output_file);\r
+HANCUTILS_API bool hanc_decoder_ex( BLUE_UINT32 card_type,\r
+ BLUE_UINT32* src_hanc_buffer,\r
+ struct hanc_decode_struct* hanc_decode_struct_ptr);\r
+\r
+/**\r
+@}\r
+*/\r
+\r
+/**\r
+@defgroup vanc_manipilation_function vanc packet I/O \r
+@{\r
+*/\r
+\r
+\r
+/**
+@brief enumerator used by the VANC manipulation functions on HD cards to specify
+ whether a VANC packet should be inserted into/extracted from the VANC Y buffer
+ or the VANC CbCr buffer. This enumerator is only meaningful for HD video modes,
+ which are the only ones with two separate ANC buffers (Y and CbCr). In SD modes
+ the ANC data is spread across both the Y and CbCr values.
+ 
+*/
+enum blue_vanc_pkt_type_enum
+{
+ blue_vanc_pkt_y_comp=0,  /**< ANC pkt should be inserted/extracted from the Y component buffer*/
+ blue_vanc_pkt_cbcr_comp=1 /**< ANC pkt should be inserted/extracted from the CbCr component buffer (HD modes only - see the note above)*/
+};
+\r
+/*!\r
+@brief Use this function to initialise VANC buffer before inserting any packets into the buffer\r
+@param CardType type of bluefish card to which this vanc buffer was transferred to.\r
+@param nVideoMode video mode under which this vanc buffer will be used.\r
+@param pixels_per_line width in pixels of the vanc buffer that has to be initialised.\r
+@param lines_per_frame height of the vanc buffer that has to be initialised.\r
+@param pVancBuffer vanc buffer which has to be initialised.\r
+@remarks.\r
+\r
+*/\r
+HANCUTILS_API BLUE_UINT32 blue_init_vanc_buffer(BLUE_UINT32 CardType,BLUE_UINT32 nVideoMode,BLUE_UINT32 pixels_per_line,BLUE_UINT32 lines_per_frame,BLUE_UINT32 * pVancBuffer);\r
+/*!\r
+@brief this function can be used to extract ANC packet from HD cards. Currently we can only extract packets in the VANC space.\r
+@param CardType type of the card from which the vanc buffer was captured.\r
+@param vanc_pkt_type This parameter denotes whether to search for the VANC packet in Y Space or Cb/Cr Space.\r
+ The values this parameter accepts are defined in the enumerator #blue_vanc_pkt_type_enum\r
+@param src_vanc_buffer Vanc buffer which was captured from bluefish card\r
+@param src_vanc_buffer_size size of the vanc buffer which should be parsed for the specified vanc packet\r
+@param pixels_per_line specifies how many pixels are there in each line of VANC buffer\r
+@param vanc_pkt_did specifies the DID of the Vanc packet which should be extracted from the buffer\r
+@param vanc_pkt_sdid Returns the SDID of the extracted VANC packet\r
+@param vanc_pkt_data_length returns the size of the extracted VANC packet. The size is specifed as number of UDW words\r
+ that was contained in the packet\r
+@param vanc_pkt_data_ptr pointer to UDW of the VANC packets . The 10 bit UDW words are packed in a 16 bit integer. The bottom 10 bit of the \r
+ 16 bit word contains the UDW data.\r
+@param vanc_pkt_line_no line number where the packet was found .\r
+\r
+@remarks.\r
+\r
+*/\r
+HANCUTILS_API BLUE_INT32 vanc_pkt_extract( \r
+ BLUE_UINT32 CardType,\r
+ BLUE_UINT32 vanc_pkt_type,\r
+ BLUE_UINT32 * src_vanc_buffer,\r
+ BLUE_UINT32 src_vanc_buffer_size,\r
+ BLUE_UINT32 pixels_per_line,\r
+ BLUE_UINT32 vanc_pkt_did,\r
+ BLUE_UINT16 * vanc_pkt_sdid,\r
+ BLUE_UINT16 * vanc_pkt_data_length,\r
+ BLUE_UINT16 * vanc_pkt_data_ptr,\r
+ BLUE_UINT16 * vanc_pkt_line_no);\r
+\r
+/**\r
+@brief use this function to insert ANC packets into the VANC space of the HD cards.\r
+@param CardType type of the card from which the vanc buffer was captured.\r
+@param vanc_pkt_type This parameter denotes whether to search for the VANC packet in Y Space or Cb/Cr Space.\r
+ The values this parameter accepts are defined in the enumerator #blue_vanc_pkt_type_enum\r
+@param vanc_pkt_line_no line in the VANC buffer where the ANC packet should be inserted.
+@param vanc_pkt_buffer vanc ANC packet which should be inserted into the VANC buffer.\r
+@param vanc_pkt_buffer_size size of the ANC packet including the checksum ,ADF , SDID, DID and Data Count\r
+@param dest_vanc_buffer VANC buffer into which the ANC packet will be inserted into.\r
+@param pixels_per_line specifies how many pixels are there in each line of VANC buffer\r
+*/\r
+HANCUTILS_API BLUE_INT32 vanc_pkt_insert(\r
+ BLUE_UINT32 CardType,\r
+ BLUE_UINT32 vanc_pkt_type,\r
+ BLUE_UINT32 vanc_pkt_line_no,\r
+ BLUE_UINT32 * vanc_pkt_buffer,\r
+ BLUE_UINT32 vanc_pkt_buffer_size,\r
+ BLUE_UINT32 * dest_vanc_buffer,\r
+ BLUE_UINT32 pixels_per_line);\r
+\r
+/** @} */\r
+\r
+/**\r
+@defgroup vanc_decode_encoder_helper ANC encoder/decoder \r
+ @{\r
+*/\r
+HANCUTILS_API BLUE_UINT32 decode_eia_708b_pkt(BLUE_UINT32 CardType,BLUE_UINT16 * vanc_pkt_data_ptr,BLUE_UINT16 pkt_udw_count,BLUE_UINT16 eia_pkt_subtype,BLUE_UINT8 * decoded_ch_str);\r
+//#ifndef BLUE_LINUX_CODE\r
+//HANCUTILS_API BLUE_UINT64 decode_rp188_packet(BLUE_UINT32 CardType,BLUE_UINT32 * src_vanc_buffer,BLUE_UINT32 UDW_Count,BLUE_UINT64 *rp188_dbb);\r
+//HANCUTILS_API bool blue_vitc_decoder_8bit_fmt(BLUE_UINT8 * raw_vbi_ptr,BLUE_UINT32 pixels_per_line,BLUE_UINT32 mem_fmt,BLUE_UINT32 vitc_line_no,BLUE_UINT64 * vitc_time_code);\r
+//HANCUTILS_API bool blue_vitc_decoder_10bit_v210(BLUE_UINT8 * raw_vbi_ptr, BLUE_UINT32 vitc_line_no, BLUE_UINT64 * vitc_time_code);\r
+//HANCUTILS_API unsigned int create_rp188_pkt(\r
+// BLUE_UINT32 cardType,\r
+// BLUE_UINT32 * emb_data_ptr,\r
+// BLUE_UINT32 line_no,\r
+// BLUE_UINT32 start_new_line,\r
+// BLUE_UINT64 timecode,\r
+// BLUE_UINT64 rp188_dbb);\r
+//#endif\r
+\r
+\r
+/** @} */\r
+}\r
--- /dev/null
+#pragma once \r
+\r
+#define BERR int\r
+#define BLUE_UINT32 unsigned int\r
+#define BLUE_INT32 int\r
+#define BLUE_UINT64 unsigned __int64\r
+#define BLUE_UINT8 unsigned char\r
+#define BLUE_INT8 char\r
+#define BLUE_UINT16 unsigned short\r
--- /dev/null
+/*\r
+// ==========================================================================\r
+// Bluefish444 BlueVelvet SDK library\r
+//\r
+// BlueVelvet.h\r
+// Public Header\r
+//\r
+// developed by : Cameron Duffy (C) 2002 Bluefish444 P/L\r
+//\r
+// derived from work begun by Vizrt Austria (C) 2001.\r
+//\r
+// ==========================================================================\r
+\r
+ $Id: BlueVelvet.h,v 1.32.8.1 2011/08/04 03:34:36 tim Exp $\r
+*/\r
+#ifndef _BLUEVELVET_H\r
+#define _BLUEVELVET_H\r
+\r
+#ifdef BLUEFISH_EXPORTS\r
+#define BLUEFISH_API __declspec(dllexport)\r
+#else\r
+#define BLUEFISH_API __declspec(dllimport)\r
+#endif\r
+\r
+//#include "BlueVelvet_c.h"\r
+\r
+#define BLUE_UINT32 unsigned int \r
+#define BLUE_INT32 int \r
+#define BLUE_UINT8 unsigned char\r
+#define BLUE_INT8 char\r
+#define BLUE_UINT16 unsigned short\r
+#define BLUE_INT16 short\r
+#define BLUE_UINT64 unsigned __int64\r
+\r
+\r
+#ifndef BLUEVELVET_2_DLL\r
+#define BLUEVELVET_SDK_VERSION3\r
+#endif\r
+\r
+#include "BlueDriver_p.h"\r
+\r
+\r
+//----------------------------------------------------------------------------\r
+// Some simple macros and definitions\r
+#define BLUEVELVET_MAX_DEVICES (5) // Maximum number of Blue Cards recognised by driver\r
+\r
+typedef int BErr;\r
+\r
+/* Result-test helpers for methods returning BErr (0 == success).
+   Parameters are fully parenthesized so expression arguments expand
+   correctly (e.g. BLUE_OK(x & 1) previously expanded to (!x & 1)). */
+#define BLUE_OK(a)   (!(a))   // Test for success of a method returning BErr
+#define BLUE_FAIL(a)  ((a))   // Test for failure of a method returning BErr
+#define BLUE_PASS(a)  ((a)>=0)  // Use this where +ve return values still indicate success
+\r
+\r
+//----------------------------------------------------------------------------\r
+// The class definition\r
+class BLUEFISH_API CBlueVelvet\r
+{\r
+public:\r
+ // 4.1 Startup Functions\r
+ //---------------------------------\r
+ // 4.1.1 device_enumerate\r
+ // Counts accessible blue cards in target system. \r
+ virtual\r
+ BErr device_enumerate(\r
+ int& Devices\r
+ ) = 0;\r
+\r
+ // 4.1.2 device_attach\r
+ // Attach the class instance to the indexed device. \r
+ virtual\r
+ BErr device_attach(\r
+ int DeviceId,\r
+ int do_audio // DEPRECATED; SET TO 0\r
+ ) = 0;\r
+\r
+ // 4.1.3 device_detach\r
+ // Detach the current device from the class instance.\r
+ virtual\r
+ BErr device_detach(\r
+ void\r
+ ) = 0;\r
+\r
+ // 4.1.4 device_attach_audio\r
+ // Attach the class instance to the audio I/O component of the current device.\r
+ // DEPRECATED; DO NOT USE!\r
+ virtual\r
+ BErr device_attach_audio(\r
+ void\r
+ ) = 0;\r
+\r
+ // 4.1.5 device_detach_audio\r
+ // Remove audio I/O components of the current device from the class instance.\r
+ // DEPRECATED; DO NOT USE!\r
+ virtual\r
+ BErr device_detach_audio(\r
+ void\r
+ ) = 0;\r
+\r
+ // 4.1.6 device_attach_audio_in\r
+ // Attach the class instance to the audio IINPUT component of the current device.\r
+ // DEPRECATED; DO NOT USE!\r
+ virtual\r
+ BErr device_attach_audio_in(\r
+ void\r
+ ) = 0;\r
+\r
+ // 4.1.7 device_detach_audio_in\r
+ // Remove audio INPUT component of the current device from the class instance.\r
+ // DEPRECATED; DO NOT USE!\r
+ virtual\r
+ BErr device_detach_audio_in(\r
+ void\r
+ ) = 0;\r
+\r
+ // 4.1.8 device_attach_audio_out\r
+ // Attach the class instance to the audio OUTPUT component of the current device.\r
+ // DEPRECATED; DO NOT USE!\r
+ virtual\r
+ BErr device_attach_audio_out(\r
+ void\r
+ ) = 0;\r
+\r
+ // 4.1.9 device_detach_audio_out\r
+ // Remove audio OUTPUT component of the current device from the class instance. \r
+ // DEPRECATED; DO NOT USE!\r
+ virtual\r
+ BErr device_detach_audio_out(\r
+ void\r
+ ) = 0;\r
+\r
+ // 4.1.10 device_get_bar\r
+ // Get device Bar assets from driver\r
+ virtual\r
+ BErr device_get_bar(\r
+ unsigned long BarN,\r
+ void** ppAddress,\r
+ unsigned long& Length\r
+ ) = 0;\r
+\r
+\r
+\r
+ // 4.2 Feature Assessment Functions
+ //---------------------------------
+ // NOTE(review): these queries report whether a capability is present on the
+ // device identified by DeviceId; a nonzero return presumably means
+ // "supported" and DeviceId=0 presumably means "current device" — TODO
+ // confirm against the SDK manual.
+ // 4.2.1 has_timing_adjust
+ virtual
+ int has_timing_adjust(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.2 has_vertical_flip
+ virtual
+ int has_vertical_flip(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.3 has_half_res
+ virtual
+ int has_half_res(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.4 has_dissolve
+ virtual
+ int has_dissolve(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.5 has_aperture
+ virtual
+ int has_aperture(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.6 has_input_sdi
+ virtual
+ int has_input_sdi(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.7 has_output_sdi
+ virtual
+ int has_output_sdi(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.8 has_input_composite
+ virtual
+ int has_input_composite(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.9 has_output_composite
+ virtual
+ int has_output_composite(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.10 has_input_yuv
+ virtual
+ int has_input_yuv(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.11 has_output_yuv
+ virtual
+ int has_output_yuv(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.12 has_output_rgb
+ virtual
+ int has_output_rgb(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.13 has_input_svideo
+ virtual
+ int has_input_svideo(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.14 has_output_svideo
+ virtual
+ int has_output_svideo(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.15 has_output_key
+ virtual
+ int has_output_key(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.16 has_output_key_v4444
+ virtual
+ int has_output_key_v4444(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.17 has_letterbox
+ virtual
+ int has_letterbox(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.18 has_video_memory
+ virtual
+ int has_video_memory(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.18 has_video_memory_base
+ // NOTE(review): section number duplicates 4.2.18 above — numbering kept
+ // as-is to match the SDK manual's original sequence.
+ virtual
+ int has_video_memory_base(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.19 has_video_cardtype
+ virtual
+ int has_video_cardtype(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.20 count_video_mode
+ virtual
+ int count_video_mode(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.21 enum_video_mode
+ // Returns the enumeration for the Index-th supported video mode.
+ virtual
+ EVideoMode enum_video_mode(
+ int Index,
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.22 count_memory_format
+ virtual
+ int count_memory_format(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.23 enum_memory_format
+ // Returns the enumeration for the Index-th supported memory format.
+ virtual
+ EMemoryFormat enum_memory_format(
+ int Index,
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.24 count_update_method
+ virtual
+ int count_update_method (
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.25 enum_update_method
+ // Returns the enumeration for the Index-th supported update method.
+ virtual
+ EUpdateMethod enum_update_method(
+ int Index,
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.26 has_audio_input
+ virtual
+ int has_audio_input(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.27 has_audio_output
+ virtual
+ int has_audio_output(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.28 count_audio_input_rate
+ virtual
+ int count_audio_input_rate(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.29 count_audio_output_rate
+ virtual
+ int count_audio_output_rate(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.30 enum_audio_input_rate
+ // Returns the enumeration for the Ith supported audio input rate.
+ virtual
+ EAudioRate enum_audio_input_rate(
+ int Index,
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.31 enum_audio_output_rate
+ // Returns the enumeration for the Ith supported audio output rate.
+ virtual
+ EAudioRate enum_audio_output_rate(
+ int Index,
+ int DeviceId=0
+ ) = 0;
+
+
+ // 4.2.32 has_audio_playthru
+ virtual
+ int has_audio_playthru(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.33 has_dma_control
+ virtual
+ int has_dma_control(
+ int DeviceId=0
+ ) = 0;
+
+ // 4.2.34 has_scaled_rgb
+ virtual
+ int has_scaled_rgb(
+ int DeviceId=0
+ ) = 0;
+\r
+ // 4.3 Control Functions
+ //---------------------------------
+ // NOTE(review): several setters below take non-const int&/unsigned long&
+ // parameters — presumably the driver writes back the value actually
+ // applied; confirm against the SDK manual before relying on it.
+ // 4.3.1 set_timing_adjust
+ // Adjusts the output timing by the given horizontal and vertical phase
+ // offsets. (The previous comment here described get_video_input_format
+ // and appeared to be a copy/paste error.)
+ virtual
+ BErr set_timing_adjust(
+ unsigned int HPhase,
+ unsigned int VPhase
+ ) = 0;
+
+ // 4.3.2 set_vertical_flip
+ virtual
+ BErr set_vertical_flip(
+ int& On
+ ) = 0;
+
+ // 4.3.3 set_output_key
+ virtual
+ BErr set_output_key(
+ int& On,
+ int& v4444,
+ int& Invert,
+ int& White
+ ) = 0;
+
+ // 4.3.4 set_output_key_on
+ virtual
+ BErr set_output_key_on(
+ int& On
+ ) = 0;
+
+ // 4.3.5 set_output_key_v4444
+ virtual
+ BErr set_output_key_v4444(
+ int& v4444
+ ) = 0;
+
+ // 4.3.6 set_output_key_invert
+ virtual
+ BErr set_output_key_invert(
+ int& Invert
+ ) = 0;
+
+ // 4.3.7 set_output_key_white
+ virtual
+ BErr set_output_key_white(
+ int& White
+ ) = 0;
+
+ // 4.3.8 set_letterbox
+ virtual
+ BErr set_letterbox(
+ unsigned int& Lines,
+ int& Black
+ ) = 0;
+
+ // 4.3.9 set_letterbox_lines
+ virtual
+ BErr set_letterbox_lines(
+ unsigned int& Lines
+ ) = 0;
+
+ // 4.3.10 set_letterbox_black
+ virtual
+ BErr set_letterbox_black(
+ int& Black
+ ) = 0;
+
+ // 4.3.11 set_safearea
+ virtual
+ BErr set_safearea(
+ int& Title,
+ int& Picture
+ ) = 0;
+
+ // 4.3.12 set_safearea_title
+ virtual
+ BErr set_safearea_title(
+ int& Title
+ ) = 0;
+
+ // 4.3.13 set_safearea_picture
+ virtual
+ BErr set_safearea_picture(
+ int& Picture
+ ) = 0;
+
+ // 4.3.14 set_output_video
+ virtual
+ BErr set_output_video(
+ int& Enable
+ ) = 0;
+
+ // 4.3.15 set_audio_rate
+ virtual
+ BErr set_audio_rate(
+ unsigned long& Rate
+ ) = 0;
+
+ // 4.3.16 set_audio_playthrough
+ virtual
+ BErr set_audio_playthrough(
+ int& Playthru
+ ) = 0;
+
+ // 4.3.17 wait_output_video_synch
+ // Blocks until the next output video sync of the requested update format;
+ // FieldCount receives the current field count.
+ virtual
+ BErr wait_output_video_synch(
+ unsigned long UpdFmt,
+ unsigned long& FieldCount
+ ) = 0;
+
+ // 4.3.18 get_output_video_synch_count
+ virtual
+ BErr get_output_video_synch_count(
+ unsigned long& FieldCount
+ ) = 0;
+
+ // 4.3.19 set_scaled_rgb
+ virtual
+ BErr set_scaled_rgb(
+ unsigned long& On
+ ) = 0;
+
+ // 4.3.20 wait_pci_interrupt
+ virtual
+ BErr wait_pci_interrupt(
+ unsigned long Wait
+ ) = 0;
+
+ // 4.3.21 get_audio_rate
+ virtual
+ BErr get_audio_rate(
+ unsigned long& Rate
+ ) = 0;
+
+ // 4.3.22 wait_input_video_synch
+ // Blocks until the next input video sync of the requested update format;
+ // FieldCount receives the current field count.
+ virtual
+ BErr wait_input_video_synch(
+ unsigned long UpdFmt,
+ unsigned long& FieldCount
+ ) = 0;
+
+ // 4.3.23 get_input_video_synch_count
+ virtual
+ BErr get_input_video_synch_count(
+ unsigned long& FieldCount
+ ) = 0;
+\r
+\r
+ // 4.4 Video STYLE Functions
+ //---------------------------------
+ // 4.4.1 get_video_input_format
+ // Determines the video format of a signal applied to the Link A input.
+ virtual
+ BErr get_video_input_format(
+ unsigned long& VidFmt
+ ) = 0;
+
+ // 4.4.2 get_video_genlock_format
+ // Determines the video format of a signal applied to the GENLOCK input.
+ virtual
+ BErr get_video_genlock_format(
+ unsigned long& VidFmt
+ ) = 0;
+
+ // 4.4.3 get_video_output_format
+ // Determines the video format of the output signal.
+ virtual
+ BErr get_video_output_format(
+ unsigned long& VidFmt
+ ) = 0;
+
+ // 4.4.4 set_video_output_format
+ // Changes the output signal video format of Link A output.
+ virtual
+ BErr set_video_output_format(
+ unsigned long& VidFmt
+ ) = 0;
+
+ // 4.4.5 get_video_memory_format
+ // Determines the pixel format for blue card video buffers.
+ virtual
+ BErr get_video_memory_format(
+ unsigned long& MemFmt
+ ) = 0;
+
+ // 4.4.6 set_video_memory_format
+ // Changes the pixel format for blue card video buffers.
+ virtual
+ BErr set_video_memory_format(
+ unsigned long& MemFmt
+ ) = 0;
+
+ // 4.4.7 get_video_update_format
+ // Determines the update synchronisation style of the video buffers.
+ virtual
+ BErr get_video_update_format(
+ unsigned long& UpdFmt
+ ) = 0;
+
+ // 4.4.8 set_video_update_format
+ // Changes the video synchronisation method.
+ virtual
+ BErr set_video_update_format(
+ unsigned long& UpdFmt
+ ) = 0;
+
+ // 4.4.9 get_video_zoom_format
+ // Determines the video resolution style of the video buffers.
+ virtual
+ BErr get_video_zoom_format(
+ unsigned long& ResFmt
+ ) = 0;
+
+ // 4.4.10 set_video_zoom_format
+ // Changes the video resolution style of the video buffers.
+ virtual
+ BErr set_video_zoom_format(
+ unsigned long& ResFmt
+ ) = 0;
+
+ // 4.4.11 get_video_framestore_style
+ // Determines the video mode, memory format and update synchronisation
+ // styles of the blue card video buffers.
+ virtual
+ BErr get_video_framestore_style(
+ unsigned long& VidFmt,
+ unsigned long& MemFmt,
+ unsigned long& UpdFmt,
+ unsigned long& ResFmt
+ ) = 0;
+
+ // 4.4.12 set_video_framestore_style
+ // Changes the video mode, memory format and update synchronisation styles.
+ virtual
+ BErr set_video_framestore_style(
+ unsigned long& VidFmt,
+ unsigned long& MemFmt,
+ unsigned long& UpdFmt,
+ unsigned long& ResFmt
+ ) = 0;
+
+ // 4.4.13 get_video_engine
+ // Retrieves the current operational mode of the video engine.
+ // (The previous comment here duplicated set_video_engine's description
+ // and appeared to be a copy/paste error.)
+ virtual
+ BErr get_video_engine(
+ unsigned long& Mode
+ ) = 0;
+
+ // 4.4.14 set_video_engine
+ // Instruct the device driver to change the operational mode of the
+ // video engine.
+ virtual
+ BErr set_video_engine(
+ unsigned long& Mode
+ ) = 0;
+\r
+
+ // 4.5 DMA Memory Functions
+ //---------------------------------
+ // 4.5.1 system_buffer_map
+ // Obtains the virtual address of one of the driver managed system buffers.
+ virtual
+ BErr system_buffer_map(
+ void** ppAddress,
+ int BufferId
+ ) = 0;
+
+ // 4.5.2 system_buffer_unmap
+ // Unmaps the virtual address of one of the driver managed system buffers
+ // from the process address space.
+ virtual
+ BErr system_buffer_unmap(
+ void* pAddress
+ ) = 0;
+
+ // 4.5.3 system_buffer_assign
+ // Assign an arbitrary usermode buffer to a particular DMA function.
+ virtual
+ BErr system_buffer_assign(
+ void* pAddress,
+ unsigned long Id,
+ unsigned long Length,
+ unsigned long Target
+ ) = 0;
+
+ // 4.5.4 system_buffer_write
+ // Instructs the DMA engine to begin a DMA write operation to the
+ // active blue card host buffer.
+ // DEPRECATED; DO NOT USE! USE system_buffer_write_async() instead
+ virtual
+ int system_buffer_write(
+ unsigned char* pPixels,
+ unsigned long Size,
+ unsigned long Offset=0
+ ) = 0;
+
+ // 4.5.5 system_buffer_read
+ // Instructs the DMA engine to begin a DMA read operation from the
+ // active blue card host buffer.
+ // DEPRECATED; DO NOT USE! USE system_buffer_read_async() instead
+ virtual
+ int system_buffer_read(
+ unsigned char* pPixels,
+ unsigned long Size,
+ unsigned long Offset=0
+ ) = 0;
+
+ // 4.5.6 system_buffer_write_async
+ // Asynchronous DMA write to the blue card buffer identified by BufferID;
+ // completion is signalled through the caller-supplied OVERLAPPED.
+ virtual
+ int system_buffer_write_async(
+ unsigned char* pPixels,
+ unsigned long Size,
+ OVERLAPPED * pAsync,
+ unsigned long BufferID,
+ unsigned long Offset=0
+ ) = 0;
+
+ // 4.5.7 system_buffer_read_async
+ // Asynchronous DMA read from the blue card buffer identified by BufferID;
+ // completion is signalled through the caller-supplied OVERLAPPED.
+ virtual
+ int system_buffer_read_async(
+ unsigned char* pPixels,
+ unsigned long Size,
+ OVERLAPPED * pAsync,
+ unsigned long BufferID,
+ unsigned long Offset=0
+ ) = 0;
+\r
+\r
+ // 4.6 Framestore Functions
+ //---------------------------------
+ // 4.6.1 render_buffer_count
+ // Determines the number of buffers the blue card memory has been partitioned into.
+ virtual
+ BErr render_buffer_count(
+ unsigned long& Count
+ ) = 0;
+
+ // 4.6.2 render_buffer_update
+ // Instructs the video digitiser to select a blue card buffer to rasterise.
+ virtual
+ BErr render_buffer_update(
+ unsigned long BufferId
+ ) = 0;
+
+ // 4.6.3 render_buffer_update_b
+ // Instructs the video digitiser to select a blue card buffer as the video
+ // channel B source for real-time dissolves.
+ virtual
+ BErr render_buffer_update_b(
+ unsigned long BufferId
+ ) = 0;
+
+ // 4.6.4 render_buffer_dissolve
+ // Set the percentage of Channel A over Channel B for real-time dissolve.
+ virtual
+ BErr render_buffer_dissolve(
+ unsigned long Dissolve
+ ) = 0;
+
+ // 4.6.5 render_buffer_dissolve_a_b
+ // Set the video source for Channel A and Channel B and the dissolve
+ // percentage between them.
+ virtual
+ BErr render_buffer_dissolve_a_b(
+ unsigned long BufferId_A,
+ unsigned long BufferId_B,
+ unsigned long Dissolve
+ ) = 0;
+
+ // 4.6.6 render_buffer_map
+ // Get the virtual address of the indexed blue card buffer.
+ virtual
+ BErr render_buffer_map(
+ void** pAddress,
+ unsigned long BufferId
+ ) = 0;
+
+ // 4.6.7 render_buffer_map_aperture
+ // Get the virtual address of the 8-bit aperture for the indexed blue card buffer.
+ virtual
+ BErr render_buffer_map_aperture(
+ void** pAddress,
+ unsigned long BufferId
+ ) = 0;
+
+ // 4.6.8 render_buffer_map_all
+ // Generates a table of the virtual addresses for all blue card buffers.
+ virtual
+ BErr render_buffer_map_all(
+ void** pTable,
+ unsigned long& Count
+ ) = 0;
+
+
+ // 4.6.9 render_buffer_map_aperture_all
+ // Generates a table of the virtual addresses for the 8-bit aperture
+ // of all blue card buffers.
+ virtual
+ BErr render_buffer_map_aperture_all(
+ void** pTable,
+ unsigned long& Count
+ ) = 0;
+
+ // 4.6.10 render_buffer_select
+ // Specify which blue card buffer will become the target of future DMA transactions.
+ virtual
+ BErr render_buffer_select(
+ unsigned long BufferId
+ ) = 0;
+
+ // 4.6.11 render_buffer_capture
+ // Specify which blue card buffer will be used for capture.
+ virtual
+ BErr render_buffer_capture(
+ unsigned long BufferId,
+ int On
+ ) = 0;
+
+ // 4.6.12 render_buffer_sizeof
+ // Determine the maximum byte size of each blue card memory partition.
+ virtual
+ BErr render_buffer_sizeof(
+ unsigned long& Count,
+ unsigned long& Length,
+ unsigned long& Actual,
+ unsigned long& Golden
+ ) = 0;
+
+ // 4.6.13 render_buffer_quantise
+ // Control whether blue card memory is repartitioned on style changes.
+ virtual
+ BErr render_buffer_quantise(
+ int On
+ ) = 0;
+\r
+ // 4.7 Audio Functions
+ //---------------------------------
+ // 4.7.1 audio_playback_start
+ // Start audio playback.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_start(
+ unsigned long Synch
+ ) = 0;
+
+ // 4.7.2 audio_playback_stop
+ // Stop audio playback.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_stop(
+ void
+ ) = 0;
+
+ // 4.7.3 audio_playback_stream
+ // Register a native interleaved audio file for playback.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_stream(
+ char* pName,
+ int Offset,
+ int Flags
+ ) = 0;
+
+ // 4.7.4 audio_playback_stream_mono
+ // Register a native monophonic audio file for playback.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_stream_mono(
+ unsigned long Chan,
+ char* pName,
+ int Offset,
+ int Flags
+ ) = 0;
+
+ // 4.7.5 audio_playback_stream_stereo
+ // Register a native stereophonic audio file for playback.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_stream_stereo(
+ unsigned long Pair,
+ char* pName,
+ int Offset,
+ int Flags
+ ) = 0;
+
+ // 4.7.6 audio_playback_buffer
+ // Register a native 6-channel interleaved audio buffer for playback.
+ // pFunc is a caller-supplied refill callback invoked with pGlobal.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_buffer(
+ void* pGlobal,
+ unsigned long* pBuffer,
+ unsigned long Length,
+ unsigned long Chunk,
+ int (*pFunc)(void* pGlobal, unsigned long* pBuffer, int Offset, int Length),
+ int Flags
+ ) = 0;
+
+ // 4.7.7 audio_playback_buffer_mono
+ // Register a native monophonic audio buffer for playback.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_buffer_mono(
+ unsigned long Chan,
+ void* pGlobal,
+ unsigned long* pBuffer,
+ unsigned long Length,
+ unsigned long Chunk,
+ int (*pFunc)(void* pGlobal, unsigned long* pBuffer, int Offset, int Length),
+ int Flags
+ ) = 0;
+
+ // 4.7.8 audio_playback_buffer_stereo
+ // Register a native stereophonic audio buffer for playback.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_buffer_stereo(
+ unsigned long Pair,
+ void* pGlobal,
+ unsigned long* pBuffer,
+ unsigned long Length,
+ unsigned long Chunk,
+ int (*pFunc)(void* pGlobal, unsigned long* pBuffer, int Offset, int Length),
+ int Flags
+ ) = 0;
+
+ // 4.7.9 audio_playback_deregister
+ // De-registers a native 6-channel interleaved audio source from playback.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_deregister(
+ void
+ ) = 0;
+
+ // 4.7.10 audio_playback_deregister_mono
+ // De-registers a native monophonic audio source from playback.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_deregister_mono(
+ unsigned long Chan
+ ) = 0;
+
+ // 4.7.11 audio_playback_deregister_stereo
+ // De-registers a native stereophonic audio source from playback.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_deregister_stereo(
+ unsigned long Pair
+ ) = 0;
+
+ // 4.7.12 AudioHandlerPlay
+ // Moves source audio data streams into the playback buffer.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr AudioHandlerPlay(
+ unsigned long& Snooze
+ ) = 0;
+
+ // 4.7.13 audio_capture_start
+ // Begin capturing audio.
+ virtual
+ BErr audio_capture_start(
+ unsigned long Synch,
+ unsigned long PlayThru
+ ) = 0;
+
+ // 4.7.14 audio_capture_stop
+ // Stop capturing audio.
+ virtual
+ BErr audio_capture_stop(
+ void
+ ) = 0;
+
+ // 4.7.15 audio_capture_stream
+ // Register a file for capture of native interleaved audio.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_capture_stream (
+ char* pName,
+ int Offset,
+ int Flags
+ ) = 0;
+
+ // 4.7.16 audio_capture_stream_mono
+ // Register a file for capture of native monophonic audio.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_capture_stream_mono(
+ unsigned long Chan,
+ char* pName,
+ int Offset,
+ int Flags
+ ) = 0;
+
+ // 4.7.17 audio_capture_stream_stereo
+ // Register a file for capture of native stereophonic audio.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_capture_stream_stereo(
+ unsigned long Pair,
+ char* pName,
+ int Offset,
+ int Flags
+ ) = 0;
+
+ // 4.7.18 audio_capture_buffer
+ // Register a buffer for capture of native interleaved audio.
+ // pFunc is a caller-supplied drain callback invoked with pGlobal.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_capture_buffer(
+ void* pGlobal,
+ unsigned long* pBuffer,
+ unsigned long Length,
+ unsigned long Chunk,
+ int (*pFunc)(void* pGlobal, unsigned long* pBuffer, int Offset, int Length),
+ int Flags
+ ) = 0;
+
+ // 4.7.19 audio_capture_buffer_mono
+ // Register a buffer for capture of native monophonic audio.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_capture_buffer_mono(
+ unsigned long Chan,
+ void* pGlobal,
+ unsigned long* pBuffer,
+ unsigned long Length,
+ unsigned long Chunk,
+ int (*pFunc)(void* pGlobal, unsigned long* pBuffer, int Offset, int Length),
+ int Flags
+ ) = 0;
+
+ // 4.7.20 audio_capture_buffer_stereo
+ // Register a buffer for capture of native stereophonic audio.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_capture_buffer_stereo(
+ unsigned long Pair,
+ void* pGlobal,
+ unsigned long* pBuffer,
+ unsigned long Length,
+ unsigned long Chunk,
+ int (*pFunc)(void* pGlobal, unsigned long* pBuffer, int Offset, int Length),
+ int Flags
+ ) = 0;
+
+ // 4.7.21 audio_capture_deregister
+ // De-registers a buffer from capture monitor thread.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_capture_deregister(
+ void
+ ) = 0;
+
+ // 4.7.22 audio_capture_deregister_mono
+ // De-registers a single monophonic audio buffer from capture monitor thread.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_capture_deregister_mono(
+ unsigned long Chan
+ ) = 0;
+
+ // 4.7.23 audio_capture_deregister_stereo
+ // De-registers a stereophonic audio buffer from capture monitor thread.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_capture_deregister_stereo(
+ unsigned long Pair
+ ) = 0;
+
+
+ // 4.7.24 audio_playback_threshold
+ // Adjust the Chunk and Snooze times for the Audio Playback Monitor Thread
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_threshold(
+ unsigned long Chunk,
+ unsigned long Snooze
+ ) = 0;
+
+ // 4.7.25 audio_capture_sample_count
+ // Number of samples captured.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ ULONG audio_capture_sample_count() = 0;
+
+ // 4.7.26 audio_capture_sample_count_mono
+ // Number of samples captured.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ ULONG audio_capture_sample_count_mono(unsigned long Chan) = 0;
+
+ // 4.7.27 audio_capture_sample_count_stereo
+ // Number of samples captured.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ ULONG audio_capture_sample_count_stereo(unsigned long Pair) = 0;
+
+ // 4.7.28 audio_playback_blip
+ // Channel is to be blipped
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr audio_playback_blip(
+ int Channel
+ ) = 0;
+\r
+ // 4.8 Video Engine Functions
+ //---------------------------------
+ // 4.8.1 video_playback_start
+ // Start video playback.
+ virtual
+ BErr video_playback_start(
+ int Step,
+ int Loop
+ ) = 0;
+
+ // 4.8.2 video_playback_stop
+ // Halts the video playback engine.
+ virtual
+ BErr video_playback_stop(
+ int Wait,
+ int Flush
+ ) = 0;
+
+ // 4.8.3 video_playback_flush
+ // Flush all pending display requests from all Channels.
+ virtual
+ BErr video_playback_flush(
+ void
+ ) = 0;
+
+ // 4.8.4 video_playback_flush_A
+ // Flush all pending display requests from Channel A.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr video_playback_flush_A(
+ void
+ ) = 0;
+
+ // 4.8.5 video_playback_flush_B
+ // Flush all pending display requests from Channel-B.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr video_playback_flush_B(
+ void
+ ) = 0;
+
+ // 4.8.6 video_playback_allocate
+ // Obtain the address of the next available video memory buffer.
+ // Underrun receives the count of playback underruns so far.
+ virtual
+ BErr video_playback_allocate(
+ void** pAddress,
+ unsigned long& BufferId,
+ unsigned long& Underrun
+ ) = 0;
+
+ // 4.8.7 video_playback_release
+ // Release physical blue card video buffer.
+ virtual
+ BErr video_playback_release(
+ unsigned long BufferId
+ ) = 0;
+
+ // 4.8.8 video_playback_flush_display
+ // Remove a unique display request from the display lists.
+ virtual
+ BErr video_playback_flush_display(
+ unsigned long UniqueId
+ ) = 0;
+
+ // 4.8.9 video_playback_release_flush
+ // Purges all pending display requests and returns the frame to the free list.
+ virtual
+ BErr video_playback_release_flush(
+ unsigned long BufferId
+ ) = 0;
+
+ // 4.8.10 video_playback_present
+ // Present a buffer to the video playback engine Channel-A.
+ // UniqueId receives the identifier assigned to this display request.
+ virtual
+ BErr video_playback_present(
+ unsigned long& UniqueId,
+ unsigned long BufferId,
+ unsigned long Count,
+ int Keep,
+ int Odd=0
+ ) = 0;
+
+ // 4.8.11 video_playback_present_dissolve
+ // Present a frame with a dissolve value to the video playback engine.
+ virtual
+ BErr video_playback_present_dissolve(
+ unsigned long& UniqueId,
+ unsigned long BufferId,
+ unsigned long Count,
+ unsigned long Dissolve,
+ int Keep,
+ int Odd=0
+ ) = 0;
+
+ // 4.8.12 video_playback_present_A
+ // Present a frame to the video playback engine that will be inserted into Channel-A.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr video_playback_present_A(
+ unsigned long& UniqueId,
+ unsigned long BufferId,
+ unsigned long Count,
+ unsigned long Dissolve,
+ int Synch_B,
+ int Keep,
+ int Odd=0
+ ) = 0;
+
+ // 4.8.13 video_playback_present_B
+ // Present a frame to the video playback engine that will be inserted into Channel-B.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr video_playback_present_B(
+ unsigned long& UniqueId,
+ unsigned long BufferId,
+ unsigned long Count,
+ unsigned long Dissolve,
+ int Synch_A,
+ int Keep,
+ int Odd=0
+ ) = 0;
+
+ // 4.8.14 video_playback_present_detail
+ // The general purpose presentation function.
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr video_playback_present_detail(
+ unsigned long& UniqueId,
+ unsigned long BufferId,
+ unsigned long Count,
+ unsigned long Dissolve,
+ unsigned long Flags
+ ) = 0;
+
+ // 4.8.15 video_capture_start
+ // Instruct the device driver to begin capturing images into the video framestore.
+ virtual
+ BErr video_capture_start(
+ int Step=0
+ ) = 0;
+
+ // 4.8.16 video_capture_stop
+ // Instruct the device driver to stop the video capture.
+ virtual
+ BErr video_capture_stop(
+ void
+ ) = 0;
+
+ // 4.8.17 video_capture_harvest
+ // Get the details about the next frame in a capture sequence.
+ virtual
+ BErr video_capture_harvest(
+ void** ppAddress,
+ unsigned long& BufferId,
+ unsigned long& Count,
+ unsigned long& Frames,
+ int CompostLater=0
+ ) = 0;
+
+ // not used for anything important...
+ // DEPRECATED; DO NOT USE!
+ virtual
+ BErr nudge_frame(LONG nudge) = 0;
+
+ // 4.8.18 video_playback_pause
+ // Suspend or Resume playback
+ virtual
+ BErr video_playback_pause(
+ int Suspend
+ ) = 0;
+
+ // 4.8.19 video_capture_compost
+ // Return a harvested frame for recycling
+ virtual
+ BErr video_capture_compost(
+ unsigned long BufferId
+ ) = 0;
+\r
+#ifdef BLUEVELVET_SDK_VERSION3
+ // SDK version 3 extensions: onboard keyer, timing readback, dual-link,
+ // colour-space matrix and downconverter control.
+ virtual BErr set_onboard_keyer(int & On)=0;
+ virtual BErr get_onboard_keyer_status(int &On)=0;
+ virtual BErr get_timing_adjust(unsigned int & HPhase,unsigned int & VPhase,unsigned int & MaxHPhase,unsigned int & MaxVPhase) = 0;
+ virtual BErr get_letterbox_values(unsigned int& Lines,int & bBlackEnableFlag)=0;
+ virtual BErr get_safearea_info(int& Title,int& Picture)=0;
+ virtual int has_downconverter_bnc(int deviceId)=0;
+ virtual int has_onboard_keyer(int deviceId)=0;
+ virtual int has_duallink_input(int deviceId)=0;
+ virtual int has_programmable_colorspace_matrix(int deviceId)=0;
+
+ // Programmable colour-space matrix: one row of coefficients plus a
+ // constant term per output component (G/Y, R/Pr, B/Pb).
+ virtual BErr SetMatrix_Col_Green_Y(double CoeffG_R,double CoeffG_G,double CoeffG_B,double constG)=0;
+ virtual BErr GetMatrix_Col_Green_Y(double & CoeffG_R,double & CoeffG_G,double & CoeffG_B,double & constG)=0;
+
+ virtual BErr SetMatrix_Col_Red_PR(double CoeffR_R,double CoeffR_G,double CoeffR_B,double constR)=0;
+ virtual BErr GetMatrix_Col_Red_PR(double & CoeffR_R,double & CoeffR_G,double & CoeffR_B,double & constR)=0;
+
+ virtual BErr SetMatrix_Col_Blue_PB(double CoeffB_R,double CoeffB_G,double CoeffB_B,double constB)=0;
+ virtual BErr GetMatrix_Col_Blue_PB(double & CoeffB_R,double & CoeffB_G,double & CoeffB_B,double & constB)=0;
+
+ virtual BErr SetDualLink_Output_Conn_SignalColorSpace(unsigned long & signalType,unsigned long updateMatrixFlag)=0;
+ virtual BErr SetDualLink_Input(unsigned long & EnableDualLink)=0;
+ virtual BErr SetDualLink_Input_SignalFormatType(unsigned long &v4444)=0;
+ virtual BErr GetDualLink_InputProperty(unsigned long & DualLink,unsigned long & connSignalColorSpace,unsigned long & v4444)=0;
+ virtual BErr GetDualLink_OutputProperty(unsigned long & DualLink,unsigned long & connSignalColorSpace,unsigned long & v4444)=0;
+
+ virtual BErr Set_DownConverterSignalType(unsigned long type)=0;
+ virtual BErr GetDownConverterSignalType(unsigned long & connSignalType)=0;
+
+ virtual BErr SetDualLink_Input_Conn_SignalColorSpace(unsigned long & signalType)=0;
+ virtual int GetHDCardType(int nDeviceId)=0;
+
+ // New Audio Interface
+ virtual BErr MaxAudioOutBufferSize(long & nSampleCount)=0;
+ virtual BErr GetAudioOutBufferFreeSpace(long & nSampleCount)=0;
+ virtual BErr wait_audio_output_interrupt(unsigned long & noQueuedSample,unsigned long & noFreeSample) = 0;
+ virtual BErr InitAudioPlaybackMode()=0;
+ virtual BErr StartAudioPlayback(int syncCount)=0;
+ virtual BErr StopAudioPlayback()=0;
+ virtual BErr WriteAudioSample(int nSampleType,void * pBuffer,long nNoSample,int bFlag,long nNoSamplesWritten)=0;
+ virtual BErr EndAudioPlaybackMode()=0;
+ virtual int GetPCIRevId()=0;
+#endif
+
+ // Need this so that derived destructor gets called
+ virtual ~CBlueVelvet(){}
+ HANDLE m_hDevice; // Handle to the blue card device driver
+};
+\r
+\r
+//------------------------------------------------------------------------------------------------------------
+extern "C" {
+//------------------------------------------------------------------------------------------------------------
+// 4.0.0 The Blue Velvet factory
+// Creates a new CBlueVelvet instance; ownership passes to the caller.
+BLUEFISH_API CBlueVelvet* BlueVelvetFactory();
+
+
+// 4.0.1 Who am I
+// Returns the SDK version string.
+BLUEFISH_API const char* BlueVelvetVersion();
+
+// 4.0.2 Golden Value calculation
+// Computes the "golden" (page-aligned) frame byte size for the given
+// video mode, memory format and update format.
+BLUEFISH_API unsigned long BlueVelvetGolden(
+ unsigned long VidFmt,
+ unsigned long MemFmt,
+ unsigned long UpdFmt
+ );
+// 4.0.3 Bytes Per Line calculation
+BLUEFISH_API unsigned long BlueVelvetLineBytes(
+ unsigned long VidFmt,
+ unsigned long MemFmt
+ );
+// 4.0.4 Bytes Per Frame calculation
+BLUEFISH_API unsigned long BlueVelvetFrameBytes(
+ unsigned long VidFmt,
+ unsigned long MemFmt,
+ unsigned long UpdFmt
+ );
+
+// 4.0.5 Lines Per Frame calculation
+BLUEFISH_API unsigned long BlueVelvetFrameLines(
+ unsigned long VidFmt,
+ unsigned long UpdFmt
+ );
+
+// 4.0.6 Pixels per Line calculation
+BLUEFISH_API unsigned long BlueVelvetLinePixels(
+ unsigned long VidFmt
+ );
+
+// Number of VBI lines for the given video mode and frame type.
+BLUEFISH_API unsigned long BlueVelvetVBILines(unsigned long VidFmt,unsigned long FrameType);
+
+}
+
+#endif //_BLUEVELVET_H
--- /dev/null
+#ifndef _BLUEVELVET4_H\r
+#define _BLUEVELVET4_H\r
+\r
+#include "BlueVelvet.h"\r
+#include "BlueC_Api.h"\r
+// Value and valid range of an analog card property, as reported by
+// CBlueVelvet4::GetAnalogCardProperty.
+typedef struct 
+{
+ VARIANT maxRange;      // maximum accepted value for the property
+ VARIANT minRange;      // minimum accepted value for the property
+ VARIANT currentValue;  // current value of the property
+ unsigned long uniqueSteps; // number of discrete steps between min and max
+}AnalogPropertyValue;
+\r
+\r
+// Version-4 extension of the CBlueVelvet interface: analog connector
+// properties, generic card properties, the newer audio capture
+// architecture and RS422 serial-port access. Instances are created with
+// BlueVelvetFactory4() and released with BlueVelvetDestroy().
+class BLUEFISH_API CBlueVelvet4: virtual public CBlueVelvet 
+{
+public:
+ // Functions for new Analog Card Property 
+ virtual BErr SetAnalogCardProperty(int propType,VARIANT & propValue)=0;
+ virtual BErr GetAnalogCardProperty(int propType,AnalogPropertyValue & propValue)=0;
+ virtual BErr GetAnalogCardProperty(int propType,VARIANT & propValue)=0;
+
+ virtual BErr has_analog_connector(int DeviceId) = 0;
+ virtual int has_svideo_input(int DeviceId) = 0;
+ virtual int has_component_input(int DeviceId)=0;
+ virtual int has_composite_input(int DeviceId)=0;
+ virtual int has_svideo_output(int DeviceId)=0;
+ virtual int has_component_output(int DeviceId)=0;
+ virtual int has_analog_rgb_output(int DeviceId)=0;
+ virtual int has_composite_output(int DeviceId)=0;
+
+ // Functions for new Audio Input architecture
+ // Functions for Future use not implemented 
+ virtual BErr wait_audio_input_interrupt(unsigned long & noQueuedSample,unsigned long & noFreeSample) = 0;
+ virtual BErr InitAudioCaptureMode()=0;
+ virtual BErr StartAudioCapture(int syncCount)=0;
+ virtual BErr StopAudioCapture()=0;
+ virtual BErr ReadAudioSample(int nSampleType,void * pBuffer,long nNoSample,int bFlag,long nNoSamplesWritten)=0;
+ virtual BErr EndAudioCaptureMode()=0; 
+
+ // Generic card property access; property identifiers are presumably the
+ // EBlueCardProperty values — TODO confirm against BlueDriver_p.h.
+ virtual BErr SetCardProperty(int propType,VARIANT & Value)=0;
+ virtual BErr QueryCardProperty(int propType,VARIANT & Value)=0;
+
+ // RS422 Serial Port Functions
+ // Functions for Future use not implemented 
+ virtual BErr Wait_For_SerialPort_InputData(unsigned long bFlag,unsigned long & NoDataAvailable)=0;
+ virtual int IsSerialPort_OutputBuffer_Full(unsigned long bFlag)=0;
+ virtual int Read_From_SerialPort(unsigned long bFlag,unsigned char * pBuffer,unsigned long ReadLength)=0;
+ virtual int Write_To_SerialPort(unsigned long bFlag,unsigned char * pBuffer,unsigned long WriteLength)=0;
+};
+\r
+\r
+\r
+\r
+// C-linkage exports of the version-4 SDK: object lifetime, VANC geometry\r
+// helpers, capture-frame metadata, HANC (embedded-audio/ancillary) FIFOs,\r
+// async DMA/sync, scaler control and colour-matrix access.\r
+extern "C" {\r
+// Creates a CBlueVelvet4 instance; pair with BlueVelvetDestroy().\r
+BLUEFISH_API CBlueVelvet4* BlueVelvetFactory4();\r
+BLUEFISH_API void BlueVelvetDestroy(CBlueVelvet4* pObj);\r
+// Returns the number of VANC/VBI lines for the card type / video format / frame type.\r
+BLUEFISH_API unsigned int BlueVelvetVANCLineCount(unsigned int CardType,unsigned long VidFmt,unsigned long FrameType);\r
+\r
+// Returns the golden (DMA-aligned) buffer size in bytes for a VANC frame.\r
+BLUEFISH_API unsigned int BlueVelvetVANCGoldenValue( unsigned int CardType,\r
+ unsigned long VidFmt,\r
+ unsigned long MemFmt,\r
+ unsigned long FrameType);\r
+\r
+// Returns the number of bytes contained in one VANC line.\r
+BLUEFISH_API unsigned int BlueVelvetVANCLineBytes( unsigned int CardType,\r
+ unsigned long VidFmt,\r
+ unsigned long MemFmt);\r
+\r
+// Returns the byte count occupied by nPixelCount pixels in the given memory format.\r
+BLUEFISH_API unsigned int BlueVelvetBytesForGroupPixels(unsigned long MemFmt,unsigned int nPixelCount);\r
+BLUEFISH_API BErr SetVideo_MetaDataInfo(CBlueVelvet4 * pSdk,LPOVERLAPPED pOverLap,ULONG FrameId,ULONG prop,VARIANT value);\r
+\r
+// Capture-frame metadata queries. NOTE(review): the parameter is spelled\r
+// 'CompostLater' in the shipped header (sic) -- left unchanged to match the SDK.\r
+BLUEFISH_API BErr GetVideo_CaptureFrameInfo(CBlueVelvet4 * pSdk,LPOVERLAPPED pOverlap,struct blue_videoframe_info & video_capture_frame,int CompostLater);\r
+BLUEFISH_API BErr GetVideo_CaptureFrameInfoEx(CBlueVelvet4 * pSdk,LPOVERLAPPED pOverlap,struct blue_videoframe_info_ex & video_capture_frame,int CompostLater,unsigned int *capture_fifo_size);\r
+\r
+// HANC (horizontal ancillary data, e.g. embedded audio) FIFO management.\r
+BLUEFISH_API int GetHancQueuesInfo(CBlueVelvet4 * pSdk, LPOVERLAPPED pOverlap, UINT32 video_channel, UINT32* pFreeBuffers, UINT32* pMaximumBuffers, UINT32* pStatus);\r
+BLUEFISH_API int GetHancBuffer(CBlueVelvet4 * pSdk,LPOVERLAPPED pOverlap,UINT32 video_channel);\r
+BLUEFISH_API BERR PutHancBuffer(CBlueVelvet4 * pSdk,LPOVERLAPPED pOverlap,int hanc_buffer_id,UINT32 video_channel);\r
+BLUEFISH_API BERR HancInputFifoControl(CBlueVelvet4 * pSdk,LPOVERLAPPED pOverlap,UINT32 video_channel, UINT32 control);\r
+BLUEFISH_API BERR HancOutputFifoControl(CBlueVelvet4 * pSdk,LPOVERLAPPED pOverlap,UINT32 video_channel, UINT32 control);\r
+BLUEFISH_API int GetHancInputBuffer(CBlueVelvet4* pSdk,LPOVERLAPPED pOverlap,UINT32 video_channel,UINT32* signal_type, UINT32* field_count);\r
+// Extracts embedded-audio samples from a captured HANC buffer into dest_data_ptr.\r
+BLUEFISH_API BLUE_UINT32 emb_audio_decoder( BLUE_UINT32 * src_hanc_buffer,\r
+ void * dest_data_ptr,\r
+ BLUE_UINT32 req_audio_sample_count,\r
+ BLUE_UINT32 required_audio_channels,\r
+ BLUE_UINT32 sample_type);\r
+// Overlapped (asynchronous) variants of video-sync wait and DMA read.\r
+BLUEFISH_API BERR blue_wait_video_sync_async(CBlueVelvet4 * pSdk,\r
+ LPOVERLAPPED pOverlap,\r
+ blue_video_sync_struct * sync_struct);\r
+BLUEFISH_API BERR blue_dma_read_async( CBlueVelvet4 * pSdk,\r
+ LPOVERLAPPED pOverlap,\r
+ struct blue_dma_request_struct *pUserDmaRequest);\r
+\r
+\r
+// Loads a 1D lookup table (LUT) onto the card.\r
+BLUEFISH_API BERR blue_load_1D_lookup_table(CBlueVelvet4 * pSdk, struct blue_1d_lookup_table_struct * lut);\r
+\r
+// Reads (bOnlyReadValue true) or writes the source/destination rectangle of a\r
+// hardware video scaler; all geometry parameters are in/out pointers.\r
+BLUEFISH_API BERR blue_control_video_scaler(CBlueVelvet4 * pSdk, unsigned int nScalerId,\r
+ bool bOnlyReadValue,\r
+ float *pSrcVideoHeight,\r
+ float *pSrcVideoWidth,\r
+ float *pSrcVideoYPos,\r
+ float *pSrcVideoXPos,\r
+ float *pDestVideoHeight,\r
+ float *pDestVideoWidth,\r
+ float *pDestVideoYPos,\r
+ float *pDestVideoXPos);\r
+\r
+// Retrieves captured timecodes for a channel into pArray.\r
+// NOTE(review): required pArray capacity is not visible here -- presumably\r
+// *FieldCount entries; confirm against the SDK documentation.\r
+BLUEFISH_API BERR blue_Epoch_GetTimecodes(CBlueVelvet4 * pSdk, UINT32 VideoChannel, UINT64* pArray, UINT32* FieldCount);\r
+\r
+// Enumeration of the scaler output modes available for a given input mode:\r
+// count_scaler_video_mode() returns how many, enum_scaler_video_mode() returns\r
+// the mode at the given index.\r
+BLUEFISH_API unsigned int count_scaler_video_mode(CBlueVelvet4 * pSdk,\r
+ int device_id,\r
+ unsigned int video_channel,\r
+ unsigned int video_mode);\r
+BLUEFISH_API EVideoMode enum_scaler_video_mode(CBlueVelvet4 * pSdk,\r
+ int device_id,\r
+ unsigned int video_channel,\r
+ unsigned int video_mode,\r
+ unsigned int index);\r
+// Reads or writes the scaler's filter coefficient array.\r
+BLUEFISH_API BERR blue_video_scaler_filter_coefficent( CBlueVelvet4 * pSdk,\r
+ unsigned int nScalerId,\r
+ bool bOnlyReadValue,\r
+ unsigned int nFilterType,\r
+ float *pCoefficentWeightArray,\r
+ unsigned int nArrayElementCount\r
+ );\r
+// Pause/resume audio playback; resume re-synchronises after syncCount syncs.\r
+BLUEFISH_API BERR blue_audioplayback_pause(CBlueVelvet4 * pSdk);\r
+BLUEFISH_API BERR blue_audioplayback_resume(CBlueVelvet4 * pSdk, int syncCount);\r
+// Extended HANC queue query: adds sample usage and timestamp outputs.\r
+BLUEFISH_API BERR GetHancQueuesInfoEx(CBlueVelvet4 * pSdk, \r
+ LPOVERLAPPED pOverlap, \r
+ UINT32 video_channel, \r
+ UINT32* pFreeBuffers, \r
+ UINT32* pMaximumBuffers,\r
+ UINT32 * pStatus,\r
+ UINT32 * pSamplesUsed,\r
+ UINT64 *pStartHancFifoTimeStamp,\r
+ UINT64 *pVideoSyncTimeStamp);\r
+\r
+\r
+// Reads (bGetValue true) or writes the card's colour-space conversion matrix.\r
+BLUEFISH_API BERR blue_color_matrix(CBlueVelvet4 * pSdk,bool bGetValue,blue_color_matrix_struct * color_matrix_ptr);\r
+}\r
+\r
+\r
+#endif //_BLUEVELVET4_H
\ No newline at end of file
--- /dev/null
+/*\r
+// ==========================================================================\r
+// Bluefish444 BlueVelvet SDK library\r
+//\r
+// BlueVelvet_c.h (formerly BlueSD_c.h)\r
+// Constants header\r
+// LARGELY superseded by dynamic calculations\r
+\r
+\r
+ $Id: BlueVelvet_c.h,v 1.4 2002/10/02 00:29:53 cameron Exp $\r
+//\r
+// developed by : Cameron Duffy (C) 2002 Bluefish444 P/L\r
+// ==========================================================================\r
+//\r
+*/\r
+\r
+//----------------------------------------------------------------------------\r
+#ifndef _BLUEVELVET_C_H\r
+#define _BLUEVELVET_C_H\r
+\r
+//----------------------------------------------------------------------------------------------------------------------\r
+// File SUB-types supported\r
+//\r
+// Enumerates the RDV file sub-types; values intentionally mirror the\r
+// BLUE_FILE_* #defines below (EBlue_10BIT_NTSC == BLUE_FILE_10BIT_NTSC, etc.),\r
+// so the two lists must be kept in the same order.\r
+typedef enum\r
+{\r
+ EBlue_10BIT_NTSC=0, // 10 BIT NTSC\r
+ EBlue_10BIT_PAL, // 10 BIT PAL\r
+ EBlue_08BIT_NTSC, // 8 BIT NTSC\r
+ EBlue_08BIT_PAL, // 8 BIT PAL\r
+ EBlue_32BIT_NTSC, // 32 BIT NTSC (ARGB - uncompressed)\r
+ EBlue_32BIT_PAL // 32 BIT PAL (ARGB - uncompressed)\r
+} EBlueFileId;\r
+\r
+// File subtype IDs (keep in sync with EBlueFileId above)\r
+#define BLUE_FILE_10BIT_NTSC 0 // 10 BIT NTSC\r
+#define BLUE_FILE_10BIT_PAL 1 // 10 BIT PAL\r
+#define BLUE_FILE_08BIT_NTSC 2 // 8 BIT NTSC\r
+#define BLUE_FILE_08BIT_PAL 3 // 8 BIT PAL\r
+#define BLUE_FILE_32BIT_NTSC 4 // 32 BIT NTSC (ARGB - uncompressed)\r
+#define BLUE_FILE_32BIT_PAL 5 // 32 BIT PAL (ARGB - uncompressed)\r
+\r
+// File subtype FOURCC codes (ASCII packed MSB-first into the hex value).\r
+// NOTE(review): several values do not decode to the FOURCC shown in their\r
+// comments: 0x5259515f is 'RYQ_' (comment says 'RYP_'), 0x5238515f is 'R8Q_'\r
+// (comment says 'R8P_'), 0x5252415f is 'RRA_' (comment says 'RRP_'), and\r
+// 0x62626262 is 'bbbb' (comment says 'XXXX'). These are on-disk file-format\r
+// constants, so the hex values are deliberately left untouched -- verify the\r
+// comments against files actually written by the SDK.\r
+#define BLUE_SUBTYPE_10BIT_NTSC 0x52594e5f //'RYN_' // 10 BIT NTSC\r
+#define BLUE_SUBTYPE_10BIT_PAL 0x5259515f //'RYP_' // 10 BIT PAL\r
+#define BLUE_SUBTYPE_08BIT_NTSC 0x52384e5f //'R8N_' // 8 BIT NTSC\r
+#define BLUE_SUBTYPE_08BIT_PAL 0x5238515f //'R8P_' // 8 BIT PAL\r
+#define BLUE_SUBTYPE_32BIT_NTSC 0x52524e5f //'RRN_' // 32 BIT NTSC (ARGB - uncompressed)\r
+#define BLUE_SUBTYPE_32BIT_PAL 0x5252415f //'RRP_' // 32 BIT PAL (ARGB - uncompressed)\r
+\r
+#define BLUE_ROOTED_SUBTYPE 0x62626262 //'XXXX'\r
+#define BLUE_FILE_NOVIDEO 0x62626262 //'XXXX'\r
+#define BLUE_FILE_TYPE 0x5244565f //'RDV_'\r
+#define BLUE_CLASS_ID 0x52444456 //'RDDV'\r
+\r
+// align this structure on 512 byte boundary!\r
+typedef struct\r
+{\r
+ char name[20]; // "PREMIERE RDV_FILE";\r
+ ULONG hasAudio; // See BLUE_SUBTYPE_???\r
+ ULONG VideoSubtype; // See BLUE_SUBTYPE_???\r
+ ULONG width; // width of frame in pixels\r
+ ULONG height; // height of frame in pixels (can get video mode)\r
+ ULONG rowbytes; // total bytes in row (can get mem format from this and width)\r
+ ULONG numFrames; // number of frames in file\r
+ ULONG frameOffset; // GOLDEN frame size\r
+ ULONG duration; // TDB - value = total number of frames\r
+ long scale; // TDB - scale = scale / samplesize = timebase\r
+ long sampleSize; // TDB - sampleSize = 1 or 100 if 29.97 fps\r
+\r
+ ULONG gFmtVid;\r
+ ULONG gFmtMem;\r
+ ULONG gFmtUpd;\r
+ ULONG gFmtRes;\r
+ // 76 bytes\r
+ char orgtime[20]; // These fields map directly to those in imTimeInfoRec.\r
+ char alttime[20];\r
+ char orgreel[40];\r
+ char altreel[40];\r
+ // 196 bytes\r
+ char logcomment[256];\r
+ // 452 bytes\r
+// char pad[512-452-4];\r
+ char pad[56];\r
+ // For disk speed to work, this structure MUST be a multiple of sector size\r
+ ULONG len; // Length of TRAILER, *always* last!\r
+} RDV_File2_OLD;\r
+//#define SIZE_RDV_FILE 512\r
+\r
+// Page granularity used to size the audio block tables below.\r
+#define kGoldenPageSize 4096\r
+\r
+// Current on-disk trailer of an RDV file: the video fields of RDV_File2_OLD\r
+// plus audio parameters and per-block offset/size tables. The field layout is\r
+// the file format: do not reorder, resize, or insert members.\r
+typedef struct\r
+{\r
+ char name[20]; // "PREMIERE RDV_FILE ";\r
+ ULONG hasAudio; // See BLUE_SUBTYPE_???\r
+ ULONG VideoSubtype; // See BLUE_SUBTYPE_???\r
+ ULONG width; // width of frame in pixels\r
+ ULONG height; // height of frame in pixels (can get video mode)\r
+ ULONG rowbytes; // total bytes in row (can get mem format from this and width)\r
+ ULONG numFrames; // number of frames in file\r
+ ULONG frameOffset; // GOLDEN frame size\r
+ ULONG duration; // TBD - value = total number of frames\r
+ long scale; // TBD - scale = scale / samplesize = timebase\r
+ long sampleSize; // TBD - sampleSize = 1 or 100 if 29.97 fps\r
+\r
+ ULONG gFmtVid;\r
+ ULONG gFmtMem;\r
+ ULONG gFmtUpd;\r
+ ULONG gFmtRes;\r
+ // 76 bytes\r
+ char orgtime[20]; // These fields map directly to those in imTimeInfoRec.\r
+ char alttime[20];\r
+ char orgreel[40];\r
+ char altreel[40];\r
+ // 196 bytes\r
+ char logcomment[256];\r
+ // 452 bytes\r
+// char pad[512-452-4];\r
+ ULONG audioSampleRate; // 48000 or 96000\r
+ ULONG numChannels; // 2, 4, or 6\r
+ ULONG numAudioBlocks; // how many in the file?\r
+ // 464 bytes\r
+ char pad[36];\r
+\r
+ // NOTE(review): '_int64' is the MSVC-specific 64-bit integer type (more\r
+ // commonly spelled '__int64'); not portable beyond MSVC -- confirm on the\r
+ // target toolchain.\r
+ _int64 audioBlockOffsets[kGoldenPageSize * 4]; // something like 4.5 hours max length (enough for now I guess)\r
+ ULONG audioBlockSizes[kGoldenPageSize * 4];\r
+\r
+ // For disk speed to work, this structure MUST be a multiple of sector size\r
+ ULONG len; // Length of TRAILER, *always* last!\r
+} RDV_File2;\r
+\r
+#endif //_BLUEVELVET_C_H\r