doubleparameterwidget.cpp
audiosignal.cpp
blackmagic/include/DeckLinkAPIDispatch.cpp
+ stopmotion/stopmotion.cpp
+ stopmotion/capturehandler.cpp
blackmagic/capture.cpp
blackmagic/devices.cpp
- stopmotion/stopmotion.cpp
+ v4l/v4lcapture.cpp
+ v4l/src.c
+ v4l/src_v4l2.c
onmonitoritems/abstractonmonitoritem.cpp
onmonitoritems/onmonitorrectitem.cpp
onmonitoritems/onmonitorcornersitem.cpp
target_link_libraries(kdenlive ${QT_QTOPENGL_LIBRARY})
target_link_libraries(kdenlive ${OPENGL_LIBRARIES})
else (APPLE)
+ add_definitions(-DHAVE_V4L2)
if (OPENGL_FOUND)
target_link_libraries(kdenlive ${QT_QTOPENGL_LIBRARY})
target_link_libraries(kdenlive ${OPENGL_LIBRARIES})
#include "capture.h"
#include "kdenlivesettings.h"
-pthread_mutex_t sleepMutex;
-pthread_cond_t sleepCond;
-int videoOutputFile = -1;
-int audioOutputFile = -1;
+pthread_mutex_t sleepMutex;
+pthread_cond_t sleepCond;
+int videoOutputFile = -1;
+int audioOutputFile = -1;
-static BMDTimecodeFormat g_timecodeFormat = 0;
-static int g_videoModeIndex = -1;
-static int g_audioChannels = 2;
-static int g_audioSampleDepth = 16;
-static int g_maxFrames = -1;
-static QString doCaptureFrame;
-static double g_aspect_ratio = 16.0 / 9.0;
+static BMDTimecodeFormat g_timecodeFormat = 0;
+static int g_videoModeIndex = -1;
+static int g_audioChannels = 2;
+static int g_audioSampleDepth = 16;
+static int g_maxFrames = -1;
+static QString doCaptureFrame;
+static double g_aspect_ratio = 16.0 / 9.0;
-static unsigned long frameCount = 0;
+static unsigned long frameCount = 0;
void yuv2rgb_int(unsigned char *yuv_buffer, unsigned char *rgb_buffer, int width, int height)
{
-int len;
-int r,g,b;
-int Y,U,V,Y2;
-int rgb_ptr,y_ptr,t;
+ int len;
+ int r, g, b;
+ int Y, U, V, Y2;
+ int rgb_ptr, y_ptr, t;
- len=width*height / 2;
+ len = width * height / 2;
- rgb_ptr=0;
- y_ptr=0;
+ rgb_ptr = 0;
+ y_ptr = 0;
- for (t=0; t<len; t++) /* process 2 pixels at a time */
- {
- /* Compute parts of the UV components */
+ for(t = 0; t < len; t++) { /* process 2 pixels at a time */
+ /* Compute parts of the UV components */
- U = yuv_buffer[y_ptr];
- Y = yuv_buffer[y_ptr+1];
- V = yuv_buffer[y_ptr+2];
- Y2 = yuv_buffer[y_ptr+3];
- y_ptr +=4;
+ U = yuv_buffer[y_ptr];
+ Y = yuv_buffer[y_ptr+1];
+ V = yuv_buffer[y_ptr+2];
+ Y2 = yuv_buffer[y_ptr+3];
+ y_ptr += 4;
- /*r = 1.164*(Y-16) + 1.596*(V-128);
- g = 1.164*(Y-16) - 0.813*(V-128) - 0.391*(U-128);
- b = 1.164*(Y-16) + 2.018*(U-128);*/
-
+ /*r = 1.164*(Y-16) + 1.596*(V-128);
+ g = 1.164*(Y-16) - 0.813*(V-128) - 0.391*(U-128);
+ b = 1.164*(Y-16) + 2.018*(U-128);*/
- r = (( 298*(Y-16) + 409*(V-128) + 128) >> 8);
- g = (( 298*(Y-16) - 100*(U-128) - 208*(V-128) + 128) >> 8);
+ r = ((298 * (Y - 16) + 409 * (V - 128) + 128) >> 8);
- b = (( 298*(Y-16) + 516*(U-128) + 128) >> 8);
+ g = ((298 * (Y - 16) - 100 * (U - 128) - 208 * (V - 128) + 128) >> 8);
- if (r>255) r=255;
- if (g>255) g=255;
- if (b>255) b=255;
+ b = ((298 * (Y - 16) + 516 * (U - 128) + 128) >> 8);
- if (r<0) r=0;
- if (g<0) g=0;
- if (b<0) b=0;
+ if(r > 255) r = 255;
+ if(g > 255) g = 255;
+ if(b > 255) b = 255;
- rgb_buffer[rgb_ptr]=b;
- rgb_buffer[rgb_ptr+1]=g;
- rgb_buffer[rgb_ptr+2]=r;
- rgb_buffer[rgb_ptr+3]=255;
-
- rgb_ptr+=4;
- /*r = 1.164*(Y2-16) + 1.596*(V-128);
- g = 1.164*(Y2-16) - 0.813*(V-128) - 0.391*(U-128);
- b = 1.164*(Y2-16) + 2.018*(U-128);*/
+ if(r < 0) r = 0;
+ if(g < 0) g = 0;
+ if(b < 0) b = 0;
+ rgb_buffer[rgb_ptr] = b;
+ rgb_buffer[rgb_ptr+1] = g;
+ rgb_buffer[rgb_ptr+2] = r;
+ rgb_buffer[rgb_ptr+3] = 255;
- r = (( 298*(Y2-16) + 409*(V-128) + 128) >> 8);
+ rgb_ptr += 4;
+ /*r = 1.164*(Y2-16) + 1.596*(V-128);
+ g = 1.164*(Y2-16) - 0.813*(V-128) - 0.391*(U-128);
+ b = 1.164*(Y2-16) + 2.018*(U-128);*/
- g = (( 298*(Y2-16) - 100*(U-128) - 208*(V-128) + 128) >> 8);
- b = (( 298*(Y2-16) + 516*(U-128) + 128) >> 8);
+ r = ((298 * (Y2 - 16) + 409 * (V - 128) + 128) >> 8);
- if (r>255) r=255;
- if (g>255) g=255;
- if (b>255) b=255;
+ g = ((298 * (Y2 - 16) - 100 * (U - 128) - 208 * (V - 128) + 128) >> 8);
- if (r<0) r=0;
- if (g<0) g=0;
- if (b<0) b=0;
+ b = ((298 * (Y2 - 16) + 516 * (U - 128) + 128) >> 8);
- rgb_buffer[rgb_ptr]=b;
- rgb_buffer[rgb_ptr+1]=g;
- rgb_buffer[rgb_ptr+2]=r;
- rgb_buffer[rgb_ptr+3]=255;
- rgb_ptr+=4;
- }
+ if(r > 255) r = 255;
+ if(g > 255) g = 255;
+ if(b > 255) b = 255;
+
+ if(r < 0) r = 0;
+ if(g < 0) g = 0;
+ if(b < 0) b = 0;
+
+ rgb_buffer[rgb_ptr] = b;
+ rgb_buffer[rgb_ptr+1] = g;
+ rgb_buffer[rgb_ptr+2] = r;
+ rgb_buffer[rgb_ptr+3] = 255;
+ rgb_ptr += 4;
+ }
}
class CDeckLinkGLWidget : public QGLWidget, public IDeckLinkScreenPreviewCallback
{
private:
- QAtomicInt refCount;
- QMutex mutex;
- IDeckLinkInput* deckLinkIn;
- IDeckLinkGLScreenPreviewHelper* deckLinkScreenPreviewHelper;
- IDeckLinkVideoFrame* m_frame;
- QColor m_backgroundColor;
- GLuint m_texture;
- QImage m_img;
- double m_zx;
- double m_zy;
- int m_pictureWidth;
- int m_pictureHeight;
- bool m_transparentOverlay;
+ QAtomicInt refCount;
+ QMutex mutex;
+ IDeckLinkInput* deckLinkIn;
+ IDeckLinkGLScreenPreviewHelper* deckLinkScreenPreviewHelper;
+ IDeckLinkVideoFrame* m_frame;
+ QColor m_backgroundColor;
+ GLuint m_texture;
+ QImage m_img;
+ double m_zx;
+ double m_zy;
+ int m_pictureWidth;
+ int m_pictureHeight;
+ bool m_transparentOverlay;
public:
- CDeckLinkGLWidget(IDeckLinkInput* deckLinkInput, QWidget* parent);
- // IDeckLinkScreenPreviewCallback
- virtual HRESULT QueryInterface(REFIID iid, LPVOID *ppv);
- virtual ULONG AddRef();
- virtual ULONG Release();
- virtual HRESULT DrawFrame(IDeckLinkVideoFrame* theFrame);
- void showOverlay(QImage img, bool transparent);
- void hideOverlay();
+ CDeckLinkGLWidget(IDeckLinkInput* deckLinkInput, QWidget* parent);
+ // IDeckLinkScreenPreviewCallback
+ virtual HRESULT QueryInterface(REFIID iid, LPVOID *ppv);
+ virtual ULONG AddRef();
+ virtual ULONG Release();
+ virtual HRESULT DrawFrame(IDeckLinkVideoFrame* theFrame);
+ void showOverlay(QImage img, bool transparent);
+ void hideOverlay();
protected:
- void initializeGL();
- void paintGL();
- void resizeGL(int width, int height);
- /*void initializeOverlayGL();
- void paintOverlayGL();
- void resizeOverlayGL(int width, int height);*/
+ void initializeGL();
+ void paintGL();
+ void resizeGL(int width, int height);
+ /*void initializeOverlayGL();
+ void paintOverlayGL();
+ void resizeOverlayGL(int width, int height);*/
};
CDeckLinkGLWidget::CDeckLinkGLWidget(IDeckLinkInput* deckLinkInput, QWidget* parent) : QGLWidget(/*QGLFormat(QGL::HasOverlay | QGL::AlphaChannel),*/ parent)
, m_zy(1.0)
, m_transparentOverlay(true)
{
- refCount = 1;
- deckLinkIn = deckLinkInput;
- deckLinkScreenPreviewHelper = CreateOpenGLScreenPreviewHelper();
+ refCount = 1;
+ deckLinkIn = deckLinkInput;
+ deckLinkScreenPreviewHelper = CreateOpenGLScreenPreviewHelper();
}
void CDeckLinkGLWidget::showOverlay(QImage img, bool transparent)
m_img = convertToGLFormat(img);
m_zx = (double)m_pictureWidth / m_img.width();
m_zy = (double)m_pictureHeight / m_img.height();
- if (m_transparentOverlay) {
- glEnable(GL_BLEND);
- glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_COLOR);
- }
- else {
- glDisable(GL_BLEND);
+ if(m_transparentOverlay) {
+ glEnable(GL_BLEND);
+ glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_COLOR);
+ } else {
+ glDisable(GL_BLEND);
}
}
glDisable(GL_BLEND);
}
-void CDeckLinkGLWidget::initializeGL ()
-{
- if (deckLinkScreenPreviewHelper != NULL)
- {
- mutex.lock();
- deckLinkScreenPreviewHelper->InitializeGL();
- glShadeModel(GL_FLAT);
- glDisable(GL_DEPTH_TEST);
- glDisable(GL_CULL_FACE);
- glDisable(GL_LIGHTING);
- glDisable(GL_DITHER);
- glDisable(GL_BLEND);
-
- //Documents/images/alpha2.png");//
- //m_texture = bindTexture(convertToGLFormat(img), GL_TEXTURE_RECTANGLE_EXT, GL_RGBA8, QGLContext::LinearFilteringBindOption);
- mutex.unlock();
- }
+void CDeckLinkGLWidget::initializeGL()
+{
+ if(deckLinkScreenPreviewHelper != NULL) {
+ mutex.lock();
+ deckLinkScreenPreviewHelper->InitializeGL();
+ glShadeModel(GL_FLAT);
+ glDisable(GL_DEPTH_TEST);
+ glDisable(GL_CULL_FACE);
+ glDisable(GL_LIGHTING);
+ glDisable(GL_DITHER);
+ glDisable(GL_BLEND);
+
+ //Documents/images/alpha2.png");//
+ //m_texture = bindTexture(convertToGLFormat(img), GL_TEXTURE_RECTANGLE_EXT, GL_RGBA8, QGLContext::LinearFilteringBindOption);
+ mutex.unlock();
+ }
}
/*void CDeckLinkGLWidget::initializeOverlayGL ()
{
glDisable(GL_BLEND);
glEnable(GL_TEXTURE_RECTANGLE_EXT);
-
+
}
-void CDeckLinkGLWidget::paintOverlayGL()
+void CDeckLinkGLWidget::paintOverlayGL()
{
- makeOverlayCurrent();
- glEnable(GL_BLEND);
- //glClearDepth(0.5f);
- //glPixelTransferf(GL_ALPHA_SCALE, 10);
- //glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
-
-
+ makeOverlayCurrent();
+ glEnable(GL_BLEND);
+ //glClearDepth(0.5f);
+ //glPixelTransferf(GL_ALPHA_SCALE, 10);
+ //glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
+
+
}*/
-void CDeckLinkGLWidget::paintGL ()
-{
- mutex.lock();
- glLoadIdentity();
- qglClearColor(m_backgroundColor);
- //glClearColor(1.0f, 0.0f, 0.0f, 0.0f);
- glClear(GL_COLOR_BUFFER_BIT);
- deckLinkScreenPreviewHelper->PaintGL();
- if (!m_img.isNull()) {
- glPixelZoom(m_zx, m_zy);
- glDrawPixels(m_img.width(), m_img.height(), GL_RGBA, GL_UNSIGNED_BYTE, m_img.bits());
- }
- mutex.unlock();
+void CDeckLinkGLWidget::paintGL()
+{
+ mutex.lock();
+ glLoadIdentity();
+ qglClearColor(m_backgroundColor);
+ //glClearColor(1.0f, 0.0f, 0.0f, 0.0f);
+ glClear(GL_COLOR_BUFFER_BIT);
+ deckLinkScreenPreviewHelper->PaintGL();
+ if(!m_img.isNull()) {
+ glPixelZoom(m_zx, m_zy);
+ glDrawPixels(m_img.width(), m_img.height(), GL_RGBA, GL_UNSIGNED_BYTE, m_img.bits());
+ }
+ mutex.unlock();
}
/*
void CDeckLinkGLWidget::paintEvent(QPaintEvent *event)
mutex.unlock();
}*/
-void CDeckLinkGLWidget::resizeGL (int width, int height)
-{
- mutex.lock();
- m_pictureHeight = height;
- m_pictureWidth = width;
- int calculatedWidth = g_aspect_ratio * height;
- if (calculatedWidth > width) m_pictureHeight = width / g_aspect_ratio;
- else {
- int calculatedHeight = width / g_aspect_ratio;
- if (calculatedHeight > height) m_pictureWidth = height * g_aspect_ratio;
- }
- glViewport((width - m_pictureWidth) / 2, (height - m_pictureHeight) / 2, m_pictureWidth, m_pictureHeight);
- glMatrixMode(GL_PROJECTION);
- glLoadIdentity();
- glOrtho(-1.0, 1.0, -1.0, 1.0, -1.0, 1.0);
- glMatrixMode(GL_MODELVIEW);
- glRasterPos2i(-1, -1);
- if (!m_img.isNull()) {
- m_zx = (double)m_pictureWidth / m_img.width();
- m_zy = (double)m_pictureHeight / m_img.height();
- }
-
- mutex.unlock();
+void CDeckLinkGLWidget::resizeGL(int width, int height)
+{
+ mutex.lock();
+ m_pictureHeight = height;
+ m_pictureWidth = width;
+ int calculatedWidth = g_aspect_ratio * height;
+ if(calculatedWidth > width) m_pictureHeight = width / g_aspect_ratio;
+ else {
+ int calculatedHeight = width / g_aspect_ratio;
+ if(calculatedHeight > height) m_pictureWidth = height * g_aspect_ratio;
+ }
+ glViewport((width - m_pictureWidth) / 2, (height - m_pictureHeight) / 2, m_pictureWidth, m_pictureHeight);
+ glMatrixMode(GL_PROJECTION);
+ glLoadIdentity();
+ glOrtho(-1.0, 1.0, -1.0, 1.0, -1.0, 1.0);
+ glMatrixMode(GL_MODELVIEW);
+ glRasterPos2i(-1, -1);
+ if(!m_img.isNull()) {
+ m_zx = (double)m_pictureWidth / m_img.width();
+ m_zy = (double)m_pictureHeight / m_img.height();
+ }
+
+ mutex.unlock();
}
/*void CDeckLinkGLWidget::resizeOverlayGL ( int width, int height )
{
int newwidth = width;
- int newheight = height;
- int calculatedWidth = g_aspect_ratio * height;
- if (calculatedWidth > width) newheight = width / g_aspect_ratio;
- else {
- int calculatedHeight = width / g_aspect_ratio;
- if (calculatedHeight > height) newwidth = height * g_aspect_ratio;
- }
- glViewport((width - newwidth) / 2, (height - newheight) / 2, newwidth, newheight);
- glMatrixMode(GL_PROJECTION);
- glLoadIdentity();
- glOrtho(0, width, 0, height, -1.0, 1.0);
- glMatrixMode(GL_MODELVIEW);
- updateOverlayGL ();
+ int newheight = height;
+ int calculatedWidth = g_aspect_ratio * height;
+ if (calculatedWidth > width) newheight = width / g_aspect_ratio;
+ else {
+ int calculatedHeight = width / g_aspect_ratio;
+ if (calculatedHeight > height) newwidth = height * g_aspect_ratio;
+ }
+ glViewport((width - newwidth) / 2, (height - newheight) / 2, newwidth, newheight);
+ glMatrixMode(GL_PROJECTION);
+ glLoadIdentity();
+ glOrtho(0, width, 0, height, -1.0, 1.0);
+ glMatrixMode(GL_MODELVIEW);
+ updateOverlayGL ();
}*/
-HRESULT CDeckLinkGLWidget::QueryInterface (REFIID iid, LPVOID *ppv)
+HRESULT CDeckLinkGLWidget::QueryInterface(REFIID iid, LPVOID *ppv)
{
- Q_UNUSED(iid);
- *ppv = NULL;
- return E_NOINTERFACE;
+ Q_UNUSED(iid);
+ *ppv = NULL;
+ return E_NOINTERFACE;
}
-ULONG CDeckLinkGLWidget::AddRef ()
+ULONG CDeckLinkGLWidget::AddRef()
{
- int oldValue;
+ int oldValue;
- oldValue = refCount.fetchAndAddAcquire(1);
- return (ULONG)(oldValue + 1);
+ oldValue = refCount.fetchAndAddAcquire(1);
+ return (ULONG)(oldValue + 1);
}
-ULONG CDeckLinkGLWidget::Release ()
+ULONG CDeckLinkGLWidget::Release()
{
- int oldValue;
+ int oldValue;
- oldValue = refCount.fetchAndAddAcquire(-1);
- if (oldValue == 1)
- {
- delete this;
- }
+ oldValue = refCount.fetchAndAddAcquire(-1);
+ if(oldValue == 1) {
+ delete this;
+ }
- return (ULONG)(oldValue - 1);
+ return (ULONG)(oldValue - 1);
}
-HRESULT CDeckLinkGLWidget::DrawFrame (IDeckLinkVideoFrame* theFrame)
-{
- if (deckLinkScreenPreviewHelper != NULL && theFrame != NULL)
- {
- /*mutex.lock();
- m_frame = theFrame;
- mutex.unlock();*/
- deckLinkScreenPreviewHelper->SetFrame(theFrame);
- update();
- }
- return S_OK;
+HRESULT CDeckLinkGLWidget::DrawFrame(IDeckLinkVideoFrame* theFrame)
+{
+ if(deckLinkScreenPreviewHelper != NULL && theFrame != NULL) {
+ /*mutex.lock();
+ m_frame = theFrame;
+ mutex.unlock();*/
+ deckLinkScreenPreviewHelper->SetFrame(theFrame);
+ update();
+ }
+ return S_OK;
}
DeckLinkCaptureDelegate::DeckLinkCaptureDelegate() : m_refCount(0)
{
- pthread_mutex_init(&m_mutex, NULL);
+ pthread_mutex_init(&m_mutex, NULL);
}
DeckLinkCaptureDelegate::~DeckLinkCaptureDelegate()
{
- pthread_mutex_destroy(&m_mutex);
+ pthread_mutex_destroy(&m_mutex);
}
ULONG DeckLinkCaptureDelegate::AddRef(void)
{
- pthread_mutex_lock(&m_mutex);
- m_refCount++;
- pthread_mutex_unlock(&m_mutex);
+ pthread_mutex_lock(&m_mutex);
+ m_refCount++;
+ pthread_mutex_unlock(&m_mutex);
- return (ULONG)m_refCount;
+ return (ULONG)m_refCount;
}
ULONG DeckLinkCaptureDelegate::Release(void)
{
- pthread_mutex_lock(&m_mutex);
- m_refCount--;
- pthread_mutex_unlock(&m_mutex);
+ pthread_mutex_lock(&m_mutex);
+ m_refCount--;
+ pthread_mutex_unlock(&m_mutex);
- if (m_refCount == 0)
- {
- delete this;
- return 0;
- }
+ if(m_refCount == 0) {
+ delete this;
+ return 0;
+ }
- return (ULONG)m_refCount;
+ return (ULONG)m_refCount;
}
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
- IDeckLinkVideoFrame* rightEyeFrame = NULL;
- IDeckLinkVideoFrame3DExtensions* threeDExtensions = NULL;
- void* frameBytes;
- void* audioFrameBytes;
-
- // Handle Video Frame
- if(videoFrame)
- {
- // If 3D mode is enabled we retreive the 3D extensions interface which gives.
- // us access to the right eye frame by calling GetFrameForRightEye() .
- if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
- (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
- {
- rightEyeFrame = NULL;
- }
-
- if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
- {
- emit gotMessage(i18n("Frame (%1) - No input signal", frameCount));
- fprintf(stderr, "Frame received (#%lu) - No input signal detected\n", frameCount);
- }
- else
- {
- const char *timecodeString = NULL;
- if (g_timecodeFormat != 0)
- {
- IDeckLinkTimecode *timecode;
- if (videoFrame->GetTimecode(g_timecodeFormat, &timecode) == S_OK)
- {
- timecode->GetString(&timecodeString);
- }
- }
- // There seems to be No timecode with HDMI... Using frame number
- emit gotTimeCode(frameCount);
- /*fprintf(stderr, "Frame received (#%lu) [%s] - %s - Size: %li bytes\n",
- frameCount,
- timecodeString != NULL ? timecodeString : "No timecode",
- rightEyeFrame != NULL ? "Valid Frame (3D left/right)" : "Valid Frame",
- videoFrame->GetRowBytes() * videoFrame->GetHeight());*/
-
- if (timecodeString)
- free((void*)timecodeString);
-
- if (!doCaptureFrame.isEmpty()) {
- videoFrame->GetBytes(&frameBytes);
- if (doCaptureFrame.endsWith("raw")) {
- // Save as raw uyvy422 imgage
- videoOutputFile = open(doCaptureFrame.toUtf8().constData(), O_WRONLY|O_CREAT/*|O_TRUNC*/, 0664);
- write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
- close(videoOutputFile);
- }
- else {
- QImage image(videoFrame->GetWidth(), videoFrame->GetHeight(), QImage::Format_ARGB32_Premultiplied);
- //convert from uyvy422 to rgba
- yuv2rgb_int((uchar *)frameBytes, (uchar *)image.bits(), videoFrame->GetWidth(), videoFrame->GetHeight());
- image.save(doCaptureFrame);
- }
- doCaptureFrame.clear();
- }
-
- if (videoOutputFile != -1)
- {
- videoFrame->GetBytes(&frameBytes);
- write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
-
- if (rightEyeFrame)
- {
- rightEyeFrame->GetBytes(&frameBytes);
- write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
- }
- }
- }
- frameCount++;
-
- if (g_maxFrames > 0 && frameCount >= g_maxFrames)
- {
- pthread_cond_signal(&sleepCond);
- }
- }
-
- // Handle Audio Frame
- if (audioFrame)
- {
- if (audioOutputFile != -1)
- {
- audioFrame->GetBytes(&audioFrameBytes);
- write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
- }
- }
+ IDeckLinkVideoFrame* rightEyeFrame = NULL;
+ IDeckLinkVideoFrame3DExtensions* threeDExtensions = NULL;
+ void* frameBytes;
+ void* audioFrameBytes;
+
+ // Handle Video Frame
+ if(videoFrame) {
+ // If 3D mode is enabled we retrieve the 3D extensions interface which gives
+ // us access to the right eye frame by calling GetFrameForRightEye().
+ if((videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
+ (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK)) {
+ rightEyeFrame = NULL;
+ }
+
+ if(videoFrame->GetFlags() & bmdFrameHasNoInputSource) {
+ emit gotMessage(i18n("Frame (%1) - No input signal", frameCount));
+ fprintf(stderr, "Frame received (#%lu) - No input signal detected\n", frameCount);
+ } else {
+ const char *timecodeString = NULL;
+ if(g_timecodeFormat != 0) {
+ IDeckLinkTimecode *timecode;
+ if(videoFrame->GetTimecode(g_timecodeFormat, &timecode) == S_OK) {
+ timecode->GetString(&timecodeString);
+ }
+ }
+ // There seems to be No timecode with HDMI... Using frame number
+ emit gotTimeCode(frameCount);
+ /*fprintf(stderr, "Frame received (#%lu) [%s] - %s - Size: %li bytes\n",
+ frameCount,
+ timecodeString != NULL ? timecodeString : "No timecode",
+ rightEyeFrame != NULL ? "Valid Frame (3D left/right)" : "Valid Frame",
+ videoFrame->GetRowBytes() * videoFrame->GetHeight());*/
+
+ if(timecodeString)
+ free((void*)timecodeString);
+
+ if(!doCaptureFrame.isEmpty()) {
+ videoFrame->GetBytes(&frameBytes);
+ if(doCaptureFrame.endsWith("raw")) {
+ // Save as raw uyvy422 image
+ videoOutputFile = open(doCaptureFrame.toUtf8().constData(), O_WRONLY | O_CREAT/*|O_TRUNC*/, 0664);
+ write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
+ close(videoOutputFile);
+ } else {
+ QImage image(videoFrame->GetWidth(), videoFrame->GetHeight(), QImage::Format_ARGB32_Premultiplied);
+ //convert from uyvy422 to rgba
+ yuv2rgb_int((uchar *)frameBytes, (uchar *)image.bits(), videoFrame->GetWidth(), videoFrame->GetHeight());
+ image.save(doCaptureFrame);
+ }
+ doCaptureFrame.clear();
+ }
+
+ if(videoOutputFile != -1) {
+ videoFrame->GetBytes(&frameBytes);
+ write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
+
+ if(rightEyeFrame) {
+ rightEyeFrame->GetBytes(&frameBytes);
+ write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
+ }
+ }
+ }
+ frameCount++;
+
+ if(g_maxFrames > 0 && frameCount >= g_maxFrames) {
+ pthread_cond_signal(&sleepCond);
+ }
+ }
+
+ // Handle Audio Frame
+ if(audioFrame) {
+ if(audioOutputFile != -1) {
+ audioFrame->GetBytes(&audioFrameBytes);
+ write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels *(g_audioSampleDepth / 8));
+ }
+ }
return S_OK;
}
/*int usage(int status)
{
- HRESULT result;
- IDeckLinkDisplayMode *displayMode;
- int displayModeCount = 0;
+ HRESULT result;
+ IDeckLinkDisplayMode *displayMode;
+ int displayModeCount = 0;
- fprintf(stderr,
- "Usage: Capture -m <mode id> [OPTIONS]\n"
- "\n"
- " -m <mode id>:\n"
- );
+ fprintf(stderr,
+ "Usage: Capture -m <mode id> [OPTIONS]\n"
+ "\n"
+ " -m <mode id>:\n"
+ );
while (displayModeIterator->Next(&displayMode) == S_OK)
{
result = displayMode->GetName((const char **) &displayModeString);
if (result == S_OK)
{
- BMDTimeValue frameRateDuration, frameRateScale;
+ BMDTimeValue frameRateDuration, frameRateScale;
displayMode->GetFrameRate(&frameRateDuration, &frameRateScale);
- fprintf(stderr, " %2d: %-20s \t %li x %li \t %g FPS\n",
- displayModeCount, displayModeString, displayMode->GetWidth(), displayMode->GetHeight(), (double)frameRateScale / (double)frameRateDuration);
+ fprintf(stderr, " %2d: %-20s \t %li x %li \t %g FPS\n",
+ displayModeCount, displayModeString, displayMode->GetWidth(), displayMode->GetHeight(), (double)frameRateScale / (double)frameRateDuration);
free(displayModeString);
- displayModeCount++;
+ displayModeCount++;
}
// Release the IDeckLinkDisplayMode object to prevent a leak
displayMode->Release();
}
- fprintf(stderr,
- " -p <pixelformat>\n"
- " 0: 8 bit YUV (4:2:2) (default)\n"
- " 1: 10 bit YUV (4:2:2)\n"
- " 2: 10 bit RGB (4:4:4)\n"
- " -t <format> Print timecode\n"
- " rp188: RP 188\n"
- " vitc: VITC\n"
- " serial: Serial Timecode\n"
- " -f <filename> Filename raw video will be written to\n"
- " -a <filename> Filename raw audio will be written to\n"
- " -c <channels> Audio Channels (2, 8 or 16 - default is 2)\n"
- " -s <depth> Audio Sample Depth (16 or 32 - default is 16)\n"
- " -n <frames> Number of frames to capture (default is unlimited)\n"
- " -3 Capture Stereoscopic 3D (Requires 3D Hardware support)\n"
- "\n"
- "Capture video and/or audio to a file. Raw video and/or audio can be viewed with mplayer eg:\n"
- "\n"
- " Capture -m2 -n 50 -f video.raw -a audio.raw\n"
- " mplayer video.raw -demuxer rawvideo -rawvideo pal:uyvy -audiofile audio.raw -audio-demuxer 20 -rawaudio rate=48000\n"
- );
-
- exit(status);
+ fprintf(stderr,
+ " -p <pixelformat>\n"
+ " 0: 8 bit YUV (4:2:2) (default)\n"
+ " 1: 10 bit YUV (4:2:2)\n"
+ " 2: 10 bit RGB (4:4:4)\n"
+ " -t <format> Print timecode\n"
+ " rp188: RP 188\n"
+ " vitc: VITC\n"
+ " serial: Serial Timecode\n"
+ " -f <filename> Filename raw video will be written to\n"
+ " -a <filename> Filename raw audio will be written to\n"
+ " -c <channels> Audio Channels (2, 8 or 16 - default is 2)\n"
+ " -s <depth> Audio Sample Depth (16 or 32 - default is 16)\n"
+ " -n <frames> Number of frames to capture (default is unlimited)\n"
+ " -3 Capture Stereoscopic 3D (Requires 3D Hardware support)\n"
+ "\n"
+ "Capture video and/or audio to a file. Raw video and/or audio can be viewed with mplayer eg:\n"
+ "\n"
+ " Capture -m2 -n 50 -f video.raw -a audio.raw\n"
+ " mplayer video.raw -demuxer rawvideo -rawvideo pal:uyvy -audiofile audio.raw -audio-demuxer 20 -rawaudio rate=48000\n"
+ );
+
+ exit(status);
}
*/
-CaptureHandler::CaptureHandler(QVBoxLayout *lay, QWidget *parent):
+BmdCaptureHandler::BmdCaptureHandler(QVBoxLayout *lay, QWidget *parent):
+ CaptureHandler(lay, parent),
previewView(NULL),
deckLinkIterator(NULL),
delegate(NULL),
displayMode(NULL),
deckLink(NULL),
deckLinkInput(NULL),
- displayModeIterator(NULL),
- m_layout(lay),
- m_parent(parent)
+ displayModeIterator(NULL)
{
}
-void CaptureHandler::startPreview(int deviceId, int captureMode)
-{
- deckLinkIterator = CreateDeckLinkIteratorInstance();
- BMDVideoInputFlags inputFlags = 0;
- BMDDisplayMode selectedDisplayMode = bmdModeNTSC;
- BMDPixelFormat pixelFormat = bmdFormat8BitYUV;
- int displayModeCount = 0;
- int exitStatus = 1;
- //int ch;
- bool foundDisplayMode = false;
- HRESULT result = 1;
-
- /*pthread_mutex_init(&sleepMutex, NULL);
- pthread_cond_init(&sleepCond, NULL);*/
- kDebug()<<"/// INIT CAPTURE ON DEV: "<<deviceId;
-
- if (!deckLinkIterator)
- {
- emit gotMessage(i18n("This application requires the DeckLink drivers installed."));
- fprintf(stderr, "This application requires the DeckLink drivers installed.\n");
- stopCapture();
- return;
- }
-
- /* Connect to selected DeckLink instance */
- for (int i = 0; i < deviceId + 1; i++)
- result = deckLinkIterator->Next(&deckLink);
- if (result != S_OK)
- {
- fprintf(stderr, "No DeckLink PCI cards found.\n");
- emit gotMessage(i18n("No DeckLink PCI cards found."));
- stopCapture();
- return;
- }
-
- if (deckLink->QueryInterface(IID_IDeckLinkInput, (void**)&deckLinkInput) != S_OK)
- {
- stopCapture();
- return;
- }
-
- delegate = new DeckLinkCaptureDelegate();
- connect(delegate, SIGNAL(gotTimeCode(ulong)), this, SIGNAL(gotTimeCode(ulong)));
- connect(delegate, SIGNAL(gotMessage(const QString &)), this, SIGNAL(gotMessage(const QString &)));
- deckLinkInput->SetCallback(delegate);
-
- previewView = new CDeckLinkGLWidget(deckLinkInput, m_parent);
- m_layout->addWidget(previewView);
- //previewView->resize(parent->size());
- previewView->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
- previewView->DrawFrame(NULL);
-
- // Obtain an IDeckLinkDisplayModeIterator to enumerate the display modes supported on output
- result = deckLinkInput->GetDisplayModeIterator(&displayModeIterator);
- if (result != S_OK)
- {
- emit gotMessage(i18n("Could not obtain the video output display mode iterator - result = ", result));
- fprintf(stderr, "Could not obtain the video output display mode iterator - result = %08x\n", result);
- stopCapture();
- return;
- }
-
- g_videoModeIndex = captureMode;
- /*g_audioChannels = 2;
- g_audioSampleDepth = 16;*/
-
- // Parse command line options
- /*while ((ch = getopt(argc, argv, "?h3c:s:f:a:m:n:p:t:")) != -1)
- {
- switch (ch)
- {
- case 'm':
- g_videoModeIndex = atoi(optarg);
- break;
- case 'c':
- g_audioChannels = atoi(optarg);
- if (g_audioChannels != 2 &&
- g_audioChannels != 8 &&
- g_audioChannels != 16)
- {
- fprintf(stderr, "Invalid argument: Audio Channels must be either 2, 8 or 16\n");
+void BmdCaptureHandler::startPreview(int deviceId, int captureMode)
+{
+ deckLinkIterator = CreateDeckLinkIteratorInstance();
+ BMDVideoInputFlags inputFlags = 0;
+ BMDDisplayMode selectedDisplayMode = bmdModeNTSC;
+ BMDPixelFormat pixelFormat = bmdFormat8BitYUV;
+ int displayModeCount = 0;
+ int exitStatus = 1;
+ //int ch;
+ bool foundDisplayMode = false;
+ HRESULT result = 1;
+
+ /*pthread_mutex_init(&sleepMutex, NULL);
+ pthread_cond_init(&sleepCond, NULL);*/
+ kDebug() << "/// INIT CAPTURE ON DEV: " << deviceId;
+
+ if(!deckLinkIterator) {
+ emit gotMessage(i18n("This application requires the DeckLink drivers installed."));
+ fprintf(stderr, "This application requires the DeckLink drivers installed.\n");
+ stopCapture();
+ return;
+ }
+
+ /* Connect to selected DeckLink instance */
+ for(int i = 0; i < deviceId + 1; i++)
+ result = deckLinkIterator->Next(&deckLink);
+ if(result != S_OK) {
+ fprintf(stderr, "No DeckLink PCI cards found.\n");
+ emit gotMessage(i18n("No DeckLink PCI cards found."));
+ stopCapture();
+ return;
+ }
+
+ if(deckLink->QueryInterface(IID_IDeckLinkInput, (void**)&deckLinkInput) != S_OK) {
+ stopCapture();
+ return;
+ }
+
+ delegate = new DeckLinkCaptureDelegate();
+ connect(delegate, SIGNAL(gotTimeCode(ulong)), this, SIGNAL(gotTimeCode(ulong)));
+ connect(delegate, SIGNAL(gotMessage(const QString &)), this, SIGNAL(gotMessage(const QString &)));
+ deckLinkInput->SetCallback(delegate);
+
+ previewView = new CDeckLinkGLWidget(deckLinkInput, m_parent);
+ m_layout->addWidget(previewView);
+ //previewView->resize(parent->size());
+ previewView->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
+ previewView->DrawFrame(NULL);
+
+ // Obtain an IDeckLinkDisplayModeIterator to enumerate the display modes supported on output
+ result = deckLinkInput->GetDisplayModeIterator(&displayModeIterator);
+ if(result != S_OK) {
+ emit gotMessage(i18n("Could not obtain the video output display mode iterator - result = ", result));
+ fprintf(stderr, "Could not obtain the video output display mode iterator - result = %08x\n", result);
+ stopCapture();
+ return;
+ }
+
+ g_videoModeIndex = captureMode;
+ /*g_audioChannels = 2;
+ g_audioSampleDepth = 16;*/
+
+ // Parse command line options
+ /*while ((ch = getopt(argc, argv, "?h3c:s:f:a:m:n:p:t:")) != -1)
+ {
+ switch (ch)
+ {
+ case 'm':
+ g_videoModeIndex = atoi(optarg);
+ break;
+ case 'c':
+ g_audioChannels = atoi(optarg);
+ if (g_audioChannels != 2 &&
+ g_audioChannels != 8 &&
+ g_audioChannels != 16)
+ {
+ fprintf(stderr, "Invalid argument: Audio Channels must be either 2, 8 or 16\n");
stopCapture();
- }
- break;
- case 's':
- g_audioSampleDepth = atoi(optarg);
- if (g_audioSampleDepth != 16 && g_audioSampleDepth != 32)
- {
- fprintf(stderr, "Invalid argument: Audio Sample Depth must be either 16 bits or 32 bits\n");
+ }
+ break;
+ case 's':
+ g_audioSampleDepth = atoi(optarg);
+ if (g_audioSampleDepth != 16 && g_audioSampleDepth != 32)
+ {
+ fprintf(stderr, "Invalid argument: Audio Sample Depth must be either 16 bits or 32 bits\n");
stopCapture();
- }
- break;
- case 'f':
- g_videoOutputFile = optarg;
- break;
- case 'a':
- g_audioOutputFile = optarg;
- break;
- case 'n':
- g_maxFrames = atoi(optarg);
- break;
- case '3':
- inputFlags |= bmdVideoInputDualStream3D;
- break;
- case 'p':
- switch(atoi(optarg))
- {
- case 0: pixelFormat = bmdFormat8BitYUV; break;
- case 1: pixelFormat = bmdFormat10BitYUV; break;
- case 2: pixelFormat = bmdFormat10BitRGB; break;
- default:
- fprintf(stderr, "Invalid argument: Pixel format %d is not valid", atoi(optarg));
+ }
+ break;
+ case 'f':
+ g_videoOutputFile = optarg;
+ break;
+ case 'a':
+ g_audioOutputFile = optarg;
+ break;
+ case 'n':
+ g_maxFrames = atoi(optarg);
+ break;
+ case '3':
+ inputFlags |= bmdVideoInputDualStream3D;
+ break;
+ case 'p':
+ switch(atoi(optarg))
+ {
+ case 0: pixelFormat = bmdFormat8BitYUV; break;
+ case 1: pixelFormat = bmdFormat10BitYUV; break;
+ case 2: pixelFormat = bmdFormat10BitRGB; break;
+ default:
+ fprintf(stderr, "Invalid argument: Pixel format %d is not valid", atoi(optarg));
stopCapture();
- }
- break;
- case 't':
- if (!strcmp(optarg, "rp188"))
- g_timecodeFormat = bmdTimecodeRP188;
- else if (!strcmp(optarg, "vitc"))
- g_timecodeFormat = bmdTimecodeVITC;
- else if (!strcmp(optarg, "serial"))
- g_timecodeFormat = bmdTimecodeSerial;
- else
- {
- fprintf(stderr, "Invalid argument: Timecode format \"%s\" is invalid\n", optarg);
+ }
+ break;
+ case 't':
+ if (!strcmp(optarg, "rp188"))
+ g_timecodeFormat = bmdTimecodeRP188;
+ else if (!strcmp(optarg, "vitc"))
+ g_timecodeFormat = bmdTimecodeVITC;
+ else if (!strcmp(optarg, "serial"))
+ g_timecodeFormat = bmdTimecodeSerial;
+ else
+ {
+ fprintf(stderr, "Invalid argument: Timecode format \"%s\" is invalid\n", optarg);
stopCapture();
- }
- break;
- case '?':
- case 'h':
- usage(0);
- }
- }*/
-
- if (g_videoModeIndex < 0)
- {
- emit gotMessage(i18n("No video mode specified"));
- fprintf(stderr, "No video mode specified\n");
- stopCapture();
- return;
- }
-
- /*if (g_videoOutputFile != NULL)
- {
- videoOutputFile = open(g_videoOutputFile, O_WRONLY|O_CREAT|O_TRUNC, 0664);
- if (videoOutputFile < 0)
- {
- emit gotMessage(i18n("Could not open video output file %1", g_videoOutputFile));
- fprintf(stderr, "Could not open video output file \"%s\"\n", g_videoOutputFile);
- stopCapture();
- }
- }
- if (g_audioOutputFile != NULL)
- {
- audioOutputFile = open(g_audioOutputFile, O_WRONLY|O_CREAT|O_TRUNC, 0664);
- if (audioOutputFile < 0)
- {
- emit gotMessage(i18n("Could not open audio output file %1", g_audioOutputFile));
- fprintf(stderr, "Could not open audio output file \"%s\"\n", g_audioOutputFile);
- stopCapture();
- }
- }*/
-
- while (displayModeIterator->Next(&displayMode) == S_OK)
- {
- if (g_videoModeIndex == displayModeCount)
- {
- BMDDisplayModeSupport result;
- const char *displayModeName;
-
- foundDisplayMode = true;
- displayMode->GetName(&displayModeName);
- selectedDisplayMode = displayMode->GetDisplayMode();
-
- g_aspect_ratio = (double) displayMode->GetWidth() / (double) displayMode->GetHeight();
-
- deckLinkInput->DoesSupportVideoMode(selectedDisplayMode, pixelFormat, bmdVideoInputFlagDefault, &result, NULL);
-
- if (result == bmdDisplayModeNotSupported)
- {
- emit gotMessage(i18n("The display mode %1 is not supported with the selected pixel format", displayModeName));
- fprintf(stderr, "The display mode %s is not supported with the selected pixel format\n", displayModeName);
- stopCapture();
- return;
- }
-
- if (inputFlags & bmdVideoInputDualStream3D)
- {
- if (!(displayMode->GetFlags() & bmdDisplayModeSupports3D))
- {
- emit gotMessage(i18n("The display mode %1 is not supported with 3D", displayModeName));
- fprintf(stderr, "The display mode %s is not supported with 3D\n", displayModeName);
- stopCapture();
- return;
- }
- }
-
- break;
- }
- displayModeCount++;
- displayMode->Release();
- }
-
- if (!foundDisplayMode)
- {
- emit gotMessage(i18n("Invalid mode %1 specified", g_videoModeIndex));
- fprintf(stderr, "Invalid mode %d specified\n", g_videoModeIndex);
- stopCapture();
- return;
- }
+ }
+ break;
+ case '?':
+ case 'h':
+ usage(0);
+ }
+ }*/
- result = deckLinkInput->EnableVideoInput(selectedDisplayMode, pixelFormat, inputFlags);
- if(result != S_OK)
+ if(g_videoModeIndex < 0) {
+ emit gotMessage(i18n("No video mode specified"));
+ fprintf(stderr, "No video mode specified\n");
+ stopCapture();
+ return;
+ }
+
+ /*if (g_videoOutputFile != NULL)
{
- emit gotMessage(i18n("Failed to enable video input. Is another application using the card?"));
- fprintf(stderr, "Failed to enable video input. Is another application using the card?\n");
- stopCapture();
- return;
+ videoOutputFile = open(g_videoOutputFile, O_WRONLY|O_CREAT|O_TRUNC, 0664);
+ if (videoOutputFile < 0)
+ {
+ emit gotMessage(i18n("Could not open video output file %1", g_videoOutputFile));
+ fprintf(stderr, "Could not open video output file \"%s\"\n", g_videoOutputFile);
+ stopCapture();
+ }
+ }
+ if (g_audioOutputFile != NULL)
+ {
+ audioOutputFile = open(g_audioOutputFile, O_WRONLY|O_CREAT|O_TRUNC, 0664);
+ if (audioOutputFile < 0)
+ {
+ emit gotMessage(i18n("Could not open audio output file %1", g_audioOutputFile));
+ fprintf(stderr, "Could not open audio output file \"%s\"\n", g_audioOutputFile);
+ stopCapture();
+ }
+ }*/
+
+ while(displayModeIterator->Next(&displayMode) == S_OK) {
+ if(g_videoModeIndex == displayModeCount) {
+ BMDDisplayModeSupport result;
+ const char *displayModeName;
+
+ foundDisplayMode = true;
+ displayMode->GetName(&displayModeName);
+ selectedDisplayMode = displayMode->GetDisplayMode();
+
+ g_aspect_ratio = (double) displayMode->GetWidth() / (double) displayMode->GetHeight();
+
+ deckLinkInput->DoesSupportVideoMode(selectedDisplayMode, pixelFormat, bmdVideoInputFlagDefault, &result, NULL);
+
+ if(result == bmdDisplayModeNotSupported) {
+ emit gotMessage(i18n("The display mode %1 is not supported with the selected pixel format", displayModeName));
+ fprintf(stderr, "The display mode %s is not supported with the selected pixel format\n", displayModeName);
+ stopCapture();
+ return;
+ }
+
+ if(inputFlags & bmdVideoInputDualStream3D) {
+ if(!(displayMode->GetFlags() & bmdDisplayModeSupports3D)) {
+ emit gotMessage(i18n("The display mode %1 is not supported with 3D", displayModeName));
+ fprintf(stderr, "The display mode %s is not supported with 3D\n", displayModeName);
+ stopCapture();
+ return;
+ }
+ }
+
+ break;
+ }
+ displayModeCount++;
+ displayMode->Release();
+ }
+
+ if(!foundDisplayMode) {
+ emit gotMessage(i18n("Invalid mode %1 specified", g_videoModeIndex));
+ fprintf(stderr, "Invalid mode %d specified\n", g_videoModeIndex);
+ stopCapture();
+ return;
+ }
+
+ result = deckLinkInput->EnableVideoInput(selectedDisplayMode, pixelFormat, inputFlags);
+ if(result != S_OK) {
+ emit gotMessage(i18n("Failed to enable video input. Is another application using the card?"));
+ fprintf(stderr, "Failed to enable video input. Is another application using the card?\n");
+ stopCapture();
+ return;
}
result = deckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, g_audioSampleDepth, g_audioChannels);
- if(result != S_OK)
- {
+ if(result != S_OK) {
stopCapture();
- return;
+ return;
}
deckLinkInput->SetScreenPreviewCallback(previewView);
result = deckLinkInput->StartStreams();
- if(result != S_OK)
- {
- qDebug()<<"/// CAPTURE FAILED....";
- emit gotMessage(i18n("Capture failed"));
+ if(result != S_OK) {
+ qDebug() << "/// CAPTURE FAILED....";
+ emit gotMessage(i18n("Capture failed"));
}
- // All Okay.
- exitStatus = 0;
+ // All Okay.
+ exitStatus = 0;
- // Block main thread until signal occurs
-/* pthread_mutex_lock(&sleepMutex);
- pthread_cond_wait(&sleepCond, &sleepMutex);
- pthread_mutex_unlock(&sleepMutex);*/
+ // Block main thread until signal occurs
+ /* pthread_mutex_lock(&sleepMutex);
+ pthread_cond_wait(&sleepCond, &sleepMutex);
+ pthread_mutex_unlock(&sleepMutex);*/
-/*bail:
+ /*bail:
- if (videoOutputFile)
- close(videoOutputFile);
- if (audioOutputFile)
- close(audioOutputFile);
+ if (videoOutputFile)
+ close(videoOutputFile);
+ if (audioOutputFile)
+ close(audioOutputFile);
- if (displayModeIterator != NULL)
- {
- displayModeIterator->Release();
- displayModeIterator = NULL;
- }
+ if (displayModeIterator != NULL)
+ {
+ displayModeIterator->Release();
+ displayModeIterator = NULL;
+ }
- if (deckLinkInput != NULL)
- {
- deckLinkInput->Release();
- deckLinkInput = NULL;
- }
+ if (deckLinkInput != NULL)
+ {
+ deckLinkInput->Release();
+ deckLinkInput = NULL;
+ }
- if (deckLink != NULL)
- {
- deckLink->Release();
- deckLink = NULL;
- }
+ if (deckLink != NULL)
+ {
+ deckLink->Release();
+ deckLink = NULL;
+ }
- if (deckLinkIterator != NULL)
- deckLinkIterator->Release();
-*/
+ if (deckLinkIterator != NULL)
+ deckLinkIterator->Release();
+ */
}
-CaptureHandler::~CaptureHandler()
+BmdCaptureHandler::~BmdCaptureHandler()
{
stopCapture();
}
-void CaptureHandler::startCapture(const QString &path)
+void BmdCaptureHandler::startCapture(const QString &path)
{
int i = 0;
QString videopath = path + "_video_" + QString::number(i).rightJustified(4, '0', false) + ".raw";
QString audiopath = path + "_audio_" + QString::number(i).rightJustified(4, '0', false) + ".raw";
- while (QFile::exists(videopath) || QFile::exists(audiopath)) {
- i++;
- videopath = path + "_video_" + QString::number(i).rightJustified(4, '0', false) + ".raw";
- audiopath = path + "_audio_" + QString::number(i).rightJustified(4, '0', false) + ".raw";
+ while(QFile::exists(videopath) || QFile::exists(audiopath)) {
+ i++;
+ videopath = path + "_video_" + QString::number(i).rightJustified(4, '0', false) + ".raw";
+ audiopath = path + "_audio_" + QString::number(i).rightJustified(4, '0', false) + ".raw";
}
- videoOutputFile = open(videopath.toUtf8().constData(), O_WRONLY|O_CREAT|O_TRUNC, 0664);
- if (videoOutputFile < 0)
- {
- emit gotMessage(i18n("Could not open video output file %1", videopath));
- fprintf(stderr, "Could not open video output file \"%s\"\n", videopath.toUtf8().constData());
- return;
- }
- if (KdenliveSettings::hdmicaptureaudio()) {
- audioOutputFile = open(audiopath.toUtf8().constData(), O_WRONLY|O_CREAT|O_TRUNC, 0664);
- if (audioOutputFile < 0)
- {
- emit gotMessage(i18n("Could not open audio output file %1", audiopath));
- fprintf(stderr, "Could not open video output file \"%s\"\n", audiopath.toUtf8().constData());
- return;
- }
+ videoOutputFile = open(videopath.toUtf8().constData(), O_WRONLY | O_CREAT | O_TRUNC, 0664);
+ if(videoOutputFile < 0) {
+ emit gotMessage(i18n("Could not open video output file %1", videopath));
+ fprintf(stderr, "Could not open video output file \"%s\"\n", videopath.toUtf8().constData());
+ return;
+ }
+ if(KdenliveSettings::hdmicaptureaudio()) {
+ audioOutputFile = open(audiopath.toUtf8().constData(), O_WRONLY | O_CREAT | O_TRUNC, 0664);
+ if(audioOutputFile < 0) {
+ emit gotMessage(i18n("Could not open audio output file %1", audiopath));
+            fprintf(stderr, "Could not open audio output file \"%s\"\n", audiopath.toUtf8().constData());
+ return;
+ }
}
}
-void CaptureHandler::stopCapture()
+void BmdCaptureHandler::stopCapture()
{
- if (videoOutputFile)
- close(videoOutputFile);
- if (audioOutputFile)
- close(audioOutputFile);
- videoOutputFile = -1;
- audioOutputFile = -1;
+ if(videoOutputFile)
+ close(videoOutputFile);
+ if(audioOutputFile)
+ close(audioOutputFile);
+ videoOutputFile = -1;
+ audioOutputFile = -1;
}
-void CaptureHandler::captureFrame(const QString &fname)
+void BmdCaptureHandler::captureFrame(const QString &fname)
{
doCaptureFrame = fname;
}
-void CaptureHandler::showOverlay(QImage img, bool transparent)
+void BmdCaptureHandler::showOverlay(QImage img, bool transparent)
{
- if (previewView) previewView->showOverlay(img, transparent);
+ if(previewView) previewView->showOverlay(img, transparent);
}
-void CaptureHandler::hideOverlay()
+void BmdCaptureHandler::hideOverlay()
{
- if (previewView) previewView->hideOverlay();
+ if(previewView) previewView->hideOverlay();
}
-void CaptureHandler::hidePreview(bool hide)
+void BmdCaptureHandler::hidePreview(bool hide)
{
- if (previewView) previewView->setHidden(hide);
+ if(previewView) previewView->setHidden(hide);
}
-void CaptureHandler::stopPreview()
-{
- if (!previewView) return;
- if (deckLinkInput != NULL) deckLinkInput->StopStreams();
- if (videoOutputFile)
- close(videoOutputFile);
- if (audioOutputFile)
- close(audioOutputFile);
-
- if (displayModeIterator != NULL)
- {
- displayModeIterator->Release();
- displayModeIterator = NULL;
- }
-
- if (deckLinkInput != NULL)
- {
+void BmdCaptureHandler::stopPreview()
+{
+ if(!previewView) return;
+ if(deckLinkInput != NULL) deckLinkInput->StopStreams();
+ if(videoOutputFile)
+ close(videoOutputFile);
+ if(audioOutputFile)
+ close(audioOutputFile);
+
+ if(displayModeIterator != NULL) {
+ displayModeIterator->Release();
+ displayModeIterator = NULL;
+ }
+
+ if(deckLinkInput != NULL) {
deckLinkInput->Release();
deckLinkInput = NULL;
}
- if (deckLink != NULL)
- {
+ if(deckLink != NULL) {
deckLink->Release();
deckLink = NULL;
}
- if (deckLinkIterator != NULL) {
- deckLinkIterator->Release();
- deckLinkIterator = NULL;
+ if(deckLinkIterator != NULL) {
+ deckLinkIterator->Release();
+ deckLinkIterator = NULL;
}
- if (previewView != NULL) {
- delete previewView;
- previewView = NULL;
+ if(previewView != NULL) {
+ delete previewView;
+ previewView = NULL;
}
/*if (delegate != NULL)
- delete delegate;*/
-
+ delete delegate;*/
+
}
-#ifndef __CAPTURE_H__
-#define __CAPTURE_H__
+#ifndef __BMDCAPTURE_H__
+#define __BMDCAPTURE_H__
#include "include/DeckLinkAPI.h"
+#include "../stopmotion/capturehandler.h"
#include <QWidget>
#include <QObject>
class DeckLinkCaptureDelegate : public QObject, public IDeckLinkInputCallback
{
-Q_OBJECT
+ Q_OBJECT
public:
- DeckLinkCaptureDelegate();
- virtual ~DeckLinkCaptureDelegate();
+ DeckLinkCaptureDelegate();
+ virtual ~DeckLinkCaptureDelegate();
- virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID /*iid*/, LPVOID */*ppv*/) { return E_NOINTERFACE; }
- virtual ULONG STDMETHODCALLTYPE AddRef(void);
- virtual ULONG STDMETHODCALLTYPE Release(void);
- virtual HRESULT STDMETHODCALLTYPE VideoInputFormatChanged(BMDVideoInputFormatChangedEvents, IDeckLinkDisplayMode*, BMDDetectedVideoInputFormatFlags);
- virtual HRESULT STDMETHODCALLTYPE VideoInputFrameArrived(IDeckLinkVideoInputFrame*, IDeckLinkAudioInputPacket*);
+ virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID /*iid*/, LPVOID */*ppv*/) {
+ return E_NOINTERFACE;
+ }
+ virtual ULONG STDMETHODCALLTYPE AddRef(void);
+ virtual ULONG STDMETHODCALLTYPE Release(void);
+ virtual HRESULT STDMETHODCALLTYPE VideoInputFormatChanged(BMDVideoInputFormatChangedEvents, IDeckLinkDisplayMode*, BMDDetectedVideoInputFormatFlags);
+ virtual HRESULT STDMETHODCALLTYPE VideoInputFrameArrived(IDeckLinkVideoInputFrame*, IDeckLinkAudioInputPacket*);
private:
- ULONG m_refCount;
- pthread_mutex_t m_mutex;
+ ULONG m_refCount;
+ pthread_mutex_t m_mutex;
signals:
- void gotTimeCode(ulong);
- void gotMessage(const QString &);
+ void gotTimeCode(ulong);
+ void gotMessage(const QString &);
};
-class CaptureHandler : public QObject
+class BmdCaptureHandler : public CaptureHandler
{
- Q_OBJECT
+ Q_OBJECT
public:
- CaptureHandler(QVBoxLayout *lay, QWidget *parent = 0);
- ~CaptureHandler();
- CDeckLinkGLWidget *previewView;
- void startPreview(int deviceId, int captureMode);
- void stopPreview();
- void startCapture(const QString &path);
- void stopCapture();
- void captureFrame(const QString &fname);
- void showOverlay(QImage img, bool transparent = true);
- void hideOverlay();
- void hidePreview(bool hide);
-
-private:
- IDeckLinkIterator *deckLinkIterator;
- DeckLinkCaptureDelegate *delegate;
- IDeckLinkDisplayMode *displayMode;
- IDeckLink *deckLink;
- IDeckLinkInput *deckLinkInput;
- IDeckLinkDisplayModeIterator *displayModeIterator;
- QVBoxLayout *m_layout;
- QWidget *m_parent;
+ BmdCaptureHandler(QVBoxLayout *lay, QWidget *parent = 0);
+ ~BmdCaptureHandler();
+ CDeckLinkGLWidget *previewView;
+ void startPreview(int deviceId, int captureMode);
+ void stopPreview();
+ void startCapture(const QString &path);
+ void stopCapture();
+ void captureFrame(const QString &fname);
+ void showOverlay(QImage img, bool transparent = true);
+ void hideOverlay();
+ void hidePreview(bool hide);
-signals:
- void gotTimeCode(ulong);
- void gotMessage(const QString &);
+private:
+ IDeckLinkIterator *deckLinkIterator;
+ DeckLinkCaptureDelegate *delegate;
+ IDeckLinkDisplayMode *displayMode;
+ IDeckLink *deckLink;
+ IDeckLinkInput *deckLinkInput;
+ IDeckLinkDisplayModeIterator *displayModeIterator;
};
}
//static
-void BMInterface::getBlackMagicDeviceList(KComboBox *devicelist, KComboBox *modelist)
+bool BMInterface::getBlackMagicDeviceList(KComboBox *devicelist, KComboBox *modelist)
{
- IDeckLinkIterator* deckLinkIterator;
- IDeckLink* deckLink;
- int numDevices = 0;
- HRESULT result;
-
- // Create an IDeckLinkIterator object to enumerate all DeckLink cards in the system
- deckLinkIterator = CreateDeckLinkIteratorInstance();
- if (deckLinkIterator == NULL)
- {
- kDebug()<< "A DeckLink iterator could not be created. The DeckLink drivers may not be installed.";
- return;
- }
-
- // Enumerate all cards in this system
- while (deckLinkIterator->Next(&deckLink) == S_OK)
- {
- char * deviceNameString = NULL;
-
- // Increment the total number of DeckLink cards found
- numDevices++;
- //if (numDevices > 1)
- kDebug()<<"// FOUND a BM device\n\n+++++++++++++++++++++++++++++++++++++";
-
- // *** Print the model name of the DeckLink card
- result = deckLink->GetModelName((const char **) &deviceNameString);
- if (result == S_OK)
- {
- QString deviceName(deviceNameString);
- free(deviceNameString);
-
- IDeckLinkInput* deckLinkInput = NULL;
- IDeckLinkDisplayModeIterator* displayModeIterator = NULL;
- IDeckLinkDisplayMode* displayMode = NULL;
- HRESULT result;
-
- // Query the DeckLink for its configuration interface
- result = deckLink->QueryInterface(IID_IDeckLinkInput, (void**)&deckLinkInput);
- if (result != S_OK)
- {
- kDebug()<< "Could not obtain the IDeckLinkInput interface - result = "<<result;
- return;
- }
-
- // Obtain an IDeckLinkDisplayModeIterator to enumerate the display modes supported on output
- result = deckLinkInput->GetDisplayModeIterator(&displayModeIterator);
- if (result != S_OK)
- {
- kDebug()<< "Could not obtain the video input display mode iterator - result = "<<result;
- return;
- }
- QStringList availableModes;
- // List all supported output display modes
- while (displayModeIterator->Next(&displayMode) == S_OK)
- {
- char * displayModeString = NULL;
-
- result = displayMode->GetName((const char **) &displayModeString);
- if (result == S_OK)
- {
- //char modeName[64];
- int modeWidth;
- int modeHeight;
- BMDTimeValue frameRateDuration;
- BMDTimeScale frameRateScale;
- //int pixelFormatIndex = 0; // index into the gKnownPixelFormats / gKnownFormatNames arrays
- //BMDDisplayModeSupport displayModeSupport;
-
-
- // Obtain the display mode's properties
- modeWidth = displayMode->GetWidth();
- modeHeight = displayMode->GetHeight();
- displayMode->GetFrameRate(&frameRateDuration, &frameRateScale);
- QString description = QString(displayModeString) + " (" + QString::number(modeWidth) + "x" + QString::number(modeHeight) + " - " + QString::number((double)frameRateScale / (double)frameRateDuration) + i18n("fps") + ")";
- availableModes << description;
- //modelist->addItem(description);
- //printf(" %-20s \t %d x %d \t %7g FPS\t", displayModeString, modeWidth, modeHeight, (double)frameRateScale / (double)frameRateDuration);
-
- // Print the supported pixel formats for this display mode
- /*while ((gKnownPixelFormats[pixelFormatIndex] != 0) && (gKnownPixelFormatNames[pixelFormatIndex] != NULL))
- {
- if ((deckLinkOutput->DoesSupportVideoMode(displayMode->GetDisplayMode(), gKnownPixelFormats[pixelFormatIndex], bmdVideoOutputFlagDefault, &displayModeSupport, NULL) == S_OK)
- && (displayModeSupport != bmdDisplayModeNotSupported))
- {
- printf("%s\t", gKnownPixelFormatNames[pixelFormatIndex]);
- }
- pixelFormatIndex++;
- }*/
- free(displayModeString);
- }
-
- // Release the IDeckLinkDisplayMode object to prevent a leak
- displayMode->Release();
- }
- devicelist->addItem(deviceName, availableModes);
- }
-
-
- //print_attributes(deckLink);
-
- // ** List the video output display modes supported by the card
- //print_output_modes(deckLink);
-
- // ** List the input and output capabilities of the card
- //print_capabilities(deckLink);
-
- // Release the IDeckLink instance when we've finished with it to prevent leaks
- deckLink->Release();
- }
-
- deckLinkIterator->Release();
- if (modelist != NULL && devicelist->count() > 0) {
- QStringList modes = devicelist->itemData(devicelist->currentIndex()).toStringList();
- modelist->insertItems(0, modes);
- }
+ IDeckLinkIterator* deckLinkIterator;
+ IDeckLink* deckLink;
+ int numDevices = 0;
+ HRESULT result;
+ bool found = false;
+
+ // Create an IDeckLinkIterator object to enumerate all DeckLink cards in the system
+ deckLinkIterator = CreateDeckLinkIteratorInstance();
+ if(deckLinkIterator == NULL) {
+ kDebug() << "A DeckLink iterator could not be created. The DeckLink drivers may not be installed.";
+ return found;
+ }
+
+ // Enumerate all cards in this system
+ while(deckLinkIterator->Next(&deckLink) == S_OK) {
+ char * deviceNameString = NULL;
+
+ // Increment the total number of DeckLink cards found
+ numDevices++;
+ //if (numDevices > 1)
+ kDebug() << "// FOUND a BM device\n\n+++++++++++++++++++++++++++++++++++++";
+
+ // *** Print the model name of the DeckLink card
+ result = deckLink->GetModelName((const char **) &deviceNameString);
+ if(result == S_OK) {
+ QString deviceName(deviceNameString);
+ free(deviceNameString);
+
+ IDeckLinkInput* deckLinkInput = NULL;
+ IDeckLinkDisplayModeIterator* displayModeIterator = NULL;
+ IDeckLinkDisplayMode* displayMode = NULL;
+ HRESULT result;
+
+ // Query the DeckLink for its configuration interface
+ result = deckLink->QueryInterface(IID_IDeckLinkInput, (void**)&deckLinkInput);
+ if(result != S_OK) {
+ kDebug() << "Could not obtain the IDeckLinkInput interface - result = " << result;
+ return found;
+ }
+
+ // Obtain an IDeckLinkDisplayModeIterator to enumerate the display modes supported on output
+ result = deckLinkInput->GetDisplayModeIterator(&displayModeIterator);
+ if(result != S_OK) {
+ kDebug() << "Could not obtain the video input display mode iterator - result = " << result;
+ return found;
+ }
+ QStringList availableModes;
+ // List all supported output display modes
+ while(displayModeIterator->Next(&displayMode) == S_OK) {
+ char * displayModeString = NULL;
+
+ result = displayMode->GetName((const char **) &displayModeString);
+ if(result == S_OK) {
+ //char modeName[64];
+ int modeWidth;
+ int modeHeight;
+ BMDTimeValue frameRateDuration;
+ BMDTimeScale frameRateScale;
+ //int pixelFormatIndex = 0; // index into the gKnownPixelFormats / gKnownFormatNames arrays
+ //BMDDisplayModeSupport displayModeSupport;
+
+
+ // Obtain the display mode's properties
+ modeWidth = displayMode->GetWidth();
+ modeHeight = displayMode->GetHeight();
+ displayMode->GetFrameRate(&frameRateDuration, &frameRateScale);
+ QString description = QString(displayModeString) + " (" + QString::number(modeWidth) + "x" + QString::number(modeHeight) + " - " + QString::number((double)frameRateScale / (double)frameRateDuration) + i18n("fps") + ")";
+ availableModes << description;
+ //modelist->addItem(description);
+ //printf(" %-20s \t %d x %d \t %7g FPS\t", displayModeString, modeWidth, modeHeight, (double)frameRateScale / (double)frameRateDuration);
+
+ // Print the supported pixel formats for this display mode
+ /*while ((gKnownPixelFormats[pixelFormatIndex] != 0) && (gKnownPixelFormatNames[pixelFormatIndex] != NULL))
+ {
+ if ((deckLinkOutput->DoesSupportVideoMode(displayMode->GetDisplayMode(), gKnownPixelFormats[pixelFormatIndex], bmdVideoOutputFlagDefault, &displayModeSupport, NULL) == S_OK)
+ && (displayModeSupport != bmdDisplayModeNotSupported))
+ {
+ printf("%s\t", gKnownPixelFormatNames[pixelFormatIndex]);
+ }
+ pixelFormatIndex++;
+ }*/
+ free(displayModeString);
+ }
+
+ // Release the IDeckLinkDisplayMode object to prevent a leak
+ displayMode->Release();
+ }
+ devicelist->addItem(deviceName, availableModes);
+ found = true;
+ }
+
+
+ //print_attributes(deckLink);
+
+ // ** List the video output display modes supported by the card
+ //print_output_modes(deckLink);
+
+ // ** List the input and output capabilities of the card
+ //print_capabilities(deckLink);
+
+ // Release the IDeckLink instance when we've finished with it to prevent leaks
+ deckLink->Release();
+ }
+
+ deckLinkIterator->Release();
+ if(modelist != NULL && devicelist->count() > 0) {
+ QStringList modes = devicelist->itemData(devicelist->currentIndex()).toStringList();
+ modelist->insertItems(0, modes);
+ }
+ return found;
}
\ No newline at end of file
class BMInterface
{
public:
- BMInterface();
- ~BMInterface();
- static void getBlackMagicDeviceList(KComboBox *devicelist, KComboBox *modelist);
+ BMInterface();
+ ~BMInterface();
+ static bool getBlackMagicDeviceList(KComboBox *devicelist, KComboBox *modelist);
};
#endif
\ No newline at end of file
--- /dev/null
+/***************************************************************************
+ * Copyright (C) 2010 by Jean-Baptiste Mardelle (jb@kdenlive.org) *
+ * *
+ * This program is free software; you can redistribute it and/or modify *
+ * it under the terms of the GNU General Public License as published by *
+ * the Free Software Foundation; either version 2 of the License, or *
+ * (at your option) any later version. *
+ * *
+ * This program is distributed in the hope that it will be useful, *
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of *
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
+ * GNU General Public License for more details. *
+ * *
+ * You should have received a copy of the GNU General Public License *
+ * along with this program; if not, write to the *
+ * Free Software Foundation, Inc., *
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA *
+ ***************************************************************************/
+
+#include <KLocale>
+
+#include "capturehandler.h"
+#include "kdenlivesettings.h"
+
+CaptureHandler::CaptureHandler(QVBoxLayout *lay, QWidget *parent):
+ m_layout(lay),
+ m_parent(parent)
+{
+}
+
+CaptureHandler::~CaptureHandler()
+{
+ stopCapture();
+}
+
+void CaptureHandler::stopCapture()
+{
+}
+
+
+
--- /dev/null
+/***************************************************************************
+ * Copyright (C) 2010 by Jean-Baptiste Mardelle (jb@kdenlive.org) *
+ * *
+ * This program is free software; you can redistribute it and/or modify *
+ * it under the terms of the GNU General Public License as published by *
+ * the Free Software Foundation; either version 2 of the License, or *
+ * (at your option) any later version. *
+ * *
+ * This program is distributed in the hope that it will be useful, *
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of *
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
+ * GNU General Public License for more details. *
+ * *
+ * You should have received a copy of the GNU General Public License *
+ * along with this program; if not, write to the *
+ * Free Software Foundation, Inc., *
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA *
+ ***************************************************************************/
+
+#ifndef __CAPTUREHANDLER_H__
+#define __CAPTUREHANDLER_H__
+
+#include <QWidget>
+#include <QObject>
+#include <QLayout>
+
+class CaptureHandler : public QObject
+{
+ Q_OBJECT
+public:
+ CaptureHandler(QVBoxLayout *lay, QWidget *parent = 0);
+ ~CaptureHandler();
+ virtual void startPreview(int deviceId, int captureMode) = 0;
+ virtual void stopPreview() = 0;
+ virtual void startCapture(const QString &path) = 0;
+ virtual void stopCapture();
+ virtual void captureFrame(const QString &fname) = 0;
+ virtual void showOverlay(QImage img, bool transparent = true) = 0;
+ virtual void hideOverlay() = 0;
+ virtual void hidePreview(bool hide) = 0;
+
+protected:
+ QVBoxLayout *m_layout;
+ QWidget *m_parent;
+
+signals:
+ void gotTimeCode(ulong);
+ void gotMessage(const QString &);
+};
+
+
+#endif
#include "stopmotion.h"
#include "../blackmagic/devices.h"
+#include "../v4l/v4lcapture.h"
#include "../slideshowclip.h"
#include "kdenlivesettings.h"
QDialog(parent)
, Ui::Stopmotion_UI()
, m_projectFolder(projectFolder)
+ , m_bmCapture(NULL)
, m_sequenceFrame(0)
, m_animatedIndex(-1)
{
capture_button->setEnabled(false);
connect(sequence_name, SIGNAL(textChanged(const QString &)), this, SLOT(sequenceNameChanged(const QString &)));
- BMInterface::getBlackMagicDeviceList(capture_device, NULL);
QVBoxLayout *lay = new QVBoxLayout;
- m_bmCapture = new CaptureHandler(lay);
- connect(m_bmCapture, SIGNAL(gotMessage(const QString &)), this, SLOT(slotGotHDMIMessage(const QString &)));
+ if (BMInterface::getBlackMagicDeviceList(capture_device, NULL)) {
+ // Found a BlackMagic device
+        kDebug() << "CREATE BM DEVICE";
+ m_bmCapture = new BmdCaptureHandler(lay);
+ connect(m_bmCapture, SIGNAL(gotMessage(const QString &)), this, SLOT(slotGotHDMIMessage(const QString &)));
+ }
+ else {
+        kDebug() << "CREATE V4L DEVICE";
+ m_bmCapture = new V4lCaptureHandler(lay);
+ capture_device->addItem(KdenliveSettings::video4vdevice());
+ }
m_frame_preview = new MyLabel(this);
connect(m_frame_preview, SIGNAL(seek(bool)), this, SLOT(slotSeekFrame(bool)));
lay->addWidget(m_frame_preview);
--- /dev/null
+/* fswebcam - FireStorm.cx's webcam generator */
+/*============================================================*/
+/* Copyright (C)2005-2010 Philip Heron <phil@sanslogic.co.uk> */
+/* */
+/* This program is distributed under the terms of the GNU */
+/* General Public License, version 2. You may use, modify, */
+/* and redistribute it under the terms of this license. A */
+/* copy should be included with this source. */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <time.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <errno.h>
+#include "src.h"
+
+#ifdef HAVE_V4L2
+extern src_mod_t src_v4l2;
+#endif
+#ifdef HAVE_V4L1
+extern src_mod_t src_v4l1;
+#endif
+
+/* Supported palette types. */
+src_palette_t src_palette[] = {
+ { "PNG" },
+ { "JPEG" },
+ { "MJPEG" },
+ { "S561" },
+ { "RGB32" },
+ { "BGR32" },
+ { "RGB24" },
+ { "BGR24" },
+ { "YUYV" },
+ { "UYVY" },
+ { "YUV420P" },
+ { "NV12MB" },
+ { "BAYER" },
+ { "SGBRG8" },
+ { "SGRBG8" },
+ { "RGB565" },
+ { "RGB555" },
+ { "Y16" },
+ { "GREY" },
+ { NULL }
+};
+
+
+int src_open(src_t *src, char *source)
+{
+ int i = 0;
+ size_t sl;
+ char *s;
+ struct stat st;
+
+ if(!source)
+ {
+ fprintf(stderr, "No source was specified.......");
+ return(-1);
+ }
+
+ sl = strlen(source) + 1;
+ s = malloc(sl);
+ if(!s)
+ {
+ fprintf(stderr, "Out of memory.");
+ return(-1);
+ }
+
+ src->source = source;
+
+ i = 0;
+ int r = src_v4l2.flags;
+ if(S_ISCHR(st.st_mode) && r & SRC_TYPE_DEVICE) r = -1;
+ else if(!S_ISCHR(st.st_mode) && r & SRC_TYPE_FILE) r = -1;
+ else r = 0;
+ src->type = 0;
+ r = src_v4l2.open(src);
+ if(r == -2) return(-1);
+
+ /*
+ int frame;
+ for(frame = 0; frame < config->skipframes; frame++)
+ if(src_grab(src) == -1) break;*/
+
+ return 0;
+}
+
+int src_close(src_t *src)
+{
+ int r;
+
+ if(src->captured_frames)
+ {
+ double seconds =
+ (src->tv_last.tv_sec + src->tv_last.tv_usec / 1000000.0) -
+ (src->tv_first.tv_sec + src->tv_first.tv_usec / 1000000.0);
+
+ /* Display FPS if enough frames where captured. */
+ if(src->captured_frames == 1)
+ {
+ /*MSG("Captured frame in %0.2f seconds.", seconds);*/
+ }
+ else if(src->captured_frames < 3)
+ {
+ /*MSG("Captured %i frames in %0.2f seconds.",
+ src->captured_frames, seconds);*/
+ }
+ else
+ {
+ /*MSG("Captured %i frames in %0.2f seconds. (%i fps)",
+ src->captured_frames, seconds,
+ (int) (src->captured_frames / seconds));*/
+ }
+ }
+
+ r = src_v4l2.close(src);
+
+ if(src->source) free(src->source);
+
+ return(r);
+}
+
+int src_grab(src_t *src)
+{
+ int r = src_v4l2.grab(src);
+
+ if(!r)
+ {
+ if(!src->captured_frames) gettimeofday(&src->tv_first, NULL);
+ gettimeofday(&src->tv_last, NULL);
+
+ src->captured_frames++;
+ }
+
+ return(r);
+}
+
+/* Pointers are great things. Terrible things yes, but great. */
+/* These work but are very ugly and will be re-written soon. */
+
+int src_set_option(src_option_t ***options, char *name, char *value)
+{
+ src_option_t **opts, *opt;
+ int count;
+
+ if(!options) return(-1);
+ if(!*options)
+ {
+ *options = malloc(sizeof(src_option_t *));
+ if(!*options)
+ {
+ /*ERROR("Out of memory.");*/
+ return(-1);
+ }
+
+ *options[0] = NULL;
+ }
+
+ count = 0;
+ opts = *options;
+ while(*opts)
+ {
+ if((*opts)->name) if(!strcasecmp(name, (*opts)->name)) break;
+ opts++;
+ count++;
+ }
+
+ if(!*opts)
+ {
+ void *new;
+
+ opt = (src_option_t *) malloc(sizeof(src_option_t));
+ if(!opt)
+ {
+ /*ERROR("Out of memory.");*/
+ return(-1);
+ }
+
+ new = realloc(*options, sizeof(src_option_t *) * (count + 2));
+ if(!new)
+ {
+ free(opt);
+ /*ERROR("Out of memory.");*/
+ return(-1);
+ }
+
+ *options = (src_option_t **) new;
+ (*options)[count++] = opt;
+ (*options)[count++] = NULL;
+
+ opt->name = strdup(name);
+ opt->value = NULL;
+ }
+ else opt = *opts;
+
+ if(opt->value)
+ {
+ free(opt->value);
+ opt->value = NULL;
+ }
+ if(value) opt->value = strdup(value);
+
+ return(0);
+}
+
+int src_get_option_by_number(src_option_t **opt, int number,
+ char **name, char **value)
+{
+ int i;
+
+ if(!opt || !name || !value) return(-1);
+
+ i = 0;
+ while(*opt)
+ {
+ if(i == number)
+ {
+ *name = (*opt)->name;
+ *value = (*opt)->value;
+ return(0);
+ }
+
+ i++;
+ }
+
+ return(-1);
+}
+
+int src_get_option_by_name(src_option_t **opt, char *name, char **value)
+{
+ if(!opt || !name || !value) return(-1);
+
+ while(*opt)
+ {
+ if((*opt)->name)
+ {
+ if(!strcasecmp(name, (*opt)->name))
+ {
+ *value = (*opt)->value;
+ return(0);
+ }
+ }
+
+ opt++;
+ }
+
+ return(-1);
+}
+
+/* Release an option array built by src_set_option(): every name,
+ * value and entry is freed, then the array itself, and *options is
+ * reset to NULL. Returns -1 when there is nothing to free, else 0.
+ * (free(NULL) is a no-op, so value-less entries need no guard.) */
+int src_free_options(src_option_t ***options)
+{
+    src_option_t **entry;
+
+    if(!options || !*options) return(-1);
+
+    for(entry = *options; *entry; entry++)
+    {
+        free((*entry)->name);
+        free((*entry)->value);
+        free(*entry);
+    }
+
+    free(*options);
+    *options = NULL;
+
+    return(0);
+}
+
--- /dev/null
+/* fswebcam - FireStorm.cx's webcam generator */
+/*============================================================*/
+/* Copyright (C)2005-2010 Philip Heron <phil@sanslogic.co.uk> */
+/* */
+/* This program is distributed under the terms of the GNU */
+/* General Public License, version 2. You may use, modify, */
+/* and redistribute it under the terms of this license. A */
+/* copy should be included with this source. */
+
+#ifdef __cplusplus
+extern "C" {
+ #endif
+
+#include <stdint.h>
+#include <sys/time.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <pthread.h>
+#include <unistd.h>
+#include <fcntl.h>
+
+#ifndef INC_SRC_H
+#define INC_SRC_H
+
+/* Capture-source capability flags (src_mod_t.flags). */
+#define SRC_TYPE_NONE (0)
+#define SRC_TYPE_DEVICE (1 << 0) /* Can capture from a device */
+#define SRC_TYPE_FILE (1 << 1) /* Can capture from a file */
+
+/* When updating the palette list remember to update src_palette[] in src.c */
+
+#define SRC_PAL_ANY (-1)
+#define SRC_PAL_PNG (0)
+#define SRC_PAL_JPEG (1)
+#define SRC_PAL_MJPEG (2)
+#define SRC_PAL_S561 (3)
+#define SRC_PAL_RGB32 (4)
+#define SRC_PAL_BGR32 (5)
+#define SRC_PAL_RGB24 (6)
+#define SRC_PAL_BGR24 (7)
+#define SRC_PAL_YUYV (8)
+#define SRC_PAL_UYVY (9)
+#define SRC_PAL_YUV420P (10)
+#define SRC_PAL_NV12MB (11)
+#define SRC_PAL_BAYER (12)
+#define SRC_PAL_SGBRG8 (13)
+#define SRC_PAL_SGRBG8 (14)
+#define SRC_PAL_RGB565 (15)
+#define SRC_PAL_RGB555 (16)
+#define SRC_PAL_Y16 (17)
+#define SRC_PAL_GREY (18)
+
+/* Bit flags for src_t.list: which device properties to enumerate. */
+#define SRC_LIST_INPUTS (1 << 1)
+#define SRC_LIST_TUNERS (1 << 2)
+#define SRC_LIST_FORMATS (1 << 3)
+#define SRC_LIST_CONTROLS (1 << 4)
+#define SRC_LIST_FRAMESIZES (1 << 5)
+#define SRC_LIST_FRAMERATES (1 << 6)
+
+/* The SCALE macro converts a value (sv) from one range (sf -> sr)
+ to another (df -> dr). */
+#define SCALE(df, dr, sf, sr, sv) (((sv - sf) * (dr - df) / (sr - sf)) + df)
+
+/* Human-readable name of one palette; indexed by SRC_PAL_*. */
+typedef struct {
+ char *name;
+} src_palette_t;
+
+extern src_palette_t src_palette[];
+
+/* One name/value pair stored via src_set_option(). */
+typedef struct {
+ char *name;
+ char *value;
+} src_option_t;
+
+/* A capture source: configuration in, last grabbed frame out. */
+typedef struct {
+
+ /* Source Options */
+ char *source;
+ uint8_t type;
+
+ /* Backend-private state (e.g. src_v4l2_t for the v4l2 module). */
+ void *state;
+
+ /* Last captured image */
+ uint32_t length;
+ void *img;
+
+ /* Input Options */
+ char *input;
+ uint8_t tuner;
+ uint32_t frequency;
+ uint32_t delay;
+ uint32_t timeout;
+ char use_read;
+
+ /* List Options */
+ uint8_t list;
+
+ /* Image Options */
+ int palette;
+ uint32_t width;
+ uint32_t height;
+ uint32_t fps;
+
+ src_option_t **option;
+
+ /* For calculating capture FPS */
+ uint32_t captured_frames;
+ struct timeval tv_first;
+ struct timeval tv_last;
+
+} src_t;
+
+/* A capture backend: its name, SRC_TYPE_* flags and entry points. */
+typedef struct {
+
+ char *name;
+
+ uint8_t flags;
+
+ int (*open)(src_t *);
+ int (*close)(src_t *);
+ int (*grab)(src_t *);
+
+} src_mod_t;
+
+/* Iterator over command-line or configuration-file options.
+ NOTE(review): 'struct option' comes from <getopt.h>, which this
+ header does not include - confirm every includer provides it. */
+typedef struct {
+
+ /* List of options. */
+ char *opts;
+ const struct option *long_opts;
+
+ /* When reading from the command line. */
+ int opt_index;
+
+ /* When reading from a configuration file. */
+ char *filename;
+ FILE *f;
+ size_t line;
+
+} fswc_getopt_t;
+
+/* One queued post-processing job. */
+typedef struct {
+ uint16_t id;
+ char *options;
+} fswebcam_job_t;
+
+/* Full fswebcam run configuration; defaults are filled by the caller
+ (see V4lCaptureHandler::startPreview in v4lcapture.cpp). */
+typedef struct {
+
+ /* General options. */
+ unsigned long loop;
+ signed long offset;
+ unsigned char background;
+ char *pidfile;
+ char *logfile;
+ char gmt;
+
+ /* Capture start time. */
+ time_t start;
+
+ /* Device options. */
+ char *device;
+ char *input;
+ unsigned char tuner;
+ unsigned long frequency;
+ unsigned long delay;
+ char use_read;
+ uint8_t list;
+
+ /* Image capture options. */
+ unsigned int width;
+ unsigned int height;
+ unsigned int frames;
+ unsigned int fps;
+ unsigned int skipframes;
+ int palette;
+ src_option_t **option;
+ char *dumpframe;
+
+ /* Job queue. */
+ uint8_t jobs;
+ fswebcam_job_t **job;
+
+ /* Banner options. */
+ char banner;
+ uint32_t bg_colour;
+ uint32_t bl_colour;
+ uint32_t fg_colour;
+ char *title;
+ char *subtitle;
+ char *timestamp;
+ char *info;
+ char *font;
+ int fontsize;
+ char shadow;
+
+ /* Overlay options. */
+ char *underlay;
+ char *overlay;
+
+ /* Output options. */
+ char *filename;
+ char format;
+ char compression;
+
+} fswebcam_config_t;
+
+extern int src_open(src_t *src, char *source);
+extern int src_close(src_t *src);
+extern int src_grab(src_t *src);
+
+extern int src_set_option(src_option_t ***options, char *name, char *value);
+extern int src_get_option_by_number(src_option_t **opt, int number, char **name, char **value);
+extern int src_get_option_by_name(src_option_t **opt, char *name, char **value);
+extern int src_free_options(src_option_t ***options);
+
+#endif
+
+
+#ifdef __cplusplus
+}
+#endif
+
--- /dev/null
+/* fswebcam - FireStorm.cx's webcam generator */
+/*============================================================*/
+/* Copyright (C)2005-2010 Philip Heron <phil@sanslogic.co.uk> */
+/* */
+/* This program is distributed under the terms of the GNU */
+/* General Public License, version 2. You may use, modify, */
+/* and redistribute it under the terms of this license. A */
+/* copy should be included with this source. */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <string.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include "videodev2.h"
+#include "src.h"
+
+#ifdef HAVE_V4L2
+
+/* One frame buffer: mmap'ed in streaming mode, malloc'ed in read mode. */
+typedef struct {
+ void *start;
+ size_t length;
+} v4l2_buffer_t;
+
+/* Per-device V4L2 capture state, hung off src->state. 'map' is 0 in
+ read() mode and -1 once mmap streaming is set up (see
+ src_v4l2_set_mmap); 'pframe' is the index of the buffer currently
+ handed out to the caller, or -1 before the first grab. */
+typedef struct {
+
+ int fd;
+ char map;
+
+ struct v4l2_capability cap;
+ struct v4l2_format fmt;
+ struct v4l2_requestbuffers req;
+ struct v4l2_buffer buf;
+
+ v4l2_buffer_t *buffer;
+
+ int pframe;
+
+} src_v4l2_t;
+
+static int src_v4l2_close(src_t *src);
+
+/* Mapping between fswebcam SRC_PAL_* IDs and V4L2 pixel formats. */
+typedef struct {
+ uint16_t src;
+ uint32_t v4l2;
+} v4l2_palette_t;
+
+/* Tried in order by src_v4l2_set_pix_format(); {0, 0} terminates. */
+v4l2_palette_t v4l2_palette[] = {
+ { SRC_PAL_JPEG, V4L2_PIX_FMT_JPEG },
+ { SRC_PAL_MJPEG, V4L2_PIX_FMT_MJPEG },
+ { SRC_PAL_S561, V4L2_PIX_FMT_SPCA561 },
+ { SRC_PAL_RGB24, V4L2_PIX_FMT_RGB24 },
+ { SRC_PAL_BGR24, V4L2_PIX_FMT_BGR24 },
+ { SRC_PAL_RGB32, V4L2_PIX_FMT_RGB32 },
+ { SRC_PAL_BGR32, V4L2_PIX_FMT_BGR32 },
+ { SRC_PAL_YUYV, V4L2_PIX_FMT_YUYV },
+ { SRC_PAL_UYVY, V4L2_PIX_FMT_UYVY },
+ { SRC_PAL_YUV420P, V4L2_PIX_FMT_YUV420 },
+ { SRC_PAL_BAYER, V4L2_PIX_FMT_SBGGR8 },
+ { SRC_PAL_SGBRG8, V4L2_PIX_FMT_SGBRG8 },
+ { SRC_PAL_SGRBG8, V4L2_PIX_FMT_SGRBG8 },
+ { SRC_PAL_RGB565, V4L2_PIX_FMT_RGB565 },
+ { SRC_PAL_RGB555, V4L2_PIX_FMT_RGB555 },
+ { SRC_PAL_Y16, V4L2_PIX_FMT_Y16 },
+ { SRC_PAL_GREY, V4L2_PIX_FMT_GREY },
+ { 0, 0 }
+};
+
+/* Query the device's V4L2 capabilities into s->cap and verify that it
+ * can capture video. Returns 0 on success, -1 on failure.
+ *
+ * BUG FIX: the capture check used to read
+ *     if(!s->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)
+ * which, because ! binds tighter than &, computed
+ * (!capabilities) & FLAG and was false for any device reporting at
+ * least one capability - non-capture devices slipped through. The
+ * flag test is now properly parenthesised. */
+int src_v4l2_get_capability(src_t *src)
+{
+    src_v4l2_t *s = (src_v4l2_t *) src->state;
+
+    if(ioctl(s->fd, VIDIOC_QUERYCAP, &s->cap) < 0)
+    {
+        /*ERROR("%s: Not a V4L2 device?", src->source);*/
+        return(-1);
+    }
+
+    /*DEBUG("%s information:", src->source);
+    DEBUG("cap.driver: \"%s\"", s->cap.driver);
+    DEBUG("cap.card: \"%s\"", s->cap.card);
+    DEBUG("cap.bus_info: \"%s\"", s->cap.bus_info);
+    DEBUG("cap.capabilities=0x%08X", s->cap.capabilities);*/
+    /*if(s->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) DEBUG("- VIDEO_CAPTURE");
+    if(s->cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) DEBUG("- VIDEO_OUTPUT");
+    if(s->cap.capabilities & V4L2_CAP_VIDEO_OVERLAY) DEBUG("- VIDEO_OVERLAY");
+    if(s->cap.capabilities & V4L2_CAP_VBI_CAPTURE) DEBUG("- VBI_CAPTURE");
+    if(s->cap.capabilities & V4L2_CAP_VBI_OUTPUT) DEBUG("- VBI_OUTPUT");
+    if(s->cap.capabilities & V4L2_CAP_RDS_CAPTURE) DEBUG("- RDS_CAPTURE");
+    if(s->cap.capabilities & V4L2_CAP_TUNER) DEBUG("- TUNER");
+    if(s->cap.capabilities & V4L2_CAP_AUDIO) DEBUG("- AUDIO");
+    if(s->cap.capabilities & V4L2_CAP_RADIO) DEBUG("- RADIO");
+    if(s->cap.capabilities & V4L2_CAP_READWRITE) DEBUG("- READWRITE");
+    if(s->cap.capabilities & V4L2_CAP_ASYNCIO) DEBUG("- ASYNCIO");
+    if(s->cap.capabilities & V4L2_CAP_STREAMING) DEBUG("- STREAMING");
+    if(s->cap.capabilities & V4L2_CAP_TIMEPERFRAME) DEBUG("- TIMEPERFRAME");*/
+
+    if(!(s->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
+    {
+        /*ERROR("Device does not support capturing.");*/
+        return(-1);
+    }
+
+    return(0);
+}
+
+/* Select the capture input for the device. src->input may name the
+ input (matched case-insensitively against the first 32 bytes) or
+ give its number; when absent, input 0 is used. If the selected
+ input is attached to a tuner, the tuner frequency is programmed
+ too. Returns 0 on success (tuner errors are non-fatal), -1 when
+ the requested input cannot be found or selected. */
+int src_v4l2_set_input(src_t *src)
+{
+ src_v4l2_t *s = (src_v4l2_t *) src->state;
+ struct v4l2_input input;
+ int count = 0, i = -1;
+
+ memset(&input, 0, sizeof(input));
+
+ if(src->list & SRC_LIST_INPUTS)
+ {
+ /*HEAD("--- Available inputs:");*/
+
+ input.index = count;
+ while(!ioctl(s->fd, VIDIOC_ENUMINPUT, &input))
+ {
+ /*MSG("%i: %s", count, input.name);*/
+ input.index = ++count;
+ }
+ }
+
+ /* If no input was specified, use input 0. */
+ if(!src->input)
+ {
+ /*MSG("No input was specified, using the first.");*/
+ count = 1;
+ i = 0;
+ }
+
+ /* Check if the input is specified by name. */
+ if(i == -1)
+ {
+ input.index = count;
+ while(!ioctl(s->fd, VIDIOC_ENUMINPUT, &input))
+ {
+ if(!strncasecmp((char *) input.name, src->input, 32))
+ i = count;
+ input.index = ++count;
+ }
+ }
+
+ if(i == -1)
+ {
+ char *endptr;
+
+ /* Is the input specified by number? */
+ i = strtol(src->input, &endptr, 10);
+
+ /* strtol consumed nothing: not a number either. */
+ if(endptr == src->input) i = -1;
+ }
+
+ if(i == -1 || i >= count)
+ {
+ /* The specified input wasn't found! */
+ /*ERROR("Unrecognised input \"%s\"", src->input);*/
+ return(-1);
+ }
+
+ /* Set the input. */
+ input.index = i;
+ if(ioctl(s->fd, VIDIOC_ENUMINPUT, &input) == -1)
+ {
+ /*ERROR("Unable to query input %i.", i);
+ ERROR("VIDIOC_ENUMINPUT: %s", strerror(errno));*/
+ return(-1);
+ }
+
+ /*DEBUG("%s: Input %i information:", src->source, i);
+ DEBUG("name = \"%s\"", input.name);
+ DEBUG("type = %08X", input.type);
+ if(input.type & V4L2_INPUT_TYPE_TUNER) DEBUG("- TUNER");
+ if(input.type & V4L2_INPUT_TYPE_CAMERA) DEBUG("- CAMERA");
+ DEBUG("audioset = %08X", input.audioset);
+ DEBUG("tuner = %08X", input.tuner);
+ DEBUG("status = %08X", input.status);
+ if(input.status & V4L2_IN_ST_NO_POWER) DEBUG("- NO_POWER");
+ if(input.status & V4L2_IN_ST_NO_SIGNAL) DEBUG("- NO_SIGNAL");
+ if(input.status & V4L2_IN_ST_NO_COLOR) DEBUG("- NO_COLOR");
+ if(input.status & V4L2_IN_ST_NO_H_LOCK) DEBUG("- NO_H_LOCK");
+ if(input.status & V4L2_IN_ST_COLOR_KILL) DEBUG("- COLOR_KILL");
+ if(input.status & V4L2_IN_ST_NO_SYNC) DEBUG("- NO_SYNC");
+ if(input.status & V4L2_IN_ST_NO_EQU) DEBUG("- NO_EQU");
+ if(input.status & V4L2_IN_ST_NO_CARRIER) DEBUG("- NO_CARRIER");
+ if(input.status & V4L2_IN_ST_MACROVISION) DEBUG("- MACROVISION");
+ if(input.status & V4L2_IN_ST_NO_ACCESS) DEBUG("- NO_ACCESS");
+ if(input.status & V4L2_IN_ST_VTR) DEBUG("- VTR");*/
+
+ if(ioctl(s->fd, VIDIOC_S_INPUT, &i) == -1)
+ {
+ /*ERROR("Error selecting input %i", i);
+ ERROR("VIDIOC_S_INPUT: %s", strerror(errno));*/
+ return(-1);
+ }
+
+ /* If this input is attached to a tuner, set the frequency. */
+ if(input.type & V4L2_INPUT_TYPE_TUNER)
+ {
+ char *range;
+ struct v4l2_tuner tuner;
+ struct v4l2_frequency freq;
+
+ /* Query the tuners capabilities. */
+
+ memset(&tuner, 0, sizeof(tuner));
+ tuner.index = input.tuner;
+
+ if(ioctl(s->fd, VIDIOC_G_TUNER, &tuner) == -1)
+ {
+ /*WARN("Error querying tuner %i.", input.tuner);
+ WARN("VIDIOC_G_TUNER: %s", strerror(errno));*/
+ return(0);
+ }
+
+ /* Only used by the commented-out diagnostics below. */
+ if(tuner.capability & V4L2_TUNER_CAP_LOW) range = "kHz";
+ else range = "MHz";
+
+ /*DEBUG("%s: Tuner %i information:", src->source, input.tuner);
+ DEBUG("name = \"%s\"", tuner.name);
+ DEBUG("type = %08X", tuner.type);
+ if(tuner.type == V4L2_TUNER_RADIO) DEBUG("- RADIO");
+ if(tuner.type == V4L2_TUNER_ANALOG_TV) DEBUG("- ANALOG_TV");
+ DEBUG("capability = %08X", tuner.capability);
+ if(tuner.capability & V4L2_TUNER_CAP_LOW) DEBUG("- LOW");
+ if(tuner.capability & V4L2_TUNER_CAP_NORM) DEBUG("- NORM");
+ if(tuner.capability & V4L2_TUNER_CAP_STEREO) DEBUG("- STEREO");
+ if(tuner.capability & V4L2_TUNER_CAP_LANG1) DEBUG("- LANG1");
+ if(tuner.capability & V4L2_TUNER_CAP_LANG2) DEBUG("- LANG2");
+ if(tuner.capability & V4L2_TUNER_CAP_SAP) DEBUG("- SAP");
+ DEBUG("rangelow = %08X, (%.3f%s)", tuner.rangelow, (double) tuner.rangelow * 16 / 1000, range);
+ DEBUG("rangehigh = %08X, (%.3f%s)", tuner.rangehigh, (double) tuner.rangehigh * 16 / 1000, range);
+ DEBUG("signal = %08X", tuner.signal);
+ DEBUG("afc = %08X", tuner.afc);*/
+
+ /* Set the frequency. */
+ memset(&freq, 0, sizeof(freq));
+ freq.tuner = input.tuner;
+ freq.type = V4L2_TUNER_ANALOG_TV;
+ freq.frequency = (src->frequency / 1000) * 16;
+
+ if(ioctl(s->fd, VIDIOC_S_FREQUENCY, &freq) == -1)
+ {
+ /*WARN("Error setting frequency %.3f%s", src->frequency / 16.0, range);
+ WARN("VIDIOC_S_FREQUENCY: %s", strerror(errno));*/
+ return(0);
+ }
+
+ /*MSG("Set frequency to %.3f%s",
+ (double) src->frequency / 1000, range);*/
+ }
+
+ return(0);
+}
+
+/* Display one control's name, current value and range/choices for the
+ * control listing (the actual printing is currently commented out,
+ * but the device is still queried). Returns 0, or -1 on allocation
+ * failure.
+ *
+ * FIX: the calloc() of the menu-string buffer was used unchecked;
+ * strncat() would dereference NULL on allocation failure. A redundant
+ * pre-loop assignment of 'm' was also dropped. */
+int src_v4l2_show_control(src_t *src, struct v4l2_queryctrl *queryctrl)
+{
+    src_v4l2_t *s = (src_v4l2_t *) src->state;
+    struct v4l2_querymenu querymenu;
+    struct v4l2_control control;
+    char *t;
+    int m;
+
+    if(queryctrl->flags & V4L2_CTRL_FLAG_DISABLED) return(0);
+
+    memset(&querymenu, 0, sizeof(querymenu));
+    memset(&control, 0, sizeof(control));
+
+    /* Buttons have no readable current value. */
+    if(queryctrl->type != V4L2_CTRL_TYPE_BUTTON)
+    {
+        control.id = queryctrl->id;
+        if(ioctl(s->fd, VIDIOC_G_CTRL, &control))
+        {
+            /*ERROR("Error reading value of control '%s'.", queryctrl->name);
+            ERROR("VIDIOC_G_CTRL: %s", strerror(errno));*/
+        }
+    }
+
+    switch(queryctrl->type)
+    {
+    case V4L2_CTRL_TYPE_INTEGER:
+
+        t = malloc(64); /* Ick ... TODO: re-write this. */
+        if(!t)
+        {
+            /*ERROR("Out of memory.");*/
+            return(-1);
+        }
+
+        /* Small ranges are shown raw; wide ranges also as a percentage. */
+        if(queryctrl->maximum - queryctrl->minimum <= 10)
+        {
+            snprintf(t, 63, "%i", control.value);
+        }
+        else
+        {
+            snprintf(t, 63, "%i (%i%%)",
+                control.value,
+                SCALE(0, 100,
+                    queryctrl->minimum,
+                    queryctrl->maximum,
+                    control.value));
+        }
+
+        /*MSG("%-25s %-15s %i - %i", queryctrl->name, t,
+        queryctrl->minimum, queryctrl->maximum);*/
+
+        free(t);
+
+        break;
+
+    case V4L2_CTRL_TYPE_BOOLEAN:
+        /*MSG("%-25s %-15s True | False", queryctrl->name,
+        (control.value ? "True" : "False"));*/
+        break;
+
+    case V4L2_CTRL_TYPE_MENU:
+
+        querymenu.id = queryctrl->id;
+
+        /* Build a "choice1 | choice2 | ..." string; 34 bytes per item
+         * covers the 32-byte menu name plus the separator. */
+        t = calloc((queryctrl->maximum - queryctrl->minimum) + 1, 34);
+        if(!t)
+        {
+            /*ERROR("Out of memory.");*/
+            return(-1);
+        }
+
+        for(m = queryctrl->minimum; m <= queryctrl->maximum; m++)
+        {
+            querymenu.index = m;
+            if(!ioctl(s->fd, VIDIOC_QUERYMENU, &querymenu))
+            {
+                strncat(t, (char *) querymenu.name, 32);
+                if(m < queryctrl->maximum) strncat(t, " | ", 3);
+            }
+        }
+
+        querymenu.index = control.value;
+        if(ioctl(s->fd, VIDIOC_QUERYMENU, &querymenu))
+        {
+            free(t);
+            /*ERROR("Error reading value of menu item %i for control '%s'",
+            control.value, queryctrl->name);
+            ERROR("VIDIOC_QUERYMENU: %s", strerror(errno));*/
+            return(0);
+        }
+
+        /*MSG("%-25s %-15s %s", queryctrl->name, querymenu.name, t);*/
+        free(t);
+
+        break;
+
+    case V4L2_CTRL_TYPE_BUTTON:
+        /*MSG("%-25s %-15s %s", queryctrl->name, "-", "[Button]");*/
+        break;
+
+    default:
+        /*MSG("%-25s %-15s %s", queryctrl->name, "N/A", "[Unknown Control Type]");*/
+        break;
+    }
+
+    return(0);
+}
+
+/* Apply a single user-supplied option value to the matching device
+ control. The option is looked up by the control's name; controls
+ without a matching option are left untouched. Integer controls
+ accept plain numbers or percentages ("50%"), booleans accept
+ "1"/"0"/"true"/"false", and menu controls are matched by item name.
+ Returns 0, or -1 when the supplied value cannot be interpreted. */
+int src_v4l2_set_control(src_t *src, struct v4l2_queryctrl *queryctrl)
+{
+ src_v4l2_t *s = (src_v4l2_t *) src->state;
+ struct v4l2_control control;
+ struct v4l2_querymenu querymenu;
+ char *sv;
+ int iv;
+
+ if(queryctrl->flags & V4L2_CTRL_FLAG_DISABLED) return(0);
+ /* No option with this control's name: nothing to set. */
+ if(src_get_option_by_name(src->option, (char *) queryctrl->name, &sv))
+ return(0);
+
+ memset(&querymenu, 0, sizeof(querymenu));
+ memset(&control, 0, sizeof(control));
+
+ control.id = queryctrl->id;
+
+ switch(queryctrl->type)
+ {
+ case V4L2_CTRL_TYPE_INTEGER:
+
+ /* Convert the value to an integer. */
+ iv = atoi(sv);
+
+ /* Is the value a precentage? */
+ if(strchr(sv, '%'))
+ {
+ /* Adjust the precentage to fit the controls range. */
+ iv = SCALE(queryctrl->minimum, queryctrl->maximum,
+ 0, 100, iv);
+ }
+
+ /*MSG("Setting %s to %i (%i%%).", queryctrl->name, iv,
+ SCALE(0, 100, queryctrl->minimum, queryctrl->maximum, iv));*/
+
+ /*if(iv < queryctrl->minimum || iv > queryctrl->maximum)
+ WARN("Value is out of range. Setting anyway.");*/
+
+ control.value = iv;
+ ioctl(s->fd, VIDIOC_S_CTRL, &control);
+ break;
+
+ case V4L2_CTRL_TYPE_BOOLEAN:
+
+ iv = -1;
+ if(!strcasecmp(sv, "1") || !strcasecmp(sv, "true")) iv = 1;
+ if(!strcasecmp(sv, "0") || !strcasecmp(sv, "false")) iv = 0;
+
+ if(iv == -1)
+ {
+ /*WARN("Unknown boolean value '%s' for %s.",
+ sv, queryctrl->name);*/
+ return(-1);
+ }
+
+ /*MSG("Setting %s to %s (%i).", queryctrl->name, sv, iv);*/
+
+ control.value = iv;
+ ioctl(s->fd, VIDIOC_S_CTRL, &control);
+
+ break;
+
+ case V4L2_CTRL_TYPE_MENU:
+
+ /* Scan for a matching value. */
+ querymenu.id = queryctrl->id;
+
+ for(iv = queryctrl->minimum; iv <= queryctrl->maximum; iv++)
+ {
+ querymenu.index = iv;
+
+ if(ioctl(s->fd, VIDIOC_QUERYMENU, &querymenu))
+ {
+ /*ERROR("Error querying menu.");*/
+ continue;
+ }
+
+ /* Menu item names are at most 32 bytes. */
+ if(!strncasecmp((char *) querymenu.name, sv, 32))
+ break;
+ }
+
+ if(iv > queryctrl->maximum)
+ {
+ /*MSG("Unknown value '%s' for %s.", sv, queryctrl->name);*/
+ return(-1);
+ }
+
+ /*MSG("Setting %s to %s (%i).",
+ queryctrl->name, querymenu.name, iv);*/
+
+ control.value = iv;
+ ioctl(s->fd, VIDIOC_S_CTRL, &control);
+
+ break;
+
+ case V4L2_CTRL_TYPE_BUTTON:
+
+ /*MSG("Triggering %s control.", queryctrl->name);
+ ioctl(s->fd, VIDIOC_S_CTRL, &control);*/
+
+ break;
+
+ default:
+ /*WARN("Not setting unknown control type %i (%s).",
+ queryctrl->name);*/
+ break;
+ }
+
+ return(0);
+}
+
+/* Enumerate the device's controls and apply any user-supplied option
+ * values. When SRC_LIST_CONTROLS is requested, every control is
+ * displayed first; both the standard range (V4L2_CID_BASE up to
+ * V4L2_CID_LASTP1) and the driver-private range (from
+ * V4L2_CID_PRIVATE_BASE until the driver reports no more) are
+ * scanned. Always returns 0. */
+int src_v4l2_set_controls(src_t *src)
+{
+    src_v4l2_t *s = (src_v4l2_t *) src->state;
+    struct v4l2_queryctrl queryctrl;
+    int cid;
+
+    memset(&queryctrl, 0, sizeof(queryctrl));
+
+    if(src->list & SRC_LIST_CONTROLS)
+    {
+        /* Show the standard controls... */
+        for(cid = V4L2_CID_BASE; cid < V4L2_CID_LASTP1; cid++)
+        {
+            queryctrl.id = cid;
+            if(!ioctl(s->fd, VIDIOC_QUERYCTRL, &queryctrl))
+                src_v4l2_show_control(src, &queryctrl);
+        }
+
+        /* ...then the driver-private ones until the driver says stop. */
+        for(cid = V4L2_CID_PRIVATE_BASE; ; cid++)
+        {
+            queryctrl.id = cid;
+            if(ioctl(s->fd, VIDIOC_QUERYCTRL, &queryctrl)) break;
+            src_v4l2_show_control(src, &queryctrl);
+        }
+    }
+
+    /* Apply user options to the standard control range. */
+    for(cid = V4L2_CID_BASE; cid < V4L2_CID_LASTP1; cid++)
+    {
+        queryctrl.id = cid;
+        if(!ioctl(s->fd, VIDIOC_QUERYCTRL, &queryctrl))
+            src_v4l2_set_control(src, &queryctrl);
+    }
+
+    /* And to the driver-private range. */
+    for(cid = V4L2_CID_PRIVATE_BASE; ; cid++)
+    {
+        queryctrl.id = cid;
+        if(ioctl(s->fd, VIDIOC_QUERYCTRL, &queryctrl)) break;
+        src_v4l2_set_control(src, &queryctrl);
+    }
+
+    return(0);
+}
+
+/* Negotiate a pixel format with the driver. When src->palette names a
+ specific SRC_PAL_* format only that one is tried; otherwise every
+ entry of v4l2_palette[] is offered in order. On success
+ src->palette, src->width and src->height are updated to what the
+ driver accepted and 0 is returned; -1 when no format matched. */
+int src_v4l2_set_pix_format(src_t *src)
+{
+ src_v4l2_t *s = (src_v4l2_t *) src->state;
+ struct v4l2_fmtdesc fmt;
+ int v4l2_pal;
+
+ /* Dump a list of formats the device supports. */
+ /*DEBUG("Device offers the following V4L2 pixel formats:");*/
+
+ v4l2_pal = 0;
+ memset(&fmt, 0, sizeof(fmt));
+ fmt.index = v4l2_pal;
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ while(ioctl(s->fd, VIDIOC_ENUM_FMT, &fmt) != -1)
+ {
+ /*DEBUG("%i: [0x%08X] '%c%c%c%c' (%s)", v4l2_pal,
+ fmt.pixelformat,
+ fmt.pixelformat >> 0, fmt.pixelformat >> 8,
+ fmt.pixelformat >> 16, fmt.pixelformat >> 24,
+ fmt.description);*/
+
+ memset(&fmt, 0, sizeof(fmt));
+ fmt.index = ++v4l2_pal;
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ }
+
+ /* Step through each palette type. */
+ v4l2_pal = 0;
+
+ if(src->palette != -1)
+ {
+ /* A specific palette was requested: find its table entry. */
+ while(v4l2_palette[v4l2_pal].v4l2)
+ {
+ if(v4l2_palette[v4l2_pal].src == src->palette) break;
+ v4l2_pal++;
+ }
+
+ if(!v4l2_palette[v4l2_pal].v4l2)
+ {
+ /*ERROR("Unable to handle palette format %s.",
+ src_palette[src->palette]);*/
+
+ return(-1);
+ }
+ }
+
+ while(v4l2_palette[v4l2_pal].v4l2)
+ {
+ /* Try the palette... */
+ memset(&s->fmt, 0, sizeof(s->fmt));
+ s->fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ s->fmt.fmt.pix.width = src->width;
+ s->fmt.fmt.pix.height = src->height;
+ s->fmt.fmt.pix.pixelformat = v4l2_palette[v4l2_pal].v4l2;
+ s->fmt.fmt.pix.field = V4L2_FIELD_ANY;
+
+ if(ioctl(s->fd, VIDIOC_TRY_FMT, &s->fmt) != -1 &&
+ s->fmt.fmt.pix.pixelformat == v4l2_palette[v4l2_pal].v4l2)
+ {
+ src->palette = v4l2_palette[v4l2_pal].src;
+
+ /*INFO("Using palette %s", src_palette[src->palette].name);*/
+
+ /* The driver may have adjusted the requested size. */
+ if(s->fmt.fmt.pix.width != src->width ||
+ s->fmt.fmt.pix.height != src->height)
+ {
+ /*MSG("Adjusting resolution from %ix%i to %ix%i.",
+ src->width, src->height,
+ s->fmt.fmt.pix.width,
+ s->fmt.fmt.pix.height);*/
+ src->width = s->fmt.fmt.pix.width;
+ src->height = s->fmt.fmt.pix.height;
+ }
+
+ if(ioctl(s->fd, VIDIOC_S_FMT, &s->fmt) == -1)
+ {
+ /*ERROR("Error setting pixel format.");
+ ERROR("VIDIOC_S_FMT: %s", strerror(errno));*/
+ return(-1);
+ }
+
+ /* For MJPEG, ask the driver to include DHT (Huffman
+ table) markers in the stream (V4L2_JPEG_MARKER_DHT). */
+ if(v4l2_palette[v4l2_pal].v4l2 == V4L2_PIX_FMT_MJPEG)
+ {
+ struct v4l2_jpegcompression jpegcomp;
+
+ memset(&jpegcomp, 0, sizeof(jpegcomp));
+ ioctl(s->fd, VIDIOC_G_JPEGCOMP, &jpegcomp);
+ jpegcomp.jpeg_markers |= V4L2_JPEG_MARKER_DHT;
+ ioctl(s->fd, VIDIOC_S_JPEGCOMP, &jpegcomp);
+ }
+
+ return(0);
+ }
+
+ /* A specific palette was requested and refused: give up. */
+ if(src->palette != -1) break;
+
+ v4l2_pal++;
+ }
+
+ /*ERROR("Unable to find a compatible palette format.");*/
+
+ return(-1);
+}
+
+/* Ask the driver for src->fps frames per second via VIDIOC_S_PARM.
+ * Failure is non-fatal for capture - the device simply keeps its own
+ * rate. Returns 0 on success, -1 on error.
+ *
+ * BUG FIX: the ioctl was passed the v4l2_streamparm structure by
+ * value ("setfps") instead of by address; VIDIOC_S_PARM expects a
+ * pointer, so the call could never work. */
+int src_v4l2_set_fps(src_t *src)
+{
+    src_v4l2_t *s = (src_v4l2_t *) src->state;
+    struct v4l2_streamparm setfps;
+
+    memset(&setfps, 0, sizeof(setfps));
+
+    setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    setfps.parm.capture.timeperframe.numerator = 1;
+    setfps.parm.capture.timeperframe.denominator = src->fps;
+    if(ioctl(s->fd, VIDIOC_S_PARM, &setfps) == -1)
+    {
+        /* Not fatal - just warn about it */
+        /*WARN("Error setting frame rate:");
+        WARN("VIDIOC_S_PARM: %s", strerror(errno));*/
+        return(-1);
+    }
+
+    return(0);
+}
+
+/* Unmap every buffer previously mapped by src_v4l2_set_mmap();
+ * s->req.count holds the number of valid mappings. Always returns 0.
+ * The loop index is unsigned to match the type of req.count and avoid
+ * a signed/unsigned comparison. */
+int src_v4l2_free_mmap(src_t *src)
+{
+    src_v4l2_t *s = (src_v4l2_t *) src->state;
+    uint32_t i;
+
+    for(i = 0; i < s->req.count; i++)
+        munmap(s->buffer[i].start, s->buffer[i].length);
+
+    return(0);
+}
+
+/* Set up streaming capture: request 4 mmap buffers from the driver,
+ * map and queue each one, then start the stream. Returns 0 on
+ * success, -1 on any failure.
+ *
+ * FIXES over the original: on a VIDIOC_QUERYBUF failure the buffers
+ * mapped so far are now unmapped instead of leaked; every failure
+ * path resets s->buffer to NULL (the read() fall-back and
+ * src_v4l2_close() would otherwise touch a freed pointer), and
+ * s->map is reset to 0 on failures that occur after it was set, so a
+ * subsequent read()-mode session is not mistaken for mmap mode. */
+int src_v4l2_set_mmap(src_t *src)
+{
+    src_v4l2_t *s = (src_v4l2_t *) src->state;
+    enum v4l2_buf_type type;
+    uint32_t b;
+
+    /* Does the device support streaming? */
+    if(~s->cap.capabilities & V4L2_CAP_STREAMING) return(-1);
+
+    memset(&s->req, 0, sizeof(s->req));
+
+    s->req.count = 4;
+    s->req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    s->req.memory = V4L2_MEMORY_MMAP;
+
+    if(ioctl(s->fd, VIDIOC_REQBUFS, &s->req) == -1)
+    {
+        /*ERROR("Error requesting buffers for memory map.");
+        ERROR("VIDIOC_REQBUFS: %s", strerror(errno));*/
+        return(-1);
+    }
+
+    /*DEBUG("mmap information:");
+    DEBUG("frames=%d", s->req.count);*/
+
+    if(s->req.count < 2)
+    {
+        /*ERROR("Insufficient buffer memory.");*/
+        return(-1);
+    }
+
+    s->buffer = calloc(s->req.count, sizeof(v4l2_buffer_t));
+    if(!s->buffer)
+    {
+        /*ERROR("Out of memory.");*/
+        return(-1);
+    }
+
+    for(b = 0; b < s->req.count; b++)
+    {
+        struct v4l2_buffer buf;
+
+        memset(&buf, 0, sizeof(buf));
+
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buf.memory = V4L2_MEMORY_MMAP;
+        buf.index = b;
+
+        if(ioctl(s->fd, VIDIOC_QUERYBUF, &buf) == -1)
+        {
+            /*ERROR("Error querying buffer %i", b);
+            ERROR("VIDIOC_QUERYBUF: %s", strerror(errno));*/
+            /* Unmap the b buffers mapped so far (leaked before). */
+            s->req.count = b;
+            src_v4l2_free_mmap(src);
+            free(s->buffer);
+            s->buffer = NULL;
+            return(-1);
+        }
+
+        s->buffer[b].length = buf.length;
+        s->buffer[b].start = mmap(NULL, buf.length,
+            PROT_READ | PROT_WRITE, MAP_SHARED, s->fd, buf.m.offset);
+
+        if(s->buffer[b].start == MAP_FAILED)
+        {
+            /*ERROR("Error mapping buffer %i", b);
+            ERROR("mmap: %s", strerror(errno));*/
+            s->req.count = b;
+            src_v4l2_free_mmap(src);
+            free(s->buffer);
+            s->buffer = NULL;
+            return(-1);
+        }
+
+        /*DEBUG("%i length=%d", b, buf.length);*/
+    }
+
+    /* Flag mmap mode for src_v4l2_grab()/src_v4l2_close(). */
+    s->map = -1;
+
+    for(b = 0; b < s->req.count; b++)
+    {
+        memset(&s->buf, 0, sizeof(s->buf));
+
+        s->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        s->buf.memory = V4L2_MEMORY_MMAP;
+        s->buf.index = b;
+
+        if(ioctl(s->fd, VIDIOC_QBUF, &s->buf) == -1)
+        {
+            /*ERROR("VIDIOC_QBUF: %s", strerror(errno));*/
+            src_v4l2_free_mmap(src);
+            free(s->buffer);
+            s->buffer = NULL;
+            s->map = 0;
+            return(-1);
+        }
+    }
+
+    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+    if(ioctl(s->fd, VIDIOC_STREAMON, &type) == -1)
+    {
+        /*ERROR("Error starting stream.");
+        ERROR("VIDIOC_STREAMON: %s", strerror(errno));*/
+        src_v4l2_free_mmap(src);
+        free(s->buffer);
+        s->buffer = NULL;
+        s->map = 0;
+        return(-1);
+    }
+
+    return(0);
+}
+
+/* Fall-back capture setup using the read() interface: allocate a
+ * single buffer large enough for one frame (fmt.pix.sizeimage bytes).
+ * Returns 0 on success, -1 when the device lacks READWRITE support or
+ * memory runs out (in which case s->buffer is left NULL). */
+int src_v4l2_set_read(src_t *src)
+{
+    src_v4l2_t *s = (src_v4l2_t *) src->state;
+
+    if(~s->cap.capabilities & V4L2_CAP_READWRITE) return(-1);
+
+    s->buffer = calloc(1, sizeof(v4l2_buffer_t));
+    if(!s->buffer)
+    {
+        /*ERROR("Out of memory.");*/
+        return(-1);
+    }
+
+    s->buffer[0].length = s->fmt.fmt.pix.sizeimage;
+    s->buffer[0].start = malloc(s->buffer[0].length);
+    if(s->buffer[0].start) return(0);
+
+    /* The frame buffer could not be allocated: roll back. */
+    /*ERROR("Out of memory.");*/
+    free(s->buffer);
+    s->buffer = NULL;
+
+    return(-1);
+}
+
+/* Open and fully configure the V4L2 device named by src->source:
+ * query capabilities, select the input, apply user controls,
+ * negotiate the pixel format and frame rate, then prime either mmap
+ * streaming or the read() fall-back. Returns 0 on success, -1/-2 on
+ * failure (all acquired state is torn down).
+ *
+ * FIXES over the original: calloc() arguments are in the
+ * conventional (count, size) order, and src->state is reset to NULL
+ * when the device cannot be opened, instead of being left pointing
+ * at freed memory. */
+static int src_v4l2_open(src_t *src)
+{
+    src_v4l2_t *s;
+
+    if(!src->source)
+    {
+        /*ERROR("No device name specified.");*/
+        return(-2);
+    }
+
+    /* Allocate memory for the state structure. */
+    s = calloc(1, sizeof(src_v4l2_t));
+    if(!s)
+    {
+        /*ERROR("Out of memory.");*/
+        return(-2);
+    }
+
+    src->state = (void *) s;
+
+    /* Open the device. */
+    s->fd = open(src->source, O_RDWR | O_NONBLOCK);
+    if(s->fd < 0)
+    {
+        /*ERROR("Error opening device: %s", src->source);
+        ERROR("open: %s", strerror(errno));*/
+        free(s);
+        src->state = NULL; /* don't leave a dangling state pointer */
+        return(-2);
+    }
+
+    /*MSG("%s opened.", src->source);*/
+
+    /* Get the device capabilities. */
+    if(src_v4l2_get_capability(src))
+    {
+        src_v4l2_close(src);
+        return(-2);
+    }
+
+    /* Set the input. */
+    if(src_v4l2_set_input(src))
+    {
+        src_v4l2_close(src);
+        return(-1);
+    }
+
+    /* Set picture options. */
+    src_v4l2_set_controls(src);
+
+    /* Set the pixel format. */
+    if(src_v4l2_set_pix_format(src))
+    {
+        src_v4l2_close(src);
+        return(-1);
+    }
+
+    /* Set the frame-rate if > 0 */
+    if(src->fps) src_v4l2_set_fps(src);
+
+    /* Delay to let the image settle down. */
+    if(src->delay)
+    {
+        /*MSG("Delaying %i seconds.", src->delay);*/
+        usleep(src->delay * 1000 * 1000);
+    }
+
+    /* Try to setup mmap. */
+    if(!src->use_read && src_v4l2_set_mmap(src))
+    {
+        /*WARN("Unable to use mmap. Using read instead.");*/
+        src->use_read = -1;
+    }
+
+    /* If unable to use mmap or user requested read(). */
+    if(src->use_read)
+    {
+        if(src_v4l2_set_read(src))
+        {
+            /*ERROR("Unable to use read.");*/
+            src_v4l2_close(src);
+            return(-1);
+        }
+    }
+
+    /* No frame has been handed out to the caller yet. */
+    s->pframe = -1;
+
+    return(0);
+}
+
+/* Tear down a capture session: release the frame buffer(s) - mmap or
+ * read mode, selected by s->map - close the device and free the
+ * state. Always returns 0.
+ *
+ * FIXES over the original: tolerates a NULL state pointer, and resets
+ * src->state to NULL after freeing so a second close (or any other
+ * user of src->state) can never touch freed memory. */
+static int src_v4l2_close(src_t *src)
+{
+    src_v4l2_t *s = (src_v4l2_t *) src->state;
+
+    if(s)
+    {
+        if(s->buffer)
+        {
+            if(!s->map) free(s->buffer[0].start); /* read() mode */
+            else src_v4l2_free_mmap(src);
+            free(s->buffer);
+        }
+        if(s->fd >= 0) close(s->fd);
+        free(s);
+        src->state = NULL;
+    }
+
+    return(0);
+}
+
+/* Fetch the next frame into src->img/src->length. In mmap mode the
+ buffer handed out on the previous call (s->pframe) is first
+ re-queued, then a freshly filled buffer is dequeued; in read() mode
+ the frame is read into the single local buffer. When src->timeout
+ is non-zero, select() first waits up to that many seconds for a
+ frame. Returns 0 on success, -1 on error or timeout. The returned
+ image memory is owned by this backend, not the caller. */
+static int src_v4l2_grab(src_t *src)
+{
+ src_v4l2_t *s = (src_v4l2_t *) src->state;
+
+ if(src->timeout)
+ {
+ fd_set fds;
+ struct timeval tv;
+ int r;
+
+ /* Is a frame ready? */
+ FD_ZERO(&fds);
+ FD_SET(s->fd, &fds);
+
+ tv.tv_sec = src->timeout;
+ tv.tv_usec = 0;
+
+ r = select(s->fd + 1, &fds, NULL, NULL, &tv);
+
+ if(r == -1)
+ {
+ /*ERROR("select: %s", strerror(errno));*/
+ return(-1);
+ }
+
+ if(!r)
+ {
+ /*ERROR("Timed out waiting for frame!");*/
+ return(-1);
+ }
+ }
+
+ if(s->map)
+ {
+ /* Give the previously dequeued buffer back to the driver. */
+ if(s->pframe >= 0)
+ {
+ if(ioctl(s->fd, VIDIOC_QBUF, &s->buf) == -1)
+ {
+ /*ERROR("VIDIOC_QBUF: %s", strerror(errno));*/
+ return(-1);
+ }
+ }
+
+ memset(&s->buf, 0, sizeof(s->buf));
+
+ s->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ s->buf.memory = V4L2_MEMORY_MMAP;
+
+ /* Dequeue the next filled buffer; its index selects the mapping. */
+ if(ioctl(s->fd, VIDIOC_DQBUF, &s->buf) == -1)
+ {
+ /*ERROR("VIDIOC_DQBUF: %s", strerror(errno));*/
+ return(-1);
+ }
+
+ src->img = s->buffer[s->buf.index].start;
+ src->length = s->buffer[s->buf.index].length;
+
+ /* Remember which buffer the caller holds until the next grab. */
+ s->pframe = s->buf.index;
+ }
+ else
+ {
+ ssize_t r;
+
+ r = read(s->fd, s->buffer[0].start, s->buffer[0].length);
+ if(r <= 0)
+ {
+ /*ERROR("Unable to read a frame.");
+ ERROR("read: %s", strerror(errno));*/
+ return(-1);
+ }
+
+ src->img = s->buffer[0].start;
+ src->length = r;
+ }
+
+ return(0);
+}
+
+/* Module descriptor exported to the source framework: the V4L2
+ device-capture backend. */
+src_mod_t src_v4l2 = {
+ "v4l2", SRC_TYPE_DEVICE,
+ src_v4l2_open,
+ src_v4l2_close,
+ src_v4l2_grab
+};
+
+#else /* #ifdef HAVE_V4L2 */
+
+/* Stub descriptor used when V4L2 support is compiled out. */
+src_mod_t src_v4l2 = {
+ "", SRC_TYPE_NONE,
+ NULL,
+ NULL,
+ NULL
+};
+
+#endif /* #ifdef HAVE_V4L2 */
+
--- /dev/null
+/***************************************************************************
+ * Copyright (C) 2010 by Jean-Baptiste Mardelle (jb@kdenlive.org) *
+ * *
+ * This program is free software; you can redistribute it and/or modify *
+ * it under the terms of the GNU General Public License as published by *
+ * the Free Software Foundation; either version 2 of the License, or *
+ * (at your option) any later version. *
+ * *
+ * This program is distributed in the hope that it will be useful, *
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of *
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
+ * GNU General Public License for more details. *
+ * *
+ * You should have received a copy of the GNU General Public License *
+ * along with this program; if not, write to the *
+ * Free Software Foundation, Inc., *
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA *
+ ***************************************************************************/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <pthread.h>
+#include <unistd.h>
+#include <fcntl.h>
+
+#include <QDebug>
+#include <QImage>
+#include <QTimer>
+
+#include <KDebug>
+#include <KLocale>
+
+#include "v4lcapture.h"
+#include "kdenlivesettings.h"
+
+
+static src_t v4lsrc;
+
+/* Clamp one colour component to the 0-255 range of an 8-bit channel. */
+static inline int clamp255(int v)
+{
+    if(v < 0) return 0;
+    if(v > 255) return 255;
+    return v;
+}
+
+/* Store one BGRA pixel computed from a luma sample Y and the chroma
+ * pair (U, V) shared by two pixels, using the integer BT.601
+ * coefficients (298/409/100/208/516, rounded with +128 >> 8).
+ * Returns the advanced write offset. */
+static inline int storePixel(unsigned char *rgb_buffer, int rgb_ptr,
+                             int Y, int U, int V)
+{
+    int r = (298 * (Y - 16) + 409 * (V - 128) + 128) >> 8;
+    int g = (298 * (Y - 16) - 100 * (U - 128) - 208 * (V - 128) + 128) >> 8;
+    int b = (298 * (Y - 16) + 516 * (U - 128) + 128) >> 8;
+
+    rgb_buffer[rgb_ptr] = clamp255(b);
+    rgb_buffer[rgb_ptr + 1] = clamp255(g);
+    rgb_buffer[rgb_ptr + 2] = clamp255(r);
+    rgb_buffer[rgb_ptr + 3] = 255;
+
+    return rgb_ptr + 4;
+}
+
+/* Convert a packed UYVY 4:2:2 frame (bytes U Y V Y2 per pixel pair,
+ * i.e. 2 bytes per pixel) into 32-bit BGRA as expected by
+ * QImage::Format_RGB32 (4 bytes per pixel). The per-pixel maths was
+ * previously duplicated inline for both pixels of each pair; it now
+ * lives in storePixel() with identical results. */
+void yuv2rgb_int3(unsigned char *yuv_buffer, unsigned char *rgb_buffer, int width, int height)
+{
+    int len = width * height / 2;
+    int rgb_ptr = 0;
+    int y_ptr = 0;
+    int t;
+
+    for(t = 0; t < len; t++) { /* process 2 pixels at a time */
+        int U = yuv_buffer[y_ptr];
+        int Y = yuv_buffer[y_ptr + 1];
+        int V = yuv_buffer[y_ptr + 2];
+        int Y2 = yuv_buffer[y_ptr + 3];
+        y_ptr += 4;
+
+        rgb_ptr = storePixel(rgb_buffer, rgb_ptr, Y, U, V);
+        rgb_ptr = storePixel(rgb_buffer, rgb_ptr, Y2, U, V);
+    }
+}
+
+/* Build the V4L capture widget: a QLabel showing preview frames is
+ added to the supplied layout. Preview updates stay disabled
+ (m_update == false) until startPreview() succeeds. */
+V4lCaptureHandler::V4lCaptureHandler(QVBoxLayout *lay, QWidget *parent):
+ CaptureHandler(lay, parent)
+ , m_update(false)
+{
+ m_display = new QLabel;
+ lay->addWidget(m_display);
+}
+
+/* Start the preview: build an fswebcam configuration from the
+ Kdenlive settings (device node and capture size), copy it into the
+ static v4lsrc source descriptor, open the device and arm the 200 ms
+ update timer. The deviceId/captureMode arguments of the generic
+ interface are unused by this backend.
+ NOTE(review): 'config' and its strdup'ed device string are never
+ freed - leaked on every preview start. src_open()'s ownership of
+ 'source' is not visible here; confirm before freeing. */
+void V4lCaptureHandler::startPreview(int /*deviceId*/, int /*captureMode*/)
+{
+ m_display->setHidden(false);
+ fswebcam_config_t *config;
+ /* Prepare the configuration structure. */
+ config = (fswebcam_config_t *) calloc(sizeof(fswebcam_config_t), 1);
+ if(!config) {
+ /*WARN("Out of memory.");*/
+ fprintf(stderr, "Out of MEM....");
+ return;
+ }
+
+ /* Set the defaults. */
+ config->loop = 0;
+ config->offset = 0;
+ config->background = 0;
+ config->pidfile = NULL;
+ config->logfile = NULL;
+ config->gmt = 0;
+ config->start = 0;
+ config->device = strdup(KdenliveSettings::video4vdevice().toUtf8().constData());
+ config->input = NULL;
+ config->tuner = 0;
+ config->frequency = 0;
+ config->delay = 0;
+ config->use_read = 0;
+ config->list = 0;
+ /* Capture size comes from the "WIDTHxHEIGHT" settings string. */
+ config->width = KdenliveSettings::video4size().section("x", 0, 0).toInt();/*384;*/
+ config->height = KdenliveSettings::video4size().section("x", -1).toInt();/*288;*/
+ config->fps = 0;
+ config->frames = 1;
+ config->skipframes = 0;
+ config->palette = SRC_PAL_ANY;
+ config->option = NULL;
+ config->dumpframe = NULL;
+ config->jobs = 0;
+ config->job = NULL;
+
+ /* Set defaults and parse the command line. */
+ /*if(fswc_getopts(config, argc, argv)) return(-1);*/
+
+
+ /* Record the start time. */
+ config->start = time(NULL);
+ /* Set source options... */
+ memset(&v4lsrc, 0, sizeof(v4lsrc));
+ v4lsrc.input = config->input;
+ v4lsrc.tuner = config->tuner;
+ v4lsrc.frequency = config->frequency;
+ v4lsrc.delay = config->delay;
+ v4lsrc.timeout = 10; /* seconds */
+ v4lsrc.use_read = config->use_read;
+ v4lsrc.list = config->list;
+ v4lsrc.palette = config->palette;
+ v4lsrc.width = config->width;
+ v4lsrc.height = config->height;
+ v4lsrc.fps = config->fps;
+ v4lsrc.option = config->option;
+ char *source = config->device;
+
+ /* Open the device; on failure the preview simply never starts. */
+ if(src_open(&v4lsrc, source) != 0) return;
+ m_update = true;
+ QTimer::singleShot(200, this, SLOT(slotUpdate()));
+}
+
+V4lCaptureHandler::~V4lCaptureHandler()
+{
+    // Stop the preview loop and close the V4L device before the widget is
+    // destroyed. stopCapture() alone is a no-op, so without stopPreview()
+    // the device handle opened by src_open() would be leaked.
+    stopPreview();
+    stopCapture();
+}
+
+// Grab one frame from the V4L source, convert it to RGB32, display it in
+// the preview label, optionally save it to the pending capture path, and
+// re-arm the 200 ms preview timer while previewing is active.
+void V4lCaptureHandler::slotUpdate()
+{
+    if(!m_update) return;
+    src_grab(&v4lsrc);
+    uint8_t *img = (uint8_t *) v4lsrc.img;
+    uint32_t i = v4lsrc.width * v4lsrc.height;
+
+    /* Sanity check on the grabbed buffer size. NOTE(review): '<<' binds
+     * tighter than '<', so this has always compared (length * 4) against
+     * the pixel count; parentheses added to make that explicit without
+     * changing behaviour. Confirm this is the intended bound for a
+     * 2-byte-per-pixel packed YUV 4:2:2 frame. */
+    if((v4lsrc.length << 2) < i) return;
+
+    QImage qimg(v4lsrc.width, v4lsrc.height, QImage::Format_RGB32);
+    //Format_ARGB32_Premultiplied
+    //convert from uyvy422 to rgba
+    yuv2rgb_int3((uchar *)img, (uchar *)qimg.bits(), v4lsrc.width, v4lsrc.height);
+    if(!m_captureFramePath.isEmpty()) {
+        /* A still-frame grab was requested via captureFrame(): save once. */
+        qimg.save(m_captureFramePath);
+        m_captureFramePath.clear();
+    }
+    m_display->setPixmap(QPixmap::fromImage(qimg));
+    /* Re-arm the preview timer (~5 fps) unless stopPreview() was called. */
+    if(m_update) QTimer::singleShot(200, this, SLOT(slotUpdate()));
+}
+
+// Not implemented: recording to a file is not supported by the V4L
+// handler yet; only still-frame grabs (captureFrame()) are available.
+void V4lCaptureHandler::startCapture(const QString &/*path*/)
+{
+}
+
+// Not implemented: counterpart of the (also unimplemented) startCapture().
+void V4lCaptureHandler::stopCapture()
+{
+}
+
+// Request a still-frame grab: the next frame previewed by slotUpdate()
+// will be saved to 'fname', after which the request is cleared.
+void V4lCaptureHandler::captureFrame(const QString &fname)
+{
+ m_captureFramePath = fname;
+}
+
+// Not implemented: the V4L preview does not support image overlays.
+void V4lCaptureHandler::showOverlay(QImage /*img*/, bool /*transparent*/)
+{
+}
+
+// Not implemented: counterpart of the (also unimplemented) showOverlay().
+void V4lCaptureHandler::hideOverlay()
+{
+}
+
+// Show or hide the preview widget.
+void V4lCaptureHandler::hidePreview(bool hide)
+{
+    m_display->setVisible(!hide);
+}
+
+// Stop the preview loop and close the V4L device. Clearing m_update first
+// prevents a pending slotUpdate() timer shot from grabbing on a closed
+// source. Safe to call when preview is not running (returns immediately).
+void V4lCaptureHandler::stopPreview()
+{
+ if(!m_update) return;
+ m_update = false;
+ src_close(&v4lsrc);
+}
--- /dev/null
+/***************************************************************************
+ * Copyright (C) 2010 by Jean-Baptiste Mardelle (jb@kdenlive.org) *
+ * *
+ * This program is free software; you can redistribute it and/or modify *
+ * it under the terms of the GNU General Public License as published by *
+ * the Free Software Foundation; either version 2 of the License, or *
+ * (at your option) any later version. *
+ * *
+ * This program is distributed in the hope that it will be useful, *
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of *
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
+ * GNU General Public License for more details. *
+ * *
+ * You should have received a copy of the GNU General Public License *
+ * along with this program; if not, write to the *
+ * Free Software Foundation, Inc., *
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA *
+ ***************************************************************************/
+
+#ifndef __V4LCAPTUREHANDLER_H__
+#define __V4LCAPTUREHANDLER_H__
+
+#include "../stopmotion/capturehandler.h"
+#include "src.h"
+
+#include <QWidget>
+#include <QObject>
+#include <QLayout>
+#include <QLabel>
+
+// Video4Linux implementation of the stop-motion CaptureHandler interface.
+// Previews webcam frames in a QLabel and supports still-frame grabs;
+// recording (startCapture/stopCapture) and overlays are stubs.
+class V4lCaptureHandler : public CaptureHandler
+{
+ Q_OBJECT
+public:
+ // Creates the preview label and adds it to 'lay'.
+ V4lCaptureHandler(QVBoxLayout *lay, QWidget *parent = 0);
+ ~V4lCaptureHandler();
+ // Opens the configured V4L device and starts the 200 ms preview loop.
+ void startPreview(int deviceId, int captureMode);
+ // Stops the preview loop and closes the device.
+ void stopPreview();
+ // Not implemented for the V4L backend.
+ void startCapture(const QString &path);
+ // Not implemented for the V4L backend.
+ void stopCapture();
+ // Saves the next previewed frame to 'fname'.
+ void captureFrame(const QString &fname);
+ // Not implemented for the V4L backend.
+ void showOverlay(QImage img, bool transparent = true);
+ // Not implemented for the V4L backend.
+ void hideOverlay();
+ // Shows/hides the preview label.
+ void hidePreview(bool hide);
+
+private:
+ bool m_update; // true while the preview loop is running
+ QLabel *m_display; // preview widget owned by the layout
+ QString m_captureFramePath; // pending still-frame destination, if any
+
+private slots:
+ // Grabs, converts and displays one frame, then re-arms itself.
+ void slotUpdate();
+signals:
+};
+
+
+#endif
--- /dev/null
+/*
+ * Video for Linux Two header file
+ *
+ * Copyright (C) 1999-2007 the contributors
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * Alternatively you can redistribute this file under the terms of the
+ * BSD license as stated below:
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ * 3. The names of its contributors may not be used to endorse or promote
+ * products derived from this software without specific prior written
+ * permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Header file for v4l or V4L2 drivers and applications
+ * with public API.
+ * All kernel-specific stuff were moved to media/v4l2-dev.h, so
+ * no #if __KERNEL tests are allowed here
+ *
+ * See http://linuxtv.org for more info
+ *
+ * Author: Bill Dirks <bill@thedirks.org>
+ * Justin Schoeman
+ * Hans Verkuil <hverkuil@xs4all.nl>
+ * et al.
+ */
+#ifndef __LINUX_VIDEODEV2_H
+#define __LINUX_VIDEODEV2_H
+
+#include <sys/time.h>
+
+#include <linux/ioctl.h>
+#include <linux/types.h>
+
+/*
+ * Common stuff for both V4L1 and V4L2
+ * Moved from videodev.h
+ */
+#define VIDEO_MAX_FRAME 32
+
+
+/* These defines are V4L1 specific and should not be used with the V4L2 API!
+ They will be removed from this header in the future. */
+
+#define VID_TYPE_CAPTURE 1 /* Can capture */
+#define VID_TYPE_TUNER 2 /* Can tune */
+#define VID_TYPE_TELETEXT 4 /* Does teletext */
+#define VID_TYPE_OVERLAY 8 /* Overlay onto frame buffer */
+#define VID_TYPE_CHROMAKEY 16 /* Overlay by chromakey */
+#define VID_TYPE_CLIPPING 32 /* Can clip */
+#define VID_TYPE_FRAMERAM 64 /* Uses the frame buffer memory */
+#define VID_TYPE_SCALES 128 /* Scalable */
+#define VID_TYPE_MONOCHROME 256 /* Monochrome only */
+#define VID_TYPE_SUBCAPTURE 512 /* Can capture subareas of the image */
+#define VID_TYPE_MPEG_DECODER 1024 /* Can decode MPEG streams */
+#define VID_TYPE_MPEG_ENCODER 2048 /* Can encode MPEG streams */
+#define VID_TYPE_MJPEG_DECODER 4096 /* Can decode MJPEG streams */
+#define VID_TYPE_MJPEG_ENCODER 8192 /* Can encode MJPEG streams */
+
+/*
+ * M I S C E L L A N E O U S
+ */
+
+/* Four-character-code (FOURCC) */
+#define v4l2_fourcc(a, b, c, d)\
+ ((__u32)(a) | ((__u32)(b) << 8) | ((__u32)(c) << 16) | ((__u32)(d) << 24))
+
+/*
+ * E N U M S
+ */
+enum v4l2_field {
+ V4L2_FIELD_ANY = 0, /* driver can choose from none,
+ top, bottom, interlaced
+ depending on whatever it thinks
+ is approximate ... */
+ V4L2_FIELD_NONE = 1, /* this device has no fields ... */
+ V4L2_FIELD_TOP = 2, /* top field only */
+ V4L2_FIELD_BOTTOM = 3, /* bottom field only */
+ V4L2_FIELD_INTERLACED = 4, /* both fields interlaced */
+ V4L2_FIELD_SEQ_TB = 5, /* both fields sequential into one
+ buffer, top-bottom order */
+ V4L2_FIELD_SEQ_BT = 6, /* same as above + bottom-top order */
+ V4L2_FIELD_ALTERNATE = 7, /* both fields alternating into
+ separate buffers */
+ V4L2_FIELD_INTERLACED_TB = 8, /* both fields interlaced, top field
+ first and the top field is
+ transmitted first */
+ V4L2_FIELD_INTERLACED_BT = 9, /* both fields interlaced, top field
+ first and the bottom field is
+ transmitted first */
+};
+#define V4L2_FIELD_HAS_TOP(field) \
+ ((field) == V4L2_FIELD_TOP ||\
+ (field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT ||\
+ (field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+#define V4L2_FIELD_HAS_BOTTOM(field) \
+ ((field) == V4L2_FIELD_BOTTOM ||\
+ (field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT ||\
+ (field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+#define V4L2_FIELD_HAS_BOTH(field) \
+ ((field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT ||\
+ (field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+
+enum v4l2_buf_type {
+ V4L2_BUF_TYPE_VIDEO_CAPTURE = 1,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT = 2,
+ V4L2_BUF_TYPE_VIDEO_OVERLAY = 3,
+ V4L2_BUF_TYPE_VBI_CAPTURE = 4,
+ V4L2_BUF_TYPE_VBI_OUTPUT = 5,
+ V4L2_BUF_TYPE_SLICED_VBI_CAPTURE = 6,
+ V4L2_BUF_TYPE_SLICED_VBI_OUTPUT = 7,
+#if 1
+ /* Experimental */
+ V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY = 8,
+#endif
+ V4L2_BUF_TYPE_PRIVATE = 0x80,
+};
+
+enum v4l2_ctrl_type {
+ V4L2_CTRL_TYPE_INTEGER = 1,
+ V4L2_CTRL_TYPE_BOOLEAN = 2,
+ V4L2_CTRL_TYPE_MENU = 3,
+ V4L2_CTRL_TYPE_BUTTON = 4,
+ V4L2_CTRL_TYPE_INTEGER64 = 5,
+ V4L2_CTRL_TYPE_CTRL_CLASS = 6,
+};
+
+enum v4l2_tuner_type {
+ V4L2_TUNER_RADIO = 1,
+ V4L2_TUNER_ANALOG_TV = 2,
+ V4L2_TUNER_DIGITAL_TV = 3,
+};
+
+enum v4l2_memory {
+ V4L2_MEMORY_MMAP = 1,
+ V4L2_MEMORY_USERPTR = 2,
+ V4L2_MEMORY_OVERLAY = 3,
+};
+
+/* see also http://vektor.theorem.ca/graphics/ycbcr/ */
+enum v4l2_colorspace {
+ /* ITU-R 601 -- broadcast NTSC/PAL */
+ V4L2_COLORSPACE_SMPTE170M = 1,
+
+ /* 1125-Line (US) HDTV */
+ V4L2_COLORSPACE_SMPTE240M = 2,
+
+ /* HD and modern captures. */
+ V4L2_COLORSPACE_REC709 = 3,
+
+ /* broken BT878 extents (601, luma range 16-253 instead of 16-235) */
+ V4L2_COLORSPACE_BT878 = 4,
+
+ /* These should be useful. Assume 601 extents. */
+ V4L2_COLORSPACE_470_SYSTEM_M = 5,
+ V4L2_COLORSPACE_470_SYSTEM_BG = 6,
+
+ /* I know there will be cameras that send this. So, this is
+ * unspecified chromaticities and full 0-255 on each of the
+ * Y'CbCr components
+ */
+ V4L2_COLORSPACE_JPEG = 7,
+
+ /* For RGB colourspaces, this is probably a good start. */
+ V4L2_COLORSPACE_SRGB = 8,
+};
+
+enum v4l2_priority {
+ V4L2_PRIORITY_UNSET = 0, /* not initialized */
+ V4L2_PRIORITY_BACKGROUND = 1,
+ V4L2_PRIORITY_INTERACTIVE = 2,
+ V4L2_PRIORITY_RECORD = 3,
+ V4L2_PRIORITY_DEFAULT = V4L2_PRIORITY_INTERACTIVE,
+};
+
+struct v4l2_rect {
+ __s32 left;
+ __s32 top;
+ __s32 width;
+ __s32 height;
+};
+
+struct v4l2_fract {
+ __u32 numerator;
+ __u32 denominator;
+};
+
+/*
+ * D R I V E R C A P A B I L I T I E S
+ */
+struct v4l2_capability {
+ __u8 driver[16]; /* i.e. "bttv" */
+ __u8 card[32]; /* i.e. "Hauppauge WinTV" */
+ __u8 bus_info[32]; /* "PCI:" + pci_name(pci_dev) */
+ __u32 version; /* should use KERNEL_VERSION() */
+ __u32 capabilities; /* Device capabilities */
+ __u32 reserved[4];
+};
+
+/* Values for 'capabilities' field */
+#define V4L2_CAP_VIDEO_CAPTURE 0x00000001 /* Is a video capture device */
+#define V4L2_CAP_VIDEO_OUTPUT 0x00000002 /* Is a video output device */
+#define V4L2_CAP_VIDEO_OVERLAY 0x00000004 /* Can do video overlay */
+#define V4L2_CAP_VBI_CAPTURE 0x00000010 /* Is a raw VBI capture device */
+#define V4L2_CAP_VBI_OUTPUT 0x00000020 /* Is a raw VBI output device */
+#define V4L2_CAP_SLICED_VBI_CAPTURE 0x00000040 /* Is a sliced VBI capture device */
+#define V4L2_CAP_SLICED_VBI_OUTPUT 0x00000080 /* Is a sliced VBI output device */
+#define V4L2_CAP_RDS_CAPTURE 0x00000100 /* RDS data capture */
+#define V4L2_CAP_VIDEO_OUTPUT_OVERLAY 0x00000200 /* Can do video output overlay */
+#define V4L2_CAP_HW_FREQ_SEEK 0x00000400 /* Can do hardware frequency seek */
+
+#define V4L2_CAP_TUNER 0x00010000 /* has a tuner */
+#define V4L2_CAP_AUDIO 0x00020000 /* has audio support */
+#define V4L2_CAP_RADIO 0x00040000 /* is a radio device */
+
+#define V4L2_CAP_READWRITE 0x01000000 /* read/write systemcalls */
+#define V4L2_CAP_ASYNCIO 0x02000000 /* async I/O */
+#define V4L2_CAP_STREAMING 0x04000000 /* streaming I/O ioctls */
+
+/*
+ * V I D E O I M A G E F O R M A T
+ */
+struct v4l2_pix_format {
+ __u32 width;
+ __u32 height;
+ __u32 pixelformat;
+ enum v4l2_field field;
+ __u32 bytesperline; /* for padding, zero if unused */
+ __u32 sizeimage;
+ enum v4l2_colorspace colorspace;
+ __u32 priv; /* private data, depends on pixelformat */
+};
+
+/* Pixel format FOURCC depth Description */
+#define V4L2_PIX_FMT_RGB332 v4l2_fourcc('R', 'G', 'B', '1') /* 8 RGB-3-3-2 */
+#define V4L2_PIX_FMT_RGB444 v4l2_fourcc('R', '4', '4', '4') /* 16 xxxxrrrr ggggbbbb */
+#define V4L2_PIX_FMT_RGB555 v4l2_fourcc('R', 'G', 'B', 'O') /* 16 RGB-5-5-5 */
+#define V4L2_PIX_FMT_RGB565 v4l2_fourcc('R', 'G', 'B', 'P') /* 16 RGB-5-6-5 */
+#define V4L2_PIX_FMT_RGB555X v4l2_fourcc('R', 'G', 'B', 'Q') /* 16 RGB-5-5-5 BE */
+#define V4L2_PIX_FMT_RGB565X v4l2_fourcc('R', 'G', 'B', 'R') /* 16 RGB-5-6-5 BE */
+#define V4L2_PIX_FMT_BGR24 v4l2_fourcc('B', 'G', 'R', '3') /* 24 BGR-8-8-8 */
+#define V4L2_PIX_FMT_RGB24 v4l2_fourcc('R', 'G', 'B', '3') /* 24 RGB-8-8-8 */
+#define V4L2_PIX_FMT_BGR32 v4l2_fourcc('B', 'G', 'R', '4') /* 32 BGR-8-8-8-8 */
+#define V4L2_PIX_FMT_RGB32 v4l2_fourcc('R', 'G', 'B', '4') /* 32 RGB-8-8-8-8 */
+#define V4L2_PIX_FMT_GREY v4l2_fourcc('G', 'R', 'E', 'Y') /* 8 Greyscale */
+#define V4L2_PIX_FMT_Y16 v4l2_fourcc('Y', '1', '6', ' ') /* 16 Greyscale */
+#define V4L2_PIX_FMT_PAL8 v4l2_fourcc('P', 'A', 'L', '8') /* 8 8-bit palette */
+#define V4L2_PIX_FMT_YVU410 v4l2_fourcc('Y', 'V', 'U', '9') /* 9 YVU 4:1:0 */
+#define V4L2_PIX_FMT_YVU420 v4l2_fourcc('Y', 'V', '1', '2') /* 12 YVU 4:2:0 */
+#define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_YUV422P v4l2_fourcc('4', '2', '2', 'P') /* 16 YVU422 planar */
+#define V4L2_PIX_FMT_YUV411P v4l2_fourcc('4', '1', '1', 'P') /* 16 YVU411 planar */
+#define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') /* 12 YUV 4:1:1 */
+#define V4L2_PIX_FMT_YUV444 v4l2_fourcc('Y', '4', '4', '4') /* 16 xxxxyyyy uuuuvvvv */
+#define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') /* 16 YUV-5-5-5 */
+#define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') /* 16 YUV-5-6-5 */
+#define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') /* 32 YUV-8-8-8-8 */
+
+/* two planes -- one Y, one Cr + Cb interleaved */
+#define V4L2_PIX_FMT_NV12 v4l2_fourcc('N', 'V', '1', '2') /* 12 Y/CbCr 4:2:0 */
+#define V4L2_PIX_FMT_NV21 v4l2_fourcc('N', 'V', '2', '1') /* 12 Y/CrCb 4:2:0 */
+#define V4L2_PIX_FMT_NV16 v4l2_fourcc('N', 'V', '1', '6') /* 16 Y/CbCr 4:2:2 */
+#define V4L2_PIX_FMT_NV61 v4l2_fourcc('N', 'V', '6', '1') /* 16 Y/CrCb 4:2:2 */
+
+/* The following formats are not defined in the V4L2 specification */
+#define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') /* 9 YUV 4:1:0 */
+#define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') /* 12 YUV 4:2:0 */
+#define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_HI240 v4l2_fourcc('H', 'I', '2', '4') /* 8 8-bit color */
+#define V4L2_PIX_FMT_HM12 v4l2_fourcc('H', 'M', '1', '2') /* 8 YUV 4:2:0 16x16 macroblocks */
+
+/* see http://www.siliconimaging.com/RGB%20Bayer.htm */
+#define V4L2_PIX_FMT_SBGGR8 v4l2_fourcc('B', 'A', '8', '1') /* 8 BGBG.. GRGR.. */
+#define V4L2_PIX_FMT_SGBRG8 v4l2_fourcc('G', 'B', 'R', 'G') /* 8 GBGB.. RGRG.. */
+#define V4L2_PIX_FMT_SGRBG8 v4l2_fourcc('G', 'R', 'B', 'G') /* 8 GRGR.. BGBG.. */
+
+/*
+ * 10bit raw bayer, expanded to 16 bits
+ * xxxxrrrrrrrrrrxxxxgggggggggg xxxxggggggggggxxxxbbbbbbbbbb...
+ */
+#define V4L2_PIX_FMT_SGRBG10 v4l2_fourcc('B', 'A', '1', '0')
+/* 10bit raw bayer DPCM compressed to 8 bits */
+#define V4L2_PIX_FMT_SGRBG10DPCM8 v4l2_fourcc('B', 'D', '1', '0')
+#define V4L2_PIX_FMT_SBGGR16 v4l2_fourcc('B', 'Y', 'R', '2') /* 16 BGBG.. GRGR.. */
+
+/* compressed formats */
+#define V4L2_PIX_FMT_MJPEG v4l2_fourcc('M', 'J', 'P', 'G') /* Motion-JPEG */
+#define V4L2_PIX_FMT_JPEG v4l2_fourcc('J', 'P', 'E', 'G') /* JFIF JPEG */
+#define V4L2_PIX_FMT_DV v4l2_fourcc('d', 'v', 's', 'd') /* 1394 */
+#define V4L2_PIX_FMT_MPEG v4l2_fourcc('M', 'P', 'E', 'G') /* MPEG-1/2/4 */
+
+/* Vendor-specific formats */
+#define V4L2_PIX_FMT_WNVA v4l2_fourcc('W', 'N', 'V', 'A') /* Winnov hw compress */
+#define V4L2_PIX_FMT_SN9C10X v4l2_fourcc('S', '9', '1', '0') /* SN9C10x compression */
+#define V4L2_PIX_FMT_SN9C20X_I420 v4l2_fourcc('S', '9', '2', '0') /* SN9C20x YUV 4:2:0 */
+#define V4L2_PIX_FMT_PWC1 v4l2_fourcc('P', 'W', 'C', '1') /* pwc older webcam */
+#define V4L2_PIX_FMT_PWC2 v4l2_fourcc('P', 'W', 'C', '2') /* pwc newer webcam */
+#define V4L2_PIX_FMT_ET61X251 v4l2_fourcc('E', '6', '2', '5') /* ET61X251 compression */
+#define V4L2_PIX_FMT_SPCA501 v4l2_fourcc('S', '5', '0', '1') /* YUYV per line */
+#define V4L2_PIX_FMT_SPCA505 v4l2_fourcc('S', '5', '0', '5') /* YYUV per line */
+#define V4L2_PIX_FMT_SPCA508 v4l2_fourcc('S', '5', '0', '8') /* YUVY per line */
+#define V4L2_PIX_FMT_SPCA561 v4l2_fourcc('S', '5', '6', '1') /* compressed GBRG bayer */
+#define V4L2_PIX_FMT_PAC207 v4l2_fourcc('P', '2', '0', '7') /* compressed BGGR bayer */
+#define V4L2_PIX_FMT_MR97310A v4l2_fourcc('M', '3', '1', '0') /* compressed BGGR bayer */
+#define V4L2_PIX_FMT_SQ905C v4l2_fourcc('9', '0', '5', 'C') /* compressed RGGB bayer */
+#define V4L2_PIX_FMT_PJPG v4l2_fourcc('P', 'J', 'P', 'G') /* Pixart 73xx JPEG */
+#define V4L2_PIX_FMT_YVYU v4l2_fourcc('Y', 'V', 'Y', 'U') /* 16 YVU 4:2:2 */
+#define V4L2_PIX_FMT_OV511 v4l2_fourcc('O', '5', '1', '1') /* ov511 JPEG */
+#define V4L2_PIX_FMT_OV518 v4l2_fourcc('O', '5', '1', '8') /* ov518 JPEG */
+
+/*
+ * F O R M A T E N U M E R A T I O N
+ */
+struct v4l2_fmtdesc {
+ __u32 index; /* Format number */
+ enum v4l2_buf_type type; /* buffer type */
+ __u32 flags;
+ __u8 description[32]; /* Description string */
+ __u32 pixelformat; /* Format fourcc */
+ __u32 reserved[4];
+};
+
+#define V4L2_FMT_FLAG_COMPRESSED 0x0001
+
+#if 1
+ /* Experimental Frame Size and frame rate enumeration */
+/*
+ * F R A M E S I Z E E N U M E R A T I O N
+ */
+enum v4l2_frmsizetypes {
+ V4L2_FRMSIZE_TYPE_DISCRETE = 1,
+ V4L2_FRMSIZE_TYPE_CONTINUOUS = 2,
+ V4L2_FRMSIZE_TYPE_STEPWISE = 3,
+};
+
+struct v4l2_frmsize_discrete {
+ __u32 width; /* Frame width [pixel] */
+ __u32 height; /* Frame height [pixel] */
+};
+
+struct v4l2_frmsize_stepwise {
+ __u32 min_width; /* Minimum frame width [pixel] */
+ __u32 max_width; /* Maximum frame width [pixel] */
+ __u32 step_width; /* Frame width step size [pixel] */
+ __u32 min_height; /* Minimum frame height [pixel] */
+ __u32 max_height; /* Maximum frame height [pixel] */
+ __u32 step_height; /* Frame height step size [pixel] */
+};
+
+struct v4l2_frmsizeenum {
+ __u32 index; /* Frame size number */
+ __u32 pixel_format; /* Pixel format */
+ __u32 type; /* Frame size type the device supports. */
+
+ union { /* Frame size */
+ struct v4l2_frmsize_discrete discrete;
+ struct v4l2_frmsize_stepwise stepwise;
+ };
+
+ __u32 reserved[2]; /* Reserved space for future use */
+};
+
+/*
+ * F R A M E R A T E E N U M E R A T I O N
+ */
+enum v4l2_frmivaltypes {
+ V4L2_FRMIVAL_TYPE_DISCRETE = 1,
+ V4L2_FRMIVAL_TYPE_CONTINUOUS = 2,
+ V4L2_FRMIVAL_TYPE_STEPWISE = 3,
+};
+
+struct v4l2_frmival_stepwise {
+ struct v4l2_fract min; /* Minimum frame interval [s] */
+ struct v4l2_fract max; /* Maximum frame interval [s] */
+ struct v4l2_fract step; /* Frame interval step size [s] */
+};
+
+struct v4l2_frmivalenum {
+ __u32 index; /* Frame format index */
+ __u32 pixel_format; /* Pixel format */
+ __u32 width; /* Frame width */
+ __u32 height; /* Frame height */
+ __u32 type; /* Frame interval type the device supports. */
+
+ union { /* Frame interval */
+ struct v4l2_fract discrete;
+ struct v4l2_frmival_stepwise stepwise;
+ };
+
+ __u32 reserved[2]; /* Reserved space for future use */
+};
+#endif
+
+/*
+ * T I M E C O D E
+ */
+struct v4l2_timecode {
+ __u32 type;
+ __u32 flags;
+ __u8 frames;
+ __u8 seconds;
+ __u8 minutes;
+ __u8 hours;
+ __u8 userbits[4];
+};
+
+/* Type */
+#define V4L2_TC_TYPE_24FPS 1
+#define V4L2_TC_TYPE_25FPS 2
+#define V4L2_TC_TYPE_30FPS 3
+#define V4L2_TC_TYPE_50FPS 4
+#define V4L2_TC_TYPE_60FPS 5
+
+/* Flags */
+#define V4L2_TC_FLAG_DROPFRAME 0x0001 /* "drop-frame" mode */
+#define V4L2_TC_FLAG_COLORFRAME 0x0002
+#define V4L2_TC_USERBITS_field 0x000C
+#define V4L2_TC_USERBITS_USERDEFINED 0x0000
+#define V4L2_TC_USERBITS_8BITCHARS 0x0008
+/* The above is based on SMPTE timecodes */
+
+struct v4l2_jpegcompression {
+ int quality;
+
+ int APPn; /* Number of APP segment to be written,
+ * must be 0..15 */
+ int APP_len; /* Length of data in JPEG APPn segment */
+ char APP_data[60]; /* Data in the JPEG APPn segment. */
+
+ int COM_len; /* Length of data in JPEG COM segment */
+ char COM_data[60]; /* Data in JPEG COM segment */
+
+ __u32 jpeg_markers; /* Which markers should go into the JPEG
+ * output. Unless you exactly know what
+ * you do, leave them untouched.
+ * Inluding less markers will make the
+ * resulting code smaller, but there will
+ * be fewer aplications which can read it.
+ * The presence of the APP and COM marker
+ * is influenced by APP_len and COM_len
+ * ONLY, not by this property! */
+
+#define V4L2_JPEG_MARKER_DHT (1<<3) /* Define Huffman Tables */
+#define V4L2_JPEG_MARKER_DQT (1<<4) /* Define Quantization Tables */
+#define V4L2_JPEG_MARKER_DRI (1<<5) /* Define Restart Interval */
+#define V4L2_JPEG_MARKER_COM (1<<6) /* Comment segment */
+#define V4L2_JPEG_MARKER_APP (1<<7) /* App segment, driver will
+ * allways use APP0 */
+};
+
+/*
+ * M E M O R Y - M A P P I N G B U F F E R S
+ */
+struct v4l2_requestbuffers {
+ __u32 count;
+ enum v4l2_buf_type type;
+ enum v4l2_memory memory;
+ __u32 reserved[2];
+};
+
+struct v4l2_buffer {
+ __u32 index;
+ enum v4l2_buf_type type;
+ __u32 bytesused;
+ __u32 flags;
+ enum v4l2_field field;
+ struct timeval timestamp;
+ struct v4l2_timecode timecode;
+ __u32 sequence;
+
+ /* memory location */
+ enum v4l2_memory memory;
+ union {
+ __u32 offset;
+ unsigned long userptr;
+ } m;
+ __u32 length;
+ __u32 input;
+ __u32 reserved;
+};
+
+/* Flags for 'flags' field */
+#define V4L2_BUF_FLAG_MAPPED 0x0001 /* Buffer is mapped (flag) */
+#define V4L2_BUF_FLAG_QUEUED 0x0002 /* Buffer is queued for processing */
+#define V4L2_BUF_FLAG_DONE 0x0004 /* Buffer is ready */
+#define V4L2_BUF_FLAG_KEYFRAME 0x0008 /* Image is a keyframe (I-frame) */
+#define V4L2_BUF_FLAG_PFRAME 0x0010 /* Image is a P-frame */
+#define V4L2_BUF_FLAG_BFRAME 0x0020 /* Image is a B-frame */
+#define V4L2_BUF_FLAG_TIMECODE 0x0100 /* timecode field is valid */
+#define V4L2_BUF_FLAG_INPUT 0x0200 /* input field is valid */
+
+/*
+ * O V E R L A Y P R E V I E W
+ */
+struct v4l2_framebuffer {
+ __u32 capability;
+ __u32 flags;
+/* FIXME: in theory we should pass something like PCI device + memory
+ * region + offset instead of some physical address */
+ void *base;
+ struct v4l2_pix_format fmt;
+};
+/* Flags for the 'capability' field. Read only */
+#define V4L2_FBUF_CAP_EXTERNOVERLAY 0x0001
+#define V4L2_FBUF_CAP_CHROMAKEY 0x0002
+#define V4L2_FBUF_CAP_LIST_CLIPPING 0x0004
+#define V4L2_FBUF_CAP_BITMAP_CLIPPING 0x0008
+#define V4L2_FBUF_CAP_LOCAL_ALPHA 0x0010
+#define V4L2_FBUF_CAP_GLOBAL_ALPHA 0x0020
+#define V4L2_FBUF_CAP_LOCAL_INV_ALPHA 0x0040
+/* Flags for the 'flags' field. */
+#define V4L2_FBUF_FLAG_PRIMARY 0x0001
+#define V4L2_FBUF_FLAG_OVERLAY 0x0002
+#define V4L2_FBUF_FLAG_CHROMAKEY 0x0004
+#define V4L2_FBUF_FLAG_LOCAL_ALPHA 0x0008
+#define V4L2_FBUF_FLAG_GLOBAL_ALPHA 0x0010
+#define V4L2_FBUF_FLAG_LOCAL_INV_ALPHA 0x0020
+
+struct v4l2_clip {
+ struct v4l2_rect c;
+ struct v4l2_clip *next;
+};
+
+struct v4l2_window {
+ struct v4l2_rect w;
+ enum v4l2_field field;
+ __u32 chromakey;
+ struct v4l2_clip *clips;
+ __u32 clipcount;
+ void *bitmap;
+ __u8 global_alpha;
+};
+
+/*
+ * C A P T U R E P A R A M E T E R S
+ */
+struct v4l2_captureparm {
+ __u32 capability; /* Supported modes */
+ __u32 capturemode; /* Current mode */
+ struct v4l2_fract timeperframe; /* Time per frame in .1us units */
+ __u32 extendedmode; /* Driver-specific extensions */
+ __u32 readbuffers; /* # of buffers for read */
+ __u32 reserved[4];
+};
+
+/* Flags for 'capability' and 'capturemode' fields */
+#define V4L2_MODE_HIGHQUALITY 0x0001 /* High quality imaging mode */
+#define V4L2_CAP_TIMEPERFRAME 0x1000 /* timeperframe field is supported */
+
+struct v4l2_outputparm {
+ __u32 capability; /* Supported modes */
+ __u32 outputmode; /* Current mode */
+ struct v4l2_fract timeperframe; /* Time per frame in seconds */
+ __u32 extendedmode; /* Driver-specific extensions */
+ __u32 writebuffers; /* # of buffers for write */
+ __u32 reserved[4];
+};
+
+/*
+ * I N P U T I M A G E C R O P P I N G
+ */
+struct v4l2_cropcap {
+ enum v4l2_buf_type type;
+ struct v4l2_rect bounds;
+ struct v4l2_rect defrect;
+ struct v4l2_fract pixelaspect;
+};
+
+struct v4l2_crop {
+ enum v4l2_buf_type type;
+ struct v4l2_rect c;
+};
+
+/*
+ * A N A L O G V I D E O S T A N D A R D
+ */
+
+typedef __u64 v4l2_std_id;
+
+/* one bit for each */
+#define V4L2_STD_PAL_B ((v4l2_std_id)0x00000001)
+#define V4L2_STD_PAL_B1 ((v4l2_std_id)0x00000002)
+#define V4L2_STD_PAL_G ((v4l2_std_id)0x00000004)
+#define V4L2_STD_PAL_H ((v4l2_std_id)0x00000008)
+#define V4L2_STD_PAL_I ((v4l2_std_id)0x00000010)
+#define V4L2_STD_PAL_D ((v4l2_std_id)0x00000020)
+#define V4L2_STD_PAL_D1 ((v4l2_std_id)0x00000040)
+#define V4L2_STD_PAL_K ((v4l2_std_id)0x00000080)
+
+#define V4L2_STD_PAL_M ((v4l2_std_id)0x00000100)
+#define V4L2_STD_PAL_N ((v4l2_std_id)0x00000200)
+#define V4L2_STD_PAL_Nc ((v4l2_std_id)0x00000400)
+#define V4L2_STD_PAL_60 ((v4l2_std_id)0x00000800)
+
+#define V4L2_STD_NTSC_M ((v4l2_std_id)0x00001000)
+#define V4L2_STD_NTSC_M_JP ((v4l2_std_id)0x00002000)
+#define V4L2_STD_NTSC_443 ((v4l2_std_id)0x00004000)
+#define V4L2_STD_NTSC_M_KR ((v4l2_std_id)0x00008000)
+
+#define V4L2_STD_SECAM_B ((v4l2_std_id)0x00010000)
+#define V4L2_STD_SECAM_D ((v4l2_std_id)0x00020000)
+#define V4L2_STD_SECAM_G ((v4l2_std_id)0x00040000)
+#define V4L2_STD_SECAM_H ((v4l2_std_id)0x00080000)
+#define V4L2_STD_SECAM_K ((v4l2_std_id)0x00100000)
+#define V4L2_STD_SECAM_K1 ((v4l2_std_id)0x00200000)
+#define V4L2_STD_SECAM_L ((v4l2_std_id)0x00400000)
+#define V4L2_STD_SECAM_LC ((v4l2_std_id)0x00800000)
+
+/* ATSC/HDTV */
+#define V4L2_STD_ATSC_8_VSB ((v4l2_std_id)0x01000000)
+#define V4L2_STD_ATSC_16_VSB ((v4l2_std_id)0x02000000)
+
+/* FIXME:
+ Although std_id is 64 bits, there is an issue on PPC32 architecture that
+ makes switch(__u64) to break. So, there's a hack on v4l2-common.c rounding
+ this value to 32 bits.
+ As, currently, the max value is for V4L2_STD_ATSC_16_VSB (30 bits wide),
+ it should work fine. However, if needed to add more than two standards,
+ v4l2-common.c should be fixed.
+ */
+
+/* some merged standards */
+#define V4L2_STD_MN (V4L2_STD_PAL_M|V4L2_STD_PAL_N|V4L2_STD_PAL_Nc|V4L2_STD_NTSC)
+#define V4L2_STD_B (V4L2_STD_PAL_B|V4L2_STD_PAL_B1|V4L2_STD_SECAM_B)
+#define V4L2_STD_GH (V4L2_STD_PAL_G|V4L2_STD_PAL_H|V4L2_STD_SECAM_G|V4L2_STD_SECAM_H)
+#define V4L2_STD_DK (V4L2_STD_PAL_DK|V4L2_STD_SECAM_DK)
+
+/* some common needed stuff */
+#define V4L2_STD_PAL_BG (V4L2_STD_PAL_B |\
+ V4L2_STD_PAL_B1 |\
+ V4L2_STD_PAL_G)
+#define V4L2_STD_PAL_DK (V4L2_STD_PAL_D |\
+ V4L2_STD_PAL_D1 |\
+ V4L2_STD_PAL_K)
+#define V4L2_STD_PAL (V4L2_STD_PAL_BG |\
+ V4L2_STD_PAL_DK |\
+ V4L2_STD_PAL_H |\
+ V4L2_STD_PAL_I)
+#define V4L2_STD_NTSC (V4L2_STD_NTSC_M |\
+ V4L2_STD_NTSC_M_JP |\
+ V4L2_STD_NTSC_M_KR)
+#define V4L2_STD_SECAM_DK (V4L2_STD_SECAM_D |\
+ V4L2_STD_SECAM_K |\
+ V4L2_STD_SECAM_K1)
+#define V4L2_STD_SECAM (V4L2_STD_SECAM_B |\
+ V4L2_STD_SECAM_G |\
+ V4L2_STD_SECAM_H |\
+ V4L2_STD_SECAM_DK |\
+ V4L2_STD_SECAM_L |\
+ V4L2_STD_SECAM_LC)
+
+#define V4L2_STD_525_60 (V4L2_STD_PAL_M |\
+ V4L2_STD_PAL_60 |\
+ V4L2_STD_NTSC |\
+ V4L2_STD_NTSC_443)
+#define V4L2_STD_625_50 (V4L2_STD_PAL |\
+ V4L2_STD_PAL_N |\
+ V4L2_STD_PAL_Nc |\
+ V4L2_STD_SECAM)
+#define V4L2_STD_ATSC (V4L2_STD_ATSC_8_VSB |\
+ V4L2_STD_ATSC_16_VSB)
+
+#define V4L2_STD_UNKNOWN 0
+#define V4L2_STD_ALL (V4L2_STD_525_60 |\
+ V4L2_STD_625_50)
+
+struct v4l2_standard {
+ __u32 index;
+ v4l2_std_id id;
+ __u8 name[24];
+ struct v4l2_fract frameperiod; /* Frames, not fields */
+ __u32 framelines;
+ __u32 reserved[4];
+};
+
+/*
+ * V I D E O I N P U T S
+ */
+struct v4l2_input {
+ __u32 index; /* Which input */
+ __u8 name[32]; /* Label */
+ __u32 type; /* Type of input */
+ __u32 audioset; /* Associated audios (bitfield) */
+ __u32 tuner; /* Associated tuner */
+ v4l2_std_id std;
+ __u32 status;
+ __u32 reserved[4];
+};
+
+/* Values for the 'type' field */
+#define V4L2_INPUT_TYPE_TUNER 1
+#define V4L2_INPUT_TYPE_CAMERA 2
+
+/* field 'status' - general */
+#define V4L2_IN_ST_NO_POWER 0x00000001 /* Attached device is off */
+#define V4L2_IN_ST_NO_SIGNAL 0x00000002
+#define V4L2_IN_ST_NO_COLOR 0x00000004
+
+/* field 'status' - sensor orientation */
+/* If sensor is mounted upside down set both bits */
+#define V4L2_IN_ST_HFLIP 0x00000010 /* Frames are flipped horizontally */
+#define V4L2_IN_ST_VFLIP 0x00000020 /* Frames are flipped vertically */
+
+/* field 'status' - analog */
+#define V4L2_IN_ST_NO_H_LOCK 0x00000100 /* No horizontal sync lock */
+#define V4L2_IN_ST_COLOR_KILL 0x00000200 /* Color killer is active */
+
+/* field 'status' - digital */
+#define V4L2_IN_ST_NO_SYNC 0x00010000 /* No synchronization lock */
+#define V4L2_IN_ST_NO_EQU 0x00020000 /* No equalizer lock */
+#define V4L2_IN_ST_NO_CARRIER 0x00040000 /* Carrier recovery failed */
+
+/* field 'status' - VCR and set-top box */
+#define V4L2_IN_ST_MACROVISION 0x01000000 /* Macrovision detected */
+#define V4L2_IN_ST_NO_ACCESS 0x02000000 /* Conditional access denied */
+#define V4L2_IN_ST_VTR 0x04000000 /* VTR time constant */
+
+/*
+ * V I D E O O U T P U T S
+ */
+struct v4l2_output {
+ __u32 index; /* Which output */
+ __u8 name[32]; /* Label */
+ __u32 type; /* Type of output */
+ __u32 audioset; /* Associated audios (bitfield) */
+ __u32 modulator; /* Associated modulator */
+ v4l2_std_id std;
+ __u32 reserved[4];
+};
+/* Values for the 'type' field */
+#define V4L2_OUTPUT_TYPE_MODULATOR 1
+#define V4L2_OUTPUT_TYPE_ANALOG 2
+#define V4L2_OUTPUT_TYPE_ANALOGVGAOVERLAY 3
+
+/*
+ * C O N T R O L S
+ */
+struct v4l2_control {
+ __u32 id;
+ __s32 value;
+};
+
+struct v4l2_ext_control {
+ __u32 id;
+ __u32 reserved2[2];
+ union {
+ __s32 value;
+ __s64 value64;
+ void *reserved;
+ };
+} __attribute__ ((packed));
+
+struct v4l2_ext_controls {
+ __u32 ctrl_class;
+ __u32 count;
+ __u32 error_idx;
+ __u32 reserved[2];
+ struct v4l2_ext_control *controls;
+};
+
+/* Values for ctrl_class field */
+#define V4L2_CTRL_CLASS_USER 0x00980000 /* Old-style 'user' controls */
+#define V4L2_CTRL_CLASS_MPEG 0x00990000 /* MPEG-compression controls */
+#define V4L2_CTRL_CLASS_CAMERA 0x009a0000 /* Camera class controls */
+
+#define V4L2_CTRL_ID_MASK (0x0fffffff)
+#define V4L2_CTRL_ID2CLASS(id) ((id) & 0x0fff0000UL)
+#define V4L2_CTRL_DRIVER_PRIV(id) (((id) & 0xffff) >= 0x1000)
+
+/* Used in the VIDIOC_QUERYCTRL ioctl for querying controls */
+struct v4l2_queryctrl {
+ __u32 id;
+ enum v4l2_ctrl_type type;
+ __u8 name[32]; /* Whatever */
+ __s32 minimum; /* Note signedness */
+ __s32 maximum;
+ __s32 step;
+ __s32 default_value;
+ __u32 flags;
+ __u32 reserved[2];
+};
+
+/* Used in the VIDIOC_QUERYMENU ioctl for querying menu items */
+struct v4l2_querymenu {
+ __u32 id;
+ __u32 index;
+ __u8 name[32]; /* Whatever */
+ __u32 reserved;
+};
+
+/* Control flags */
+#define V4L2_CTRL_FLAG_DISABLED 0x0001
+#define V4L2_CTRL_FLAG_GRABBED 0x0002
+#define V4L2_CTRL_FLAG_READ_ONLY 0x0004
+#define V4L2_CTRL_FLAG_UPDATE 0x0008
+#define V4L2_CTRL_FLAG_INACTIVE 0x0010
+#define V4L2_CTRL_FLAG_SLIDER 0x0020
+#define V4L2_CTRL_FLAG_WRITE_ONLY 0x0040
+
+/* Query flag, to be ORed with the control ID */
+#define V4L2_CTRL_FLAG_NEXT_CTRL 0x80000000
+
+/* User-class control IDs defined by V4L2 */
+#define V4L2_CID_BASE (V4L2_CTRL_CLASS_USER | 0x900)
+#define V4L2_CID_USER_BASE V4L2_CID_BASE
+/* IDs reserved for driver specific controls */
+#define V4L2_CID_PRIVATE_BASE 0x08000000
+
+#define V4L2_CID_USER_CLASS (V4L2_CTRL_CLASS_USER | 1)
+#define V4L2_CID_BRIGHTNESS (V4L2_CID_BASE+0)
+#define V4L2_CID_CONTRAST (V4L2_CID_BASE+1)
+#define V4L2_CID_SATURATION (V4L2_CID_BASE+2)
+#define V4L2_CID_HUE (V4L2_CID_BASE+3)
+#define V4L2_CID_AUDIO_VOLUME (V4L2_CID_BASE+5)
+#define V4L2_CID_AUDIO_BALANCE (V4L2_CID_BASE+6)
+#define V4L2_CID_AUDIO_BASS (V4L2_CID_BASE+7)
+#define V4L2_CID_AUDIO_TREBLE (V4L2_CID_BASE+8)
+#define V4L2_CID_AUDIO_MUTE (V4L2_CID_BASE+9)
+#define V4L2_CID_AUDIO_LOUDNESS (V4L2_CID_BASE+10)
+#define V4L2_CID_BLACK_LEVEL (V4L2_CID_BASE+11) /* Deprecated */
+#define V4L2_CID_AUTO_WHITE_BALANCE (V4L2_CID_BASE+12)
+#define V4L2_CID_DO_WHITE_BALANCE (V4L2_CID_BASE+13)
+#define V4L2_CID_RED_BALANCE (V4L2_CID_BASE+14)
+#define V4L2_CID_BLUE_BALANCE (V4L2_CID_BASE+15)
+#define V4L2_CID_GAMMA (V4L2_CID_BASE+16)
+#define V4L2_CID_WHITENESS (V4L2_CID_GAMMA) /* Deprecated */
+#define V4L2_CID_EXPOSURE (V4L2_CID_BASE+17)
+#define V4L2_CID_AUTOGAIN (V4L2_CID_BASE+18)
+#define V4L2_CID_GAIN (V4L2_CID_BASE+19)
+#define V4L2_CID_HFLIP (V4L2_CID_BASE+20)
+#define V4L2_CID_VFLIP (V4L2_CID_BASE+21)
+
+/* Deprecated; use V4L2_CID_PAN_RESET and V4L2_CID_TILT_RESET */
+#define V4L2_CID_HCENTER (V4L2_CID_BASE+22)
+#define V4L2_CID_VCENTER (V4L2_CID_BASE+23)
+
+#define V4L2_CID_POWER_LINE_FREQUENCY (V4L2_CID_BASE+24)
+enum v4l2_power_line_frequency {
+ V4L2_CID_POWER_LINE_FREQUENCY_DISABLED = 0,
+ V4L2_CID_POWER_LINE_FREQUENCY_50HZ = 1,
+ V4L2_CID_POWER_LINE_FREQUENCY_60HZ = 2,
+};
+#define V4L2_CID_HUE_AUTO (V4L2_CID_BASE+25)
+#define V4L2_CID_WHITE_BALANCE_TEMPERATURE (V4L2_CID_BASE+26)
+#define V4L2_CID_SHARPNESS (V4L2_CID_BASE+27)
+#define V4L2_CID_BACKLIGHT_COMPENSATION (V4L2_CID_BASE+28)
+#define V4L2_CID_CHROMA_AGC (V4L2_CID_BASE+29)
+#define V4L2_CID_COLOR_KILLER (V4L2_CID_BASE+30)
+#define V4L2_CID_COLORFX (V4L2_CID_BASE+31)
+enum v4l2_colorfx {
+ V4L2_COLORFX_NONE = 0,
+ V4L2_COLORFX_BW = 1,
+ V4L2_COLORFX_SEPIA = 2,
+};
+#define V4L2_CID_AUTOBRIGHTNESS (V4L2_CID_BASE+32)
+
+/* last CID + 1 */
+#define V4L2_CID_LASTP1 (V4L2_CID_BASE+33)
+
+/* MPEG-class control IDs defined by V4L2 */
+#define V4L2_CID_MPEG_BASE (V4L2_CTRL_CLASS_MPEG | 0x900)
+#define V4L2_CID_MPEG_CLASS (V4L2_CTRL_CLASS_MPEG | 1)
+
+/* MPEG streams */
+#define V4L2_CID_MPEG_STREAM_TYPE (V4L2_CID_MPEG_BASE+0)
+enum v4l2_mpeg_stream_type {
+ V4L2_MPEG_STREAM_TYPE_MPEG2_PS = 0, /* MPEG-2 program stream */
+ V4L2_MPEG_STREAM_TYPE_MPEG2_TS = 1, /* MPEG-2 transport stream */
+ V4L2_MPEG_STREAM_TYPE_MPEG1_SS = 2, /* MPEG-1 system stream */
+ V4L2_MPEG_STREAM_TYPE_MPEG2_DVD = 3, /* MPEG-2 DVD-compatible stream */
+ V4L2_MPEG_STREAM_TYPE_MPEG1_VCD = 4, /* MPEG-1 VCD-compatible stream */
+ V4L2_MPEG_STREAM_TYPE_MPEG2_SVCD = 5, /* MPEG-2 SVCD-compatible stream */
+};
+#define V4L2_CID_MPEG_STREAM_PID_PMT (V4L2_CID_MPEG_BASE+1)
+#define V4L2_CID_MPEG_STREAM_PID_AUDIO (V4L2_CID_MPEG_BASE+2)
+#define V4L2_CID_MPEG_STREAM_PID_VIDEO (V4L2_CID_MPEG_BASE+3)
+#define V4L2_CID_MPEG_STREAM_PID_PCR (V4L2_CID_MPEG_BASE+4)
+#define V4L2_CID_MPEG_STREAM_PES_ID_AUDIO (V4L2_CID_MPEG_BASE+5)
+#define V4L2_CID_MPEG_STREAM_PES_ID_VIDEO (V4L2_CID_MPEG_BASE+6)
+#define V4L2_CID_MPEG_STREAM_VBI_FMT (V4L2_CID_MPEG_BASE+7)
+enum v4l2_mpeg_stream_vbi_fmt {
+ V4L2_MPEG_STREAM_VBI_FMT_NONE = 0, /* No VBI in the MPEG stream */
+ V4L2_MPEG_STREAM_VBI_FMT_IVTV = 1, /* VBI in private packets, IVTV format */
+};
+
+/* MPEG audio */
+#define V4L2_CID_MPEG_AUDIO_SAMPLING_FREQ (V4L2_CID_MPEG_BASE+100)
+enum v4l2_mpeg_audio_sampling_freq {
+ V4L2_MPEG_AUDIO_SAMPLING_FREQ_44100 = 0,
+ V4L2_MPEG_AUDIO_SAMPLING_FREQ_48000 = 1,
+ V4L2_MPEG_AUDIO_SAMPLING_FREQ_32000 = 2,
+};
+#define V4L2_CID_MPEG_AUDIO_ENCODING (V4L2_CID_MPEG_BASE+101)
+enum v4l2_mpeg_audio_encoding {
+ V4L2_MPEG_AUDIO_ENCODING_LAYER_1 = 0,
+ V4L2_MPEG_AUDIO_ENCODING_LAYER_2 = 1,
+ V4L2_MPEG_AUDIO_ENCODING_LAYER_3 = 2,
+ V4L2_MPEG_AUDIO_ENCODING_AAC = 3,
+ V4L2_MPEG_AUDIO_ENCODING_AC3 = 4,
+};
+#define V4L2_CID_MPEG_AUDIO_L1_BITRATE (V4L2_CID_MPEG_BASE+102)
+enum v4l2_mpeg_audio_l1_bitrate {
+ V4L2_MPEG_AUDIO_L1_BITRATE_32K = 0,
+ V4L2_MPEG_AUDIO_L1_BITRATE_64K = 1,
+ V4L2_MPEG_AUDIO_L1_BITRATE_96K = 2,
+ V4L2_MPEG_AUDIO_L1_BITRATE_128K = 3,
+ V4L2_MPEG_AUDIO_L1_BITRATE_160K = 4,
+ V4L2_MPEG_AUDIO_L1_BITRATE_192K = 5,
+ V4L2_MPEG_AUDIO_L1_BITRATE_224K = 6,
+ V4L2_MPEG_AUDIO_L1_BITRATE_256K = 7,
+ V4L2_MPEG_AUDIO_L1_BITRATE_288K = 8,
+ V4L2_MPEG_AUDIO_L1_BITRATE_320K = 9,
+ V4L2_MPEG_AUDIO_L1_BITRATE_352K = 10,
+ V4L2_MPEG_AUDIO_L1_BITRATE_384K = 11,
+ V4L2_MPEG_AUDIO_L1_BITRATE_416K = 12,
+ V4L2_MPEG_AUDIO_L1_BITRATE_448K = 13,
+};
+#define V4L2_CID_MPEG_AUDIO_L2_BITRATE (V4L2_CID_MPEG_BASE+103)
+enum v4l2_mpeg_audio_l2_bitrate {
+ V4L2_MPEG_AUDIO_L2_BITRATE_32K = 0,
+ V4L2_MPEG_AUDIO_L2_BITRATE_48K = 1,
+ V4L2_MPEG_AUDIO_L2_BITRATE_56K = 2,
+ V4L2_MPEG_AUDIO_L2_BITRATE_64K = 3,
+ V4L2_MPEG_AUDIO_L2_BITRATE_80K = 4,
+ V4L2_MPEG_AUDIO_L2_BITRATE_96K = 5,
+ V4L2_MPEG_AUDIO_L2_BITRATE_112K = 6,
+ V4L2_MPEG_AUDIO_L2_BITRATE_128K = 7,
+ V4L2_MPEG_AUDIO_L2_BITRATE_160K = 8,
+ V4L2_MPEG_AUDIO_L2_BITRATE_192K = 9,
+ V4L2_MPEG_AUDIO_L2_BITRATE_224K = 10,
+ V4L2_MPEG_AUDIO_L2_BITRATE_256K = 11,
+ V4L2_MPEG_AUDIO_L2_BITRATE_320K = 12,
+ V4L2_MPEG_AUDIO_L2_BITRATE_384K = 13,
+};
+#define V4L2_CID_MPEG_AUDIO_L3_BITRATE (V4L2_CID_MPEG_BASE+104)
+enum v4l2_mpeg_audio_l3_bitrate {
+ V4L2_MPEG_AUDIO_L3_BITRATE_32K = 0,
+ V4L2_MPEG_AUDIO_L3_BITRATE_40K = 1,
+ V4L2_MPEG_AUDIO_L3_BITRATE_48K = 2,
+ V4L2_MPEG_AUDIO_L3_BITRATE_56K = 3,
+ V4L2_MPEG_AUDIO_L3_BITRATE_64K = 4,
+ V4L2_MPEG_AUDIO_L3_BITRATE_80K = 5,
+ V4L2_MPEG_AUDIO_L3_BITRATE_96K = 6,
+ V4L2_MPEG_AUDIO_L3_BITRATE_112K = 7,
+ V4L2_MPEG_AUDIO_L3_BITRATE_128K = 8,
+ V4L2_MPEG_AUDIO_L3_BITRATE_160K = 9,
+ V4L2_MPEG_AUDIO_L3_BITRATE_192K = 10,
+ V4L2_MPEG_AUDIO_L3_BITRATE_224K = 11,
+ V4L2_MPEG_AUDIO_L3_BITRATE_256K = 12,
+ V4L2_MPEG_AUDIO_L3_BITRATE_320K = 13,
+};
+#define V4L2_CID_MPEG_AUDIO_MODE (V4L2_CID_MPEG_BASE+105)
+enum v4l2_mpeg_audio_mode {
+ V4L2_MPEG_AUDIO_MODE_STEREO = 0,
+ V4L2_MPEG_AUDIO_MODE_JOINT_STEREO = 1,
+ V4L2_MPEG_AUDIO_MODE_DUAL = 2,
+ V4L2_MPEG_AUDIO_MODE_MONO = 3,
+};
+#define V4L2_CID_MPEG_AUDIO_MODE_EXTENSION (V4L2_CID_MPEG_BASE+106)
+enum v4l2_mpeg_audio_mode_extension {
+ V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_4 = 0,
+ V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_8 = 1,
+ V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_12 = 2,
+ V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_16 = 3,
+};
+#define V4L2_CID_MPEG_AUDIO_EMPHASIS (V4L2_CID_MPEG_BASE+107)
+enum v4l2_mpeg_audio_emphasis {
+ V4L2_MPEG_AUDIO_EMPHASIS_NONE = 0,
+ V4L2_MPEG_AUDIO_EMPHASIS_50_DIV_15_uS = 1,
+ V4L2_MPEG_AUDIO_EMPHASIS_CCITT_J17 = 2,
+};
+#define V4L2_CID_MPEG_AUDIO_CRC (V4L2_CID_MPEG_BASE+108)
+enum v4l2_mpeg_audio_crc {
+ V4L2_MPEG_AUDIO_CRC_NONE = 0,
+ V4L2_MPEG_AUDIO_CRC_CRC16 = 1,
+};
+#define V4L2_CID_MPEG_AUDIO_MUTE (V4L2_CID_MPEG_BASE+109)
+#define V4L2_CID_MPEG_AUDIO_AAC_BITRATE (V4L2_CID_MPEG_BASE+110)
+#define V4L2_CID_MPEG_AUDIO_AC3_BITRATE (V4L2_CID_MPEG_BASE+111)
+enum v4l2_mpeg_audio_ac3_bitrate {
+ V4L2_MPEG_AUDIO_AC3_BITRATE_32K = 0,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_40K = 1,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_48K = 2,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_56K = 3,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_64K = 4,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_80K = 5,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_96K = 6,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_112K = 7,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_128K = 8,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_160K = 9,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_192K = 10,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_224K = 11,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_256K = 12,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_320K = 13,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_384K = 14,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_448K = 15,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_512K = 16,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_576K = 17,
+ V4L2_MPEG_AUDIO_AC3_BITRATE_640K = 18,
+};
+
+/* MPEG video */
+#define V4L2_CID_MPEG_VIDEO_ENCODING (V4L2_CID_MPEG_BASE+200)
+enum v4l2_mpeg_video_encoding {
+ V4L2_MPEG_VIDEO_ENCODING_MPEG_1 = 0,
+ V4L2_MPEG_VIDEO_ENCODING_MPEG_2 = 1,
+ V4L2_MPEG_VIDEO_ENCODING_MPEG_4_AVC = 2,
+};
+#define V4L2_CID_MPEG_VIDEO_ASPECT (V4L2_CID_MPEG_BASE+201)
+enum v4l2_mpeg_video_aspect {
+ V4L2_MPEG_VIDEO_ASPECT_1x1 = 0,
+ V4L2_MPEG_VIDEO_ASPECT_4x3 = 1,
+ V4L2_MPEG_VIDEO_ASPECT_16x9 = 2,
+ V4L2_MPEG_VIDEO_ASPECT_221x100 = 3,
+};
+#define V4L2_CID_MPEG_VIDEO_B_FRAMES (V4L2_CID_MPEG_BASE+202)
+#define V4L2_CID_MPEG_VIDEO_GOP_SIZE (V4L2_CID_MPEG_BASE+203)
+#define V4L2_CID_MPEG_VIDEO_GOP_CLOSURE (V4L2_CID_MPEG_BASE+204)
+#define V4L2_CID_MPEG_VIDEO_PULLDOWN (V4L2_CID_MPEG_BASE+205)
+#define V4L2_CID_MPEG_VIDEO_BITRATE_MODE (V4L2_CID_MPEG_BASE+206)
+enum v4l2_mpeg_video_bitrate_mode {
+ V4L2_MPEG_VIDEO_BITRATE_MODE_VBR = 0,
+ V4L2_MPEG_VIDEO_BITRATE_MODE_CBR = 1,
+};
+#define V4L2_CID_MPEG_VIDEO_BITRATE (V4L2_CID_MPEG_BASE+207)
+#define V4L2_CID_MPEG_VIDEO_BITRATE_PEAK (V4L2_CID_MPEG_BASE+208)
+#define V4L2_CID_MPEG_VIDEO_TEMPORAL_DECIMATION (V4L2_CID_MPEG_BASE+209)
+#define V4L2_CID_MPEG_VIDEO_MUTE (V4L2_CID_MPEG_BASE+210)
+#define V4L2_CID_MPEG_VIDEO_MUTE_YUV (V4L2_CID_MPEG_BASE+211)
+
+/* MPEG-class control IDs specific to the CX2341x driver as defined by V4L2 */
+#define V4L2_CID_MPEG_CX2341X_BASE (V4L2_CTRL_CLASS_MPEG | 0x1000)
+#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE+0)
+enum v4l2_mpeg_cx2341x_video_spatial_filter_mode {
+ V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_MANUAL = 0,
+ V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_AUTO = 1,
+};
+#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE+1)
+#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+2)
+enum v4l2_mpeg_cx2341x_video_luma_spatial_filter_type {
+ V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_OFF = 0,
+ V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_HOR = 1,
+ V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_VERT = 2,
+ V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_HV_SEPARABLE = 3,
+ V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_SYM_NON_SEPARABLE = 4,
+};
+#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+3)
+enum v4l2_mpeg_cx2341x_video_chroma_spatial_filter_type {
+ V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_OFF = 0,
+ V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_1D_HOR = 1,
+};
+#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE+4)
+enum v4l2_mpeg_cx2341x_video_temporal_filter_mode {
+ V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_MANUAL = 0,
+ V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_AUTO = 1,
+};
+#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE+5)
+#define V4L2_CID_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+6)
+enum v4l2_mpeg_cx2341x_video_median_filter_type {
+ V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_OFF = 0,
+ V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR = 1,
+ V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_VERT = 2,
+ V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR_VERT = 3,
+ V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_DIAG = 4,
+};
+#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE+7)
+#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE+8)
+#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE+9)
+#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE+10)
+#define V4L2_CID_MPEG_CX2341X_STREAM_INSERT_NAV_PACKETS (V4L2_CID_MPEG_CX2341X_BASE+11)
+
+/* Camera class control IDs */
+#define V4L2_CID_CAMERA_CLASS_BASE (V4L2_CTRL_CLASS_CAMERA | 0x900)
+#define V4L2_CID_CAMERA_CLASS (V4L2_CTRL_CLASS_CAMERA | 1)
+
+#define V4L2_CID_EXPOSURE_AUTO (V4L2_CID_CAMERA_CLASS_BASE+1)
+enum v4l2_exposure_auto_type {
+ V4L2_EXPOSURE_AUTO = 0,
+ V4L2_EXPOSURE_MANUAL = 1,
+ V4L2_EXPOSURE_SHUTTER_PRIORITY = 2,
+ V4L2_EXPOSURE_APERTURE_PRIORITY = 3
+};
+#define V4L2_CID_EXPOSURE_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+2)
+#define V4L2_CID_EXPOSURE_AUTO_PRIORITY (V4L2_CID_CAMERA_CLASS_BASE+3)
+
+#define V4L2_CID_PAN_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+4)
+#define V4L2_CID_TILT_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+5)
+#define V4L2_CID_PAN_RESET (V4L2_CID_CAMERA_CLASS_BASE+6)
+#define V4L2_CID_TILT_RESET (V4L2_CID_CAMERA_CLASS_BASE+7)
+
+#define V4L2_CID_PAN_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+8)
+#define V4L2_CID_TILT_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+9)
+
+#define V4L2_CID_FOCUS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+10)
+#define V4L2_CID_FOCUS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+11)
+#define V4L2_CID_FOCUS_AUTO (V4L2_CID_CAMERA_CLASS_BASE+12)
+
+#define V4L2_CID_ZOOM_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+13)
+#define V4L2_CID_ZOOM_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+14)
+#define V4L2_CID_ZOOM_CONTINUOUS (V4L2_CID_CAMERA_CLASS_BASE+15)
+
+#define V4L2_CID_PRIVACY (V4L2_CID_CAMERA_CLASS_BASE+16)
+
+/*
+ * T U N I N G
+ */
+struct v4l2_tuner {
+ __u32 index;
+ __u8 name[32];
+ enum v4l2_tuner_type type;
+ __u32 capability;
+ __u32 rangelow;
+ __u32 rangehigh;
+ __u32 rxsubchans;
+ __u32 audmode;
+ __s32 signal;
+ __s32 afc;
+ __u32 reserved[4];
+};
+
+struct v4l2_modulator {
+ __u32 index;
+ __u8 name[32];
+ __u32 capability;
+ __u32 rangelow;
+ __u32 rangehigh;
+ __u32 txsubchans;
+ __u32 reserved[4];
+};
+
+/* Flags for the 'capability' field */
+#define V4L2_TUNER_CAP_LOW 0x0001
+#define V4L2_TUNER_CAP_NORM 0x0002
+#define V4L2_TUNER_CAP_STEREO 0x0010
+#define V4L2_TUNER_CAP_LANG2 0x0020
+#define V4L2_TUNER_CAP_SAP 0x0020
+#define V4L2_TUNER_CAP_LANG1 0x0040
+
+/* Flags for the 'rxsubchans' field */
+#define V4L2_TUNER_SUB_MONO 0x0001
+#define V4L2_TUNER_SUB_STEREO 0x0002
+#define V4L2_TUNER_SUB_LANG2 0x0004
+#define V4L2_TUNER_SUB_SAP 0x0004
+#define V4L2_TUNER_SUB_LANG1 0x0008
+
+/* Values for the 'audmode' field */
+#define V4L2_TUNER_MODE_MONO 0x0000
+#define V4L2_TUNER_MODE_STEREO 0x0001
+#define V4L2_TUNER_MODE_LANG2 0x0002
+#define V4L2_TUNER_MODE_SAP 0x0002
+#define V4L2_TUNER_MODE_LANG1 0x0003
+#define V4L2_TUNER_MODE_LANG1_LANG2 0x0004
+
+struct v4l2_frequency {
+ __u32 tuner;
+ enum v4l2_tuner_type type;
+ __u32 frequency;
+ __u32 reserved[8];
+};
+
+struct v4l2_hw_freq_seek {
+ __u32 tuner;
+ enum v4l2_tuner_type type;
+ __u32 seek_upward;
+ __u32 wrap_around;
+ __u32 reserved[8];
+};
+
+/*
+ * A U D I O
+ */
+struct v4l2_audio {
+ __u32 index;
+ __u8 name[32];
+ __u32 capability;
+ __u32 mode;
+ __u32 reserved[2];
+};
+
+/* Flags for the 'capability' field */
+#define V4L2_AUDCAP_STEREO 0x00001
+#define V4L2_AUDCAP_AVL 0x00002
+
+/* Flags for the 'mode' field */
+#define V4L2_AUDMODE_AVL 0x00001
+
+struct v4l2_audioout {
+ __u32 index;
+ __u8 name[32];
+ __u32 capability;
+ __u32 mode;
+ __u32 reserved[2];
+};
+
+/*
+ * M P E G S E R V I C E S
+ *
+ * NOTE: EXPERIMENTAL API
+ */
+#if 1
+#define V4L2_ENC_IDX_FRAME_I (0)
+#define V4L2_ENC_IDX_FRAME_P (1)
+#define V4L2_ENC_IDX_FRAME_B (2)
+#define V4L2_ENC_IDX_FRAME_MASK (0xf)
+
+struct v4l2_enc_idx_entry {
+ __u64 offset;
+ __u64 pts;
+ __u32 length;
+ __u32 flags;
+ __u32 reserved[2];
+};
+
+#define V4L2_ENC_IDX_ENTRIES (64)
+struct v4l2_enc_idx {
+ __u32 entries;
+ __u32 entries_cap;
+ __u32 reserved[4];
+ struct v4l2_enc_idx_entry entry[V4L2_ENC_IDX_ENTRIES];
+};
+
+
+#define V4L2_ENC_CMD_START (0)
+#define V4L2_ENC_CMD_STOP (1)
+#define V4L2_ENC_CMD_PAUSE (2)
+#define V4L2_ENC_CMD_RESUME (3)
+
+/* Flags for V4L2_ENC_CMD_STOP */
+#define V4L2_ENC_CMD_STOP_AT_GOP_END (1 << 0)
+
+struct v4l2_encoder_cmd {
+ __u32 cmd;
+ __u32 flags;
+ union {
+ struct {
+ __u32 data[8];
+ } raw;
+ };
+};
+
+#endif
+
+
+/*
+ * D A T A S E R V I C E S ( V B I )
+ *
+ * Data services API by Michael Schimek
+ */
+
+/* Raw VBI */
+struct v4l2_vbi_format {
+ __u32 sampling_rate; /* in 1 Hz */
+ __u32 offset;
+ __u32 samples_per_line;
+ __u32 sample_format; /* V4L2_PIX_FMT_* */
+ __s32 start[2];
+ __u32 count[2];
+ __u32 flags; /* V4L2_VBI_* */
+ __u32 reserved[2]; /* must be zero */
+};
+
+/* VBI flags */
+#define V4L2_VBI_UNSYNC (1 << 0)
+#define V4L2_VBI_INTERLACED (1 << 1)
+
+/* Sliced VBI
+ *
+ * This implements a proposed V4L2 API to allow SLICED VBI
+ * required for some hardware encoders. It should change without
+ * notice in the definitive implementation.
+ */
+
+struct v4l2_sliced_vbi_format {
+ __u16 service_set;
+ /* service_lines[0][...] specifies lines 0-23 (1-23 used) of the first field
+ service_lines[1][...] specifies lines 0-23 (1-23 used) of the second field
+ (equals frame lines 313-336 for 625 line video
+ standards, 263-286 for 525 line standards) */
+ __u16 service_lines[2][24];
+ __u32 io_size;
+ __u32 reserved[2]; /* must be zero */
+};
+
+/* Teletext World System Teletext
+ (WST), defined on ITU-R BT.653-2 */
+#define V4L2_SLICED_TELETEXT_B (0x0001)
+/* Video Program System, defined on ETS 300 231*/
+#define V4L2_SLICED_VPS (0x0400)
+/* Closed Caption, defined on EIA-608 */
+#define V4L2_SLICED_CAPTION_525 (0x1000)
+/* Wide Screen System, defined in ITU-R BT.1119 */
+#define V4L2_SLICED_WSS_625 (0x4000)
+
+#define V4L2_SLICED_VBI_525 (V4L2_SLICED_CAPTION_525)
+#define V4L2_SLICED_VBI_625 (V4L2_SLICED_TELETEXT_B | V4L2_SLICED_VPS | V4L2_SLICED_WSS_625)
+
+struct v4l2_sliced_vbi_cap {
+ __u16 service_set;
+ /* service_lines[0][...] specifies lines 0-23 (1-23 used) of the first field
+ service_lines[1][...] specifies lines 0-23 (1-23 used) of the second field
+ (equals frame lines 313-336 for 625 line video
+ standards, 263-286 for 525 line standards) */
+ __u16 service_lines[2][24];
+ enum v4l2_buf_type type;
+ __u32 reserved[3]; /* must be 0 */
+};
+
+struct v4l2_sliced_vbi_data {
+ __u32 id;
+ __u32 field; /* 0: first field, 1: second field */
+ __u32 line; /* 1-23 */
+ __u32 reserved; /* must be 0 */
+ __u8 data[48];
+};
+
+/*
+ * Sliced VBI data inserted into MPEG Streams
+ */
+
+/*
+ * V4L2_MPEG_STREAM_VBI_FMT_IVTV:
+ *
+ * Structure of payload contained in an MPEG 2 Private Stream 1 PES Packet in an
+ * MPEG-2 Program Pack that contains V4L2_MPEG_STREAM_VBI_FMT_IVTV Sliced VBI
+ * data
+ *
+ * Note, the MPEG-2 Program Pack and Private Stream 1 PES packet header
+ * definitions are not included here. See the MPEG-2 specifications for details
+ * on these headers.
+ */
+
+/* Line type IDs */
+#define V4L2_MPEG_VBI_IVTV_TELETEXT_B (1)
+#define V4L2_MPEG_VBI_IVTV_CAPTION_525 (4)
+#define V4L2_MPEG_VBI_IVTV_WSS_625 (5)
+#define V4L2_MPEG_VBI_IVTV_VPS (7)
+
+struct v4l2_mpeg_vbi_itv0_line {
+ __u8 id; /* One of V4L2_MPEG_VBI_IVTV_* above */
+ __u8 data[42]; /* Sliced VBI data for the line */
+} __attribute__ ((packed));
+
+struct v4l2_mpeg_vbi_itv0 {
+ __le32 linemask[2]; /* Bitmasks of VBI service lines present */
+ struct v4l2_mpeg_vbi_itv0_line line[35];
+} __attribute__ ((packed));
+
+struct v4l2_mpeg_vbi_ITV0 {
+ struct v4l2_mpeg_vbi_itv0_line line[36];
+} __attribute__ ((packed));
+
+#define V4L2_MPEG_VBI_IVTV_MAGIC0 "itv0"
+#define V4L2_MPEG_VBI_IVTV_MAGIC1 "ITV0"
+
+struct v4l2_mpeg_vbi_fmt_ivtv {
+ __u8 magic[4];
+ union {
+ struct v4l2_mpeg_vbi_itv0 itv0;
+ struct v4l2_mpeg_vbi_ITV0 ITV0;
+ };
+} __attribute__ ((packed));
+
+/*
+ * A G G R E G A T E S T R U C T U R E S
+ */
+
+/* Stream data format
+ */
+struct v4l2_format {
+ enum v4l2_buf_type type;
+ union {
+ struct v4l2_pix_format pix; /* V4L2_BUF_TYPE_VIDEO_CAPTURE */
+ struct v4l2_window win; /* V4L2_BUF_TYPE_VIDEO_OVERLAY */
+ struct v4l2_vbi_format vbi; /* V4L2_BUF_TYPE_VBI_CAPTURE */
+ struct v4l2_sliced_vbi_format sliced; /* V4L2_BUF_TYPE_SLICED_VBI_CAPTURE */
+ __u8 raw_data[200]; /* user-defined */
+ } fmt;
+};
+
+
+/* Stream type-dependent parameters
+ */
+struct v4l2_streamparm {
+ enum v4l2_buf_type type;
+ union {
+ struct v4l2_captureparm capture;
+ struct v4l2_outputparm output;
+ __u8 raw_data[200]; /* user-defined */
+ } parm;
+};
+
+/*
+ * A D V A N C E D D E B U G G I N G
+ *
+ * NOTE: EXPERIMENTAL API, NEVER RELY ON THIS IN APPLICATIONS!
+ * FOR DEBUGGING, TESTING AND INTERNAL USE ONLY!
+ */
+
+/* VIDIOC_DBG_G_REGISTER and VIDIOC_DBG_S_REGISTER */
+
+#define V4L2_CHIP_MATCH_HOST 0 /* Match against chip ID on host (0 for the host) */
+#define V4L2_CHIP_MATCH_I2C_DRIVER 1 /* Match against I2C driver name */
+#define V4L2_CHIP_MATCH_I2C_ADDR 2 /* Match against I2C 7-bit address */
+#define V4L2_CHIP_MATCH_AC97 3 /* Match against ancillary AC97 chip */
+
+struct v4l2_dbg_match {
+ __u32 type; /* Match type */
+ union { /* Match this chip, meaning determined by type */
+ __u32 addr;
+ char name[32];
+ };
+} __attribute__ ((packed));
+
+struct v4l2_dbg_register {
+ struct v4l2_dbg_match match;
+ __u32 size; /* register size in bytes */
+ __u64 reg;
+ __u64 val;
+} __attribute__ ((packed));
+
+/* VIDIOC_DBG_G_CHIP_IDENT */
+struct v4l2_dbg_chip_ident {
+ struct v4l2_dbg_match match;
+ __u32 ident; /* chip identifier as specified in <media/v4l2-chip-ident.h> */
+ __u32 revision; /* chip revision, chip specific */
+} __attribute__ ((packed));
+
+/*
+ * I O C T L C O D E S F O R V I D E O D E V I C E S
+ *
+ */
+#define VIDIOC_QUERYCAP _IOR('V', 0, struct v4l2_capability)
+#define VIDIOC_RESERVED _IO('V', 1)
+#define VIDIOC_ENUM_FMT _IOWR('V', 2, struct v4l2_fmtdesc)
+#define VIDIOC_G_FMT _IOWR('V', 4, struct v4l2_format)
+#define VIDIOC_S_FMT _IOWR('V', 5, struct v4l2_format)
+#define VIDIOC_REQBUFS _IOWR('V', 8, struct v4l2_requestbuffers)
+#define VIDIOC_QUERYBUF _IOWR('V', 9, struct v4l2_buffer)
+#define VIDIOC_G_FBUF _IOR('V', 10, struct v4l2_framebuffer)
+#define VIDIOC_S_FBUF _IOW('V', 11, struct v4l2_framebuffer)
+#define VIDIOC_OVERLAY _IOW('V', 14, int)
+#define VIDIOC_QBUF _IOWR('V', 15, struct v4l2_buffer)
+#define VIDIOC_DQBUF _IOWR('V', 17, struct v4l2_buffer)
+#define VIDIOC_STREAMON _IOW('V', 18, int)
+#define VIDIOC_STREAMOFF _IOW('V', 19, int)
+#define VIDIOC_G_PARM _IOWR('V', 21, struct v4l2_streamparm)
+#define VIDIOC_S_PARM _IOWR('V', 22, struct v4l2_streamparm)
+#define VIDIOC_G_STD _IOR('V', 23, v4l2_std_id)
+#define VIDIOC_S_STD _IOW('V', 24, v4l2_std_id)
+#define VIDIOC_ENUMSTD _IOWR('V', 25, struct v4l2_standard)
+#define VIDIOC_ENUMINPUT _IOWR('V', 26, struct v4l2_input)
+#define VIDIOC_G_CTRL _IOWR('V', 27, struct v4l2_control)
+#define VIDIOC_S_CTRL _IOWR('V', 28, struct v4l2_control)
+#define VIDIOC_G_TUNER _IOWR('V', 29, struct v4l2_tuner)
+#define VIDIOC_S_TUNER _IOW('V', 30, struct v4l2_tuner)
+#define VIDIOC_G_AUDIO _IOR('V', 33, struct v4l2_audio)
+#define VIDIOC_S_AUDIO _IOW('V', 34, struct v4l2_audio)
+#define VIDIOC_QUERYCTRL _IOWR('V', 36, struct v4l2_queryctrl)
+#define VIDIOC_QUERYMENU _IOWR('V', 37, struct v4l2_querymenu)
+#define VIDIOC_G_INPUT _IOR('V', 38, int)
+#define VIDIOC_S_INPUT _IOWR('V', 39, int)
+#define VIDIOC_G_OUTPUT _IOR('V', 46, int)
+#define VIDIOC_S_OUTPUT _IOWR('V', 47, int)
+#define VIDIOC_ENUMOUTPUT _IOWR('V', 48, struct v4l2_output)
+#define VIDIOC_G_AUDOUT _IOR('V', 49, struct v4l2_audioout)
+#define VIDIOC_S_AUDOUT _IOW('V', 50, struct v4l2_audioout)
+#define VIDIOC_G_MODULATOR _IOWR('V', 54, struct v4l2_modulator)
+#define VIDIOC_S_MODULATOR _IOW('V', 55, struct v4l2_modulator)
+#define VIDIOC_G_FREQUENCY _IOWR('V', 56, struct v4l2_frequency)
+#define VIDIOC_S_FREQUENCY _IOW('V', 57, struct v4l2_frequency)
+#define VIDIOC_CROPCAP _IOWR('V', 58, struct v4l2_cropcap)
+#define VIDIOC_G_CROP _IOWR('V', 59, struct v4l2_crop)
+#define VIDIOC_S_CROP _IOW('V', 60, struct v4l2_crop)
+#define VIDIOC_G_JPEGCOMP _IOR('V', 61, struct v4l2_jpegcompression)
+#define VIDIOC_S_JPEGCOMP _IOW('V', 62, struct v4l2_jpegcompression)
+#define VIDIOC_QUERYSTD _IOR('V', 63, v4l2_std_id)
+#define VIDIOC_TRY_FMT _IOWR('V', 64, struct v4l2_format)
+#define VIDIOC_ENUMAUDIO _IOWR('V', 65, struct v4l2_audio)
+#define VIDIOC_ENUMAUDOUT _IOWR('V', 66, struct v4l2_audioout)
+#define VIDIOC_G_PRIORITY _IOR('V', 67, enum v4l2_priority)
+#define VIDIOC_S_PRIORITY _IOW('V', 68, enum v4l2_priority)
+#define VIDIOC_G_SLICED_VBI_CAP _IOWR('V', 69, struct v4l2_sliced_vbi_cap)
+#define VIDIOC_LOG_STATUS _IO('V', 70)
+#define VIDIOC_G_EXT_CTRLS _IOWR('V', 71, struct v4l2_ext_controls)
+#define VIDIOC_S_EXT_CTRLS _IOWR('V', 72, struct v4l2_ext_controls)
+#define VIDIOC_TRY_EXT_CTRLS _IOWR('V', 73, struct v4l2_ext_controls)
+#if 1
+#define VIDIOC_ENUM_FRAMESIZES _IOWR('V', 74, struct v4l2_frmsizeenum)
+#define VIDIOC_ENUM_FRAMEINTERVALS _IOWR('V', 75, struct v4l2_frmivalenum)
+#define VIDIOC_G_ENC_INDEX _IOR('V', 76, struct v4l2_enc_idx)
+#define VIDIOC_ENCODER_CMD _IOWR('V', 77, struct v4l2_encoder_cmd)
+#define VIDIOC_TRY_ENCODER_CMD _IOWR('V', 78, struct v4l2_encoder_cmd)
+#endif
+
+#if 1
+/* Experimental, meant for debugging, testing and internal use.
+ Only implemented if CONFIG_VIDEO_ADV_DEBUG is defined.
+ You must be root to use these ioctls. Never use these in applications! */
+#define VIDIOC_DBG_S_REGISTER _IOW('V', 79, struct v4l2_dbg_register)
+#define VIDIOC_DBG_G_REGISTER _IOWR('V', 80, struct v4l2_dbg_register)
+
+/* Experimental, meant for debugging, testing and internal use.
+ Never use this ioctl in applications! */
+#define VIDIOC_DBG_G_CHIP_IDENT _IOWR('V', 81, struct v4l2_dbg_chip_ident)
+#endif
+
+#define VIDIOC_S_HW_FREQ_SEEK _IOW('V', 82, struct v4l2_hw_freq_seek)
+/* Reminder: when adding new ioctls please add support for them to
+ drivers/media/video/v4l2-compat-ioctl32.c as well! */
+
+#ifdef __OLD_VIDIOC_
+/* for compatibility, will go away some day */
+#define VIDIOC_OVERLAY_OLD _IOWR('V', 14, int)
+#define VIDIOC_S_PARM_OLD _IOW('V', 22, struct v4l2_streamparm)
+#define VIDIOC_S_CTRL_OLD _IOW('V', 28, struct v4l2_control)
+#define VIDIOC_G_AUDIO_OLD _IOWR('V', 33, struct v4l2_audio)
+#define VIDIOC_G_AUDOUT_OLD _IOWR('V', 49, struct v4l2_audioout)
+#define VIDIOC_CROPCAP_OLD _IOR('V', 58, struct v4l2_cropcap)
+#endif
+
+#define BASE_VIDIOC_PRIVATE 192 /* 192-255 are private */
+
+#endif /* __LINUX_VIDEODEV2_H */