/* -LICENSE-START-
** Copyright (c) 2009 Blackmagic Design
**
** Permission is hereby granted, free of charge, to any person or organization
** obtaining a copy of the software and accompanying documentation covered by
** this license (the "Software") to use, reproduce, display, distribute,
** execute, and transmit the Software, and to prepare derivative works of the
** Software, and to permit third-parties to whom the Software is furnished to
** do so, all subject to the following:
**
** The copyright notices in the Software and this entire statement, including
** the above license grant, this restriction and the following disclaimer,
** must be included in all copies of the Software, in whole or in part, and
** all derivative works of the Software, unless such copies or derivative
** works are solely in the form of machine-executable object code generated by
** a source language processor.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
** FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
** SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
** FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
** ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
** DEALINGS IN THE SOFTWARE.
** -LICENSE-END-
*/

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include <unistd.h>
#include <fcntl.h>

#include <QGLWidget>
#include <QDebug>
#include <QImage>
#include <QMutex>
#include <QPaintEvent>

#include <QtOpenGL>

#ifndef GL_TEXTURE_RECTANGLE_EXT
#define GL_TEXTURE_RECTANGLE_EXT GL_TEXTURE_RECTANGLE_NV
#endif

#include <KDebug>

#include "capture.h"
#include "kdenlivesettings.h"

pthread_mutex_t sleepMutex;
pthread_cond_t  sleepCond;
int             videoOutputFile = -1;
int             audioOutputFile = -1;

static BMDTimecodeFormat g_timecodeFormat = 0;
static int               g_videoModeIndex = -1;
static int               g_audioChannels = 2;
static int               g_audioSampleDepth = 16;
const char              *g_videoOutputFile = NULL;
const char              *g_audioOutputFile = NULL;
static int               g_maxFrames = -1;
static QString           doCaptureFrame;
static double            g_aspect_ratio = 16.0 / 9.0;

static unsigned long frameCount = 0;

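// Convert one frame of packed 8-bit UYVY 4:2:2 (as delivered by the DeckLink
// input) to 32-bit BGRA in memory, using the integer BT.601 approximation kept
// in the comments below. Each 4-byte macropixel (U, Y, V, Y2) shares one
// chroma pair between the two output pixels it produces.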
void yuv2rgb_int(unsigned char *yuv_buffer, unsigned char *rgb_buffer, int width, int height)
{
    int len;
    int r, g, b;
    int Y, U, V, Y2;
    int rgb_ptr, y_ptr, t;

    len = width * height / 2;

    rgb_ptr = 0;
    y_ptr = 0;

    for (t = 0; t < len; t++) {  /* process 2 pixels at a time */
        /* Compute parts of the UV components */
        U = yuv_buffer[y_ptr];
        Y = yuv_buffer[y_ptr + 1];
        V = yuv_buffer[y_ptr + 2];
        Y2 = yuv_buffer[y_ptr + 3];
        y_ptr += 4;

        /*r = 1.164*(Y-16) + 1.596*(V-128);
        g = 1.164*(Y-16) - 0.813*(V-128) - 0.391*(U-128);
        b = 1.164*(Y-16) + 2.018*(U-128);*/

        r = ((298 * (Y - 16)                   + 409 * (V - 128) + 128) >> 8);
        g = ((298 * (Y - 16) - 100 * (U - 128) - 208 * (V - 128) + 128) >> 8);
        b = ((298 * (Y - 16) + 516 * (U - 128)                   + 128) >> 8);

        if (r > 255) r = 255;
        if (g > 255) g = 255;
        if (b > 255) b = 255;

        if (r < 0) r = 0;
        if (g < 0) g = 0;
        if (b < 0) b = 0;

        rgb_buffer[rgb_ptr] = b;
        rgb_buffer[rgb_ptr + 1] = g;
        rgb_buffer[rgb_ptr + 2] = r;
        rgb_buffer[rgb_ptr + 3] = 255;

        rgb_ptr += 4;

        /*r = 1.164*(Y2-16) + 1.596*(V-128);
        g = 1.164*(Y2-16) - 0.813*(V-128) - 0.391*(U-128);
        b = 1.164*(Y2-16) + 2.018*(U-128);*/

        r = ((298 * (Y2 - 16)                   + 409 * (V - 128) + 128) >> 8);
        g = ((298 * (Y2 - 16) - 100 * (U - 128) - 208 * (V - 128) + 128) >> 8);
        b = ((298 * (Y2 - 16) + 516 * (U - 128)                   + 128) >> 8);

        if (r > 255) r = 255;
        if (g > 255) g = 255;
        if (b > 255) b = 255;

        if (r < 0) r = 0;
        if (g < 0) g = 0;
        if (b < 0) b = 0;

        rgb_buffer[rgb_ptr] = b;
        rgb_buffer[rgb_ptr + 1] = g;
        rgb_buffer[rgb_ptr + 2] = r;
        rgb_buffer[rgb_ptr + 3] = 255;
        rgb_ptr += 4;
    }
}

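// OpenGL preview widget. It implements IDeckLinkScreenPreviewCallback so the
// DeckLink input can push every captured frame to it through DrawFrame(); the
// actual drawing is delegated to the SDK's IDeckLinkGLScreenPreviewHelper.
// An optional QImage overlay can be blended on top of the video with
// showOverlay()/hideOverlay().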
class CDeckLinkGLWidget : public QGLWidget, public IDeckLinkScreenPreviewCallback
{
private:
    QAtomicInt refCount;
    QMutex mutex;
    IDeckLinkInput* deckLinkIn;
    IDeckLinkGLScreenPreviewHelper* deckLinkScreenPreviewHelper;
    IDeckLinkVideoFrame* m_frame;
    QColor m_backgroundColor;
    GLuint m_texture;
    QImage m_img;
    double m_zx;
    double m_zy;
    int m_pictureWidth;
    int m_pictureHeight;
    bool m_transparentOverlay;

public:
    CDeckLinkGLWidget(IDeckLinkInput* deckLinkInput, QWidget* parent);
    // IDeckLinkScreenPreviewCallback
    virtual HRESULT QueryInterface(REFIID iid, LPVOID *ppv);
    virtual ULONG AddRef();
    virtual ULONG Release();
    virtual HRESULT DrawFrame(IDeckLinkVideoFrame* theFrame);
    void showOverlay(QImage img, bool transparent);
    void hideOverlay();

protected:
    void initializeGL();
    void paintGL();
    void resizeGL(int width, int height);
    /*void initializeOverlayGL();
    void paintOverlayGL();
    void resizeOverlayGL(int width, int height);*/
};

// Note: the frame pointer, texture id and picture size are zero-initialized
// here so showOverlay() cannot read uninitialized members before the first
// resizeGL() call.
CDeckLinkGLWidget::CDeckLinkGLWidget(IDeckLinkInput* deckLinkInput, QWidget* parent) :
    QGLWidget(/*QGLFormat(QGL::HasOverlay | QGL::AlphaChannel),*/ parent)
    , m_frame(NULL)
    , m_backgroundColor(KdenliveSettings::window_background())
    , m_texture(0)
    , m_zx(1.0)
    , m_zy(1.0)
    , m_pictureWidth(0)
    , m_pictureHeight(0)
    , m_transparentOverlay(true)
{
    refCount = 1;
    deckLinkIn = deckLinkInput;
    deckLinkScreenPreviewHelper = CreateOpenGLScreenPreviewHelper();
}

void CDeckLinkGLWidget::showOverlay(QImage img, bool transparent)
{
    m_transparentOverlay = transparent;
    m_img = convertToGLFormat(img);
    m_zx = (double)m_pictureWidth / m_img.width();
    m_zy = (double)m_pictureHeight / m_img.height();
    if (m_transparentOverlay) {
        glEnable(GL_BLEND);
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_COLOR);
    } else {
        glDisable(GL_BLEND);
    }
}

void CDeckLinkGLWidget::hideOverlay()
{
    m_img = QImage();
    glDisable(GL_BLEND);
}

void CDeckLinkGLWidget::initializeGL()
{
    if (deckLinkScreenPreviewHelper != NULL) {
        mutex.lock();
        deckLinkScreenPreviewHelper->InitializeGL();
        glShadeModel(GL_FLAT);
        glDisable(GL_DEPTH_TEST);
        glDisable(GL_CULL_FACE);
        glDisable(GL_LIGHTING);
        glDisable(GL_DITHER);
        glDisable(GL_BLEND);

        //Documents/images/alpha2.png");//
        //m_texture = bindTexture(convertToGLFormat(img), GL_TEXTURE_RECTANGLE_EXT, GL_RGBA8, QGLContext::LinearFilteringBindOption);
        mutex.unlock();
    }
}

/*void CDeckLinkGLWidget::initializeOverlayGL()
{
    glDisable(GL_BLEND);
    glEnable(GL_TEXTURE_RECTANGLE_EXT);
}

void CDeckLinkGLWidget::paintOverlayGL()
{
    makeOverlayCurrent();
    glEnable(GL_BLEND);
    //glClearDepth(0.5f);
    //glPixelTransferf(GL_ALPHA_SCALE, 10);
    //glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
}*/

void CDeckLinkGLWidget::paintGL()
{
    mutex.lock();
    glLoadIdentity();
    qglClearColor(m_backgroundColor);
    //glClearColor(1.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    deckLinkScreenPreviewHelper->PaintGL();
    if (!m_img.isNull()) {
        glPixelZoom(m_zx, m_zy);
        glDrawPixels(m_img.width(), m_img.height(), GL_RGBA, GL_UNSIGNED_BYTE, m_img.bits());
    }
    mutex.unlock();
}

/*
void CDeckLinkGLWidget::paintEvent(QPaintEvent *event)
{
    mutex.lock();
    QPainter p(this);
    QRect r = event->rect();
    p.setClipRect(r);
    void *frameBytes;
    m_frame->GetBytes(&frameBytes);
    QImage img((uchar*)frameBytes, m_frame->GetWidth(), m_frame->GetHeight(), QImage::Format_ARGB32);//m_frame->GetPixelFormat());
    QRectF re(0, 0, width(), height());
    p.drawImage(re, img);
    p.end();
    mutex.unlock();
}*/

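// Letterbox the viewport: keep the source aspect ratio (g_aspect_ratio) inside
// the widget by shrinking either the width or the height, centre the resulting
// picture rectangle, and rescale any active overlay to match.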
void CDeckLinkGLWidget::resizeGL(int width, int height)
{
    mutex.lock();
    m_pictureHeight = height;
    m_pictureWidth = width;
    int calculatedWidth = g_aspect_ratio * height;
    if (calculatedWidth > width) m_pictureHeight = width / g_aspect_ratio;
    else {
        int calculatedHeight = width / g_aspect_ratio;
        if (calculatedHeight > height) m_pictureWidth = height * g_aspect_ratio;
    }
    glViewport((width - m_pictureWidth) / 2, (height - m_pictureHeight) / 2, m_pictureWidth, m_pictureHeight);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(-1.0, 1.0, -1.0, 1.0, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glRasterPos2i(-1, -1);
    if (!m_img.isNull()) {
        m_zx = (double)m_pictureWidth / m_img.width();
        m_zy = (double)m_pictureHeight / m_img.height();
    }
    mutex.unlock();
}

/*void CDeckLinkGLWidget::resizeOverlayGL(int width, int height)
{
    int newwidth = width;
    int newheight = height;
    int calculatedWidth = g_aspect_ratio * height;
    if (calculatedWidth > width) newheight = width / g_aspect_ratio;
    else {
        int calculatedHeight = width / g_aspect_ratio;
        if (calculatedHeight > height) newwidth = height * g_aspect_ratio;
    }
    glViewport((width - newwidth) / 2, (height - newheight) / 2, newwidth, newheight);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, width, 0, height, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    updateOverlayGL();
}*/

HRESULT CDeckLinkGLWidget::QueryInterface(REFIID iid, LPVOID *ppv)
{
    *ppv = NULL;
    return E_NOINTERFACE;
}

ULONG CDeckLinkGLWidget::AddRef()
{
    int oldValue;

    oldValue = refCount.fetchAndAddAcquire(1);
    return (ULONG)(oldValue + 1);
}

ULONG CDeckLinkGLWidget::Release()
{
    int oldValue;

    oldValue = refCount.fetchAndAddAcquire(-1);
    if (oldValue == 1) {
        delete this;
    }

    return (ULONG)(oldValue - 1);
}

HRESULT CDeckLinkGLWidget::DrawFrame(IDeckLinkVideoFrame* theFrame)
{
    if (deckLinkScreenPreviewHelper != NULL && theFrame != NULL) {
        /*mutex.lock();
        m_frame = theFrame;
        mutex.unlock();*/
        deckLinkScreenPreviewHelper->SetFrame(theFrame);
        update();
    }
    return S_OK;
}

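// Capture delegate handed to IDeckLinkInput::SetCallback(). The DeckLink
// driver invokes VideoInputFrameArrived() from its own capture thread, so the
// reference count is protected by a pthread mutex.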
DeckLinkCaptureDelegate::DeckLinkCaptureDelegate() : m_refCount(0)
{
    pthread_mutex_init(&m_mutex, NULL);
}

DeckLinkCaptureDelegate::~DeckLinkCaptureDelegate()
{
    pthread_mutex_destroy(&m_mutex);
}

ULONG DeckLinkCaptureDelegate::AddRef(void)
{
    pthread_mutex_lock(&m_mutex);
    m_refCount++;
    pthread_mutex_unlock(&m_mutex);

    return (ULONG)m_refCount;
}

ULONG DeckLinkCaptureDelegate::Release(void)
{
    pthread_mutex_lock(&m_mutex);
    m_refCount--;
    pthread_mutex_unlock(&m_mutex);

    if (m_refCount == 0) {
        delete this;
        return 0;
    }

    return (ULONG)m_refCount;
}

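// Called by the DeckLink driver for every captured frame. Depending on the
// current state this saves a single still frame (doCaptureFrame) and/or
// appends the raw video and audio bytes to the open output files; the live
// preview itself is handled separately through the screen preview callback.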
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
    IDeckLinkVideoFrame*                rightEyeFrame = NULL;
    IDeckLinkVideoFrame3DExtensions*    threeDExtensions = NULL;
    void*                               frameBytes;
    void*                               audioFrameBytes;

    // Handle Video Frame
    if (videoFrame) {
        // If 3D mode is enabled we retrieve the 3D extensions interface, which gives
        // us access to the right eye frame by calling GetFrameForRightEye().
        if ((videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
                (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK)) {
            rightEyeFrame = NULL;
        }

        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource) {
            fprintf(stderr, "Frame received (#%lu) - No input signal detected\n", frameCount);
        } else {
            const char *timecodeString = NULL;
            if (g_timecodeFormat != 0) {
                IDeckLinkTimecode *timecode;
                if (videoFrame->GetTimecode(g_timecodeFormat, &timecode) == S_OK) {
                    timecode->GetString(&timecodeString);
                }
            }

            /*fprintf(stderr, "Frame received (#%lu) [%s] - %s - Size: %li bytes\n",
                    frameCount,
                    timecodeString != NULL ? timecodeString : "No timecode",
                    rightEyeFrame != NULL ? "Valid Frame (3D left/right)" : "Valid Frame",
                    videoFrame->GetRowBytes() * videoFrame->GetHeight());*/

            if (timecodeString)
                free((void*)timecodeString);

            if (!doCaptureFrame.isEmpty()) {
                videoFrame->GetBytes(&frameBytes);
                if (doCaptureFrame.endsWith("raw")) {
                    // Save as raw uyvy422 image
                    videoOutputFile = open(doCaptureFrame.toUtf8().constData(), O_WRONLY|O_CREAT/*|O_TRUNC*/, 0664);
                    write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
                    close(videoOutputFile);
                } else {
                    QImage image(videoFrame->GetWidth(), videoFrame->GetHeight(), QImage::Format_ARGB32_Premultiplied);
                    // convert from uyvy422 to rgba
                    yuv2rgb_int((uchar *)frameBytes, (uchar *)image.bits(), videoFrame->GetWidth(), videoFrame->GetHeight());
                    image.save(doCaptureFrame);
                }
                doCaptureFrame.clear();
            }

            if (videoOutputFile != -1) {
                videoFrame->GetBytes(&frameBytes);
                write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());

                if (rightEyeFrame) {
                    rightEyeFrame->GetBytes(&frameBytes);
                    write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
                }
            }
        }

        // Release the per-frame 3D interfaces acquired above so they do not leak.
        if (rightEyeFrame)
            rightEyeFrame->Release();
        if (threeDExtensions)
            threeDExtensions->Release();

        frameCount++;

        if (g_maxFrames > 0 && frameCount >= (unsigned long)g_maxFrames) {
            pthread_cond_signal(&sleepCond);
        }
    }

    // Handle Audio Frame
    if (audioFrame) {
        if (audioOutputFile != -1) {
            audioFrame->GetBytes(&audioFrameBytes);
            write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
        }
    }
    return S_OK;
}

HRESULT DeckLinkCaptureDelegate::VideoInputFormatChanged(BMDVideoInputFormatChangedEvents events, IDeckLinkDisplayMode *mode, BMDDetectedVideoInputFormatFlags)
{
    return S_OK;
}

/*int usage(int status)
{
    HRESULT result;
    IDeckLinkDisplayMode *displayMode;
    int displayModeCount = 0;

    fprintf(stderr,
            "Usage: Capture -m <mode id> [OPTIONS]\n"
            "\n"
            "    -m <mode id>:\n"
    );

    while (displayModeIterator->Next(&displayMode) == S_OK)
    {
        char *displayModeString = NULL;

        result = displayMode->GetName((const char **) &displayModeString);
        if (result == S_OK)
        {
            BMDTimeValue frameRateDuration, frameRateScale;
            displayMode->GetFrameRate(&frameRateDuration, &frameRateScale);

            fprintf(stderr, "        %2d:  %-20s \t %li x %li \t %g FPS\n",
                    displayModeCount, displayModeString, displayMode->GetWidth(), displayMode->GetHeight(), (double)frameRateScale / (double)frameRateDuration);

            free(displayModeString);
            displayModeCount++;
        }

        // Release the IDeckLinkDisplayMode object to prevent a leak
        displayMode->Release();
    }

    fprintf(stderr,
            "    -p <pixelformat>\n"
            "         0:  8 bit YUV (4:2:2) (default)\n"
            "         1:  10 bit YUV (4:2:2)\n"
            "         2:  10 bit RGB (4:4:4)\n"
            "    -t <format>          Print timecode\n"
            "     rp188:  RP 188\n"
            "      vitc:  VITC\n"
            "    serial:  Serial Timecode\n"
            "    -f <filename>        Filename raw video will be written to\n"
            "    -a <filename>        Filename raw audio will be written to\n"
            "    -c <channels>        Audio Channels (2, 8 or 16 - default is 2)\n"
            "    -s <depth>           Audio Sample Depth (16 or 32 - default is 16)\n"
            "    -n <frames>          Number of frames to capture (default is unlimited)\n"
            "    -3                   Capture Stereoscopic 3D (Requires 3D Hardware support)\n"
            "\n"
            "Capture video and/or audio to a file. Raw video and/or audio can be viewed with mplayer eg:\n"
            "\n"
            "    Capture -m2 -n 50 -f video.raw -a audio.raw\n"
            "    mplayer video.raw -demuxer rawvideo -rawvideo pal:uyvy -audiofile audio.raw -audio-demuxer 20 -rawaudio rate=48000\n"
    );

    exit(status);
}
*/

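// CaptureHandler owns the capture pipeline used by kdenlive: the DeckLink
// device/input interfaces, the capture delegate and the OpenGL preview widget
// that is inserted into the supplied layout.
//
// Typical usage (a rough sketch; "layout" and "parent" come from the calling
// dialog and the mode index follows the card's display mode list):
//
//     CaptureHandler handler(layout, parent);
//     handler.startPreview(0, 2);              // first card, display mode 2
//     handler.captureFrame("/tmp/grab.png");   // asynchronous still frame
//     handler.stopPreview();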
CaptureHandler::CaptureHandler(QLayout *lay, QWidget *parent) :
    m_layout(lay)
    , m_parent(parent)
    , previewView(NULL)
    , deckLinkInput(NULL)
    , displayModeIterator(NULL)
    , deckLink(NULL)
    , displayMode(NULL)
    , delegate(NULL)
    , deckLinkIterator(NULL)
{
}

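// Open DeckLink device number 'deviceId', create the preview widget, check
// that the requested capture mode and pixel format are supported, then enable
// video/audio input and start the streams. The option handling kept below in
// comments comes from the Blackmagic SDK's command line Capture sample that
// this code is based on.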
void CaptureHandler::startPreview(int deviceId, int captureMode)
{
    deckLinkIterator = CreateDeckLinkIteratorInstance();
    BMDVideoInputFlags  inputFlags = 0;
    BMDDisplayMode      selectedDisplayMode = bmdModeNTSC;
    BMDPixelFormat      pixelFormat = bmdFormat8BitYUV;
    int                 displayModeCount = 0;
    int                 exitStatus = 1;
    int                 ch;
    bool                foundDisplayMode = false;
    HRESULT             result;

    /*pthread_mutex_init(&sleepMutex, NULL);
    pthread_cond_init(&sleepCond, NULL);*/
    kDebug() << "/// INIT CAPTURE ON DEV: " << deviceId;

    if (!deckLinkIterator) {
        fprintf(stderr, "This application requires the DeckLink drivers installed.\n");
        stopCapture();
        return;
    }

    /* Connect to the selected DeckLink instance */
    for (int i = 0; i < deviceId + 1; i++)
        result = deckLinkIterator->Next(&deckLink);
    if (result != S_OK) {
        fprintf(stderr, "No DeckLink PCI cards found.\n");
        stopCapture();
        return;
    }

    if (deckLink->QueryInterface(IID_IDeckLinkInput, (void**)&deckLinkInput) != S_OK) {
        stopCapture();
        return;
    }

    delegate = new DeckLinkCaptureDelegate();
    deckLinkInput->SetCallback(delegate);

    previewView = new CDeckLinkGLWidget(deckLinkInput, m_parent);
    m_layout->addWidget(previewView);
    //previewView->resize(parent->size());
    previewView->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
    previewView->DrawFrame(NULL);

    // Obtain an IDeckLinkDisplayModeIterator to enumerate the display modes supported on input
    result = deckLinkInput->GetDisplayModeIterator(&displayModeIterator);
    if (result != S_OK) {
        fprintf(stderr, "Could not obtain the video input display mode iterator - result = %08x\n", result);
        stopCapture();
        return;
    }

    g_videoModeIndex = captureMode;
    /*g_audioChannels = 2;
    g_audioSampleDepth = 16;*/

    // Parse command line options
    /*while ((ch = getopt(argc, argv, "?h3c:s:f:a:m:n:p:t:")) != -1)
    {
        switch (ch)
        {
            case 'm':
                g_videoModeIndex = atoi(optarg);
                break;
            case 'c':
                g_audioChannels = atoi(optarg);
                if (g_audioChannels != 2 &&
                        g_audioChannels != 8 &&
                        g_audioChannels != 16)
                {
                    fprintf(stderr, "Invalid argument: Audio Channels must be either 2, 8 or 16\n");
                    stopCapture();
                }
                break;
            case 's':
                g_audioSampleDepth = atoi(optarg);
                if (g_audioSampleDepth != 16 && g_audioSampleDepth != 32)
                {
                    fprintf(stderr, "Invalid argument: Audio Sample Depth must be either 16 bits or 32 bits\n");
                    stopCapture();
                }
                break;
            case 'f':
                g_videoOutputFile = optarg;
                break;
            case 'a':
                g_audioOutputFile = optarg;
                break;
            case 'n':
                g_maxFrames = atoi(optarg);
                break;
            case '3':
                inputFlags |= bmdVideoInputDualStream3D;
                break;
            case 'p':
                switch (atoi(optarg))
                {
                    case 0: pixelFormat = bmdFormat8BitYUV; break;
                    case 1: pixelFormat = bmdFormat10BitYUV; break;
                    case 2: pixelFormat = bmdFormat10BitRGB; break;
                    default:
                        fprintf(stderr, "Invalid argument: Pixel format %d is not valid", atoi(optarg));
                        stopCapture();
                }
                break;
            case 't':
                if (!strcmp(optarg, "rp188"))
                    g_timecodeFormat = bmdTimecodeRP188;
                else if (!strcmp(optarg, "vitc"))
                    g_timecodeFormat = bmdTimecodeVITC;
                else if (!strcmp(optarg, "serial"))
                    g_timecodeFormat = bmdTimecodeSerial;
                else
                {
                    fprintf(stderr, "Invalid argument: Timecode format \"%s\" is invalid\n", optarg);
                    stopCapture();
                }
                break;
            case '?':
            case 'h':
                usage(0);
        }
    }*/

    if (g_videoModeIndex < 0) {
        fprintf(stderr, "No video mode specified\n");
        stopCapture();
        return;
    }
    //g_videoOutputFile="/home/one/bm.raw";
    if (g_videoOutputFile != NULL) {
        videoOutputFile = open(g_videoOutputFile, O_WRONLY|O_CREAT|O_TRUNC, 0664);
        if (videoOutputFile < 0) {
            fprintf(stderr, "Could not open video output file \"%s\"\n", g_videoOutputFile);
            stopCapture();
        }
    }
    if (g_audioOutputFile != NULL) {
        audioOutputFile = open(g_audioOutputFile, O_WRONLY|O_CREAT|O_TRUNC, 0664);
        if (audioOutputFile < 0) {
            fprintf(stderr, "Could not open audio output file \"%s\"\n", g_audioOutputFile);
            stopCapture();
        }
    }

    while (displayModeIterator->Next(&displayMode) == S_OK) {
        if (g_videoModeIndex == displayModeCount) {
            BMDDisplayModeSupport result;
            const char *displayModeName;

            foundDisplayMode = true;
            displayMode->GetName(&displayModeName);
            selectedDisplayMode = displayMode->GetDisplayMode();

            g_aspect_ratio = (double) displayMode->GetWidth() / (double) displayMode->GetHeight();

            deckLinkInput->DoesSupportVideoMode(selectedDisplayMode, pixelFormat, bmdVideoInputFlagDefault, &result, NULL);

            if (result == bmdDisplayModeNotSupported) {
                fprintf(stderr, "The display mode %s is not supported with the selected pixel format\n", displayModeName);
                stopCapture();
                return;
            }

            if (inputFlags & bmdVideoInputDualStream3D) {
                if (!(displayMode->GetFlags() & bmdDisplayModeSupports3D)) {
                    fprintf(stderr, "The display mode %s is not supported with 3D\n", displayModeName);
                    stopCapture();
                    return;
                }
            }

            break;
        }
        displayModeCount++;
        displayMode->Release();
    }

    if (!foundDisplayMode) {
        fprintf(stderr, "Invalid mode %d specified\n", g_videoModeIndex);
        stopCapture();
        return;
    }

    result = deckLinkInput->EnableVideoInput(selectedDisplayMode, pixelFormat, inputFlags);
    if (result != S_OK) {
        fprintf(stderr, "Failed to enable video input. Is another application using the card?\n");
        stopCapture();
        return;
    }

    result = deckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, g_audioSampleDepth, g_audioChannels);
    if (result != S_OK) {
        stopCapture();
        return;
    }
    deckLinkInput->SetScreenPreviewCallback(previewView);
    result = deckLinkInput->StartStreams();
    if (result != S_OK) {
        qDebug() << "/// CAPTURE FAILED....";
    }

    // All Okay.
    exitStatus = 0;

    // Block main thread until signal occurs
    /*pthread_mutex_lock(&sleepMutex);
    pthread_cond_wait(&sleepCond, &sleepMutex);
    pthread_mutex_unlock(&sleepMutex);*/

    /*bail:

    if (videoOutputFile)
        close(videoOutputFile);
    if (audioOutputFile)
        close(audioOutputFile);

    if (displayModeIterator != NULL)
    {
        displayModeIterator->Release();
        displayModeIterator = NULL;
    }

    if (deckLinkInput != NULL)
    {
        deckLinkInput->Release();
        deckLinkInput = NULL;
    }

    if (deckLink != NULL)
    {
        deckLink->Release();
        deckLink = NULL;
    }

    if (deckLinkIterator != NULL)
        deckLinkIterator->Release();
    */
}

CaptureHandler::~CaptureHandler()
{
    stopCapture();
}

void CaptureHandler::startCapture()
{
}

void CaptureHandler::stopCapture()
{
}

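// Request a single still frame. The file name is only stored here; the actual
// grab happens asynchronously in VideoInputFrameArrived() on the next frame
// (raw UYVY dump for *.raw, otherwise converted to RGB and saved via QImage).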
void CaptureHandler::captureFrame(const QString &fname)
{
    doCaptureFrame = fname;
}

void CaptureHandler::showOverlay(QImage img, bool transparent)
{
    previewView->showOverlay(img, transparent);
}

void CaptureHandler::hideOverlay()
{
    previewView->hideOverlay();
}

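// Stop the running streams and release everything acquired in startPreview(),
// in roughly the reverse order: output files first, then the DeckLink
// interfaces, then the preview widget.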
void CaptureHandler::stopPreview()
{
    if (deckLinkInput != NULL) deckLinkInput->StopStreams();
    // The file descriptors start at -1, so only close them if they were really opened
    if (videoOutputFile != -1) {
        close(videoOutputFile);
        videoOutputFile = -1;
    }
    if (audioOutputFile != -1) {
        close(audioOutputFile);
        audioOutputFile = -1;
    }

    if (displayModeIterator != NULL) {
        displayModeIterator->Release();
        displayModeIterator = NULL;
    }

    if (deckLinkInput != NULL) {
        deckLinkInput->Release();
        deckLinkInput = NULL;
    }

    if (deckLink != NULL) {
        deckLink->Release();
        deckLink = NULL;
    }

    if (deckLinkIterator != NULL)
        deckLinkIterator->Release();

    if (previewView != NULL)
        delete previewView;

    /*if (delegate != NULL)
        delete delegate;*/
}