]> git.sesse.net Git - kdenlive/blob - src/blackmagic/capture.cpp
Stop motion widget: create sequence thumbnails in secondary thread so that ui keeps...
[kdenlive] / src / blackmagic / capture.cpp
1 /* -LICENSE-START-
2 ** Copyright (c) 2009 Blackmagic Design
3 **
4 ** Permission is hereby granted, free of charge, to any person or organization
5 ** obtaining a copy of the software and accompanying documentation covered by
6 ** this license (the "Software") to use, reproduce, display, distribute,
7 ** execute, and transmit the Software, and to prepare derivative works of the
8 ** Software, and to permit third-parties to whom the Software is furnished to
9 ** do so, all subject to the following:
10 **
11 ** The copyright notices in the Software and this entire statement, including
12 ** the above license grant, this restriction and the following disclaimer,
13 ** must be included in all copies of the Software, in whole or in part, and
14 ** all derivative works of the Software, unless such copies or derivative
15 ** works are solely in the form of machine-executable object code generated by
16 ** a source language processor.
17 **
18 ** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 ** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 ** FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
21 ** SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
22 ** FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
23 ** ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
24 ** DEALINGS IN THE SOFTWARE.
25 ** -LICENSE-END-
26 */
27
28 #include <stdio.h>
29 #include <stdlib.h>
30 #include <string.h>
31 #include <pthread.h>
32 #include <unistd.h>
33 #include <fcntl.h>
34
35 #include <QGLWidget>
36 #include <QDebug>
37 #include <QImage>
38 #include <QMutex>
39 #include <QPaintEvent>
40
41 #include <QtOpenGL>
42
43 #ifndef GL_TEXTURE_RECTANGLE_EXT
44 #define GL_TEXTURE_RECTANGLE_EXT GL_TEXTURE_RECTANGLE_NV
45 #endif
46
47 #include <KDebug>
48
49 #include "capture.h"
50 #include "kdenlivesettings.h"
51
// Condition used by the (commented-out) SDK-sample main loop to block until
// capture finishes; still signalled from VideoInputFrameArrived() when
// g_maxFrames is reached.
pthread_mutex_t                 sleepMutex;
pthread_cond_t                  sleepCond;
// Raw capture output file descriptors; -1 means "not recording".
int                             videoOutputFile = -1;
int                             audioOutputFile = -1;

static BMDTimecodeFormat        g_timecodeFormat = 0;       // 0 = do not query timecodes
static int                      g_videoModeIndex = -1;      // index into the display-mode iterator; set in startPreview()
static int                      g_audioChannels = 2;
static int                      g_audioSampleDepth = 16;    // bits per audio sample
const char *                    g_videoOutputFile = NULL;   // raw video dump path, NULL = disabled
const char *                    g_audioOutputFile = NULL;   // raw audio dump path, NULL = disabled
static int                      g_maxFrames = -1;           // stop after this many frames; <= 0 = unlimited
static QString                  doCaptureFrame;             // non-empty = one-shot frame grab target (see captureFrame())
static double                   g_aspect_ratio = 16.0 / 9.0; // aspect of the selected mode, used for preview letterboxing

static unsigned long            frameCount = 0;             // frames received since startPreview()
68
/* Clamp one color component to the displayable 0..255 range. */
static inline unsigned char clamp_component(int value)
{
    if (value < 0) return 0;
    if (value > 255) return 255;
    return (unsigned char)value;
}

/* Convert one (Y, U, V) sample to a 32-bit pixel stored as B,G,R,255 in
 * memory (QImage ARGB32 byte order on little-endian hosts), using the
 * integer approximation of the ITU-R BT.601 equations:
 *   r = 1.164*(Y-16) + 1.596*(V-128)
 *   g = 1.164*(Y-16) - 0.813*(V-128) - 0.391*(U-128)
 *   b = 1.164*(Y-16) + 2.018*(U-128)
 */
static inline void store_bgra_pixel(unsigned char *dst, int Y, int U, int V)
{
    const int r = ((298 * (Y - 16)                   + 409 * (V - 128) + 128) >> 8);
    const int g = ((298 * (Y - 16) - 100 * (U - 128) - 208 * (V - 128) + 128) >> 8);
    const int b = ((298 * (Y - 16) + 516 * (U - 128)                   + 128) >> 8);

    dst[0] = clamp_component(b);
    dst[1] = clamp_component(g);
    dst[2] = clamp_component(r);
    dst[3] = 255; /* fully opaque */
}

/**
 * Convert a packed UYVY 4:2:2 buffer to 32-bit BGRA.
 *
 * @param yuv_buffer input, width*height*2 bytes of U0 Y0 V0 Y1 macropixels
 * @param rgb_buffer output, width*height*4 bytes, written as B,G,R,255
 * @param width      frame width in pixels (must be even)
 * @param height     frame height in pixels
 */
void yuv2rgb_int(unsigned char *yuv_buffer, unsigned char *rgb_buffer, int width, int height)
{
    /* One UYVY macropixel (4 bytes) yields 2 output pixels that share
     * the same chroma pair. */
    const int macropixels = width * height / 2;
    int y_ptr = 0;
    int rgb_ptr = 0;

    for (int t = 0; t < macropixels; t++) {
        const int U  = yuv_buffer[y_ptr];
        const int Y  = yuv_buffer[y_ptr + 1];
        const int V  = yuv_buffer[y_ptr + 2];
        const int Y2 = yuv_buffer[y_ptr + 3];
        y_ptr += 4;

        store_bgra_pixel(rgb_buffer + rgb_ptr, Y, U, V);
        rgb_ptr += 4;
        store_bgra_pixel(rgb_buffer + rgb_ptr, Y2, U, V);
        rgb_ptr += 4;
    }
}
143
144
/**
 * OpenGL preview widget fed by the DeckLink screen-preview helper.
 * Implements IDeckLinkScreenPreviewCallback so the capture driver can push
 * frames to it; an optional QImage overlay can be drawn on top of the video.
 */
class CDeckLinkGLWidget : public QGLWidget, public IDeckLinkScreenPreviewCallback
{
private:
        QAtomicInt refCount;            // COM-style reference count (see AddRef()/Release())
        QMutex mutex;                   // serialises GL state between paintGL()/resizeGL()/initializeGL()
        IDeckLinkInput* deckLinkIn;     // capture input this widget previews (not owned)
        IDeckLinkGLScreenPreviewHelper* deckLinkScreenPreviewHelper; // SDK helper that renders frames
        IDeckLinkVideoFrame* m_frame;   // NOTE(review): only referenced by commented-out paintEvent() code
        QColor m_backgroundColor;       // letterbox/background colour
        GLuint m_texture;               // NOTE(review): only referenced by commented-out texture code
        QImage m_img;                   // overlay image; null when no overlay is shown
        double m_zx;                    // overlay pixel zoom factor, x
        double m_zy;                    // overlay pixel zoom factor, y
        int m_pictureWidth;             // letterboxed video area inside the widget
        int m_pictureHeight;
        bool m_transparentOverlay;      // alpha-blend the overlay instead of drawing it opaque

public:
        CDeckLinkGLWidget(IDeckLinkInput* deckLinkInput, QWidget* parent);
        // IDeckLinkScreenPreviewCallback
        virtual HRESULT QueryInterface(REFIID iid, LPVOID *ppv);
        virtual ULONG AddRef();
        virtual ULONG Release();
        virtual HRESULT DrawFrame(IDeckLinkVideoFrame* theFrame);
        // Display img stretched over the video; transparent selects blending.
        void showOverlay(QImage img, bool transparent);
        void hideOverlay();

protected:
        void initializeGL();
        void paintGL();
        void resizeGL(int width, int height);
};
180
181 CDeckLinkGLWidget::CDeckLinkGLWidget(IDeckLinkInput* deckLinkInput, QWidget* parent) : QGLWidget(/*QGLFormat(QGL::HasOverlay | QGL::AlphaChannel),*/ parent)
182     , m_backgroundColor(KdenliveSettings::window_background())
183     , m_zx(1.0)
184     , m_zy(1.0)
185     , m_transparentOverlay(true)
186 {
187         refCount = 1;
188         deckLinkIn = deckLinkInput;
189         deckLinkScreenPreviewHelper = CreateOpenGLScreenPreviewHelper();
190 }
191
192 void CDeckLinkGLWidget::showOverlay(QImage img, bool transparent)
193 {
194     m_transparentOverlay = transparent;
195     m_img = convertToGLFormat(img);
196     m_zx = (double)m_pictureWidth / m_img.width();
197     m_zy = (double)m_pictureHeight / m_img.height();
198     if (m_transparentOverlay) {
199         glEnable(GL_BLEND);
200         glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_COLOR);
201     }
202     else {
203       glDisable(GL_BLEND);
204     }
205 }
206
207 void CDeckLinkGLWidget::hideOverlay()
208 {
209     m_img = QImage();
210     glDisable(GL_BLEND);
211 }
212
213 void    CDeckLinkGLWidget::initializeGL ()
214 {
215         if (deckLinkScreenPreviewHelper != NULL)
216         {
217                 mutex.lock();
218                         deckLinkScreenPreviewHelper->InitializeGL();
219                         glShadeModel(GL_FLAT);
220                         glDisable(GL_DEPTH_TEST);
221                         glDisable(GL_CULL_FACE);
222                         glDisable(GL_LIGHTING);
223                         glDisable(GL_DITHER);
224                         glDisable(GL_BLEND);
225
226                          //Documents/images/alpha2.png");//
227                         //m_texture = bindTexture(convertToGLFormat(img), GL_TEXTURE_RECTANGLE_EXT, GL_RGBA8, QGLContext::LinearFilteringBindOption);
228                 mutex.unlock();
229         }
230 }
231
232 /*void CDeckLinkGLWidget::initializeOverlayGL ()
233 {
234   glDisable(GL_BLEND);
235   glEnable(GL_TEXTURE_RECTANGLE_EXT);
236   
237 }
238
239 void    CDeckLinkGLWidget::paintOverlayGL()
240 {
241         makeOverlayCurrent();
242         glEnable(GL_BLEND);
243         //glClearDepth(0.5f);
244         //glPixelTransferf(GL_ALPHA_SCALE, 10);
245         //glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
246         
247   
248 }*/
249
/**
 * Paint the current video frame (via the DeckLink helper) and, when set,
 * the overlay image on top of it.
 */
void    CDeckLinkGLWidget::paintGL ()
{
        // Serialise against resizeGL()/initializeGL(); frames arrive on the
        // capture thread through DrawFrame().
        mutex.lock();
                glLoadIdentity();
                qglClearColor(m_backgroundColor);
                glClear(GL_COLOR_BUFFER_BIT);
                // Draws the last frame handed to SetFrame() in DrawFrame().
                deckLinkScreenPreviewHelper->PaintGL();
                if (!m_img.isNull()) {
                    // Overlay is drawn at the raster position set in
                    // resizeGL(), zoomed to cover the video picture area.
                    glPixelZoom(m_zx, m_zy);
                    glDrawPixels(m_img.width(), m_img.height(), GL_RGBA, GL_UNSIGNED_BYTE, m_img.bits());
                }
        mutex.unlock();
}
264 /*
265 void CDeckLinkGLWidget::paintEvent(QPaintEvent *event)
266 {
267     mutex.lock();
268     QPainter p(this);
269     QRect r = event->rect();
270     p.setClipRect(r);
271     void *frameBytes;
272     m_frame->GetBytes(&frameBytes);
273     QImage img((uchar*)frameBytes, m_frame->GetWidth(), m_frame->GetHeight(), QImage::Format_ARGB32);//m_frame->GetPixelFormat());
274     QRectF re(0, 0, width(), height());
275     p.drawImage(re, img);
276     p.end();
277     mutex.unlock();
278 }*/
279
280 void    CDeckLinkGLWidget::resizeGL (int width, int height)
281 {
282         mutex.lock();
283         m_pictureHeight = height;
284         m_pictureWidth = width;
285         int calculatedWidth = g_aspect_ratio * height;
286         if (calculatedWidth > width) m_pictureHeight = width / g_aspect_ratio;
287         else {
288             int calculatedHeight = width / g_aspect_ratio;
289             if (calculatedHeight > height) m_pictureWidth = height * g_aspect_ratio;
290         }
291         glViewport((width - m_pictureWidth) / 2, (height - m_pictureHeight) / 2, m_pictureWidth, m_pictureHeight);
292         glMatrixMode(GL_PROJECTION);
293         glLoadIdentity();
294         glOrtho(-1.0, 1.0, -1.0, 1.0, -1.0, 1.0);
295         glMatrixMode(GL_MODELVIEW);
296         glRasterPos2i(-1, -1);
297         if (!m_img.isNull()) {
298             m_zx = (double)m_pictureWidth / m_img.width();
299             m_zy = (double)m_pictureHeight / m_img.height();
300         }
301
302         mutex.unlock();
303 }
304
305 /*void CDeckLinkGLWidget::resizeOverlayGL ( int width, int height )
306 {
307   int newwidth = width;
308         int newheight = height;
309         int calculatedWidth = g_aspect_ratio * height;
310         if (calculatedWidth > width) newheight = width / g_aspect_ratio;
311         else {
312             int calculatedHeight = width / g_aspect_ratio;
313             if (calculatedHeight > height) newwidth = height * g_aspect_ratio;
314         }
315         glViewport((width - newwidth) / 2, (height - newheight) / 2, newwidth, newheight);
316         glMatrixMode(GL_PROJECTION);
317         glLoadIdentity();
318         glOrtho(0, width, 0, height, -1.0, 1.0);
319         glMatrixMode(GL_MODELVIEW);
320         updateOverlayGL ();
321 }*/
322
// The preview callback exposes no additional COM interfaces.
HRESULT         CDeckLinkGLWidget::QueryInterface (REFIID iid, LPVOID *ppv)
{
        Q_UNUSED(iid);
        *ppv = NULL;
        return E_NOINTERFACE;
}
329
330 ULONG           CDeckLinkGLWidget::AddRef ()
331 {
332         int             oldValue;
333
334         oldValue = refCount.fetchAndAddAcquire(1);
335         return (ULONG)(oldValue + 1);
336 }
337
338 ULONG           CDeckLinkGLWidget::Release ()
339 {
340         int             oldValue;
341
342         oldValue = refCount.fetchAndAddAcquire(-1);
343         if (oldValue == 1)
344         {
345                 delete this;
346         }
347
348         return (ULONG)(oldValue - 1);
349 }
350
351 HRESULT         CDeckLinkGLWidget::DrawFrame (IDeckLinkVideoFrame* theFrame)
352 {
353         if (deckLinkScreenPreviewHelper != NULL && theFrame != NULL)
354         {
355                 /*mutex.lock();
356                 m_frame = theFrame;
357                 mutex.unlock();*/
358                 deckLinkScreenPreviewHelper->SetFrame(theFrame);
359                 update();
360         }
361         return S_OK;
362 }
363
364
// The delegate starts with a zero reference count; the DeckLink input takes
// its references when it is installed via SetCallback().
DeckLinkCaptureDelegate::DeckLinkCaptureDelegate() : m_refCount(0)
{
        // m_mutex protects m_refCount in AddRef()/Release().
        pthread_mutex_init(&m_mutex, NULL);
}
369
DeckLinkCaptureDelegate::~DeckLinkCaptureDelegate()
{
        pthread_mutex_destroy(&m_mutex);
}
374
375 ULONG DeckLinkCaptureDelegate::AddRef(void)
376 {
377         pthread_mutex_lock(&m_mutex);
378                 m_refCount++;
379         pthread_mutex_unlock(&m_mutex);
380
381         return (ULONG)m_refCount;
382 }
383
384 ULONG DeckLinkCaptureDelegate::Release(void)
385 {
386         pthread_mutex_lock(&m_mutex);
387                 m_refCount--;
388         pthread_mutex_unlock(&m_mutex);
389
390         if (m_refCount == 0)
391         {
392                 delete this;
393                 return 0;
394         }
395
396         return (ULONG)m_refCount;
397 }
398
399 HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
400 {
401         IDeckLinkVideoFrame*                    rightEyeFrame = NULL;
402         IDeckLinkVideoFrame3DExtensions*        threeDExtensions = NULL;
403         void*                                   frameBytes;
404         void*                                   audioFrameBytes;
405
406         // Handle Video Frame
407         if(videoFrame)
408         {
409                 // If 3D mode is enabled we retreive the 3D extensions interface which gives.
410                 // us access to the right eye frame by calling GetFrameForRightEye() .
411                 if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
412                         (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
413                 {
414                         rightEyeFrame = NULL;
415                 }
416
417                 if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
418                 {
419                         fprintf(stderr, "Frame received (#%lu) - No input signal detected\n", frameCount);
420                 }
421                 else
422                 {
423                         const char *timecodeString = NULL;
424                         if (g_timecodeFormat != 0)
425                         {
426                                 IDeckLinkTimecode *timecode;
427                                 if (videoFrame->GetTimecode(g_timecodeFormat, &timecode) == S_OK)
428                                 {
429                                         timecode->GetString(&timecodeString);
430                                 }
431                         }
432
433                         /*fprintf(stderr, "Frame received (#%lu) [%s] - %s - Size: %li bytes\n",
434                                 frameCount,
435                                 timecodeString != NULL ? timecodeString : "No timecode",
436                                 rightEyeFrame != NULL ? "Valid Frame (3D left/right)" : "Valid Frame",
437                                 videoFrame->GetRowBytes() * videoFrame->GetHeight());*/
438
439                         if (timecodeString)
440                                 free((void*)timecodeString);
441
442                         if (!doCaptureFrame.isEmpty()) {
443                             videoFrame->GetBytes(&frameBytes);
444                             if (doCaptureFrame.endsWith("raw")) {
445                                 // Save as raw uyvy422 imgage
446                                 videoOutputFile = open(doCaptureFrame.toUtf8().constData(), O_WRONLY|O_CREAT/*|O_TRUNC*/, 0664);
447                                 write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
448                                 close(videoOutputFile);
449                             }
450                             else {
451                                 QImage image(videoFrame->GetWidth(), videoFrame->GetHeight(), QImage::Format_ARGB32_Premultiplied);
452                                 //convert from uyvy422 to rgba
453                                 yuv2rgb_int((uchar *)frameBytes, (uchar *)image.bits(), videoFrame->GetWidth(), videoFrame->GetHeight());
454                                 image.save(doCaptureFrame);
455                             }
456                             doCaptureFrame.clear();
457                         }
458
459                         if (videoOutputFile != -1)
460                         {
461                                 videoFrame->GetBytes(&frameBytes);
462                                 write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
463
464                                 if (rightEyeFrame)
465                                 {
466                                         rightEyeFrame->GetBytes(&frameBytes);
467                                         write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
468                                 }
469                         }
470                 }
471                 frameCount++;
472
473                 if (g_maxFrames > 0 && frameCount >= g_maxFrames)
474                 {
475                         pthread_cond_signal(&sleepCond);
476                 }
477         }
478
479         // Handle Audio Frame
480         if (audioFrame)
481         {
482                 if (audioOutputFile != -1)
483                 {
484                         audioFrame->GetBytes(&audioFrameBytes);
485                         write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
486                 }
487         }
488     return S_OK;
489 }
490
// Called by the driver when the detected input format changes; this
// implementation ignores the notification and keeps the configured mode.
HRESULT DeckLinkCaptureDelegate::VideoInputFormatChanged(BMDVideoInputFormatChangedEvents events, IDeckLinkDisplayMode *mode, BMDDetectedVideoInputFormatFlags)
{
    Q_UNUSED(events);
    Q_UNUSED(mode);
    return S_OK;
}
497
498 /*int usage(int status)
499 {
500         HRESULT result;
501         IDeckLinkDisplayMode *displayMode;
502         int displayModeCount = 0;
503
504         fprintf(stderr,
505                 "Usage: Capture -m <mode id> [OPTIONS]\n"
506                 "\n"
507                 "    -m <mode id>:\n"
508         );
509
510     while (displayModeIterator->Next(&displayMode) == S_OK)
511     {
512         char *          displayModeString = NULL;
513
514         result = displayMode->GetName((const char **) &displayModeString);
515         if (result == S_OK)
516         {
517                         BMDTimeValue frameRateDuration, frameRateScale;
518             displayMode->GetFrameRate(&frameRateDuration, &frameRateScale);
519
520                         fprintf(stderr, "        %2d:  %-20s \t %li x %li \t %g FPS\n",
521                                 displayModeCount, displayModeString, displayMode->GetWidth(), displayMode->GetHeight(), (double)frameRateScale / (double)frameRateDuration);
522
523             free(displayModeString);
524                         displayModeCount++;
525         }
526
527         // Release the IDeckLinkDisplayMode object to prevent a leak
528         displayMode->Release();
529     }
530
531         fprintf(stderr,
532                 "    -p <pixelformat>\n"
533                 "         0:  8 bit YUV (4:2:2) (default)\n"
534                 "         1:  10 bit YUV (4:2:2)\n"
535                 "         2:  10 bit RGB (4:4:4)\n"
536                 "    -t <format>          Print timecode\n"
537                 "     rp188:  RP 188\n"
538                 "      vitc:  VITC\n"
539                 "    serial:  Serial Timecode\n"
540                 "    -f <filename>        Filename raw video will be written to\n"
541                 "    -a <filename>        Filename raw audio will be written to\n"
542                 "    -c <channels>        Audio Channels (2, 8 or 16 - default is 2)\n"
543                 "    -s <depth>           Audio Sample Depth (16 or 32 - default is 16)\n"
544                 "    -n <frames>          Number of frames to capture (default is unlimited)\n"
545                 "    -3                   Capture Stereoscopic 3D (Requires 3D Hardware support)\n"
546                 "\n"
547                 "Capture video and/or audio to a file. Raw video and/or audio can be viewed with mplayer eg:\n"
548                 "\n"
549                 "    Capture -m2 -n 50 -f video.raw -a audio.raw\n"
550                 "    mplayer video.raw -demuxer rawvideo -rawvideo pal:uyvy -audiofile audio.raw -audio-demuxer 20 -rawaudio rate=48000\n"
551         );
552
553         exit(status);
554 }
555 */
556
557
558
559
/**
 * @param lay    layout that will receive the embedded preview widget
 * @param parent parent widget for the preview
 *
 * All DeckLink interface pointers start out NULL; they are created in
 * startPreview() and released in stopPreview().
 */
CaptureHandler::CaptureHandler(QVBoxLayout *lay, QWidget *parent):
    previewView(NULL),
    deckLinkIterator(NULL),
    delegate(NULL),
    displayMode(NULL),
    deckLink(NULL),
    deckLinkInput(NULL),
    displayModeIterator(NULL),
    m_layout(lay),
    m_parent(parent)
{
}
572
/**
 * Open DeckLink device @p deviceId, select the display mode at index
 * @p captureMode, embed a GL preview widget into m_layout and start the
 * input streams. On any failure the function prints a diagnostic,
 * calls stopCapture() and returns.
 *
 * NOTE(review): stopCapture() is currently an empty stub, so the error
 * paths do not actually release the partially-created interfaces —
 * confirm teardown is meant to happen via stopPreview() only.
 */
void CaptureHandler::startPreview(int deviceId, int captureMode)
{
        deckLinkIterator = CreateDeckLinkIteratorInstance();
        BMDVideoInputFlags                      inputFlags = 0;
        BMDDisplayMode                          selectedDisplayMode = bmdModeNTSC;
        BMDPixelFormat                          pixelFormat = bmdFormat8BitYUV;
        int                                     displayModeCount = 0;
        int                                     exitStatus = 1;  // NOTE(review): set but never read (kept from the SDK sample)
        bool                                    foundDisplayMode = false;
        HRESULT                                 result = 1;

        kDebug()<<"/// INIT CAPTURE ON DEV: "<<deviceId;

        if (!deckLinkIterator)
        {
                fprintf(stderr, "This application requires the DeckLink drivers installed.\n");
                stopCapture();
                return;
        }

        /* Connect to selected DeckLink instance */
        // Advance the iterator deviceId+1 times so deckLink ends up on the
        // requested card; result holds the status of the last Next() call.
        for (int i = 0; i < deviceId + 1; i++)
            result = deckLinkIterator->Next(&deckLink);
        if (result != S_OK)
        {
                fprintf(stderr, "No DeckLink PCI cards found.\n");
                stopCapture();
                return;
        }

        if (deckLink->QueryInterface(IID_IDeckLinkInput, (void**)&deckLinkInput) != S_OK)
        {
            stopCapture();
            return;
        }

        // The delegate receives video/audio frames from the capture thread.
        delegate = new DeckLinkCaptureDelegate();
        deckLinkInput->SetCallback(delegate);

        // Embed the OpenGL preview widget into the supplied layout.
        previewView = new CDeckLinkGLWidget(deckLinkInput, m_parent);
        m_layout->addWidget(previewView);
        previewView->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
        previewView->DrawFrame(NULL);

        // Obtain an IDeckLinkDisplayModeIterator to enumerate the display modes supported on output
        result = deckLinkInput->GetDisplayModeIterator(&displayModeIterator);
        if (result != S_OK)
        {
                fprintf(stderr, "Could not obtain the video output display mode iterator - result = %08x\n", result);
                stopCapture();
                return;
        }

        g_videoModeIndex = captureMode;

        // (The original SDK sample parsed the g_* capture settings from
        // getopt command-line options at this point; in kdenlive they are
        // configured through the UI instead.)

        if (g_videoModeIndex < 0)
        {
                fprintf(stderr, "No video mode specified\n");
                stopCapture();
                return;
        }

        // Optional raw video/audio dump files (disabled unless the g_* paths are set).
        if (g_videoOutputFile != NULL)
        {
                videoOutputFile = open(g_videoOutputFile, O_WRONLY|O_CREAT|O_TRUNC, 0664);
                if (videoOutputFile < 0)
                {
                        fprintf(stderr, "Could not open video output file \"%s\"\n", g_videoOutputFile);
                        stopCapture();
                }
        }
        if (g_audioOutputFile != NULL)
        {
                audioOutputFile = open(g_audioOutputFile, O_WRONLY|O_CREAT|O_TRUNC, 0664);
                if (audioOutputFile < 0)
                {
                        fprintf(stderr, "Could not open audio output file \"%s\"\n", g_audioOutputFile);
                        stopCapture();
                }
        }

        // Walk the supported display modes until index g_videoModeIndex.
        while (displayModeIterator->Next(&displayMode) == S_OK)
        {
                if (g_videoModeIndex == displayModeCount)
                {
                        BMDDisplayModeSupport result;  // NOTE(review): shadows the outer HRESULT 'result'
                        const char *displayModeName;

                        foundDisplayMode = true;
                        displayMode->GetName(&displayModeName);
                        selectedDisplayMode = displayMode->GetDisplayMode();

                        // Remember the mode's aspect ratio for preview letterboxing.
                        g_aspect_ratio = (double) displayMode->GetWidth() / (double) displayMode->GetHeight();

                        deckLinkInput->DoesSupportVideoMode(selectedDisplayMode, pixelFormat, bmdVideoInputFlagDefault, &result, NULL);

                        if (result == bmdDisplayModeNotSupported)
                        {
                                fprintf(stderr, "The display mode %s is not supported with the selected pixel format\n", displayModeName);
                                stopCapture();
                                return;
                        }

                        if (inputFlags & bmdVideoInputDualStream3D)
                        {
                                if (!(displayMode->GetFlags() & bmdDisplayModeSupports3D))
                                {
                                        fprintf(stderr, "The display mode %s is not supported with 3D\n", displayModeName);
                                        stopCapture();
                                        return;
                                }
                        }

                        break;
                }
                displayModeCount++;
                displayMode->Release();
        }

        if (!foundDisplayMode)
        {
                fprintf(stderr, "Invalid mode %d specified\n", g_videoModeIndex);
                stopCapture();
                return;
        }

    result = deckLinkInput->EnableVideoInput(selectedDisplayMode, pixelFormat, inputFlags);
    if(result != S_OK)
    {
                fprintf(stderr, "Failed to enable video input. Is another application using the card?\n");
                stopCapture();
                return;
    }

    result = deckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, g_audioSampleDepth, g_audioChannels);
    if(result != S_OK)
    {
        stopCapture();
        return;
    }
    // Frames are pushed to the GL widget through the screen-preview callback.
    deckLinkInput->SetScreenPreviewCallback(previewView);
    result = deckLinkInput->StartStreams();
    if(result != S_OK)
    {
        qDebug()<<"/// CAPTURE FAILED....";
    }

        // All Okay.
        exitStatus = 0;

        // (The SDK sample blocked here on sleepCond and then released all
        // interfaces; in kdenlive teardown happens in stopPreview() instead.)
}
831
CaptureHandler::~CaptureHandler()
{
    // NOTE(review): stopCapture() is currently an empty stub; the actual
    // interface teardown lives in stopPreview() — confirm whether the
    // destructor should call stopPreview() instead.
    stopCapture();
}
836
// Stub: continuous capture-to-file is not implemented here yet.
void CaptureHandler::startCapture()
{
}
840
// Stub: invoked as error-path cleanup from startPreview() and from the
// destructor, but currently performs no teardown (see stopPreview()).
void CaptureHandler::stopCapture()
{
}
844
// Arm a one-shot frame grab: the next VideoInputFrameArrived() writes the
// frame to fname (raw UYVY when the name ends in "raw", otherwise via
// QImage::save()) and then clears the request.
void CaptureHandler::captureFrame(const QString &fname)
{
    doCaptureFrame = fname;
}
849
850 void CaptureHandler::showOverlay(QImage img, bool transparent)
851 {
852     if (previewView) previewView->showOverlay(img, transparent);
853 }
854
855 void CaptureHandler::hideOverlay()
856 {
857     if (previewView) previewView->hideOverlay();
858 }
859
860 void CaptureHandler::hidePreview(bool hide)
861 {
862     if (previewView) previewView->setHidden(hide);
863 }
864
865 void CaptureHandler::stopPreview()
866 {
867     if (!previewView) return;
868       if (deckLinkInput != NULL) deckLinkInput->StopStreams();
869       if (videoOutputFile)
870                 close(videoOutputFile);
871         if (audioOutputFile)
872                 close(audioOutputFile);
873         
874         if (displayModeIterator != NULL)
875         {
876                 displayModeIterator->Release();
877                 displayModeIterator = NULL;
878         }
879
880     if (deckLinkInput != NULL)
881     {
882         deckLinkInput->Release();
883         deckLinkInput = NULL;
884     }
885
886     if (deckLink != NULL)
887     {
888         deckLink->Release();
889         deckLink = NULL;
890     }
891
892     if (deckLinkIterator != NULL) {
893         deckLinkIterator->Release();
894         deckLinkIterator = NULL;
895     }
896
897     if (previewView != NULL) {
898         delete previewView;
899         previewView = NULL;
900     }
901
902     /*if (delegate != NULL)
903         delete delegate;*/
904         
905 }