// main.cpp (Futatabi, from the Nageru project) — decode 4:2:2 JPEGs via VA-API if available.
#include <assert.h>
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#include <chrono>
#include <condition_variable>
#include <memory>
#include <mutex>
#include <string>
#include <thread>
#include <vector>
12
13 extern "C" {
14 #include <libavformat/avformat.h>
15 }
16
17 #include <QApplication>
18
19 #include <movit/init.h>
20 #include <movit/util.h>
21
22 #include "clip_list.h"
23 #include "context.h"
24 #include "defs.h"
25 #include "mainwindow.h"
26 #include "ffmpeg_raii.h"
27 #include "httpd.h"
28 #include "player.h"
29 #include "post_to_main_thread.h"
30 #include "ref_counted_gl_sync.h"
31 #include "timebase.h"
32 #include "ui_mainwindow.h"
33 #include "vaapi_jpeg_decoder.h"
34
35 using namespace std;
36 using namespace std::chrono;
37
// Out-of-line definition of the static mutex declared in RefCountedGLsync
// (see ref_counted_gl_sync.h).
std::mutex RefCountedGLsync::fence_lock;

// TODO: Replace by some sort of GUI control, I guess.
// pts (in TIMEBASE units) of the most recently ingested frame; written by
// the record thread. NOTE(review): read cross-thread without synchronization.
int64_t current_pts = 0;
42
// Builds the on-disk path for a stored JPEG frame from the camera (stream)
// index and its pts in TIMEBASE units. The pts is zero-padded to nine digits
// so that filenames sort chronologically.
//
// Fixed format specifiers: %d with an unsigned argument and %09ld with an
// int64_t argument are undefined behavior ("long" is 32-bit on e.g. Win64
// and 32-bit Linux); use %u and PRId64 instead.
std::string filename_for_frame(unsigned stream_idx, int64_t pts)
{
	char filename[256];
	snprintf(filename, sizeof(filename), "frames/cam%u-pts%09" PRId64 ".jpeg", stream_idx, pts);
	return filename;
}
49
// Protects <frames>. NOTE(review): declared for that purpose, but
// record_thread_func currently pushes into <frames> without taking it —
// confirm all accessors lock this.
mutex frame_mu;
// For each input stream, the pts (TIMEBASE units) of every frame written
// to disk so far, in arrival order.
vector<int64_t> frames[MAX_STREAMS];
// Global HTTP server; created and started in main(), never torn down.
HTTPD *global_httpd;

// Ingest loop run on a detached background thread; defined below.
int record_thread_func();
55
// Entry point: starts the HTTP server, configures and brings up the Qt UI
// (initialization order below is significant), initializes Movit and the
// VA-API JPEG decoder, and spawns the background ingest thread.
int main(int argc, char **argv)
{
	avformat_network_init();
	global_httpd = new HTTPD;
	global_httpd->start(DEFAULT_HTTPD_PORT);

	// Must be set before the QApplication is constructed.
	QCoreApplication::setAttribute(Qt::AA_ShareOpenGLContexts, true);

	// Request a core-profile GL 4.5 context with no depth/stencil;
	// must be registered as default before any GL widget is created.
	QSurfaceFormat fmt;
	fmt.setDepthBufferSize(0);
	fmt.setStencilBufferSize(0);
	fmt.setProfile(QSurfaceFormat::CoreProfile);
	fmt.setMajorVersion(4);
	fmt.setMinorVersion(5);

	// Turn off vsync, since Qt generally gives us at most frame rate
	// (display frequency) / (number of QGLWidgets active).
	fmt.setSwapInterval(0);

	QSurfaceFormat::setDefaultFormat(fmt);

	QGLFormat::setDefaultFormat(QGLFormat::fromSurfaceFormat(fmt));

	QApplication app(argc, argv);
	global_share_widget = new QGLWidget();
	if (!global_share_widget->isValid()) {
		fprintf(stderr, "Failed to initialize OpenGL. Futatabi needs at least OpenGL 4.5 to function properly.\n");
		exit(1);
	}

	// Initialize Movit on a throwaway context sharing with the widget above.
	{
		QSurface *surface = create_surface();
		QOpenGLContext *context = create_context(surface);
		make_current(context, surface);
		CHECK(movit::init_movit(MOVIT_SHADER_DIR, movit::MOVIT_DEBUG_OFF));
		delete_context(context);
		// TODO: Delete the surface, too.
	}

	MainWindow mainWindow;
	mainWindow.show();

	// NOTE(review): detached thread is never joined, so it can still be
	// touching globals while they are destroyed at process exit.
	thread(record_thread_func).detach();

	init_jpeg_vaapi();

	// Enter the Qt event loop; returns when the application quits.
	return app.exec();
}
105
106 int record_thread_func()
107 {
108         auto format_ctx = avformat_open_input_unique("multiangle.mp4", nullptr, nullptr);
109         if (format_ctx == nullptr) {
110                 fprintf(stderr, "%s: Error opening file\n", "example.mp4");
111                 return 1;
112         }
113
114         int64_t last_pts = -1;
115
116         for ( ;; ) {
117                 AVPacket pkt;
118                 unique_ptr<AVPacket, decltype(av_packet_unref)*> pkt_cleanup(
119                         &pkt, av_packet_unref);
120                 av_init_packet(&pkt);
121                 pkt.data = nullptr;
122                 pkt.size = 0;
123                 if (av_read_frame(format_ctx.get(), &pkt) != 0) {
124                         break;
125                 }
126
127                 // Convert pts to our own timebase.
128                 // TODO: Figure out offsets, too.
129                 AVRational stream_timebase = format_ctx->streams[pkt.stream_index]->time_base;
130                 pkt.pts = av_rescale_q(pkt.pts, stream_timebase, AVRational{ 1, TIMEBASE });
131
132                 //fprintf(stderr, "Got a frame from camera %d, pts = %ld, size = %d\n",
133                 //      pkt.stream_index, pkt.pts, pkt.size);
134                 string filename = filename_for_frame(pkt.stream_index, pkt.pts);
135                 FILE *fp = fopen(filename.c_str(), "wb");
136                 if (fp == nullptr) {
137                         perror(filename.c_str());
138                         exit(1);
139                 }
140                 fwrite(pkt.data, pkt.size, 1, fp);
141                 fclose(fp);
142
143                 post_to_main_thread([pkt] {
144                         if (pkt.stream_index == 0) {
145                                 global_mainwindow->ui->input1_display->setFrame(pkt.stream_index, pkt.pts, /*interpolated=*/false);
146                         } else if (pkt.stream_index == 1) {
147                                 global_mainwindow->ui->input2_display->setFrame(pkt.stream_index, pkt.pts, /*interpolated=*/false);
148                         } else if (pkt.stream_index == 2) {
149                                 global_mainwindow->ui->input3_display->setFrame(pkt.stream_index, pkt.pts, /*interpolated=*/false);
150                         } else if (pkt.stream_index == 3) {
151                                 global_mainwindow->ui->input4_display->setFrame(pkt.stream_index, pkt.pts, /*interpolated=*/false);
152                         }
153                 });
154
155                 assert(pkt.stream_index < MAX_STREAMS);
156                 frames[pkt.stream_index].push_back(pkt.pts);
157
158                 // Hack. Remove when we're dealing with live streams.
159                 if (last_pts != -1) {
160                         this_thread::sleep_for(microseconds((pkt.pts - last_pts) * 1000000 / TIMEBASE));
161                 }
162                 last_pts = pkt.pts;
163                 current_pts = pkt.pts;
164         }
165
166         return 0;
167 }