]> git.sesse.net Git - nageru/blob - main.cpp
Start of persistence work: Keep existing frames that are at frames/ at startup.
[nageru] / main.cpp
#include <assert.h>
#include <dirent.h>
#include <errno.h>
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>

#include <algorithm>
#include <chrono>
#include <condition_variable>
#include <memory>
#include <mutex>
#include <string>
#include <thread>
#include <vector>
14
15 extern "C" {
16 #include <libavformat/avformat.h>
17 }
18
19 #include <QApplication>
20
21 #include <movit/init.h>
22 #include <movit/util.h>
23
24 #include "clip_list.h"
25 #include "context.h"
26 #include "defs.h"
27 #include "mainwindow.h"
28 #include "ffmpeg_raii.h"
29 #include "httpd.h"
30 #include "player.h"
31 #include "post_to_main_thread.h"
32 #include "ref_counted_gl_sync.h"
33 #include "timebase.h"
34 #include "ui_mainwindow.h"
35 #include "vaapi_jpeg_decoder.h"
36
using namespace std;
using namespace std::chrono;

// The static member of RefCountedGLsync needs a definition in exactly one
// translation unit; this is it.
std::mutex RefCountedGLsync::fence_lock;

// pts (in TIMEBASE units) at which newly recorded frames begin. -1 until
// load_existing_frames() has run; that function sets it past any frames
// found on disk so old and new recordings do not collide.
int64_t start_pts = -1;

// pts of the most recently recorded frame; advanced by the record thread.
// TODO: Replace by some sort of GUI control, I guess.
int64_t current_pts = 0;
46
// Returns the path of the JPEG file holding the frame for the given stream
// at the given pts, e.g. "frames/cam2-pts000012345.jpeg". The pts is
// zero-padded to at least nine digits so lexical order matches pts order.
// Fixed the format string: %u for the unsigned stream index (was %d), and
// PRId64 for int64_t (was %ld, which is wrong on 32-bit/LLP64 platforms).
string filename_for_frame(unsigned stream_idx, int64_t pts)
{
	char filename[256];
	snprintf(filename, sizeof(filename), "frames/cam%u-pts%09" PRId64 ".jpeg",
	         stream_idx, pts);
	return filename;
}
53
// Guards frames[] once the record thread is running. NOTE(review):
// presumably readers in other parts of the project take this lock too —
// verify at the call sites.
mutex frame_mu;
// Per stream: pts values of every frame stored as a JPEG under frames/.
// Kept sorted by load_existing_frames(); the record thread appends in order.
vector<int64_t> frames[MAX_STREAMS];
HTTPD *global_httpd;

void load_existing_frames();
int record_thread_func();
60
// Brings up the HTTP server, the Qt/OpenGL environment and Movit, loads
// frames persisted by a previous run, starts the (detached) recording
// thread, and then enters the Qt event loop.
int main(int argc, char **argv)
{
	avformat_network_init();
	global_httpd = new HTTPD;
	global_httpd->start(DEFAULT_HTTPD_PORT);

	// Let all Qt OpenGL contexts share resources with each other (and
	// with the contexts we create ourselves via global_share_widget).
	QCoreApplication::setAttribute(Qt::AA_ShareOpenGLContexts, true);

	// Request a 4.5 core profile; the isValid() check below rejects
	// drivers that cannot provide it.
	QSurfaceFormat fmt;
	fmt.setDepthBufferSize(0);
	fmt.setStencilBufferSize(0);
	fmt.setProfile(QSurfaceFormat::CoreProfile);
	fmt.setMajorVersion(4);
	fmt.setMinorVersion(5);

	// Turn off vsync, since Qt generally gives us at most frame rate
	// (display frequency) / (number of QGLWidgets active).
	fmt.setSwapInterval(0);

	QSurfaceFormat::setDefaultFormat(fmt);

	QGLFormat::setDefaultFormat(QGLFormat::fromSurfaceFormat(fmt));

	QApplication app(argc, argv);
	global_share_widget = new QGLWidget();
	if (!global_share_widget->isValid()) {
		fprintf(stderr, "Failed to initialize OpenGL. Futatabi needs at least OpenGL 4.5 to function properly.\n");
		exit(1);
	}

	// Initialize Movit.
	{
		QSurface *surface = create_surface();
		QOpenGLContext *context = create_context(surface);
		make_current(context, surface);
		CHECK(movit::init_movit(MOVIT_SHADER_DIR, movit::MOVIT_DEBUG_OFF));
		delete_context(context);
		// TODO: Delete the surface, too.
	}

	MainWindow mainWindow;
	mainWindow.show();

	// Must happen before the record thread starts, since it establishes
	// start_pts, which the record thread reads.
	load_existing_frames();
	// Detached on purpose: the recorder runs for the lifetime of the process.
	thread(record_thread_func).detach();

	init_jpeg_vaapi();

	return app.exec();
}
111
112 void load_existing_frames()
113 {
114         DIR *dir = opendir("frames/");
115         if (dir == nullptr) {
116                 perror("frames/");
117                 start_pts = 0;
118                 return;
119         }
120
121         for ( ;; ) {
122                 errno = 0;
123                 dirent *de = readdir(dir);
124                 if (de == nullptr) {
125                         if (errno != 0) {
126                                 perror("readdir");
127                                 exit(1);
128                         }
129                         break;
130                 }
131
132                 int stream_idx;
133                 int64_t pts;
134                 if (sscanf(de->d_name, "cam%d-pts%ld.jpeg", &stream_idx, &pts) == 2 &&
135                     stream_idx >= 0 && stream_idx < MAX_STREAMS) {
136                         frames[stream_idx].push_back(pts);
137                         start_pts = max(start_pts, pts);
138                 }
139         }
140
141         closedir(dir);
142
143         if (start_pts == -1) {
144                 start_pts = 0;
145         } else {
146                 // Add a gap of one second from the old frames to the new ones.
147                 start_pts += TIMEBASE;
148         }
149
150         for (int stream_idx = 0; stream_idx < MAX_STREAMS; ++stream_idx) {
151                 sort(frames[stream_idx].begin(), frames[stream_idx].end());
152         }
153 }
154
155 int record_thread_func()
156 {
157         auto format_ctx = avformat_open_input_unique("multiangle.mp4", nullptr, nullptr);
158         if (format_ctx == nullptr) {
159                 fprintf(stderr, "%s: Error opening file\n", "example.mp4");
160                 return 1;
161         }
162
163         int64_t last_pts = -1;
164         int64_t pts_offset;
165
166         for ( ;; ) {
167                 AVPacket pkt;
168                 unique_ptr<AVPacket, decltype(av_packet_unref)*> pkt_cleanup(
169                         &pkt, av_packet_unref);
170                 av_init_packet(&pkt);
171                 pkt.data = nullptr;
172                 pkt.size = 0;
173                 if (av_read_frame(format_ctx.get(), &pkt) != 0) {
174                         break;
175                 }
176
177                 // Convert pts to our own timebase.
178                 AVRational stream_timebase = format_ctx->streams[pkt.stream_index]->time_base;
179                 int64_t pts = av_rescale_q(pkt.pts, stream_timebase, AVRational{ 1, TIMEBASE });
180
181                 // Translate offset into our stream.
182                 if (last_pts == -1) {
183                         pts_offset = start_pts - pts;
184                 }
185                 pts = std::max(pts + pts_offset, start_pts);
186
187                 //fprintf(stderr, "Got a frame from camera %d, pts = %ld, size = %d\n",
188                 //      pkt.stream_index, pts, pkt.size);
189                 string filename = filename_for_frame(pkt.stream_index, pts);
190                 FILE *fp = fopen(filename.c_str(), "wb");
191                 if (fp == nullptr) {
192                         perror(filename.c_str());
193                         exit(1);
194                 }
195                 fwrite(pkt.data, pkt.size, 1, fp);
196                 fclose(fp);
197
198                 post_to_main_thread([pkt, pts] {
199                         if (pkt.stream_index == 0) {
200                                 global_mainwindow->ui->input1_display->setFrame(pkt.stream_index, pts, /*interpolated=*/false);
201                         } else if (pkt.stream_index == 1) {
202                                 global_mainwindow->ui->input2_display->setFrame(pkt.stream_index, pts, /*interpolated=*/false);
203                         } else if (pkt.stream_index == 2) {
204                                 global_mainwindow->ui->input3_display->setFrame(pkt.stream_index, pts, /*interpolated=*/false);
205                         } else if (pkt.stream_index == 3) {
206                                 global_mainwindow->ui->input4_display->setFrame(pkt.stream_index, pts, /*interpolated=*/false);
207                         }
208                 });
209
210                 assert(pkt.stream_index < MAX_STREAMS);
211                 frames[pkt.stream_index].push_back(pts);
212
213                 // Hack. Remove when we're dealing with live streams.
214                 if (last_pts != -1) {
215                         this_thread::sleep_for(microseconds((pts - last_pts) * 1000000 / TIMEBASE));
216                 }
217                 last_pts = pts;
218                 current_pts = pts;
219         }
220
221         return 0;
222 }