Expose the queue status over HTTP.
[nageru] / main.cpp
#include <algorithm>
#include <assert.h>
#include <atomic>
#include <chrono>
#include <condition_variable>
#include <dirent.h>
#include <errno.h>
#include <functional>
#include <getopt.h>
#include <memory>
#include <mutex>
#include <stdint.h>
#include <stdio.h>
#include <string>
#include <sys/types.h>
#include <thread>
#include <vector>

extern "C" {
#include <libavformat/avformat.h>
}

#include "clip_list.h"
#include "context.h"
#include "defs.h"
#include "disk_space_estimator.h"
#include "ffmpeg_raii.h"
#include "flags.h"
#include "httpd.h"
#include "mainwindow.h"
#include "player.h"
#include "post_to_main_thread.h"
#include "ref_counted_gl_sync.h"
#include "timebase.h"
#include "ui_mainwindow.h"
#include "vaapi_jpeg_decoder.h"

#include <QApplication>
#include <movit/init.h>
#include <movit/util.h>

using namespace std;
using namespace std::chrono;

mutex RefCountedGLsync::fence_lock;
atomic<bool> should_quit{false};

int64_t start_pts = -1;

// TODO: Replace by some sort of GUI control, I guess.
int64_t current_pts = 0;

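// Returns the path of the JPEG file for the given stream and pts,
// e.g. frames/cam0-pts000000123.jpeg.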
string filename_for_frame(unsigned stream_idx, int64_t pts)
{
	char filename[256];
	snprintf(filename, sizeof(filename), "frames/cam%d-pts%09ld.jpeg", stream_idx, pts);
	return filename;
}

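// All frames we have on disk, per stream, with pts in our own timebase
// (TIMEBASE units). frame_mu guards access to frames[].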
mutex frame_mu;
vector<int64_t> frames[MAX_STREAMS];
HTTPD *global_httpd;

void load_existing_frames();
int record_thread_func();

int main(int argc, char **argv)
{
	parse_flags(argc, argv);
	if (optind == argc) {
		global_flags.stream_source = "multiangle.mp4";
		global_flags.slow_down_input = true;
	} else if (optind + 1 == argc) {
		global_flags.stream_source = argv[optind];
	} else {
		usage();
		exit(1);
	}

	avformat_network_init();
	global_httpd = new HTTPD;

	QCoreApplication::setAttribute(Qt::AA_ShareOpenGLContexts, true);

	QSurfaceFormat fmt;
	fmt.setDepthBufferSize(0);
	fmt.setStencilBufferSize(0);
	fmt.setProfile(QSurfaceFormat::CoreProfile);
	fmt.setMajorVersion(4);
	fmt.setMinorVersion(5);

	// Turn off vsync, since Qt generally gives us at most frame rate
	// (display frequency) / (number of QGLWidgets active).
	fmt.setSwapInterval(0);

	QSurfaceFormat::setDefaultFormat(fmt);

	QGLFormat::setDefaultFormat(QGLFormat::fromSurfaceFormat(fmt));

	QApplication app(argc, argv);
	global_share_widget = new QGLWidget();
	if (!global_share_widget->isValid()) {
		fprintf(stderr, "Failed to initialize OpenGL. Futatabi needs at least OpenGL 4.5 to function properly.\n");
		exit(1);
	}

	// Initialize Movit.
	{
		QSurface *surface = create_surface();
		QOpenGLContext *context = create_context(surface);
		make_current(context, surface);
		CHECK(movit::init_movit(MOVIT_SHADER_DIR, movit::MOVIT_DEBUG_OFF));
		delete_context(context);
		// TODO: Delete the surface, too.
	}

	MainWindow main_window;
	main_window.show();

	global_httpd->add_endpoint("/queue_status", bind(&MainWindow::get_queue_status, &main_window), HTTPD::NO_CORS_POLICY);
	global_httpd->start(DEFAULT_HTTPD_PORT);

	init_jpeg_vaapi();

	load_existing_frames();
	thread record_thread(record_thread_func);

	int ret = app.exec();

	should_quit = true;
	record_thread.join();
	JPEGFrameView::shutdown();

	return ret;
}

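// Scan frames/ for JPEG frames left over from previous runs, register their
// pts values in frames[], and set start_pts so that new frames are recorded
// after the existing ones (with a one-second gap).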
void load_existing_frames()
{
	DIR *dir = opendir("frames/");
	if (dir == nullptr) {
		perror("frames/");
		start_pts = 0;
		return;
	}

	for ( ;; ) {
		errno = 0;
		dirent *de = readdir(dir);
		if (de == nullptr) {
			if (errno != 0) {
				perror("readdir");
				exit(1);
			}
			break;
		}

		int stream_idx;
		int64_t pts;
		if (sscanf(de->d_name, "cam%d-pts%ld.jpeg", &stream_idx, &pts) == 2 &&
		    stream_idx >= 0 && stream_idx < MAX_STREAMS) {
			frames[stream_idx].push_back(pts);
			start_pts = max(start_pts, pts);
		}
	}

	closedir(dir);

	if (start_pts == -1) {
		start_pts = 0;
	} else {
		// Add a gap of one second from the old frames to the new ones.
		start_pts += TIMEBASE;
	}

	for (int stream_idx = 0; stream_idx < MAX_STREAMS; ++stream_idx) {
		sort(frames[stream_idx].begin(), frames[stream_idx].end());
	}
}

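// Reads the input stream (global_flags.stream_source) with libavformat,
// writes each packet's payload to disk as a JPEG under frames/, and notifies
// the UI about the new frame. Runs until the input ends or should_quit is set.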
int record_thread_func()
{
	auto format_ctx = avformat_open_input_unique(global_flags.stream_source.c_str(), nullptr, nullptr);
	if (format_ctx == nullptr) {
		fprintf(stderr, "%s: Error opening file\n", global_flags.stream_source.c_str());
		return 1;
	}

	int64_t last_pts = -1;
	int64_t pts_offset;

	while (!should_quit.load()) {
		AVPacket pkt;
		unique_ptr<AVPacket, decltype(av_packet_unref)*> pkt_cleanup(
			&pkt, av_packet_unref);
		av_init_packet(&pkt);
		pkt.data = nullptr;
		pkt.size = 0;

		// TODO: Make it possible to abort av_read_frame() (use an interrupt callback);
		// right now, should_quit will be ignored if it's hung on I/O.
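		//
		// One way to do that (a sketch only, not something this code sets up) would
		// be libavformat's AVIOInterruptCB: allocate the context yourself and install
		// the callback before opening the input, e.g.
		//
		//   AVFormatContext *ctx = avformat_alloc_context();
		//   ctx->interrupt_callback.callback = [](void *) -> int { return should_quit.load(); };
		//   ctx->interrupt_callback.opaque = nullptr;
		//   avformat_open_input(&ctx, global_flags.stream_source.c_str(), nullptr, nullptr);
		//
		// Blocking reads would then fail with AVERROR_EXIT once should_quit is set.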
		if (av_read_frame(format_ctx.get(), &pkt) != 0) {
			break;
		}

		// Convert pts to our own timebase.
		AVRational stream_timebase = format_ctx->streams[pkt.stream_index]->time_base;
		int64_t pts = av_rescale_q(pkt.pts, stream_timebase, AVRational{ 1, TIMEBASE });

		// Translate offset into our stream.
		if (last_pts == -1) {
			pts_offset = start_pts - pts;
		}
		pts = std::max(pts + pts_offset, start_pts);

		//fprintf(stderr, "Got a frame from camera %d, pts = %ld, size = %d\n",
		//	pkt.stream_index, pts, pkt.size);
		string filename = filename_for_frame(pkt.stream_index, pts);
		FILE *fp = fopen(filename.c_str(), "wb");
		if (fp == nullptr) {
			perror(filename.c_str());
			exit(1);
		}
		fwrite(pkt.data, pkt.size, 1, fp);
		fclose(fp);

		global_disk_space_estimator->report_write(filename, pts);

		post_to_main_thread([pkt, pts] {
			if (pkt.stream_index == 0) {
				global_mainwindow->ui->input1_display->setFrame(pkt.stream_index, pts, /*interpolated=*/false);
			} else if (pkt.stream_index == 1) {
				global_mainwindow->ui->input2_display->setFrame(pkt.stream_index, pts, /*interpolated=*/false);
			} else if (pkt.stream_index == 2) {
				global_mainwindow->ui->input3_display->setFrame(pkt.stream_index, pts, /*interpolated=*/false);
			} else if (pkt.stream_index == 3) {
				global_mainwindow->ui->input4_display->setFrame(pkt.stream_index, pts, /*interpolated=*/false);
			}
		});
		assert(pkt.stream_index < MAX_STREAMS);
		{
			// frames[] is shared with other threads, so take frame_mu
			// while appending to it.
			lock_guard<mutex> lock(frame_mu);
			frames[pkt.stream_index].push_back(pts);
		}

		if (last_pts != -1 && global_flags.slow_down_input) {
			this_thread::sleep_for(microseconds((pts - last_pts) * 1000000 / TIMEBASE));
		}
		last_pts = pts;
		current_pts = pts;
	}

	return 0;
}