]> git.sesse.net Git - nageru/blob - main.cpp
Read timebase from the input video.
[nageru] / main.cpp
1 #include <assert.h>
2 #include <stdio.h>
3 #include <stdint.h>
4
5 #include <chrono>
6 #include <condition_variable>
7 #include <memory>
8 #include <mutex>
9 #include <string>
10 #include <thread>
11 #include <vector>
12
13 extern "C" {
14 #include <libavformat/avformat.h>
15 }
16
17 #include <QApplication>
18
19 #include <movit/init.h>
20 #include <movit/util.h>
21
22 #include "clip_list.h"
23 #include "context.h"
24 #include "defs.h"
25 #include "mainwindow.h"
26 #include "ffmpeg_raii.h"
27 #include "httpd.h"
28 #include "player.h"
29 #include "post_to_main_thread.h"
30 #include "ref_counted_gl_sync.h"
31 #include "timebase.h"
32 #include "ui_mainwindow.h"
33
34 using namespace std;
35 using namespace std::chrono;
36
// Out-of-class definition for the static fence lock (presumably declared in
// ref_counted_gl_sync.h — it serializes access around GL sync fences).
std::mutex RefCountedGLsync::fence_lock;

// Current playback position, in TIMEBASE units. Written by the record
// thread as packets arrive (see record_thread_func()).
// TODO: Replace by some sort of GUI control, I guess.
int64_t current_pts = 0;
41
// Computes the on-disk path for the JPEG belonging to the frame with the
// given pts (in TIMEBASE units) from the given input stream, e.g.
// "frames/cam0-pts000000042.jpeg".
std::string filename_for_frame(unsigned stream_idx, int64_t pts)
{
	char filename[256];
	// Use %u for the unsigned argument and a long long cast with %lld for
	// the pts; the previous "%d"/"%09ld" pair was undefined behavior on
	// platforms where long is 32 bits.
	snprintf(filename, sizeof(filename), "frames/cam%u-pts%09lld.jpeg",
	         stream_idx, static_cast<long long>(pts));
	return filename;
}
48
// Protects frames[] below.
mutex frame_mu;
// For each input stream, the pts (in TIMEBASE units) of every frame
// recorded so far, in arrival order.
vector<int64_t> frames[MAX_STREAMS];
// Global HTTP server, created and started at the top of main().
HTTPD *global_httpd;

// Reads frames from the input file, dumps them to disk and notifies the
// UI; started as a detached thread from main().
int record_thread_func();
54
// Entry point: brings up the HTTP server, configures the shared OpenGL
// surface format, initializes Movit, shows the main window, and starts
// the (detached) record thread before entering the Qt event loop.
int main(int argc, char **argv)
{
	avformat_network_init();
	global_httpd = new HTTPD;
	global_httpd->start(DEFAULT_HTTPD_PORT);

	// Must be set before the QApplication is constructed for the shared
	// contexts to take effect.
	QCoreApplication::setAttribute(Qt::AA_ShareOpenGLContexts, true);

	// We need a core-profile 4.5 context; no depth/stencil since we only
	// do 2D video work.
	QSurfaceFormat fmt;
	fmt.setDepthBufferSize(0);
	fmt.setStencilBufferSize(0);
	fmt.setProfile(QSurfaceFormat::CoreProfile);
	fmt.setMajorVersion(4);
	fmt.setMinorVersion(5);

	// Turn off vsync, since Qt generally gives us at most frame rate
	// (display frequency) / (number of QGLWidgets active).
	fmt.setSwapInterval(0);

	QSurfaceFormat::setDefaultFormat(fmt);

	QGLFormat::setDefaultFormat(QGLFormat::fromSurfaceFormat(fmt));

	QApplication app(argc, argv);
	global_share_widget = new QGLWidget();
	if (!global_share_widget->isValid()) {
		fprintf(stderr, "Failed to initialize OpenGL. Futatabi needs at least OpenGL 4.5 to function properly.\n");
		exit(1);
	}

	// Initialize Movit. This needs a current GL context, so make a
	// temporary one just for the duration of the call.
	{
		QSurface *surface = create_surface();
		QOpenGLContext *context = create_context(surface);
		make_current(context, surface);
		CHECK(movit::init_movit(MOVIT_SHADER_DIR, movit::MOVIT_DEBUG_OFF));
		delete_context(context);
		// TODO: Delete the surface, too.
	}

	MainWindow mainWindow;
	mainWindow.show();

	// Detached: the record thread runs for the lifetime of the process.
	thread(record_thread_func).detach();

	return app.exec();
}
102
103 int record_thread_func()
104 {
105         auto format_ctx = avformat_open_input_unique("multiangle.mp4", nullptr, nullptr);
106         if (format_ctx == nullptr) {
107                 fprintf(stderr, "%s: Error opening file\n", "example.mp4");
108                 return 1;
109         }
110
111         int64_t last_pts = -1;
112
113         for ( ;; ) {
114                 AVPacket pkt;
115                 unique_ptr<AVPacket, decltype(av_packet_unref)*> pkt_cleanup(
116                         &pkt, av_packet_unref);
117                 av_init_packet(&pkt);
118                 pkt.data = nullptr;
119                 pkt.size = 0;
120                 if (av_read_frame(format_ctx.get(), &pkt) != 0) {
121                         break;
122                 }
123
124                 // Convert pts to our own timebase.
125                 // TODO: Figure out offsets, too.
126                 AVRational stream_timebase = format_ctx->streams[pkt.stream_index]->time_base;
127                 pkt.pts = av_rescale_q(pkt.pts, stream_timebase, AVRational{ 1, TIMEBASE });
128
129                 //fprintf(stderr, "Got a frame from camera %d, pts = %ld, size = %d\n",
130                 //      pkt.stream_index, pkt.pts, pkt.size);
131                 string filename = filename_for_frame(pkt.stream_index, pkt.pts);
132                 FILE *fp = fopen(filename.c_str(), "wb");
133                 if (fp == nullptr) {
134                         perror(filename.c_str());
135                         exit(1);
136                 }
137                 fwrite(pkt.data, pkt.size, 1, fp);
138                 fclose(fp);
139
140                 post_to_main_thread([pkt] {
141                         if (pkt.stream_index == 0) {
142                                 global_mainwindow->ui->input1_display->setFrame(pkt.stream_index, pkt.pts);
143                         } else if (pkt.stream_index == 1) {
144                                 global_mainwindow->ui->input2_display->setFrame(pkt.stream_index, pkt.pts);
145                         } else if (pkt.stream_index == 2) {
146                                 global_mainwindow->ui->input3_display->setFrame(pkt.stream_index, pkt.pts);
147                         } else if (pkt.stream_index == 3) {
148                                 global_mainwindow->ui->input4_display->setFrame(pkt.stream_index, pkt.pts);
149                         }
150                 });
151
152                 assert(pkt.stream_index < MAX_STREAMS);
153                 frames[pkt.stream_index].push_back(pkt.pts);
154
155                 // Hack. Remove when we're dealing with live streams.
156                 if (last_pts != -1) {
157                         this_thread::sleep_for(microseconds((pkt.pts - last_pts) * 1000000 / TIMEBASE));
158                 }
159                 last_pts = pkt.pts;
160                 current_pts = pkt.pts;
161         }
162
163         return 0;
164 }