Start hacking in support for interpolated frames in the main application.
[nageru] / video_stream.cpp
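
// video_stream.cpp: Muxes and streams the output video over HTTP. Original
// frames are passed through as their stored JPEGs; interpolated frames are
// synthesized on the GPU (YCbCr conversion, grayscale, DIS optical flow,
// interpolation) and read back to the CPU. Note that in this early version,
// interpolated frames are only dumped to disk for debugging, not yet muxed
// into the stream.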
#include "video_stream.h"

extern "C" {
#include <libavformat/avformat.h>
#include <libavformat/avio.h>
}

#include <unistd.h>

#include "context.h"
#include "flow.h"
#include "httpd.h"
#include "jpeg_frame_view.h"
#include "movit/util.h"
#include "mux.h"
#include "player.h"
#include "util.h"

#include <epoxy/glx.h>

using namespace std;

extern HTTPD *global_httpd;

namespace {

string read_file(const string &filename)
{
        FILE *fp = fopen(filename.c_str(), "rb");
        if (fp == nullptr) {
                perror(filename.c_str());
                return "";
        }

        fseek(fp, 0, SEEK_END);
        long len = ftell(fp);
        rewind(fp);

        string ret;
        ret.resize(len);
        if (len > 0 && fread(&ret[0], len, 1, fp) != 1) {
                fprintf(stderr, "%s: Short read\n", filename.c_str());
                fclose(fp);
                return "";
        }
        fclose(fp);
        return ret;
}

}  // namespace

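// The constructor sets up everything that is shared between all frames:
// a Movit chain that converts a decoded JPEG's YCbCr planes to RGBA,
// the per-slot GPU resources, and the flow/interpolation operators.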
VideoStream::VideoStream()
{
        using namespace movit;
        // TODO: deduplicate code against JPEGFrameView?
        ycbcr_convert_chain.reset(new EffectChain(1280, 720));
        ImageFormat image_format;
        image_format.color_space = COLORSPACE_sRGB;
        image_format.gamma_curve = GAMMA_sRGB;
        ycbcr_format.luma_coefficients = YCBCR_REC_709;
        ycbcr_format.full_range = false;
        ycbcr_format.num_levels = 256;
        ycbcr_format.chroma_subsampling_x = 2;
        ycbcr_format.chroma_subsampling_y = 1;
        ycbcr_format.cb_x_position = 0.0f;  // H.264 -- _not_ JPEG, even though our input is MJPEG-encoded.
        ycbcr_format.cb_y_position = 0.5f;  // Irrelevant.
        ycbcr_format.cr_x_position = 0.0f;
        ycbcr_format.cr_y_position = 0.5f;
        ycbcr_input = (movit::YCbCrInput *)ycbcr_convert_chain->add_input(new YCbCrInput(image_format, ycbcr_format, 1280, 720));

        ImageFormat inout_format;
        inout_format.color_space = COLORSPACE_sRGB;
        inout_format.gamma_curve = GAMMA_sRGB;

        check_error();
        ycbcr_convert_chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
        check_error();
        ycbcr_convert_chain->set_dither_bits(8);
        check_error();
        ycbcr_convert_chain->finalize();
        check_error();

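        // Each interpolation slot holds the GPU objects for one in-flight
        // interpolated frame: a two-layer texture array with the two input
        // frames (plus a grayscale version for the flow code), one FBO per
        // layer so we can render into each, and a persistently mapped PBO
        // that the finished frame is read back into.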
        GLuint input_tex[num_interpolate_slots], gray_tex[num_interpolate_slots];
        glCreateTextures(GL_TEXTURE_2D_ARRAY, num_interpolate_slots, input_tex);
        glCreateTextures(GL_TEXTURE_2D_ARRAY, num_interpolate_slots, gray_tex);
        check_error();
        constexpr size_t width = 1280, height = 720;  // FIXME: adjustable width, height
        int levels = find_num_levels(width, height);
        for (size_t i = 0; i < num_interpolate_slots; ++i) {
                glTextureStorage3D(input_tex[i], levels, GL_RGBA8, width, height, 2);
                check_error();
                glTextureStorage3D(gray_tex[i], levels, GL_R8, width, height, 2);
                check_error();

                InterpolatedFrameResources resource;
                resource.input_tex = input_tex[i];
                resource.gray_tex = gray_tex[i];
                glCreateFramebuffers(2, resource.input_fbos);
                check_error();

                glNamedFramebufferTextureLayer(resource.input_fbos[0], GL_COLOR_ATTACHMENT0, input_tex[i], 0, 0);
                check_error();
                glNamedFramebufferTextureLayer(resource.input_fbos[1], GL_COLOR_ATTACHMENT0, input_tex[i], 0, 1);
                check_error();

                GLuint buf = GL_COLOR_ATTACHMENT0;
                glNamedFramebufferDrawBuffers(resource.input_fbos[0], 1, &buf);
                check_error();
                glNamedFramebufferDrawBuffers(resource.input_fbos[1], 1, &buf);
                check_error();

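                // Persistent mapping means the pointer stays valid even while
                // the GPU is writing into the buffer; the fence set in
                // schedule_interpolated_frame() tells us when the contents
                // are actually safe to read from the CPU side.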
                glCreateBuffers(1, &resource.pbo);
                check_error();
                glNamedBufferStorage(resource.pbo, width * height * 4, nullptr, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
                check_error();
                resource.pbo_contents = glMapNamedBufferRange(resource.pbo, 0, width * height * 4, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
                interpolate_resources.push_back(resource);
        }

        check_error();

        compute_flow.reset(new DISComputeFlow(width, height, operating_point3));
        gray.reset(new GrayscaleConversion);  // NOTE: Must come after DISComputeFlow, since it sets up the VBO!
        interpolate.reset(new Interpolate(width, height, operating_point3));
        check_error();
}

VideoStream::~VideoStream() {}

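// Set up the stream mux. We mux to NUT through our own AVIO callbacks, so the
// muxed bytes end up in write_packet2() below (and from there go out over
// HTTP) instead of in a file, and then spawn the encoder thread.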
void VideoStream::start()
{
        AVFormatContext *avctx = avformat_alloc_context();
        avctx->oformat = av_guess_format("nut", nullptr, nullptr);

        uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
        avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
        avctx->pb->write_data_type = &VideoStream::write_packet2_thunk;
        avctx->pb->ignore_boundary_point = 1;

        Mux::Codec video_codec = Mux::CODEC_MJPEG;

        avctx->flags = AVFMT_FLAG_CUSTOM_IO;

        string video_extradata;

        constexpr int width = 1280, height = 720;  // Doesn't matter for MJPEG.
        stream_mux.reset(new Mux(avctx, width, height, video_codec, video_extradata, /*audio_codec_parameters=*/nullptr, COARSE_TIMEBASE,
                /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));

        encode_thread = thread(&VideoStream::encode_thread_func, this);
}

void VideoStream::stop()
{
        // NOTE: encode_thread_func() currently never leaves its loop, so this
        // join will block forever; a clean shutdown path is still missing.
        encode_thread.join();
}

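// The schedule_* functions queue up frames for the encoder thread. output_pts
// is the pts the frame should get in the output stream; the input pts values
// identify which stored JPEG frame(s) to use.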
void VideoStream::schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts)
{
        QueuedFrame qf;
        qf.type = QueuedFrame::ORIGINAL;
        qf.output_pts = output_pts;
        qf.stream_idx = stream_idx;
        qf.input_first_pts = input_pts;

        unique_lock<mutex> lock(queue_lock);
        frame_queue.push_back(qf);
        queue_nonempty.notify_all();
}

void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha)
{
        // Get the temporary OpenGL resources we need for doing the interpolation.
        InterpolatedFrameResources resources;
        {
                unique_lock<mutex> lock(queue_lock);
                if (interpolate_resources.empty()) {
                        fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
                        return;
                }
                resources = interpolate_resources.front();
                interpolate_resources.pop_front();
        }

        QueuedFrame qf;
        qf.type = QueuedFrame::INTERPOLATED;
        qf.output_pts = output_pts;
        qf.stream_idx = stream_idx;
        qf.resources = resources;

        check_error();

        // Convert frame0 and frame1 to OpenGL textures.
        // TODO: Deduplicate against JPEGFrameView::setDecodedFrame?
        for (size_t frame_no = 0; frame_no < 2; ++frame_no) {
                shared_ptr<Frame> frame = decode_jpeg(filename_for_frame(stream_idx, frame_no == 1 ? input_second_pts : input_first_pts));
                ycbcr_format.chroma_subsampling_x = frame->chroma_subsampling_x;
                ycbcr_format.chroma_subsampling_y = frame->chroma_subsampling_y;
                ycbcr_input->change_ycbcr_format(ycbcr_format);
                ycbcr_input->set_width(frame->width);
                ycbcr_input->set_height(frame->height);
                ycbcr_input->set_pixel_data(0, frame->y.get());
                ycbcr_input->set_pixel_data(1, frame->cb.get());
                ycbcr_input->set_pixel_data(2, frame->cr.get());
                ycbcr_input->set_pitch(0, frame->pitch_y);
                ycbcr_input->set_pitch(1, frame->pitch_chroma);
                ycbcr_input->set_pitch(2, frame->pitch_chroma);
                ycbcr_convert_chain->render_to_fbo(resources.input_fbos[frame_no], 1280, 720);
        }

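        // The flow computation works coarse-to-fine over the mipmap pyramid,
        // so regenerate the mip chains now that level 0 has new contents.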
        glGenerateTextureMipmap(resources.input_tex);

        // Compute the interpolated frame.
        check_error();
        gray->exec(resources.input_tex, resources.gray_tex, 1280, 720, /*num_layers=*/2);
        check_error();
        glGenerateTextureMipmap(resources.gray_tex);
        check_error();
        GLuint flow_tex = compute_flow->exec(resources.gray_tex, DISComputeFlow::FORWARD_AND_BACKWARD, DISComputeFlow::DO_NOT_RESIZE_FLOW);
        check_error();

        qf.output_tex = interpolate->exec(resources.input_tex, flow_tex, 1280, 720, alpha);
        check_error();

        // Read it down (asynchronously) to the CPU.
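        // With a PIXEL_PACK buffer bound, glGetTextureImage() interprets its
        // last argument as an offset into the PBO rather than as a client
        // pointer, so the call below only enqueues the copy and returns
        // immediately.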
        glPixelStorei(GL_PACK_ROW_LENGTH, 0);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, resources.pbo);
        check_error();
        glGetTextureImage(qf.output_tex, 0, GL_RGBA, GL_UNSIGNED_BYTE, 1280 * 720 * 4, nullptr);
        check_error();
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);

        // Set a fence we can wait for to make sure the CPU sees the read.
        glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
        check_error();
        qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
        check_error();

        unique_lock<mutex> lock(queue_lock);
        frame_queue.push_back(qf);
        queue_nonempty.notify_all();
}
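
// The encoder thread does all the waiting and muxing; it needs an OpenGL
// context of its own in order to wait on the fences and read back the
// mapped PBO contents.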
void VideoStream::encode_thread_func()
{
        QSurface *surface = create_surface();
        QOpenGLContext *context = create_context(surface);
        bool ok = make_current(context, surface);
        if (!ok) {
                fprintf(stderr, "Video stream couldn't get an OpenGL context\n");
                exit(1);
        }

        for ( ;; ) {
                QueuedFrame qf;
                {
                        unique_lock<mutex> lock(queue_lock);
                        queue_nonempty.wait(lock, [this]{
                                return !frame_queue.empty();
                        });
                        qf = frame_queue.front();
                        frame_queue.pop_front();
                }

                if (qf.type == QueuedFrame::ORIGINAL) {
                        // Send the JPEG frame on, unchanged.
                        string jpeg = read_file(filename_for_frame(qf.stream_idx, qf.input_first_pts));
                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
                } else if (qf.type == QueuedFrame::INTERPOLATED) {
                        // Wait until the interpolated frame is done rendering
                        // and the PBO contents are visible to the CPU.
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);

                        // DEBUG: Write the frame to disk (overwriting it for
                        // each new frame). OpenGL stores the image bottom-up,
                        // PPM wants it top-down, hence the flipped row order.
                        FILE *fp = fopen("inter.ppm", "wb");
                        fprintf(fp, "P6\n%d %d\n255\n", 1280, 720);
                        for (size_t y = 0; y < 720; ++y) {
                                const uint8_t *ptr = (uint8_t *)qf.resources.pbo_contents + (719 - y) * 1280 * 4;
                                for (size_t x = 0; x < 1280; ++x) {
                                        // Keep R, G, B; skip the alpha byte.
                                        putc(ptr[0], fp);
                                        putc(ptr[1], fp);
                                        putc(ptr[2], fp);
                                        ptr += 4;
                                }
                        }
                        fclose(fp);
                        // TODO: Release flow and output textures.

                        // Put the frame resources back.
                        unique_lock<mutex> lock(queue_lock);
                        interpolate_resources.push_back(qf.resources);
                }
        }
}

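// Thunk so that the C-style function-pointer field in the AVIOContext can
// call into the C++ member function.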
int VideoStream::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
{
        VideoStream *video_stream = (VideoStream *)opaque;
        return video_stream->write_packet2(buf, buf_size, type, time);
}

int VideoStream::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
{
        if (type == AVIO_DATA_MARKER_SYNC_POINT || type == AVIO_DATA_MARKER_BOUNDARY_POINT) {
                seen_sync_markers = true;
        } else if (type == AVIO_DATA_MARKER_UNKNOWN && !seen_sync_markers) {
                // We don't know if this is a keyframe or not (the muxer could
                // avoid marking it), so we just have to make the best of it.
                type = AVIO_DATA_MARKER_SYNC_POINT;
        }

        if (type == AVIO_DATA_MARKER_HEADER) {
                stream_mux_header.append((char *)buf, buf_size);
                global_httpd->set_header(stream_mux_header);
        } else {
                global_httpd->add_data((char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
        }
        return buf_size;
}