#include "video_stream.h"

extern "C" {
#include <libavformat/avformat.h>
#include <libavformat/avio.h>
#include <libavutil/channel_layout.h>
}

#include "chroma_subsampler.h"
#include "exif_parser.h"
#include "flags.h"
#include "flow.h"
#include "jpeg_frame_view.h"
#include "movit/util.h"
#include "pbo_pool.h"
#include "player.h"
#include "shared/context.h"
#include "shared/httpd.h"
#include "shared/metrics.h"
#include "shared/mux.h"
#include "shared/shared_defs.h"
#include "util.h"
#include "ycbcr_converter.h"

#include <epoxy/glx.h>
#include <jpeglib.h>
#include <unistd.h>

using namespace movit;
using namespace std;
using namespace std::chrono;

namespace {

once_flag video_metrics_inited;
Summary metric_jpeg_encode_time_seconds;
Summary metric_fade_latency_seconds;
Summary metric_interpolation_latency_seconds;
Summary metric_fade_fence_wait_time_seconds;
Summary metric_interpolation_fence_wait_time_seconds;
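
// If the frame's texture upload still has a pending fence, make the GPU wait
// for it before we sample from the texture (glWaitSync is a server-side wait,
// so the CPU is not blocked), then drop the fence so we only wait once.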
void wait_for_upload(shared_ptr<Frame> &frame)
{
        if (frame->uploaded_interpolation != nullptr) {
                glWaitSync(frame->uploaded_interpolation.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
                frame->uploaded_interpolation.reset();
        }
}

}  // namespace

extern HTTPD *global_httpd;
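
// A custom libjpeg destination manager that writes the compressed output into
// a growable std::string instead of a stdio file. "pub" must be the first
// member: the thunks below cast the jpeg_destination_mgr pointer that libjpeg
// hands back to a VectorDestinationManager (hence the standard-layout assert
// after the struct).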
struct VectorDestinationManager {
        jpeg_destination_mgr pub;
        string dest;

        VectorDestinationManager()
        {
                pub.init_destination = init_destination_thunk;
                pub.empty_output_buffer = empty_output_buffer_thunk;
                pub.term_destination = term_destination_thunk;
        }

        static void init_destination_thunk(j_compress_ptr ptr)
        {
                ((VectorDestinationManager *)(ptr->dest))->init_destination();
        }

        inline void init_destination()
        {
                make_room(0);
        }

        static boolean empty_output_buffer_thunk(j_compress_ptr ptr)
        {
                return ((VectorDestinationManager *)(ptr->dest))->empty_output_buffer();
        }

        inline bool empty_output_buffer()
        {
                make_room(dest.size());  // Per the libjpeg contract, the whole buffer counts as written here, so ignore pub.free_in_buffer.
                return true;
        }
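
        // Grow the string by at least 4 kB beyond what has been written so far,
        // then expand it to its full capacity so libjpeg can use all of the
        // allocated space; next_output_byte/free_in_buffer point directly into
        // the string's own storage.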
        inline void make_room(size_t bytes_used)
        {
                dest.resize(bytes_used + 4096);
                dest.resize(dest.capacity());
                pub.next_output_byte = (uint8_t *)dest.data() + bytes_used;
                pub.free_in_buffer = dest.size() - bytes_used;
        }

        static void term_destination_thunk(j_compress_ptr ptr)
        {
                ((VectorDestinationManager *)(ptr->dest))->term_destination();
        }

        inline void term_destination()
        {
                dest.resize(dest.size() - pub.free_in_buffer);
        }
};
static_assert(std::is_standard_layout<VectorDestinationManager>::value, "");
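
// Encode a planar 4:2:2 Y'CbCr frame (chroma planes at half horizontal
// resolution) as a JPEG at quality 90. Note that the row loop below assumes
// the height is a multiple of 8.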
string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height, const string &exif_data)
{
        steady_clock::time_point start = steady_clock::now();
        VectorDestinationManager dest;

        jpeg_compress_struct cinfo;
        jpeg_error_mgr jerr;
        cinfo.err = jpeg_std_error(&jerr);
        jpeg_create_compress(&cinfo);

        cinfo.dest = (jpeg_destination_mgr *)&dest;
        cinfo.input_components = 3;
        cinfo.in_color_space = JCS_RGB;
        jpeg_set_defaults(&cinfo);
        constexpr int quality = 90;
        jpeg_set_quality(&cinfo, quality, /*force_baseline=*/false);

        cinfo.image_width = width;
        cinfo.image_height = height;
        cinfo.raw_data_in = true;
        jpeg_set_colorspace(&cinfo, JCS_YCbCr);
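        // 2x1 sampling for luma against 1x1 for chroma, i.e., 4:2:2.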
        cinfo.comp_info[0].h_samp_factor = 2;
        cinfo.comp_info[0].v_samp_factor = 1;
        cinfo.comp_info[1].h_samp_factor = 1;
        cinfo.comp_info[1].v_samp_factor = 1;
        cinfo.comp_info[2].h_samp_factor = 1;
        cinfo.comp_info[2].v_samp_factor = 1;
        cinfo.CCIR601_sampling = true;  // Seems to be mostly ignored by libjpeg, though.
        jpeg_start_compress(&cinfo, true);

        // This comment marker is private to FFmpeg. It signals limited Y'CbCr range
        // (and nothing else).
        jpeg_write_marker(&cinfo, JPEG_COM, (const JOCTET *)"CS=ITU601", strlen("CS=ITU601"));

        if (!exif_data.empty()) {
                jpeg_write_marker(&cinfo, JPEG_APP0 + 1, (const JOCTET *)exif_data.data(), exif_data.size());
        }

        JSAMPROW yptr[8], cbptr[8], crptr[8];
        JSAMPARRAY data[3] = { yptr, cbptr, crptr };
        for (unsigned y = 0; y < height; y += 8) {
                for (unsigned yy = 0; yy < 8; ++yy) {
                        yptr[yy] = const_cast<JSAMPROW>(&y_data[(y + yy) * width]);
                        cbptr[yy] = const_cast<JSAMPROW>(&cb_data[(y + yy) * width / 2]);
                        crptr[yy] = const_cast<JSAMPROW>(&cr_data[(y + yy) * width / 2]);
                }

                jpeg_write_raw_data(&cinfo, data, /*num_lines=*/8);
        }

        jpeg_finish_compress(&cinfo);
        jpeg_destroy_compress(&cinfo);

        steady_clock::time_point stop = steady_clock::now();
        metric_jpeg_encode_time_seconds.count_event(duration<double>(stop - start).count());

        return move(dest.dest);
}
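
// Encode from a readback PBO laid out as a tightly packed full-resolution
// Y plane followed by half-width Cb and Cr planes (the layout that the
// glGetTextureImage() readbacks in this file produce).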
string encode_jpeg_from_pbo(void *contents, unsigned width, unsigned height, const string &exif_data)
{
        unsigned chroma_width = width / 2;

        const uint8_t *y = (const uint8_t *)contents;
        const uint8_t *cb = (const uint8_t *)contents + width * height;
        const uint8_t *cr = (const uint8_t *)contents + width * height + chroma_width * height;
        return encode_jpeg(y, cb, cr, width, height, exif_data);
}

VideoStream::VideoStream(AVFormatContext *file_avctx)
        : avctx(file_avctx), output_fast_forward(file_avctx != nullptr)
{
        call_once(video_metrics_inited, [] {
                vector<double> quantiles{ 0.01, 0.1, 0.25, 0.5, 0.75, 0.9, 0.99 };
                metric_jpeg_encode_time_seconds.init(quantiles, 60.0);
                global_metrics.add("jpeg_encode_time_seconds", &metric_jpeg_encode_time_seconds);
                metric_fade_fence_wait_time_seconds.init(quantiles, 60.0);
                global_metrics.add("fade_fence_wait_time_seconds", &metric_fade_fence_wait_time_seconds);
                metric_interpolation_fence_wait_time_seconds.init(quantiles, 60.0);
                global_metrics.add("interpolation_fence_wait_time_seconds", &metric_interpolation_fence_wait_time_seconds);
                metric_fade_latency_seconds.init(quantiles, 60.0);
                global_metrics.add("fade_latency_seconds", &metric_fade_latency_seconds);
                metric_interpolation_latency_seconds.init(quantiles, 60.0);
                global_metrics.add("interpolation_latency_seconds", &metric_interpolation_latency_seconds);
        });

        ycbcr_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_DUAL_YCBCR, /*resource_pool=*/nullptr));
        ycbcr_semiplanar_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_SEMIPLANAR, /*resource_pool=*/nullptr));
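
        // Allocate the per-slot GPU resources used for interpolation and fades.
        // input_tex and gray_tex are two-layer array textures, holding the two
        // source frames that the flow is computed between.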
        GLuint input_tex[num_interpolate_slots], gray_tex[num_interpolate_slots];
        GLuint fade_y_output_tex[num_interpolate_slots], fade_cbcr_output_tex[num_interpolate_slots];
        GLuint cb_tex[num_interpolate_slots], cr_tex[num_interpolate_slots];

        glCreateTextures(GL_TEXTURE_2D_ARRAY, num_interpolate_slots, input_tex);
        glCreateTextures(GL_TEXTURE_2D_ARRAY, num_interpolate_slots, gray_tex);
        glCreateTextures(GL_TEXTURE_2D, num_interpolate_slots, fade_y_output_tex);
        glCreateTextures(GL_TEXTURE_2D, num_interpolate_slots, fade_cbcr_output_tex);
        glCreateTextures(GL_TEXTURE_2D, num_interpolate_slots, cb_tex);
        glCreateTextures(GL_TEXTURE_2D, num_interpolate_slots, cr_tex);
        check_error();

        size_t width = global_flags.width, height = global_flags.height;
        int levels = find_num_levels(width, height);
        for (size_t i = 0; i < num_interpolate_slots; ++i) {
                glTextureStorage3D(input_tex[i], levels, GL_RGBA8, width, height, 2);
                check_error();
                glTextureStorage3D(gray_tex[i], levels, GL_R8, width, height, 2);
                check_error();
                glTextureStorage2D(fade_y_output_tex[i], 1, GL_R8, width, height);
                check_error();
                glTextureStorage2D(fade_cbcr_output_tex[i], 1, GL_RG8, width, height);
                check_error();
                glTextureStorage2D(cb_tex[i], 1, GL_R8, width / 2, height);
                check_error();
                glTextureStorage2D(cr_tex[i], 1, GL_R8, width / 2, height);
                check_error();

                unique_ptr<InterpolatedFrameResources> resource(new InterpolatedFrameResources);
                resource->owner = this;
                resource->input_tex = input_tex[i];
                resource->gray_tex = gray_tex[i];
                resource->fade_y_output_tex = fade_y_output_tex[i];
                resource->fade_cbcr_output_tex = fade_cbcr_output_tex[i];
                resource->cb_tex = cb_tex[i];
                resource->cr_tex = cr_tex[i];
                glCreateFramebuffers(2, resource->input_fbos);
                check_error();
                glCreateFramebuffers(1, &resource->fade_fbo);
                check_error();

                glNamedFramebufferTextureLayer(resource->input_fbos[0], GL_COLOR_ATTACHMENT0, input_tex[i], 0, 0);
                check_error();
                glNamedFramebufferTextureLayer(resource->input_fbos[0], GL_COLOR_ATTACHMENT1, gray_tex[i], 0, 0);
                check_error();
                glNamedFramebufferTextureLayer(resource->input_fbos[1], GL_COLOR_ATTACHMENT0, input_tex[i], 0, 1);
                check_error();
                glNamedFramebufferTextureLayer(resource->input_fbos[1], GL_COLOR_ATTACHMENT1, gray_tex[i], 0, 1);
                check_error();
                glNamedFramebufferTexture(resource->fade_fbo, GL_COLOR_ATTACHMENT0, fade_y_output_tex[i], 0);
                check_error();
                glNamedFramebufferTexture(resource->fade_fbo, GL_COLOR_ATTACHMENT1, fade_cbcr_output_tex[i], 0);
                check_error();

                GLuint bufs[] = { GL_COLOR_ATTACHMENT0, GL_COLOR_ATTACHMENT1 };
                glNamedFramebufferDrawBuffers(resource->input_fbos[0], 2, bufs);
                check_error();
                glNamedFramebufferDrawBuffers(resource->input_fbos[1], 2, bufs);
                check_error();
                glNamedFramebufferDrawBuffers(resource->fade_fbo, 2, bufs);
                check_error();
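
                // The readback buffer is persistently mapped, so the encode thread
                // can read the downloaded pixels straight out of pbo_contents once
                // the fence signals, with no further map/unmap calls.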
                glCreateBuffers(1, &resource->pbo);
                check_error();
                glNamedBufferStorage(resource->pbo, width * height * 4, nullptr, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
                check_error();
                resource->pbo_contents = glMapNamedBufferRange(resource->pbo, 0, width * height * 4, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
                interpolate_resources.push_back(move(resource));
        }

        check_error();

        OperatingPoint op;
        if (global_flags.interpolation_quality == 0 ||
            global_flags.interpolation_quality == 1) {
                op = operating_point1;
        } else if (global_flags.interpolation_quality == 2) {
                op = operating_point2;
        } else if (global_flags.interpolation_quality == 3) {
                op = operating_point3;
        } else if (global_flags.interpolation_quality == 4) {
                op = operating_point4;
        } else {
                // Quality 0 will be changed to 1 in flags.cpp.
                assert(false);
        }

        compute_flow.reset(new DISComputeFlow(width, height, op));
        interpolate.reset(new Interpolate(op, /*split_ycbcr_output=*/true));
        interpolate_no_split.reset(new Interpolate(op, /*split_ycbcr_output=*/false));
        chroma_subsampler.reset(new ChromaSubsampler);
        check_error();

        // The “last frame” is initially black.
        unique_ptr<uint8_t[]> y(new uint8_t[global_flags.width * global_flags.height]);
        unique_ptr<uint8_t[]> cb_or_cr(new uint8_t[(global_flags.width / 2) * global_flags.height]);
        memset(y.get(), 16, global_flags.width * global_flags.height);
        memset(cb_or_cr.get(), 128, (global_flags.width / 2) * global_flags.height);
        last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), global_flags.width, global_flags.height, /*exif_data=*/"");

        if (file_avctx != nullptr) {
                with_subtitles = Mux::WITHOUT_SUBTITLES;
        } else {
                with_subtitles = Mux::WITH_SUBTITLES;
        }
}

VideoStream::~VideoStream()
{
        if (last_flow_tex != 0) {
                compute_flow->release_texture(last_flow_tex);
        }

        for (const unique_ptr<InterpolatedFrameResources> &resource : interpolate_resources) {
                glUnmapNamedBuffer(resource->pbo);
                check_error();
                glDeleteBuffers(1, &resource->pbo);
                check_error();
                glDeleteFramebuffers(2, resource->input_fbos);
                check_error();
                glDeleteFramebuffers(1, &resource->fade_fbo);
                check_error();
                glDeleteTextures(1, &resource->input_tex);
                check_error();
                glDeleteTextures(1, &resource->gray_tex);
                check_error();
                glDeleteTextures(1, &resource->fade_y_output_tex);
                check_error();
                glDeleteTextures(1, &resource->fade_cbcr_output_tex);
                check_error();
                glDeleteTextures(1, &resource->cb_tex);
                check_error();
                glDeleteTextures(1, &resource->cr_tex);
                check_error();
        }
        assert(interpolate_resources.size() == num_interpolate_slots);
}

void VideoStream::start()
{
        if (avctx == nullptr) {
                avctx = avformat_alloc_context();

                // We use Matroska, because it's pretty much the only mux where FFmpeg
                // allows writing chroma location to override JFIF's default center placement.
                // (Note that at the time of writing, however, FFmpeg does not correctly
                // _read_ this information!)
                avctx->oformat = av_guess_format("matroska", nullptr, nullptr);

                uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
                avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
                avctx->pb->write_data_type = &VideoStream::write_packet2_thunk;
                avctx->pb->ignore_boundary_point = 1;

                avctx->flags = AVFMT_FLAG_CUSTOM_IO;
        }

        AVCodecParameters *audio_codecpar = avcodec_parameters_alloc();

        audio_codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
        audio_codecpar->codec_id = AV_CODEC_ID_PCM_S32LE;
        audio_codecpar->ch_layout.order = AV_CHANNEL_ORDER_NATIVE;
        audio_codecpar->ch_layout.nb_channels = 2;
        audio_codecpar->ch_layout.u.mask = AV_CH_LAYOUT_STEREO;
        audio_codecpar->sample_rate = OUTPUT_FREQUENCY;

        size_t width = global_flags.width, height = global_flags.height;  // Doesn't matter for MJPEG.
        mux.reset(new Mux(avctx, width, height, Mux::CODEC_MJPEG, /*video_extradata=*/"", audio_codecpar,
                          AVCOL_SPC_BT709, COARSE_TIMEBASE, /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}, with_subtitles));

        avcodec_parameters_free(&audio_codecpar);
        encode_thread = thread(&VideoStream::encode_thread_func, this);
}

void VideoStream::stop()
{
        should_quit = true;
        queue_changed.notify_all();
        clear_queue();
        encode_thread.join();
}

void VideoStream::clear_queue()
{
        deque<QueuedFrame> q;

        {
                lock_guard<mutex> lock(queue_lock);
                q = move(frame_queue);
        }

        // These are not RAII-ed, unfortunately, so we'll need to clean them ourselves.
        // Note that release_texture() is thread-safe.
        for (const QueuedFrame &qf : q) {
                if (qf.type == QueuedFrame::INTERPOLATED ||
                    qf.type == QueuedFrame::FADED_INTERPOLATED) {
                        if (qf.flow_tex != 0) {
                                compute_flow->release_texture(qf.flow_tex);
                        }
                }
                if (qf.type == QueuedFrame::INTERPOLATED) {
                        interpolate->release_texture(qf.output_tex);
                        interpolate->release_texture(qf.cbcr_tex);
                }
        }

        // Destroy q outside the mutex, since the destructors can take the lock
        // themselves, and that would be a double-lock.
}

void VideoStream::schedule_original_frame(steady_clock::time_point local_pts,
                                          int64_t output_pts, function<void()> &&display_func,
                                          QueueSpotHolder &&queue_spot_holder,
                                          FrameOnDisk frame, const string &subtitle, bool include_audio)
{
        fprintf(stderr, "output_pts=%" PRId64 "  original      input_pts=%" PRId64 "\n", output_pts, frame.pts);

        QueuedFrame qf;
        qf.local_pts = local_pts;
        qf.type = QueuedFrame::ORIGINAL;
        qf.output_pts = output_pts;
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.subtitle = subtitle;
        FrameReader::Frame read_frame = frame_reader.read_frame(frame, /*read_video=*/true, include_audio);
        qf.encoded_jpeg.reset(new string(move(read_frame.video)));
        qf.audio = move(read_frame.audio);

        lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
}

void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64_t output_pts,
                                       function<void()> &&display_func,
                                       QueueSpotHolder &&queue_spot_holder,
                                       FrameOnDisk frame1_spec, FrameOnDisk frame2_spec,
                                       float fade_alpha, const string &subtitle)
{
        fprintf(stderr, "output_pts=%" PRId64 "  faded         input_pts=%" PRId64 ",%" PRId64 "  fade_alpha=%.2f\n", output_pts, frame1_spec.pts, frame2_spec.pts, fade_alpha);

        // Get the temporary OpenGL resources we need for doing the fade.
        // (We share these with interpolated frames, which is slightly
        // overkill, but there's no need to waste resources on keeping
        // separate pools around.)
        BorrowedInterpolatedFrameResources resources;
        {
                lock_guard<mutex> lock(queue_lock);
                if (interpolate_resources.empty()) {
                        fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
                        return;
                }
                resources = BorrowedInterpolatedFrameResources(interpolate_resources.front().release());
                interpolate_resources.pop_front();
        }

        bool did_decode;

        shared_ptr<Frame> frame1 = decode_jpeg_with_cache(frame1_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
        shared_ptr<Frame> frame2 = decode_jpeg_with_cache(frame2_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
        wait_for_upload(frame1);
        wait_for_upload(frame2);

        ycbcr_semiplanar_converter->prepare_chain_for_fade(frame1, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);

        QueuedFrame qf;
        qf.local_pts = local_pts;
        qf.type = QueuedFrame::FADED;
        qf.output_pts = output_pts;
        qf.frame1 = frame1_spec;
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.subtitle = subtitle;

        qf.secondary_frame = frame2_spec;

        // Subsample and split Cb/Cr.
        chroma_subsampler->subsample_chroma(resources->fade_cbcr_output_tex, global_flags.width, global_flags.height, resources->cb_tex, resources->cr_tex);

        // Read it down (asynchronously) to the CPU.
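        // (The size argument to each glGetTextureImage() call is the space
        // remaining in the PBO from that offset, not the exact plane size;
        // the Y, Cb and Cr planes are packed tightly after each other.)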
        glPixelStorei(GL_PACK_ROW_LENGTH, 0);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, resources->pbo);
        check_error();
        glGetTextureImage(resources->fade_y_output_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 4, BUFFER_OFFSET(0));
        check_error();
        glGetTextureImage(resources->cb_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 3, BUFFER_OFFSET(global_flags.width * global_flags.height));
        check_error();
        glGetTextureImage(resources->cr_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 3 - (global_flags.width / 2) * global_flags.height, BUFFER_OFFSET(global_flags.width * global_flags.height + (global_flags.width / 2) * global_flags.height));
        check_error();
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);

        // Set a fence we can wait for to make sure the CPU sees the read.
        glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
        check_error();
        qf.fence_created = steady_clock::now();
        qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
        check_error();
        qf.resources = move(resources);

        lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
}

void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts,
                                              int64_t output_pts, function<void(shared_ptr<Frame>)> &&display_func,
                                              QueueSpotHolder &&queue_spot_holder,
                                              FrameOnDisk frame1, FrameOnDisk frame2,
                                              float alpha, FrameOnDisk secondary_frame, float fade_alpha, const string &subtitle,
                                              bool play_audio)
{
        if (secondary_frame.pts != -1) {
                fprintf(stderr, "output_pts=%" PRId64 "  interpolated  input_pts1=%" PRId64 " input_pts2=%" PRId64 " alpha=%.3f  secondary_pts=%" PRId64 "  fade_alpha=%.2f\n", output_pts, frame1.pts, frame2.pts, alpha, secondary_frame.pts, fade_alpha);
        } else {
                fprintf(stderr, "output_pts=%" PRId64 "  interpolated  input_pts1=%" PRId64 " input_pts2=%" PRId64 " alpha=%.3f\n", output_pts, frame1.pts, frame2.pts, alpha);
        }

        // Get the temporary OpenGL resources we need for doing the interpolation.
        BorrowedInterpolatedFrameResources resources;
        {
                lock_guard<mutex> lock(queue_lock);
                if (interpolate_resources.empty()) {
                        fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
                        return;
                }
                resources = BorrowedInterpolatedFrameResources(interpolate_resources.front().release());
                interpolate_resources.pop_front();
        }

        QueuedFrame qf;
        qf.type = (secondary_frame.pts == -1) ? QueuedFrame::INTERPOLATED : QueuedFrame::FADED_INTERPOLATED;
        qf.output_pts = output_pts;
        qf.display_decoded_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.local_pts = local_pts;
        qf.subtitle = subtitle;

        if (play_audio) {
                qf.audio = frame_reader.read_frame(frame1, /*read_video=*/false, /*read_audio=*/true).audio;
        }

        check_error();

        // Convert the two input frames to OpenGL textures.
        for (size_t frame_no = 0; frame_no < 2; ++frame_no) {
                FrameOnDisk frame_spec = frame_no == 1 ? frame2 : frame1;
                bool did_decode;
                shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
                wait_for_upload(frame);
                ycbcr_converter->prepare_chain_for_conversion(frame)->render_to_fbo(resources->input_fbos[frame_no], global_flags.width, global_flags.height);
                if (frame_no == 1) {
                        qf.exif_data = frame->exif_data;  // Use the white point from the last frame.
                }
        }
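
        // Build the mip pyramids; the flow computation (and the interpolation
        // after it) works coarse-to-fine, so it also samples the smaller levels.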
        glGenerateTextureMipmap(resources->input_tex);
        check_error();
        glGenerateTextureMipmap(resources->gray_tex);
        check_error();

        GLuint flow_tex;
        if (last_flow_tex != 0 && frame1 == last_frame1 && frame2 == last_frame2) {
                // Reuse the flow from the previous computation. This frequently happens
                // if we slow down by more than 2x, so that there are multiple interpolated
                // frames between each original.
                flow_tex = last_flow_tex;
                qf.flow_tex = 0;
        } else {
                // Cache miss, so release last_flow_tex.
                qf.flow_tex = last_flow_tex;

                // Compute the flow.
                flow_tex = compute_flow->exec(resources->gray_tex, DISComputeFlow::FORWARD_AND_BACKWARD, DISComputeFlow::DO_NOT_RESIZE_FLOW);
                check_error();

                // Store the flow texture for possible reuse next frame.
                last_flow_tex = flow_tex;
                last_frame1 = frame1;
                last_frame2 = frame2;
        }

        if (secondary_frame.pts != -1) {
                // Fade. First kick off the interpolation.
                tie(qf.output_tex, ignore) = interpolate_no_split->exec(resources->input_tex, resources->gray_tex, flow_tex, global_flags.width, global_flags.height, alpha);
                check_error();

                // Now decode the image we are fading against.
                bool did_decode;
                shared_ptr<Frame> frame2 = decode_jpeg_with_cache(secondary_frame, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
                wait_for_upload(frame2);

                // Then fade against it, putting it into the fade Y' and CbCr textures.
                RGBTriplet neutral_color = get_neutral_color(qf.exif_data);
                ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, neutral_color, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);

                // Subsample and split Cb/Cr.
                chroma_subsampler->subsample_chroma(resources->fade_cbcr_output_tex, global_flags.width, global_flags.height, resources->cb_tex, resources->cr_tex);

                interpolate_no_split->release_texture(qf.output_tex);

                // We already applied the white balance, so don't have the client redo it.
                qf.exif_data.clear();
        } else {
                tie(qf.output_tex, qf.cbcr_tex) = interpolate->exec(resources->input_tex, resources->gray_tex, flow_tex, global_flags.width, global_flags.height, alpha);
                check_error();

                // Subsample and split Cb/Cr.
                chroma_subsampler->subsample_chroma(qf.cbcr_tex, global_flags.width, global_flags.height, resources->cb_tex, resources->cr_tex);
        }

        // We could have released qf.flow_tex here, but to make sure we don't cause a stall
        // when trying to reuse it for the next frame, we can just as well hold on to it
        // and release it only when the readback is done.
        //
        // TODO: This is maybe less relevant now that qf.flow_tex contains the texture we used
        // _last_ frame, not this one.

        // Read it down (asynchronously) to the CPU.
        glPixelStorei(GL_PACK_ROW_LENGTH, 0);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, resources->pbo);
        check_error();
        if (secondary_frame.pts != -1) {
                glGetTextureImage(resources->fade_y_output_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 4, BUFFER_OFFSET(0));
        } else {
                glGetTextureImage(qf.output_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 4, BUFFER_OFFSET(0));
        }
        check_error();
        glGetTextureImage(resources->cb_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 3, BUFFER_OFFSET(global_flags.width * global_flags.height));
        check_error();
        glGetTextureImage(resources->cr_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 3 - (global_flags.width / 2) * global_flags.height, BUFFER_OFFSET(global_flags.width * global_flags.height + (global_flags.width / 2) * global_flags.height));
        check_error();
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);

        // Set a fence we can wait for to make sure the CPU sees the read.
        glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
        check_error();
        qf.fence_created = steady_clock::now();
        qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
        check_error();
        qf.resources = move(resources);

        lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
}

void VideoStream::schedule_refresh_frame(steady_clock::time_point local_pts,
                                         int64_t output_pts, function<void()> &&display_func,
                                         QueueSpotHolder &&queue_spot_holder, const string &subtitle)
{
        QueuedFrame qf;
        qf.type = QueuedFrame::REFRESH;
        qf.output_pts = output_pts;
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.subtitle = subtitle;

        lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
}

void VideoStream::schedule_silence(steady_clock::time_point local_pts, int64_t output_pts,
                                   int64_t length_pts, QueueSpotHolder &&queue_spot_holder)
{
        QueuedFrame qf;
        qf.type = QueuedFrame::SILENCE;
        qf.output_pts = output_pts;
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.silence_length_pts = length_pts;

        lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
}

namespace {
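
// Make a standalone copy of an R8 texture, so that the UI can keep displaying
// the frame after the pooled per-slot textures have been returned for reuse.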
RefCountedTexture clone_r8_texture(GLuint src_tex, unsigned width, unsigned height)
{
        GLuint tex;
        glCreateTextures(GL_TEXTURE_2D, 1, &tex);
        check_error();
        glTextureStorage2D(tex, 1, GL_R8, width, height);
        check_error();
        glCopyImageSubData(src_tex, GL_TEXTURE_2D, 0, 0, 0, 0,
                           tex, GL_TEXTURE_2D, 0, 0, 0, 0,
                           width, height, 1);
        check_error();
        glTextureParameteri(tex, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        check_error();
        glTextureParameteri(tex, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        check_error();
        glTextureParameteri(tex, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        check_error();
        glTextureParameteri(tex, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        check_error();

        return RefCountedTexture(new GLuint(tex), TextureDeleter());
}

}  // namespace

void VideoStream::encode_thread_func()
{
        pthread_setname_np(pthread_self(), "VideoStream");
        QSurface *surface = create_surface();
        QOpenGLContext *context = create_context(surface);
        bool ok = make_current(context, surface);
        if (!ok) {
                fprintf(stderr, "Video stream couldn't get an OpenGL context\n");
                abort();
        }

        init_pbo_pool();

        while (!should_quit) {
                QueuedFrame qf;
                {
                        unique_lock<mutex> lock(queue_lock);

                        // Wait until we have a frame to play.
                        queue_changed.wait(lock, [this] {
                                return !frame_queue.empty() || should_quit;
                        });
                        if (should_quit) {
                                break;
                        }
                        steady_clock::time_point frame_start = frame_queue.front().local_pts;

                        // Now sleep until the frame is supposed to start (the usual case),
                        // _or_ clear_queue() happened.
                        bool aborted;
                        if (output_fast_forward) {
                                aborted = frame_queue.empty() || frame_queue.front().local_pts != frame_start;
                        } else {
                                aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start] {
                                        return frame_queue.empty() || frame_queue.front().local_pts != frame_start;
                                });
                        }
                        if (aborted) {
                                // clear_queue() happened, so don't play this frame after all.
                                continue;
                        }
                        qf = move(frame_queue.front());
                        frame_queue.pop_front();
                }

                // Hack: We mux the subtitle packet one time unit before the actual frame,
                // so that Nageru is sure to get it first.
                if (!qf.subtitle.empty() && with_subtitles == Mux::WITH_SUBTITLES) {
                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = mux->get_subtitle_stream_idx();
                        assert(pkt.stream_index != -1);
                        pkt.data = (uint8_t *)qf.subtitle.data();
                        pkt.size = qf.subtitle.size();
                        pkt.flags = 0;
                        pkt.duration = lrint(TIMEBASE / global_flags.output_framerate);  // Doesn't really matter for Nageru.
                        mux->add_packet(pkt, qf.output_pts - 1, qf.output_pts - 1);
                }

                if (qf.type == QueuedFrame::ORIGINAL) {
                        // Send the JPEG frame on, unchanged.
                        string jpeg = move(*qf.encoded_jpeg);
                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        pkt.flags = AV_PKT_FLAG_KEY;
                        mux->add_packet(pkt, qf.output_pts, qf.output_pts);
                        last_frame = move(jpeg);

                        add_audio_or_silence(qf);
                } else if (qf.type == QueuedFrame::FADED) {
                        steady_clock::time_point start = steady_clock::now();
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
                        steady_clock::time_point stop = steady_clock::now();
                        metric_fade_fence_wait_time_seconds.count_event(duration<double>(stop - start).count());
                        metric_fade_latency_seconds.count_event(duration<double>(stop - qf.fence_created).count());

                        // Now JPEG encode it, and send it on to the stream.
                        string jpeg = encode_jpeg_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height, /*exif_data=*/"");

                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        pkt.flags = AV_PKT_FLAG_KEY;
                        mux->add_packet(pkt, qf.output_pts, qf.output_pts);
                        last_frame = move(jpeg);

                        add_audio_or_silence(qf);
                } else if (qf.type == QueuedFrame::INTERPOLATED || qf.type == QueuedFrame::FADED_INTERPOLATED) {
                        steady_clock::time_point start = steady_clock::now();
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
                        steady_clock::time_point stop = steady_clock::now();
                        metric_interpolation_fence_wait_time_seconds.count_event(duration<double>(stop - start).count());
                        metric_interpolation_latency_seconds.count_event(duration<double>(stop - qf.fence_created).count());

                        // Send it on to display.
                        if (qf.display_decoded_func != nullptr) {
                                shared_ptr<Frame> frame(new Frame);
                                if (qf.type == QueuedFrame::FADED_INTERPOLATED) {
                                        frame->y = clone_r8_texture(qf.resources->fade_y_output_tex, global_flags.width, global_flags.height);
                                } else {
                                        frame->y = clone_r8_texture(qf.output_tex, global_flags.width, global_flags.height);
                                }
                                frame->cb = clone_r8_texture(qf.resources->cb_tex, global_flags.width / 2, global_flags.height);
                                frame->cr = clone_r8_texture(qf.resources->cr_tex, global_flags.width / 2, global_flags.height);
                                frame->width = global_flags.width;
                                frame->height = global_flags.height;
                                frame->chroma_subsampling_x = 2;
                                frame->chroma_subsampling_y = 1;
                                frame->uploaded_ui_thread = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
                                qf.display_decoded_func(move(frame));
                        }

                        // Now JPEG encode it, and send it on to the stream.
                        string jpeg = encode_jpeg_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height, move(qf.exif_data));
                        if (qf.flow_tex != 0) {
                                compute_flow->release_texture(qf.flow_tex);
                        }
                        if (qf.type != QueuedFrame::FADED_INTERPOLATED) {
                                interpolate->release_texture(qf.output_tex);
                                interpolate->release_texture(qf.cbcr_tex);
                        }

                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        pkt.flags = AV_PKT_FLAG_KEY;
                        mux->add_packet(pkt, qf.output_pts, qf.output_pts);
                        last_frame = move(jpeg);

                        add_audio_or_silence(qf);
                } else if (qf.type == QueuedFrame::REFRESH) {
                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)last_frame.data();
                        pkt.size = last_frame.size();
                        pkt.flags = AV_PKT_FLAG_KEY;
                        mux->add_packet(pkt, qf.output_pts, qf.output_pts);

                        add_audio_or_silence(qf);  // Definitely silence.
                } else if (qf.type == QueuedFrame::SILENCE) {
                        add_silence(qf.output_pts, qf.silence_length_pts);
                } else {
                        assert(false);
                }
                if (qf.display_func != nullptr) {
                        qf.display_func();
                }
        }
}

int VideoStream::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
{
        VideoStream *video_stream = (VideoStream *)opaque;
        return video_stream->write_packet2(buf, buf_size, type, time);
}

int VideoStream::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
{
        if (type == AVIO_DATA_MARKER_SYNC_POINT || type == AVIO_DATA_MARKER_BOUNDARY_POINT) {
                seen_sync_markers = true;
        } else if (type == AVIO_DATA_MARKER_UNKNOWN && !seen_sync_markers) {
                // We don't know if this is a keyframe or not (the muxer could
                // avoid marking it), so we just have to make the best of it.
                type = AVIO_DATA_MARKER_SYNC_POINT;
        }

        HTTPD::StreamID stream_id{ HTTPD::MAIN_STREAM, 0 };
        if (type == AVIO_DATA_MARKER_HEADER) {
                stream_mux_header.append((char *)buf, buf_size);
                global_httpd->set_header(stream_id, stream_mux_header);
        } else {
                global_httpd->add_data(stream_id, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
        }
        return buf_size;
}

void VideoStream::add_silence(int64_t pts, int64_t length_pts)
{
        // At 59.94 fps, this will never quite add up (even discounting refresh frames,
        // which have unpredictable length), but hopefully, the player at the other
        // end should be able to stretch silence easily enough.
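        // Two channels (stereo) of 32-bit samples, matching the PCM_S32LE
        // stream parameters set up in start().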
        long num_samples = lrint(length_pts * double(OUTPUT_FREQUENCY) / double(TIMEBASE)) * 2;
        uint8_t *zero = (uint8_t *)calloc(num_samples, sizeof(int32_t));

        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.stream_index = 1;
        pkt.data = zero;
        pkt.size = num_samples * sizeof(int32_t);
        pkt.flags = AV_PKT_FLAG_KEY;
        mux->add_packet(pkt, pts, pts);

        free(zero);
}

void VideoStream::add_audio_or_silence(const QueuedFrame &qf)
{
        if (qf.audio.empty()) {
                int64_t frame_length = lrint(double(TIMEBASE) / global_flags.output_framerate);
                add_silence(qf.output_pts, frame_length);
        } else {
                AVPacket pkt;
                av_init_packet(&pkt);
                pkt.stream_index = 1;
                pkt.data = (uint8_t *)qf.audio.data();
                pkt.size = qf.audio.size();
                pkt.flags = AV_PKT_FLAG_KEY;
                mux->add_packet(pkt, qf.output_pts, qf.output_pts);
        }
}