#include "video_stream.h"

extern "C" {
#include <libavformat/avformat.h>
#include <libavformat/avio.h>
}

#include "chroma_subsampler.h"
#include "exif_parser.h"
#include "flags.h"
#include "flow.h"
#include "jpeg_frame_view.h"
#include "movit/util.h"
#include "player.h"
#include "shared/context.h"
#include "shared/httpd.h"
#include "shared/metrics.h"
#include "shared/mux.h"
#include "shared/shared_defs.h"
#include "util.h"
#include "ycbcr_converter.h"

#include <epoxy/glx.h>
#include <jpeglib.h>
#include <unistd.h>

using namespace movit;
using namespace std;
using namespace std::chrono;

namespace {

once_flag video_metrics_inited;
Summary metric_jpeg_encode_time_seconds;
Summary metric_fade_latency_seconds;
Summary metric_interpolation_latency_seconds;
Summary metric_fade_fence_wait_time_seconds;
Summary metric_interpolation_fence_wait_time_seconds;

}  // namespace

extern HTTPD *global_httpd;

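// libjpeg's output goes through a jpeg_destination_mgr, which supplies three
// callbacks: init_destination (set up the initial buffer), empty_output_buffer
// (the buffer is full; make more room) and term_destination (compression is
// done; trim the buffer to what was actually written). This wraps them around
// a std::string. The struct must be standard-layout (checked below), with
// "pub" as the first member, so that the jpeg_destination_mgr pointer that
// libjpeg hands back can be cast back to the enclosing VectorDestinationManager.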
struct VectorDestinationManager {
        jpeg_destination_mgr pub;
        string dest;

        VectorDestinationManager()
        {
                pub.init_destination = init_destination_thunk;
                pub.empty_output_buffer = empty_output_buffer_thunk;
                pub.term_destination = term_destination_thunk;
        }

        static void init_destination_thunk(j_compress_ptr ptr)
        {
                ((VectorDestinationManager *)(ptr->dest))->init_destination();
        }

        inline void init_destination()
        {
                make_room(0);
        }

        static boolean empty_output_buffer_thunk(j_compress_ptr ptr)
        {
                return ((VectorDestinationManager *)(ptr->dest))->empty_output_buffer();
        }

        inline bool empty_output_buffer()
        {
                // Per the libjpeg documentation, the buffer is to be considered
                // completely full here, no matter what pub.free_in_buffer says.
                make_room(dest.size());
                return true;
        }

        inline void make_room(size_t bytes_used)
        {
                // Grow by at least 4 kB, then expand to the full allocation,
                // so that libjpeg gets to use all the space we have anyway.
                dest.resize(bytes_used + 4096);
                dest.resize(dest.capacity());
                pub.next_output_byte = (uint8_t *)dest.data() + bytes_used;
                pub.free_in_buffer = dest.size() - bytes_used;
        }

        static void term_destination_thunk(j_compress_ptr ptr)
        {
                ((VectorDestinationManager *)(ptr->dest))->term_destination();
        }

        inline void term_destination()
        {
                dest.resize(dest.size() - pub.free_in_buffer);
        }
};
static_assert(std::is_standard_layout<VectorDestinationManager>::value, "");

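// Encode a planar 4:2:2 Y'CbCr image (full-resolution Y', Cb/Cr at half
// horizontal resolution, limited range) to JPEG. Note that the row loop below
// assumes the height to be a multiple of 8.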
string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height, const string exif_data)
{
        steady_clock::time_point start = steady_clock::now();
        VectorDestinationManager dest;

        jpeg_compress_struct cinfo;
        jpeg_error_mgr jerr;
        cinfo.err = jpeg_std_error(&jerr);
        jpeg_create_compress(&cinfo);

        cinfo.dest = (jpeg_destination_mgr *)&dest;
        cinfo.input_components = 3;
        cinfo.in_color_space = JCS_RGB;
        jpeg_set_defaults(&cinfo);
        constexpr int quality = 90;
        jpeg_set_quality(&cinfo, quality, /*force_baseline=*/false);

        cinfo.image_width = width;
        cinfo.image_height = height;
        cinfo.raw_data_in = true;
        jpeg_set_colorspace(&cinfo, JCS_YCbCr);
        cinfo.comp_info[0].h_samp_factor = 2;
        cinfo.comp_info[0].v_samp_factor = 1;
        cinfo.comp_info[1].h_samp_factor = 1;
        cinfo.comp_info[1].v_samp_factor = 1;
        cinfo.comp_info[2].h_samp_factor = 1;
        cinfo.comp_info[2].v_samp_factor = 1;
        cinfo.CCIR601_sampling = true;  // Seems to be mostly ignored by libjpeg, though.
        jpeg_start_compress(&cinfo, true);

        // This comment marker is private to FFmpeg. It signals limited Y'CbCr range
        // (and nothing else).
        jpeg_write_marker(&cinfo, JPEG_COM, (const JOCTET *)"CS=ITU601", strlen("CS=ITU601"));

        if (!exif_data.empty()) {
                jpeg_write_marker(&cinfo, JPEG_APP0 + 1, (const JOCTET *)exif_data.data(), exif_data.size());
        }

        JSAMPROW yptr[8], cbptr[8], crptr[8];
        JSAMPARRAY data[3] = { yptr, cbptr, crptr };
        for (unsigned y = 0; y < height; y += 8) {
                for (unsigned yy = 0; yy < 8; ++yy) {
                        yptr[yy] = const_cast<JSAMPROW>(&y_data[(y + yy) * width]);
                        cbptr[yy] = const_cast<JSAMPROW>(&cb_data[(y + yy) * width / 2]);
                        crptr[yy] = const_cast<JSAMPROW>(&cr_data[(y + yy) * width / 2]);
                }

                jpeg_write_raw_data(&cinfo, data, /*num_lines=*/8);
        }

        jpeg_finish_compress(&cinfo);
        jpeg_destroy_compress(&cinfo);

        steady_clock::time_point stop = steady_clock::now();
        metric_jpeg_encode_time_seconds.count_event(duration<double>(stop - start).count());

        return move(dest.dest);
}

VideoStream::VideoStream(AVFormatContext *file_avctx)
        : avctx(file_avctx), output_fast_forward(file_avctx != nullptr)
{
        call_once(video_metrics_inited, [] {
                vector<double> quantiles{ 0.01, 0.1, 0.25, 0.5, 0.75, 0.9, 0.99 };
                metric_jpeg_encode_time_seconds.init(quantiles, 60.0);
                global_metrics.add("jpeg_encode_time_seconds", &metric_jpeg_encode_time_seconds);
                metric_fade_fence_wait_time_seconds.init(quantiles, 60.0);
                global_metrics.add("fade_fence_wait_time_seconds", &metric_fade_fence_wait_time_seconds);
                metric_interpolation_fence_wait_time_seconds.init(quantiles, 60.0);
                global_metrics.add("interpolation_fence_wait_time_seconds", &metric_interpolation_fence_wait_time_seconds);
                metric_fade_latency_seconds.init(quantiles, 60.0);
                global_metrics.add("fade_latency_seconds", &metric_fade_latency_seconds);
                metric_interpolation_latency_seconds.init(quantiles, 60.0);
                global_metrics.add("interpolation_latency_seconds", &metric_interpolation_latency_seconds);
        });

        ycbcr_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_DUAL_YCBCR, /*resource_pool=*/nullptr));
        ycbcr_semiplanar_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_SEMIPLANAR, /*resource_pool=*/nullptr));

        GLuint input_tex[num_interpolate_slots], gray_tex[num_interpolate_slots];
        GLuint fade_y_output_tex[num_interpolate_slots], fade_cbcr_output_tex[num_interpolate_slots];
        GLuint cb_tex[num_interpolate_slots], cr_tex[num_interpolate_slots];

        glCreateTextures(GL_TEXTURE_2D_ARRAY, num_interpolate_slots, input_tex);
        glCreateTextures(GL_TEXTURE_2D_ARRAY, num_interpolate_slots, gray_tex);
        glCreateTextures(GL_TEXTURE_2D, num_interpolate_slots, fade_y_output_tex);
        glCreateTextures(GL_TEXTURE_2D, num_interpolate_slots, fade_cbcr_output_tex);
        glCreateTextures(GL_TEXTURE_2D, num_interpolate_slots, cb_tex);
        glCreateTextures(GL_TEXTURE_2D, num_interpolate_slots, cr_tex);
        check_error();

        size_t width = global_flags.width, height = global_flags.height;
        int levels = find_num_levels(width, height);
        for (size_t i = 0; i < num_interpolate_slots; ++i) {
                glTextureStorage3D(input_tex[i], levels, GL_RGBA8, width, height, 2);
                check_error();
                glTextureStorage3D(gray_tex[i], levels, GL_R8, width, height, 2);
                check_error();
                glTextureStorage2D(fade_y_output_tex[i], 1, GL_R8, width, height);
                check_error();
                glTextureStorage2D(fade_cbcr_output_tex[i], 1, GL_RG8, width, height);
                check_error();
                glTextureStorage2D(cb_tex[i], 1, GL_R8, width / 2, height);
                check_error();
                glTextureStorage2D(cr_tex[i], 1, GL_R8, width / 2, height);
                check_error();

                unique_ptr<InterpolatedFrameResources> resource(new InterpolatedFrameResources);
                resource->owner = this;
                resource->input_tex = input_tex[i];
                resource->gray_tex = gray_tex[i];
                resource->fade_y_output_tex = fade_y_output_tex[i];
                resource->fade_cbcr_output_tex = fade_cbcr_output_tex[i];
                resource->cb_tex = cb_tex[i];
                resource->cr_tex = cr_tex[i];
                glCreateFramebuffers(2, resource->input_fbos);
                check_error();
                glCreateFramebuffers(1, &resource->fade_fbo);
                check_error();

                glNamedFramebufferTextureLayer(resource->input_fbos[0], GL_COLOR_ATTACHMENT0, input_tex[i], 0, 0);
                check_error();
                glNamedFramebufferTextureLayer(resource->input_fbos[0], GL_COLOR_ATTACHMENT1, gray_tex[i], 0, 0);
                check_error();
                glNamedFramebufferTextureLayer(resource->input_fbos[1], GL_COLOR_ATTACHMENT0, input_tex[i], 0, 1);
                check_error();
                glNamedFramebufferTextureLayer(resource->input_fbos[1], GL_COLOR_ATTACHMENT1, gray_tex[i], 0, 1);
                check_error();
                glNamedFramebufferTexture(resource->fade_fbo, GL_COLOR_ATTACHMENT0, fade_y_output_tex[i], 0);
                check_error();
                glNamedFramebufferTexture(resource->fade_fbo, GL_COLOR_ATTACHMENT1, fade_cbcr_output_tex[i], 0);
                check_error();

                GLuint bufs[] = { GL_COLOR_ATTACHMENT0, GL_COLOR_ATTACHMENT1 };
                glNamedFramebufferDrawBuffers(resource->input_fbos[0], 2, bufs);
                check_error();
                glNamedFramebufferDrawBuffers(resource->input_fbos[1], 2, bufs);
                check_error();
                glNamedFramebufferDrawBuffers(resource->fade_fbo, 2, bufs);
                check_error();

                glCreateBuffers(1, &resource->pbo);
                check_error();
                glNamedBufferStorage(resource->pbo, width * height * 4, nullptr, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
                check_error();
                resource->pbo_contents = glMapNamedBufferRange(resource->pbo, 0, width * height * 4, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
                interpolate_resources.push_back(move(resource));
        }

        check_error();

        OperatingPoint op;
        if (global_flags.interpolation_quality == 0 ||
            global_flags.interpolation_quality == 1) {
                // Quality 0 is changed to 1 in flags.cpp, so this covers both.
                op = operating_point1;
        } else if (global_flags.interpolation_quality == 2) {
                op = operating_point2;
        } else if (global_flags.interpolation_quality == 3) {
                op = operating_point3;
        } else if (global_flags.interpolation_quality == 4) {
                op = operating_point4;
        } else {
                assert(false);
        }

        compute_flow.reset(new DISComputeFlow(width, height, op));
        interpolate.reset(new Interpolate(op, /*split_ycbcr_output=*/true));
        interpolate_no_split.reset(new Interpolate(op, /*split_ycbcr_output=*/false));
        chroma_subsampler.reset(new ChromaSubsampler);
        check_error();

        // The “last frame” is initially black (Y' = 16, Cb = Cr = 128 is
        // limited-range black).
        unique_ptr<uint8_t[]> y(new uint8_t[global_flags.width * global_flags.height]);
        unique_ptr<uint8_t[]> cb_or_cr(new uint8_t[(global_flags.width / 2) * global_flags.height]);
        memset(y.get(), 16, global_flags.width * global_flags.height);
        memset(cb_or_cr.get(), 128, (global_flags.width / 2) * global_flags.height);
        last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), global_flags.width, global_flags.height, /*exif_data=*/"");

        if (file_avctx != nullptr) {
                with_subtitles = Mux::WITHOUT_SUBTITLES;
        } else {
                with_subtitles = Mux::WITH_SUBTITLES;
        }
}

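// Note: This assumes all in-flight frames have been played out, so that every
// InterpolatedFrameResources slot is back in the pool; the assert at the end
// checks exactly that.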
VideoStream::~VideoStream()
{
        if (last_flow_tex != 0) {
                compute_flow->release_texture(last_flow_tex);
        }

        for (const unique_ptr<InterpolatedFrameResources> &resource : interpolate_resources) {
                glUnmapNamedBuffer(resource->pbo);
                check_error();
                glDeleteBuffers(1, &resource->pbo);
                check_error();
                glDeleteFramebuffers(2, resource->input_fbos);
                check_error();
                glDeleteFramebuffers(1, &resource->fade_fbo);
                check_error();
                glDeleteTextures(1, &resource->input_tex);
                check_error();
                glDeleteTextures(1, &resource->gray_tex);
                check_error();
                glDeleteTextures(1, &resource->fade_y_output_tex);
                check_error();
                glDeleteTextures(1, &resource->fade_cbcr_output_tex);
                check_error();
                glDeleteTextures(1, &resource->cb_tex);
                check_error();
                glDeleteTextures(1, &resource->cr_tex);
                check_error();
        }
        assert(interpolate_resources.size() == num_interpolate_slots);
}

void VideoStream::start()
{
        if (avctx == nullptr) {
                avctx = avformat_alloc_context();

                // We use Matroska, because it's pretty much the only mux where FFmpeg
                // allows writing chroma location to override JFIF's default center placement.
                // (Note that at the time of writing, however, FFmpeg does not correctly
                // _read_ this information!)
                avctx->oformat = av_guess_format("matroska", nullptr, nullptr);

                uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
                avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
                avctx->pb->write_data_type = &VideoStream::write_packet2_thunk;
                avctx->pb->ignore_boundary_point = 1;

                avctx->flags = AVFMT_FLAG_CUSTOM_IO;
        }

        AVCodecParameters *audio_codecpar = avcodec_parameters_alloc();

        audio_codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
        audio_codecpar->codec_id = AV_CODEC_ID_PCM_S32LE;
        audio_codecpar->channel_layout = AV_CH_LAYOUT_STEREO;
        audio_codecpar->channels = 2;
        audio_codecpar->sample_rate = OUTPUT_FREQUENCY;

        size_t width = global_flags.width, height = global_flags.height;  // Doesn't matter for MJPEG.
        mux.reset(new Mux(avctx, width, height, Mux::CODEC_MJPEG, /*video_extradata=*/"", audio_codecpar,
                          AVCOL_SPC_BT709, COARSE_TIMEBASE, /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}, with_subtitles));

        avcodec_parameters_free(&audio_codecpar);
        encode_thread = thread(&VideoStream::encode_thread_func, this);
}

void VideoStream::stop()
{
        should_quit = true;
        queue_changed.notify_all();
        clear_queue();
        encode_thread.join();
}

void VideoStream::clear_queue()
{
        deque<QueuedFrame> q;

        {
                lock_guard<mutex> lock(queue_lock);
                q = move(frame_queue);
        }

        // These are not RAII-ed, unfortunately, so we'll need to clean them ourselves.
        // Note that release_texture() is thread-safe.
        for (const QueuedFrame &qf : q) {
                if (qf.type == QueuedFrame::INTERPOLATED ||
                    qf.type == QueuedFrame::FADED_INTERPOLATED) {
                        if (qf.flow_tex != 0) {
                                compute_flow->release_texture(qf.flow_tex);
                        }
                }
                if (qf.type == QueuedFrame::INTERPOLATED) {
                        interpolate->release_texture(qf.output_tex);
                        interpolate->release_texture(qf.cbcr_tex);
                }
        }

        // Destroy q outside the mutex; destroying the queued frames returns
        // their resources to the pool, which takes the same lock, so doing it
        // while holding the mutex would be a double-lock.
}

void VideoStream::schedule_original_frame(steady_clock::time_point local_pts,
                                          int64_t output_pts, function<void()> &&display_func,
                                          QueueSpotHolder &&queue_spot_holder,
                                          FrameOnDisk frame, const string &subtitle, bool include_audio)
{
        fprintf(stderr, "output_pts=%" PRId64 "  original      input_pts=%" PRId64 "\n", output_pts, frame.pts);

        QueuedFrame qf;
        qf.local_pts = local_pts;
        qf.type = QueuedFrame::ORIGINAL;
        qf.output_pts = output_pts;
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.subtitle = subtitle;
        FrameReader::Frame read_frame = frame_reader.read_frame(frame, /*read_video=*/true, include_audio);
        qf.encoded_jpeg.reset(new string(move(read_frame.video)));
        qf.audio = move(read_frame.audio);

        lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
}

void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64_t output_pts,
                                       function<void()> &&display_func,
                                       QueueSpotHolder &&queue_spot_holder,
                                       FrameOnDisk frame1_spec, FrameOnDisk frame2_spec,
                                       float fade_alpha, const string &subtitle)
{
        fprintf(stderr, "output_pts=%" PRId64 "  faded         input_pts=%" PRId64 ",%" PRId64 "  fade_alpha=%.2f\n", output_pts, frame1_spec.pts, frame2_spec.pts, fade_alpha);

        // Get the temporary OpenGL resources we need for doing the fade.
        // (We share these with interpolated frames, which is slightly
        // overkill, but there's no need to waste resources on keeping
        // separate pools around.)
        BorrowedInterpolatedFrameResources resources;
        {
                lock_guard<mutex> lock(queue_lock);
                if (interpolate_resources.empty()) {
                        fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
                        return;
                }
                resources = BorrowedInterpolatedFrameResources(interpolate_resources.front().release());
                interpolate_resources.pop_front();
        }

        bool did_decode;

        shared_ptr<Frame> frame1 = decode_jpeg_with_cache(frame1_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
        shared_ptr<Frame> frame2 = decode_jpeg_with_cache(frame2_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);

        ycbcr_semiplanar_converter->prepare_chain_for_fade(frame1, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);

        QueuedFrame qf;
        qf.local_pts = local_pts;
        qf.type = QueuedFrame::FADED;
        qf.output_pts = output_pts;
        qf.frame1 = frame1_spec;
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.subtitle = subtitle;

        qf.secondary_frame = frame2_spec;

        // Subsample and split Cb/Cr.
        chroma_subsampler->subsample_chroma(resources->fade_cbcr_output_tex, global_flags.width, global_flags.height, resources->cb_tex, resources->cr_tex);

        // Read it down (asynchronously) to the CPU.
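        // The PBO is laid out planar: Y' at offset 0 (width * height bytes),
        // then Cb at offset width * height ((width / 2) * height bytes),
        // then Cr right after it. This matches what encode_jpeg() and
        // frame_from_pbo() below expect. Note that the size argument to each
        // glGetTextureImage() call is the space remaining in the buffer after
        // the offset, not the size of the plane itself.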
        glPixelStorei(GL_PACK_ROW_LENGTH, 0);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, resources->pbo);
        check_error();
        glGetTextureImage(resources->fade_y_output_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 4, BUFFER_OFFSET(0));
        check_error();
        glGetTextureImage(resources->cb_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 3, BUFFER_OFFSET(global_flags.width * global_flags.height));
        check_error();
        glGetTextureImage(resources->cr_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 3 - (global_flags.width / 2) * global_flags.height, BUFFER_OFFSET(global_flags.width * global_flags.height + (global_flags.width / 2) * global_flags.height));
        check_error();
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);

        // Set a fence we can wait for to make sure the CPU sees the read.
        glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
        check_error();
        qf.fence_created = steady_clock::now();
        qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
        check_error();
        qf.resources = move(resources);

        lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
}

void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts,
                                              int64_t output_pts, function<void(shared_ptr<Frame>)> &&display_func,
                                              QueueSpotHolder &&queue_spot_holder,
                                              FrameOnDisk frame1, FrameOnDisk frame2,
                                              float alpha, FrameOnDisk secondary_frame, float fade_alpha, const string &subtitle,
                                              bool play_audio)
{
        if (secondary_frame.pts != -1) {
                fprintf(stderr, "output_pts=%" PRId64 "  interpolated  input_pts1=%" PRId64 " input_pts2=%" PRId64 " alpha=%.3f  secondary_pts=%" PRId64 "  fade_alpha=%.2f\n", output_pts, frame1.pts, frame2.pts, alpha, secondary_frame.pts, fade_alpha);
        } else {
                fprintf(stderr, "output_pts=%" PRId64 "  interpolated  input_pts1=%" PRId64 " input_pts2=%" PRId64 " alpha=%.3f\n", output_pts, frame1.pts, frame2.pts, alpha);
        }

        // Get the temporary OpenGL resources we need for doing the interpolation.
        BorrowedInterpolatedFrameResources resources;
        {
                lock_guard<mutex> lock(queue_lock);
                if (interpolate_resources.empty()) {
                        fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
                        return;
                }
                resources = BorrowedInterpolatedFrameResources(interpolate_resources.front().release());
                interpolate_resources.pop_front();
        }

        QueuedFrame qf;
        qf.type = (secondary_frame.pts == -1) ? QueuedFrame::INTERPOLATED : QueuedFrame::FADED_INTERPOLATED;
        qf.output_pts = output_pts;
        qf.display_decoded_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.local_pts = local_pts;
        qf.subtitle = subtitle;

        if (play_audio) {
                qf.audio = frame_reader.read_frame(frame1, /*read_video=*/false, /*read_audio=*/true).audio;
        }

        check_error();

        // Convert frame1 and frame2 to OpenGL textures (layers 0 and 1, respectively).
        for (size_t frame_no = 0; frame_no < 2; ++frame_no) {
                FrameOnDisk frame_spec = frame_no == 1 ? frame2 : frame1;
                bool did_decode;
                shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
                ycbcr_converter->prepare_chain_for_conversion(frame)->render_to_fbo(resources->input_fbos[frame_no], global_flags.width, global_flags.height);
                if (frame_no == 1) {
                        qf.exif_data = frame->exif_data;  // Use the white point from the last frame.
                }
        }

        glGenerateTextureMipmap(resources->input_tex);
        check_error();
        glGenerateTextureMipmap(resources->gray_tex);
        check_error();

        GLuint flow_tex;
        if (last_flow_tex != 0 && frame1 == last_frame1 && frame2 == last_frame2) {
                // Reuse the flow from the previous computation. This frequently happens
                // if we slow down by more than 2x, so that there are multiple interpolated
                // frames between each original.
                flow_tex = last_flow_tex;
                qf.flow_tex = 0;
        } else {
                // Cache miss, so release last_flow_tex.
                qf.flow_tex = last_flow_tex;

                // Compute the flow.
                flow_tex = compute_flow->exec(resources->gray_tex, DISComputeFlow::FORWARD_AND_BACKWARD, DISComputeFlow::DO_NOT_RESIZE_FLOW);
                check_error();

                // Store the flow texture for possible reuse next frame.
                last_flow_tex = flow_tex;
                last_frame1 = frame1;
                last_frame2 = frame2;
        }

        if (secondary_frame.pts != -1) {
                // Fade. First kick off the interpolation.
                tie(qf.output_tex, ignore) = interpolate_no_split->exec(resources->input_tex, resources->gray_tex, flow_tex, global_flags.width, global_flags.height, alpha);
                check_error();

                // Now decode the image we are fading against.
                bool did_decode;
                shared_ptr<Frame> frame2 = decode_jpeg_with_cache(secondary_frame, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);

                // Then fade against it, putting it into the fade Y' and CbCr textures.
                RGBTriplet neutral_color = get_neutral_color(qf.exif_data);
                ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, neutral_color, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);

                // Subsample and split Cb/Cr.
                chroma_subsampler->subsample_chroma(resources->fade_cbcr_output_tex, global_flags.width, global_flags.height, resources->cb_tex, resources->cr_tex);

                interpolate_no_split->release_texture(qf.output_tex);

                // We already applied the white balance, so don't have the client redo it.
                qf.exif_data.clear();
        } else {
                tie(qf.output_tex, qf.cbcr_tex) = interpolate->exec(resources->input_tex, resources->gray_tex, flow_tex, global_flags.width, global_flags.height, alpha);
                check_error();

                // Subsample and split Cb/Cr.
                chroma_subsampler->subsample_chroma(qf.cbcr_tex, global_flags.width, global_flags.height, resources->cb_tex, resources->cr_tex);
        }

        // We could have released qf.flow_tex here, but to make sure we don't cause a stall
        // when trying to reuse it for the next frame, we can just as well hold on to it
        // and release it only when the readback is done.
        //
        // TODO: This is maybe less relevant now that qf.flow_tex contains the texture we used
        // _last_ frame, not this one.

        // Read it down (asynchronously) to the CPU.
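        // (The PBO layout is the same as in schedule_faded_frame() above:
        // planar Y', then Cb, then Cr.)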
        glPixelStorei(GL_PACK_ROW_LENGTH, 0);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, resources->pbo);
        check_error();
        if (secondary_frame.pts != -1) {
                glGetTextureImage(resources->fade_y_output_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 4, BUFFER_OFFSET(0));
        } else {
                glGetTextureImage(qf.output_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 4, BUFFER_OFFSET(0));
        }
        check_error();
        glGetTextureImage(resources->cb_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 3, BUFFER_OFFSET(global_flags.width * global_flags.height));
        check_error();
        glGetTextureImage(resources->cr_tex, 0, GL_RED, GL_UNSIGNED_BYTE, global_flags.width * global_flags.height * 3 - (global_flags.width / 2) * global_flags.height, BUFFER_OFFSET(global_flags.width * global_flags.height + (global_flags.width / 2) * global_flags.height));
        check_error();
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);

        // Set a fence we can wait for to make sure the CPU sees the read.
        glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
        check_error();
        qf.fence_created = steady_clock::now();
        qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
        check_error();
        qf.resources = move(resources);

        lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
}

void VideoStream::schedule_refresh_frame(steady_clock::time_point local_pts,
                                         int64_t output_pts, function<void()> &&display_func,
                                         QueueSpotHolder &&queue_spot_holder, const string &subtitle)
{
        QueuedFrame qf;
        qf.type = QueuedFrame::REFRESH;
        qf.output_pts = output_pts;
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.subtitle = subtitle;

        lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
}

void VideoStream::schedule_silence(steady_clock::time_point local_pts, int64_t output_pts,
                                   int64_t length_pts, QueueSpotHolder &&queue_spot_holder)
{
        QueuedFrame qf;
        qf.type = QueuedFrame::SILENCE;
        qf.output_pts = output_pts;
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.silence_length_pts = length_pts;

        lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
}

namespace {

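// Copy the planar Y'CbCr data out of the persistently mapped PBO into a new
// Frame. We have to deep-copy, since the PBO slot is recycled for later frames
// once its InterpolatedFrameResources are returned to the pool. The layout
// matches the readbacks above: Y', then half-width Cb, then half-width Cr.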
shared_ptr<Frame> frame_from_pbo(void *contents, size_t width, size_t height)
{
        size_t chroma_width = width / 2;

        const uint8_t *y = (const uint8_t *)contents;
        const uint8_t *cb = (const uint8_t *)contents + width * height;
        const uint8_t *cr = (const uint8_t *)contents + width * height + chroma_width * height;

        shared_ptr<Frame> frame(new Frame);
        frame->y.reset(new uint8_t[width * height]);
        frame->cb.reset(new uint8_t[chroma_width * height]);
        frame->cr.reset(new uint8_t[chroma_width * height]);
        for (unsigned yy = 0; yy < height; ++yy) {
                memcpy(frame->y.get() + width * yy, y + width * yy, width);
                memcpy(frame->cb.get() + chroma_width * yy, cb + chroma_width * yy, chroma_width);
                memcpy(frame->cr.get() + chroma_width * yy, cr + chroma_width * yy, chroma_width);
        }
        frame->is_semiplanar = false;
        frame->width = width;
        frame->height = height;
        frame->chroma_subsampling_x = 2;
        frame->chroma_subsampling_y = 1;
        frame->pitch_y = width;
        frame->pitch_chroma = chroma_width;
        return frame;
}

}  // namespace

void VideoStream::encode_thread_func()
{
        pthread_setname_np(pthread_self(), "VideoStream");
        QSurface *surface = create_surface();
        QOpenGLContext *context = create_context(surface);
        bool ok = make_current(context, surface);
        if (!ok) {
                fprintf(stderr, "Video stream couldn't get an OpenGL context\n");
                abort();
        }

        while (!should_quit) {
                QueuedFrame qf;
                {
                        unique_lock<mutex> lock(queue_lock);

                        // Wait until we have a frame to play.
                        queue_changed.wait(lock, [this] {
                                return !frame_queue.empty() || should_quit;
                        });
                        if (should_quit) {
                                break;
                        }
                        steady_clock::time_point frame_start = frame_queue.front().local_pts;

                        // Now sleep until the frame is supposed to start (the usual case),
                        // _or_ clear_queue() happened.
                        bool aborted;
                        if (output_fast_forward) {
                                aborted = frame_queue.empty() || frame_queue.front().local_pts != frame_start;
                        } else {
                                aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start] {
                                        return frame_queue.empty() || frame_queue.front().local_pts != frame_start;
                                });
                        }
                        if (aborted) {
                                // clear_queue() happened, so don't play this frame after all.
                                continue;
                        }
                        qf = move(frame_queue.front());
                        frame_queue.pop_front();
                }

                // Hack: We mux the subtitle packet one time unit before the actual frame,
                // so that Nageru is sure to get it first.
                if (!qf.subtitle.empty() && with_subtitles == Mux::WITH_SUBTITLES) {
                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = mux->get_subtitle_stream_idx();
                        assert(pkt.stream_index != -1);
                        pkt.data = (uint8_t *)qf.subtitle.data();
                        pkt.size = qf.subtitle.size();
                        pkt.flags = 0;
                        pkt.duration = lrint(TIMEBASE / global_flags.output_framerate);  // Doesn't really matter for Nageru.
                        mux->add_packet(pkt, qf.output_pts - 1, qf.output_pts - 1);
                }

                if (qf.type == QueuedFrame::ORIGINAL) {
                        // Send the JPEG frame on, unchanged.
                        string jpeg = move(*qf.encoded_jpeg);
                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        pkt.flags = AV_PKT_FLAG_KEY;
                        mux->add_packet(pkt, qf.output_pts, qf.output_pts);
                        last_frame = move(jpeg);

                        add_audio_or_silence(qf);
                } else if (qf.type == QueuedFrame::FADED) {
                        steady_clock::time_point start = steady_clock::now();
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
                        steady_clock::time_point stop = steady_clock::now();
                        metric_fade_fence_wait_time_seconds.count_event(duration<double>(stop - start).count());
                        metric_fade_latency_seconds.count_event(duration<double>(stop - qf.fence_created).count());

                        shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
                        assert(frame->exif_data.empty());

                        // Now JPEG encode it, and send it on to the stream.
                        string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, /*exif_data=*/"");

                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        pkt.flags = AV_PKT_FLAG_KEY;
                        mux->add_packet(pkt, qf.output_pts, qf.output_pts);
                        last_frame = move(jpeg);

                        add_audio_or_silence(qf);
                } else if (qf.type == QueuedFrame::INTERPOLATED || qf.type == QueuedFrame::FADED_INTERPOLATED) {
                        steady_clock::time_point start = steady_clock::now();
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
                        steady_clock::time_point stop = steady_clock::now();
                        metric_interpolation_fence_wait_time_seconds.count_event(duration<double>(stop - start).count());
                        metric_interpolation_latency_seconds.count_event(duration<double>(stop - qf.fence_created).count());

                        // Send it on to display.
                        shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
                        if (qf.display_decoded_func != nullptr) {
                                qf.display_decoded_func(frame);
                        }

                        // Now JPEG encode it, and send it on to the stream.
                        string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, move(qf.exif_data));
                        if (qf.flow_tex != 0) {
                                compute_flow->release_texture(qf.flow_tex);
                        }
                        if (qf.type != QueuedFrame::FADED_INTERPOLATED) {
                                interpolate->release_texture(qf.output_tex);
                                interpolate->release_texture(qf.cbcr_tex);
                        }

                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        pkt.flags = AV_PKT_FLAG_KEY;
                        mux->add_packet(pkt, qf.output_pts, qf.output_pts);
                        last_frame = move(jpeg);

                        add_audio_or_silence(qf);
                } else if (qf.type == QueuedFrame::REFRESH) {
                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)last_frame.data();
                        pkt.size = last_frame.size();
                        pkt.flags = AV_PKT_FLAG_KEY;
                        mux->add_packet(pkt, qf.output_pts, qf.output_pts);

                        add_audio_or_silence(qf);  // Definitely silence.
                } else if (qf.type == QueuedFrame::SILENCE) {
                        add_silence(qf.output_pts, qf.silence_length_pts);
                } else {
                        assert(false);
                }
                if (qf.display_func != nullptr) {
                        qf.display_func();
                }
        }
}

int VideoStream::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
{
        VideoStream *video_stream = (VideoStream *)opaque;
        return video_stream->write_packet2(buf, buf_size, type, time);
}

int VideoStream::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
{
        if (type == AVIO_DATA_MARKER_SYNC_POINT || type == AVIO_DATA_MARKER_BOUNDARY_POINT) {
                seen_sync_markers = true;
        } else if (type == AVIO_DATA_MARKER_UNKNOWN && !seen_sync_markers) {
                // We don't know if this is a keyframe or not (the muxer could
                // avoid marking it), so we just have to make the best of it.
                type = AVIO_DATA_MARKER_SYNC_POINT;
        }

        HTTPD::StreamID stream_id{ HTTPD::MAIN_STREAM, 0 };
        if (type == AVIO_DATA_MARKER_HEADER) {
                stream_mux_header.append((char *)buf, buf_size);
                global_httpd->set_header(stream_id, stream_mux_header);
        } else {
                global_httpd->add_data(stream_id, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
        }
        return buf_size;
}

void VideoStream::add_silence(int64_t pts, int64_t length_pts)
{
        // At 59.94, this will never quite add up (even discounting refresh frames,
        // which have unpredictable length), but hopefully, the player at the other
        // end should be able to stretch silence easily enough.
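        // Note that num_samples counts samples over both channels; the stream
        // is interleaved stereo s32le, so each sample takes four bytes.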
        long num_samples = lrint(length_pts * double(OUTPUT_FREQUENCY) / double(TIMEBASE)) * 2;
        uint8_t *zero = (uint8_t *)calloc(num_samples, sizeof(int32_t));

        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.stream_index = 1;
        pkt.data = zero;
        pkt.size = num_samples * sizeof(int32_t);
        pkt.flags = AV_PKT_FLAG_KEY;
        mux->add_packet(pkt, pts, pts);

        free(zero);
}

void VideoStream::add_audio_or_silence(const QueuedFrame &qf)
{
        if (qf.audio.empty()) {
                int64_t frame_length = lrint(double(TIMEBASE) / global_flags.output_framerate);
                add_silence(qf.output_pts, frame_length);
        } else {
                AVPacket pkt;
                av_init_packet(&pkt);
                pkt.stream_index = 1;
                pkt.data = (uint8_t *)qf.audio.data();
                pkt.size = qf.audio.size();
                pkt.flags = AV_PKT_FLAG_KEY;
                mux->add_packet(pkt, qf.output_pts, qf.output_pts);
        }
}