#include "video_stream.h"

extern "C" {
#include <libavformat/avformat.h>
#include <libavformat/avio.h>
}

#include <jpeglib.h>
#include <unistd.h>

#include "context.h"
#include "flow.h"
#include "httpd.h"
#include "jpeg_frame_view.h"
#include "movit/util.h"
#include "mux.h"
#include "player.h"
#include "util.h"

#include <epoxy/glx.h>

using namespace std;

extern HTTPD *global_httpd;

namespace {

string read_file(const string &filename)
{
        FILE *fp = fopen(filename.c_str(), "rb");
        if (fp == nullptr) {
                perror(filename.c_str());
                return "";
        }

        fseek(fp, 0, SEEK_END);
        long len = ftell(fp);
        rewind(fp);
        string ret;
        ret.resize(len);
        if (fread(&ret[0], len, 1, fp) != 1) {
                fprintf(stderr, "%s: short read\n", filename.c_str());
                fclose(fp);
                return "";
        }
        fclose(fp);
        return ret;
}

}  // namespace

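// A libjpeg destination manager that writes the compressed output into a
// std::vector<uint8_t>. The jpeg_destination_mgr must be the first member,
// so that the pointer stored in cinfo.dest can be cast back to this struct;
// the static_assert below checks that layout assumption.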
struct VectorDestinationManager {
        jpeg_destination_mgr pub;
        std::vector<uint8_t> dest;

        VectorDestinationManager()
        {
                pub.init_destination = init_destination_thunk;
                pub.empty_output_buffer = empty_output_buffer_thunk;
                pub.term_destination = term_destination_thunk;
        }

        static void init_destination_thunk(j_compress_ptr ptr)
        {
                ((VectorDestinationManager *)(ptr->dest))->init_destination();
        }

        inline void init_destination()
        {
                make_room(0);
        }

        static boolean empty_output_buffer_thunk(j_compress_ptr ptr)
        {
                return ((VectorDestinationManager *)(ptr->dest))->empty_output_buffer();
        }

        inline bool empty_output_buffer()
        {
                make_room(dest.size());  // Should ignore pub.free_in_buffer!
                return true;
        }

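        // Grow the vector so there is always output space available, and
        // point libjpeg's output cursor into it. The resize-to-capacity()
        // trick hands libjpeg all the memory the vector actually allocated,
        // and the pointers are refreshed on every call, since resize() may
        // move the data.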
        inline void make_room(size_t bytes_used)
        {
                dest.resize(bytes_used + 4096);
                dest.resize(dest.capacity());
                pub.next_output_byte = dest.data() + bytes_used;
                pub.free_in_buffer = dest.size() - bytes_used;
        }

        static void term_destination_thunk(j_compress_ptr ptr)
        {
                ((VectorDestinationManager *)(ptr->dest))->term_destination();
        }

        inline void term_destination()
        {
                dest.resize(dest.size() - pub.free_in_buffer);
        }
};
static_assert(std::is_standard_layout<VectorDestinationManager>::value, "");

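// Encode the given RGBA pixel data (in bottom-to-top scanline order, as read
// back from OpenGL) as a quality-90, 4:2:2 baseline JPEG, and return the
// compressed bytes.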
vector<uint8_t> encode_jpeg(const uint8_t *pixel_data, unsigned width, unsigned height)
{
        VectorDestinationManager dest;

        jpeg_compress_struct cinfo;
        jpeg_error_mgr jerr;
        cinfo.err = jpeg_std_error(&jerr);
        jpeg_create_compress(&cinfo);

        cinfo.dest = (jpeg_destination_mgr *)&dest;
        cinfo.input_components = 3;
        cinfo.in_color_space = JCS_RGB;
        jpeg_set_defaults(&cinfo);
        constexpr int quality = 90;
        jpeg_set_quality(&cinfo, quality, /*force_baseline=*/false);

        cinfo.image_width = width;
        cinfo.image_height = height;
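        // 2x1 luma sampling against 1x1 chroma gives 4:2:2 subsampling
        // (chroma at half horizontal, full vertical resolution).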
        cinfo.comp_info[0].h_samp_factor = 2;
        cinfo.comp_info[0].v_samp_factor = 1;
        cinfo.comp_info[1].h_samp_factor = 1;
        cinfo.comp_info[1].v_samp_factor = 1;
        cinfo.comp_info[2].h_samp_factor = 1;
        cinfo.comp_info[2].v_samp_factor = 1;
        // cinfo.CCIR601_sampling = true;  // TODO: Subsample ourselves.
        jpeg_start_compress(&cinfo, true);

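        // libjpeg wants rows top-to-bottom, so flip vertically while copying,
        // and drop the alpha byte of each RGBA pixel along the way.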
        unique_ptr<uint8_t[]> row(new uint8_t[width * 3]);
        JSAMPROW row_pointer[1] = { row.get() };
        for (unsigned y = 0; y < height; ++y) {
                const uint8_t *sptr = &pixel_data[(height - cinfo.next_scanline - 1) * width * 4];
                uint8_t *dptr = row.get();
                for (unsigned x = 0; x < width; ++x) {
                        *dptr++ = *sptr++;
                        *dptr++ = *sptr++;
                        *dptr++ = *sptr++;
                        ++sptr;
                }
                (void) jpeg_write_scanlines(&cinfo, row_pointer, 1);
        }

        jpeg_finish_compress(&cinfo);
        jpeg_destroy_compress(&cinfo);

        return move(dest.dest);
}

VideoStream::VideoStream()
{
        using namespace movit;
        // TODO: deduplicate code against JPEGFrameView?
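        // This chain converts the planar Y'CbCr frames we decode from the
        // MJPEG input into RGBA textures that the flow and interpolation
        // code can work on.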
        ycbcr_convert_chain.reset(new EffectChain(1280, 720));
        ImageFormat image_format;
        image_format.color_space = COLORSPACE_sRGB;
        image_format.gamma_curve = GAMMA_sRGB;
        ycbcr_format.luma_coefficients = YCBCR_REC_709;
        ycbcr_format.full_range = true;  // JPEG.
        ycbcr_format.num_levels = 256;
        ycbcr_format.chroma_subsampling_x = 2;
        ycbcr_format.chroma_subsampling_y = 1;
        ycbcr_format.cb_x_position = 0.0f;  // H.264 -- _not_ JPEG, even though our input is MJPEG-encoded
        ycbcr_format.cb_y_position = 0.5f;  // Irrelevant.
        ycbcr_format.cr_x_position = 0.0f;
        ycbcr_format.cr_y_position = 0.5f;
        ycbcr_input = (movit::YCbCrInput *)ycbcr_convert_chain->add_input(new YCbCrInput(image_format, ycbcr_format, 1280, 720));

        ImageFormat inout_format;
        inout_format.color_space = COLORSPACE_sRGB;
        inout_format.gamma_curve = GAMMA_sRGB;

        check_error();
        ycbcr_convert_chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
        check_error();
        ycbcr_convert_chain->set_dither_bits(8);
        check_error();
        ycbcr_convert_chain->finalize();
        check_error();

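        // Set up the per-slot resources for interpolated frames in flight:
        // a two-layer RGBA texture array (one layer per input frame), a
        // matching grayscale array for the optical flow computation, one FBO
        // per layer, and a persistently mapped PBO for asynchronous readback
        // of the interpolated result.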
        GLuint input_tex[num_interpolate_slots], gray_tex[num_interpolate_slots];
        glCreateTextures(GL_TEXTURE_2D_ARRAY, num_interpolate_slots, input_tex);
        glCreateTextures(GL_TEXTURE_2D_ARRAY, num_interpolate_slots, gray_tex);
        check_error();
        constexpr size_t width = 1280, height = 720;  // FIXME: adjustable width, height
        int levels = find_num_levels(width, height);
        for (size_t i = 0; i < num_interpolate_slots; ++i) {
                glTextureStorage3D(input_tex[i], levels, GL_RGBA8, width, height, 2);
                check_error();
                glTextureStorage3D(gray_tex[i], levels, GL_R8, width, height, 2);
                check_error();

                InterpolatedFrameResources resource;
                resource.input_tex = input_tex[i];
                resource.gray_tex = gray_tex[i];
                glCreateFramebuffers(2, resource.input_fbos);
                check_error();

                glNamedFramebufferTextureLayer(resource.input_fbos[0], GL_COLOR_ATTACHMENT0, input_tex[i], 0, 0);
                check_error();
                glNamedFramebufferTextureLayer(resource.input_fbos[1], GL_COLOR_ATTACHMENT0, input_tex[i], 0, 1);
                check_error();

                GLuint buf = GL_COLOR_ATTACHMENT0;
                glNamedFramebufferDrawBuffers(resource.input_fbos[0], 1, &buf);
                check_error();
                glNamedFramebufferDrawBuffers(resource.input_fbos[1], 1, &buf);
                check_error();

                glCreateBuffers(1, &resource.pbo);
                check_error();
                glNamedBufferStorage(resource.pbo, width * height * 4, nullptr, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
                check_error();
                resource.pbo_contents = glMapNamedBufferRange(resource.pbo, 0, width * height * 4, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
                interpolate_resources.push_back(resource);
        }

        check_error();

        compute_flow.reset(new DISComputeFlow(width, height, operating_point3));
        gray.reset(new GrayscaleConversion);  // NOTE: Must come after DISComputeFlow, since it sets up the VBO!
        interpolate.reset(new Interpolate(width, height, operating_point3));
        check_error();
}

VideoStream::~VideoStream() {}

void VideoStream::start()
{
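        // Stream the (M)JPEG frames in a NUT container over custom avio I/O;
        // the muxed bytes are handed to write_packet2(), which feeds the
        // HTTP server.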
        AVFormatContext *avctx = avformat_alloc_context();
        avctx->oformat = av_guess_format("nut", nullptr, nullptr);

        uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
        avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
        avctx->pb->write_data_type = &VideoStream::write_packet2_thunk;
        avctx->pb->ignore_boundary_point = 1;

        Mux::Codec video_codec = Mux::CODEC_MJPEG;

        avctx->flags = AVFMT_FLAG_CUSTOM_IO;

        string video_extradata;

        constexpr int width = 1280, height = 720;  // Doesn't matter for MJPEG.
        stream_mux.reset(new Mux(avctx, width, height, video_codec, video_extradata, /*audio_codec_parameters=*/nullptr, COARSE_TIMEBASE,
                /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));

        encode_thread = thread(&VideoStream::encode_thread_func, this);
}

void VideoStream::stop()
{
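        // NOTE: encode_thread_func() has no shutdown signal yet, so this
        // join will block indefinitely; stopping cleanly is still a TODO.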
        encode_thread.join();
}

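// Queue an already-encoded JPEG frame from the given stream to be sent out
// unchanged at the given output pts.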
void VideoStream::schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts)
{
        QueuedFrame qf;
        qf.type = QueuedFrame::ORIGINAL;
        qf.output_pts = output_pts;
        qf.stream_idx = stream_idx;
        qf.input_first_pts = input_pts;

        unique_lock<mutex> lock(queue_lock);
        frame_queue.push_back(qf);
        queue_nonempty.notify_all();
}

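// Queue a frame interpolated between the two given input frames at the given
// alpha. All the GPU work -- Y'CbCr conversion, optical flow, interpolation,
// asynchronous readback -- is kicked off here; the encode thread picks up the
// result once the fence signals.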
void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha)
{
        // Get the temporary OpenGL resources we need for doing the interpolation.
        InterpolatedFrameResources resources;
        {
                unique_lock<mutex> lock(queue_lock);
                if (interpolate_resources.empty()) {
                        fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
                        return;
                }
                resources = interpolate_resources.front();
                interpolate_resources.pop_front();
        }

        QueuedFrame qf;
        qf.type = QueuedFrame::INTERPOLATED;
        qf.output_pts = output_pts;
        qf.stream_idx = stream_idx;
        qf.resources = resources;

        check_error();

        // Convert frame0 and frame1 to OpenGL textures.
        // TODO: Deduplicate against JPEGFrameView::setDecodedFrame?
        for (size_t frame_no = 0; frame_no < 2; ++frame_no) {
                shared_ptr<Frame> frame = decode_jpeg(filename_for_frame(stream_idx, frame_no == 1 ? input_second_pts : input_first_pts));
                ycbcr_format.chroma_subsampling_x = frame->chroma_subsampling_x;
                ycbcr_format.chroma_subsampling_y = frame->chroma_subsampling_y;
                ycbcr_input->change_ycbcr_format(ycbcr_format);
                ycbcr_input->set_width(frame->width);
                ycbcr_input->set_height(frame->height);
                ycbcr_input->set_pixel_data(0, frame->y.get());
                ycbcr_input->set_pixel_data(1, frame->cb.get());
                ycbcr_input->set_pixel_data(2, frame->cr.get());
                ycbcr_input->set_pitch(0, frame->pitch_y);
                ycbcr_input->set_pitch(1, frame->pitch_chroma);
                ycbcr_input->set_pitch(2, frame->pitch_chroma);
                ycbcr_convert_chain->render_to_fbo(resources.input_fbos[frame_no], 1280, 720);
        }

        glGenerateTextureMipmap(resources.input_tex);

        // Compute the interpolated frame.
        check_error();
        gray->exec(resources.input_tex, resources.gray_tex, 1280, 720, /*num_layers=*/2);
        check_error();
        glGenerateTextureMipmap(resources.gray_tex);
        check_error();
        GLuint flow_tex = compute_flow->exec(resources.gray_tex, DISComputeFlow::FORWARD_AND_BACKWARD, DISComputeFlow::DO_NOT_RESIZE_FLOW);
        check_error();

        qf.output_tex = interpolate->exec(resources.input_tex, flow_tex, 1280, 720, alpha);
        check_error();

        // Read it down (asynchronously) to the CPU.
        glPixelStorei(GL_PACK_ROW_LENGTH, 0);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, resources.pbo);
        check_error();
        glGetTextureImage(qf.output_tex, 0, GL_RGBA, GL_UNSIGNED_BYTE, 1280 * 720 * 4, nullptr);
        check_error();
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);

        // Set a fence we can wait for to make sure the CPU sees the read.
        glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
        check_error();
        qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
        check_error();

        unique_lock<mutex> lock(queue_lock);
        frame_queue.push_back(qf);
        queue_nonempty.notify_all();
}

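// The encode thread owns its own OpenGL context, so it can wait on the fences
// and read the interpolated frames back; originals are passed through
// unchanged, and everything is handed to the mux as MJPEG packets.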
void VideoStream::encode_thread_func()
{
        QSurface *surface = create_surface();
        QOpenGLContext *context = create_context(surface);
        bool ok = make_current(context, surface);
        if (!ok) {
                fprintf(stderr, "Video stream couldn't get an OpenGL context\n");
                exit(1);
        }

        for ( ;; ) {
                QueuedFrame qf;
                {
                        unique_lock<mutex> lock(queue_lock);
                        queue_nonempty.wait(lock, [this]{
                                return !frame_queue.empty();
                        });
                        qf = frame_queue.front();
                        frame_queue.pop_front();
                }

                if (qf.type == QueuedFrame::ORIGINAL) {
                        // Send the JPEG frame on, unchanged.
                        string jpeg = read_file(filename_for_frame(qf.stream_idx, qf.input_first_pts));
                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
                } else if (qf.type == QueuedFrame::INTERPOLATED) {
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);

                        vector<uint8_t> jpeg = encode_jpeg((const uint8_t *)qf.resources.pbo_contents, 1280, 720);

                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);

                        // Put the frame resources back.
                        unique_lock<mutex> lock(queue_lock);
                        interpolate_resources.push_back(qf.resources);
                }
        }
}

int VideoStream::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
{
        VideoStream *video_stream = (VideoStream *)opaque;
        return video_stream->write_packet2(buf, buf_size, type, time);
}

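// Receives muxed bytes from the avio layer. The stream header is stashed so
// it can be replayed to HTTP clients that connect later; all other data is
// forwarded directly, with sync points marking keyframe boundaries.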
int VideoStream::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
{
        if (type == AVIO_DATA_MARKER_SYNC_POINT || type == AVIO_DATA_MARKER_BOUNDARY_POINT) {
                seen_sync_markers = true;
        } else if (type == AVIO_DATA_MARKER_UNKNOWN && !seen_sync_markers) {
                // We don't know if this is a keyframe or not (the muxer could
                // avoid marking it), so we just have to make the best of it.
                type = AVIO_DATA_MARKER_SYNC_POINT;
        }

        if (type == AVIO_DATA_MARKER_HEADER) {
                stream_mux_header.append((char *)buf, buf_size);
                global_httpd->set_header(stream_mux_header);
        } else {
                global_httpd->add_data((char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
        }
        return buf_size;
}