]> git.sesse.net Git - nageru/blob - mjpeg_encoder.cpp
Add functionality for MJPEG export.
[nageru] / mjpeg_encoder.cpp
1 #include "mjpeg_encoder.h"
2
3 #include <jpeglib.h>
4 #include <unistd.h>
5 #if __SSE2__
6 #include <immintrin.h>
7 #endif
8 #include <list>
9
10 extern "C" {
11 #include <libavformat/avformat.h>
12 }
13
14 #include "defs.h"
15 #include "ffmpeg_raii.h"
16 #include "flags.h"
17 #include "httpd.h"
18 #include "memcpy_interleaved.h"
19 #include "pbo_frame_allocator.h"
20 #include "timebase.h"
21 #include "va_display_with_cleanup.h"
22
23 #include <va/va.h>
24 #include <va/va_drm.h>
25 #include <va/va_x11.h>
26
27 using namespace bmusb;
28 using namespace std;
29
30 extern void memcpy_with_pitch(uint8_t *dst, const uint8_t *src, size_t src_width, size_t dst_pitch, size_t height);
31
32 #define CHECK_VASTATUS(va_status, func)                                 \
33     if (va_status != VA_STATUS_SUCCESS) {                               \
34         fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
35         exit(1);                                                        \
36     }
37
38 // From libjpeg (although it's of course identical between implementations).
39 static const int jpeg_natural_order[DCTSIZE2] = {
40          0,  1,  8, 16,  9,  2,  3, 10,
41         17, 24, 32, 25, 18, 11,  4,  5,
42         12, 19, 26, 33, 40, 48, 41, 34,
43         27, 20, 13,  6,  7, 14, 21, 28,
44         35, 42, 49, 56, 57, 50, 43, 36,
45         29, 22, 15, 23, 30, 37, 44, 51,
46         58, 59, 52, 45, 38, 31, 39, 46,
47         53, 60, 61, 54, 47, 55, 62, 63,
48 };
49
50 struct VectorDestinationManager {
51         jpeg_destination_mgr pub;
52         std::vector<uint8_t> dest;
53
54         VectorDestinationManager()
55         {
56                 pub.init_destination = init_destination_thunk;
57                 pub.empty_output_buffer = empty_output_buffer_thunk;
58                 pub.term_destination = term_destination_thunk;
59         }
60
61         static void init_destination_thunk(j_compress_ptr ptr)
62         {
63                 ((VectorDestinationManager *)(ptr->dest))->init_destination();
64         }
65
66         inline void init_destination()
67         {
68                 make_room(0);
69         }
70
71         static boolean empty_output_buffer_thunk(j_compress_ptr ptr)
72         {
73                 return ((VectorDestinationManager *)(ptr->dest))->empty_output_buffer();
74         }
75
76         inline bool empty_output_buffer()
77         {
78                 make_room(dest.size());  // Should ignore pub.free_in_buffer!
79                 return true;
80         }
81
82         inline void make_room(size_t bytes_used)
83         {
84                 dest.resize(bytes_used + 4096);
85                 dest.resize(dest.capacity());
86                 pub.next_output_byte = dest.data() + bytes_used;
87                 pub.free_in_buffer = dest.size() - bytes_used;
88         }
89
90         static void term_destination_thunk(j_compress_ptr ptr)
91         {
92                 ((VectorDestinationManager *)(ptr->dest))->term_destination();
93         }
94
95         inline void term_destination()
96         {
97                 dest.resize(dest.size() - pub.free_in_buffer);
98         }
99 };
100 static_assert(std::is_standard_layout<VectorDestinationManager>::value, "");
101
102 int MJPEGEncoder::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
103 {
104         MJPEGEncoder *engine = (MJPEGEncoder *)opaque;
105         return engine->write_packet2(buf, buf_size, type, time);
106 }
107
108 int MJPEGEncoder::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
109 {
110         if (type == AVIO_DATA_MARKER_HEADER) {
111                 mux_header.append((char *)buf, buf_size);
112                 httpd->set_header(HTTPD::MULTICAM_STREAM, mux_header);
113         } else {
114                 httpd->add_data(HTTPD::MULTICAM_STREAM, (char *)buf, buf_size, /*keyframe=*/true, AV_NOPTS_VALUE, AVRational{ AV_TIME_BASE, 1 });
115         }
116         return buf_size;
117 }
118
119 MJPEGEncoder::MJPEGEncoder(HTTPD *httpd, const string &va_display)
120         : httpd(httpd)
121 {
122         encoder_thread = thread(&MJPEGEncoder::encoder_thread_func, this);
123
124         // Set up the mux. We don't use the Mux wrapper, because it's geared towards
125         // a situation with only one video stream (and possibly one audio stream)
126         // with known width/height, and we don't need the extra functionality it provides.
127         avctx.reset(avformat_alloc_context());
128         avctx->oformat = av_guess_format("mp4", nullptr, nullptr);
129
130         uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
131         avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
132         avctx->pb->write_data_type = &MJPEGEncoder::write_packet2_thunk;
133         avctx->flags = AVFMT_FLAG_CUSTOM_IO;
134
135         for (int card_idx = 0; card_idx < global_flags.num_cards; ++card_idx) {
136                 AVStream *stream = avformat_new_stream(avctx.get(), nullptr);
137                 if (stream == nullptr) {
138                         fprintf(stderr, "avformat_new_stream() failed\n");
139                         exit(1);
140                 }
141                 stream->time_base = AVRational{ 1, TIMEBASE };
142                 stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
143                 stream->codecpar->codec_id = AV_CODEC_ID_MJPEG;
144
145                 // Used for aspect ratio only. Can change without notice (the mux won't care).
146                 stream->codecpar->width = global_flags.width;
147                 stream->codecpar->height = global_flags.height;
148
149                 // TODO: We could perhaps use the interpretation for each card here
150                 // (or at least the command-line flags) instead of the defaults,
151                 // but what would we do when they change?
152                 stream->codecpar->color_primaries = AVCOL_PRI_BT709;
153                 stream->codecpar->color_trc = AVCOL_TRC_IEC61966_2_1;
154                 stream->codecpar->color_space = AVCOL_SPC_BT709;
155                 stream->codecpar->color_range = AVCOL_RANGE_MPEG;
156                 stream->codecpar->chroma_location = AVCHROMA_LOC_LEFT;
157                 stream->codecpar->field_order = AV_FIELD_PROGRESSIVE;
158         }
159
160         AVDictionary *options = NULL;
161         vector<pair<string, string>> opts = MUX_OPTS;
162         for (pair<string, string> opt : opts) {
163                 av_dict_set(&options, opt.first.c_str(), opt.second.c_str(), 0);
164         }
165         if (avformat_write_header(avctx.get(), &options) < 0) {
166                 fprintf(stderr, "avformat_write_header() failed\n");
167                 exit(1);
168         }
169
170         // Initialize VA-API.
171         string error;
172         va_dpy = try_open_va(va_display, &error, &config_id);
173         if (va_dpy == nullptr) {
174                 fprintf(stderr, "Could not initialize VA-API for MJPEG encoding: %s. JPEGs will be encoded in software if needed.\n", error.c_str());
175         }
176
177         running = true;
178 }
179
180 void MJPEGEncoder::stop()
181 {
182         if (!running) {
183                 return;
184         }
185         running = false;
186         should_quit = true;
187         any_frames_to_be_encoded.notify_all();
188         encoder_thread.join();
189 }
190
191 unique_ptr<VADisplayWithCleanup> MJPEGEncoder::try_open_va(const string &va_display, string *error, VAConfigID *config_id)
192 {
193         unique_ptr<VADisplayWithCleanup> va_dpy = va_open_display(va_display);
194         if (va_dpy == nullptr) {
195                 if (error) *error = "Opening VA display failed";
196                 return nullptr;
197         }
198         int major_ver, minor_ver;
199         VAStatus va_status = vaInitialize(va_dpy->va_dpy, &major_ver, &minor_ver);
200         if (va_status != VA_STATUS_SUCCESS) {
201                 char buf[256];
202                 snprintf(buf, sizeof(buf), "vaInitialize() failed with status %d\n", va_status);
203                 if (error != nullptr) *error = buf;
204                 return nullptr;
205         }
206
207         VAConfigAttrib attr = { VAConfigAttribRTFormat, VA_RT_FORMAT_YUV422 };
208         va_status = vaCreateConfig(va_dpy->va_dpy, VAProfileJPEGBaseline, VAEntrypointEncPicture,
209                 &attr, 1, config_id);
210         if (va_status == VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT) {
211                 if (error != nullptr) *error = "No hardware support";
212                 return nullptr;
213         } else if (va_status != VA_STATUS_SUCCESS) {
214                 char buf[256];
215                 snprintf(buf, sizeof(buf), "vaCreateConfig() failed with status %d\n", va_status);
216                 if (error != nullptr) *error = buf;
217                 return nullptr;
218         }
219
220         int num_formats = vaMaxNumImageFormats(va_dpy->va_dpy);
221         assert(num_formats > 0);
222
223         unique_ptr<VAImageFormat[]> formats(new VAImageFormat[num_formats]);
224         va_status = vaQueryImageFormats(va_dpy->va_dpy, formats.get(), &num_formats);
225         if (va_status != VA_STATUS_SUCCESS) {
226                 char buf[256];
227                 snprintf(buf, sizeof(buf), "vaQueryImageFormats() failed with status %d\n", va_status);
228                 if (error != nullptr) *error = buf;
229                 return nullptr;
230         }
231
232         return va_dpy;
233 }
234
235 void MJPEGEncoder::upload_frame(int64_t pts, unsigned card_index, RefCountedFrame frame, const bmusb::VideoFormat &video_format, size_t y_offset, size_t cbcr_offset)
236 {
237         PBOFrameAllocator::Userdata *userdata = (PBOFrameAllocator::Userdata *)frame->userdata;
238         if (video_format.width == 0 || video_format.height == 0) {
239                 return;
240         }
241         if (video_format.interlaced) {
242                 fprintf(stderr, "Card %u: Ignoring JPEG encoding for interlaced frame\n", card_index);
243                 return;
244         }
245         if (userdata->pixel_format != PixelFormat_8BitYCbCr ||
246             !frame->interleaved) {
247                 fprintf(stderr, "Card %u: Ignoring JPEG encoding for unsupported pixel format\n", card_index);
248                 return;
249         }
250         if (video_format.width > 4096 || video_format.height > 4096) {
251                 fprintf(stderr, "Card %u: Ignoring JPEG encoding for oversized frame\n", card_index);
252                 return;
253         }
254
255         lock_guard<mutex> lock(mu);
256         frames_to_be_encoded.push(QueuedFrame{ pts, card_index, frame, video_format, y_offset, cbcr_offset });
257         any_frames_to_be_encoded.notify_all();
258 }
259
260 void MJPEGEncoder::encoder_thread_func()
261 {
262         pthread_setname_np(pthread_self(), "MJPEG_Encode");
263         posix_memalign((void **)&tmp_y, 4096, 4096 * 8);
264         posix_memalign((void **)&tmp_cbcr, 4096, 4096 * 8);
265         posix_memalign((void **)&tmp_cb, 4096, 4096 * 8);
266         posix_memalign((void **)&tmp_cr, 4096, 4096 * 8);
267
268         unique_lock<mutex> lock(mu);
269         for (;;) {
270                 any_frames_to_be_encoded.wait(lock, [this] { return !frames_to_be_encoded.empty() || should_quit; });
271                 if (should_quit) return;
272                 QueuedFrame qf = move(frames_to_be_encoded.front());
273                 frames_to_be_encoded.pop();
274
275                 vector<uint8_t> jpeg = encode_jpeg(qf);
276
277                 AVPacket pkt;
278                 memset(&pkt, 0, sizeof(pkt));
279                 pkt.buf = nullptr;
280                 pkt.data = &jpeg[0];
281                 pkt.size = jpeg.size();
282                 pkt.stream_index = qf.card_index;
283                 pkt.flags = AV_PKT_FLAG_KEY;
284                 pkt.pts = pkt.dts = qf.pts;
285
286                 if (av_write_frame(avctx.get(), &pkt) < 0) {
287                         fprintf(stderr, "av_write_frame() failed\n");
288                         exit(1);
289                 }
290         }
291 }
292
293 class VABufferDestroyer {
294 public:
295         VABufferDestroyer(VADisplay dpy, VABufferID buf)
296                 : dpy(dpy), buf(buf) {}
297
298         ~VABufferDestroyer() {
299                 VAStatus va_status = vaDestroyBuffer(dpy, buf);
300                 CHECK_VASTATUS(va_status, "vaDestroyBuffer");
301         }
302
303 private:
304         VADisplay dpy;
305         VABufferID buf;
306 };
307
308 MJPEGEncoder::VAResources MJPEGEncoder::get_va_resources(unsigned width, unsigned height)
309 {
310         {
311                 lock_guard<mutex> lock(va_resources_mutex);
312                 for (auto it = va_resources_freelist.begin(); it != va_resources_freelist.end(); ++it) {
313                         if (it->width == width && it->height == height) {
314                                 VAResources ret = *it;
315                                 va_resources_freelist.erase(it);
316                                 return ret;
317                         }
318                 }
319         }
320
321         VAResources ret;
322
323         ret.width = width;
324         ret.height = height;
325
326         VASurfaceAttrib attrib;
327         attrib.flags = VA_SURFACE_ATTRIB_SETTABLE;
328         attrib.type = VASurfaceAttribPixelFormat;
329         attrib.value.type = VAGenericValueTypeInteger;
330         attrib.value.value.i = VA_FOURCC_UYVY;
331
332         VAStatus va_status = vaCreateSurfaces(va_dpy->va_dpy, VA_RT_FORMAT_YUV422,
333                 width, height,
334                 &ret.surface, 1, &attrib, 1);
335         CHECK_VASTATUS(va_status, "vaCreateSurfaces");
336
337         va_status = vaCreateContext(va_dpy->va_dpy, config_id, width, height, 0, &ret.surface, 1, &ret.context);
338         CHECK_VASTATUS(va_status, "vaCreateContext");
339
340         va_status = vaCreateBuffer(va_dpy->va_dpy, config_id, VAEncCodedBufferType, width * height * 3 + 8192, 1, nullptr, &ret.data_buffer);
341         CHECK_VASTATUS(va_status, "vaCreateBuffer");
342
343         return ret;
344 }
345
346 void MJPEGEncoder::release_va_resources(MJPEGEncoder::VAResources resources)
347 {
348         lock_guard<mutex> lock(va_resources_mutex);
349         if (va_resources_freelist.size() > 10) {
350                 auto it = va_resources_freelist.end();
351                 --it;
352
353                 VAStatus va_status = vaDestroyBuffer(va_dpy->va_dpy, it->data_buffer);
354                 CHECK_VASTATUS(va_status, "vaDestroyBuffer");
355
356                 va_status = vaDestroyContext(va_dpy->va_dpy, it->context);
357                 CHECK_VASTATUS(va_status, "vaDestroyContext");
358
359                 va_status = vaDestroySurfaces(va_dpy->va_dpy, &it->surface, 1);
360                 CHECK_VASTATUS(va_status, "vaDestroySurfaces");
361
362                 va_resources_freelist.erase(it);
363         }
364
365         va_resources_freelist.push_front(resources);
366 }
367
368 void MJPEGEncoder::init_jpeg_422(unsigned width, unsigned height, VectorDestinationManager *dest, jpeg_compress_struct *cinfo)
369 {
370         jpeg_error_mgr jerr;
371         cinfo->err = jpeg_std_error(&jerr);
372         jpeg_create_compress(cinfo);
373
374         cinfo->dest = (jpeg_destination_mgr *)dest;
375
376         cinfo->input_components = 3;
377         jpeg_set_defaults(cinfo);
378         jpeg_set_quality(cinfo, quality, /*force_baseline=*/false);
379
380         cinfo->image_width = width;
381         cinfo->image_height = height;
382         cinfo->raw_data_in = true;
383         jpeg_set_colorspace(cinfo, JCS_YCbCr);
384         cinfo->comp_info[0].h_samp_factor = 2;
385         cinfo->comp_info[0].v_samp_factor = 1;
386         cinfo->comp_info[1].h_samp_factor = 1;
387         cinfo->comp_info[1].v_samp_factor = 1;
388         cinfo->comp_info[2].h_samp_factor = 1;
389         cinfo->comp_info[2].v_samp_factor = 1;
390         cinfo->CCIR601_sampling = true;  // Seems to be mostly ignored by libjpeg, though.
391         jpeg_start_compress(cinfo, true);
392 }
393
394 vector<uint8_t> MJPEGEncoder::get_jpeg_header(unsigned width, unsigned height, jpeg_compress_struct *cinfo)
395 {
396         VectorDestinationManager dest;
397         init_jpeg_422(width, height, &dest, cinfo);
398
399         // Make a dummy black image; there's seemingly no other easy way of
400         // making libjpeg outputting all of its headers.
401         JSAMPROW yptr[8], cbptr[8], crptr[8];
402         JSAMPARRAY data[3] = { yptr, cbptr, crptr };
403         memset(tmp_y, 0, 4096);
404         memset(tmp_cb, 0, 4096);
405         memset(tmp_cr, 0, 4096);
406         for (unsigned yy = 0; yy < 8; ++yy) {
407                 yptr[yy] = tmp_y;
408                 cbptr[yy] = tmp_cb;
409                 crptr[yy] = tmp_cr;
410         }
411         for (unsigned y = 0; y < height; y += 8) {
412                 jpeg_write_raw_data(cinfo, data, /*num_lines=*/8);
413         }
414         jpeg_finish_compress(cinfo);
415
416         // We're only interested in the header, not the data after it.
417         dest.term_destination();
418         for (size_t i = 0; i < dest.dest.size() - 1; ++i) {
419                 if (dest.dest[i] == 0xff && dest.dest[i + 1] == 0xda) {  // Start of scan (SOS).
420                         unsigned len = dest.dest[i + 2] * 256 + dest.dest[i + 3];
421                         dest.dest.resize(i + len + 2);
422                         break;
423                 }
424         }
425
426         return dest.dest;
427 }
428
429 MJPEGEncoder::VAData MJPEGEncoder::get_va_data_for_resolution(unsigned width, unsigned height)
430 {
431         pair<unsigned, unsigned> key(width, height);
432         if (va_data_for_resolution.count(key)) {
433                 return va_data_for_resolution[key];
434         }
435
436         // Use libjpeg to generate a header and set sane defaults for e.g.
437         // quantization tables. Then do the actual encode with VA-API.
438         jpeg_compress_struct cinfo;
439         vector<uint8_t> jpeg_header = get_jpeg_header(width, height, &cinfo);
440
441         // Picture parameters.
442         VAEncPictureParameterBufferJPEG pic_param;
443         memset(&pic_param, 0, sizeof(pic_param));
444         pic_param.reconstructed_picture = VA_INVALID_ID;
445         pic_param.picture_width = cinfo.image_width;
446         pic_param.picture_height = cinfo.image_height;
447         for (int component_idx = 0; component_idx < cinfo.num_components; ++component_idx) {
448                 const jpeg_component_info *comp = &cinfo.comp_info[component_idx];
449                 pic_param.component_id[component_idx] = comp->component_id;
450                 pic_param.quantiser_table_selector[component_idx] = comp->quant_tbl_no;
451         }
452         pic_param.num_components = cinfo.num_components;
453         pic_param.num_scan = 1;
454         pic_param.sample_bit_depth = 8;
455         pic_param.coded_buf = VA_INVALID_ID;  // To be filled out by caller.
456         pic_param.pic_flags.bits.huffman = 1;
457         pic_param.quality = 50;  // Don't scale the given quantization matrices. (See gen8_mfc_jpeg_fqm_state)
458
459         // Quantization matrices.
460         VAQMatrixBufferJPEG q;
461         memset(&q, 0, sizeof(q));
462
463         q.load_lum_quantiser_matrix = true;
464         q.load_chroma_quantiser_matrix = true;
465         for (int quant_tbl_idx = 0; quant_tbl_idx < min(4, NUM_QUANT_TBLS); ++quant_tbl_idx) {
466                 const JQUANT_TBL *qtbl = cinfo.quant_tbl_ptrs[quant_tbl_idx];
467                 assert((qtbl == nullptr) == (quant_tbl_idx >= 2));
468                 if (qtbl == nullptr) continue;
469
470                 uint8_t *qmatrix = (quant_tbl_idx == 0) ? q.lum_quantiser_matrix : q.chroma_quantiser_matrix;
471                 for (int i = 0; i < 64; ++i) {
472                         if (qtbl->quantval[i] > 255) {
473                                 fprintf(stderr, "Baseline JPEG only!\n");
474                                 abort();
475                         }
476                         qmatrix[i] = qtbl->quantval[jpeg_natural_order[i]];
477                 }
478         }
479
480         // Huffman tables (arithmetic is not supported).
481         VAHuffmanTableBufferJPEGBaseline huff;
482         memset(&huff, 0, sizeof(huff));
483
484         for (int huff_tbl_idx = 0; huff_tbl_idx < min(2, NUM_HUFF_TBLS); ++huff_tbl_idx) {
485                 const JHUFF_TBL *ac_hufftbl = cinfo.ac_huff_tbl_ptrs[huff_tbl_idx];
486                 const JHUFF_TBL *dc_hufftbl = cinfo.dc_huff_tbl_ptrs[huff_tbl_idx];
487                 if (ac_hufftbl == nullptr) {
488                         assert(dc_hufftbl == nullptr);
489                         huff.load_huffman_table[huff_tbl_idx] = 0;
490                 } else {
491                         assert(dc_hufftbl != nullptr);
492                         huff.load_huffman_table[huff_tbl_idx] = 1;
493
494                         for (int i = 0; i < 16; ++i) {
495                                 huff.huffman_table[huff_tbl_idx].num_dc_codes[i] = dc_hufftbl->bits[i + 1];
496                         }
497                         for (int i = 0; i < 12; ++i) {
498                                 huff.huffman_table[huff_tbl_idx].dc_values[i] = dc_hufftbl->huffval[i];
499                         }
500                         for (int i = 0; i < 16; ++i) {
501                                 huff.huffman_table[huff_tbl_idx].num_ac_codes[i] = ac_hufftbl->bits[i + 1];
502                         }
503                         for (int i = 0; i < 162; ++i) {
504                                 huff.huffman_table[huff_tbl_idx].ac_values[i] = ac_hufftbl->huffval[i];
505                         }
506                 }
507         }
508
509         // Slice parameters (metadata about the slice).
510         VAEncSliceParameterBufferJPEG parms;
511         memset(&parms, 0, sizeof(parms));
512         for (int component_idx = 0; component_idx < cinfo.num_components; ++component_idx) {
513                 const jpeg_component_info *comp = &cinfo.comp_info[component_idx];
514                 parms.components[component_idx].component_selector = comp->component_id;
515                 parms.components[component_idx].dc_table_selector = comp->dc_tbl_no;
516                 parms.components[component_idx].ac_table_selector = comp->ac_tbl_no;
517                 if (parms.components[component_idx].dc_table_selector > 1 ||
518                     parms.components[component_idx].ac_table_selector > 1) {
519                         fprintf(stderr, "Uses too many Huffman tables\n");
520                         abort();
521                 }
522         }
523         parms.num_components = cinfo.num_components;
524         parms.restart_interval = cinfo.restart_interval;
525
526         jpeg_destroy_compress(&cinfo);
527
528         VAData ret;
529         ret.jpeg_header = move(jpeg_header);
530         ret.pic_param = pic_param;
531         ret.q = q;
532         ret.huff = huff;
533         ret.parms = parms;
534         va_data_for_resolution[key] = ret;
535         return ret;
536 }
537
538 vector<uint8_t> MJPEGEncoder::encode_jpeg(const QueuedFrame &qf)
539 {
540         if (va_dpy != nullptr) {
541                 return encode_jpeg_va(qf);
542         } else {
543                 return encode_jpeg_libjpeg(qf);
544         }
545 }
546
547 vector<uint8_t> MJPEGEncoder::encode_jpeg_va(const QueuedFrame &qf)
548 {
549         unsigned width = qf.video_format.width;
550         unsigned height = qf.video_format.height;
551
552         VAResources resources = get_va_resources(width, height);
553         ReleaseVAResources release(this, resources);
554
555         VAData va_data = get_va_data_for_resolution(width, height);
556         va_data.pic_param.coded_buf = resources.data_buffer;
557
558         VABufferID pic_param_buffer;
559         VAStatus va_status = vaCreateBuffer(va_dpy->va_dpy, config_id, VAEncPictureParameterBufferType, sizeof(va_data.pic_param), 1, &va_data.pic_param, &pic_param_buffer);
560         CHECK_VASTATUS(va_status, "vaCreateBuffer");
561         VABufferDestroyer destroy_pic_param(va_dpy->va_dpy, pic_param_buffer);
562
563         VABufferID q_buffer;
564         va_status = vaCreateBuffer(va_dpy->va_dpy, config_id, VAQMatrixBufferType, sizeof(va_data.q), 1, &va_data.q, &q_buffer);
565         CHECK_VASTATUS(va_status, "vaCreateBuffer");
566         VABufferDestroyer destroy_iq(va_dpy->va_dpy, q_buffer);
567
568         VABufferID huff_buffer;
569         va_status = vaCreateBuffer(va_dpy->va_dpy, config_id, VAHuffmanTableBufferType, sizeof(va_data.huff), 1, &va_data.huff, &huff_buffer);
570         CHECK_VASTATUS(va_status, "vaCreateBuffer");
571         VABufferDestroyer destroy_huff(va_dpy->va_dpy, huff_buffer);
572
573         VABufferID slice_param_buffer;
574         va_status = vaCreateBuffer(va_dpy->va_dpy, config_id, VAEncSliceParameterBufferType, sizeof(va_data.parms), 1, &va_data.parms, &slice_param_buffer);
575         CHECK_VASTATUS(va_status, "vaCreateBuffer");
576         VABufferDestroyer destroy_slice_param(va_dpy->va_dpy, slice_param_buffer);
577
578         VAImage image;
579         va_status = vaDeriveImage(va_dpy->va_dpy, resources.surface, &image);
580         CHECK_VASTATUS(va_status, "vaDeriveImage");
581
582         // Upload the pixel data.
583         uint8_t *surface_p = nullptr;
584         vaMapBuffer(va_dpy->va_dpy, image.buf, (void **)&surface_p);
585
586         size_t field_start_line = qf.video_format.extra_lines_top;  // No interlacing support.
587         size_t field_start = qf.cbcr_offset * 2 + qf.video_format.width * field_start_line * 2;
588
589         {
590                 const uint8_t *src = qf.frame->data_copy + field_start;
591                 uint8_t *dst = (unsigned char *)surface_p + image.offsets[0];
592                 memcpy_with_pitch(dst, src, qf.video_format.width * 2, image.pitches[0], qf.video_format.height);
593         }
594
595         va_status = vaUnmapBuffer(va_dpy->va_dpy, image.buf);
596         CHECK_VASTATUS(va_status, "vaUnmapBuffer");
597         va_status = vaDestroyImage(va_dpy->va_dpy, image.image_id);
598         CHECK_VASTATUS(va_status, "vaDestroyImage");
599
600         // Finally, stick in the JPEG header.
601         VAEncPackedHeaderParameterBuffer header_parm;
602         header_parm.type = VAEncPackedHeaderRawData;
603         header_parm.bit_length = 8 * va_data.jpeg_header.size();
604
605         VABufferID header_parm_buffer;
606         va_status = vaCreateBuffer(va_dpy->va_dpy, config_id, VAEncPackedHeaderParameterBufferType, sizeof(header_parm), 1, &header_parm, &header_parm_buffer);
607         CHECK_VASTATUS(va_status, "vaCreateBuffer");
608         VABufferDestroyer destroy_header(va_dpy->va_dpy, header_parm_buffer);
609
610         VABufferID header_data_buffer;
611         va_status = vaCreateBuffer(va_dpy->va_dpy, config_id, VAEncPackedHeaderDataBufferType, va_data.jpeg_header.size(), 1, va_data.jpeg_header.data(), &header_data_buffer);
612         CHECK_VASTATUS(va_status, "vaCreateBuffer");
613         VABufferDestroyer destroy_header_data(va_dpy->va_dpy, header_data_buffer);
614
615         va_status = vaBeginPicture(va_dpy->va_dpy, resources.context, resources.surface);
616         CHECK_VASTATUS(va_status, "vaBeginPicture");
617         va_status = vaRenderPicture(va_dpy->va_dpy, resources.context, &pic_param_buffer, 1);
618         CHECK_VASTATUS(va_status, "vaRenderPicture(pic_param)");
619         va_status = vaRenderPicture(va_dpy->va_dpy, resources.context, &q_buffer, 1);
620         CHECK_VASTATUS(va_status, "vaRenderPicture(q)");
621         va_status = vaRenderPicture(va_dpy->va_dpy, resources.context, &huff_buffer, 1);
622         CHECK_VASTATUS(va_status, "vaRenderPicture(huff)");
623         va_status = vaRenderPicture(va_dpy->va_dpy, resources.context, &slice_param_buffer, 1);
624         CHECK_VASTATUS(va_status, "vaRenderPicture(slice_param)");
625         va_status = vaRenderPicture(va_dpy->va_dpy, resources.context, &header_parm_buffer, 1);
626         CHECK_VASTATUS(va_status, "vaRenderPicture(header_parm)");
627         va_status = vaRenderPicture(va_dpy->va_dpy, resources.context, &header_data_buffer, 1);
628         CHECK_VASTATUS(va_status, "vaRenderPicture(header_data)");
629         va_status = vaEndPicture(va_dpy->va_dpy, resources.context);
630         CHECK_VASTATUS(va_status, "vaEndPicture");
631
632         va_status = vaSyncSurface(va_dpy->va_dpy, resources.surface);
633         CHECK_VASTATUS(va_status, "vaSyncSurface");
634
635         VACodedBufferSegment *segment;
636         va_status = vaMapBuffer(va_dpy->va_dpy, resources.data_buffer, (void **)&segment);
637         CHECK_VASTATUS(va_status, "vaMapBuffer");
638
639         const char *coded_buf = reinterpret_cast<char *>(segment->buf);
640         vector<uint8_t> jpeg(coded_buf, coded_buf + segment->size);
641
642         va_status = vaUnmapBuffer(va_dpy->va_dpy, resources.data_buffer);
643         CHECK_VASTATUS(va_status, "vaUnmapBuffer");
644
645         return jpeg;
646 }
647
648 vector<uint8_t> MJPEGEncoder::encode_jpeg_libjpeg(const QueuedFrame &qf)
649 {
650         unsigned width = qf.video_format.width;
651         unsigned height = qf.video_format.height;
652
653         VectorDestinationManager dest;
654         jpeg_compress_struct cinfo;
655         init_jpeg_422(width, height, &dest, &cinfo);
656
657         size_t field_start_line = qf.video_format.extra_lines_top;  // No interlacing support.
658         size_t field_start = qf.cbcr_offset * 2 + qf.video_format.width * field_start_line * 2;
659
660         JSAMPROW yptr[8], cbptr[8], crptr[8];
661         JSAMPARRAY data[3] = { yptr, cbptr, crptr };
662         for (unsigned y = 0; y < qf.video_format.height; y += 8) {
663                 const uint8_t *src = qf.frame->data_copy + field_start + y * qf.video_format.width * 2;
664
665                 memcpy_interleaved(tmp_y, tmp_cbcr, src, qf.video_format.width * 8 * 2);
666                 memcpy_interleaved(tmp_cb, tmp_cr, tmp_cbcr, qf.video_format.width * 8);
667                 for (unsigned yy = 0; yy < 8; ++yy) {
668                         yptr[yy] = tmp_y + yy * width;
669                         cbptr[yy] = tmp_cb + yy * width / 2;
670                         crptr[yy] = tmp_cr + yy * width / 2;
671                 }
672                 jpeg_write_raw_data(&cinfo, data, /*num_lines=*/8);
673         }
674         jpeg_finish_compress(&cinfo);
675
676         return dest.dest;
677 }