1 //#include "sysdeps.h"
2 #include "h264encode.h"
3
4 #include <movit/util.h>
5 #include <EGL/eglplatform.h>
6 #include <X11/X.h>
7 #include <X11/Xlib.h>
8 #include <assert.h>
9 #include <epoxy/egl.h>
10 extern "C" {
11 #include <libavcodec/avcodec.h>
12 #include <libavformat/avformat.h>
13 #include <libavutil/channel_layout.h>
14 #include <libavutil/frame.h>
15 #include <libavutil/rational.h>
16 #include <libavutil/samplefmt.h>
17 }
18 #include <libdrm/drm_fourcc.h>
19 #include <stdio.h>
20 #include <stdlib.h>
21 #include <string.h>
22 #include <fcntl.h>
23 #include <va/va.h>
24 #include <va/va_drm.h>
25 #include <va/va_drmcommon.h>
26 #include <va/va_enc_h264.h>
27 #include <va/va_x11.h>
28 #include <algorithm>
29 #include <condition_variable>
30 #include <cstdint>
31 #include <map>
32 #include <memory>
33 #include <mutex>
34 #include <queue>
#include <stack>
35 #include <string>
36 #include <thread>
37 #include <utility>
#include <vector>
38
39 #include "context.h"
40 #include "defs.h"
41 #include "flags.h"
42 #include "httpd.h"
43 #include "timebase.h"
44
45 using namespace std;
46
47 class QOpenGLContext;
48 class QSurface;
49
50 #define CHECK_VASTATUS(va_status, func)                                 \
51     if (va_status != VA_STATUS_SUCCESS) {                               \
52         fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
53         exit(1);                                                        \
54     }
55
56 #define BUFFER_OFFSET(i) ((char *)NULL + (i))
57
58 //#include "loadsurface.h"
59
60 #define NAL_REF_IDC_NONE        0
61 #define NAL_REF_IDC_LOW         1
62 #define NAL_REF_IDC_MEDIUM      2
63 #define NAL_REF_IDC_HIGH        3
64
65 #define NAL_NON_IDR             1
66 #define NAL_IDR                 5
67 #define NAL_SPS                 7
68 #define NAL_PPS                 8
69 #define NAL_SEI                 6
70
71 #define SLICE_TYPE_P            0
72 #define SLICE_TYPE_B            1
73 #define SLICE_TYPE_I            2
74 #define IS_P_SLICE(type) (SLICE_TYPE_P == (type))
75 #define IS_B_SLICE(type) (SLICE_TYPE_B == (type))
76 #define IS_I_SLICE(type) (SLICE_TYPE_I == (type))
77
78
79 #define ENTROPY_MODE_CAVLC      0
80 #define ENTROPY_MODE_CABAC      1
81
82 #define PROFILE_IDC_BASELINE    66
83 #define PROFILE_IDC_MAIN        77
84 #define PROFILE_IDC_HIGH        100
85    
86 #define BITSTREAM_ALLOCATE_STEPPING     4096
87 #define SURFACE_NUM 16 /* 16 surfaces for source YUV */
88 #define MAX_NUM_REF1 16 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
89 #define MAX_NUM_REF2 32 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
90
91 static constexpr unsigned int MaxFrameNum = (1<<16);
92 static constexpr unsigned int MaxPicOrderCntLsb = (1<<8);
93 static constexpr unsigned int Log2MaxFrameNum = 16;
94 static constexpr unsigned int Log2MaxPicOrderCntLsb = 8;
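// Sanity checks: per the H.264 spec, these maxima are 2^Log2*, matching the
// log2_*_minus4 values written into the SPS in render_sequence()/sps_rbsp().
static_assert(MaxFrameNum == (1u << Log2MaxFrameNum), "MaxFrameNum must be 2^Log2MaxFrameNum");
static_assert(MaxPicOrderCntLsb == (1u << Log2MaxPicOrderCntLsb), "MaxPicOrderCntLsb must be 2^Log2MaxPicOrderCntLsb");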
95 static constexpr int rc_default_modes[] = {  // Priority list of modes.
96     VA_RC_VBR,
97     VA_RC_CQP,
98     VA_RC_VBR_CONSTRAINED,
99     VA_RC_CBR,
100     VA_RC_VCM,
101     VA_RC_NONE,
102 };
103
104 /* thread to save coded data */
105 #define SRC_SURFACE_FREE        0
106 #define SRC_SURFACE_IN_ENCODING 1
107     
108 struct __bitstream {
109     unsigned int *buffer;
110     int bit_offset;
111     int max_size_in_dword;
112 };
113 typedef struct __bitstream bitstream;
114
117 // H.264 video comes out in encoding order (e.g. with two B-frames:
118 // 0, 3, 1, 2, 6, 4, 5, etc.), but uncompressed video needs to
119 // come in the right order. Since we do everything, including waiting
120 // for the frames to come out of OpenGL, in encoding order, we need
121 // a reordering buffer for uncompressed frames so that they come out
122 // correctly. We go the super-lazy way of not making it understand
123 // anything about the true order (which introduces some extra latency,
124 // though); we know that for N B-frames we need at most (N-1) frames
125 // in the reorder buffer, and can just sort on that.
126 //
127 // The class also deals with keeping a freelist as needed.
128 class FrameReorderer {
129 public:
130         FrameReorderer(unsigned queue_length, int width, int height);
131
132         // Returns the next frame to insert with its pts, if any. Otherwise -1 and nullptr.
133         // Does _not_ take ownership of data; a copy is taken if needed.
134         // The returned pointer is valid until the next call to reorder_frame, or destruction.
135         // As a special case, if queue_length == 0, will just return pts and data (no reordering needed).
136         pair<int64_t, const uint8_t *> reorder_frame(int64_t pts, const uint8_t *data);
137
138         // The same as reorder_frame, but without inserting anything. Used to empty the queue.
139         pair<int64_t, const uint8_t *> get_first_frame();
140
141         bool empty() const { return frames.empty(); }
142
143 private:
144         unsigned queue_length;
145         int width, height;
146
147         priority_queue<pair<int64_t, uint8_t *>> frames;
148         stack<uint8_t *> freelist;  // Includes the last value returned from reorder_frame.
149
150         // Owns all the pointers. Normally, freelist and frames could do this themselves,
151         // except priority_queue doesn't work well with movable-only types.
152         vector<unique_ptr<uint8_t[]>> owner;
153 };
154
155 FrameReorderer::FrameReorderer(unsigned queue_length, int width, int height)
156     : queue_length(queue_length), width(width), height(height)
157 {
158         for (unsigned i = 0; i < queue_length; ++i) {
159                 owner.emplace_back(new uint8_t[width * height * 2]);
160                 freelist.push(owner.back().get());
161         }
162 }
163
164 pair<int64_t, const uint8_t *> FrameReorderer::reorder_frame(int64_t pts, const uint8_t *data)
165 {
166         if (queue_length == 0) {
167                 return make_pair(pts, data);
168         }
169
170         assert(!freelist.empty());
171         uint8_t *storage = freelist.top();
172         freelist.pop();
173         memcpy(storage, data, width * height * 2);
174         frames.emplace(-pts, storage);  // Invert pts to get smallest first.
175
176         if (frames.size() >= queue_length) {
177                 return get_first_frame();
178         } else {
179                 return make_pair(-1, nullptr);
180         }
181 }
182
183 pair<int64_t, const uint8_t *> FrameReorderer::get_first_frame()
184 {
185         assert(!frames.empty());
186         pair<int64_t, uint8_t *> storage = frames.top();
187         frames.pop();
188         int64_t pts = storage.first;
189         freelist.push(storage.second);
190         return make_pair(-pts, storage.second);  // Re-invert pts (see reorder_frame()).
191 }
192
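// A minimal usage sketch (not part of the encoder, and never called): with a
// queue length of 2, feeding frames in encoding order with pts 0, 3, 1, 2 and
// then draining the queue returns them in presentation order 0, 1, 2, 3.
static void frame_reorderer_example()
{
        FrameReorderer reorderer(2, 2, 2);  // queue_length = 2, 2x2 dummy frames.
        uint8_t dummy[2 * 2 * 2] = {0};
        for (int64_t pts : {0, 3, 1, 2}) {
                pair<int64_t, const uint8_t *> frame = reorderer.reorder_frame(pts, dummy);
                if (frame.second != nullptr) {
                        printf("out: pts=%d\n", int(frame.first));
                }
        }
        while (!reorderer.empty()) {
                printf("out: pts=%d\n", int(reorderer.get_first_frame().first));
        }
}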
193 class H264EncoderImpl {
194 public:
195         H264EncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd);
196         ~H264EncoderImpl();
197         void add_audio(int64_t pts, vector<float> audio);
198         bool begin_frame(GLuint *y_tex, GLuint *cbcr_tex);
199         void end_frame(RefCountedGLsync fence, int64_t pts, const vector<RefCountedFrame> &input_frames);
200         void shutdown();
201
202 private:
203         struct storage_task {
204                 unsigned long long display_order;
205                 int frame_type;
206                 vector<float> audio;
207                 int64_t pts, dts;
208         };
209         struct PendingFrame {
210                 RefCountedGLsync fence;
211                 vector<RefCountedFrame> input_frames;
212                 int64_t pts;
213         };
214
215         void encode_thread_func();
216         void encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts);
217         void add_packet_for_uncompressed_frame(int64_t pts, const uint8_t *data);
218         void encode_frame(PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
219                           int frame_type, int64_t pts, int64_t dts);
220         void storage_task_thread();
221         void storage_task_enqueue(storage_task task);
222         void save_codeddata(storage_task task);
223         int render_packedsequence();
224         int render_packedpicture();
225         void render_packedslice();
226         int render_sequence();
227         int render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num);
228         void sps_rbsp(bitstream *bs);
229         void pps_rbsp(bitstream *bs);
230         int build_packed_pic_buffer(unsigned char **header_buffer);
231         int render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type);
232         void slice_header(bitstream *bs);
233         int build_packed_seq_buffer(unsigned char **header_buffer);
234         int build_packed_slice_buffer(unsigned char **header_buffer);
235         int init_va(const string &va_display);
236         int deinit_va();
237         VADisplay va_open_display(const string &va_display);
238         void va_close_display(VADisplay va_dpy);
239         int setup_encode();
240         int release_encode();
241         void update_ReferenceFrames(int frame_type);
242         int update_RefPicList(int frame_type);
243
244         bool is_shutdown = false;
245         bool use_zerocopy;
246         int drm_fd = -1;
247
248         thread encode_thread, storage_thread;
249
250         mutex storage_task_queue_mutex;
251         condition_variable storage_task_queue_changed;
252         int srcsurface_status[SURFACE_NUM];  // protected by storage_task_queue_mutex
253         queue<storage_task> storage_task_queue;  // protected by storage_task_queue_mutex
254         bool storage_thread_should_quit = false;  // protected by storage_task_queue_mutex
255
256         mutex frame_queue_mutex;
257         condition_variable frame_queue_nonempty;
258         bool encode_thread_should_quit = false;  // under frame_queue_mutex
259
260         int current_storage_frame;
261
262         map<int, PendingFrame> pending_video_frames;  // under frame_queue_mutex
263         map<int64_t, vector<float>> pending_audio_frames;  // under frame_queue_mutex
264         QSurface *surface;
265
266         AVCodecContext *context_audio;
267         AVFrame *audio_frame = nullptr;
268         HTTPD *httpd;
269         unique_ptr<FrameReorderer> reorderer;
270
271         Display *x11_display = nullptr;
272
273         // Encoder parameters
274         VADisplay va_dpy;
275         VAProfile h264_profile = (VAProfile)~0;
276         VAConfigAttrib config_attrib[VAConfigAttribTypeMax];
277         int config_attrib_num = 0, enc_packed_header_idx;
278
279         struct GLSurface {
280                 VASurfaceID src_surface, ref_surface;
281                 VABufferID coded_buf;
282
283                 VAImage surface_image;
284                 GLuint y_tex, cbcr_tex;
285
286                 // Only if use_zerocopy == true.
287                 EGLImage y_egl_image, cbcr_egl_image;
288
289                 // Only if use_zerocopy == false.
290                 GLuint pbo;
291                 uint8_t *y_ptr, *cbcr_ptr;
292                 size_t y_offset, cbcr_offset;
293         };
294         GLSurface gl_surfaces[SURFACE_NUM];
295
296         VAConfigID config_id;
297         VAContextID context_id;
298         VAEncSequenceParameterBufferH264 seq_param;
299         VAEncPictureParameterBufferH264 pic_param;
300         VAEncSliceParameterBufferH264 slice_param;
301         VAPictureH264 CurrentCurrPic;
302         VAPictureH264 ReferenceFrames[MAX_NUM_REF1], RefPicList0_P[MAX_NUM_REF2], RefPicList0_B[MAX_NUM_REF2], RefPicList1_B[MAX_NUM_REF2];
303
304         // Static quality settings.
305         static constexpr unsigned int frame_bitrate = 15000000 / 60;  // Doesn't really matter; only initial_qp does.
306         static constexpr unsigned int num_ref_frames = 2;
307         static constexpr int initial_qp = 15;
308         static constexpr int minimal_qp = 0;
309         static constexpr int intra_period = 30;
310         static constexpr int intra_idr_period = MAX_FPS;  // About a second; more at lower frame rates. Not ideal.
311
312         // Quality settings that are meant to be static, but might be overridden
313         // by the profile.
314         int constraint_set_flag = 0;
315         int h264_packedheader = 0; /* support pack header? */
316         int h264_maxref = (1<<16|1);
317         int h264_entropy_mode = 1; /* cabac */
318         int ip_period = 3;
319
320         int rc_mode = -1;
321         unsigned int current_frame_num = 0;
322         unsigned int numShortTerm = 0;
323
324         int frame_width;
325         int frame_height;
326         int frame_width_mbaligned;
327         int frame_height_mbaligned;
328 };
329
330 // vaRenderPicture() is supposedly meant to destroy the buffer implicitly, but
331 // if we don't delete it here, we get leaks. The GStreamer implementation does
332 // the same.
333 static void render_picture_and_delete(VADisplay dpy, VAContextID context, VABufferID *buffers, int num_buffers)
334 {
335     VAStatus va_status = vaRenderPicture(dpy, context, buffers, num_buffers);
336     CHECK_VASTATUS(va_status, "vaRenderPicture");
337
338     for (int i = 0; i < num_buffers; ++i) {
339         va_status = vaDestroyBuffer(dpy, buffers[i]);
340         CHECK_VASTATUS(va_status, "vaDestroyBuffer");
341     }
342 }
343
344 static unsigned int 
345 va_swap32(unsigned int val)
346 {
347     unsigned char *pval = (unsigned char *)&val;
348
349     return ((pval[0] << 24)     |
350             (pval[1] << 16)     |
351             (pval[2] << 8)      |
352             (pval[3] << 0));
353 }
354
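// The helpers below implement a simple MSB-first bit writer: bits are shifted
// into host-order dwords, and each dword is byte-swapped to big-endian (via
// va_swap32) as it fills up; bitstream_end() left-aligns and swaps the final
// partial dword so the buffer can be emitted as a plain byte stream.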
355 static void
356 bitstream_start(bitstream *bs)
357 {
358     bs->max_size_in_dword = BITSTREAM_ALLOCATE_STEPPING;
359     bs->buffer = (unsigned int *)calloc(bs->max_size_in_dword * sizeof(int), 1);
360     bs->bit_offset = 0;
361 }
362
363 static void
364 bitstream_end(bitstream *bs)
365 {
366     int pos = (bs->bit_offset >> 5);
367     int bit_offset = (bs->bit_offset & 0x1f);
368     int bit_left = 32 - bit_offset;
369
370     if (bit_offset) {
371         bs->buffer[pos] = va_swap32((bs->buffer[pos] << bit_left));
372     }
373 }
374  
375 static void
376 bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits)
377 {
378     int pos = (bs->bit_offset >> 5);
379     int bit_offset = (bs->bit_offset & 0x1f);
380     int bit_left = 32 - bit_offset;
381
382     if (!size_in_bits)
383         return;
384
385     bs->bit_offset += size_in_bits;
386
387     if (bit_left > size_in_bits) {
388         bs->buffer[pos] = (bs->buffer[pos] << size_in_bits | val);
389     } else {
390         size_in_bits -= bit_left;
391         if (bit_left >= 32) {
392             bs->buffer[pos] = (val >> size_in_bits);
393         } else {
394             bs->buffer[pos] = (bs->buffer[pos] << bit_left) | (val >> size_in_bits);
395         }
396         bs->buffer[pos] = va_swap32(bs->buffer[pos]);
397
398         if (pos + 1 == bs->max_size_in_dword) {
399             bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING;
400             bs->buffer = (unsigned int *)realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int));
401         }
402
403         bs->buffer[pos + 1] = val;
404     }
405 }
406
407 static void
408 bitstream_put_ue(bitstream *bs, unsigned int val)
409 {
410     int size_in_bits = 0;
411     int tmp_val = ++val;
412
413     while (tmp_val) {
414         tmp_val >>= 1;
415         size_in_bits++;
416     }
417
418     bitstream_put_ui(bs, 0, size_in_bits - 1); // leading zero
419     bitstream_put_ui(bs, val, size_in_bits);
420 }
421
422 static void
423 bitstream_put_se(bitstream *bs, int val)
424 {
425     unsigned int new_val;
426
427     if (val <= 0)
428         new_val = -2 * val;
429     else
430         new_val = 2 * val - 1;
431
432     bitstream_put_ue(bs, new_val);
433 }
434
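// A small worked example of the Exp-Golomb helpers above (illustrative only,
// never called): ue(v) writes val+1 with as many leading zero bits as there
// are bits after its leading one bit, and se(v) first maps 0, 1, -1, 2, -2, ...
// to 0, 1, 2, 3, 4, ... before ue-coding.
static void exp_golomb_example()
{
    bitstream bs;
    bitstream_start(&bs);
    bitstream_put_ue(&bs, 3);   // 3+1 = 100 in binary, so this emits the five bits 00100.
    bitstream_put_se(&bs, -1);  // -1 maps to 2, which ue-codes as 011.
    bitstream_end(&bs);
    free(bs.buffer);
}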
435 static void
436 bitstream_byte_aligning(bitstream *bs, int bit)
437 {
438     int bit_offset = (bs->bit_offset & 0x7);
439     int bit_left = 8 - bit_offset;
440     int new_val;
441
442     if (!bit_offset)
443         return;
444
445     assert(bit == 0 || bit == 1);
446
447     if (bit)
448         new_val = (1 << bit_left) - 1;
449     else
450         new_val = 0;
451
452     bitstream_put_ui(bs, new_val, bit_left);
453 }
454
455 static void 
456 rbsp_trailing_bits(bitstream *bs)
457 {
458     bitstream_put_ui(bs, 1, 1);
459     bitstream_byte_aligning(bs, 0);
460 }
461
462 static void nal_start_code_prefix(bitstream *bs)
463 {
464     bitstream_put_ui(bs, 0x00000001, 32);
465 }
466
467 static void nal_header(bitstream *bs, int nal_ref_idc, int nal_unit_type)
468 {
469     bitstream_put_ui(bs, 0, 1);                /* forbidden_zero_bit: 0 */
470     bitstream_put_ui(bs, nal_ref_idc, 2);
471     bitstream_put_ui(bs, nal_unit_type, 5);
472 }
473
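// Hand-rolled SPS writer, used for the packed sequence header (see
// build_packed_seq_buffer() below); it also emits a minimal VUI with BT.709
// primaries, BT.601 matrix coefficients, timing information and HRD parameters.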
474 void H264EncoderImpl::sps_rbsp(bitstream *bs)
475 {
476     int profile_idc = PROFILE_IDC_BASELINE;
477
478     if (h264_profile  == VAProfileH264High)
479         profile_idc = PROFILE_IDC_HIGH;
480     else if (h264_profile  == VAProfileH264Main)
481         profile_idc = PROFILE_IDC_MAIN;
482
483     bitstream_put_ui(bs, profile_idc, 8);               /* profile_idc */
484     bitstream_put_ui(bs, !!(constraint_set_flag & 1), 1);                         /* constraint_set0_flag */
485     bitstream_put_ui(bs, !!(constraint_set_flag & 2), 1);                         /* constraint_set1_flag */
486     bitstream_put_ui(bs, !!(constraint_set_flag & 4), 1);                         /* constraint_set2_flag */
487     bitstream_put_ui(bs, !!(constraint_set_flag & 8), 1);                         /* constraint_set3_flag */
488     bitstream_put_ui(bs, 0, 4);                         /* reserved_zero_4bits */
489     bitstream_put_ui(bs, seq_param.level_idc, 8);      /* level_idc */
490     bitstream_put_ue(bs, seq_param.seq_parameter_set_id);      /* seq_parameter_set_id */
491
492     if ( profile_idc == PROFILE_IDC_HIGH) {
493         bitstream_put_ue(bs, 1);        /* chroma_format_idc = 1, 4:2:0 */ 
494         bitstream_put_ue(bs, 0);        /* bit_depth_luma_minus8 */
495         bitstream_put_ue(bs, 0);        /* bit_depth_chroma_minus8 */
496         bitstream_put_ui(bs, 0, 1);     /* qpprime_y_zero_transform_bypass_flag */
497         bitstream_put_ui(bs, 0, 1);     /* seq_scaling_matrix_present_flag */
498     }
499
500     bitstream_put_ue(bs, seq_param.seq_fields.bits.log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
501     bitstream_put_ue(bs, seq_param.seq_fields.bits.pic_order_cnt_type);        /* pic_order_cnt_type */
502
503     if (seq_param.seq_fields.bits.pic_order_cnt_type == 0)
504         bitstream_put_ue(bs, seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4);     /* log2_max_pic_order_cnt_lsb_minus4 */
505     else {
506         assert(0);
507     }
508
509     bitstream_put_ue(bs, seq_param.max_num_ref_frames);        /* num_ref_frames */
510     bitstream_put_ui(bs, 0, 1);                                 /* gaps_in_frame_num_value_allowed_flag */
511
512     bitstream_put_ue(bs, seq_param.picture_width_in_mbs - 1);  /* pic_width_in_mbs_minus1 */
513     bitstream_put_ue(bs, seq_param.picture_height_in_mbs - 1); /* pic_height_in_map_units_minus1 */
514     bitstream_put_ui(bs, seq_param.seq_fields.bits.frame_mbs_only_flag, 1);    /* frame_mbs_only_flag */
515
516     if (!seq_param.seq_fields.bits.frame_mbs_only_flag) {
517         assert(0);
518     }
519
520     bitstream_put_ui(bs, seq_param.seq_fields.bits.direct_8x8_inference_flag, 1);      /* direct_8x8_inference_flag */
521     bitstream_put_ui(bs, seq_param.frame_cropping_flag, 1);            /* frame_cropping_flag */
522
523     if (seq_param.frame_cropping_flag) {
524         bitstream_put_ue(bs, seq_param.frame_crop_left_offset);        /* frame_crop_left_offset */
525         bitstream_put_ue(bs, seq_param.frame_crop_right_offset);       /* frame_crop_right_offset */
526         bitstream_put_ue(bs, seq_param.frame_crop_top_offset);         /* frame_crop_top_offset */
527         bitstream_put_ue(bs, seq_param.frame_crop_bottom_offset);      /* frame_crop_bottom_offset */
528     }
529     
530     //if ( frame_bit_rate < 0 ) { //TODO EW: the vui header isn't correct
531     if ( false ) {
532         bitstream_put_ui(bs, 0, 1); /* vui_parameters_present_flag */
533     } else {
534         bitstream_put_ui(bs, 1, 1); /* vui_parameters_present_flag */
535         bitstream_put_ui(bs, 0, 1); /* aspect_ratio_info_present_flag */
536         bitstream_put_ui(bs, 0, 1); /* overscan_info_present_flag */
537         bitstream_put_ui(bs, 1, 1); /* video_signal_type_present_flag */
538         {
539             bitstream_put_ui(bs, 5, 3);  /* video_format (5 = Unspecified) */
540             bitstream_put_ui(bs, 0, 1);  /* video_full_range_flag */
541             bitstream_put_ui(bs, 1, 1);  /* colour_description_present_flag */
542             {
543                 bitstream_put_ui(bs, 1, 8);  /* colour_primaries (1 = BT.709) */
544                 bitstream_put_ui(bs, 2, 8);  /* transfer_characteristics (2 = unspecified, since we use sRGB) */
545                 bitstream_put_ui(bs, 6, 8);  /* matrix_coefficients (6 = BT.601/SMPTE 170M) */
546             }
547         }
548         bitstream_put_ui(bs, 0, 1); /* chroma_loc_info_present_flag */
549         bitstream_put_ui(bs, 1, 1); /* timing_info_present_flag */
550         {
551             bitstream_put_ui(bs, 1, 32);  /* num_units_in_tick */
552             bitstream_put_ui(bs, TIMEBASE * 2, 32);  /* time_scale */
553             bitstream_put_ui(bs, 1, 1);  /* fixed_frame_rate_flag */
554         }
555         bitstream_put_ui(bs, 1, 1); /* nal_hrd_parameters_present_flag */
556         {
557             // hrd_parameters 
558             bitstream_put_ue(bs, 0);    /* cpb_cnt_minus1 */
559             bitstream_put_ui(bs, 4, 4); /* bit_rate_scale */
560             bitstream_put_ui(bs, 6, 4); /* cpb_size_scale */
561            
562             bitstream_put_ue(bs, frame_bitrate - 1); /* bit_rate_value_minus1[0] */
563             bitstream_put_ue(bs, frame_bitrate*8 - 1); /* cpb_size_value_minus1[0] */
564             bitstream_put_ui(bs, 1, 1);  /* cbr_flag[0] */
565
566             bitstream_put_ui(bs, 23, 5);   /* initial_cpb_removal_delay_length_minus1 */
567             bitstream_put_ui(bs, 23, 5);   /* cpb_removal_delay_length_minus1 */
568             bitstream_put_ui(bs, 23, 5);   /* dpb_output_delay_length_minus1 */
569             bitstream_put_ui(bs, 23, 5);   /* time_offset_length  */
570         }
571         bitstream_put_ui(bs, 0, 1);   /* vcl_hrd_parameters_present_flag */
572         bitstream_put_ui(bs, 0, 1);   /* low_delay_hrd_flag */ 
573
574         bitstream_put_ui(bs, 0, 1); /* pic_struct_present_flag */
575         bitstream_put_ui(bs, 0, 1); /* bitstream_restriction_flag */
576     }
577
578     rbsp_trailing_bits(bs);     /* rbsp_trailing_bits */
579 }
580
581
582 void H264EncoderImpl::pps_rbsp(bitstream *bs)
583 {
584     bitstream_put_ue(bs, pic_param.pic_parameter_set_id);      /* pic_parameter_set_id */
585     bitstream_put_ue(bs, pic_param.seq_parameter_set_id);      /* seq_parameter_set_id */
586
587     bitstream_put_ui(bs, pic_param.pic_fields.bits.entropy_coding_mode_flag, 1);  /* entropy_coding_mode_flag */
588
589     bitstream_put_ui(bs, 0, 1);                         /* pic_order_present_flag: 0 */
590
591     bitstream_put_ue(bs, 0);                            /* num_slice_groups_minus1 */
592
593     bitstream_put_ue(bs, pic_param.num_ref_idx_l0_active_minus1);      /* num_ref_idx_l0_active_minus1 */
594     bitstream_put_ue(bs, pic_param.num_ref_idx_l1_active_minus1);      /* num_ref_idx_l1_active_minus1 */
595
596     bitstream_put_ui(bs, pic_param.pic_fields.bits.weighted_pred_flag, 1);     /* weighted_pred_flag: 0 */
597     bitstream_put_ui(bs, pic_param.pic_fields.bits.weighted_bipred_idc, 2);     /* weighted_bipred_idc: 0 */
598
599     bitstream_put_se(bs, pic_param.pic_init_qp - 26);  /* pic_init_qp_minus26 */
600     bitstream_put_se(bs, 0);                            /* pic_init_qs_minus26 */
601     bitstream_put_se(bs, 0);                            /* chroma_qp_index_offset */
602
603     bitstream_put_ui(bs, pic_param.pic_fields.bits.deblocking_filter_control_present_flag, 1); /* deblocking_filter_control_present_flag */
604     bitstream_put_ui(bs, 0, 1);                         /* constrained_intra_pred_flag */
605     bitstream_put_ui(bs, 0, 1);                         /* redundant_pic_cnt_present_flag */
606     
607     /* more_rbsp_data */
608     bitstream_put_ui(bs, pic_param.pic_fields.bits.transform_8x8_mode_flag, 1);    /*transform_8x8_mode_flag */
609     bitstream_put_ui(bs, 0, 1);                         /* pic_scaling_matrix_present_flag */
610     bitstream_put_se(bs, pic_param.second_chroma_qp_index_offset );    /*second_chroma_qp_index_offset */
611
612     rbsp_trailing_bits(bs);
613 }
614
615 void H264EncoderImpl::slice_header(bitstream *bs)
616 {
617     int first_mb_in_slice = slice_param.macroblock_address;
618
619     bitstream_put_ue(bs, first_mb_in_slice);        /* first_mb_in_slice: 0 */
620     bitstream_put_ue(bs, slice_param.slice_type);   /* slice_type */
621     bitstream_put_ue(bs, slice_param.pic_parameter_set_id);        /* pic_parameter_set_id: 0 */
622     bitstream_put_ui(bs, pic_param.frame_num, seq_param.seq_fields.bits.log2_max_frame_num_minus4 + 4); /* frame_num */
623
624     /* frame_mbs_only_flag == 1 */
625     if (!seq_param.seq_fields.bits.frame_mbs_only_flag) {
626         /* FIXME: */
627         assert(0);
628     }
629
630     if (pic_param.pic_fields.bits.idr_pic_flag)
631         bitstream_put_ue(bs, slice_param.idr_pic_id);           /* idr_pic_id: 0 */
632
633     if (seq_param.seq_fields.bits.pic_order_cnt_type == 0) {
634         bitstream_put_ui(bs, pic_param.CurrPic.TopFieldOrderCnt, seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 + 4);
635         /* pic_order_present_flag == 0 */
636     } else {
637         /* FIXME: */
638         assert(0);
639     }
640
641     /* redundant_pic_cnt_present_flag == 0 */
642     /* slice type */
643     if (IS_P_SLICE(slice_param.slice_type)) {
644         bitstream_put_ui(bs, slice_param.num_ref_idx_active_override_flag, 1);            /* num_ref_idx_active_override_flag: */
645
646         if (slice_param.num_ref_idx_active_override_flag)
647             bitstream_put_ue(bs, slice_param.num_ref_idx_l0_active_minus1);
648
649         /* ref_pic_list_reordering */
650         bitstream_put_ui(bs, 0, 1);            /* ref_pic_list_reordering_flag_l0: 0 */
651     } else if (IS_B_SLICE(slice_param.slice_type)) {
652         bitstream_put_ui(bs, slice_param.direct_spatial_mv_pred_flag, 1);            /* direct_spatial_mv_pred: 1 */
653
654         bitstream_put_ui(bs, slice_param.num_ref_idx_active_override_flag, 1);       /* num_ref_idx_active_override_flag: */
655
656         if (slice_param.num_ref_idx_active_override_flag) {
657             bitstream_put_ue(bs, slice_param.num_ref_idx_l0_active_minus1);
658             bitstream_put_ue(bs, slice_param.num_ref_idx_l1_active_minus1);
659         }
660
661         /* ref_pic_list_reordering */
662         bitstream_put_ui(bs, 0, 1);            /* ref_pic_list_reordering_flag_l0: 0 */
663         bitstream_put_ui(bs, 0, 1);            /* ref_pic_list_reordering_flag_l1: 0 */
664     }
665
666     if ((pic_param.pic_fields.bits.weighted_pred_flag &&
667          IS_P_SLICE(slice_param.slice_type)) ||
668         ((pic_param.pic_fields.bits.weighted_bipred_idc == 1) &&
669          IS_B_SLICE(slice_param.slice_type))) {
670         /* FIXME: fill weight/offset table */
671         assert(0);
672     }
673
674     /* dec_ref_pic_marking */
675     if (pic_param.pic_fields.bits.reference_pic_flag) {     /* nal_ref_idc != 0 */
676         unsigned char no_output_of_prior_pics_flag = 0;
677         unsigned char long_term_reference_flag = 0;
678         unsigned char adaptive_ref_pic_marking_mode_flag = 0;
679
680         if (pic_param.pic_fields.bits.idr_pic_flag) {
681             bitstream_put_ui(bs, no_output_of_prior_pics_flag, 1);            /* no_output_of_prior_pics_flag: 0 */
682             bitstream_put_ui(bs, long_term_reference_flag, 1);            /* long_term_reference_flag: 0 */
683         } else {
684             bitstream_put_ui(bs, adaptive_ref_pic_marking_mode_flag, 1);            /* adaptive_ref_pic_marking_mode_flag: 0 */
685         }
686     }
687
688     if (pic_param.pic_fields.bits.entropy_coding_mode_flag &&
689         !IS_I_SLICE(slice_param.slice_type))
690         bitstream_put_ue(bs, slice_param.cabac_init_idc);               /* cabac_init_idc: 0 */
691
692     bitstream_put_se(bs, slice_param.slice_qp_delta);                   /* slice_qp_delta: 0 */
693
694     /* ignore for SP/SI */
695
696     if (pic_param.pic_fields.bits.deblocking_filter_control_present_flag) {
697         bitstream_put_ue(bs, slice_param.disable_deblocking_filter_idc);           /* disable_deblocking_filter_idc: 0 */
698
699         if (slice_param.disable_deblocking_filter_idc != 1) {
700             bitstream_put_se(bs, slice_param.slice_alpha_c0_offset_div2);          /* slice_alpha_c0_offset_div2: 2 */
701             bitstream_put_se(bs, slice_param.slice_beta_offset_div2);              /* slice_beta_offset_div2: 2 */
702         }
703     }
704
705     if (pic_param.pic_fields.bits.entropy_coding_mode_flag) {
706         bitstream_byte_aligning(bs, 1);
707     }
708 }
709
710 int H264EncoderImpl::build_packed_pic_buffer(unsigned char **header_buffer)
711 {
712     bitstream bs;
713
714     bitstream_start(&bs);
715     nal_start_code_prefix(&bs);
716     nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS);
717     pps_rbsp(&bs);
718     bitstream_end(&bs);
719
720     *header_buffer = (unsigned char *)bs.buffer;
721     return bs.bit_offset;
722 }
723
724 int
725 H264EncoderImpl::build_packed_seq_buffer(unsigned char **header_buffer)
726 {
727     bitstream bs;
728
729     bitstream_start(&bs);
730     nal_start_code_prefix(&bs);
731     nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS);
732     sps_rbsp(&bs);
733     bitstream_end(&bs);
734
735     *header_buffer = (unsigned char *)bs.buffer;
736     return bs.bit_offset;
737 }
738
739 int H264EncoderImpl::build_packed_slice_buffer(unsigned char **header_buffer)
740 {
741     bitstream bs;
742     int is_idr = !!pic_param.pic_fields.bits.idr_pic_flag;
743     int is_ref = !!pic_param.pic_fields.bits.reference_pic_flag;
744
745     bitstream_start(&bs);
746     nal_start_code_prefix(&bs);
747
748     if (IS_I_SLICE(slice_param.slice_type)) {
749         nal_header(&bs, NAL_REF_IDC_HIGH, is_idr ? NAL_IDR : NAL_NON_IDR);
750     } else if (IS_P_SLICE(slice_param.slice_type)) {
751         nal_header(&bs, NAL_REF_IDC_MEDIUM, NAL_NON_IDR);
752     } else {
753         assert(IS_B_SLICE(slice_param.slice_type));
754         nal_header(&bs, is_ref ? NAL_REF_IDC_LOW : NAL_REF_IDC_NONE, NAL_NON_IDR);
755     }
756
757     slice_header(&bs);
758     bitstream_end(&bs);
759
760     *header_buffer = (unsigned char *)bs.buffer;
761     return bs.bit_offset;
762 }
763
764
765 /*
766   Assume frame sequence is: Frame#0, #1, #2, ..., #M, ..., #X, ... (encoding order)
767   1) period between Frame #X and Frame #N = #X - #N
768   2) 0 means infinite for intra_period/intra_idr_period, and 0 is invalid for ip_period
769   3) intra_idr_period % intra_period (intra_period > 0) and intra_period % ip_period must be 0
770   4) intra_period and intra_idr_period take precedence over ip_period
771   5) if ip_period > 1, intra_period and intra_idr_period are not the strict periods
772      of I/IDR frames; see the examples below
773   -------------------------------------------------------------------
774   intra_period intra_idr_period ip_period frame sequence (intra_period/intra_idr_period/ip_period)
775   0            ignored          1          IDRPPPPPPP ...     (No IDR/I any more)
776   0            ignored        >=2          IDR(PBB)(PBB)...   (No IDR/I any more)
777   1            0                ignored    IDRIIIIIII...      (No IDR any more)
778   1            1                ignored    IDR IDR IDR IDR...
779   1            >=2              ignored    IDRII IDRII IDR... (1/3/ignore)
780   >=2          0                1          IDRPPP IPPP I...   (3/0/1)
781   >=2          0              >=2          IDR(PBB)(PBB)(IBB) (6/0/3)
782                                               (PBB)(IBB)(PBB)(IBB)... 
783   >=2          >=2              1          IDRPPPPP IPPPPP IPPPPP (6/18/1)
784                                            IDRPPPPP IPPPPP IPPPPP...
785   >=2          >=2              >=2        {IDR(PBB)(PBB)(IBB)(PBB)(IBB)(PBB)} (6/18/3)
786                                            {IDR(PBB)(PBB)(IBB)(PBB)(IBB)(PBB)}...
787                                            {IDR(PBB)(PBB)(IBB)(PBB)}           (6/12/3)
788                                            {IDR(PBB)(PBB)(IBB)(PBB)}...
789                                            {IDR(PBB)(PBB)}                     (6/6/3)
790                                            {IDR(PBB)(PBB)}.
791 */
792
793 // General pts/dts strategy:
794 //
795 // Getting pts and dts right with variable frame rate (VFR) and B-frames can be a
796 // bit tricky. We assume first of all that the frame rate never goes _above_
797 // MAX_FPS, which gives us a frame period N. The decoder can always decode
798 // in at least this speed, as long as dts <= pts (a frame is never scheduled for
799 // presentation before it has been decoded). Furthermore, we never have longer chains of
800 // B-frames than a fixed constant C. (In a B-frame chain, we say that the base
801 // I/P-frame has order O=0, the B-frame depending on it directly has order O=1,
802 // etc. The last frame in the chain, which no B-frames depend on, is the “tip”
803 // frame, with an order O <= C.)
804 //
805 // Many strategies are possible, but we establish these rules:
806 //
807 //  - Tip frames have dts = pts - (C-O)*N.
808 //  - Non-tip frames have dts = dts_last + N.
809 //
810 // An example, with C=2 and N=10 and the data flow showed with arrows:
811 //
812 //        I  B  P  B  B  P
813 //   pts: 30 40 50 60 70 80
814 //        ↓  ↓     ↓
815 //   dts: 10 30 20 60 50←40
816 //         |  |  ↑        ↑
817 //         `--|--'        |
818 //             `----------'
819 //
820 // To show that this works fine also with irregular spacings, let's say that
821 // the third frame is delayed a bit (something earlier was dropped). Now the
822 // situation looks like this:
823 //
824 //        I  B  P  B  B   P
825 //   pts: 30 40 80 90 100 110
826 //        ↓  ↓     ↓
827 //   dts: 10 30 20 90 50←40
828 //         |  |  ↑        ↑
829 //         `--|--'        |
830 //             `----------'
831 //
832 // The resetting on every tip frame makes sure dts never ends up lagging a lot
833 // behind pts, and the subtraction of (C-O)*N makes sure dts <= pts.
834 //
835 // In the output of this function, if <pts_lag> is >= 0, it means to reset the
836 // dts from the current pts minus <pts_lag>, while if it's -1, the frame is not
837 // a tip frame and should be given a dts based on the previous one.
838 #define FRAME_P 0
839 #define FRAME_B 1
840 #define FRAME_I 2
841 #define FRAME_IDR 7
842 void encoding2display_order(
843     int encoding_order, int intra_period,
844     int intra_idr_period, int ip_period,
845     int *displaying_order,
846     int *frame_type, int *pts_lag)
847 {
848     int encoding_order_gop = 0;
849
850     *pts_lag = 0;
851
852     if (intra_period == 1) { /* all are I/IDR frames */
853         *displaying_order = encoding_order;
854         if (intra_idr_period == 0)
855             *frame_type = (encoding_order == 0)?FRAME_IDR:FRAME_I;
856         else
857             *frame_type = (encoding_order % intra_idr_period == 0)?FRAME_IDR:FRAME_I;
858         return;
859     }
860
861     if (intra_period == 0)
862         intra_idr_period = 0;
863
864     if (ip_period == 1) {
865         // No B-frames, sequence is like IDR PPPPP IPPPPP.
866         encoding_order_gop = (intra_idr_period == 0) ? encoding_order : (encoding_order % intra_idr_period);
867         *displaying_order = encoding_order;
868
869         if (encoding_order_gop == 0) { /* the first frame */
870             *frame_type = FRAME_IDR;
871         } else if (intra_period != 0 && /* have I frames */
872                    encoding_order_gop >= 2 &&
873                    (encoding_order_gop % intra_period == 0)) {
874             *frame_type = FRAME_I;
875         } else {
876             *frame_type = FRAME_P;
877         }
878         return;
879     } 
880
881     // We have B-frames. Sequence is like IDR (PBB)(PBB)(IBB)(PBB).
882     encoding_order_gop = (intra_idr_period == 0) ? encoding_order : (encoding_order % (intra_idr_period + 1));
883     *pts_lag = -1;  // Most frames are not tip frames.
884          
885     if (encoding_order_gop == 0) { /* the first frame */
886         *frame_type = FRAME_IDR;
887         *displaying_order = encoding_order;
888         // IDR frames are a special case; I honestly can't find the logic behind
889         // why this is the right thing, but it seems to line up nicely in practice :-)
890         *pts_lag = TIMEBASE / MAX_FPS;
891     } else if (((encoding_order_gop - 1) % ip_period) != 0) { /* B frames */
892         *frame_type = FRAME_B;
893         *displaying_order = encoding_order - 1;
894         if ((encoding_order_gop % ip_period) == 0) {
895             *pts_lag = 0;  // Last B-frame.
896         }
897     } else if (intra_period != 0 && /* have I frames */
898                encoding_order_gop >= 2 &&
899                ((encoding_order_gop - 1) / ip_period % (intra_period / ip_period)) == 0) {
900         *frame_type = FRAME_I;
901         *displaying_order = encoding_order + ip_period - 1;
902     } else {
903         *frame_type = FRAME_P;
904         *displaying_order = encoding_order + ip_period - 1;
905     }
906 }
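// Illustrative only (never called; the parameter values below are made up):
// dump the schedule produced by encoding2display_order() to make the mapping
// above easier to follow. With intra_period=6, intra_idr_period=12 and
// ip_period=3, the first eight calls yield frame types IDR P B B P B B I with
// display order 0 3 1 2 6 4 5 9.
static void print_example_schedule()
{
    for (int encoding_order = 0; encoding_order < 8; ++encoding_order) {
        int display_order, frame_type, pts_lag;
        encoding2display_order(encoding_order, 6, 12, 3,
                               &display_order, &frame_type, &pts_lag);
        printf("enc=%d -> disp=%d type=%d pts_lag=%d\n",
               encoding_order, display_order, frame_type, pts_lag);
    }
}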
907
908
909 static const char *rc_to_string(int rc_mode)
910 {
911     switch (rc_mode) {
912     case VA_RC_NONE:
913         return "NONE";
914     case VA_RC_CBR:
915         return "CBR";
916     case VA_RC_VBR:
917         return "VBR";
918     case VA_RC_VCM:
919         return "VCM";
920     case VA_RC_CQP:
921         return "CQP";
922     case VA_RC_VBR_CONSTRAINED:
923         return "VBR_CONSTRAINED";
924     default:
925         return "Unknown";
926     }
927 }
928
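// Open the VA display. <va_display> can be empty (use the default X11
// display), an X11 display name such as ":0", or a path to a DRM render node
// such as /dev/dri/renderD128. Zero-copy encoding is only attempted in the
// X11 cases, and is disabled if --uncompressed_video_to_http is in use.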
929 VADisplay H264EncoderImpl::va_open_display(const string &va_display)
930 {
931         if (va_display.empty()) {
932                 x11_display = XOpenDisplay(NULL);
933                 if (!x11_display) {
934                         fprintf(stderr, "error: can't connect to X server!\n");
935                         return NULL;
936                 }
937                 use_zerocopy = true;
938                 if (global_flags.uncompressed_video_to_http) {
939                         fprintf(stderr, "Disabling zerocopy H.264 encoding due to --uncompressed_video_to_http.\n");
940                         use_zerocopy = false;
941                 }
942                 return vaGetDisplay(x11_display);
943         } else if (va_display[0] != '/') {
944                 x11_display = XOpenDisplay(va_display.c_str());
945                 if (!x11_display) {
946                         fprintf(stderr, "error: can't connect to X server!\n");
947                         return NULL;
948                 }
949                 use_zerocopy = true;
950                 if (global_flags.uncompressed_video_to_http) {
951                         fprintf(stderr, "Disabling zerocopy H.264 encoding due to --uncompressed_video_to_http.\n");
952                         use_zerocopy = false;
953                 }
954                 return vaGetDisplay(x11_display);
955         } else {
956                 drm_fd = open(va_display.c_str(), O_RDWR);
957                 if (drm_fd == -1) {
958                         perror(va_display.c_str());
959                         return NULL;
960                 }
961                 use_zerocopy = false;
962                 return vaGetDisplayDRM(drm_fd);
963         }
964 }
965
966 void H264EncoderImpl::va_close_display(VADisplay va_dpy)
967 {
968         if (x11_display) {
969                 XCloseDisplay(x11_display);
970                 x11_display = nullptr;
971         }
972         if (drm_fd != -1) {
973                 close(drm_fd);
974         }
975 }
976
977 int H264EncoderImpl::init_va(const string &va_display)
978 {
979     VAProfile profile_list[]={VAProfileH264High, VAProfileH264Main, VAProfileH264Baseline, VAProfileH264ConstrainedBaseline};
980     VAEntrypoint *entrypoints;
981     int num_entrypoints, slice_entrypoint;
982     int support_encode = 0;    
983     int major_ver, minor_ver;
984     VAStatus va_status;
985     unsigned int i;
986
987     va_dpy = va_open_display(va_display);
988     va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
989     CHECK_VASTATUS(va_status, "vaInitialize");
990
991     num_entrypoints = vaMaxNumEntrypoints(va_dpy);
992     entrypoints = (VAEntrypoint *)malloc(num_entrypoints * sizeof(*entrypoints));
993     if (!entrypoints) {
994         fprintf(stderr, "error: failed to initialize VA entrypoints array\n");
995         exit(1);
996     }
997
998     /* use the highest profile */
999     for (i = 0; i < sizeof(profile_list)/sizeof(profile_list[0]); i++) {
1000         if ((h264_profile != ~0) && h264_profile != profile_list[i])
1001             continue;
1002         
1003         h264_profile = profile_list[i];
1004         vaQueryConfigEntrypoints(va_dpy, h264_profile, entrypoints, &num_entrypoints);
1005         for (slice_entrypoint = 0; slice_entrypoint < num_entrypoints; slice_entrypoint++) {
1006             if (entrypoints[slice_entrypoint] == VAEntrypointEncSlice) {
1007                 support_encode = 1;
1008                 break;
1009             }
1010         }
1011         if (support_encode == 1)
1012             break;
1013     }
1014     
1015     if (support_encode == 0) {
1016         printf("Can't find VAEntrypointEncSlice for H264 profiles. If you are using a non-Intel GPU\n");
1017         printf("but have one in your system, try launching Nageru with --va-display /dev/dri/renderD128\n");
1018         printf("to use VA-API against DRM instead of X11.\n");
1019         exit(1);
1020     } else {
1021         switch (h264_profile) {
1022             case VAProfileH264Baseline:
1023                 ip_period = 1;
1024                 constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
1025                 h264_entropy_mode = 0;
1026                 break;
1027             case VAProfileH264ConstrainedBaseline:
1028                 constraint_set_flag |= (1 << 0 | 1 << 1); /* Annex A.2.2 */
1029                 ip_period = 1;
1030                 break;
1031
1032             case VAProfileH264Main:
1033                 constraint_set_flag |= (1 << 1); /* Annex A.2.2 */
1034                 break;
1035
1036             case VAProfileH264High:
1037                 constraint_set_flag |= (1 << 3); /* Annex A.2.4 */
1038                 break;
1039             default:
1040                 h264_profile = VAProfileH264Baseline;
1041                 ip_period = 1;
1042                 constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
1043                 break;
1044         }
1045     }
1046
1047     VAConfigAttrib attrib[VAConfigAttribTypeMax];
1048
1049     /* find out the format for the render target, and rate control mode */
1050     for (i = 0; i < VAConfigAttribTypeMax; i++)
1051         attrib[i].type = (VAConfigAttribType)i;
1052
1053     va_status = vaGetConfigAttributes(va_dpy, h264_profile, VAEntrypointEncSlice,
1054                                       &attrib[0], VAConfigAttribTypeMax);
1055     CHECK_VASTATUS(va_status, "vaGetConfigAttributes");
1056     /* check the interested configattrib */
1057     if ((attrib[VAConfigAttribRTFormat].value & VA_RT_FORMAT_YUV420) == 0) {
1058         printf("Not find desired YUV420 RT format\n");
1059         exit(1);
1060     } else {
1061         config_attrib[config_attrib_num].type = VAConfigAttribRTFormat;
1062         config_attrib[config_attrib_num].value = VA_RT_FORMAT_YUV420;
1063         config_attrib_num++;
1064     }
1065     
1066     if (attrib[VAConfigAttribRateControl].value != VA_ATTRIB_NOT_SUPPORTED) {
1067         int tmp = attrib[VAConfigAttribRateControl].value;
1068
1069         if (rc_mode == -1 || !(rc_mode & tmp))  {
1070             if (rc_mode != -1) {
1071                 printf("Warning: Don't support the specified RateControl mode: %s!!!, switch to ", rc_to_string(rc_mode));
1072             }
1073
1074             for (i = 0; i < sizeof(rc_default_modes) / sizeof(rc_default_modes[0]); i++) {
1075                 if (rc_default_modes[i] & tmp) {
1076                     rc_mode = rc_default_modes[i];
1077                     break;
1078                 }
1079             }
1080         }
1081
1082         config_attrib[config_attrib_num].type = VAConfigAttribRateControl;
1083         config_attrib[config_attrib_num].value = rc_mode;
1084         config_attrib_num++;
1085     }
1086     
1087
1088     if (attrib[VAConfigAttribEncPackedHeaders].value != VA_ATTRIB_NOT_SUPPORTED) {
1089         int tmp = attrib[VAConfigAttribEncPackedHeaders].value;
1090
1091         h264_packedheader = 1;
1092         config_attrib[config_attrib_num].type = VAConfigAttribEncPackedHeaders;
1093         config_attrib[config_attrib_num].value = VA_ENC_PACKED_HEADER_NONE;
1094         
1095         if (tmp & VA_ENC_PACKED_HEADER_SEQUENCE) {
1096             config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_SEQUENCE;
1097         }
1098         
1099         if (tmp & VA_ENC_PACKED_HEADER_PICTURE) {
1100             config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_PICTURE;
1101         }
1102         
1103         if (tmp & VA_ENC_PACKED_HEADER_SLICE) {
1104             config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_SLICE;
1105         }
1106         
1107         if (tmp & VA_ENC_PACKED_HEADER_MISC) {
1108             config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_MISC;
1109         }
1110         
1111         enc_packed_header_idx = config_attrib_num;
1112         config_attrib_num++;
1113     }
1114
1115     if (attrib[VAConfigAttribEncInterlaced].value != VA_ATTRIB_NOT_SUPPORTED) {
1116         config_attrib[config_attrib_num].type = VAConfigAttribEncInterlaced;
1117         config_attrib[config_attrib_num].value = VA_ENC_PACKED_HEADER_NONE;
1118         config_attrib_num++;
1119     }
1120     
1121     if (attrib[VAConfigAttribEncMaxRefFrames].value != VA_ATTRIB_NOT_SUPPORTED) {
1122         h264_maxref = attrib[VAConfigAttribEncMaxRefFrames].value;
1123     }
1124
1125     free(entrypoints);
1126     return 0;
1127 }
1128
1129 int H264EncoderImpl::setup_encode()
1130 {
1131     VAStatus va_status;
1132     VASurfaceID *tmp_surfaceid;
1133     int codedbuf_size, i;
1134     static VASurfaceID src_surface[SURFACE_NUM];
1135     static VASurfaceID ref_surface[SURFACE_NUM];
1136     
1137     va_status = vaCreateConfig(va_dpy, h264_profile, VAEntrypointEncSlice,
1138             &config_attrib[0], config_attrib_num, &config_id);
1139     CHECK_VASTATUS(va_status, "vaCreateConfig");
1140
1141     /* create source surfaces */
1142     va_status = vaCreateSurfaces(va_dpy,
1143                                  VA_RT_FORMAT_YUV420, frame_width_mbaligned, frame_height_mbaligned,
1144                                  &src_surface[0], SURFACE_NUM,
1145                                  NULL, 0);
1146     CHECK_VASTATUS(va_status, "vaCreateSurfaces");
1147
1148     /* create reference surfaces */
1149     va_status = vaCreateSurfaces(va_dpy,
1150                                  VA_RT_FORMAT_YUV420, frame_width_mbaligned, frame_height_mbaligned,
1151                                  &ref_surface[0], SURFACE_NUM,
1152                                  NULL, 0);
1153     CHECK_VASTATUS(va_status, "vaCreateSurfaces");
1154
1155     tmp_surfaceid = (VASurfaceID *)calloc(2 * SURFACE_NUM, sizeof(VASurfaceID));
1156     memcpy(tmp_surfaceid, src_surface, SURFACE_NUM * sizeof(VASurfaceID));
1157     memcpy(tmp_surfaceid + SURFACE_NUM, ref_surface, SURFACE_NUM * sizeof(VASurfaceID));
1158     
1159     /* Create a context for this encode pipe */
1160     va_status = vaCreateContext(va_dpy, config_id,
1161                                 frame_width_mbaligned, frame_height_mbaligned,
1162                                 VA_PROGRESSIVE,
1163                                 tmp_surfaceid, 2 * SURFACE_NUM,
1164                                 &context_id);
1165     CHECK_VASTATUS(va_status, "vaCreateContext");
1166     free(tmp_surfaceid);
1167
1168     codedbuf_size = (frame_width_mbaligned * frame_height_mbaligned * 400) / (16*16);
1169
1170     for (i = 0; i < SURFACE_NUM; i++) {
1171         /* Create the coded buffer once per surface. The other VA buffers are not
1172          * needed again after vaRenderPicture(), so the application can simply
1173          * vaCreateBuffer() them for every frame, but the coded buffer has to be
1174          * mapped and read back after vaRenderPicture()/vaEndPicture(), so VA does
1175          * not manage it for us.
1176          */
1177         va_status = vaCreateBuffer(va_dpy, context_id, VAEncCodedBufferType,
1178                 codedbuf_size, 1, NULL, &gl_surfaces[i].coded_buf);
1179         CHECK_VASTATUS(va_status, "vaCreateBuffer");
1180     }
1181
1182     /* create OpenGL objects */
1183     //glGenFramebuffers(SURFACE_NUM, fbos);
1184     
1185     for (i = 0; i < SURFACE_NUM; i++) {
1186         glGenTextures(1, &gl_surfaces[i].y_tex);
1187         glGenTextures(1, &gl_surfaces[i].cbcr_tex);
1188
1189         if (!use_zerocopy) {
1190             // Create Y image.
1191             glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].y_tex);
1192             glTexStorage2D(GL_TEXTURE_2D, 1, GL_R8, frame_width, frame_height);
1193
1194             // Create CbCr image.
1195             glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].cbcr_tex);
1196             glTexStorage2D(GL_TEXTURE_2D, 1, GL_RG8, frame_width / 2, frame_height / 2);
1197
1198             // Generate a PBO to read into. It doesn't necessarily fit 1:1 with the VA-API
1199             // buffers, due to potentially differing pitch.
1200             glGenBuffers(1, &gl_surfaces[i].pbo);
1201             glBindBuffer(GL_PIXEL_PACK_BUFFER, gl_surfaces[i].pbo);
1202             glBufferStorage(GL_PIXEL_PACK_BUFFER, frame_width * frame_height * 2, nullptr, GL_MAP_READ_BIT | GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT);
1203             uint8_t *ptr = (uint8_t *)glMapBufferRange(GL_PIXEL_PACK_BUFFER, 0, frame_width * frame_height * 2, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
1204             gl_surfaces[i].y_offset = 0;
1205             gl_surfaces[i].cbcr_offset = frame_width * frame_height;
1206             gl_surfaces[i].y_ptr = ptr + gl_surfaces[i].y_offset;
1207             gl_surfaces[i].cbcr_ptr = ptr + gl_surfaces[i].cbcr_offset;
1208             glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
1209         }
1210     }
1211
1212     for (i = 0; i < SURFACE_NUM; i++) {
1213         gl_surfaces[i].src_surface = src_surface[i];
1214         gl_surfaces[i].ref_surface = ref_surface[i];
1215     }
1216     
1217     return 0;
1218 }
1219
1220 // Given a list like 1 9 3 0 2 8 4 and a pivot element 3, will produce
1221 //
1222 //   2 1 0 [3] 4 8 9
1223 template<class T, class C>
1224 static void sort_two(T *begin, T *end, const T &pivot, const C &less_than)
1225 {
1226         T *middle = partition(begin, end, [&](const T &elem) { return less_than(elem, pivot); });
1227         sort(begin, middle, [&](const T &a, const T &b) { return less_than(b, a); });
1228         sort(middle, end, less_than);
1229 }
1230
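// Maintain the short-term reference list: the just-encoded picture is marked
// as a short-term reference and pushed onto the front of ReferenceFrames
// (sliding window, at most num_ref_frames entries), and frame_num advances.
// B-frames are not used as references here, so they are skipped entirely.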
1231 void H264EncoderImpl::update_ReferenceFrames(int frame_type)
1232 {
1233     int i;
1234     
1235     if (frame_type == FRAME_B)
1236         return;
1237
1238     CurrentCurrPic.flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
1239     numShortTerm++;
1240     if (numShortTerm > num_ref_frames)
1241         numShortTerm = num_ref_frames;
1242     for (i=numShortTerm-1; i>0; i--)
1243         ReferenceFrames[i] = ReferenceFrames[i-1];
1244     ReferenceFrames[0] = CurrentCurrPic;
1245     
1246     current_frame_num++;
1247     if (current_frame_num > MaxFrameNum)
1248         current_frame_num = 0;
1249 }
1250
1251
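// Build the default reference picture lists, mirroring the spec's initial
// ordering: for P-frames, RefPicList0 is sorted by descending frame_idx; for
// B-frames, List0 takes pictures before the current POC (descending) followed
// by those after it (ascending), and List1 the other way around (see
// sort_two() above).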
1252 int H264EncoderImpl::update_RefPicList(int frame_type)
1253 {
1254     const auto descending_by_frame_idx = [](const VAPictureH264 &a, const VAPictureH264 &b) {
1255         return a.frame_idx > b.frame_idx;
1256     };
1257     const auto ascending_by_top_field_order_cnt = [](const VAPictureH264 &a, const VAPictureH264 &b) {
1258         return a.TopFieldOrderCnt < b.TopFieldOrderCnt;
1259     };
1260     const auto descending_by_top_field_order_cnt = [](const VAPictureH264 &a, const VAPictureH264 &b) {
1261         return a.TopFieldOrderCnt > b.TopFieldOrderCnt;
1262     };
1263     
1264     if (frame_type == FRAME_P) {
1265         memcpy(RefPicList0_P, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
1266         sort(&RefPicList0_P[0], &RefPicList0_P[numShortTerm], descending_by_frame_idx);
1267     } else if (frame_type == FRAME_B) {
1268         memcpy(RefPicList0_B, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
1269         sort_two(&RefPicList0_B[0], &RefPicList0_B[numShortTerm], CurrentCurrPic, ascending_by_top_field_order_cnt);
1270
1271         memcpy(RefPicList1_B, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
1272         sort_two(&RefPicList1_B[0], &RefPicList1_B[numShortTerm], CurrentCurrPic, descending_by_top_field_order_cnt);
1273     }
1274     
1275     return 0;
1276 }
1277
1278
1279 int H264EncoderImpl::render_sequence()
1280 {
1281     VABufferID seq_param_buf, rc_param_buf, render_id[2];
1282     VAStatus va_status;
1283     VAEncMiscParameterBuffer *misc_param;
1284     VAEncMiscParameterRateControl *misc_rate_ctrl;
1285     
1286     seq_param.level_idc = 41;  /* Level 4.1 */
1287     seq_param.picture_width_in_mbs = frame_width_mbaligned / 16;
1288     seq_param.picture_height_in_mbs = frame_height_mbaligned / 16;
1289     seq_param.bits_per_second = frame_bitrate;
1290
1291     seq_param.intra_period = intra_period;
1292     seq_param.intra_idr_period = intra_idr_period;
1293     seq_param.ip_period = ip_period;
1294
1295     seq_param.max_num_ref_frames = num_ref_frames;
1296     seq_param.seq_fields.bits.frame_mbs_only_flag = 1;
1297     seq_param.time_scale = TIMEBASE * 2;
1298     seq_param.num_units_in_tick = 1; /* Tc = num_units_in_tick / scale */
1299     seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = Log2MaxPicOrderCntLsb - 4;
1300     seq_param.seq_fields.bits.log2_max_frame_num_minus4 = Log2MaxFrameNum - 4;
1301     seq_param.seq_fields.bits.frame_mbs_only_flag = 1;
1302     seq_param.seq_fields.bits.chroma_format_idc = 1;
1303     seq_param.seq_fields.bits.direct_8x8_inference_flag = 1;
1304     
1305     if (frame_width != frame_width_mbaligned ||
1306         frame_height != frame_height_mbaligned) {
1307         seq_param.frame_cropping_flag = 1;
1308         seq_param.frame_crop_left_offset = 0;
1309         seq_param.frame_crop_right_offset = (frame_width_mbaligned - frame_width)/2;
1310         seq_param.frame_crop_top_offset = 0;
1311         seq_param.frame_crop_bottom_offset = (frame_height_mbaligned - frame_height)/2;
1312     }
1313     
1314     va_status = vaCreateBuffer(va_dpy, context_id,
1315                                VAEncSequenceParameterBufferType,
1316                                sizeof(seq_param), 1, &seq_param, &seq_param_buf);
1317     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1318     
1319     va_status = vaCreateBuffer(va_dpy, context_id,
1320                                VAEncMiscParameterBufferType,
1321                                sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
1322                                1, NULL, &rc_param_buf);
1323     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1324     
1325     vaMapBuffer(va_dpy, rc_param_buf, (void **)&misc_param);
1326     misc_param->type = VAEncMiscParameterTypeRateControl;
1327     misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data;
1328     memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl));
1329     misc_rate_ctrl->bits_per_second = frame_bitrate;
1330     misc_rate_ctrl->target_percentage = 66;
1331     misc_rate_ctrl->window_size = 1000;
1332     misc_rate_ctrl->initial_qp = initial_qp;
1333     misc_rate_ctrl->min_qp = minimal_qp;
1334     misc_rate_ctrl->basic_unit_size = 0;
1335     vaUnmapBuffer(va_dpy, rc_param_buf);
1336
1337     render_id[0] = seq_param_buf;
1338     render_id[1] = rc_param_buf;
1339     
1340     render_picture_and_delete(va_dpy, context_id, &render_id[0], 2);
1341     
1342     return 0;
1343 }
1344
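// Compute TopFieldOrderCnt for a frame, following the H.264 rules for
// pic_order_cnt_type == 0: the MSB part wraps whenever the LSB (kept modulo
// MaxPicOrderCntLsb) jumps by more than half its range. For IDR frames the previous
// MSB/LSB are taken as zero, and only non-B frames update the stored reference values.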
1345 static int calc_poc(int pic_order_cnt_lsb, int frame_type)
1346 {
1347     static int PicOrderCntMsb_ref = 0, pic_order_cnt_lsb_ref = 0;
1348     int prevPicOrderCntMsb, prevPicOrderCntLsb;
1349     int PicOrderCntMsb, TopFieldOrderCnt;
1350     
1351     if (frame_type == FRAME_IDR)
1352         prevPicOrderCntMsb = prevPicOrderCntLsb = 0;
1353     else {
1354         prevPicOrderCntMsb = PicOrderCntMsb_ref;
1355         prevPicOrderCntLsb = pic_order_cnt_lsb_ref;
1356     }
1357     
1358     if ((pic_order_cnt_lsb < prevPicOrderCntLsb) &&
1359         ((prevPicOrderCntLsb - pic_order_cnt_lsb) >= (int)(MaxPicOrderCntLsb / 2)))
1360         PicOrderCntMsb = prevPicOrderCntMsb + MaxPicOrderCntLsb;
1361     else if ((pic_order_cnt_lsb > prevPicOrderCntLsb) &&
1362              ((pic_order_cnt_lsb - prevPicOrderCntLsb) > (int)(MaxPicOrderCntLsb / 2)))
1363         PicOrderCntMsb = prevPicOrderCntMsb - MaxPicOrderCntLsb;
1364     else
1365         PicOrderCntMsb = prevPicOrderCntMsb;
1366     
1367     TopFieldOrderCnt = PicOrderCntMsb + pic_order_cnt_lsb;
1368
1369     if (frame_type != FRAME_B) {
1370         PicOrderCntMsb_ref = PicOrderCntMsb;
1371         pic_order_cnt_lsb_ref = pic_order_cnt_lsb;
1372     }
1373     
1374     return TopFieldOrderCnt;
1375 }
1376
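// Fill in and submit the picture parameter buffer: the reconstruction surface,
// frame_num and POC for the current picture, the active short-term reference frames
// (remaining slots marked invalid), and the coded buffer the driver writes the
// bitstream into.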
1377 int H264EncoderImpl::render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num)
1378 {
1379     VABufferID pic_param_buf;
1380     VAStatus va_status;
1381     int i = 0;
1382
1383     pic_param.CurrPic.picture_id = gl_surfaces[display_frame_num % SURFACE_NUM].ref_surface;
1384     pic_param.CurrPic.frame_idx = current_frame_num;
1385     pic_param.CurrPic.flags = 0;
1386     pic_param.CurrPic.TopFieldOrderCnt = calc_poc((display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb, frame_type);
1387     pic_param.CurrPic.BottomFieldOrderCnt = pic_param.CurrPic.TopFieldOrderCnt;
1388     CurrentCurrPic = pic_param.CurrPic;
1389
1390     memcpy(pic_param.ReferenceFrames, ReferenceFrames, numShortTerm*sizeof(VAPictureH264));
1391     for (i = numShortTerm; i < MAX_NUM_REF1; i++) {
1392         pic_param.ReferenceFrames[i].picture_id = VA_INVALID_SURFACE;
1393         pic_param.ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID;
1394     }
1395     
1396     pic_param.pic_fields.bits.idr_pic_flag = (frame_type == FRAME_IDR);
1397     pic_param.pic_fields.bits.reference_pic_flag = (frame_type != FRAME_B);
1398     pic_param.pic_fields.bits.entropy_coding_mode_flag = h264_entropy_mode;
1399     pic_param.pic_fields.bits.deblocking_filter_control_present_flag = 1;
1400     pic_param.frame_num = current_frame_num;
1401     pic_param.coded_buf = gl_surfaces[display_frame_num % SURFACE_NUM].coded_buf;
1402     pic_param.last_picture = false;  // FIXME
1403     pic_param.pic_init_qp = initial_qp;
1404
1405     va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
1406                                sizeof(pic_param), 1, &pic_param, &pic_param_buf);
1407     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1408
1409     render_picture_and_delete(va_dpy, context_id, &pic_param_buf, 1);
1410
1411     return 0;
1412 }
1413
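// Submit the packed SPS header: the raw bitstream from build_packed_seq_buffer() is
// passed to the driver as a parameter buffer plus a data buffer; has_emulation_bytes = 0
// tells the driver that emulation prevention bytes still need to be inserted.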
1414 int H264EncoderImpl::render_packedsequence()
1415 {
1416     VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1417     VABufferID packedseq_para_bufid, packedseq_data_bufid, render_id[2];
1418     unsigned int length_in_bits;
1419     unsigned char *packedseq_buffer = NULL;
1420     VAStatus va_status;
1421
1422     length_in_bits = build_packed_seq_buffer(&packedseq_buffer); 
1423     
1424     packedheader_param_buffer.type = VAEncPackedHeaderSequence;
1425     
1426     packedheader_param_buffer.bit_length = length_in_bits;
1427     packedheader_param_buffer.has_emulation_bytes = 0;
1428     va_status = vaCreateBuffer(va_dpy,
1429                                context_id,
1430                                VAEncPackedHeaderParameterBufferType,
1431                                sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1432                                &packedseq_para_bufid);
1433     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1434
1435     va_status = vaCreateBuffer(va_dpy,
1436                                context_id,
1437                                VAEncPackedHeaderDataBufferType,
1438                                (length_in_bits + 7) / 8, 1, packedseq_buffer,
1439                                &packedseq_data_bufid);
1440     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1441
1442     render_id[0] = packedseq_para_bufid;
1443     render_id[1] = packedseq_data_bufid;
1444     render_picture_and_delete(va_dpy, context_id, render_id, 2);
1445
1446     free(packedseq_buffer);
1447     
1448     return 0;
1449 }
1450
1451
1452 int H264EncoderImpl::render_packedpicture()
1453 {
1454     VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1455     VABufferID packedpic_para_bufid, packedpic_data_bufid, render_id[2];
1456     unsigned int length_in_bits;
1457     unsigned char *packedpic_buffer = NULL;
1458     VAStatus va_status;
1459
1460     length_in_bits = build_packed_pic_buffer(&packedpic_buffer); 
1461     packedheader_param_buffer.type = VAEncPackedHeaderPicture;
1462     packedheader_param_buffer.bit_length = length_in_bits;
1463     packedheader_param_buffer.has_emulation_bytes = 0;
1464
1465     va_status = vaCreateBuffer(va_dpy,
1466                                context_id,
1467                                VAEncPackedHeaderParameterBufferType,
1468                                sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1469                                &packedpic_para_bufid);
1470     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1471
1472     va_status = vaCreateBuffer(va_dpy,
1473                                context_id,
1474                                VAEncPackedHeaderDataBufferType,
1475                                (length_in_bits + 7) / 8, 1, packedpic_buffer,
1476                                &packedpic_data_bufid);
1477     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1478
1479     render_id[0] = packedpic_para_bufid;
1480     render_id[1] = packedpic_data_bufid;
1481     render_picture_and_delete(va_dpy, context_id, render_id, 2);
1482
1483     free(packedpic_buffer);
1484     
1485     return 0;
1486 }
1487
1488 void H264EncoderImpl::render_packedslice()
1489 {
1490     VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1491     VABufferID packedslice_para_bufid, packedslice_data_bufid, render_id[2];
1492     unsigned int length_in_bits;
1493     unsigned char *packedslice_buffer = NULL;
1494     VAStatus va_status;
1495
1496     length_in_bits = build_packed_slice_buffer(&packedslice_buffer);
1497     packedheader_param_buffer.type = VAEncPackedHeaderSlice;
1498     packedheader_param_buffer.bit_length = length_in_bits;
1499     packedheader_param_buffer.has_emulation_bytes = 0;
1500
1501     va_status = vaCreateBuffer(va_dpy,
1502                                context_id,
1503                                VAEncPackedHeaderParameterBufferType,
1504                                sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1505                                &packedslice_para_bufid);
1506     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1507
1508     va_status = vaCreateBuffer(va_dpy,
1509                                context_id,
1510                                VAEncPackedHeaderDataBufferType,
1511                                (length_in_bits + 7) / 8, 1, packedslice_buffer,
1512                                &packedslice_data_bufid);
1513     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1514
1515     render_id[0] = packedslice_para_bufid;
1516     render_id[1] = packedslice_data_bufid;
1517     render_picture_and_delete(va_dpy, context_id, render_id, 2);
1518
1519     free(packedslice_buffer);
1520 }
1521
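// Build and submit the slice header parameters (one slice per frame). IDR frames bump
// idr_pic_id so consecutive IDR pictures are distinguishable; P- and B-frames copy in
// the reference lists built by update_RefPicList(), truncated to what h264_maxref
// allows, with the remaining entries marked invalid.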
1522 int H264EncoderImpl::render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type)
1523 {
1524     VABufferID slice_param_buf;
1525     VAStatus va_status;
1526     int i;
1527
1528     update_RefPicList(frame_type);
1529     
1530     /* one frame, one slice */
1531     slice_param.macroblock_address = 0;
1532     slice_param.num_macroblocks = frame_width_mbaligned * frame_height_mbaligned/(16*16); /* Measured by MB */
1533     slice_param.slice_type = (frame_type == FRAME_IDR) ? SLICE_TYPE_I : frame_type;
1534     if (frame_type == FRAME_IDR) {
1535         if (encoding_frame_num != 0)
1536             ++slice_param.idr_pic_id;
1537     } else if (frame_type == FRAME_P) {
1538         int refpiclist0_max = h264_maxref & 0xffff;
1539         memcpy(slice_param.RefPicList0, RefPicList0_P, refpiclist0_max*sizeof(VAPictureH264));
1540
1541         for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
1542             slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
1543             slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
1544         }
1545     } else if (frame_type == FRAME_B) {
1546         int refpiclist0_max = h264_maxref & 0xffff;
1547         int refpiclist1_max = (h264_maxref >> 16) & 0xffff;
1548
1549         memcpy(slice_param.RefPicList0, RefPicList0_B, refpiclist0_max*sizeof(VAPictureH264));
1550         for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
1551             slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
1552             slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
1553         }
1554
1555         memcpy(slice_param.RefPicList1, RefPicList1_B, refpiclist1_max*sizeof(VAPictureH264));
1556         for (i = refpiclist1_max; i < MAX_NUM_REF2; i++) {
1557             slice_param.RefPicList1[i].picture_id = VA_INVALID_SURFACE;
1558             slice_param.RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
1559         }
1560     }
1561
1562     slice_param.slice_alpha_c0_offset_div2 = 0;
1563     slice_param.slice_beta_offset_div2 = 0;
1564     slice_param.direct_spatial_mv_pred_flag = 1;
1565     slice_param.pic_order_cnt_lsb = (display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb;
1566     
1567
1568     if (h264_packedheader &&
1569         config_attrib[enc_packed_header_idx].value & VA_ENC_PACKED_HEADER_SLICE)
1570         render_packedslice();
1571
1572     va_status = vaCreateBuffer(va_dpy, context_id, VAEncSliceParameterBufferType,
1573                                sizeof(slice_param), 1, &slice_param, &slice_param_buf);
1574     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1575
1576     render_picture_and_delete(va_dpy, context_id, &slice_param_buf, 1);
1577
1578     return 0;
1579 }
1580
1581
1582
1583 void H264EncoderImpl::save_codeddata(storage_task task)
1584 {    
1585     VACodedBufferSegment *buf_list = NULL;
1586     VAStatus va_status;
1587
1588     string data;
1589
1590     const int64_t global_delay = int64_t(ip_period - 1) * (TIMEBASE / MAX_FPS);  // So we never get negative dts.
1591
1592     va_status = vaMapBuffer(va_dpy, gl_surfaces[task.display_order % SURFACE_NUM].coded_buf, (void **)(&buf_list));
1593     CHECK_VASTATUS(va_status, "vaMapBuffer");
1594     while (buf_list != NULL) {
1595         data.append(reinterpret_cast<const char *>(buf_list->buf), buf_list->size);
1596         buf_list = (VACodedBufferSegment *) buf_list->next;
1597     }
1598     vaUnmapBuffer(va_dpy, gl_surfaces[task.display_order % SURFACE_NUM].coded_buf);
1599
1600     {
1601         // Add video.
1602         AVPacket pkt;
1603         memset(&pkt, 0, sizeof(pkt));
1604         pkt.buf = nullptr;
1605         pkt.data = reinterpret_cast<uint8_t *>(&data[0]);
1606         pkt.size = data.size();
1607         pkt.stream_index = 0;
1608         if (task.frame_type == FRAME_IDR || task.frame_type == FRAME_I) {
1609             pkt.flags = AV_PKT_FLAG_KEY;
1610         } else {
1611             pkt.flags = 0;
1612         }
1613         //pkt.duration = 1;
1614         httpd->add_packet(pkt, task.pts + global_delay, task.dts + global_delay,
1615                 global_flags.uncompressed_video_to_http ? HTTPD::DESTINATION_FILE_ONLY : HTTPD::DESTINATION_FILE_AND_HTTP);
1616     }
1617     // Encode and add all audio frames up to and including the pts of this video frame.
1618     for ( ;; ) {
1619         int64_t audio_pts;
1620         vector<float> audio;
1621         {
1622              unique_lock<mutex> lock(frame_queue_mutex);
1623              frame_queue_nonempty.wait(lock, [this]{ return storage_thread_should_quit || !pending_audio_frames.empty(); });
1624              if (storage_thread_should_quit && pending_audio_frames.empty()) return;
1625              auto it = pending_audio_frames.begin();
1626              if (it->first > task.pts) break;
1627              audio_pts = it->first;
1628              audio = move(it->second);
1629              pending_audio_frames.erase(it); 
1630         }
1631
1632         audio_frame->nb_samples = audio.size() / 2;
1633         audio_frame->format = AV_SAMPLE_FMT_S32;
1634         audio_frame->channel_layout = AV_CH_LAYOUT_STEREO;
1635
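        // avcodec_fill_audio_frame() only points the frame at int_samples; the actual
        // conversion from float to clipped signed 32-bit happens in the loop below,
        // before the frame is handed to the encoder.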
1636         unique_ptr<int32_t[]> int_samples(new int32_t[audio.size()]);
1637         int ret = avcodec_fill_audio_frame(audio_frame, 2, AV_SAMPLE_FMT_S32, (const uint8_t*)int_samples.get(), audio.size() * sizeof(int32_t), 1);
1638         if (ret < 0) {
1639             fprintf(stderr, "avcodec_fill_audio_frame() failed with %d\n", ret);
1640             exit(1);
1641         }
1642         for (int i = 0; i < audio_frame->nb_samples * 2; ++i) {
1643             if (audio[i] >= 1.0f) {
1644                 int_samples[i] = 2147483647;
1645             } else if (audio[i] <= -1.0f) {
1646                 int_samples[i] = -2147483647;
1647             } else {
1648                 int_samples[i] = lrintf(audio[i] * 2147483647.0f);
1649             }
1650         }
1651
1652         AVPacket pkt;
1653         av_init_packet(&pkt);
1654         pkt.data = nullptr;
1655         pkt.size = 0;
1656         int got_output;
1657         avcodec_encode_audio2(context_audio, &pkt, audio_frame, &got_output);
1658         if (got_output) {
1659             pkt.stream_index = 1;
1660             httpd->add_packet(pkt, audio_pts + global_delay, audio_pts + global_delay, HTTPD::DESTINATION_FILE_AND_HTTP);
1661         }
1662         // TODO: Delayed frames.
1663         av_frame_unref(audio_frame);
1664         av_free_packet(&pkt);
1665         if (audio_pts == task.pts) break;
1666     }
1667 }
1668
1669
1670 // Queue a coded-frame task for the storage thread, which will wait for the encode to finish and mux the result.
1671 void H264EncoderImpl::storage_task_enqueue(storage_task task)
1672 {
1673         unique_lock<mutex> lock(storage_task_queue_mutex);
1674         storage_task_queue.push(move(task));
1675         storage_task_queue_changed.notify_all();
1676 }
1677
1678 void H264EncoderImpl::storage_task_thread()
1679 {
1680         for ( ;; ) {
1681                 storage_task current;
1682                 {
1683                         // Wait until there's a frame to store (or we're asked to quit).
1684                         unique_lock<mutex> lock(storage_task_queue_mutex);
1685                         storage_task_queue_changed.wait(lock, [this]{ return storage_thread_should_quit || !storage_task_queue.empty(); });
1686                         if (storage_thread_should_quit && storage_task_queue.empty()) return;
1687                         current = move(storage_task_queue.front());
1688                         storage_task_queue.pop();
1689                 }
1690
1691                 VAStatus va_status;
1692            
1693                 // Wait until the encoder is done with this frame, then hand the coded data to the mux.
1694                 va_status = vaSyncSurface(va_dpy, gl_surfaces[current.display_order % SURFACE_NUM].src_surface);
1695                 CHECK_VASTATUS(va_status, "vaSyncSurface");
1696                 save_codeddata(move(current));
1697
1698                 {
1699                         unique_lock<mutex> lock(storage_task_queue_mutex);
1700                         srcsurface_status[current.display_order % SURFACE_NUM] = SRC_SURFACE_FREE;
1701                         storage_task_queue_changed.notify_all();
1702                 }
1703         }
1704 }
1705
1706 int H264EncoderImpl::release_encode()
1707 {
1708         for (unsigned i = 0; i < SURFACE_NUM; i++) {
1709                 vaDestroyBuffer(va_dpy, gl_surfaces[i].coded_buf);
1710                 vaDestroySurfaces(va_dpy, &gl_surfaces[i].src_surface, 1);
1711                 vaDestroySurfaces(va_dpy, &gl_surfaces[i].ref_surface, 1);
1712
1713                 if (!use_zerocopy) {
1714                         glBindBuffer(GL_PIXEL_PACK_BUFFER, gl_surfaces[i].pbo);
1715                         glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
1716                         glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
1717                         glDeleteBuffers(1, &gl_surfaces[i].pbo);
1718                 }
1719                 glDeleteTextures(1, &gl_surfaces[i].y_tex);
1720                 glDeleteTextures(1, &gl_surfaces[i].cbcr_tex);
1721         }
1722
1723         vaDestroyContext(va_dpy, context_id);
1724         vaDestroyConfig(va_dpy, config_id);
1725
1726         return 0;
1727 }
1728
1729 int H264EncoderImpl::deinit_va()
1730 {
1731     vaTerminate(va_dpy);
1732
1733     va_close_display(va_dpy);
1734
1735     return 0;
1736 }
1737
1738
1739 H264EncoderImpl::H264EncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
1740         : current_storage_frame(0), surface(surface), httpd(httpd)
1741 {
1742         AVCodec *codec_audio = avcodec_find_encoder(AUDIO_OUTPUT_CODEC);
1743         context_audio = avcodec_alloc_context3(codec_audio);
1744         context_audio->bit_rate = AUDIO_OUTPUT_BIT_RATE;
1745         context_audio->sample_rate = OUTPUT_FREQUENCY;
1746         context_audio->sample_fmt = AUDIO_OUTPUT_SAMPLE_FMT;
1747         context_audio->channels = 2;
1748         context_audio->channel_layout = AV_CH_LAYOUT_STEREO;
1749         context_audio->time_base = AVRational{1, TIMEBASE};
1750         if (avcodec_open2(context_audio, codec_audio, NULL) < 0) {
1751                 fprintf(stderr, "Could not open codec\n");
1752                 exit(1);
1753         }
1754         audio_frame = av_frame_alloc();
1755
1756         frame_width = width;
1757         frame_height = height;
1758         frame_width_mbaligned = (frame_width + 15) & (~15);
1759         frame_height_mbaligned = (frame_height + 15) & (~15);
1760
1761         //print_input();
1762
1763         if (global_flags.uncompressed_video_to_http) {
1764                 reorderer.reset(new FrameReorderer(ip_period - 1, frame_width, frame_height));
1765         }
1766
1767         init_va(va_display);
1768         setup_encode();
1769
1770         // No frames are ready yet.
1771         memset(srcsurface_status, SRC_SURFACE_FREE, sizeof(srcsurface_status));
1772             
1773         memset(&seq_param, 0, sizeof(seq_param));
1774         memset(&pic_param, 0, sizeof(pic_param));
1775         memset(&slice_param, 0, sizeof(slice_param));
1776
1777         storage_thread = thread(&H264EncoderImpl::storage_task_thread, this);
1778
1779         encode_thread = thread([this]{
1780                 //SDL_GL_MakeCurrent(window, context);
1781                 QOpenGLContext *context = create_context(this->surface);
1782                 eglBindAPI(EGL_OPENGL_API);
1783                 if (!make_current(context, this->surface)) {
1784                         printf("display=%p surface=%p context=%p curr=%p err=%d\n", eglGetCurrentDisplay(), this->surface, context, eglGetCurrentContext(),
1785                                 eglGetError());
1786                         exit(1);
1787                 }
1788                 encode_thread_func();
1789         });
1790 }
1791
1792 H264EncoderImpl::~H264EncoderImpl()
1793 {
1794         shutdown();
1795         av_frame_free(&audio_frame);
1796
1797         // TODO: Destroy context.
1798 }
1799
1800 bool H264EncoderImpl::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
1801 {
1802         assert(!is_shutdown);
1803         {
1804                 // Wait until this frame slot is done encoding.
1805                 unique_lock<mutex> lock(storage_task_queue_mutex);
1806                 if (srcsurface_status[current_storage_frame % SURFACE_NUM] != SRC_SURFACE_FREE) {
1807                         fprintf(stderr, "Warning: Slot %d (for frame %d) is still encoding, rendering has to wait for H.264 encoder\n",
1808                                 current_storage_frame % SURFACE_NUM, current_storage_frame);
1809                 }
1810                 storage_task_queue_changed.wait(lock, [this]{ return storage_thread_should_quit || (srcsurface_status[current_storage_frame % SURFACE_NUM] == SRC_SURFACE_FREE); });
1811                 srcsurface_status[current_storage_frame % SURFACE_NUM] = SRC_SURFACE_IN_ENCODING;
1812                 if (storage_thread_should_quit) return false;
1813         }
1814
1815         //*fbo = fbos[current_storage_frame % SURFACE_NUM];
1816         GLSurface *surf = &gl_surfaces[current_storage_frame % SURFACE_NUM];
1817         *y_tex = surf->y_tex;
1818         *cbcr_tex = surf->cbcr_tex;
1819
1820         VAStatus va_status = vaDeriveImage(va_dpy, surf->src_surface, &surf->surface_image);
1821         CHECK_VASTATUS(va_status, "vaDeriveImage");
1822
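        // In zero-copy mode, export the VA surface buffer as a DMA-BUF and wrap its Y
        // and CbCr planes in EGLImages bound to the textures handed back to the caller,
        // so the GPU renders straight into the encoder's input surface.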
1823         if (use_zerocopy) {
1824                 VABufferInfo buf_info;
1825                 buf_info.mem_type = VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME;  // or VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM?
1826                 va_status = vaAcquireBufferHandle(va_dpy, surf->surface_image.buf, &buf_info);
1827                 CHECK_VASTATUS(va_status, "vaAcquireBufferHandle");
1828
1829                 // Create Y image.
1830                 surf->y_egl_image = EGL_NO_IMAGE_KHR;
1831                 EGLint y_attribs[] = {
1832                         EGL_WIDTH, frame_width,
1833                         EGL_HEIGHT, frame_height,
1834                         EGL_LINUX_DRM_FOURCC_EXT, fourcc_code('R', '8', ' ', ' '),
1835                         EGL_DMA_BUF_PLANE0_FD_EXT, EGLint(buf_info.handle),
1836                         EGL_DMA_BUF_PLANE0_OFFSET_EXT, EGLint(surf->surface_image.offsets[0]),
1837                         EGL_DMA_BUF_PLANE0_PITCH_EXT, EGLint(surf->surface_image.pitches[0]),
1838                         EGL_NONE
1839                 };
1840
1841                 surf->y_egl_image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, y_attribs);
1842                 assert(surf->y_egl_image != EGL_NO_IMAGE_KHR);
1843
1844                 // Associate Y image to a texture.
1845                 glBindTexture(GL_TEXTURE_2D, *y_tex);
1846                 glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, surf->y_egl_image);
1847
1848                 // Create CbCr image.
1849                 surf->cbcr_egl_image = EGL_NO_IMAGE_KHR;
1850                 EGLint cbcr_attribs[] = {
1851                         EGL_WIDTH, frame_width,
1852                         EGL_HEIGHT, frame_height,
1853                         EGL_LINUX_DRM_FOURCC_EXT, fourcc_code('G', 'R', '8', '8'),
1854                         EGL_DMA_BUF_PLANE0_FD_EXT, EGLint(buf_info.handle),
1855                         EGL_DMA_BUF_PLANE0_OFFSET_EXT, EGLint(surf->surface_image.offsets[1]),
1856                         EGL_DMA_BUF_PLANE0_PITCH_EXT, EGLint(surf->surface_image.pitches[1]),
1857                         EGL_NONE
1858                 };
1859
1860                 surf->cbcr_egl_image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, cbcr_attribs);
1861                 assert(surf->cbcr_egl_image != EGL_NO_IMAGE_KHR);
1862
1863                 // Associate CbCr image to a texture.
1864                 glBindTexture(GL_TEXTURE_2D, *cbcr_tex);
1865                 glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, surf->cbcr_egl_image);
1866         }
1867
1868         return true;
1869 }
1870
1871 void H264EncoderImpl::add_audio(int64_t pts, vector<float> audio)
1872 {
1873         assert(!is_shutdown);
1874         {
1875                 unique_lock<mutex> lock(frame_queue_mutex);
1876                 pending_audio_frames[pts] = move(audio);
1877         }
1878         frame_queue_nonempty.notify_all();
1879 }
1880
1881 void H264EncoderImpl::end_frame(RefCountedGLsync fence, int64_t pts, const vector<RefCountedFrame> &input_frames)
1882 {
1883         assert(!is_shutdown);
1884
1885         if (!use_zerocopy) {
1886                 GLSurface *surf = &gl_surfaces[current_storage_frame % SURFACE_NUM];
1887
1888                 glPixelStorei(GL_PACK_ROW_LENGTH, 0);
1889                 check_error();
1890
1891                 glBindBuffer(GL_PIXEL_PACK_BUFFER, surf->pbo);
1892                 check_error();
1893
1894                 glBindTexture(GL_TEXTURE_2D, surf->y_tex);
1895                 check_error();
1896                 glGetTexImage(GL_TEXTURE_2D, 0, GL_RED, GL_UNSIGNED_BYTE, BUFFER_OFFSET(surf->y_offset));
1897                 check_error();
1898
1899                 glBindTexture(GL_TEXTURE_2D, surf->cbcr_tex);
1900                 check_error();
1901                 glGetTexImage(GL_TEXTURE_2D, 0, GL_RG, GL_UNSIGNED_BYTE, BUFFER_OFFSET(surf->cbcr_offset));
1902                 check_error();
1903
1904                 glBindTexture(GL_TEXTURE_2D, 0);
1905                 check_error();
1906                 glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
1907                 check_error();
1908
1909                 glMemoryBarrier(GL_TEXTURE_UPDATE_BARRIER_BIT | GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
1910                 check_error();
1911                 fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
1912                 check_error();
1913         }
1914
1915         {
1916                 unique_lock<mutex> lock(frame_queue_mutex);
1917                 pending_video_frames[current_storage_frame] = PendingFrame{ fence, input_frames, pts };
1918                 ++current_storage_frame;
1919         }
1920         frame_queue_nonempty.notify_all();
1921 }
1922
1923 void H264EncoderImpl::shutdown()
1924 {
1925         if (is_shutdown) {
1926                 return;
1927         }
1928
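        // Shut down in dependency order: first the encode thread (the producer for the
        // storage queue), then the storage thread, and finally the VA-API/GL resources.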
1929         {
1930                 unique_lock<mutex> lock(frame_queue_mutex);
1931                 encode_thread_should_quit = true;
1932                 frame_queue_nonempty.notify_all();
1933         }
1934         encode_thread.join();
1935         {
1936                 unique_lock<mutex> lock(storage_task_queue_mutex);
1937                 storage_thread_should_quit = true;
1938                 frame_queue_nonempty.notify_all();
1939                 storage_task_queue_changed.notify_all();
1940         }
1941         storage_thread.join();
1942
1943         release_encode();
1944         deinit_va();
1945         is_shutdown = true;
1946 }
1947
1948 void H264EncoderImpl::encode_thread_func()
1949 {
1950         int64_t last_dts = -1;
1951         int gop_start_display_frame_num = 0;
1952         for (int encoding_frame_num = 0; ; ++encoding_frame_num) {
1953                 PendingFrame frame;
1954                 int pts_lag;
1955                 int frame_type, display_frame_num;
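                // Encoding order differs from display order when B-frames are in use
                // (the frame they predict from must be encoded first); map our running
                // encode counter to a display frame number, frame type and pts lag.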
1956                 encoding2display_order(encoding_frame_num, intra_period, intra_idr_period, ip_period,
1957                                        &display_frame_num, &frame_type, &pts_lag);
1958                 if (frame_type == FRAME_IDR) {
1959                         numShortTerm = 0;
1960                         current_frame_num = 0;
1961                         gop_start_display_frame_num = display_frame_num;
1962                 }
1963
1964                 {
1965                         unique_lock<mutex> lock(frame_queue_mutex);
1966                         frame_queue_nonempty.wait(lock, [this, display_frame_num]{
1967                                 return encode_thread_should_quit || pending_video_frames.count(display_frame_num) != 0;
1968                         });
1969                         if (encode_thread_should_quit && pending_video_frames.count(display_frame_num) == 0) {
1970                                 // We have queued frames that were supposed to be B-frames,
1971                                 // but there will be no P-frame to encode them against. Encode them all
1972                                 // as P-frames instead. Note that this happens under the mutex,
1973                                 // but nobody else uses it at this point, since we're shutting down,
1974                                 // so there's no contention.
1975                                 encode_remaining_frames_as_p(encoding_frame_num, gop_start_display_frame_num, last_dts);
1976                                 return;
1977                         } else {
1978                                 frame = move(pending_video_frames[display_frame_num]);
1979                                 pending_video_frames.erase(display_frame_num);
1980                         }
1981                 }
1982
1983                 // Determine the dts of this frame.
1984                 int64_t dts;
1985                 if (pts_lag == -1) {
1986                         assert(last_dts != -1);
1987                         dts = last_dts + (TIMEBASE / MAX_FPS);
1988                 } else {
1989                         dts = frame.pts - pts_lag;
1990                 }
1991                 last_dts = dts;
1992
1993                 encode_frame(frame, encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type, frame.pts, dts);
1994         }
1995 }
1996
1997 void H264EncoderImpl::encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts)
1998 {
1999         if (pending_video_frames.empty()) {
2000                 return;
2001         }
2002
2003         for (auto &pending_frame : pending_video_frames) {
2004                 int display_frame_num = pending_frame.first;
2005                 assert(display_frame_num > 0);
2006                 PendingFrame frame = move(pending_frame.second);
2007                 int64_t dts = last_dts + (TIMEBASE / MAX_FPS);
2008                 printf("Finalizing encode: Encoding leftover frame %d as P-frame instead of B-frame.\n", display_frame_num);
2009                 encode_frame(frame, encoding_frame_num++, display_frame_num, gop_start_display_frame_num, FRAME_P, frame.pts, dts);
2010                 last_dts = dts;
2011         }
2012
2013         if (global_flags.uncompressed_video_to_http) {
2014                 // Add frames left in reorderer.
2015                 while (!reorderer->empty()) {
2016                         pair<int64_t, const uint8_t *> output_frame = reorderer->get_first_frame();
2017                         add_packet_for_uncompressed_frame(output_frame.first, output_frame.second);
2018                 }
2019         }
2020 }
2021
2022 void H264EncoderImpl::add_packet_for_uncompressed_frame(int64_t pts, const uint8_t *data)
2023 {
2024         AVPacket pkt;
2025         memset(&pkt, 0, sizeof(pkt));
2026         pkt.buf = nullptr;
2027         pkt.data = const_cast<uint8_t *>(data);
2028         pkt.size = frame_width * frame_height * 2;
2029         pkt.stream_index = 0;
2030         pkt.flags = AV_PKT_FLAG_KEY;
2031         httpd->add_packet(pkt, pts, pts, HTTPD::DESTINATION_HTTP_ONLY);
2032 }
2033
2034 namespace {
2035
2036 void memcpy_with_pitch(uint8_t *dst, const uint8_t *src, size_t src_width, size_t dst_pitch, size_t height)
2037 {
2038         if (src_width == dst_pitch) {
2039                 memcpy(dst, src, src_width * height);
2040         } else {
2041                 for (size_t y = 0; y < height; ++y) {
2042                         const uint8_t *sptr = src + y * src_width;
2043                         uint8_t *dptr = dst + y * dst_pitch;
2044                         memcpy(dptr, sptr, src_width);
2045                 }
2046         }
2047 }
2048
2049 }  // namespace
2050
2051 void H264EncoderImpl::encode_frame(H264EncoderImpl::PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
2052                                    int frame_type, int64_t pts, int64_t dts)
2053 {
2054         // Wait for the GPU to be done with the frame.
2055         GLenum sync_status;
2056         do {
2057                 sync_status = glClientWaitSync(frame.fence.get(), 0, 1000000000);
2058                 check_error();
2059         } while (sync_status == GL_TIMEOUT_EXPIRED);
2060         assert(sync_status != GL_WAIT_FAILED);
2061
2062         // Release back any input frames we needed to render this frame.
2063         frame.input_frames.clear();
2064
2065         GLSurface *surf = &gl_surfaces[display_frame_num % SURFACE_NUM];
2066         VAStatus va_status;
2067
2068         if (use_zerocopy) {
2069                 eglDestroyImageKHR(eglGetCurrentDisplay(), surf->y_egl_image);
2070                 eglDestroyImageKHR(eglGetCurrentDisplay(), surf->cbcr_egl_image);
2071                 va_status = vaReleaseBufferHandle(va_dpy, surf->surface_image.buf);
2072                 CHECK_VASTATUS(va_status, "vaReleaseBufferHandle");
2073         } else {
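                // Zero-copy is disabled: end_frame() read the textures back into a PBO,
                // so copy the Y and interleaved CbCr planes from there into the mapped
                // VA surface, honoring the pitch the driver reports for each plane.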
2074                 unsigned char *surface_p = nullptr;
2075                 vaMapBuffer(va_dpy, surf->surface_image.buf, (void **)&surface_p);
2076
2077                 unsigned char *va_y_ptr = (unsigned char *)surface_p + surf->surface_image.offsets[0];
2078                 memcpy_with_pitch(va_y_ptr, surf->y_ptr, frame_width, surf->surface_image.pitches[0], frame_height);
2079
2080                 unsigned char *va_cbcr_ptr = (unsigned char *)surface_p + surf->surface_image.offsets[1];
2081                 memcpy_with_pitch(va_cbcr_ptr, surf->cbcr_ptr, (frame_width / 2) * sizeof(uint16_t), surf->surface_image.pitches[1], frame_height / 2);
2082
2083                 va_status = vaUnmapBuffer(va_dpy, surf->surface_image.buf);
2084                 CHECK_VASTATUS(va_status, "vaUnmapBuffer");
2085
2086                 if (global_flags.uncompressed_video_to_http) {
2087                         // Add uncompressed video. (Note that pts == dts here.)
2088                         const int64_t global_delay = int64_t(ip_period - 1) * (TIMEBASE / MAX_FPS);  // Needs to match audio.
2089                         pair<int64_t, const uint8_t *> output_frame = reorderer->reorder_frame(pts + global_delay, reinterpret_cast<uint8_t *>(surf->y_ptr));
2090                         if (output_frame.second != nullptr) {
2091                                 add_packet_for_uncompressed_frame(output_frame.first, output_frame.second);
2092                         }
2093                 }
2094         }
2095
2096         va_status = vaDestroyImage(va_dpy, surf->surface_image.image_id);
2097         CHECK_VASTATUS(va_status, "vaDestroyImage");
2098
2099         // Schedule the frame for encoding.
2100         VASurfaceID va_surface = surf->src_surface;
2101         va_status = vaBeginPicture(va_dpy, context_id, va_surface);
2102         CHECK_VASTATUS(va_status, "vaBeginPicture");
2103
2104         if (frame_type == FRAME_IDR) {
2105                 render_sequence();
2106                 render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
2107                 if (h264_packedheader) {
2108                         render_packedsequence();
2109                         render_packedpicture();
2110                 }
2111         } else {
2112                 //render_sequence();
2113                 render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
2114         }
2115         render_slice(encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type);
2116
2117         va_status = vaEndPicture(va_dpy, context_id);
2118         CHECK_VASTATUS(va_status, "vaEndPicture");
2119
2120         // The encode for this frame is now in flight (vaEndPicture() only kicks off
2121         // the asynchronous job); hand it over to the storage thread for syncing and muxing.
2122         storage_task tmp;
2123         tmp.display_order = display_frame_num;
2124         tmp.frame_type = frame_type;
2125         tmp.pts = pts;
2126         tmp.dts = dts;
2127         storage_task_enqueue(move(tmp));
2128
2129         update_ReferenceFrames(frame_type);
2130 }
2131
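// Minimal usage sketch for the proxy below (illustrative only; names such as samples,
// input_frames and the render step are assumptions, not taken from this file):
//
//   H264Encoder encoder(surface, va_display, width, height, &httpd);
//   GLuint y_tex, cbcr_tex;
//   if (encoder.begin_frame(&y_tex, &cbcr_tex)) {
//           // ... render the finished frame into y_tex/cbcr_tex ...
//           RefCountedGLsync fence(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
//           encoder.add_audio(pts, move(samples));
//           encoder.end_frame(fence, pts, input_frames);
//   }
//   encoder.shutdown();  // Also happens implicitly on destruction.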
2132 // Proxy object.
2133 H264Encoder::H264Encoder(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
2134         : impl(new H264EncoderImpl(surface, va_display, width, height, httpd)) {}
2135
2136 // Must be defined here because unique_ptr<> destructor needs to know the impl.
2137 H264Encoder::~H264Encoder() {}
2138
2139 void H264Encoder::add_audio(int64_t pts, vector<float> audio)
2140 {
2141         impl->add_audio(pts, audio);
2142 }
2143
2144 bool H264Encoder::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
2145 {
2146         return impl->begin_frame(y_tex, cbcr_tex);
2147 }
2148
2149 void H264Encoder::end_frame(RefCountedGLsync fence, int64_t pts, const vector<RefCountedFrame> &input_frames)
2150 {
2151         impl->end_frame(fence, pts, input_frames);
2152 }
2153
2154 void H264Encoder::shutdown()
2155 {
2156         impl->shutdown();
2157 }
2158
2159 // Real class.