]> git.sesse.net Git - nageru/blob - h264encode.cpp
Make it possible for file and HTTP streams to use different audio codecs.
[nageru] / h264encode.cpp
1 //#include "sysdeps.h"
2 #include "h264encode.h"
3
4 #include <movit/util.h>
5 #include <EGL/eglplatform.h>
6 #include <X11/X.h>
7 #include <X11/Xlib.h>
8 #include <assert.h>
9 #include <epoxy/egl.h>
10 extern "C" {
11 #include <libavcodec/avcodec.h>
12 #include <libavformat/avformat.h>
13 #include <libavutil/channel_layout.h>
14 #include <libavutil/frame.h>
15 #include <libavutil/rational.h>
16 #include <libavutil/samplefmt.h>
17 }
18 #include <libdrm/drm_fourcc.h>
19 #include <stdio.h>
20 #include <stdlib.h>
21 #include <string.h>
22 #include <fcntl.h>
23 #include <va/va.h>
24 #include <va/va_drm.h>
25 #include <va/va_drmcommon.h>
26 #include <va/va_enc_h264.h>
27 #include <va/va_x11.h>
28 #include <algorithm>
29 #include <condition_variable>
30 #include <cstdint>
31 #include <map>
32 #include <memory>
33 #include <mutex>
34 #include <queue>
35 #include <string>
36 #include <thread>
37 #include <utility>
38
39 #include "context.h"
40 #include "defs.h"
41 #include "flags.h"
42 #include "httpd.h"
43 #include "timebase.h"
44
45 using namespace std;
46
47 class QOpenGLContext;
48 class QSurface;
49
50 #define CHECK_VASTATUS(va_status, func)                                 \
51     if (va_status != VA_STATUS_SUCCESS) {                               \
52         fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
53         exit(1);                                                        \
54     }
55
56 #define BUFFER_OFFSET(i) ((char *)NULL + (i))
57
58 //#include "loadsurface.h"
59
60 #define NAL_REF_IDC_NONE        0
61 #define NAL_REF_IDC_LOW         1
62 #define NAL_REF_IDC_MEDIUM      2
63 #define NAL_REF_IDC_HIGH        3
64
65 #define NAL_NON_IDR             1
66 #define NAL_IDR                 5
67 #define NAL_SPS                 7
68 #define NAL_PPS                 8
69 #define NAL_SEI                 6
70
71 #define SLICE_TYPE_P            0
72 #define SLICE_TYPE_B            1
73 #define SLICE_TYPE_I            2
74 #define IS_P_SLICE(type) (SLICE_TYPE_P == (type))
75 #define IS_B_SLICE(type) (SLICE_TYPE_B == (type))
76 #define IS_I_SLICE(type) (SLICE_TYPE_I == (type))
77
78
79 #define ENTROPY_MODE_CAVLC      0
80 #define ENTROPY_MODE_CABAC      1
81
82 #define PROFILE_IDC_BASELINE    66
83 #define PROFILE_IDC_MAIN        77
84 #define PROFILE_IDC_HIGH        100
85    
86 #define BITSTREAM_ALLOCATE_STEPPING     4096
87 #define SURFACE_NUM 16 /* 16 surfaces for source YUV */
88 #define MAX_NUM_REF1 16 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
89 #define MAX_NUM_REF2 32 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
90
91 static constexpr unsigned int MaxFrameNum = (2<<16);
92 static constexpr unsigned int MaxPicOrderCntLsb = (2<<8);
93 static constexpr unsigned int Log2MaxFrameNum = 16;
94 static constexpr unsigned int Log2MaxPicOrderCntLsb = 8;
95 static constexpr int rc_default_modes[] = {  // Priority list of modes.
96     VA_RC_VBR,
97     VA_RC_CQP,
98     VA_RC_VBR_CONSTRAINED,
99     VA_RC_CBR,
100     VA_RC_VCM,
101     VA_RC_NONE,
102 };
103
104 /* thread to save coded data */
105 #define SRC_SURFACE_FREE        0
106 #define SRC_SURFACE_IN_ENCODING 1
107     
108 struct __bitstream {
109     unsigned int *buffer;
110     int bit_offset;
111     int max_size_in_dword;
112 };
113 typedef struct __bitstream bitstream;
114
115 using namespace std;
116
117 // H.264 video comes out in encoding order (e.g. with two B-frames:
118 // 0, 3, 1, 2, 6, 4, 5, etc.), but uncompressed video needs to
119 // come in the right order. Since we do everything, including waiting
120 // for the frames to come out of OpenGL, in encoding order, we need
121 // a reordering buffer for uncompressed frames so that they come out
122 // correctly. We go the super-lazy way of not making it understand
123 // anything about the true order (which introduces some extra latency,
124 // though); we know that for N B-frames we need at most (N-1) frames
125 // in the reorder buffer, and can just sort on that.
126 //
127 // The class also deals with keeping a freelist as needed.
128 class FrameReorderer {
129 public:
130         FrameReorderer(unsigned queue_length, int width, int height);
131
132         // Returns the next frame to insert with its pts, if any. Otherwise -1 and nullptr.
133         // Does _not_ take ownership of data; a copy is taken if needed.
134         // The returned pointer is valid until the next call to reorder_frame, or destruction.
135         // As a special case, if queue_length == 0, will just return pts and data (no reordering needed).
136         pair<int64_t, const uint8_t *> reorder_frame(int64_t pts, const uint8_t *data);
137
138         // The same as reorder_frame, but without inserting anything. Used to empty the queue.
139         pair<int64_t, const uint8_t *> get_first_frame();
140
141         bool empty() const { return frames.empty(); }
142
143 private:
144         unsigned queue_length;
145         int width, height;
146
147         priority_queue<pair<int64_t, uint8_t *>> frames;
148         stack<uint8_t *> freelist;  // Includes the last value returned from reorder_frame.
149
150         // Owns all the pointers. Normally, freelist and frames could do this themselves,
151         // except priority_queue doesn't work well with movable-only types.
152         vector<unique_ptr<uint8_t[]>> owner;
153 };
154
155 FrameReorderer::FrameReorderer(unsigned queue_length, int width, int height)
156     : queue_length(queue_length), width(width), height(height)
157 {
158         for (unsigned i = 0; i < queue_length; ++i) {
159                 owner.emplace_back(new uint8_t[width * height * 2]);
160                 freelist.push(owner.back().get());
161         }
162 }
163
164 pair<int64_t, const uint8_t *> FrameReorderer::reorder_frame(int64_t pts, const uint8_t *data)
165 {
166         if (queue_length == 0) {
167                 return make_pair(pts, data);
168         }
169
170         assert(!freelist.empty());
171         uint8_t *storage = freelist.top();
172         freelist.pop();
173         memcpy(storage, data, width * height * 2);
174         frames.emplace(-pts, storage);  // Invert pts to get smallest first.
175
176         if (frames.size() >= queue_length) {
177                 return get_first_frame();
178         } else {
179                 return make_pair(-1, nullptr);
180         }
181 }
182
183 pair<int64_t, const uint8_t *> FrameReorderer::get_first_frame()
184 {
185         assert(!frames.empty());
186         pair<int64_t, uint8_t *> storage = frames.top();
187         frames.pop();
188         int64_t pts = storage.first;
189         freelist.push(storage.second);
190         return make_pair(-pts, storage.second);  // Re-invert pts (see reorder_frame()).
191 }
192
193 class H264EncoderImpl {
194 public:
195         H264EncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd);
196         ~H264EncoderImpl();
197         void add_audio(int64_t pts, vector<float> audio);
198         bool begin_frame(GLuint *y_tex, GLuint *cbcr_tex);
199         RefCountedGLsync end_frame(int64_t pts, const vector<RefCountedFrame> &input_frames);
200         void shutdown();
201         void open_output_file(const std::string &filename);
202         void close_output_file();
203
204 private:
205         struct storage_task {
206                 unsigned long long display_order;
207                 int frame_type;
208                 vector<float> audio;
209                 int64_t pts, dts;
210         };
211         struct PendingFrame {
212                 RefCountedGLsync fence;
213                 vector<RefCountedFrame> input_frames;
214                 int64_t pts;
215         };
216
217         // So we never get negative dts.
218         int64_t global_delay() const {
219                 return int64_t(ip_period - 1) * (TIMEBASE / MAX_FPS);
220         }
221
222         void encode_thread_func();
223         void encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts);
224         void add_packet_for_uncompressed_frame(int64_t pts, const uint8_t *data);
225         void encode_frame(PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
226                           int frame_type, int64_t pts, int64_t dts);
227         void storage_task_thread();
228         void encode_audio(const vector<float> &audio,
229                           vector<float> *audio_queue,
230                           int64_t audio_pts,
231                           AVCodecContext *ctx,
232                           const vector<PacketDestination *> &destinations);
233         void encode_audio_one_frame(const float *audio,
234                                     size_t num_samples,  // In each channel.
235                                     int64_t audio_pts,
236                                     AVCodecContext *ctx,
237                                     const vector<PacketDestination *> &destinations);
238         void storage_task_enqueue(storage_task task);
239         void save_codeddata(storage_task task);
240         int render_packedsequence();
241         int render_packedpicture();
242         void render_packedslice();
243         int render_sequence();
244         int render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num);
245         void sps_rbsp(bitstream *bs);
246         void pps_rbsp(bitstream *bs);
247         int build_packed_pic_buffer(unsigned char **header_buffer);
248         int render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type);
249         void slice_header(bitstream *bs);
250         int build_packed_seq_buffer(unsigned char **header_buffer);
251         int build_packed_slice_buffer(unsigned char **header_buffer);
252         int init_va(const string &va_display);
253         int deinit_va();
254         void enable_zerocopy_if_possible();
255         VADisplay va_open_display(const string &va_display);
256         void va_close_display(VADisplay va_dpy);
257         int setup_encode();
258         int release_encode();
259         void update_ReferenceFrames(int frame_type);
260         int update_RefPicList(int frame_type);
261
262         bool is_shutdown = false;
263         bool use_zerocopy;
264         int drm_fd = -1;
265
266         thread encode_thread, storage_thread;
267
268         mutex storage_task_queue_mutex;
269         condition_variable storage_task_queue_changed;
270         int srcsurface_status[SURFACE_NUM];  // protected by storage_task_queue_mutex
271         queue<storage_task> storage_task_queue;  // protected by storage_task_queue_mutex
272         bool storage_thread_should_quit = false;  // protected by storage_task_queue_mutex
273
274         mutex frame_queue_mutex;
275         condition_variable frame_queue_nonempty;
276         bool encode_thread_should_quit = false;  // under frame_queue_mutex
277
278         int current_storage_frame;
279
280         map<int, PendingFrame> pending_video_frames;  // under frame_queue_mutex
281         map<int64_t, vector<float>> pending_audio_frames;  // under frame_queue_mutex
282         QSurface *surface;
283
284         AVCodecContext *context_audio_file;
285         AVCodecContext *context_audio_stream = nullptr;  // nullptr = don't code separate audio for stream.
286
287         vector<float> audio_queue_file;
288         vector<float> audio_queue_stream;
289
290         AVFrame *audio_frame = nullptr;
291         HTTPD *httpd;
292         unique_ptr<FrameReorderer> reorderer;
293
294         Display *x11_display = nullptr;
295
296         // Encoder parameters
297         VADisplay va_dpy;
298         VAProfile h264_profile = (VAProfile)~0;
299         VAConfigAttrib config_attrib[VAConfigAttribTypeMax];
300         int config_attrib_num = 0, enc_packed_header_idx;
301
302         struct GLSurface {
303                 VASurfaceID src_surface, ref_surface;
304                 VABufferID coded_buf;
305
306                 VAImage surface_image;
307                 GLuint y_tex, cbcr_tex;
308
309                 // Only if use_zerocopy == true.
310                 EGLImage y_egl_image, cbcr_egl_image;
311
312                 // Only if use_zerocopy == false.
313                 GLuint pbo;
314                 uint8_t *y_ptr, *cbcr_ptr;
315                 size_t y_offset, cbcr_offset;
316         };
317         GLSurface gl_surfaces[SURFACE_NUM];
318
319         VAConfigID config_id;
320         VAContextID context_id;
321         VAEncSequenceParameterBufferH264 seq_param;
322         VAEncPictureParameterBufferH264 pic_param;
323         VAEncSliceParameterBufferH264 slice_param;
324         VAPictureH264 CurrentCurrPic;
325         VAPictureH264 ReferenceFrames[MAX_NUM_REF1], RefPicList0_P[MAX_NUM_REF2], RefPicList0_B[MAX_NUM_REF2], RefPicList1_B[MAX_NUM_REF2];
326
327         // Static quality settings.
328         static constexpr unsigned int frame_bitrate = 15000000 / 60;  // Doesn't really matter; only initial_qp does.
329         static constexpr unsigned int num_ref_frames = 2;
330         static constexpr int initial_qp = 15;
331         static constexpr int minimal_qp = 0;
332         static constexpr int intra_period = 30;
333         static constexpr int intra_idr_period = MAX_FPS;  // About a second; more at lower frame rates. Not ideal.
334
335         // Quality settings that are meant to be static, but might be overridden
336         // by the profile.
337         int constraint_set_flag = 0;
338         int h264_packedheader = 0; /* support pack header? */
339         int h264_maxref = (1<<16|1);
340         int h264_entropy_mode = 1; /* cabac */
341         int ip_period = 3;
342
343         int rc_mode = -1;
344         unsigned int current_frame_num = 0;
345         unsigned int numShortTerm = 0;
346
347         int frame_width;
348         int frame_height;
349         int frame_width_mbaligned;
350         int frame_height_mbaligned;
351
352         unique_ptr<Mux> file_mux;  // To local disk.
353 };
354
355 // Supposedly vaRenderPicture() is supposed to destroy the buffer implicitly,
356 // but if we don't delete it here, we get leaks. The GStreamer implementation
357 // does the same.
358 static void render_picture_and_delete(VADisplay dpy, VAContextID context, VABufferID *buffers, int num_buffers)
359 {
360     VAStatus va_status = vaRenderPicture(dpy, context, buffers, num_buffers);
361     CHECK_VASTATUS(va_status, "vaRenderPicture");
362
363     for (int i = 0; i < num_buffers; ++i) {
364         va_status = vaDestroyBuffer(dpy, buffers[i]);
365         CHECK_VASTATUS(va_status, "vaDestroyBuffer");
366     }
367 }
368
369 static unsigned int 
370 va_swap32(unsigned int val)
371 {
372     unsigned char *pval = (unsigned char *)&val;
373
374     return ((pval[0] << 24)     |
375             (pval[1] << 16)     |
376             (pval[2] << 8)      |
377             (pval[3] << 0));
378 }
379
380 static void
381 bitstream_start(bitstream *bs)
382 {
383     bs->max_size_in_dword = BITSTREAM_ALLOCATE_STEPPING;
384     bs->buffer = (unsigned int *)calloc(bs->max_size_in_dword * sizeof(int), 1);
385     bs->bit_offset = 0;
386 }
387
388 static void
389 bitstream_end(bitstream *bs)
390 {
391     int pos = (bs->bit_offset >> 5);
392     int bit_offset = (bs->bit_offset & 0x1f);
393     int bit_left = 32 - bit_offset;
394
395     if (bit_offset) {
396         bs->buffer[pos] = va_swap32((bs->buffer[pos] << bit_left));
397     }
398 }
399  
400 static void
401 bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits)
402 {
403     int pos = (bs->bit_offset >> 5);
404     int bit_offset = (bs->bit_offset & 0x1f);
405     int bit_left = 32 - bit_offset;
406
407     if (!size_in_bits)
408         return;
409
410     bs->bit_offset += size_in_bits;
411
412     if (bit_left > size_in_bits) {
413         bs->buffer[pos] = (bs->buffer[pos] << size_in_bits | val);
414     } else {
415         size_in_bits -= bit_left;
416         if (bit_left >= 32) {
417             bs->buffer[pos] = (val >> size_in_bits);
418         } else {
419             bs->buffer[pos] = (bs->buffer[pos] << bit_left) | (val >> size_in_bits);
420         }
421         bs->buffer[pos] = va_swap32(bs->buffer[pos]);
422
423         if (pos + 1 == bs->max_size_in_dword) {
424             bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING;
425             bs->buffer = (unsigned int *)realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int));
426         }
427
428         bs->buffer[pos + 1] = val;
429     }
430 }
431
432 static void
433 bitstream_put_ue(bitstream *bs, unsigned int val)
434 {
435     int size_in_bits = 0;
436     int tmp_val = ++val;
437
438     while (tmp_val) {
439         tmp_val >>= 1;
440         size_in_bits++;
441     }
442
443     bitstream_put_ui(bs, 0, size_in_bits - 1); // leading zero
444     bitstream_put_ui(bs, val, size_in_bits);
445 }
446
447 static void
448 bitstream_put_se(bitstream *bs, int val)
449 {
450     unsigned int new_val;
451
452     if (val <= 0)
453         new_val = -2 * val;
454     else
455         new_val = 2 * val - 1;
456
457     bitstream_put_ue(bs, new_val);
458 }
459
460 static void
461 bitstream_byte_aligning(bitstream *bs, int bit)
462 {
463     int bit_offset = (bs->bit_offset & 0x7);
464     int bit_left = 8 - bit_offset;
465     int new_val;
466
467     if (!bit_offset)
468         return;
469
470     assert(bit == 0 || bit == 1);
471
472     if (bit)
473         new_val = (1 << bit_left) - 1;
474     else
475         new_val = 0;
476
477     bitstream_put_ui(bs, new_val, bit_left);
478 }
479
480 static void 
481 rbsp_trailing_bits(bitstream *bs)
482 {
483     bitstream_put_ui(bs, 1, 1);
484     bitstream_byte_aligning(bs, 0);
485 }
486
487 static void nal_start_code_prefix(bitstream *bs)
488 {
489     bitstream_put_ui(bs, 0x00000001, 32);
490 }
491
492 static void nal_header(bitstream *bs, int nal_ref_idc, int nal_unit_type)
493 {
494     bitstream_put_ui(bs, 0, 1);                /* forbidden_zero_bit: 0 */
495     bitstream_put_ui(bs, nal_ref_idc, 2);
496     bitstream_put_ui(bs, nal_unit_type, 5);
497 }
498
499 void H264EncoderImpl::sps_rbsp(bitstream *bs)
500 {
501     int profile_idc = PROFILE_IDC_BASELINE;
502
503     if (h264_profile  == VAProfileH264High)
504         profile_idc = PROFILE_IDC_HIGH;
505     else if (h264_profile  == VAProfileH264Main)
506         profile_idc = PROFILE_IDC_MAIN;
507
508     bitstream_put_ui(bs, profile_idc, 8);               /* profile_idc */
509     bitstream_put_ui(bs, !!(constraint_set_flag & 1), 1);                         /* constraint_set0_flag */
510     bitstream_put_ui(bs, !!(constraint_set_flag & 2), 1);                         /* constraint_set1_flag */
511     bitstream_put_ui(bs, !!(constraint_set_flag & 4), 1);                         /* constraint_set2_flag */
512     bitstream_put_ui(bs, !!(constraint_set_flag & 8), 1);                         /* constraint_set3_flag */
513     bitstream_put_ui(bs, 0, 4);                         /* reserved_zero_4bits */
514     bitstream_put_ui(bs, seq_param.level_idc, 8);      /* level_idc */
515     bitstream_put_ue(bs, seq_param.seq_parameter_set_id);      /* seq_parameter_set_id */
516
517     if ( profile_idc == PROFILE_IDC_HIGH) {
518         bitstream_put_ue(bs, 1);        /* chroma_format_idc = 1, 4:2:0 */ 
519         bitstream_put_ue(bs, 0);        /* bit_depth_luma_minus8 */
520         bitstream_put_ue(bs, 0);        /* bit_depth_chroma_minus8 */
521         bitstream_put_ui(bs, 0, 1);     /* qpprime_y_zero_transform_bypass_flag */
522         bitstream_put_ui(bs, 0, 1);     /* seq_scaling_matrix_present_flag */
523     }
524
525     bitstream_put_ue(bs, seq_param.seq_fields.bits.log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
526     bitstream_put_ue(bs, seq_param.seq_fields.bits.pic_order_cnt_type);        /* pic_order_cnt_type */
527
528     if (seq_param.seq_fields.bits.pic_order_cnt_type == 0)
529         bitstream_put_ue(bs, seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4);     /* log2_max_pic_order_cnt_lsb_minus4 */
530     else {
531         assert(0);
532     }
533
534     bitstream_put_ue(bs, seq_param.max_num_ref_frames);        /* num_ref_frames */
535     bitstream_put_ui(bs, 0, 1);                                 /* gaps_in_frame_num_value_allowed_flag */
536
537     bitstream_put_ue(bs, seq_param.picture_width_in_mbs - 1);  /* pic_width_in_mbs_minus1 */
538     bitstream_put_ue(bs, seq_param.picture_height_in_mbs - 1); /* pic_height_in_map_units_minus1 */
539     bitstream_put_ui(bs, seq_param.seq_fields.bits.frame_mbs_only_flag, 1);    /* frame_mbs_only_flag */
540
541     if (!seq_param.seq_fields.bits.frame_mbs_only_flag) {
542         assert(0);
543     }
544
545     bitstream_put_ui(bs, seq_param.seq_fields.bits.direct_8x8_inference_flag, 1);      /* direct_8x8_inference_flag */
546     bitstream_put_ui(bs, seq_param.frame_cropping_flag, 1);            /* frame_cropping_flag */
547
548     if (seq_param.frame_cropping_flag) {
549         bitstream_put_ue(bs, seq_param.frame_crop_left_offset);        /* frame_crop_left_offset */
550         bitstream_put_ue(bs, seq_param.frame_crop_right_offset);       /* frame_crop_right_offset */
551         bitstream_put_ue(bs, seq_param.frame_crop_top_offset);         /* frame_crop_top_offset */
552         bitstream_put_ue(bs, seq_param.frame_crop_bottom_offset);      /* frame_crop_bottom_offset */
553     }
554     
555     //if ( frame_bit_rate < 0 ) { //TODO EW: the vui header isn't correct
556     if ( false ) {
557         bitstream_put_ui(bs, 0, 1); /* vui_parameters_present_flag */
558     } else {
559         bitstream_put_ui(bs, 1, 1); /* vui_parameters_present_flag */
560         bitstream_put_ui(bs, 0, 1); /* aspect_ratio_info_present_flag */
561         bitstream_put_ui(bs, 0, 1); /* overscan_info_present_flag */
562         bitstream_put_ui(bs, 1, 1); /* video_signal_type_present_flag */
563         {
564             bitstream_put_ui(bs, 5, 3);  /* video_format (5 = Unspecified) */
565             bitstream_put_ui(bs, 0, 1);  /* video_full_range_flag */
566             bitstream_put_ui(bs, 1, 1);  /* colour_description_present_flag */
567             {
568                 bitstream_put_ui(bs, 1, 8);  /* colour_primaries (1 = BT.709) */
569                 bitstream_put_ui(bs, 2, 8);  /* transfer_characteristics (2 = unspecified, since we use sRGB) */
570                 bitstream_put_ui(bs, 6, 8);  /* matrix_coefficients (6 = BT.601/SMPTE 170M) */
571             }
572         }
573         bitstream_put_ui(bs, 0, 1); /* chroma_loc_info_present_flag */
574         bitstream_put_ui(bs, 1, 1); /* timing_info_present_flag */
575         {
576             bitstream_put_ui(bs, 1, 32);  // FPS
577             bitstream_put_ui(bs, TIMEBASE * 2, 32);  // FPS
578             bitstream_put_ui(bs, 1, 1);
579         }
580         bitstream_put_ui(bs, 1, 1); /* nal_hrd_parameters_present_flag */
581         {
582             // hrd_parameters 
583             bitstream_put_ue(bs, 0);    /* cpb_cnt_minus1 */
584             bitstream_put_ui(bs, 4, 4); /* bit_rate_scale */
585             bitstream_put_ui(bs, 6, 4); /* cpb_size_scale */
586            
587             bitstream_put_ue(bs, frame_bitrate - 1); /* bit_rate_value_minus1[0] */
588             bitstream_put_ue(bs, frame_bitrate*8 - 1); /* cpb_size_value_minus1[0] */
589             bitstream_put_ui(bs, 1, 1);  /* cbr_flag[0] */
590
591             bitstream_put_ui(bs, 23, 5);   /* initial_cpb_removal_delay_length_minus1 */
592             bitstream_put_ui(bs, 23, 5);   /* cpb_removal_delay_length_minus1 */
593             bitstream_put_ui(bs, 23, 5);   /* dpb_output_delay_length_minus1 */
594             bitstream_put_ui(bs, 23, 5);   /* time_offset_length  */
595         }
596         bitstream_put_ui(bs, 0, 1);   /* vcl_hrd_parameters_present_flag */
597         bitstream_put_ui(bs, 0, 1);   /* low_delay_hrd_flag */ 
598
599         bitstream_put_ui(bs, 0, 1); /* pic_struct_present_flag */
600         bitstream_put_ui(bs, 0, 1); /* bitstream_restriction_flag */
601     }
602
603     rbsp_trailing_bits(bs);     /* rbsp_trailing_bits */
604 }
605
606
607 void H264EncoderImpl::pps_rbsp(bitstream *bs)
608 {
609     bitstream_put_ue(bs, pic_param.pic_parameter_set_id);      /* pic_parameter_set_id */
610     bitstream_put_ue(bs, pic_param.seq_parameter_set_id);      /* seq_parameter_set_id */
611
612     bitstream_put_ui(bs, pic_param.pic_fields.bits.entropy_coding_mode_flag, 1);  /* entropy_coding_mode_flag */
613
614     bitstream_put_ui(bs, 0, 1);                         /* pic_order_present_flag: 0 */
615
616     bitstream_put_ue(bs, 0);                            /* num_slice_groups_minus1 */
617
618     bitstream_put_ue(bs, pic_param.num_ref_idx_l0_active_minus1);      /* num_ref_idx_l0_active_minus1 */
619     bitstream_put_ue(bs, pic_param.num_ref_idx_l1_active_minus1);      /* num_ref_idx_l1_active_minus1 1 */
620
621     bitstream_put_ui(bs, pic_param.pic_fields.bits.weighted_pred_flag, 1);     /* weighted_pred_flag: 0 */
622     bitstream_put_ui(bs, pic_param.pic_fields.bits.weighted_bipred_idc, 2);     /* weighted_bipred_idc: 0 */
623
624     bitstream_put_se(bs, pic_param.pic_init_qp - 26);  /* pic_init_qp_minus26 */
625     bitstream_put_se(bs, 0);                            /* pic_init_qs_minus26 */
626     bitstream_put_se(bs, 0);                            /* chroma_qp_index_offset */
627
628     bitstream_put_ui(bs, pic_param.pic_fields.bits.deblocking_filter_control_present_flag, 1); /* deblocking_filter_control_present_flag */
629     bitstream_put_ui(bs, 0, 1);                         /* constrained_intra_pred_flag */
630     bitstream_put_ui(bs, 0, 1);                         /* redundant_pic_cnt_present_flag */
631     
632     /* more_rbsp_data */
633     bitstream_put_ui(bs, pic_param.pic_fields.bits.transform_8x8_mode_flag, 1);    /*transform_8x8_mode_flag */
634     bitstream_put_ui(bs, 0, 1);                         /* pic_scaling_matrix_present_flag */
635     bitstream_put_se(bs, pic_param.second_chroma_qp_index_offset );    /*second_chroma_qp_index_offset */
636
637     rbsp_trailing_bits(bs);
638 }
639
640 void H264EncoderImpl::slice_header(bitstream *bs)
641 {
642     int first_mb_in_slice = slice_param.macroblock_address;
643
644     bitstream_put_ue(bs, first_mb_in_slice);        /* first_mb_in_slice: 0 */
645     bitstream_put_ue(bs, slice_param.slice_type);   /* slice_type */
646     bitstream_put_ue(bs, slice_param.pic_parameter_set_id);        /* pic_parameter_set_id: 0 */
647     bitstream_put_ui(bs, pic_param.frame_num, seq_param.seq_fields.bits.log2_max_frame_num_minus4 + 4); /* frame_num */
648
649     /* frame_mbs_only_flag == 1 */
650     if (!seq_param.seq_fields.bits.frame_mbs_only_flag) {
651         /* FIXME: */
652         assert(0);
653     }
654
655     if (pic_param.pic_fields.bits.idr_pic_flag)
656         bitstream_put_ue(bs, slice_param.idr_pic_id);           /* idr_pic_id: 0 */
657
658     if (seq_param.seq_fields.bits.pic_order_cnt_type == 0) {
659         bitstream_put_ui(bs, pic_param.CurrPic.TopFieldOrderCnt, seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 + 4);
660         /* pic_order_present_flag == 0 */
661     } else {
662         /* FIXME: */
663         assert(0);
664     }
665
666     /* redundant_pic_cnt_present_flag == 0 */
667     /* slice type */
668     if (IS_P_SLICE(slice_param.slice_type)) {
669         bitstream_put_ui(bs, slice_param.num_ref_idx_active_override_flag, 1);            /* num_ref_idx_active_override_flag: */
670
671         if (slice_param.num_ref_idx_active_override_flag)
672             bitstream_put_ue(bs, slice_param.num_ref_idx_l0_active_minus1);
673
674         /* ref_pic_list_reordering */
675         bitstream_put_ui(bs, 0, 1);            /* ref_pic_list_reordering_flag_l0: 0 */
676     } else if (IS_B_SLICE(slice_param.slice_type)) {
677         bitstream_put_ui(bs, slice_param.direct_spatial_mv_pred_flag, 1);            /* direct_spatial_mv_pred: 1 */
678
679         bitstream_put_ui(bs, slice_param.num_ref_idx_active_override_flag, 1);       /* num_ref_idx_active_override_flag: */
680
681         if (slice_param.num_ref_idx_active_override_flag) {
682             bitstream_put_ue(bs, slice_param.num_ref_idx_l0_active_minus1);
683             bitstream_put_ue(bs, slice_param.num_ref_idx_l1_active_minus1);
684         }
685
686         /* ref_pic_list_reordering */
687         bitstream_put_ui(bs, 0, 1);            /* ref_pic_list_reordering_flag_l0: 0 */
688         bitstream_put_ui(bs, 0, 1);            /* ref_pic_list_reordering_flag_l1: 0 */
689     }
690
691     if ((pic_param.pic_fields.bits.weighted_pred_flag &&
692          IS_P_SLICE(slice_param.slice_type)) ||
693         ((pic_param.pic_fields.bits.weighted_bipred_idc == 1) &&
694          IS_B_SLICE(slice_param.slice_type))) {
695         /* FIXME: fill weight/offset table */
696         assert(0);
697     }
698
699     /* dec_ref_pic_marking */
700     if (pic_param.pic_fields.bits.reference_pic_flag) {     /* nal_ref_idc != 0 */
701         unsigned char no_output_of_prior_pics_flag = 0;
702         unsigned char long_term_reference_flag = 0;
703         unsigned char adaptive_ref_pic_marking_mode_flag = 0;
704
705         if (pic_param.pic_fields.bits.idr_pic_flag) {
706             bitstream_put_ui(bs, no_output_of_prior_pics_flag, 1);            /* no_output_of_prior_pics_flag: 0 */
707             bitstream_put_ui(bs, long_term_reference_flag, 1);            /* long_term_reference_flag: 0 */
708         } else {
709             bitstream_put_ui(bs, adaptive_ref_pic_marking_mode_flag, 1);            /* adaptive_ref_pic_marking_mode_flag: 0 */
710         }
711     }
712
713     if (pic_param.pic_fields.bits.entropy_coding_mode_flag &&
714         !IS_I_SLICE(slice_param.slice_type))
715         bitstream_put_ue(bs, slice_param.cabac_init_idc);               /* cabac_init_idc: 0 */
716
717     bitstream_put_se(bs, slice_param.slice_qp_delta);                   /* slice_qp_delta: 0 */
718
719     /* ignore for SP/SI */
720
721     if (pic_param.pic_fields.bits.deblocking_filter_control_present_flag) {
722         bitstream_put_ue(bs, slice_param.disable_deblocking_filter_idc);           /* disable_deblocking_filter_idc: 0 */
723
724         if (slice_param.disable_deblocking_filter_idc != 1) {
725             bitstream_put_se(bs, slice_param.slice_alpha_c0_offset_div2);          /* slice_alpha_c0_offset_div2: 2 */
726             bitstream_put_se(bs, slice_param.slice_beta_offset_div2);              /* slice_beta_offset_div2: 2 */
727         }
728     }
729
730     if (pic_param.pic_fields.bits.entropy_coding_mode_flag) {
731         bitstream_byte_aligning(bs, 1);
732     }
733 }
734
735 int H264EncoderImpl::build_packed_pic_buffer(unsigned char **header_buffer)
736 {
737     bitstream bs;
738
739     bitstream_start(&bs);
740     nal_start_code_prefix(&bs);
741     nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS);
742     pps_rbsp(&bs);
743     bitstream_end(&bs);
744
745     *header_buffer = (unsigned char *)bs.buffer;
746     return bs.bit_offset;
747 }
748
749 int
750 H264EncoderImpl::build_packed_seq_buffer(unsigned char **header_buffer)
751 {
752     bitstream bs;
753
754     bitstream_start(&bs);
755     nal_start_code_prefix(&bs);
756     nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS);
757     sps_rbsp(&bs);
758     bitstream_end(&bs);
759
760     *header_buffer = (unsigned char *)bs.buffer;
761     return bs.bit_offset;
762 }
763
764 int H264EncoderImpl::build_packed_slice_buffer(unsigned char **header_buffer)
765 {
766     bitstream bs;
767     int is_idr = !!pic_param.pic_fields.bits.idr_pic_flag;
768     int is_ref = !!pic_param.pic_fields.bits.reference_pic_flag;
769
770     bitstream_start(&bs);
771     nal_start_code_prefix(&bs);
772
773     if (IS_I_SLICE(slice_param.slice_type)) {
774         nal_header(&bs, NAL_REF_IDC_HIGH, is_idr ? NAL_IDR : NAL_NON_IDR);
775     } else if (IS_P_SLICE(slice_param.slice_type)) {
776         nal_header(&bs, NAL_REF_IDC_MEDIUM, NAL_NON_IDR);
777     } else {
778         assert(IS_B_SLICE(slice_param.slice_type));
779         nal_header(&bs, is_ref ? NAL_REF_IDC_LOW : NAL_REF_IDC_NONE, NAL_NON_IDR);
780     }
781
782     slice_header(&bs);
783     bitstream_end(&bs);
784
785     *header_buffer = (unsigned char *)bs.buffer;
786     return bs.bit_offset;
787 }
788
789
790 /*
791   Assume frame sequence is: Frame#0, #1, #2, ..., #M, ..., #X, ... (encoding order)
792   1) period between Frame #X and Frame #N = #X - #N
793   2) 0 means infinite for intra_period/intra_idr_period, and 0 is invalid for ip_period
794   3) intra_idr_period % intra_period (intra_period > 0) and intra_period % ip_period must be 0
795   4) intra_period and intra_idr_period take precedence over ip_period
796   5) if ip_period > 1, intra_period and intra_idr_period are not  the strict periods 
797      of I/IDR frames, see bellow examples
798   -------------------------------------------------------------------
799   intra_period intra_idr_period ip_period frame sequence (intra_period/intra_idr_period/ip_period)
800   0            ignored          1          IDRPPPPPPP ...     (No IDR/I any more)
801   0            ignored        >=2          IDR(PBB)(PBB)...   (No IDR/I any more)
802   1            0                ignored    IDRIIIIIII...      (No IDR any more)
803   1            1                ignored    IDR IDR IDR IDR...
804   1            >=2              ignored    IDRII IDRII IDR... (1/3/ignore)
805   >=2          0                1          IDRPPP IPPP I...   (3/0/1)
806   >=2          0              >=2          IDR(PBB)(PBB)(IBB) (6/0/3)
807                                               (PBB)(IBB)(PBB)(IBB)... 
808   >=2          >=2              1          IDRPPPPP IPPPPP IPPPPP (6/18/1)
809                                            IDRPPPPP IPPPPP IPPPPP...
810   >=2          >=2              >=2        {IDR(PBB)(PBB)(IBB)(PBB)(IBB)(PBB)} (6/18/3)
811                                            {IDR(PBB)(PBB)(IBB)(PBB)(IBB)(PBB)}...
812                                            {IDR(PBB)(PBB)(IBB)(PBB)}           (6/12/3)
813                                            {IDR(PBB)(PBB)(IBB)(PBB)}...
814                                            {IDR(PBB)(PBB)}                     (6/6/3)
815                                            {IDR(PBB)(PBB)}.
816 */
817
818 // General pts/dts strategy:
819 //
820 // Getting pts and dts right with variable frame rate (VFR) and B-frames can be a
821 // bit tricky. We assume first of all that the frame rate never goes _above_
822 // MAX_FPS, which gives us a frame period N. The decoder can always decode
823 // in at least this speed, as long at dts <= pts (the frame is not attempted
824 // presented before it is decoded). Furthermore, we never have longer chains of
825 // B-frames than a fixed constant C. (In a B-frame chain, we say that the base
826 // I/P-frame has order O=0, the B-frame depending on it directly has order O=1,
827 // etc. The last frame in the chain, which no B-frames depend on, is the “tip”
828 // frame, with an order O <= C.)
829 //
830 // Many strategies are possible, but we establish these rules:
831 //
832 //  - Tip frames have dts = pts - (C-O)*N.
833 //  - Non-tip frames have dts = dts_last + N.
834 //
835 // An example, with C=2 and N=10 and the data flow showed with arrows:
836 //
837 //        I  B  P  B  B  P
838 //   pts: 30 40 50 60 70 80
839 //        ↓  ↓     ↓
840 //   dts: 10 30 20 60 50←40
841 //         |  |  ↑        ↑
842 //         `--|--'        |
843 //             `----------'
844 //
845 // To show that this works fine also with irregular spacings, let's say that
846 // the third frame is delayed a bit (something earlier was dropped). Now the
847 // situation looks like this:
848 //
849 //        I  B  P  B  B   P
850 //   pts: 30 40 80 90 100 110
851 //        ↓  ↓     ↓
852 //   dts: 10 30 20 90 50←40
853 //         |  |  ↑        ↑
854 //         `--|--'        |
855 //             `----------'
856 //
857 // The resetting on every tip frame makes sure dts never ends up lagging a lot
858 // behind pts, and the subtraction of (C-O)*N makes sure pts <= dts.
859 //
860 // In the output of this function, if <dts_lag> is >= 0, it means to reset the
861 // dts from the current pts minus <dts_lag>, while if it's -1, the frame is not
862 // a tip frame and should be given a dts based on the previous one.
863 #define FRAME_P 0
864 #define FRAME_B 1
865 #define FRAME_I 2
866 #define FRAME_IDR 7
867 void encoding2display_order(
868     int encoding_order, int intra_period,
869     int intra_idr_period, int ip_period,
870     int *displaying_order,
871     int *frame_type, int *pts_lag)
872 {
873     int encoding_order_gop = 0;
874
875     *pts_lag = 0;
876
877     if (intra_period == 1) { /* all are I/IDR frames */
878         *displaying_order = encoding_order;
879         if (intra_idr_period == 0)
880             *frame_type = (encoding_order == 0)?FRAME_IDR:FRAME_I;
881         else
882             *frame_type = (encoding_order % intra_idr_period == 0)?FRAME_IDR:FRAME_I;
883         return;
884     }
885
886     if (intra_period == 0)
887         intra_idr_period = 0;
888
889     if (ip_period == 1) {
890         // No B-frames, sequence is like IDR PPPPP IPPPPP.
891         encoding_order_gop = (intra_idr_period == 0) ? encoding_order : (encoding_order % intra_idr_period);
892         *displaying_order = encoding_order;
893
894         if (encoding_order_gop == 0) { /* the first frame */
895             *frame_type = FRAME_IDR;
896         } else if (intra_period != 0 && /* have I frames */
897                    encoding_order_gop >= 2 &&
898                    (encoding_order_gop % intra_period == 0)) {
899             *frame_type = FRAME_I;
900         } else {
901             *frame_type = FRAME_P;
902         }
903         return;
904     } 
905
906     // We have B-frames. Sequence is like IDR (PBB)(PBB)(IBB)(PBB).
907     encoding_order_gop = (intra_idr_period == 0) ? encoding_order : (encoding_order % (intra_idr_period + 1));
908     *pts_lag = -1;  // Most frames are not tip frames.
909          
910     if (encoding_order_gop == 0) { /* the first frame */
911         *frame_type = FRAME_IDR;
912         *displaying_order = encoding_order;
913         // IDR frames are a special case; I honestly can't find the logic behind
914         // why this is the right thing, but it seems to line up nicely in practice :-)
915         *pts_lag = TIMEBASE / MAX_FPS;
916     } else if (((encoding_order_gop - 1) % ip_period) != 0) { /* B frames */
917         *frame_type = FRAME_B;
918         *displaying_order = encoding_order - 1;
919         if ((encoding_order_gop % ip_period) == 0) {
920             *pts_lag = 0;  // Last B-frame.
921         }
922     } else if (intra_period != 0 && /* have I frames */
923                encoding_order_gop >= 2 &&
924                ((encoding_order_gop - 1) / ip_period % (intra_period / ip_period)) == 0) {
925         *frame_type = FRAME_I;
926         *displaying_order = encoding_order + ip_period - 1;
927     } else {
928         *frame_type = FRAME_P;
929         *displaying_order = encoding_order + ip_period - 1;
930     }
931 }
932
933
934 static const char *rc_to_string(int rc_mode)
935 {
936     switch (rc_mode) {
937     case VA_RC_NONE:
938         return "NONE";
939     case VA_RC_CBR:
940         return "CBR";
941     case VA_RC_VBR:
942         return "VBR";
943     case VA_RC_VCM:
944         return "VCM";
945     case VA_RC_CQP:
946         return "CQP";
947     case VA_RC_VBR_CONSTRAINED:
948         return "VBR_CONSTRAINED";
949     default:
950         return "Unknown";
951     }
952 }
953
954 void H264EncoderImpl::enable_zerocopy_if_possible()
955 {
956         if (global_flags.uncompressed_video_to_http) {
957                 fprintf(stderr, "Disabling zerocopy H.264 encoding due to --uncompressed_video_to_http.\n");
958                 use_zerocopy = false;
959         } else {
960                 use_zerocopy = true;
961         }
962 }
963
964 VADisplay H264EncoderImpl::va_open_display(const string &va_display)
965 {
966         if (va_display.empty()) {
967                 x11_display = XOpenDisplay(NULL);
968                 if (!x11_display) {
969                         fprintf(stderr, "error: can't connect to X server!\n");
970                         return NULL;
971                 }
972                 enable_zerocopy_if_possible();
973                 return vaGetDisplay(x11_display);
974         } else if (va_display[0] != '/') {
975                 x11_display = XOpenDisplay(va_display.c_str());
976                 if (!x11_display) {
977                         fprintf(stderr, "error: can't connect to X server!\n");
978                         return NULL;
979                 }
980                 enable_zerocopy_if_possible();
981                 return vaGetDisplay(x11_display);
982         } else {
983                 drm_fd = open(va_display.c_str(), O_RDWR);
984                 if (drm_fd == -1) {
985                         perror(va_display.c_str());
986                         return NULL;
987                 }
988                 use_zerocopy = false;
989                 return vaGetDisplayDRM(drm_fd);
990         }
991 }
992
993 void H264EncoderImpl::va_close_display(VADisplay va_dpy)
994 {
995         if (x11_display) {
996                 XCloseDisplay(x11_display);
997                 x11_display = nullptr;
998         }
999         if (drm_fd != -1) {
1000                 close(drm_fd);
1001         }
1002 }
1003
1004 int H264EncoderImpl::init_va(const string &va_display)
1005 {
1006     VAProfile profile_list[]={VAProfileH264High, VAProfileH264Main, VAProfileH264Baseline, VAProfileH264ConstrainedBaseline};
1007     VAEntrypoint *entrypoints;
1008     int num_entrypoints, slice_entrypoint;
1009     int support_encode = 0;    
1010     int major_ver, minor_ver;
1011     VAStatus va_status;
1012     unsigned int i;
1013
1014     va_dpy = va_open_display(va_display);
1015     va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
1016     CHECK_VASTATUS(va_status, "vaInitialize");
1017
1018     num_entrypoints = vaMaxNumEntrypoints(va_dpy);
1019     entrypoints = (VAEntrypoint *)malloc(num_entrypoints * sizeof(*entrypoints));
1020     if (!entrypoints) {
1021         fprintf(stderr, "error: failed to initialize VA entrypoints array\n");
1022         exit(1);
1023     }
1024
1025     /* use the highest profile */
1026     for (i = 0; i < sizeof(profile_list)/sizeof(profile_list[0]); i++) {
1027         if ((h264_profile != ~0) && h264_profile != profile_list[i])
1028             continue;
1029         
1030         h264_profile = profile_list[i];
1031         vaQueryConfigEntrypoints(va_dpy, h264_profile, entrypoints, &num_entrypoints);
1032         for (slice_entrypoint = 0; slice_entrypoint < num_entrypoints; slice_entrypoint++) {
1033             if (entrypoints[slice_entrypoint] == VAEntrypointEncSlice) {
1034                 support_encode = 1;
1035                 break;
1036             }
1037         }
1038         if (support_encode == 1)
1039             break;
1040     }
1041     
1042     if (support_encode == 0) {
1043         printf("Can't find VAEntrypointEncSlice for H264 profiles. If you are using a non-Intel GPU\n");
1044         printf("but have one in your system, try launching Nageru with --va-display /dev/dri/renderD128\n");
1045         printf("to use VA-API against DRM instead of X11.\n");
1046         exit(1);
1047     } else {
1048         switch (h264_profile) {
1049             case VAProfileH264Baseline:
1050                 ip_period = 1;
1051                 constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
1052                 h264_entropy_mode = 0;
1053                 break;
1054             case VAProfileH264ConstrainedBaseline:
1055                 constraint_set_flag |= (1 << 0 | 1 << 1); /* Annex A.2.2 */
1056                 ip_period = 1;
1057                 break;
1058
1059             case VAProfileH264Main:
1060                 constraint_set_flag |= (1 << 1); /* Annex A.2.2 */
1061                 break;
1062
1063             case VAProfileH264High:
1064                 constraint_set_flag |= (1 << 3); /* Annex A.2.4 */
1065                 break;
1066             default:
1067                 h264_profile = VAProfileH264Baseline;
1068                 ip_period = 1;
1069                 constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
1070                 break;
1071         }
1072     }
1073
1074     VAConfigAttrib attrib[VAConfigAttribTypeMax];
1075
1076     /* find out the format for the render target, and rate control mode */
1077     for (i = 0; i < VAConfigAttribTypeMax; i++)
1078         attrib[i].type = (VAConfigAttribType)i;
1079
1080     va_status = vaGetConfigAttributes(va_dpy, h264_profile, VAEntrypointEncSlice,
1081                                       &attrib[0], VAConfigAttribTypeMax);
1082     CHECK_VASTATUS(va_status, "vaGetConfigAttributes");
1083     /* check the interested configattrib */
1084     if ((attrib[VAConfigAttribRTFormat].value & VA_RT_FORMAT_YUV420) == 0) {
1085         printf("Not find desired YUV420 RT format\n");
1086         exit(1);
1087     } else {
1088         config_attrib[config_attrib_num].type = VAConfigAttribRTFormat;
1089         config_attrib[config_attrib_num].value = VA_RT_FORMAT_YUV420;
1090         config_attrib_num++;
1091     }
1092     
1093     if (attrib[VAConfigAttribRateControl].value != VA_ATTRIB_NOT_SUPPORTED) {
1094         int tmp = attrib[VAConfigAttribRateControl].value;
1095
1096         if (rc_mode == -1 || !(rc_mode & tmp))  {
1097             if (rc_mode != -1) {
1098                 printf("Warning: Don't support the specified RateControl mode: %s!!!, switch to ", rc_to_string(rc_mode));
1099             }
1100
1101             for (i = 0; i < sizeof(rc_default_modes) / sizeof(rc_default_modes[0]); i++) {
1102                 if (rc_default_modes[i] & tmp) {
1103                     rc_mode = rc_default_modes[i];
1104                     break;
1105                 }
1106             }
1107         }
1108
1109         config_attrib[config_attrib_num].type = VAConfigAttribRateControl;
1110         config_attrib[config_attrib_num].value = rc_mode;
1111         config_attrib_num++;
1112     }
1113     
1114
1115     if (attrib[VAConfigAttribEncPackedHeaders].value != VA_ATTRIB_NOT_SUPPORTED) {
1116         int tmp = attrib[VAConfigAttribEncPackedHeaders].value;
1117
1118         h264_packedheader = 1;
1119         config_attrib[config_attrib_num].type = VAConfigAttribEncPackedHeaders;
1120         config_attrib[config_attrib_num].value = VA_ENC_PACKED_HEADER_NONE;
1121         
1122         if (tmp & VA_ENC_PACKED_HEADER_SEQUENCE) {
1123             config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_SEQUENCE;
1124         }
1125         
1126         if (tmp & VA_ENC_PACKED_HEADER_PICTURE) {
1127             config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_PICTURE;
1128         }
1129         
1130         if (tmp & VA_ENC_PACKED_HEADER_SLICE) {
1131             config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_SLICE;
1132         }
1133         
1134         if (tmp & VA_ENC_PACKED_HEADER_MISC) {
1135             config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_MISC;
1136         }
1137         
1138         enc_packed_header_idx = config_attrib_num;
1139         config_attrib_num++;
1140     }
1141
1142     if (attrib[VAConfigAttribEncInterlaced].value != VA_ATTRIB_NOT_SUPPORTED) {
1143         config_attrib[config_attrib_num].type = VAConfigAttribEncInterlaced;
1144         config_attrib[config_attrib_num].value = VA_ENC_PACKED_HEADER_NONE;
1145         config_attrib_num++;
1146     }
1147     
1148     if (attrib[VAConfigAttribEncMaxRefFrames].value != VA_ATTRIB_NOT_SUPPORTED) {
1149         h264_maxref = attrib[VAConfigAttribEncMaxRefFrames].value;
1150     }
1151
1152     free(entrypoints);
1153     return 0;
1154 }
1155
1156 int H264EncoderImpl::setup_encode()
1157 {
1158     VAStatus va_status;
1159     VASurfaceID *tmp_surfaceid;
1160     int codedbuf_size, i;
1161     static VASurfaceID src_surface[SURFACE_NUM];
1162     static VASurfaceID ref_surface[SURFACE_NUM];
1163     
1164     va_status = vaCreateConfig(va_dpy, h264_profile, VAEntrypointEncSlice,
1165             &config_attrib[0], config_attrib_num, &config_id);
1166     CHECK_VASTATUS(va_status, "vaCreateConfig");
1167
1168     /* create source surfaces */
1169     va_status = vaCreateSurfaces(va_dpy,
1170                                  VA_RT_FORMAT_YUV420, frame_width_mbaligned, frame_height_mbaligned,
1171                                  &src_surface[0], SURFACE_NUM,
1172                                  NULL, 0);
1173     CHECK_VASTATUS(va_status, "vaCreateSurfaces");
1174
1175     /* create reference surfaces */
1176     va_status = vaCreateSurfaces(va_dpy,
1177                                  VA_RT_FORMAT_YUV420, frame_width_mbaligned, frame_height_mbaligned,
1178                                  &ref_surface[0], SURFACE_NUM,
1179                                  NULL, 0);
1180     CHECK_VASTATUS(va_status, "vaCreateSurfaces");
1181
1182     tmp_surfaceid = (VASurfaceID *)calloc(2 * SURFACE_NUM, sizeof(VASurfaceID));
1183     memcpy(tmp_surfaceid, src_surface, SURFACE_NUM * sizeof(VASurfaceID));
1184     memcpy(tmp_surfaceid + SURFACE_NUM, ref_surface, SURFACE_NUM * sizeof(VASurfaceID));
1185     
1186     /* Create a context for this encode pipe */
1187     va_status = vaCreateContext(va_dpy, config_id,
1188                                 frame_width_mbaligned, frame_height_mbaligned,
1189                                 VA_PROGRESSIVE,
1190                                 tmp_surfaceid, 2 * SURFACE_NUM,
1191                                 &context_id);
1192     CHECK_VASTATUS(va_status, "vaCreateContext");
1193     free(tmp_surfaceid);
1194
1195     codedbuf_size = (frame_width_mbaligned * frame_height_mbaligned * 400) / (16*16);
1196
1197     for (i = 0; i < SURFACE_NUM; i++) {
1198         /* create coded buffer once for all
1199          * other VA buffers which won't be used again after vaRenderPicture.
1200          * so APP can always vaCreateBuffer for every frame
1201          * but coded buffer need to be mapped and accessed after vaRenderPicture/vaEndPicture
1202          * so VA won't maintain the coded buffer
1203          */
1204         va_status = vaCreateBuffer(va_dpy, context_id, VAEncCodedBufferType,
1205                 codedbuf_size, 1, NULL, &gl_surfaces[i].coded_buf);
1206         CHECK_VASTATUS(va_status, "vaCreateBuffer");
1207     }
1208
1209     /* create OpenGL objects */
1210     //glGenFramebuffers(SURFACE_NUM, fbos);
1211     
1212     for (i = 0; i < SURFACE_NUM; i++) {
1213         glGenTextures(1, &gl_surfaces[i].y_tex);
1214         glGenTextures(1, &gl_surfaces[i].cbcr_tex);
1215
1216         if (!use_zerocopy) {
1217             // Create Y image.
1218             glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].y_tex);
1219             glTexStorage2D(GL_TEXTURE_2D, 1, GL_R8, frame_width, frame_height);
1220
1221             // Create CbCr image.
1222             glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].cbcr_tex);
1223             glTexStorage2D(GL_TEXTURE_2D, 1, GL_RG8, frame_width / 2, frame_height / 2);
1224
1225             // Generate a PBO to read into. It doesn't necessarily fit 1:1 with the VA-API
1226             // buffers, due to potentially differing pitch.
1227             glGenBuffers(1, &gl_surfaces[i].pbo);
1228             glBindBuffer(GL_PIXEL_PACK_BUFFER, gl_surfaces[i].pbo);
1229             glBufferStorage(GL_PIXEL_PACK_BUFFER, frame_width * frame_height * 2, nullptr, GL_MAP_READ_BIT | GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT);
1230             uint8_t *ptr = (uint8_t *)glMapBufferRange(GL_PIXEL_PACK_BUFFER, 0, frame_width * frame_height * 2, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
1231             gl_surfaces[i].y_offset = 0;
1232             gl_surfaces[i].cbcr_offset = frame_width * frame_height;
1233             gl_surfaces[i].y_ptr = ptr + gl_surfaces[i].y_offset;
1234             gl_surfaces[i].cbcr_ptr = ptr + gl_surfaces[i].cbcr_offset;
1235             glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
1236         }
1237     }
1238
1239     for (i = 0; i < SURFACE_NUM; i++) {
1240         gl_surfaces[i].src_surface = src_surface[i];
1241         gl_surfaces[i].ref_surface = ref_surface[i];
1242     }
1243     
1244     return 0;
1245 }
1246
1247 // Given a list like 1 9 3 0 2 8 4 and a pivot element 3, will produce
1248 //
1249 //   2 1 0 [3] 4 8 9
1250 template<class T, class C>
1251 static void sort_two(T *begin, T *end, const T &pivot, const C &less_than)
1252 {
1253         T *middle = partition(begin, end, [&](const T &elem) { return less_than(elem, pivot); });
1254         sort(begin, middle, [&](const T &a, const T &b) { return less_than(b, a); });
1255         sort(middle, end, less_than);
1256 }
1257
1258 void H264EncoderImpl::update_ReferenceFrames(int frame_type)
1259 {
1260     int i;
1261     
1262     if (frame_type == FRAME_B)
1263         return;
1264
1265     CurrentCurrPic.flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
1266     numShortTerm++;
1267     if (numShortTerm > num_ref_frames)
1268         numShortTerm = num_ref_frames;
1269     for (i=numShortTerm-1; i>0; i--)
1270         ReferenceFrames[i] = ReferenceFrames[i-1];
1271     ReferenceFrames[0] = CurrentCurrPic;
1272     
1273     current_frame_num++;
1274     if (current_frame_num > MaxFrameNum)
1275         current_frame_num = 0;
1276 }
1277
1278
1279 int H264EncoderImpl::update_RefPicList(int frame_type)
1280 {
1281     const auto descending_by_frame_idx = [](const VAPictureH264 &a, const VAPictureH264 &b) {
1282         return a.frame_idx > b.frame_idx;
1283     };
1284     const auto ascending_by_top_field_order_cnt = [](const VAPictureH264 &a, const VAPictureH264 &b) {
1285         return a.TopFieldOrderCnt < b.TopFieldOrderCnt;
1286     };
1287     const auto descending_by_top_field_order_cnt = [](const VAPictureH264 &a, const VAPictureH264 &b) {
1288         return a.TopFieldOrderCnt > b.TopFieldOrderCnt;
1289     };
1290     
1291     if (frame_type == FRAME_P) {
1292         memcpy(RefPicList0_P, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
1293         sort(&RefPicList0_P[0], &RefPicList0_P[numShortTerm], descending_by_frame_idx);
1294     } else if (frame_type == FRAME_B) {
1295         memcpy(RefPicList0_B, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
1296         sort_two(&RefPicList0_B[0], &RefPicList0_B[numShortTerm], CurrentCurrPic, ascending_by_top_field_order_cnt);
1297
1298         memcpy(RefPicList1_B, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
1299         sort_two(&RefPicList1_B[0], &RefPicList1_B[numShortTerm], CurrentCurrPic, descending_by_top_field_order_cnt);
1300     }
1301     
1302     return 0;
1303 }
1304
1305
1306 int H264EncoderImpl::render_sequence()
1307 {
1308     VABufferID seq_param_buf, rc_param_buf, render_id[2];
1309     VAStatus va_status;
1310     VAEncMiscParameterBuffer *misc_param;
1311     VAEncMiscParameterRateControl *misc_rate_ctrl;
1312     
1313     seq_param.level_idc = 41 /*SH_LEVEL_3*/;
1314     seq_param.picture_width_in_mbs = frame_width_mbaligned / 16;
1315     seq_param.picture_height_in_mbs = frame_height_mbaligned / 16;
1316     seq_param.bits_per_second = frame_bitrate;
1317
1318     seq_param.intra_period = intra_period;
1319     seq_param.intra_idr_period = intra_idr_period;
1320     seq_param.ip_period = ip_period;
1321
1322     seq_param.max_num_ref_frames = num_ref_frames;
1323     seq_param.seq_fields.bits.frame_mbs_only_flag = 1;
1324     seq_param.time_scale = TIMEBASE * 2;
1325     seq_param.num_units_in_tick = 1; /* Tc = num_units_in_tick / scale */
1326     seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = Log2MaxPicOrderCntLsb - 4;
1327     seq_param.seq_fields.bits.log2_max_frame_num_minus4 = Log2MaxFrameNum - 4;;
1328     seq_param.seq_fields.bits.frame_mbs_only_flag = 1;
1329     seq_param.seq_fields.bits.chroma_format_idc = 1;
1330     seq_param.seq_fields.bits.direct_8x8_inference_flag = 1;
1331     
1332     if (frame_width != frame_width_mbaligned ||
1333         frame_height != frame_height_mbaligned) {
1334         seq_param.frame_cropping_flag = 1;
1335         seq_param.frame_crop_left_offset = 0;
1336         seq_param.frame_crop_right_offset = (frame_width_mbaligned - frame_width)/2;
1337         seq_param.frame_crop_top_offset = 0;
1338         seq_param.frame_crop_bottom_offset = (frame_height_mbaligned - frame_height)/2;
1339     }
1340     
1341     va_status = vaCreateBuffer(va_dpy, context_id,
1342                                VAEncSequenceParameterBufferType,
1343                                sizeof(seq_param), 1, &seq_param, &seq_param_buf);
1344     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1345     
1346     va_status = vaCreateBuffer(va_dpy, context_id,
1347                                VAEncMiscParameterBufferType,
1348                                sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
1349                                1, NULL, &rc_param_buf);
1350     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1351     
1352     vaMapBuffer(va_dpy, rc_param_buf, (void **)&misc_param);
1353     misc_param->type = VAEncMiscParameterTypeRateControl;
1354     misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data;
1355     memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl));
1356     misc_rate_ctrl->bits_per_second = frame_bitrate;
1357     misc_rate_ctrl->target_percentage = 66;
1358     misc_rate_ctrl->window_size = 1000;
1359     misc_rate_ctrl->initial_qp = initial_qp;
1360     misc_rate_ctrl->min_qp = minimal_qp;
1361     misc_rate_ctrl->basic_unit_size = 0;
1362     vaUnmapBuffer(va_dpy, rc_param_buf);
1363
1364     render_id[0] = seq_param_buf;
1365     render_id[1] = rc_param_buf;
1366     
1367     render_picture_and_delete(va_dpy, context_id, &render_id[0], 2);
1368     
1369     return 0;
1370 }
1371
1372 static int calc_poc(int pic_order_cnt_lsb, int frame_type)
1373 {
1374     static int PicOrderCntMsb_ref = 0, pic_order_cnt_lsb_ref = 0;
1375     int prevPicOrderCntMsb, prevPicOrderCntLsb;
1376     int PicOrderCntMsb, TopFieldOrderCnt;
1377     
1378     if (frame_type == FRAME_IDR)
1379         prevPicOrderCntMsb = prevPicOrderCntLsb = 0;
1380     else {
1381         prevPicOrderCntMsb = PicOrderCntMsb_ref;
1382         prevPicOrderCntLsb = pic_order_cnt_lsb_ref;
1383     }
1384     
1385     if ((pic_order_cnt_lsb < prevPicOrderCntLsb) &&
1386         ((prevPicOrderCntLsb - pic_order_cnt_lsb) >= (int)(MaxPicOrderCntLsb / 2)))
1387         PicOrderCntMsb = prevPicOrderCntMsb + MaxPicOrderCntLsb;
1388     else if ((pic_order_cnt_lsb > prevPicOrderCntLsb) &&
1389              ((pic_order_cnt_lsb - prevPicOrderCntLsb) > (int)(MaxPicOrderCntLsb / 2)))
1390         PicOrderCntMsb = prevPicOrderCntMsb - MaxPicOrderCntLsb;
1391     else
1392         PicOrderCntMsb = prevPicOrderCntMsb;
1393     
1394     TopFieldOrderCnt = PicOrderCntMsb + pic_order_cnt_lsb;
1395
1396     if (frame_type != FRAME_B) {
1397         PicOrderCntMsb_ref = PicOrderCntMsb;
1398         pic_order_cnt_lsb_ref = pic_order_cnt_lsb;
1399     }
1400     
1401     return TopFieldOrderCnt;
1402 }
1403
1404 int H264EncoderImpl::render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num)
1405 {
1406     VABufferID pic_param_buf;
1407     VAStatus va_status;
1408     int i = 0;
1409
1410     pic_param.CurrPic.picture_id = gl_surfaces[display_frame_num % SURFACE_NUM].ref_surface;
1411     pic_param.CurrPic.frame_idx = current_frame_num;
1412     pic_param.CurrPic.flags = 0;
1413     pic_param.CurrPic.TopFieldOrderCnt = calc_poc((display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb, frame_type);
1414     pic_param.CurrPic.BottomFieldOrderCnt = pic_param.CurrPic.TopFieldOrderCnt;
1415     CurrentCurrPic = pic_param.CurrPic;
1416
1417     memcpy(pic_param.ReferenceFrames, ReferenceFrames, numShortTerm*sizeof(VAPictureH264));
1418     for (i = numShortTerm; i < MAX_NUM_REF1; i++) {
1419         pic_param.ReferenceFrames[i].picture_id = VA_INVALID_SURFACE;
1420         pic_param.ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID;
1421     }
1422     
1423     pic_param.pic_fields.bits.idr_pic_flag = (frame_type == FRAME_IDR);
1424     pic_param.pic_fields.bits.reference_pic_flag = (frame_type != FRAME_B);
1425     pic_param.pic_fields.bits.entropy_coding_mode_flag = h264_entropy_mode;
1426     pic_param.pic_fields.bits.deblocking_filter_control_present_flag = 1;
1427     pic_param.frame_num = current_frame_num;
1428     pic_param.coded_buf = gl_surfaces[display_frame_num % SURFACE_NUM].coded_buf;
1429     pic_param.last_picture = false;  // FIXME
1430     pic_param.pic_init_qp = initial_qp;
1431
1432     va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
1433                                sizeof(pic_param), 1, &pic_param, &pic_param_buf);
1434     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1435
1436     render_picture_and_delete(va_dpy, context_id, &pic_param_buf, 1);
1437
1438     return 0;
1439 }
1440
1441 int H264EncoderImpl::render_packedsequence()
1442 {
1443     VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1444     VABufferID packedseq_para_bufid, packedseq_data_bufid, render_id[2];
1445     unsigned int length_in_bits;
1446     unsigned char *packedseq_buffer = NULL;
1447     VAStatus va_status;
1448
1449     length_in_bits = build_packed_seq_buffer(&packedseq_buffer); 
1450     
1451     packedheader_param_buffer.type = VAEncPackedHeaderSequence;
1452     
1453     packedheader_param_buffer.bit_length = length_in_bits; /*length_in_bits*/
1454     packedheader_param_buffer.has_emulation_bytes = 0;
1455     va_status = vaCreateBuffer(va_dpy,
1456                                context_id,
1457                                VAEncPackedHeaderParameterBufferType,
1458                                sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1459                                &packedseq_para_bufid);
1460     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1461
1462     va_status = vaCreateBuffer(va_dpy,
1463                                context_id,
1464                                VAEncPackedHeaderDataBufferType,
1465                                (length_in_bits + 7) / 8, 1, packedseq_buffer,
1466                                &packedseq_data_bufid);
1467     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1468
1469     render_id[0] = packedseq_para_bufid;
1470     render_id[1] = packedseq_data_bufid;
1471     render_picture_and_delete(va_dpy, context_id, render_id, 2);
1472
1473     free(packedseq_buffer);
1474     
1475     return 0;
1476 }
1477
1478
1479 int H264EncoderImpl::render_packedpicture()
1480 {
1481     VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1482     VABufferID packedpic_para_bufid, packedpic_data_bufid, render_id[2];
1483     unsigned int length_in_bits;
1484     unsigned char *packedpic_buffer = NULL;
1485     VAStatus va_status;
1486
1487     length_in_bits = build_packed_pic_buffer(&packedpic_buffer); 
1488     packedheader_param_buffer.type = VAEncPackedHeaderPicture;
1489     packedheader_param_buffer.bit_length = length_in_bits;
1490     packedheader_param_buffer.has_emulation_bytes = 0;
1491
1492     va_status = vaCreateBuffer(va_dpy,
1493                                context_id,
1494                                VAEncPackedHeaderParameterBufferType,
1495                                sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1496                                &packedpic_para_bufid);
1497     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1498
1499     va_status = vaCreateBuffer(va_dpy,
1500                                context_id,
1501                                VAEncPackedHeaderDataBufferType,
1502                                (length_in_bits + 7) / 8, 1, packedpic_buffer,
1503                                &packedpic_data_bufid);
1504     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1505
1506     render_id[0] = packedpic_para_bufid;
1507     render_id[1] = packedpic_data_bufid;
1508     render_picture_and_delete(va_dpy, context_id, render_id, 2);
1509
1510     free(packedpic_buffer);
1511     
1512     return 0;
1513 }
1514
1515 void H264EncoderImpl::render_packedslice()
1516 {
1517     VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1518     VABufferID packedslice_para_bufid, packedslice_data_bufid, render_id[2];
1519     unsigned int length_in_bits;
1520     unsigned char *packedslice_buffer = NULL;
1521     VAStatus va_status;
1522
1523     length_in_bits = build_packed_slice_buffer(&packedslice_buffer);
1524     packedheader_param_buffer.type = VAEncPackedHeaderSlice;
1525     packedheader_param_buffer.bit_length = length_in_bits;
1526     packedheader_param_buffer.has_emulation_bytes = 0;
1527
1528     va_status = vaCreateBuffer(va_dpy,
1529                                context_id,
1530                                VAEncPackedHeaderParameterBufferType,
1531                                sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1532                                &packedslice_para_bufid);
1533     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1534
1535     va_status = vaCreateBuffer(va_dpy,
1536                                context_id,
1537                                VAEncPackedHeaderDataBufferType,
1538                                (length_in_bits + 7) / 8, 1, packedslice_buffer,
1539                                &packedslice_data_bufid);
1540     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1541
1542     render_id[0] = packedslice_para_bufid;
1543     render_id[1] = packedslice_data_bufid;
1544     render_picture_and_delete(va_dpy, context_id, render_id, 2);
1545
1546     free(packedslice_buffer);
1547 }
1548
1549 int H264EncoderImpl::render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type)
1550 {
1551     VABufferID slice_param_buf;
1552     VAStatus va_status;
1553     int i;
1554
1555     update_RefPicList(frame_type);
1556     
1557     /* one frame, one slice */
1558     slice_param.macroblock_address = 0;
1559     slice_param.num_macroblocks = frame_width_mbaligned * frame_height_mbaligned/(16*16); /* Measured by MB */
1560     slice_param.slice_type = (frame_type == FRAME_IDR)?2:frame_type;
1561     if (frame_type == FRAME_IDR) {
1562         if (encoding_frame_num != 0)
1563             ++slice_param.idr_pic_id;
1564     } else if (frame_type == FRAME_P) {
1565         int refpiclist0_max = h264_maxref & 0xffff;
1566         memcpy(slice_param.RefPicList0, RefPicList0_P, refpiclist0_max*sizeof(VAPictureH264));
1567
1568         for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
1569             slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
1570             slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
1571         }
1572     } else if (frame_type == FRAME_B) {
1573         int refpiclist0_max = h264_maxref & 0xffff;
1574         int refpiclist1_max = (h264_maxref >> 16) & 0xffff;
1575
1576         memcpy(slice_param.RefPicList0, RefPicList0_B, refpiclist0_max*sizeof(VAPictureH264));
1577         for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
1578             slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
1579             slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
1580         }
1581
1582         memcpy(slice_param.RefPicList1, RefPicList1_B, refpiclist1_max*sizeof(VAPictureH264));
1583         for (i = refpiclist1_max; i < MAX_NUM_REF2; i++) {
1584             slice_param.RefPicList1[i].picture_id = VA_INVALID_SURFACE;
1585             slice_param.RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
1586         }
1587     }
1588
1589     slice_param.slice_alpha_c0_offset_div2 = 0;
1590     slice_param.slice_beta_offset_div2 = 0;
1591     slice_param.direct_spatial_mv_pred_flag = 1;
1592     slice_param.pic_order_cnt_lsb = (display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb;
1593     
1594
1595     if (h264_packedheader &&
1596         config_attrib[enc_packed_header_idx].value & VA_ENC_PACKED_HEADER_SLICE)
1597         render_packedslice();
1598
1599     va_status = vaCreateBuffer(va_dpy, context_id, VAEncSliceParameterBufferType,
1600                                sizeof(slice_param), 1, &slice_param, &slice_param_buf);
1601     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1602
1603     render_picture_and_delete(va_dpy, context_id, &slice_param_buf, 1);
1604
1605     return 0;
1606 }
1607
1608
1609
1610 void H264EncoderImpl::save_codeddata(storage_task task)
1611 {    
1612         VACodedBufferSegment *buf_list = NULL;
1613         VAStatus va_status;
1614
1615         string data;
1616
1617         va_status = vaMapBuffer(va_dpy, gl_surfaces[task.display_order % SURFACE_NUM].coded_buf, (void **)(&buf_list));
1618         CHECK_VASTATUS(va_status, "vaMapBuffer");
1619         while (buf_list != NULL) {
1620                 data.append(reinterpret_cast<const char *>(buf_list->buf), buf_list->size);
1621                 buf_list = (VACodedBufferSegment *) buf_list->next;
1622         }
1623         vaUnmapBuffer(va_dpy, gl_surfaces[task.display_order % SURFACE_NUM].coded_buf);
1624
1625         {
1626                 // Add video.
1627                 AVPacket pkt;
1628                 memset(&pkt, 0, sizeof(pkt));
1629                 pkt.buf = nullptr;
1630                 pkt.data = reinterpret_cast<uint8_t *>(&data[0]);
1631                 pkt.size = data.size();
1632                 pkt.stream_index = 0;
1633                 if (task.frame_type == FRAME_IDR) {
1634                         pkt.flags = AV_PKT_FLAG_KEY;
1635                 } else {
1636                         pkt.flags = 0;
1637                 }
1638                 //pkt.duration = 1;
1639                 if (file_mux) {
1640                         file_mux->add_packet(pkt, task.pts + global_delay(), task.dts + global_delay());
1641                 }
1642                 if (!global_flags.uncompressed_video_to_http) {
1643                         httpd->add_packet(pkt, task.pts + global_delay(), task.dts + global_delay());
1644                 }
1645         }
1646         // Encode and add all audio frames up to and including the pts of this video frame.
1647         for ( ;; ) {
1648                 int64_t audio_pts;
1649                 vector<float> audio;
1650                 {
1651                         unique_lock<mutex> lock(frame_queue_mutex);
1652                         frame_queue_nonempty.wait(lock, [this]{ return storage_thread_should_quit || !pending_audio_frames.empty(); });
1653                         if (storage_thread_should_quit && pending_audio_frames.empty()) return;
1654                         auto it = pending_audio_frames.begin();
1655                         if (it->first > task.pts) break;
1656                         audio_pts = it->first;
1657                         audio = move(it->second);
1658                         pending_audio_frames.erase(it); 
1659                 }
1660
1661                 if (context_audio_stream) {
1662                         encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, { file_mux.get() });
1663                         encode_audio(audio, &audio_queue_stream, audio_pts, context_audio_stream, { httpd });
1664                 } else {
1665                         encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, { httpd, file_mux.get() });
1666                 }
1667
1668                 if (audio_pts == task.pts) break;
1669         }
1670 }
1671
1672 void H264EncoderImpl::encode_audio(
1673         const vector<float> &audio,
1674         vector<float> *audio_queue,
1675         int64_t audio_pts,
1676         AVCodecContext *ctx,
1677         const vector<PacketDestination *> &destinations)
1678 {
1679         if (ctx->frame_size == 0) {
1680                 // No queueing needed.
1681                 assert(audio_queue->empty());
1682                 assert(audio.size() % 2 == 0);
1683                 encode_audio_one_frame(&audio[0], audio.size() / 2, audio_pts, ctx, destinations);
1684                 return;
1685         }
1686
1687         audio_queue->insert(audio_queue->end(), audio.begin(), audio.end());
1688         size_t sample_num;
1689         for (sample_num = 0;
1690              sample_num + ctx->frame_size * 2 <= audio_queue->size();
1691              sample_num += ctx->frame_size * 2) {
1692                 encode_audio_one_frame(&(*audio_queue)[sample_num],
1693                                        ctx->frame_size,
1694                                        audio_pts,
1695                                        ctx,
1696                                        destinations);
1697         }
1698         audio_queue->erase(audio_queue->begin(), audio_queue->begin() + sample_num);
1699 }
1700
1701 void H264EncoderImpl::encode_audio_one_frame(
1702         const float *audio,
1703         size_t num_samples,
1704         int64_t audio_pts,
1705         AVCodecContext *ctx,
1706         const vector<PacketDestination *> &destinations)
1707 {
1708         audio_frame->nb_samples = num_samples;
1709         audio_frame->channel_layout = AV_CH_LAYOUT_STEREO;
1710
1711         unique_ptr<float[]> planar_samples;
1712         unique_ptr<int32_t[]> int_samples;
1713
1714         if (ctx->sample_fmt == AV_SAMPLE_FMT_FLTP) {
1715                 audio_frame->format = AV_SAMPLE_FMT_FLTP;
1716                 planar_samples.reset(new float[num_samples * 2]);
1717                 avcodec_fill_audio_frame(audio_frame, 2, AV_SAMPLE_FMT_FLTP, (const uint8_t*)planar_samples.get(), num_samples * 2 * sizeof(float), 0);
1718                 for (size_t i = 0; i < num_samples; ++i) {
1719                         planar_samples[i] = audio[i * 2 + 0];
1720                         planar_samples[i + num_samples] = audio[i * 2 + 1];
1721                 }
1722         } else {
1723                 assert(ctx->sample_fmt == AV_SAMPLE_FMT_S32);
1724                 int_samples.reset(new int32_t[num_samples * 2]);
1725                 int ret = avcodec_fill_audio_frame(audio_frame, 2, AV_SAMPLE_FMT_S32, (const uint8_t*)int_samples.get(), num_samples * 2 * sizeof(int32_t), 1);
1726                 if (ret < 0) {
1727                         fprintf(stderr, "avcodec_fill_audio_frame() failed with %d\n", ret);
1728                         exit(1);
1729                 }
1730                 for (size_t i = 0; i < num_samples * 2; ++i) {
1731                         if (audio[i] >= 1.0f) {
1732                                 int_samples[i] = 2147483647;
1733                         } else if (audio[i] <= -1.0f) {
1734                                 int_samples[i] = -2147483647;
1735                         } else {
1736                                 int_samples[i] = lrintf(audio[i] * 2147483647.0f);
1737                         }
1738                 }
1739         }
1740
1741         AVPacket pkt;
1742         av_init_packet(&pkt);
1743         pkt.data = nullptr;
1744         pkt.size = 0;
1745         int got_output = 0;
1746         avcodec_encode_audio2(ctx, &pkt, audio_frame, &got_output);
1747         if (got_output) {
1748                 pkt.stream_index = 1;
1749                 pkt.flags = AV_PKT_FLAG_KEY;
1750                 for (PacketDestination *dest : destinations) {
1751                         dest->add_packet(pkt, audio_pts + global_delay(), audio_pts + global_delay());
1752                 }
1753         }
1754         // TODO: Delayed frames.
1755         av_frame_unref(audio_frame);
1756         av_free_packet(&pkt);
1757 }
1758
1759 // this is weird. but it seems to put a new frame onto the queue
1760 void H264EncoderImpl::storage_task_enqueue(storage_task task)
1761 {
1762         unique_lock<mutex> lock(storage_task_queue_mutex);
1763         storage_task_queue.push(move(task));
1764         storage_task_queue_changed.notify_all();
1765 }
1766
1767 void H264EncoderImpl::storage_task_thread()
1768 {
1769         for ( ;; ) {
1770                 storage_task current;
1771                 {
1772                         // wait until there's an encoded frame  
1773                         unique_lock<mutex> lock(storage_task_queue_mutex);
1774                         storage_task_queue_changed.wait(lock, [this]{ return storage_thread_should_quit || !storage_task_queue.empty(); });
1775                         if (storage_thread_should_quit && storage_task_queue.empty()) return;
1776                         current = move(storage_task_queue.front());
1777                         storage_task_queue.pop();
1778                 }
1779
1780                 VAStatus va_status;
1781            
1782                 // waits for data, then saves it to disk.
1783                 va_status = vaSyncSurface(va_dpy, gl_surfaces[current.display_order % SURFACE_NUM].src_surface);
1784                 CHECK_VASTATUS(va_status, "vaSyncSurface");
1785                 save_codeddata(move(current));
1786
1787                 {
1788                         unique_lock<mutex> lock(storage_task_queue_mutex);
1789                         srcsurface_status[current.display_order % SURFACE_NUM] = SRC_SURFACE_FREE;
1790                         storage_task_queue_changed.notify_all();
1791                 }
1792         }
1793 }
1794
1795 int H264EncoderImpl::release_encode()
1796 {
1797         for (unsigned i = 0; i < SURFACE_NUM; i++) {
1798                 vaDestroyBuffer(va_dpy, gl_surfaces[i].coded_buf);
1799                 vaDestroySurfaces(va_dpy, &gl_surfaces[i].src_surface, 1);
1800                 vaDestroySurfaces(va_dpy, &gl_surfaces[i].ref_surface, 1);
1801
1802                 if (!use_zerocopy) {
1803                         glBindBuffer(GL_PIXEL_PACK_BUFFER, gl_surfaces[i].pbo);
1804                         glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
1805                         glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
1806                         glDeleteBuffers(1, &gl_surfaces[i].pbo);
1807                 }
1808                 glDeleteTextures(1, &gl_surfaces[i].y_tex);
1809                 glDeleteTextures(1, &gl_surfaces[i].cbcr_tex);
1810         }
1811
1812         vaDestroyContext(va_dpy, context_id);
1813         vaDestroyConfig(va_dpy, config_id);
1814
1815         return 0;
1816 }
1817
1818 int H264EncoderImpl::deinit_va()
1819
1820     vaTerminate(va_dpy);
1821
1822     va_close_display(va_dpy);
1823
1824     return 0;
1825 }
1826
1827 namespace {
1828
1829 void init_audio_encoder(const string &codec_name, int bit_rate, AVCodecContext **ctx)
1830 {
1831         AVCodec *codec_audio = avcodec_find_encoder_by_name(codec_name.c_str());
1832         if (codec_audio == nullptr) {
1833                 fprintf(stderr, "ERROR: Could not find codec '%s'\n", codec_name.c_str());
1834                 exit(1);
1835         }
1836
1837         AVCodecContext *context_audio = avcodec_alloc_context3(codec_audio);
1838         context_audio->bit_rate = bit_rate;
1839         context_audio->sample_rate = OUTPUT_FREQUENCY;
1840
1841         // Choose sample format; we currently only support these two
1842         // (see encode_audio), so we're a bit picky.
1843         const AVSampleFormat *ptr = codec_audio->sample_fmts;
1844         for ( ; *ptr != -1; ++ptr) {
1845                 if (*ptr == AV_SAMPLE_FMT_FLTP || *ptr == AV_SAMPLE_FMT_S32) {
1846                         context_audio->sample_fmt = *ptr;
1847                         break;
1848                 }
1849         }
1850         if (*ptr == -1) {
1851                 fprintf(stderr, "ERROR: Audio codec does not support fltp or s32 sample formats\n");
1852                 exit(1);
1853         }
1854
1855         context_audio->channels = 2;
1856         context_audio->channel_layout = AV_CH_LAYOUT_STEREO;
1857         context_audio->time_base = AVRational{1, TIMEBASE};
1858         if (avcodec_open2(context_audio, codec_audio, NULL) < 0) {
1859                 fprintf(stderr, "Could not open codec '%s'\n", codec_name.c_str());
1860                 exit(1);
1861         }
1862
1863         *ctx = context_audio;
1864 }
1865
1866 }  // namespace
1867
1868 H264EncoderImpl::H264EncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
1869         : current_storage_frame(0), surface(surface), httpd(httpd)
1870 {
1871         init_audio_encoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE, &context_audio_file);
1872
1873         if (!global_flags.stream_audio_codec_name.empty()) {
1874                 init_audio_encoder(global_flags.stream_audio_codec_name,
1875                         global_flags.stream_audio_codec_bitrate, &context_audio_stream);
1876         }
1877
1878         audio_frame = av_frame_alloc();
1879
1880         frame_width = width;
1881         frame_height = height;
1882         frame_width_mbaligned = (frame_width + 15) & (~15);
1883         frame_height_mbaligned = (frame_height + 15) & (~15);
1884
1885         //print_input();
1886
1887         if (global_flags.uncompressed_video_to_http) {
1888                 reorderer.reset(new FrameReorderer(ip_period - 1, frame_width, frame_height));
1889         }
1890
1891         init_va(va_display);
1892         setup_encode();
1893
1894         // No frames are ready yet.
1895         memset(srcsurface_status, SRC_SURFACE_FREE, sizeof(srcsurface_status));
1896             
1897         memset(&seq_param, 0, sizeof(seq_param));
1898         memset(&pic_param, 0, sizeof(pic_param));
1899         memset(&slice_param, 0, sizeof(slice_param));
1900
1901         storage_thread = thread(&H264EncoderImpl::storage_task_thread, this);
1902
1903         encode_thread = thread([this]{
1904                 //SDL_GL_MakeCurrent(window, context);
1905                 QOpenGLContext *context = create_context(this->surface);
1906                 eglBindAPI(EGL_OPENGL_API);
1907                 if (!make_current(context, this->surface)) {
1908                         printf("display=%p surface=%p context=%p curr=%p err=%d\n", eglGetCurrentDisplay(), this->surface, context, eglGetCurrentContext(),
1909                                 eglGetError());
1910                         exit(1);
1911                 }
1912                 encode_thread_func();
1913         });
1914 }
1915
1916 H264EncoderImpl::~H264EncoderImpl()
1917 {
1918         shutdown();
1919         av_frame_free(&audio_frame);
1920
1921         // TODO: Destroy context.
1922 }
1923
1924 bool H264EncoderImpl::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
1925 {
1926         assert(!is_shutdown);
1927         {
1928                 // Wait until this frame slot is done encoding.
1929                 unique_lock<mutex> lock(storage_task_queue_mutex);
1930                 if (srcsurface_status[current_storage_frame % SURFACE_NUM] != SRC_SURFACE_FREE) {
1931                         fprintf(stderr, "Warning: Slot %d (for frame %d) is still encoding, rendering has to wait for H.264 encoder\n",
1932                                 current_storage_frame % SURFACE_NUM, current_storage_frame);
1933                 }
1934                 storage_task_queue_changed.wait(lock, [this]{ return storage_thread_should_quit || (srcsurface_status[current_storage_frame % SURFACE_NUM] == SRC_SURFACE_FREE); });
1935                 srcsurface_status[current_storage_frame % SURFACE_NUM] = SRC_SURFACE_IN_ENCODING;
1936                 if (storage_thread_should_quit) return false;
1937         }
1938
1939         //*fbo = fbos[current_storage_frame % SURFACE_NUM];
1940         GLSurface *surf = &gl_surfaces[current_storage_frame % SURFACE_NUM];
1941         *y_tex = surf->y_tex;
1942         *cbcr_tex = surf->cbcr_tex;
1943
1944         VAStatus va_status = vaDeriveImage(va_dpy, surf->src_surface, &surf->surface_image);
1945         CHECK_VASTATUS(va_status, "vaDeriveImage");
1946
1947         if (use_zerocopy) {
1948                 VABufferInfo buf_info;
1949                 buf_info.mem_type = VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME;  // or VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM?
1950                 va_status = vaAcquireBufferHandle(va_dpy, surf->surface_image.buf, &buf_info);
1951                 CHECK_VASTATUS(va_status, "vaAcquireBufferHandle");
1952
1953                 // Create Y image.
1954                 surf->y_egl_image = EGL_NO_IMAGE_KHR;
1955                 EGLint y_attribs[] = {
1956                         EGL_WIDTH, frame_width,
1957                         EGL_HEIGHT, frame_height,
1958                         EGL_LINUX_DRM_FOURCC_EXT, fourcc_code('R', '8', ' ', ' '),
1959                         EGL_DMA_BUF_PLANE0_FD_EXT, EGLint(buf_info.handle),
1960                         EGL_DMA_BUF_PLANE0_OFFSET_EXT, EGLint(surf->surface_image.offsets[0]),
1961                         EGL_DMA_BUF_PLANE0_PITCH_EXT, EGLint(surf->surface_image.pitches[0]),
1962                         EGL_NONE
1963                 };
1964
1965                 surf->y_egl_image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, y_attribs);
1966                 assert(surf->y_egl_image != EGL_NO_IMAGE_KHR);
1967
1968                 // Associate Y image to a texture.
1969                 glBindTexture(GL_TEXTURE_2D, *y_tex);
1970                 glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, surf->y_egl_image);
1971
1972                 // Create CbCr image.
1973                 surf->cbcr_egl_image = EGL_NO_IMAGE_KHR;
1974                 EGLint cbcr_attribs[] = {
1975                         EGL_WIDTH, frame_width,
1976                         EGL_HEIGHT, frame_height,
1977                         EGL_LINUX_DRM_FOURCC_EXT, fourcc_code('G', 'R', '8', '8'),
1978                         EGL_DMA_BUF_PLANE0_FD_EXT, EGLint(buf_info.handle),
1979                         EGL_DMA_BUF_PLANE0_OFFSET_EXT, EGLint(surf->surface_image.offsets[1]),
1980                         EGL_DMA_BUF_PLANE0_PITCH_EXT, EGLint(surf->surface_image.pitches[1]),
1981                         EGL_NONE
1982                 };
1983
1984                 surf->cbcr_egl_image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, cbcr_attribs);
1985                 assert(surf->cbcr_egl_image != EGL_NO_IMAGE_KHR);
1986
1987                 // Associate CbCr image to a texture.
1988                 glBindTexture(GL_TEXTURE_2D, *cbcr_tex);
1989                 glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, surf->cbcr_egl_image);
1990         }
1991
1992         return true;
1993 }
1994
1995 void H264EncoderImpl::add_audio(int64_t pts, vector<float> audio)
1996 {
1997         assert(!is_shutdown);
1998         {
1999                 unique_lock<mutex> lock(frame_queue_mutex);
2000                 pending_audio_frames[pts] = move(audio);
2001         }
2002         frame_queue_nonempty.notify_all();
2003 }
2004
2005 RefCountedGLsync H264EncoderImpl::end_frame(int64_t pts, const vector<RefCountedFrame> &input_frames)
2006 {
2007         assert(!is_shutdown);
2008
2009         if (!use_zerocopy) {
2010                 GLSurface *surf = &gl_surfaces[current_storage_frame % SURFACE_NUM];
2011
2012                 glPixelStorei(GL_PACK_ROW_LENGTH, 0);
2013                 check_error();
2014
2015                 glBindBuffer(GL_PIXEL_PACK_BUFFER, surf->pbo);
2016                 check_error();
2017
2018                 glBindTexture(GL_TEXTURE_2D, surf->y_tex);
2019                 check_error();
2020                 glGetTexImage(GL_TEXTURE_2D, 0, GL_RED, GL_UNSIGNED_BYTE, BUFFER_OFFSET(surf->y_offset));
2021                 check_error();
2022
2023                 glBindTexture(GL_TEXTURE_2D, surf->cbcr_tex);
2024                 check_error();
2025                 glGetTexImage(GL_TEXTURE_2D, 0, GL_RG, GL_UNSIGNED_BYTE, BUFFER_OFFSET(surf->cbcr_offset));
2026                 check_error();
2027
2028                 glBindTexture(GL_TEXTURE_2D, 0);
2029                 check_error();
2030                 glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
2031                 check_error();
2032
2033                 glMemoryBarrier(GL_TEXTURE_UPDATE_BARRIER_BIT | GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
2034                 check_error();
2035         }
2036
2037         RefCountedGLsync fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
2038         check_error();
2039         glFlush();  // Make the H.264 thread see the fence as soon as possible.
2040         check_error();
2041
2042         {
2043                 unique_lock<mutex> lock(frame_queue_mutex);
2044                 pending_video_frames[current_storage_frame] = PendingFrame{ fence, input_frames, pts };
2045                 ++current_storage_frame;
2046         }
2047         frame_queue_nonempty.notify_all();
2048         return fence;
2049 }
2050
2051 void H264EncoderImpl::shutdown()
2052 {
2053         if (is_shutdown) {
2054                 return;
2055         }
2056
2057         {
2058                 unique_lock<mutex> lock(frame_queue_mutex);
2059                 encode_thread_should_quit = true;
2060                 frame_queue_nonempty.notify_all();
2061         }
2062         encode_thread.join();
2063         {
2064                 unique_lock<mutex> lock(storage_task_queue_mutex);
2065                 storage_thread_should_quit = true;
2066                 frame_queue_nonempty.notify_all();
2067                 storage_task_queue_changed.notify_all();
2068         }
2069         storage_thread.join();
2070
2071         release_encode();
2072         deinit_va();
2073         is_shutdown = true;
2074 }
2075
2076 void H264EncoderImpl::open_output_file(const std::string &filename)
2077 {
2078         AVFormatContext *avctx = avformat_alloc_context();
2079         avctx->oformat = av_guess_format(NULL, filename.c_str(), NULL);
2080         assert(filename.size() < sizeof(avctx->filename) - 1);
2081         strcpy(avctx->filename, filename.c_str());
2082
2083         string url = "file:" + filename;
2084         int ret = avio_open2(&avctx->pb, url.c_str(), AVIO_FLAG_WRITE, &avctx->interrupt_callback, NULL);
2085         if (ret < 0) {
2086                 char tmp[AV_ERROR_MAX_STRING_SIZE];
2087                 fprintf(stderr, "%s: avio_open2() failed: %s\n", filename.c_str(), av_make_error_string(tmp, sizeof(tmp), ret));
2088                 exit(1);
2089         }
2090
2091         file_mux.reset(new Mux(avctx, frame_width, frame_height, Mux::CODEC_H264, TIMEBASE, DEFAULT_AUDIO_OUTPUT_BIT_RATE));
2092 }
2093
2094 void H264EncoderImpl::close_output_file()
2095 {
2096         file_mux.reset();
2097 }
2098
2099 void H264EncoderImpl::encode_thread_func()
2100 {
2101         int64_t last_dts = -1;
2102         int gop_start_display_frame_num = 0;
2103         for (int encoding_frame_num = 0; ; ++encoding_frame_num) {
2104                 PendingFrame frame;
2105                 int pts_lag;
2106                 int frame_type, display_frame_num;
2107                 encoding2display_order(encoding_frame_num, intra_period, intra_idr_period, ip_period,
2108                                        &display_frame_num, &frame_type, &pts_lag);
2109                 if (frame_type == FRAME_IDR) {
2110                         numShortTerm = 0;
2111                         current_frame_num = 0;
2112                         gop_start_display_frame_num = display_frame_num;
2113                 }
2114
2115                 {
2116                         unique_lock<mutex> lock(frame_queue_mutex);
2117                         frame_queue_nonempty.wait(lock, [this, display_frame_num]{
2118                                 return encode_thread_should_quit || pending_video_frames.count(display_frame_num) != 0;
2119                         });
2120                         if (encode_thread_should_quit && pending_video_frames.count(display_frame_num) == 0) {
2121                                 // We have queued frames that were supposed to be B-frames,
2122                                 // but will be no P-frame to encode them against. Encode them all
2123                                 // as P-frames instead. Note that this happens under the mutex,
2124                                 // but nobody else uses it at this point, since we're shutting down,
2125                                 // so there's no contention.
2126                                 encode_remaining_frames_as_p(encoding_frame_num, gop_start_display_frame_num, last_dts);
2127                                 return;
2128                         } else {
2129                                 frame = move(pending_video_frames[display_frame_num]);
2130                                 pending_video_frames.erase(display_frame_num);
2131                         }
2132                 }
2133
2134                 // Determine the dts of this frame.
2135                 int64_t dts;
2136                 if (pts_lag == -1) {
2137                         assert(last_dts != -1);
2138                         dts = last_dts + (TIMEBASE / MAX_FPS);
2139                 } else {
2140                         dts = frame.pts - pts_lag;
2141                 }
2142                 last_dts = dts;
2143
2144                 encode_frame(frame, encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type, frame.pts, dts);
2145         }
2146 }
2147
2148 void H264EncoderImpl::encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts)
2149 {
2150         if (pending_video_frames.empty()) {
2151                 return;
2152         }
2153
2154         for (auto &pending_frame : pending_video_frames) {
2155                 int display_frame_num = pending_frame.first;
2156                 assert(display_frame_num > 0);
2157                 PendingFrame frame = move(pending_frame.second);
2158                 int64_t dts = last_dts + (TIMEBASE / MAX_FPS);
2159                 printf("Finalizing encode: Encoding leftover frame %d as P-frame instead of B-frame.\n", display_frame_num);
2160                 encode_frame(frame, encoding_frame_num++, display_frame_num, gop_start_display_frame_num, FRAME_P, frame.pts, dts);
2161                 last_dts = dts;
2162         }
2163
2164         if (global_flags.uncompressed_video_to_http) {
2165                 // Add frames left in reorderer.
2166                 while (!reorderer->empty()) {
2167                         pair<int64_t, const uint8_t *> output_frame = reorderer->get_first_frame();
2168                         add_packet_for_uncompressed_frame(output_frame.first, output_frame.second);
2169                 }
2170         }
2171 }
2172
2173 void H264EncoderImpl::add_packet_for_uncompressed_frame(int64_t pts, const uint8_t *data)
2174 {
2175         AVPacket pkt;
2176         memset(&pkt, 0, sizeof(pkt));
2177         pkt.buf = nullptr;
2178         pkt.data = const_cast<uint8_t *>(data);
2179         pkt.size = frame_width * frame_height * 2;
2180         pkt.stream_index = 0;
2181         pkt.flags = AV_PKT_FLAG_KEY;
2182         httpd->add_packet(pkt, pts, pts);
2183 }
2184
2185 namespace {
2186
2187 void memcpy_with_pitch(uint8_t *dst, const uint8_t *src, size_t src_width, size_t dst_pitch, size_t height)
2188 {
2189         if (src_width == dst_pitch) {
2190                 memcpy(dst, src, src_width * height);
2191         } else {
2192                 for (size_t y = 0; y < height; ++y) {
2193                         const uint8_t *sptr = src + y * src_width;
2194                         uint8_t *dptr = dst + y * dst_pitch;
2195                         memcpy(dptr, sptr, src_width);
2196                 }
2197         }
2198 }
2199
2200 }  // namespace
2201
2202 void H264EncoderImpl::encode_frame(H264EncoderImpl::PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
2203                                    int frame_type, int64_t pts, int64_t dts)
2204 {
2205         // Wait for the GPU to be done with the frame.
2206         GLenum sync_status;
2207         do {
2208                 sync_status = glClientWaitSync(frame.fence.get(), 0, 1000000000);
2209                 check_error();
2210         } while (sync_status == GL_TIMEOUT_EXPIRED);
2211         assert(sync_status != GL_WAIT_FAILED);
2212
2213         // Release back any input frames we needed to render this frame.
2214         frame.input_frames.clear();
2215
2216         GLSurface *surf = &gl_surfaces[display_frame_num % SURFACE_NUM];
2217         VAStatus va_status;
2218
2219         if (use_zerocopy) {
2220                 eglDestroyImageKHR(eglGetCurrentDisplay(), surf->y_egl_image);
2221                 eglDestroyImageKHR(eglGetCurrentDisplay(), surf->cbcr_egl_image);
2222                 va_status = vaReleaseBufferHandle(va_dpy, surf->surface_image.buf);
2223                 CHECK_VASTATUS(va_status, "vaReleaseBufferHandle");
2224         } else {
2225                 unsigned char *surface_p = nullptr;
2226                 vaMapBuffer(va_dpy, surf->surface_image.buf, (void **)&surface_p);
2227
2228                 unsigned char *va_y_ptr = (unsigned char *)surface_p + surf->surface_image.offsets[0];
2229                 memcpy_with_pitch(va_y_ptr, surf->y_ptr, frame_width, surf->surface_image.pitches[0], frame_height);
2230
2231                 unsigned char *va_cbcr_ptr = (unsigned char *)surface_p + surf->surface_image.offsets[1];
2232                 memcpy_with_pitch(va_cbcr_ptr, surf->cbcr_ptr, (frame_width / 2) * sizeof(uint16_t), surf->surface_image.pitches[1], frame_height / 2);
2233
2234                 va_status = vaUnmapBuffer(va_dpy, surf->surface_image.buf);
2235                 CHECK_VASTATUS(va_status, "vaUnmapBuffer");
2236
2237                 if (global_flags.uncompressed_video_to_http) {
2238                         // Add uncompressed video. (Note that pts == dts here.)
2239                         // Delay needs to match audio.
2240                         pair<int64_t, const uint8_t *> output_frame = reorderer->reorder_frame(pts + global_delay(), reinterpret_cast<uint8_t *>(surf->y_ptr));
2241                         if (output_frame.second != nullptr) {
2242                                 add_packet_for_uncompressed_frame(output_frame.first, output_frame.second);
2243                         }
2244                 }
2245         }
2246
2247         va_status = vaDestroyImage(va_dpy, surf->surface_image.image_id);
2248         CHECK_VASTATUS(va_status, "vaDestroyImage");
2249
2250         // Schedule the frame for encoding.
2251         VASurfaceID va_surface = surf->src_surface;
2252         va_status = vaBeginPicture(va_dpy, context_id, va_surface);
2253         CHECK_VASTATUS(va_status, "vaBeginPicture");
2254
2255         if (frame_type == FRAME_IDR) {
2256                 render_sequence();
2257                 render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
2258                 if (h264_packedheader) {
2259                         render_packedsequence();
2260                         render_packedpicture();
2261                 }
2262         } else {
2263                 //render_sequence();
2264                 render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
2265         }
2266         render_slice(encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type);
2267
2268         va_status = vaEndPicture(va_dpy, context_id);
2269         CHECK_VASTATUS(va_status, "vaEndPicture");
2270
2271         // so now the data is done encoding (well, async job kicked off)...
2272         // we send that to the storage thread
2273         storage_task tmp;
2274         tmp.display_order = display_frame_num;
2275         tmp.frame_type = frame_type;
2276         tmp.pts = pts;
2277         tmp.dts = dts;
2278         storage_task_enqueue(move(tmp));
2279
2280         update_ReferenceFrames(frame_type);
2281 }
2282
2283 // Proxy object.
2284 H264Encoder::H264Encoder(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
2285         : impl(new H264EncoderImpl(surface, va_display, width, height, httpd)) {}
2286
2287 // Must be defined here because unique_ptr<> destructor needs to know the impl.
2288 H264Encoder::~H264Encoder() {}
2289
2290 void H264Encoder::add_audio(int64_t pts, vector<float> audio)
2291 {
2292         impl->add_audio(pts, audio);
2293 }
2294
2295 bool H264Encoder::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
2296 {
2297         return impl->begin_frame(y_tex, cbcr_tex);
2298 }
2299
2300 RefCountedGLsync H264Encoder::end_frame(int64_t pts, const vector<RefCountedFrame> &input_frames)
2301 {
2302         return impl->end_frame(pts, input_frames);
2303 }
2304
2305 void H264Encoder::shutdown()
2306 {
2307         impl->shutdown();
2308 }
2309
2310 void H264Encoder::open_output_file(const std::string &filename)
2311 {
2312         impl->open_output_file(filename);
2313 }
2314
2315 void H264Encoder::close_output_file()
2316 {
2317         impl->close_output_file();
2318 }