//#include "sysdeps.h"
#include "h264encode.h"

#include <movit/util.h>
#include <EGL/eglplatform.h>
#include <X11/X.h>
#include <X11/Xlib.h>
#include <assert.h>
#include <epoxy/egl.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavresample/avresample.h>
#include <libavutil/channel_layout.h>
#include <libavutil/frame.h>
#include <libavutil/rational.h>
#include <libavutil/samplefmt.h>
#include <libavutil/opt.h>
}
#include <libdrm/drm_fourcc.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>  // For close().
#include <va/va.h>
#include <va/va_drm.h>
#include <va/va_drmcommon.h>
#include <va/va_enc_h264.h>
#include <va/va_x11.h>
#include <algorithm>
#include <condition_variable>
#include <cstdint>
#include <map>
#include <memory>
#include <mutex>
#include <queue>
#include <stack>
#include <string>
#include <thread>
#include <utility>
#include <vector>

#include "context.h"
#include "defs.h"
#include "flags.h"
#include "httpd.h"
#include "mux.h"
#include "timebase.h"
#include "x264encode.h"

using namespace std;

class QOpenGLContext;
class QSurface;

#define CHECK_VASTATUS(va_status, func)                                 \
    if (va_status != VA_STATUS_SUCCESS) {                               \
        fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
        exit(1);                                                        \
    }

#define BUFFER_OFFSET(i) ((char *)NULL + (i))

//#include "loadsurface.h"

#define NAL_REF_IDC_NONE        0
#define NAL_REF_IDC_LOW         1
#define NAL_REF_IDC_MEDIUM      2
#define NAL_REF_IDC_HIGH        3

#define NAL_NON_IDR             1
#define NAL_IDR                 5
#define NAL_SPS                 7
#define NAL_PPS                 8
#define NAL_SEI                 6

#define SLICE_TYPE_P            0
#define SLICE_TYPE_B            1
#define SLICE_TYPE_I            2
#define IS_P_SLICE(type) (SLICE_TYPE_P == (type))
#define IS_B_SLICE(type) (SLICE_TYPE_B == (type))
#define IS_I_SLICE(type) (SLICE_TYPE_I == (type))


#define ENTROPY_MODE_CAVLC      0
#define ENTROPY_MODE_CABAC      1

#define PROFILE_IDC_BASELINE    66
#define PROFILE_IDC_MAIN        77
#define PROFILE_IDC_HIGH        100

#define BITSTREAM_ALLOCATE_STEPPING     4096
#define SURFACE_NUM 16 /* 16 surfaces for source YUV */
#define MAX_NUM_REF1 16 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
#define MAX_NUM_REF2 32 // Seemingly a hardware-fixed value, not related to SURFACE_NUM

static constexpr unsigned int Log2MaxFrameNum = 16;
static constexpr unsigned int Log2MaxPicOrderCntLsb = 8;
// frame_num and pic_order_cnt_lsb are written with Log2MaxFrameNum and
// Log2MaxPicOrderCntLsb bits respectively, so they must wrap at 2^N.
static constexpr unsigned int MaxFrameNum = (1 << Log2MaxFrameNum);
static constexpr unsigned int MaxPicOrderCntLsb = (1 << Log2MaxPicOrderCntLsb);
static constexpr int rc_default_modes[] = {  // Priority list of modes.
    VA_RC_VBR,
    VA_RC_CQP,
    VA_RC_VBR_CONSTRAINED,
    VA_RC_CBR,
    VA_RC_VCM,
    VA_RC_NONE,
};

/* thread to save coded data */
#define SRC_SURFACE_FREE        0
#define SRC_SURFACE_IN_ENCODING 1

struct __bitstream {
    unsigned int *buffer;
    int bit_offset;
    int max_size_in_dword;
};
typedef struct __bitstream bitstream;

// H.264 video comes out in encoding order (e.g. with two B-frames:
// 0, 3, 1, 2, 6, 4, 5, etc.), but uncompressed video needs to
// come in the right order. Since we do everything, including waiting
// for the frames to come out of OpenGL, in encoding order, we need
// a reordering buffer for uncompressed frames so that they come out
// correctly. We go the super-lazy way of not making it understand
// anything about the true order (which introduces some extra latency,
// though); we know that for N B-frames we need at most (N-1) frames
// in the reorder buffer, and can just sort on that.
//
// The class also deals with keeping a freelist as needed.
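//
// A worked example (with made-up pts values): with two B-frames per chain,
// so queue_length == 2, inserting frames with pts 0, 30, 10, 20 (encoding
// order) gives:
//
//   insert pts  0 -> nothing returned yet  (queue: { 0 })
//   insert pts 30 ->  0 returned           (queue: { 30 })
//   insert pts 10 -> 10 returned           (queue: { 30 })
//   insert pts 20 -> 20 returned           (queue: { 30 })
//
// i.e., frames come back out in pts order.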
class FrameReorderer {
public:
        FrameReorderer(unsigned queue_length, int width, int height);

        // Inserts the given frame, then returns the next frame to output
        // in pts order, if any. Otherwise -1 and nullptr.
        // Does _not_ take ownership of data; a copy is taken if needed.
        // The returned pointer is valid until the next call to reorder_frame, or destruction.
        // As a special case, if queue_length == 0, will just return pts and data (no reordering needed).
        pair<int64_t, const uint8_t *> reorder_frame(int64_t pts, const uint8_t *data);

        // The same as reorder_frame, but without inserting anything. Used to empty the queue.
        pair<int64_t, const uint8_t *> get_first_frame();

        bool empty() const { return frames.empty(); }

private:
        unsigned queue_length;
        int width, height;

        priority_queue<pair<int64_t, uint8_t *>> frames;
        stack<uint8_t *> freelist;  // Includes the last value returned from reorder_frame.

        // Owns all the pointers. Normally, freelist and frames could do this themselves,
        // except priority_queue doesn't work well with movable-only types.
        vector<unique_ptr<uint8_t[]>> owner;
};

FrameReorderer::FrameReorderer(unsigned queue_length, int width, int height)
    : queue_length(queue_length), width(width), height(height)
{
        for (unsigned i = 0; i < queue_length; ++i) {
                owner.emplace_back(new uint8_t[width * height * 2]);
                freelist.push(owner.back().get());
        }
}

pair<int64_t, const uint8_t *> FrameReorderer::reorder_frame(int64_t pts, const uint8_t *data)
{
        if (queue_length == 0) {
                return make_pair(pts, data);
        }

        assert(!freelist.empty());
        uint8_t *storage = freelist.top();
        freelist.pop();
        memcpy(storage, data, width * height * 2);
        frames.emplace(-pts, storage);  // Invert pts to get smallest first.

        if (frames.size() >= queue_length) {
                return get_first_frame();
        } else {
                return make_pair(-1, nullptr);
        }
}

pair<int64_t, const uint8_t *> FrameReorderer::get_first_frame()
{
        assert(!frames.empty());
        pair<int64_t, uint8_t *> storage = frames.top();
        frames.pop();
        int64_t pts = storage.first;
        freelist.push(storage.second);
        return make_pair(-pts, storage.second);  // Re-invert pts (see reorder_frame()).
}

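// The core encoder implementation. It receives frames as OpenGL textures
// (one Y texture and one interleaved CbCr texture per frame), encodes them
// to H.264 through VA-API (or hands them to X264Encoder/an uncompressed path
// for the HTTP stream, if so configured), encodes audio through libav, and
// sends the result on to the stream mux (HTTP) and optionally a file mux
// (local disk).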
class H264EncoderImpl : public KeyFrameSignalReceiver {
public:
        H264EncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd);
        ~H264EncoderImpl();
        void add_audio(int64_t pts, vector<float> audio);
        bool begin_frame(GLuint *y_tex, GLuint *cbcr_tex);
        RefCountedGLsync end_frame(int64_t pts, const vector<RefCountedFrame> &input_frames);
        void shutdown();
        void open_output_file(const std::string &filename);
        void close_output_file();

        virtual void signal_keyframe() override {
                stream_mux_writing_keyframes = true;
        }

private:
        struct storage_task {
                unsigned long long display_order;
                int frame_type;
                vector<float> audio;
                int64_t pts, dts;
        };
        struct PendingFrame {
                RefCountedGLsync fence;
                vector<RefCountedFrame> input_frames;
                int64_t pts;
        };

        // So we never get negative dts.
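        // E.g., with the default ip_period == 3, dts is delayed by two frame
        // periods at MAX_FPS, matching the longest possible B-frame chain.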
        int64_t global_delay() const {
                return int64_t(ip_period - 1) * (TIMEBASE / MAX_FPS);
        }

        void encode_thread_func();
        void encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts);
        void add_packet_for_uncompressed_frame(int64_t pts, const uint8_t *data);
        void encode_frame(PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
                          int frame_type, int64_t pts, int64_t dts);
        void storage_task_thread();
        void encode_audio(const vector<float> &audio,
                          vector<float> *audio_queue,
                          int64_t audio_pts,
                          AVCodecContext *ctx,
                          AVAudioResampleContext *resampler,
                          const vector<Mux *> &muxes);
        void encode_audio_one_frame(const float *audio,
                                    size_t num_samples,  // In each channel.
                                    int64_t audio_pts,
                                    AVCodecContext *ctx,
                                    AVAudioResampleContext *resampler,
                                    const vector<Mux *> &muxes);
        void storage_task_enqueue(storage_task task);
        void save_codeddata(storage_task task);
        int render_packedsequence();
        int render_packedpicture();
        void render_packedslice();
        int render_sequence();
        int render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num);
        void sps_rbsp(bitstream *bs);
        void pps_rbsp(bitstream *bs);
        int build_packed_pic_buffer(unsigned char **header_buffer);
        int render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type);
        void slice_header(bitstream *bs);
        int build_packed_seq_buffer(unsigned char **header_buffer);
        int build_packed_slice_buffer(unsigned char **header_buffer);
        int init_va(const string &va_display);
        int deinit_va();
        void enable_zerocopy_if_possible();
        VADisplay va_open_display(const string &va_display);
        void va_close_display(VADisplay va_dpy);
        int setup_encode();
        int release_encode();
        void update_ReferenceFrames(int frame_type);
        int update_RefPicList(int frame_type);
        void open_output_stream();
        void close_output_stream();
        static int write_packet_thunk(void *opaque, uint8_t *buf, int buf_size);
        int write_packet(uint8_t *buf, int buf_size);

        bool is_shutdown = false;
        bool use_zerocopy;
        int drm_fd = -1;

        thread encode_thread, storage_thread;

        mutex storage_task_queue_mutex;
        condition_variable storage_task_queue_changed;
        int srcsurface_status[SURFACE_NUM];  // protected by storage_task_queue_mutex
        queue<storage_task> storage_task_queue;  // protected by storage_task_queue_mutex
        bool storage_thread_should_quit = false;  // protected by storage_task_queue_mutex

        mutex frame_queue_mutex;
        condition_variable frame_queue_nonempty;
        bool encode_thread_should_quit = false;  // under frame_queue_mutex

        int current_storage_frame;

        map<int, PendingFrame> pending_video_frames;  // under frame_queue_mutex
        map<int64_t, vector<float>> pending_audio_frames;  // under frame_queue_mutex
        QSurface *surface;

        AVCodecContext *context_audio_file;
        AVCodecContext *context_audio_stream = nullptr;  // nullptr = don't code separate audio for stream.

        AVAudioResampleContext *resampler_audio_file = nullptr;
        AVAudioResampleContext *resampler_audio_stream = nullptr;

        vector<float> audio_queue_file;
        vector<float> audio_queue_stream;

        AVFrame *audio_frame = nullptr;
        HTTPD *httpd;
        unique_ptr<FrameReorderer> reorderer;
        unique_ptr<X264Encoder> x264_encoder;  // nullptr if not using x264.

        Display *x11_display = nullptr;

        // Encoder parameters
        VADisplay va_dpy;
        VAProfile h264_profile = (VAProfile)~0;
        VAConfigAttrib config_attrib[VAConfigAttribTypeMax];
        int config_attrib_num = 0, enc_packed_header_idx;

        struct GLSurface {
                VASurfaceID src_surface, ref_surface;
                VABufferID coded_buf;

                VAImage surface_image;
                GLuint y_tex, cbcr_tex;

                // Only if use_zerocopy == true.
                EGLImage y_egl_image, cbcr_egl_image;

                // Only if use_zerocopy == false.
                GLuint pbo;
                uint8_t *y_ptr, *cbcr_ptr;
                size_t y_offset, cbcr_offset;
        };
        GLSurface gl_surfaces[SURFACE_NUM];

        VAConfigID config_id;
        VAContextID context_id;
        VAEncSequenceParameterBufferH264 seq_param;
        VAEncPictureParameterBufferH264 pic_param;
        VAEncSliceParameterBufferH264 slice_param;
        VAPictureH264 CurrentCurrPic;
        VAPictureH264 ReferenceFrames[MAX_NUM_REF1], RefPicList0_P[MAX_NUM_REF2], RefPicList0_B[MAX_NUM_REF2], RefPicList1_B[MAX_NUM_REF2];

        // Static quality settings.
        static constexpr unsigned int frame_bitrate = 15000000 / 60;  // Doesn't really matter; only initial_qp does.
        static constexpr unsigned int num_ref_frames = 2;
        static constexpr int initial_qp = 15;
        static constexpr int minimal_qp = 0;
        static constexpr int intra_period = 30;
        static constexpr int intra_idr_period = MAX_FPS;  // About a second; more at lower frame rates. Not ideal.

        // Quality settings that are meant to be static, but might be overridden
        // by the profile.
        int constraint_set_flag = 0;
        int h264_packedheader = 0; /* support packed headers? */
        int h264_maxref = (1<<16|1);
        int h264_entropy_mode = 1; /* cabac */
        int ip_period = 3;

        int rc_mode = -1;
        unsigned int current_frame_num = 0;
        unsigned int numShortTerm = 0;

        int frame_width;
        int frame_height;
        int frame_width_mbaligned;
        int frame_height_mbaligned;

        unique_ptr<Mux> stream_mux;  // To HTTP.
        unique_ptr<Mux> file_mux;  // To local disk.

        // While the Mux object is being constructed, <stream_mux_writing_header>
        // is true, and the header is collected into stream_mux_header.
        bool stream_mux_writing_header;
        string stream_mux_header;

        bool stream_mux_writing_keyframes = false;
};

// vaRenderPicture() is supposed to destroy the buffer implicitly, but if we
// don't delete it here, we get leaks. The GStreamer implementation does the
// same.
static void render_picture_and_delete(VADisplay dpy, VAContextID context, VABufferID *buffers, int num_buffers)
{
    VAStatus va_status = vaRenderPicture(dpy, context, buffers, num_buffers);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    for (int i = 0; i < num_buffers; ++i) {
        va_status = vaDestroyBuffer(dpy, buffers[i]);
        CHECK_VASTATUS(va_status, "vaDestroyBuffer");
    }
}

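// Byte-swaps a 32-bit value; used to turn the bitstream writer's host-order
// dwords into the big-endian byte order of the H.264 bytestream.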
static unsigned int
va_swap32(unsigned int val)
{
    unsigned char *pval = (unsigned char *)&val;

    return ((pval[0] << 24)     |
            (pval[1] << 16)     |
            (pval[2] << 8)      |
            (pval[3] << 0));
}

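// A simple MSB-first bitstream writer, used for building the packed SPS/PPS/
// slice headers below. Bits are accumulated into host-order dwords, which are
// byte-swapped into place as each dword fills up (see va_swap32() above);
// bitstream_end() flushes any partially filled final dword.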
static void
bitstream_start(bitstream *bs)
{
    bs->max_size_in_dword = BITSTREAM_ALLOCATE_STEPPING;
    bs->buffer = (unsigned int *)calloc(bs->max_size_in_dword * sizeof(int), 1);
    bs->bit_offset = 0;
}

static void
bitstream_end(bitstream *bs)
{
    int pos = (bs->bit_offset >> 5);
    int bit_offset = (bs->bit_offset & 0x1f);
    int bit_left = 32 - bit_offset;

    if (bit_offset) {
        bs->buffer[pos] = va_swap32((bs->buffer[pos] << bit_left));
    }
}

static void
bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits)
{
    int pos = (bs->bit_offset >> 5);
    int bit_offset = (bs->bit_offset & 0x1f);
    int bit_left = 32 - bit_offset;

    if (!size_in_bits)
        return;

    bs->bit_offset += size_in_bits;

    if (bit_left > size_in_bits) {
        bs->buffer[pos] = (bs->buffer[pos] << size_in_bits | val);
    } else {
        size_in_bits -= bit_left;
        if (bit_left >= 32) {
            bs->buffer[pos] = (val >> size_in_bits);
        } else {
            bs->buffer[pos] = (bs->buffer[pos] << bit_left) | (val >> size_in_bits);
        }
        bs->buffer[pos] = va_swap32(bs->buffer[pos]);

        if (pos + 1 == bs->max_size_in_dword) {
            bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING;
            bs->buffer = (unsigned int *)realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int));
        }

        bs->buffer[pos + 1] = val;
    }
}

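// Writes val as an unsigned Exp-Golomb code, ue(v) in H.264 parlance:
// with n being the number of significant bits in val + 1, this is n-1
// zero bits followed by the n bits of val + 1.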
static void
bitstream_put_ue(bitstream *bs, unsigned int val)
{
    int size_in_bits = 0;
    int tmp_val = ++val;

    while (tmp_val) {
        tmp_val >>= 1;
        size_in_bits++;
    }

    bitstream_put_ui(bs, 0, size_in_bits - 1); // leading zeros
    bitstream_put_ui(bs, val, size_in_bits);
}

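// Writes val as a signed Exp-Golomb code, se(v): the value is first mapped
// to an unsigned one (0, 1, -1, 2, -2, ... map to 0, 1, 2, 3, 4, ...) and
// then written with bitstream_put_ue().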
static void
bitstream_put_se(bitstream *bs, int val)
{
    unsigned int new_val;

    if (val <= 0)
        new_val = -2 * val;
    else
        new_val = 2 * val - 1;

    bitstream_put_ue(bs, new_val);
}

static void
bitstream_byte_aligning(bitstream *bs, int bit)
{
    int bit_offset = (bs->bit_offset & 0x7);
    int bit_left = 8 - bit_offset;
    int new_val;

    if (!bit_offset)
        return;

    assert(bit == 0 || bit == 1);

    if (bit)
        new_val = (1 << bit_left) - 1;
    else
        new_val = 0;

    bitstream_put_ui(bs, new_val, bit_left);
}

static void
rbsp_trailing_bits(bitstream *bs)
{
    bitstream_put_ui(bs, 1, 1);
    bitstream_byte_aligning(bs, 0);
}

static void nal_start_code_prefix(bitstream *bs)
{
    bitstream_put_ui(bs, 0x00000001, 32);
}

static void nal_header(bitstream *bs, int nal_ref_idc, int nal_unit_type)
{
    bitstream_put_ui(bs, 0, 1);                /* forbidden_zero_bit: 0 */
    bitstream_put_ui(bs, nal_ref_idc, 2);
    bitstream_put_ui(bs, nal_unit_type, 5);
}

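// Writes the SPS RBSP (sans NAL header), mirroring the parameters set up in
// render_sequence(), plus a VUI with color, timing and HRD information.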
void H264EncoderImpl::sps_rbsp(bitstream *bs)
{
    int profile_idc = PROFILE_IDC_BASELINE;

    if (h264_profile == VAProfileH264High)
        profile_idc = PROFILE_IDC_HIGH;
    else if (h264_profile == VAProfileH264Main)
        profile_idc = PROFILE_IDC_MAIN;

    bitstream_put_ui(bs, profile_idc, 8);               /* profile_idc */
    bitstream_put_ui(bs, !!(constraint_set_flag & 1), 1);                         /* constraint_set0_flag */
    bitstream_put_ui(bs, !!(constraint_set_flag & 2), 1);                         /* constraint_set1_flag */
    bitstream_put_ui(bs, !!(constraint_set_flag & 4), 1);                         /* constraint_set2_flag */
    bitstream_put_ui(bs, !!(constraint_set_flag & 8), 1);                         /* constraint_set3_flag */
    bitstream_put_ui(bs, 0, 4);                         /* reserved_zero_4bits */
    bitstream_put_ui(bs, seq_param.level_idc, 8);      /* level_idc */
    bitstream_put_ue(bs, seq_param.seq_parameter_set_id);      /* seq_parameter_set_id */

    if (profile_idc == PROFILE_IDC_HIGH) {
        bitstream_put_ue(bs, 1);        /* chroma_format_idc = 1, 4:2:0 */
        bitstream_put_ue(bs, 0);        /* bit_depth_luma_minus8 */
        bitstream_put_ue(bs, 0);        /* bit_depth_chroma_minus8 */
        bitstream_put_ui(bs, 0, 1);     /* qpprime_y_zero_transform_bypass_flag */
        bitstream_put_ui(bs, 0, 1);     /* seq_scaling_matrix_present_flag */
    }

    bitstream_put_ue(bs, seq_param.seq_fields.bits.log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
    bitstream_put_ue(bs, seq_param.seq_fields.bits.pic_order_cnt_type);        /* pic_order_cnt_type */

    if (seq_param.seq_fields.bits.pic_order_cnt_type == 0)
        bitstream_put_ue(bs, seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4);     /* log2_max_pic_order_cnt_lsb_minus4 */
    else {
        assert(0);
    }

    bitstream_put_ue(bs, seq_param.max_num_ref_frames);        /* num_ref_frames */
    bitstream_put_ui(bs, 0, 1);                                 /* gaps_in_frame_num_value_allowed_flag */

    bitstream_put_ue(bs, seq_param.picture_width_in_mbs - 1);  /* pic_width_in_mbs_minus1 */
    bitstream_put_ue(bs, seq_param.picture_height_in_mbs - 1); /* pic_height_in_map_units_minus1 */
    bitstream_put_ui(bs, seq_param.seq_fields.bits.frame_mbs_only_flag, 1);    /* frame_mbs_only_flag */

    if (!seq_param.seq_fields.bits.frame_mbs_only_flag) {
        assert(0);
    }

    bitstream_put_ui(bs, seq_param.seq_fields.bits.direct_8x8_inference_flag, 1);      /* direct_8x8_inference_flag */
    bitstream_put_ui(bs, seq_param.frame_cropping_flag, 1);            /* frame_cropping_flag */

    if (seq_param.frame_cropping_flag) {
        bitstream_put_ue(bs, seq_param.frame_crop_left_offset);        /* frame_crop_left_offset */
        bitstream_put_ue(bs, seq_param.frame_crop_right_offset);       /* frame_crop_right_offset */
        bitstream_put_ue(bs, seq_param.frame_crop_top_offset);         /* frame_crop_top_offset */
        bitstream_put_ue(bs, seq_param.frame_crop_bottom_offset);      /* frame_crop_bottom_offset */
    }

    //if ( frame_bit_rate < 0 ) { //TODO EW: the vui header isn't correct
    if (false) {
        bitstream_put_ui(bs, 0, 1); /* vui_parameters_present_flag */
    } else {
        bitstream_put_ui(bs, 1, 1); /* vui_parameters_present_flag */
        bitstream_put_ui(bs, 0, 1); /* aspect_ratio_info_present_flag */
        bitstream_put_ui(bs, 0, 1); /* overscan_info_present_flag */
        bitstream_put_ui(bs, 1, 1); /* video_signal_type_present_flag */
        {
            bitstream_put_ui(bs, 5, 3);  /* video_format (5 = Unspecified) */
            bitstream_put_ui(bs, 0, 1);  /* video_full_range_flag */
            bitstream_put_ui(bs, 1, 1);  /* colour_description_present_flag */
            {
                bitstream_put_ui(bs, 1, 8);  /* colour_primaries (1 = BT.709) */
                bitstream_put_ui(bs, 2, 8);  /* transfer_characteristics (2 = unspecified, since we use sRGB) */
                bitstream_put_ui(bs, 6, 8);  /* matrix_coefficients (6 = BT.601/SMPTE 170M) */
            }
        }
        bitstream_put_ui(bs, 0, 1); /* chroma_loc_info_present_flag */
        bitstream_put_ui(bs, 1, 1); /* timing_info_present_flag */
        {
            bitstream_put_ui(bs, 1, 32);  /* num_units_in_tick */
            bitstream_put_ui(bs, TIMEBASE * 2, 32);  /* time_scale */
            bitstream_put_ui(bs, 1, 1);  /* fixed_frame_rate_flag */
        }
        bitstream_put_ui(bs, 1, 1); /* nal_hrd_parameters_present_flag */
        {
            // hrd_parameters
            bitstream_put_ue(bs, 0);    /* cpb_cnt_minus1 */
            bitstream_put_ui(bs, 4, 4); /* bit_rate_scale */
            bitstream_put_ui(bs, 6, 4); /* cpb_size_scale */

            bitstream_put_ue(bs, frame_bitrate - 1); /* bit_rate_value_minus1[0] */
            bitstream_put_ue(bs, frame_bitrate*8 - 1); /* cpb_size_value_minus1[0] */
            bitstream_put_ui(bs, 1, 1);  /* cbr_flag[0] */

            bitstream_put_ui(bs, 23, 5);   /* initial_cpb_removal_delay_length_minus1 */
            bitstream_put_ui(bs, 23, 5);   /* cpb_removal_delay_length_minus1 */
            bitstream_put_ui(bs, 23, 5);   /* dpb_output_delay_length_minus1 */
            bitstream_put_ui(bs, 23, 5);   /* time_offset_length  */
        }
        bitstream_put_ui(bs, 0, 1);   /* vcl_hrd_parameters_present_flag */
        bitstream_put_ui(bs, 0, 1);   /* low_delay_hrd_flag */

        bitstream_put_ui(bs, 0, 1); /* pic_struct_present_flag */
        bitstream_put_ui(bs, 0, 1); /* bitstream_restriction_flag */
    }

    rbsp_trailing_bits(bs);     /* rbsp_trailing_bits */
}


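// Writes the PPS RBSP (sans NAL header), mirroring pic_param.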
void H264EncoderImpl::pps_rbsp(bitstream *bs)
{
    bitstream_put_ue(bs, pic_param.pic_parameter_set_id);      /* pic_parameter_set_id */
    bitstream_put_ue(bs, pic_param.seq_parameter_set_id);      /* seq_parameter_set_id */

    bitstream_put_ui(bs, pic_param.pic_fields.bits.entropy_coding_mode_flag, 1);  /* entropy_coding_mode_flag */

    bitstream_put_ui(bs, 0, 1);                         /* pic_order_present_flag: 0 */

    bitstream_put_ue(bs, 0);                            /* num_slice_groups_minus1 */

    bitstream_put_ue(bs, pic_param.num_ref_idx_l0_active_minus1);      /* num_ref_idx_l0_active_minus1 */
    bitstream_put_ue(bs, pic_param.num_ref_idx_l1_active_minus1);      /* num_ref_idx_l1_active_minus1 */

    bitstream_put_ui(bs, pic_param.pic_fields.bits.weighted_pred_flag, 1);     /* weighted_pred_flag: 0 */
    bitstream_put_ui(bs, pic_param.pic_fields.bits.weighted_bipred_idc, 2);     /* weighted_bipred_idc: 0 */

    bitstream_put_se(bs, pic_param.pic_init_qp - 26);  /* pic_init_qp_minus26 */
    bitstream_put_se(bs, 0);                            /* pic_init_qs_minus26 */
    bitstream_put_se(bs, 0);                            /* chroma_qp_index_offset */

    bitstream_put_ui(bs, pic_param.pic_fields.bits.deblocking_filter_control_present_flag, 1); /* deblocking_filter_control_present_flag */
    bitstream_put_ui(bs, 0, 1);                         /* constrained_intra_pred_flag */
    bitstream_put_ui(bs, 0, 1);                         /* redundant_pic_cnt_present_flag */

    /* more_rbsp_data */
    bitstream_put_ui(bs, pic_param.pic_fields.bits.transform_8x8_mode_flag, 1);    /* transform_8x8_mode_flag */
    bitstream_put_ui(bs, 0, 1);                         /* pic_scaling_matrix_present_flag */
    bitstream_put_se(bs, pic_param.second_chroma_qp_index_offset);    /* second_chroma_qp_index_offset */

    rbsp_trailing_bits(bs);
}

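// Writes the slice header, mirroring slice_param and pic_param. Only the
// cases we actually generate (progressive encode, pic_order_cnt_type == 0,
// no reference picture list reordering, no weighted prediction) are
// handled; everything else asserts out.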
void H264EncoderImpl::slice_header(bitstream *bs)
{
    int first_mb_in_slice = slice_param.macroblock_address;

    bitstream_put_ue(bs, first_mb_in_slice);        /* first_mb_in_slice: 0 */
    bitstream_put_ue(bs, slice_param.slice_type);   /* slice_type */
    bitstream_put_ue(bs, slice_param.pic_parameter_set_id);        /* pic_parameter_set_id: 0 */
    bitstream_put_ui(bs, pic_param.frame_num, seq_param.seq_fields.bits.log2_max_frame_num_minus4 + 4); /* frame_num */

    /* frame_mbs_only_flag == 1 */
    if (!seq_param.seq_fields.bits.frame_mbs_only_flag) {
        /* FIXME: */
        assert(0);
    }

    if (pic_param.pic_fields.bits.idr_pic_flag)
        bitstream_put_ue(bs, slice_param.idr_pic_id);           /* idr_pic_id: 0 */

    if (seq_param.seq_fields.bits.pic_order_cnt_type == 0) {
        bitstream_put_ui(bs, pic_param.CurrPic.TopFieldOrderCnt, seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 + 4);
        /* pic_order_present_flag == 0 */
    } else {
        /* FIXME: */
        assert(0);
    }

    /* redundant_pic_cnt_present_flag == 0 */
    /* slice type */
    if (IS_P_SLICE(slice_param.slice_type)) {
        bitstream_put_ui(bs, slice_param.num_ref_idx_active_override_flag, 1);            /* num_ref_idx_active_override_flag: */

        if (slice_param.num_ref_idx_active_override_flag)
            bitstream_put_ue(bs, slice_param.num_ref_idx_l0_active_minus1);

        /* ref_pic_list_reordering */
        bitstream_put_ui(bs, 0, 1);            /* ref_pic_list_reordering_flag_l0: 0 */
    } else if (IS_B_SLICE(slice_param.slice_type)) {
        bitstream_put_ui(bs, slice_param.direct_spatial_mv_pred_flag, 1);            /* direct_spatial_mv_pred: 1 */

        bitstream_put_ui(bs, slice_param.num_ref_idx_active_override_flag, 1);       /* num_ref_idx_active_override_flag: */

        if (slice_param.num_ref_idx_active_override_flag) {
            bitstream_put_ue(bs, slice_param.num_ref_idx_l0_active_minus1);
            bitstream_put_ue(bs, slice_param.num_ref_idx_l1_active_minus1);
        }

        /* ref_pic_list_reordering */
        bitstream_put_ui(bs, 0, 1);            /* ref_pic_list_reordering_flag_l0: 0 */
        bitstream_put_ui(bs, 0, 1);            /* ref_pic_list_reordering_flag_l1: 0 */
    }

    if ((pic_param.pic_fields.bits.weighted_pred_flag &&
         IS_P_SLICE(slice_param.slice_type)) ||
        ((pic_param.pic_fields.bits.weighted_bipred_idc == 1) &&
         IS_B_SLICE(slice_param.slice_type))) {
        /* FIXME: fill weight/offset table */
        assert(0);
    }

    /* dec_ref_pic_marking */
    if (pic_param.pic_fields.bits.reference_pic_flag) {     /* nal_ref_idc != 0 */
        unsigned char no_output_of_prior_pics_flag = 0;
        unsigned char long_term_reference_flag = 0;
        unsigned char adaptive_ref_pic_marking_mode_flag = 0;

        if (pic_param.pic_fields.bits.idr_pic_flag) {
            bitstream_put_ui(bs, no_output_of_prior_pics_flag, 1);            /* no_output_of_prior_pics_flag: 0 */
            bitstream_put_ui(bs, long_term_reference_flag, 1);            /* long_term_reference_flag: 0 */
        } else {
            bitstream_put_ui(bs, adaptive_ref_pic_marking_mode_flag, 1);            /* adaptive_ref_pic_marking_mode_flag: 0 */
        }
    }

    if (pic_param.pic_fields.bits.entropy_coding_mode_flag &&
        !IS_I_SLICE(slice_param.slice_type))
        bitstream_put_ue(bs, slice_param.cabac_init_idc);               /* cabac_init_idc: 0 */

    bitstream_put_se(bs, slice_param.slice_qp_delta);                   /* slice_qp_delta: 0 */

    /* ignore for SP/SI */

    if (pic_param.pic_fields.bits.deblocking_filter_control_present_flag) {
        bitstream_put_ue(bs, slice_param.disable_deblocking_filter_idc);           /* disable_deblocking_filter_idc: 0 */

        if (slice_param.disable_deblocking_filter_idc != 1) {
            bitstream_put_se(bs, slice_param.slice_alpha_c0_offset_div2);          /* slice_alpha_c0_offset_div2: 2 */
            bitstream_put_se(bs, slice_param.slice_beta_offset_div2);              /* slice_beta_offset_div2: 2 */
        }
    }

    if (pic_param.pic_fields.bits.entropy_coding_mode_flag) {
        bitstream_byte_aligning(bs, 1);
    }
}

int H264EncoderImpl::build_packed_pic_buffer(unsigned char **header_buffer)
{
    bitstream bs;

    bitstream_start(&bs);
    nal_start_code_prefix(&bs);
    nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS);
    pps_rbsp(&bs);
    bitstream_end(&bs);

    *header_buffer = (unsigned char *)bs.buffer;
    return bs.bit_offset;
}

int
H264EncoderImpl::build_packed_seq_buffer(unsigned char **header_buffer)
{
    bitstream bs;

    bitstream_start(&bs);
    nal_start_code_prefix(&bs);
    nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS);
    sps_rbsp(&bs);
    bitstream_end(&bs);

    *header_buffer = (unsigned char *)bs.buffer;
    return bs.bit_offset;
}

int H264EncoderImpl::build_packed_slice_buffer(unsigned char **header_buffer)
{
    bitstream bs;
    int is_idr = !!pic_param.pic_fields.bits.idr_pic_flag;
    int is_ref = !!pic_param.pic_fields.bits.reference_pic_flag;

    bitstream_start(&bs);
    nal_start_code_prefix(&bs);

    if (IS_I_SLICE(slice_param.slice_type)) {
        nal_header(&bs, NAL_REF_IDC_HIGH, is_idr ? NAL_IDR : NAL_NON_IDR);
    } else if (IS_P_SLICE(slice_param.slice_type)) {
        nal_header(&bs, NAL_REF_IDC_MEDIUM, NAL_NON_IDR);
    } else {
        assert(IS_B_SLICE(slice_param.slice_type));
        nal_header(&bs, is_ref ? NAL_REF_IDC_LOW : NAL_REF_IDC_NONE, NAL_NON_IDR);
    }

    slice_header(&bs);
    bitstream_end(&bs);

    *header_buffer = (unsigned char *)bs.buffer;
    return bs.bit_offset;
}


/*
  Assume the frame sequence is: Frame#0, #1, #2, ..., #M, ..., #X, ... (encoding order)
  1) period between Frame #X and Frame #N = #X - #N
  2) 0 means infinite for intra_period/intra_idr_period, and 0 is invalid for ip_period
  3) intra_idr_period % intra_period (intra_period > 0) and intra_period % ip_period must be 0
  4) intra_period and intra_idr_period take precedence over ip_period
  5) if ip_period > 1, intra_period and intra_idr_period are not the strict periods
     of I/IDR frames; see the examples below
  -------------------------------------------------------------------
  intra_period intra_idr_period ip_period frame sequence (intra_period/intra_idr_period/ip_period)
  0            ignored          1          IDRPPPPPPP ...     (No IDR/I any more)
  0            ignored        >=2          IDR(PBB)(PBB)...   (No IDR/I any more)
  1            0                ignored    IDRIIIIIII...      (No IDR any more)
  1            1                ignored    IDR IDR IDR IDR...
  1            >=2              ignored    IDRII IDRII IDR... (1/3/ignore)
  >=2          0                1          IDRPPP IPPP I...   (3/0/1)
  >=2          0              >=2          IDR(PBB)(PBB)(IBB) (6/0/3)
                                              (PBB)(IBB)(PBB)(IBB)...
  >=2          >=2              1          IDRPPPPP IPPPPP IPPPPP (6/18/1)
                                           IDRPPPPP IPPPPP IPPPPP...
  >=2          >=2              >=2        {IDR(PBB)(PBB)(IBB)(PBB)(IBB)(PBB)} (6/18/3)
                                           {IDR(PBB)(PBB)(IBB)(PBB)(IBB)(PBB)}...
                                           {IDR(PBB)(PBB)(IBB)(PBB)}           (6/12/3)
                                           {IDR(PBB)(PBB)(IBB)(PBB)}...
                                           {IDR(PBB)(PBB)}                     (6/6/3)
                                           {IDR(PBB)(PBB)}.
*/

// General pts/dts strategy:
//
// Getting pts and dts right with variable frame rate (VFR) and B-frames can be a
// bit tricky. We assume first of all that the frame rate never goes _above_
// MAX_FPS, which gives us a frame period N. The decoder can always decode
// at least this fast, as long as dts <= pts (a frame is never presented
// before it is decoded). Furthermore, we never have longer chains of
// B-frames than a fixed constant C. (In a B-frame chain, we say that the base
// I/P-frame has order O=0, the B-frame depending on it directly has order O=1,
// etc. The last frame in the chain, which no B-frames depend on, is the “tip”
// frame, with an order O <= C.)
//
// Many strategies are possible, but we establish these rules:
//
//  - Tip frames have dts = pts - (C-O)*N.
//  - Non-tip frames have dts = dts_last + N.
//
// An example, with C=2 and N=10 and the data flow shown with arrows:
//
//        I  B  P  B  B  P
//   pts: 30 40 50 60 70 80
//        ↓  ↓     ↓
//   dts: 10 30 20 60 50←40
//         |  |  ↑        ↑
//         `--|--'        |
//             `----------'
//
// To show that this works fine also with irregular spacings, let's say that
// the third frame is delayed a bit (something earlier was dropped). Now the
// situation looks like this:
//
//        I  B  P  B  B   P
//   pts: 30 40 80 90 100 110
//        ↓  ↓     ↓
//   dts: 10 30 20 90 50←40
//         |  |  ↑        ↑
//         `--|--'        |
//             `----------'
//
// The resetting on every tip frame makes sure dts never ends up lagging a lot
// behind pts, and the subtraction of (C-O)*N makes sure dts <= pts.
//
// In the output of this function, if <pts_lag> is >= 0, it means to set the
// dts to the current pts minus <pts_lag>, while if it's -1, the frame is not
// a tip frame and should be given a dts based on the previous one.
#define FRAME_P 0
#define FRAME_B 1
#define FRAME_I 2
#define FRAME_IDR 7
void encoding2display_order(
    int encoding_order, int intra_period,
    int intra_idr_period, int ip_period,
    int *displaying_order,
    int *frame_type, int *pts_lag)
{
    int encoding_order_gop = 0;

    *pts_lag = 0;

    if (intra_period == 1) { /* all are I/IDR frames */
        *displaying_order = encoding_order;
        if (intra_idr_period == 0)
            *frame_type = (encoding_order == 0) ? FRAME_IDR : FRAME_I;
        else
            *frame_type = (encoding_order % intra_idr_period == 0) ? FRAME_IDR : FRAME_I;
        return;
    }

    if (intra_period == 0)
        intra_idr_period = 0;

    if (ip_period == 1) {
        // No B-frames, sequence is like IDR PPPPP IPPPPP.
        encoding_order_gop = (intra_idr_period == 0) ? encoding_order : (encoding_order % intra_idr_period);
        *displaying_order = encoding_order;

        if (encoding_order_gop == 0) { /* the first frame */
            *frame_type = FRAME_IDR;
        } else if (intra_period != 0 && /* have I frames */
                   encoding_order_gop >= 2 &&
                   (encoding_order_gop % intra_period == 0)) {
            *frame_type = FRAME_I;
        } else {
            *frame_type = FRAME_P;
        }
        return;
    }

    // We have B-frames. Sequence is like IDR (PBB)(PBB)(IBB)(PBB).
    encoding_order_gop = (intra_idr_period == 0) ? encoding_order : (encoding_order % (intra_idr_period + 1));
    *pts_lag = -1;  // Most frames are not tip frames.

    if (encoding_order_gop == 0) { /* the first frame */
        *frame_type = FRAME_IDR;
        *displaying_order = encoding_order;
        // IDR frames are a special case; I honestly can't find the logic behind
        // why this is the right thing, but it seems to line up nicely in practice :-)
        *pts_lag = TIMEBASE / MAX_FPS;
    } else if (((encoding_order_gop - 1) % ip_period) != 0) { /* B frames */
        *frame_type = FRAME_B;
        *displaying_order = encoding_order - 1;
        if ((encoding_order_gop % ip_period) == 0) {
            *pts_lag = 0;  // Last B-frame.
        }
    } else if (intra_period != 0 && /* have I frames */
               encoding_order_gop >= 2 &&
               ((encoding_order_gop - 1) / ip_period % (intra_period / ip_period)) == 0) {
        *frame_type = FRAME_I;
        *displaying_order = encoding_order + ip_period - 1;
    } else {
        *frame_type = FRAME_P;
        *displaying_order = encoding_order + ip_period - 1;
    }
}
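
// A worked example of the mapping above, with ip_period == 3 and no extra
// I-frames within the GOP (intra_period == 0, and thus intra_idr_period == 0):
//
//   encoding order:  0    1  2  3  4  5  6  ...
//   frame type:      IDR  P  B  B  P  B  B  ...
//   display order:   0    3  1  2  6  4  5  ...
//
// which is exactly the 0, 3, 1, 2, 6, 4, 5 sequence mentioned in the
// FrameReorderer comment near the top of the file.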


static const char *rc_to_string(int rc_mode)
{
    switch (rc_mode) {
    case VA_RC_NONE:
        return "NONE";
    case VA_RC_CBR:
        return "CBR";
    case VA_RC_VBR:
        return "VBR";
    case VA_RC_VCM:
        return "VCM";
    case VA_RC_CQP:
        return "CQP";
    case VA_RC_VBR_CONSTRAINED:
        return "VBR_CONSTRAINED";
    default:
        return "Unknown";
    }
}

void H264EncoderImpl::enable_zerocopy_if_possible()
{
        if (global_flags.uncompressed_video_to_http) {
                fprintf(stderr, "Disabling zerocopy H.264 encoding due to --uncompressed_video_to_http.\n");
                use_zerocopy = false;
        } else if (global_flags.x264_video_to_http) {
                fprintf(stderr, "Disabling zerocopy H.264 encoding due to --x264_video_to_http.\n");
                use_zerocopy = false;
        } else {
                use_zerocopy = true;
        }
}

VADisplay H264EncoderImpl::va_open_display(const string &va_display)
{
        if (va_display.empty()) {
                x11_display = XOpenDisplay(NULL);
                if (!x11_display) {
                        fprintf(stderr, "error: can't connect to X server!\n");
                        return NULL;
                }
                enable_zerocopy_if_possible();
                return vaGetDisplay(x11_display);
        } else if (va_display[0] != '/') {
                x11_display = XOpenDisplay(va_display.c_str());
                if (!x11_display) {
                        fprintf(stderr, "error: can't connect to X server!\n");
                        return NULL;
                }
                enable_zerocopy_if_possible();
                return vaGetDisplay(x11_display);
        } else {
                drm_fd = open(va_display.c_str(), O_RDWR);
                if (drm_fd == -1) {
                        perror(va_display.c_str());
                        return NULL;
                }
                use_zerocopy = false;
                return vaGetDisplayDRM(drm_fd);
        }
}

void H264EncoderImpl::va_close_display(VADisplay va_dpy)
{
        if (x11_display) {
                XCloseDisplay(x11_display);
                x11_display = nullptr;
        }
        if (drm_fd != -1) {
                close(drm_fd);
        }
}

int H264EncoderImpl::init_va(const string &va_display)
{
    VAProfile profile_list[]={VAProfileH264High, VAProfileH264Main, VAProfileH264Baseline, VAProfileH264ConstrainedBaseline};
    VAEntrypoint *entrypoints;
    int num_entrypoints, slice_entrypoint;
    int support_encode = 0;
    int major_ver, minor_ver;
    VAStatus va_status;
    unsigned int i;

    va_dpy = va_open_display(va_display);
    va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
    CHECK_VASTATUS(va_status, "vaInitialize");

    num_entrypoints = vaMaxNumEntrypoints(va_dpy);
    entrypoints = (VAEntrypoint *)malloc(num_entrypoints * sizeof(*entrypoints));
    if (!entrypoints) {
        fprintf(stderr, "error: failed to allocate VA entrypoints array\n");
        exit(1);
    }

    /* use the highest profile */
    for (i = 0; i < sizeof(profile_list)/sizeof(profile_list[0]); i++) {
        if ((h264_profile != ~0) && h264_profile != profile_list[i])
            continue;

        h264_profile = profile_list[i];
        vaQueryConfigEntrypoints(va_dpy, h264_profile, entrypoints, &num_entrypoints);
        for (slice_entrypoint = 0; slice_entrypoint < num_entrypoints; slice_entrypoint++) {
            if (entrypoints[slice_entrypoint] == VAEntrypointEncSlice) {
                support_encode = 1;
                break;
            }
        }
        if (support_encode == 1)
            break;
    }

    if (support_encode == 0) {
        printf("Can't find VAEntrypointEncSlice for H264 profiles. If you are using a non-Intel GPU\n");
        printf("but have one in your system, try launching Nageru with --va-display /dev/dri/renderD128\n");
        printf("to use VA-API against DRM instead of X11.\n");
        exit(1);
    } else {
        switch (h264_profile) {
            case VAProfileH264Baseline:
                ip_period = 1;
                constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
                h264_entropy_mode = 0;
                break;
            case VAProfileH264ConstrainedBaseline:
                constraint_set_flag |= (1 << 0 | 1 << 1); /* Annex A.2.2 */
                ip_period = 1;
                break;

            case VAProfileH264Main:
                constraint_set_flag |= (1 << 1); /* Annex A.2.2 */
                break;

            case VAProfileH264High:
                constraint_set_flag |= (1 << 3); /* Annex A.2.4 */
                break;
            default:
                h264_profile = VAProfileH264Baseline;
                ip_period = 1;
                constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
                break;
        }
    }

    VAConfigAttrib attrib[VAConfigAttribTypeMax];

    /* find out the format for the render target, and rate control mode */
    for (i = 0; i < VAConfigAttribTypeMax; i++)
        attrib[i].type = (VAConfigAttribType)i;

    va_status = vaGetConfigAttributes(va_dpy, h264_profile, VAEntrypointEncSlice,
                                      &attrib[0], VAConfigAttribTypeMax);
    CHECK_VASTATUS(va_status, "vaGetConfigAttributes");
    /* check the config attributes we are interested in */
    if ((attrib[VAConfigAttribRTFormat].value & VA_RT_FORMAT_YUV420) == 0) {
        printf("Could not find the desired YUV420 RT format\n");
        exit(1);
    } else {
        config_attrib[config_attrib_num].type = VAConfigAttribRTFormat;
        config_attrib[config_attrib_num].value = VA_RT_FORMAT_YUV420;
        config_attrib_num++;
    }

    if (attrib[VAConfigAttribRateControl].value != VA_ATTRIB_NOT_SUPPORTED) {
        int tmp = attrib[VAConfigAttribRateControl].value;

        if (rc_mode == -1 || !(rc_mode & tmp))  {
            if (rc_mode != -1) {
                printf("Warning: The specified RateControl mode (%s) is not supported; using a default instead.\n", rc_to_string(rc_mode));
            }

            for (i = 0; i < sizeof(rc_default_modes) / sizeof(rc_default_modes[0]); i++) {
                if (rc_default_modes[i] & tmp) {
                    rc_mode = rc_default_modes[i];
                    break;
                }
            }
        }

        config_attrib[config_attrib_num].type = VAConfigAttribRateControl;
        config_attrib[config_attrib_num].value = rc_mode;
        config_attrib_num++;
    }


    if (attrib[VAConfigAttribEncPackedHeaders].value != VA_ATTRIB_NOT_SUPPORTED) {
        int tmp = attrib[VAConfigAttribEncPackedHeaders].value;

        h264_packedheader = 1;
        config_attrib[config_attrib_num].type = VAConfigAttribEncPackedHeaders;
        config_attrib[config_attrib_num].value = VA_ENC_PACKED_HEADER_NONE;

        if (tmp & VA_ENC_PACKED_HEADER_SEQUENCE) {
            config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_SEQUENCE;
        }

        if (tmp & VA_ENC_PACKED_HEADER_PICTURE) {
            config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_PICTURE;
        }

        if (tmp & VA_ENC_PACKED_HEADER_SLICE) {
            config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_SLICE;
        }

        if (tmp & VA_ENC_PACKED_HEADER_MISC) {
            config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_MISC;
        }

        enc_packed_header_idx = config_attrib_num;
        config_attrib_num++;
    }

    if (attrib[VAConfigAttribEncInterlaced].value != VA_ATTRIB_NOT_SUPPORTED) {
        config_attrib[config_attrib_num].type = VAConfigAttribEncInterlaced;
        config_attrib[config_attrib_num].value = VA_ENC_PACKED_HEADER_NONE;
        config_attrib_num++;
    }

    if (attrib[VAConfigAttribEncMaxRefFrames].value != VA_ATTRIB_NOT_SUPPORTED) {
        h264_maxref = attrib[VAConfigAttribEncMaxRefFrames].value;
    }

    free(entrypoints);
    return 0;
}

int H264EncoderImpl::setup_encode()
{
    VAStatus va_status;
    VASurfaceID *tmp_surfaceid;
    int codedbuf_size, i;
    static VASurfaceID src_surface[SURFACE_NUM];
    static VASurfaceID ref_surface[SURFACE_NUM];

    va_status = vaCreateConfig(va_dpy, h264_profile, VAEntrypointEncSlice,
            &config_attrib[0], config_attrib_num, &config_id);
    CHECK_VASTATUS(va_status, "vaCreateConfig");

    /* create source surfaces */
    va_status = vaCreateSurfaces(va_dpy,
                                 VA_RT_FORMAT_YUV420, frame_width_mbaligned, frame_height_mbaligned,
                                 &src_surface[0], SURFACE_NUM,
                                 NULL, 0);
    CHECK_VASTATUS(va_status, "vaCreateSurfaces");

    /* create reference surfaces */
    va_status = vaCreateSurfaces(va_dpy,
                                 VA_RT_FORMAT_YUV420, frame_width_mbaligned, frame_height_mbaligned,
                                 &ref_surface[0], SURFACE_NUM,
                                 NULL, 0);
    CHECK_VASTATUS(va_status, "vaCreateSurfaces");

    tmp_surfaceid = (VASurfaceID *)calloc(2 * SURFACE_NUM, sizeof(VASurfaceID));
    memcpy(tmp_surfaceid, src_surface, SURFACE_NUM * sizeof(VASurfaceID));
    memcpy(tmp_surfaceid + SURFACE_NUM, ref_surface, SURFACE_NUM * sizeof(VASurfaceID));

    /* Create a context for this encode pipe */
    va_status = vaCreateContext(va_dpy, config_id,
                                frame_width_mbaligned, frame_height_mbaligned,
                                VA_PROGRESSIVE,
                                tmp_surfaceid, 2 * SURFACE_NUM,
                                &context_id);
    CHECK_VASTATUS(va_status, "vaCreateContext");
    free(tmp_surfaceid);

    codedbuf_size = (frame_width_mbaligned * frame_height_mbaligned * 400) / (16*16);

    for (i = 0; i < SURFACE_NUM; i++) {
        /* Create the coded buffer once per surface. The other VA buffers are
         * not used again after vaRenderPicture(), so the app can simply
         * vaCreateBuffer() them anew for every frame, but the coded buffer
         * needs to be mapped and accessed after vaRenderPicture()/vaEndPicture(),
         * so VA will not maintain it for us.
         */
        va_status = vaCreateBuffer(va_dpy, context_id, VAEncCodedBufferType,
                codedbuf_size, 1, NULL, &gl_surfaces[i].coded_buf);
        CHECK_VASTATUS(va_status, "vaCreateBuffer");
    }

    /* create OpenGL objects */
    //glGenFramebuffers(SURFACE_NUM, fbos);

    for (i = 0; i < SURFACE_NUM; i++) {
        glGenTextures(1, &gl_surfaces[i].y_tex);
        glGenTextures(1, &gl_surfaces[i].cbcr_tex);

        if (!use_zerocopy) {
            // Create Y image.
            glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].y_tex);
            glTexStorage2D(GL_TEXTURE_2D, 1, GL_R8, frame_width, frame_height);

            // Create CbCr image.
            glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].cbcr_tex);
            glTexStorage2D(GL_TEXTURE_2D, 1, GL_RG8, frame_width / 2, frame_height / 2);

            // Generate a PBO to read into. It doesn't necessarily fit 1:1 with the VA-API
            // buffers, due to potentially differing pitch.
            glGenBuffers(1, &gl_surfaces[i].pbo);
            glBindBuffer(GL_PIXEL_PACK_BUFFER, gl_surfaces[i].pbo);
            glBufferStorage(GL_PIXEL_PACK_BUFFER, frame_width * frame_height * 2, nullptr, GL_MAP_READ_BIT | GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT);
            uint8_t *ptr = (uint8_t *)glMapBufferRange(GL_PIXEL_PACK_BUFFER, 0, frame_width * frame_height * 2, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
            gl_surfaces[i].y_offset = 0;
            gl_surfaces[i].cbcr_offset = frame_width * frame_height;
            gl_surfaces[i].y_ptr = ptr + gl_surfaces[i].y_offset;
            gl_surfaces[i].cbcr_ptr = ptr + gl_surfaces[i].cbcr_offset;
            glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
        }
    }

    for (i = 0; i < SURFACE_NUM; i++) {
        gl_surfaces[i].src_surface = src_surface[i];
        gl_surfaces[i].ref_surface = ref_surface[i];
    }

    return 0;
}

// Given a list like 1 9 3 0 2 8 4 and a pivot element 3, will produce
//
//   2 1 0 [3] 4 8 9
template<class T, class C>
static void sort_two(T *begin, T *end, const T &pivot, const C &less_than)
{
        T *middle = partition(begin, end, [&](const T &elem) { return less_than(elem, pivot); });
        sort(begin, middle, [&](const T &a, const T &b) { return less_than(b, a); });
        sort(middle, end, less_than);
}

void H264EncoderImpl::update_ReferenceFrames(int frame_type)
{
    int i;

    if (frame_type == FRAME_B)
        return;

    CurrentCurrPic.flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
    numShortTerm++;
    if (numShortTerm > num_ref_frames)
        numShortTerm = num_ref_frames;
    for (i=numShortTerm-1; i>0; i--)
        ReferenceFrames[i] = ReferenceFrames[i-1];
    ReferenceFrames[0] = CurrentCurrPic;

    current_frame_num++;
    if (current_frame_num >= MaxFrameNum)
        current_frame_num = 0;
}

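// Computes the default H.264 reference picture lists: for P-frames,
// RefPicList0 is the short-term references sorted by descending frame_idx;
// for B-frames, RefPicList0 prefers past frames (in POC order, nearest
// first) and RefPicList1 prefers future frames, which is what the
// sort_two() calls around the current picture accomplish.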
int H264EncoderImpl::update_RefPicList(int frame_type)
{
    const auto descending_by_frame_idx = [](const VAPictureH264 &a, const VAPictureH264 &b) {
        return a.frame_idx > b.frame_idx;
    };
    const auto ascending_by_top_field_order_cnt = [](const VAPictureH264 &a, const VAPictureH264 &b) {
        return a.TopFieldOrderCnt < b.TopFieldOrderCnt;
    };
    const auto descending_by_top_field_order_cnt = [](const VAPictureH264 &a, const VAPictureH264 &b) {
        return a.TopFieldOrderCnt > b.TopFieldOrderCnt;
    };

    if (frame_type == FRAME_P) {
        memcpy(RefPicList0_P, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
        sort(&RefPicList0_P[0], &RefPicList0_P[numShortTerm], descending_by_frame_idx);
    } else if (frame_type == FRAME_B) {
        memcpy(RefPicList0_B, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
        sort_two(&RefPicList0_B[0], &RefPicList0_B[numShortTerm], CurrentCurrPic, ascending_by_top_field_order_cnt);

        memcpy(RefPicList1_B, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
        sort_two(&RefPicList1_B[0], &RefPicList1_B[numShortTerm], CurrentCurrPic, descending_by_top_field_order_cnt);
    }

    return 0;
}
1333
1334
1335 int H264EncoderImpl::render_sequence()
1336 {
1337     VABufferID seq_param_buf, rc_param_buf, render_id[2];
1338     VAStatus va_status;
1339     VAEncMiscParameterBuffer *misc_param;
1340     VAEncMiscParameterRateControl *misc_rate_ctrl;
1341     
1342     seq_param.level_idc = 41 /*SH_LEVEL_3*/;
1343     seq_param.picture_width_in_mbs = frame_width_mbaligned / 16;
1344     seq_param.picture_height_in_mbs = frame_height_mbaligned / 16;
1345     seq_param.bits_per_second = frame_bitrate;
1346
1347     seq_param.intra_period = intra_period;
1348     seq_param.intra_idr_period = intra_idr_period;
1349     seq_param.ip_period = ip_period;
1350
1351     seq_param.max_num_ref_frames = num_ref_frames;
1352     seq_param.seq_fields.bits.frame_mbs_only_flag = 1;
1353     seq_param.time_scale = TIMEBASE * 2;
1354     seq_param.num_units_in_tick = 1; /* Tc = num_units_in_tick / time_scale */
1355     seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = Log2MaxPicOrderCntLsb - 4;
1356     seq_param.seq_fields.bits.log2_max_frame_num_minus4 = Log2MaxFrameNum - 4;
1358     seq_param.seq_fields.bits.chroma_format_idc = 1;
1359     seq_param.seq_fields.bits.direct_8x8_inference_flag = 1;
1360     
1361     if (frame_width != frame_width_mbaligned ||
1362         frame_height != frame_height_mbaligned) {
1363         seq_param.frame_cropping_flag = 1;
1364         seq_param.frame_crop_left_offset = 0;
1365         seq_param.frame_crop_right_offset = (frame_width_mbaligned - frame_width)/2;
1366         seq_param.frame_crop_top_offset = 0;
1367         seq_param.frame_crop_bottom_offset = (frame_height_mbaligned - frame_height)/2;
1368     }
1369     
1370     va_status = vaCreateBuffer(va_dpy, context_id,
1371                                VAEncSequenceParameterBufferType,
1372                                sizeof(seq_param), 1, &seq_param, &seq_param_buf);
1373     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1374     
1375     va_status = vaCreateBuffer(va_dpy, context_id,
1376                                VAEncMiscParameterBufferType,
1377                                sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
1378                                1, NULL, &rc_param_buf);
1379     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1380     
1381     vaMapBuffer(va_dpy, rc_param_buf, (void **)&misc_param);
1382     misc_param->type = VAEncMiscParameterTypeRateControl;
1383     misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data;
1384     memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl));
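    // Under VA_RC_VBR, bits_per_second is the maximum bitrate and the encoder
    // targets bits_per_second * target_percentage / 100 on average, measured
    // over a window of window_size milliseconds (see
    // VAEncMiscParameterRateControl in <va/va.h>).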
1385     misc_rate_ctrl->bits_per_second = frame_bitrate;
1386     misc_rate_ctrl->target_percentage = 66;
1387     misc_rate_ctrl->window_size = 1000;
1388     misc_rate_ctrl->initial_qp = initial_qp;
1389     misc_rate_ctrl->min_qp = minimal_qp;
1390     misc_rate_ctrl->basic_unit_size = 0;
1391     vaUnmapBuffer(va_dpy, rc_param_buf);
1392
1393     render_id[0] = seq_param_buf;
1394     render_id[1] = rc_param_buf;
1395     
1396     render_picture_and_delete(va_dpy, context_id, &render_id[0], 2);
1397     
1398     return 0;
1399 }
1400
1401 static int calc_poc(int pic_order_cnt_lsb, int frame_type)
1402 {
1403     static int PicOrderCntMsb_ref = 0, pic_order_cnt_lsb_ref = 0;
1404     int prevPicOrderCntMsb, prevPicOrderCntLsb;
1405     int PicOrderCntMsb, TopFieldOrderCnt;
1406     
1407     if (frame_type == FRAME_IDR)
1408         prevPicOrderCntMsb = prevPicOrderCntLsb = 0;
1409     else {
1410         prevPicOrderCntMsb = PicOrderCntMsb_ref;
1411         prevPicOrderCntLsb = pic_order_cnt_lsb_ref;
1412     }
1413     
1414     if ((pic_order_cnt_lsb < prevPicOrderCntLsb) &&
1415         ((prevPicOrderCntLsb - pic_order_cnt_lsb) >= (int)(MaxPicOrderCntLsb / 2)))
1416         PicOrderCntMsb = prevPicOrderCntMsb + MaxPicOrderCntLsb;
1417     else if ((pic_order_cnt_lsb > prevPicOrderCntLsb) &&
1418              ((pic_order_cnt_lsb - prevPicOrderCntLsb) > (int)(MaxPicOrderCntLsb / 2)))
1419         PicOrderCntMsb = prevPicOrderCntMsb - MaxPicOrderCntLsb;
1420     else
1421         PicOrderCntMsb = prevPicOrderCntMsb;
1422     
1423     TopFieldOrderCnt = PicOrderCntMsb + pic_order_cnt_lsb;
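    // Wraparound example: if the previous reference had pic_order_cnt_lsb =
    // MaxPicOrderCntLsb - 6 with PicOrderCntMsb = 0, and this frame has
    // pic_order_cnt_lsb = 2, the backward jump exceeds MaxPicOrderCntLsb / 2,
    // so PicOrderCntMsb becomes MaxPicOrderCntLsb and TopFieldOrderCnt =
    // MaxPicOrderCntLsb + 2.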
1424
1425     if (frame_type != FRAME_B) {
1426         PicOrderCntMsb_ref = PicOrderCntMsb;
1427         pic_order_cnt_lsb_ref = pic_order_cnt_lsb;
1428     }
1429     
1430     return TopFieldOrderCnt;
1431 }
1432
1433 int H264EncoderImpl::render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num)
1434 {
1435     VABufferID pic_param_buf;
1436     VAStatus va_status;
1437     int i = 0;
1438
1439     pic_param.CurrPic.picture_id = gl_surfaces[display_frame_num % SURFACE_NUM].ref_surface;
1440     pic_param.CurrPic.frame_idx = current_frame_num;
1441     pic_param.CurrPic.flags = 0;
1442     pic_param.CurrPic.TopFieldOrderCnt = calc_poc((display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb, frame_type);
1443     pic_param.CurrPic.BottomFieldOrderCnt = pic_param.CurrPic.TopFieldOrderCnt;
1444     CurrentCurrPic = pic_param.CurrPic;
1445
1446     memcpy(pic_param.ReferenceFrames, ReferenceFrames, numShortTerm*sizeof(VAPictureH264));
1447     for (i = numShortTerm; i < MAX_NUM_REF1; i++) {
1448         pic_param.ReferenceFrames[i].picture_id = VA_INVALID_SURFACE;
1449         pic_param.ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID;
1450     }
1451     
1452     pic_param.pic_fields.bits.idr_pic_flag = (frame_type == FRAME_IDR);
1453     pic_param.pic_fields.bits.reference_pic_flag = (frame_type != FRAME_B);
1454     pic_param.pic_fields.bits.entropy_coding_mode_flag = h264_entropy_mode;
1455     pic_param.pic_fields.bits.deblocking_filter_control_present_flag = 1;
1456     pic_param.frame_num = current_frame_num;
1457     pic_param.coded_buf = gl_surfaces[display_frame_num % SURFACE_NUM].coded_buf;
1458     pic_param.last_picture = false;  // FIXME
1459     pic_param.pic_init_qp = initial_qp;
1460
1461     va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
1462                                sizeof(pic_param), 1, &pic_param, &pic_param_buf);
1463     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1464
1465     render_picture_and_delete(va_dpy, context_id, &pic_param_buf, 1);
1466
1467     return 0;
1468 }
1469
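// Packed headers are pre-serialized bitstream units that the application hands
// to the driver verbatim, for drivers that do not generate them on their own.
// Here we build the SPS ourselves (build_packed_seq_buffer()) and submit it as
// a parameter/data buffer pair.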
1470 int H264EncoderImpl::render_packedsequence()
1471 {
1472     VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1473     VABufferID packedseq_para_bufid, packedseq_data_bufid, render_id[2];
1474     unsigned int length_in_bits;
1475     unsigned char *packedseq_buffer = NULL;
1476     VAStatus va_status;
1477
1478     length_in_bits = build_packed_seq_buffer(&packedseq_buffer); 
1479     
1480     packedheader_param_buffer.type = VAEncPackedHeaderSequence;
1481     
1482     packedheader_param_buffer.bit_length = length_in_bits;
1483     packedheader_param_buffer.has_emulation_bytes = 0;
1484     va_status = vaCreateBuffer(va_dpy,
1485                                context_id,
1486                                VAEncPackedHeaderParameterBufferType,
1487                                sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1488                                &packedseq_para_bufid);
1489     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1490
1491     va_status = vaCreateBuffer(va_dpy,
1492                                context_id,
1493                                VAEncPackedHeaderDataBufferType,
1494                                (length_in_bits + 7) / 8, 1, packedseq_buffer,
1495                                &packedseq_data_bufid);
1496     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1497
1498     render_id[0] = packedseq_para_bufid;
1499     render_id[1] = packedseq_data_bufid;
1500     render_picture_and_delete(va_dpy, context_id, render_id, 2);
1501
1502     free(packedseq_buffer);
1503     
1504     return 0;
1505 }
1506
1507
1508 int H264EncoderImpl::render_packedpicture()
1509 {
1510     VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1511     VABufferID packedpic_para_bufid, packedpic_data_bufid, render_id[2];
1512     unsigned int length_in_bits;
1513     unsigned char *packedpic_buffer = NULL;
1514     VAStatus va_status;
1515
1516     length_in_bits = build_packed_pic_buffer(&packedpic_buffer); 
1517     packedheader_param_buffer.type = VAEncPackedHeaderPicture;
1518     packedheader_param_buffer.bit_length = length_in_bits;
1519     packedheader_param_buffer.has_emulation_bytes = 0;
1520
1521     va_status = vaCreateBuffer(va_dpy,
1522                                context_id,
1523                                VAEncPackedHeaderParameterBufferType,
1524                                sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1525                                &packedpic_para_bufid);
1526     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1527
1528     va_status = vaCreateBuffer(va_dpy,
1529                                context_id,
1530                                VAEncPackedHeaderDataBufferType,
1531                                (length_in_bits + 7) / 8, 1, packedpic_buffer,
1532                                &packedpic_data_bufid);
1533     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1534
1535     render_id[0] = packedpic_para_bufid;
1536     render_id[1] = packedpic_data_bufid;
1537     render_picture_and_delete(va_dpy, context_id, render_id, 2);
1538
1539     free(packedpic_buffer);
1540     
1541     return 0;
1542 }
1543
1544 void H264EncoderImpl::render_packedslice()
1545 {
1546     VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1547     VABufferID packedslice_para_bufid, packedslice_data_bufid, render_id[2];
1548     unsigned int length_in_bits;
1549     unsigned char *packedslice_buffer = NULL;
1550     VAStatus va_status;
1551
1552     length_in_bits = build_packed_slice_buffer(&packedslice_buffer);
1553     packedheader_param_buffer.type = VAEncPackedHeaderSlice;
1554     packedheader_param_buffer.bit_length = length_in_bits;
1555     packedheader_param_buffer.has_emulation_bytes = 0;
1556
1557     va_status = vaCreateBuffer(va_dpy,
1558                                context_id,
1559                                VAEncPackedHeaderParameterBufferType,
1560                                sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1561                                &packedslice_para_bufid);
1562     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1563
1564     va_status = vaCreateBuffer(va_dpy,
1565                                context_id,
1566                                VAEncPackedHeaderDataBufferType,
1567                                (length_in_bits + 7) / 8, 1, packedslice_buffer,
1568                                &packedslice_data_bufid);
1569     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1570
1571     render_id[0] = packedslice_para_bufid;
1572     render_id[1] = packedslice_data_bufid;
1573     render_picture_and_delete(va_dpy, context_id, render_id, 2);
1574
1575     free(packedslice_buffer);
1576 }
1577
1578 int H264EncoderImpl::render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type)
1579 {
1580     VABufferID slice_param_buf;
1581     VAStatus va_status;
1582     int i;
1583
1584     update_RefPicList(frame_type);
1585     
1586     /* one frame, one slice */
1587     slice_param.macroblock_address = 0;
1588     slice_param.num_macroblocks = frame_width_mbaligned * frame_height_mbaligned/(16*16); /* Measured by MB */
1589     slice_param.slice_type = (frame_type == FRAME_IDR) ? SLICE_TYPE_I : frame_type;
1590     if (frame_type == FRAME_IDR) {
1591         if (encoding_frame_num != 0)
1592             ++slice_param.idr_pic_id;
1593     } else if (frame_type == FRAME_P) {
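        // h264_maxref is the value of VAConfigAttribEncMaxRefFrames: the low
        // 16 bits hold the maximum size of reference list 0, the high 16 bits
        // that of list 1.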
1594         int refpiclist0_max = h264_maxref & 0xffff;
1595         memcpy(slice_param.RefPicList0, RefPicList0_P, refpiclist0_max*sizeof(VAPictureH264));
1596
1597         for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
1598             slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
1599             slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
1600         }
1601     } else if (frame_type == FRAME_B) {
1602         int refpiclist0_max = h264_maxref & 0xffff;
1603         int refpiclist1_max = (h264_maxref >> 16) & 0xffff;
1604
1605         memcpy(slice_param.RefPicList0, RefPicList0_B, refpiclist0_max*sizeof(VAPictureH264));
1606         for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
1607             slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
1608             slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
1609         }
1610
1611         memcpy(slice_param.RefPicList1, RefPicList1_B, refpiclist1_max*sizeof(VAPictureH264));
1612         for (i = refpiclist1_max; i < MAX_NUM_REF2; i++) {
1613             slice_param.RefPicList1[i].picture_id = VA_INVALID_SURFACE;
1614             slice_param.RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
1615         }
1616     }
1617
1618     slice_param.slice_alpha_c0_offset_div2 = 0;
1619     slice_param.slice_beta_offset_div2 = 0;
1620     slice_param.direct_spatial_mv_pred_flag = 1;
1621     slice_param.pic_order_cnt_lsb = (display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb;
1622     
1623
1624     if (h264_packedheader &&
1625         config_attrib[enc_packed_header_idx].value & VA_ENC_PACKED_HEADER_SLICE)
1626         render_packedslice();
1627
1628     va_status = vaCreateBuffer(va_dpy, context_id, VAEncSliceParameterBufferType,
1629                                sizeof(slice_param), 1, &slice_param, &slice_param_buf);
1630     CHECK_VASTATUS(va_status, "vaCreateBuffer");
1631
1632     render_picture_and_delete(va_dpy, context_id, &slice_param_buf, 1);
1633
1634     return 0;
1635 }
1636
1637
1638
1639 void H264EncoderImpl::save_codeddata(storage_task task)
1640 {    
1641         VACodedBufferSegment *buf_list = NULL;
1642         VAStatus va_status;
1643
1644         string data;
1645
1646         va_status = vaMapBuffer(va_dpy, gl_surfaces[task.display_order % SURFACE_NUM].coded_buf, (void **)(&buf_list));
1647         CHECK_VASTATUS(va_status, "vaMapBuffer");
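        // The coded buffer comes back as a linked list of segments;
        // concatenate them all into one bitstream blob.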
1648         while (buf_list != NULL) {
1649                 data.append(reinterpret_cast<const char *>(buf_list->buf), buf_list->size);
1650                 buf_list = (VACodedBufferSegment *) buf_list->next;
1651         }
1652         vaUnmapBuffer(va_dpy, gl_surfaces[task.display_order % SURFACE_NUM].coded_buf);
1653
1654         {
1655                 // Add video.
1656                 AVPacket pkt;
1657                 memset(&pkt, 0, sizeof(pkt));
1658                 pkt.buf = nullptr;
1659                 pkt.data = reinterpret_cast<uint8_t *>(&data[0]);
1660                 pkt.size = data.size();
1661                 pkt.stream_index = 0;
1662                 if (task.frame_type == FRAME_IDR) {
1663                         pkt.flags = AV_PKT_FLAG_KEY;
1664                 } else {
1665                         pkt.flags = 0;
1666                 }
1667                 //pkt.duration = 1;
1668                 if (file_mux) {
1669                         file_mux->add_packet(pkt, task.pts + global_delay(), task.dts + global_delay());
1670                 }
1671                 if (!global_flags.uncompressed_video_to_http &&
1672                     !global_flags.x264_video_to_http) {
1673                         stream_mux->add_packet(pkt, task.pts + global_delay(), task.dts + global_delay());
1674                 }
1675         }
1676         // Encode and add all audio frames up to and including the pts of this video frame.
1677         for ( ;; ) {
1678                 int64_t audio_pts;
1679                 vector<float> audio;
1680                 {
1681                         unique_lock<mutex> lock(frame_queue_mutex);
1682                         frame_queue_nonempty.wait(lock, [this]{ return storage_thread_should_quit || !pending_audio_frames.empty(); });
1683                         if (storage_thread_should_quit && pending_audio_frames.empty()) return;
1684                         auto it = pending_audio_frames.begin();
1685                         if (it->first > task.pts) break;
1686                         audio_pts = it->first;
1687                         audio = move(it->second);
1688                         pending_audio_frames.erase(it); 
1689                 }
1690
1691                 if (context_audio_stream) {
1692                         encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, resampler_audio_file, { file_mux.get() });
1693                         encode_audio(audio, &audio_queue_stream, audio_pts, context_audio_stream, resampler_audio_stream, { stream_mux.get() });
1694                 } else {
1695                         encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, resampler_audio_file, { stream_mux.get(), file_mux.get() });
1696                 }
1697
1698                 if (audio_pts == task.pts) break;
1699         }
1700 }
1701
1702 void H264EncoderImpl::encode_audio(
1703         const vector<float> &audio,
1704         vector<float> *audio_queue,
1705         int64_t audio_pts,
1706         AVCodecContext *ctx,
1707         AVAudioResampleContext *resampler,
1708         const vector<Mux *> &muxes)
1709 {
1710         if (ctx->frame_size == 0) {
1711                 // No queueing needed.
1712                 assert(audio_queue->empty());
1713                 assert(audio.size() % 2 == 0);
1714                 encode_audio_one_frame(&audio[0], audio.size() / 2, audio_pts, ctx, resampler, muxes);
1715                 return;
1716         }
1717
1718         int64_t sample_offset = audio_queue->size();
1719
1720         audio_queue->insert(audio_queue->end(), audio.begin(), audio.end());
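        // Example: with AAC, ctx->frame_size is 1024 samples per channel, i.e.
        // 2048 interleaved floats per codec frame. If 512 floats were left over
        // in the queue from last time (sample_offset == 512), the first frame
        // encoded in this loop starts 512 floats before this call's audio_pts;
        // the (sample_num - sample_offset) term corrects the pts for that.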
1721         size_t sample_num;
1722         for (sample_num = 0;
1723              sample_num + ctx->frame_size * 2 <= audio_queue->size();
1724              sample_num += ctx->frame_size * 2) {
1725                 int64_t adjusted_audio_pts = audio_pts + (int64_t(sample_num) - sample_offset) * TIMEBASE / (OUTPUT_FREQUENCY * 2);
1726                 encode_audio_one_frame(&(*audio_queue)[sample_num],
1727                                        ctx->frame_size,
1728                                        adjusted_audio_pts,
1729                                        ctx,
1730                                        resampler,
1731                                        muxes);
1732         }
1733         audio_queue->erase(audio_queue->begin(), audio_queue->begin() + sample_num);
1734 }
1735
1736 void H264EncoderImpl::encode_audio_one_frame(
1737         const float *audio,
1738         size_t num_samples,
1739         int64_t audio_pts,
1740         AVCodecContext *ctx,
1741         AVAudioResampleContext *resampler,
1742         const vector<Mux *> &muxes)
1743 {
1744         audio_frame->nb_samples = num_samples;
1745         audio_frame->channel_layout = AV_CH_LAYOUT_STEREO;
1746         audio_frame->format = ctx->sample_fmt;
1747         audio_frame->sample_rate = OUTPUT_FREQUENCY;
1748
1749         if (av_samples_alloc(audio_frame->data, nullptr, 2, num_samples, ctx->sample_fmt, 0) < 0) {
1750                 fprintf(stderr, "Could not allocate %zu samples.\n", num_samples);
1751                 exit(1);
1752         }
1753
1754         if (avresample_convert(resampler, audio_frame->data, 0, num_samples,
1755                                (uint8_t **)&audio, 0, num_samples) < 0) {
1756                 fprintf(stderr, "Audio conversion failed.\n");
1757                 exit(1);
1758         }
1759
1760         AVPacket pkt;
1761         av_init_packet(&pkt);
1762         pkt.data = nullptr;
1763         pkt.size = 0;
1764         int got_output = 0;
1765         avcodec_encode_audio2(ctx, &pkt, audio_frame, &got_output);
1766         if (got_output) {
1767                 pkt.stream_index = 1;
1768                 pkt.flags = 0;
1769                 for (Mux *mux : muxes) {
1770                         mux->add_packet(pkt, audio_pts + global_delay(), audio_pts + global_delay());
1771                 }
1772         }
1773
1774         av_freep(&audio_frame->data[0]);
1775
1776         // TODO: Delayed frames.
1777         av_frame_unref(audio_frame);
1778         av_free_packet(&pkt);
1779 }
1780
1781 // Put a newly encoded (or asynchronously encoding) frame onto the queue for the storage thread to mux out.
1782 void H264EncoderImpl::storage_task_enqueue(storage_task task)
1783 {
1784         unique_lock<mutex> lock(storage_task_queue_mutex);
1785         storage_task_queue.push(move(task));
1786         storage_task_queue_changed.notify_all();
1787 }
1788
1789 void H264EncoderImpl::storage_task_thread()
1790 {
1791         for ( ;; ) {
1792                 storage_task current;
1793                 {
1794                         // wait until there's an encoded frame  
1795                         unique_lock<mutex> lock(storage_task_queue_mutex);
1796                         storage_task_queue_changed.wait(lock, [this]{ return storage_thread_should_quit || !storage_task_queue.empty(); });
1797                         if (storage_thread_should_quit && storage_task_queue.empty()) return;
1798                         current = move(storage_task_queue.front());
1799                         storage_task_queue.pop();
1800                 }
1801
1802                 VAStatus va_status;
1803            
1804                 // Wait for the encode to finish, then mux out the coded data.
1805                 va_status = vaSyncSurface(va_dpy, gl_surfaces[current.display_order % SURFACE_NUM].src_surface);
1806                 CHECK_VASTATUS(va_status, "vaSyncSurface");
1807                 save_codeddata(move(current));
1808
1809                 {
1810                         unique_lock<mutex> lock(storage_task_queue_mutex);
1811                         srcsurface_status[current.display_order % SURFACE_NUM] = SRC_SURFACE_FREE;
1812                         storage_task_queue_changed.notify_all();
1813                 }
1814         }
1815 }
1816
1817 int H264EncoderImpl::release_encode()
1818 {
1819         for (unsigned i = 0; i < SURFACE_NUM; i++) {
1820                 vaDestroyBuffer(va_dpy, gl_surfaces[i].coded_buf);
1821                 vaDestroySurfaces(va_dpy, &gl_surfaces[i].src_surface, 1);
1822                 vaDestroySurfaces(va_dpy, &gl_surfaces[i].ref_surface, 1);
1823
1824                 if (!use_zerocopy) {
1825                         glBindBuffer(GL_PIXEL_PACK_BUFFER, gl_surfaces[i].pbo);
1826                         glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
1827                         glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
1828                         glDeleteBuffers(1, &gl_surfaces[i].pbo);
1829                 }
1830                 glDeleteTextures(1, &gl_surfaces[i].y_tex);
1831                 glDeleteTextures(1, &gl_surfaces[i].cbcr_tex);
1832         }
1833
1834         vaDestroyContext(va_dpy, context_id);
1835         vaDestroyConfig(va_dpy, config_id);
1836
1837         return 0;
1838 }
1839
1840 int H264EncoderImpl::deinit_va()
1841 {
1842     vaTerminate(va_dpy);
1843
1844     va_close_display(va_dpy);
1845
1846     return 0;
1847 }
1848
1849 namespace {
1850
1851 void init_audio_encoder(const string &codec_name, int bit_rate, AVCodecContext **ctx, AVAudioResampleContext **resampler)
1852 {
1853         AVCodec *codec_audio = avcodec_find_encoder_by_name(codec_name.c_str());
1854         if (codec_audio == nullptr) {
1855                 fprintf(stderr, "ERROR: Could not find codec '%s'\n", codec_name.c_str());
1856                 exit(1);
1857         }
1858
1859         AVCodecContext *context_audio = avcodec_alloc_context3(codec_audio);
1860         context_audio->bit_rate = bit_rate;
1861         context_audio->sample_rate = OUTPUT_FREQUENCY;
1862         context_audio->sample_fmt = codec_audio->sample_fmts[0];
1863         context_audio->channels = 2;
1864         context_audio->channel_layout = AV_CH_LAYOUT_STEREO;
1865         context_audio->time_base = AVRational{1, TIMEBASE};
1866         context_audio->flags |= CODEC_FLAG_GLOBAL_HEADER;
1867         if (avcodec_open2(context_audio, codec_audio, NULL) < 0) {
1868                 fprintf(stderr, "Could not open codec '%s'\n", codec_name.c_str());
1869                 exit(1);
1870         }
1871
1872         *ctx = context_audio;
1873
1874         *resampler = avresample_alloc_context();
1875         if (*resampler == nullptr) {
1876                 fprintf(stderr, "Allocating resampler failed.\n");
1877                 exit(1);
1878         }
1879
1880         av_opt_set_int(*resampler, "in_channel_layout",  AV_CH_LAYOUT_STEREO,       0);
1881         av_opt_set_int(*resampler, "out_channel_layout", AV_CH_LAYOUT_STEREO,       0);
1882         av_opt_set_int(*resampler, "in_sample_rate",     OUTPUT_FREQUENCY,          0);
1883         av_opt_set_int(*resampler, "out_sample_rate",    OUTPUT_FREQUENCY,          0);
1884         av_opt_set_int(*resampler, "in_sample_fmt",      AV_SAMPLE_FMT_FLT,         0);
1885         av_opt_set_int(*resampler, "out_sample_fmt",     context_audio->sample_fmt, 0);
1886
1887         if (avresample_open(*resampler) < 0) {
1888                 fprintf(stderr, "Could not open resample context.\n");
1889                 exit(1);
1890         }
1891 }
1892
1893 }  // namespace
1894
1895 H264EncoderImpl::H264EncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
1896         : current_storage_frame(0), surface(surface), httpd(httpd), frame_width(width), frame_height(height)
1897 {
1898         init_audio_encoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE, &context_audio_file, &resampler_audio_file);
1899
1900         if (!global_flags.stream_audio_codec_name.empty()) {
1901                 init_audio_encoder(global_flags.stream_audio_codec_name,
1902                         global_flags.stream_audio_codec_bitrate, &context_audio_stream, &resampler_audio_stream);
1903         }
1904
1905         frame_width_mbaligned = (frame_width + 15) & (~15);
1906         frame_height_mbaligned = (frame_height + 15) & (~15);
1907
1908         open_output_stream();
1909
1910         audio_frame = av_frame_alloc();
1911
1912         //print_input();
1913
1914         if (global_flags.uncompressed_video_to_http ||
1915             global_flags.x264_video_to_http) {
1916                 reorderer.reset(new FrameReorderer(ip_period - 1, frame_width, frame_height));
1917         }
1918         if (global_flags.x264_video_to_http) {
1919                 x264_encoder.reset(new X264Encoder(stream_mux.get()));
1920         }
1921
1922         init_va(va_display);
1923         setup_encode();
1924
1925         // No frames are ready yet.
1926         memset(srcsurface_status, SRC_SURFACE_FREE, sizeof(srcsurface_status));
1927             
1928         memset(&seq_param, 0, sizeof(seq_param));
1929         memset(&pic_param, 0, sizeof(pic_param));
1930         memset(&slice_param, 0, sizeof(slice_param));
1931
1932         storage_thread = thread(&H264EncoderImpl::storage_task_thread, this);
1933
1934         encode_thread = thread([this]{
1935                 //SDL_GL_MakeCurrent(window, context);
1936                 QOpenGLContext *context = create_context(this->surface);
1937                 eglBindAPI(EGL_OPENGL_API);
1938                 if (!make_current(context, this->surface)) {
1939                         printf("display=%p surface=%p context=%p curr=%p err=%d\n", eglGetCurrentDisplay(), this->surface, context, eglGetCurrentContext(),
1940                                 eglGetError());
1941                         exit(1);
1942                 }
1943                 encode_thread_func();
1944         });
1945 }
1946
1947 H264EncoderImpl::~H264EncoderImpl()
1948 {
1949         shutdown();
1950         av_frame_free(&audio_frame);
1951         avresample_free(&resampler_audio_file);
1952         avresample_free(&resampler_audio_stream);
1953         avcodec_free_context(&context_audio_file);
1954         avcodec_free_context(&context_audio_stream);
1955         close_output_stream();
1956 }
1957
1958 bool H264EncoderImpl::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
1959 {
1960         assert(!is_shutdown);
1961         {
1962                 // Wait until this frame slot is done encoding.
1963                 unique_lock<mutex> lock(storage_task_queue_mutex);
1964                 if (srcsurface_status[current_storage_frame % SURFACE_NUM] != SRC_SURFACE_FREE) {
1965                         fprintf(stderr, "Warning: Slot %d (for frame %d) is still encoding; rendering has to wait for the H.264 encoder\n",
1966                                 current_storage_frame % SURFACE_NUM, current_storage_frame);
1967                 }
1968                 storage_task_queue_changed.wait(lock, [this]{ return storage_thread_should_quit || (srcsurface_status[current_storage_frame % SURFACE_NUM] == SRC_SURFACE_FREE); });
1969                 srcsurface_status[current_storage_frame % SURFACE_NUM] = SRC_SURFACE_IN_ENCODING;
1970                 if (storage_thread_should_quit) return false;
1971         }
1972
1973         //*fbo = fbos[current_storage_frame % SURFACE_NUM];
1974         GLSurface *surf = &gl_surfaces[current_storage_frame % SURFACE_NUM];
1975         *y_tex = surf->y_tex;
1976         *cbcr_tex = surf->cbcr_tex;
1977
1978         VAStatus va_status = vaDeriveImage(va_dpy, surf->src_surface, &surf->surface_image);
1979         CHECK_VASTATUS(va_status, "vaDeriveImage");
1980
1981         if (use_zerocopy) {
1982                 VABufferInfo buf_info;
1983                 buf_info.mem_type = VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME;  // or VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM?
1984                 va_status = vaAcquireBufferHandle(va_dpy, surf->surface_image.buf, &buf_info);
1985                 CHECK_VASTATUS(va_status, "vaAcquireBufferHandle");
1986
1987                 // Create Y image.
1988                 surf->y_egl_image = EGL_NO_IMAGE_KHR;
1989                 EGLint y_attribs[] = {
1990                         EGL_WIDTH, frame_width,
1991                         EGL_HEIGHT, frame_height,
1992                         EGL_LINUX_DRM_FOURCC_EXT, fourcc_code('R', '8', ' ', ' '),
1993                         EGL_DMA_BUF_PLANE0_FD_EXT, EGLint(buf_info.handle),
1994                         EGL_DMA_BUF_PLANE0_OFFSET_EXT, EGLint(surf->surface_image.offsets[0]),
1995                         EGL_DMA_BUF_PLANE0_PITCH_EXT, EGLint(surf->surface_image.pitches[0]),
1996                         EGL_NONE
1997                 };
1998
1999                 surf->y_egl_image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, y_attribs);
2000                 assert(surf->y_egl_image != EGL_NO_IMAGE_KHR);
2001
2002                 // Associate Y image to a texture.
2003                 glBindTexture(GL_TEXTURE_2D, *y_tex);
2004                 glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, surf->y_egl_image);
2005
2006                 // Create CbCr image.
2007                 surf->cbcr_egl_image = EGL_NO_IMAGE_KHR;
2008                 EGLint cbcr_attribs[] = {
2009                         EGL_WIDTH, frame_width,
2010                         EGL_HEIGHT, frame_height,
2011                         EGL_LINUX_DRM_FOURCC_EXT, fourcc_code('G', 'R', '8', '8'),
2012                         EGL_DMA_BUF_PLANE0_FD_EXT, EGLint(buf_info.handle),
2013                         EGL_DMA_BUF_PLANE0_OFFSET_EXT, EGLint(surf->surface_image.offsets[1]),
2014                         EGL_DMA_BUF_PLANE0_PITCH_EXT, EGLint(surf->surface_image.pitches[1]),
2015                         EGL_NONE
2016                 };
2017
2018                 surf->cbcr_egl_image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, cbcr_attribs);
2019                 assert(surf->cbcr_egl_image != EGL_NO_IMAGE_KHR);
2020
2021                 // Associate CbCr image to a texture.
2022                 glBindTexture(GL_TEXTURE_2D, *cbcr_tex);
2023                 glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, surf->cbcr_egl_image);
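                // The Y and CbCr textures now alias the two planes of the
                // VA-API NV12 surface through dma-buf, so rendering into them
                // writes directly into memory the encoder will read; the
                // readback in end_frame() is skipped entirely.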
2024         }
2025
2026         return true;
2027 }
2028
2029 void H264EncoderImpl::add_audio(int64_t pts, vector<float> audio)
2030 {
2031         assert(!is_shutdown);
2032         {
2033                 unique_lock<mutex> lock(frame_queue_mutex);
2034                 pending_audio_frames[pts] = move(audio);
2035         }
2036         frame_queue_nonempty.notify_all();
2037 }
2038
2039 RefCountedGLsync H264EncoderImpl::end_frame(int64_t pts, const vector<RefCountedFrame> &input_frames)
2040 {
2041         assert(!is_shutdown);
2042
2043         if (!use_zerocopy) {
2044                 GLSurface *surf = &gl_surfaces[current_storage_frame % SURFACE_NUM];
2045
2046                 glPixelStorei(GL_PACK_ROW_LENGTH, 0);
2047                 check_error();
2048
2049                 glBindBuffer(GL_PIXEL_PACK_BUFFER, surf->pbo);
2050                 check_error();
2051
2052                 glBindTexture(GL_TEXTURE_2D, surf->y_tex);
2053                 check_error();
2054                 glGetTexImage(GL_TEXTURE_2D, 0, GL_RED, GL_UNSIGNED_BYTE, BUFFER_OFFSET(surf->y_offset));
2055                 check_error();
2056
2057                 glBindTexture(GL_TEXTURE_2D, surf->cbcr_tex);
2058                 check_error();
2059                 glGetTexImage(GL_TEXTURE_2D, 0, GL_RG, GL_UNSIGNED_BYTE, BUFFER_OFFSET(surf->cbcr_offset));
2060                 check_error();
2061
2062                 glBindTexture(GL_TEXTURE_2D, 0);
2063                 check_error();
2064                 glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
2065                 check_error();
2066
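                // The PBO is persistently mapped without GL_MAP_COHERENT_BIT,
                // so the GPU's writes are not automatically visible to the
                // CPU; this barrier, together with the fence created below
                // (which encode_frame() waits on), makes them visible through
                // the mapping.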
2067                 glMemoryBarrier(GL_TEXTURE_UPDATE_BARRIER_BIT | GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
2068                 check_error();
2069         }
2070
2071         RefCountedGLsync fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
2072         check_error();
2073         glFlush();  // Make the H.264 thread see the fence as soon as possible.
2074         check_error();
2075
2076         {
2077                 unique_lock<mutex> lock(frame_queue_mutex);
2078                 pending_video_frames[current_storage_frame] = PendingFrame{ fence, input_frames, pts };
2079                 ++current_storage_frame;
2080         }
2081         frame_queue_nonempty.notify_all();
2082         return fence;
2083 }
2084
2085 void H264EncoderImpl::shutdown()
2086 {
2087         if (is_shutdown) {
2088                 return;
2089         }
2090
2091         {
2092                 unique_lock<mutex> lock(frame_queue_mutex);
2093                 encode_thread_should_quit = true;
2094                 frame_queue_nonempty.notify_all();
2095         }
2096         encode_thread.join();
2097         {
2098                 unique_lock<mutex> lock(storage_task_queue_mutex);
2099                 storage_thread_should_quit = true;
2100                 frame_queue_nonempty.notify_all();
2101                 storage_task_queue_changed.notify_all();
2102         }
2103         storage_thread.join();
2104
2105         release_encode();
2106         deinit_va();
2107         is_shutdown = true;
2108 }
2109
2110 void H264EncoderImpl::open_output_file(const std::string &filename)
2111 {
2112         AVFormatContext *avctx = avformat_alloc_context();
2113         avctx->oformat = av_guess_format(NULL, filename.c_str(), NULL);
2114         assert(filename.size() < sizeof(avctx->filename) - 1);
2115         strcpy(avctx->filename, filename.c_str());
2116
2117         string url = "file:" + filename;
2118         int ret = avio_open2(&avctx->pb, url.c_str(), AVIO_FLAG_WRITE, &avctx->interrupt_callback, NULL);
2119         if (ret < 0) {
2120                 char tmp[AV_ERROR_MAX_STRING_SIZE];
2121                 fprintf(stderr, "%s: avio_open2() failed: %s\n", filename.c_str(), av_make_error_string(tmp, sizeof(tmp), ret));
2122                 exit(1);
2123         }
2124
2125         file_mux.reset(new Mux(avctx, frame_width, frame_height, Mux::CODEC_H264, context_audio_file->codec, TIMEBASE, DEFAULT_AUDIO_OUTPUT_BIT_RATE, nullptr));
2126 }
2127
2128 void H264EncoderImpl::close_output_file()
2129 {
2130         file_mux.reset();
2131 }
2132
2133 void H264EncoderImpl::open_output_stream()
2134 {
2135         AVFormatContext *avctx = avformat_alloc_context();
2136         AVOutputFormat *oformat = av_guess_format(global_flags.stream_mux_name.c_str(), nullptr, nullptr);
2137         assert(oformat != nullptr);
2138         avctx->oformat = oformat;
2139
2140         string codec_name;
2141         int bit_rate;
2142
2143         if (global_flags.stream_audio_codec_name.empty()) {
2144                 codec_name = AUDIO_OUTPUT_CODEC_NAME;
2145                 bit_rate = DEFAULT_AUDIO_OUTPUT_BIT_RATE;
2146         } else {
2147                 codec_name = global_flags.stream_audio_codec_name;
2148                 bit_rate = global_flags.stream_audio_codec_bitrate;
2149         }
2150
2151         uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
2152         avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, &H264EncoderImpl::write_packet_thunk, nullptr);
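        // Using a custom AVIO context means everything the muxer writes is
        // routed through write_packet_thunk() below, so the stream can be fed
        // to the HTTP server instead of a file.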
2153
2154         Mux::Codec video_codec;
2155         if (global_flags.uncompressed_video_to_http) {
2156                 video_codec = Mux::CODEC_NV12;
2157         } else {
2158                 video_codec = Mux::CODEC_H264;
2159         }
2160
2161         avctx->flags = AVFMT_FLAG_CUSTOM_IO;
2162         AVCodec *codec_audio = avcodec_find_encoder_by_name(codec_name.c_str());
2163         if (codec_audio == nullptr) {
2164                 fprintf(stderr, "ERROR: Could not find codec '%s'\n", codec_name.c_str());
2165                 exit(1);
2166         }
2167
2168         int time_base = global_flags.stream_coarse_timebase ? COARSE_TIMEBASE : TIMEBASE;
2169         stream_mux_writing_header = true;
2170         stream_mux.reset(new Mux(avctx, frame_width, frame_height, video_codec, codec_audio, time_base, bit_rate, this));
2171         stream_mux_writing_header = false;
2172         httpd->set_header(stream_mux_header);
2173         stream_mux_header.clear();
2174 }
2175
2176 void H264EncoderImpl::close_output_stream()
2177 {
2178         stream_mux.reset();
2179 }
2180
2181 int H264EncoderImpl::write_packet_thunk(void *opaque, uint8_t *buf, int buf_size)
2182 {
2183         H264EncoderImpl *h264_encoder = (H264EncoderImpl *)opaque;
2184         return h264_encoder->write_packet(buf, buf_size);
2185 }
2186
2187 int H264EncoderImpl::write_packet(uint8_t *buf, int buf_size)
2188 {
2189         if (stream_mux_writing_header) {
2190                 stream_mux_header.append((char *)buf, buf_size);
2191         } else {
2192                 httpd->add_data((char *)buf, buf_size, stream_mux_writing_keyframes);
2193                 stream_mux_writing_keyframes = false;
2194         }
2195         return buf_size;
2196 }
2197
2198 void H264EncoderImpl::encode_thread_func()
2199 {
2200         int64_t last_dts = -1;
2201         int gop_start_display_frame_num = 0;
2202         for (int encoding_frame_num = 0; ; ++encoding_frame_num) {
2203                 PendingFrame frame;
2204                 int pts_lag;
2205                 int frame_type, display_frame_num;
2206                 encoding2display_order(encoding_frame_num, intra_period, intra_idr_period, ip_period,
2207                                        &display_frame_num, &frame_type, &pts_lag);
2208                 if (frame_type == FRAME_IDR) {
2209                         numShortTerm = 0;
2210                         current_frame_num = 0;
2211                         gop_start_display_frame_num = display_frame_num;
2212                 }
2213
2214                 {
2215                         unique_lock<mutex> lock(frame_queue_mutex);
2216                         frame_queue_nonempty.wait(lock, [this, display_frame_num]{
2217                                 return encode_thread_should_quit || pending_video_frames.count(display_frame_num) != 0;
2218                         });
2219                         if (encode_thread_should_quit && pending_video_frames.count(display_frame_num) == 0) {
2220                                 // We have queued frames that were supposed to be B-frames,
2221                                 // but there will be no P-frame to encode them against. Encode
2222                                 // them all as P-frames instead. Note that this happens under
2223                                 // the mutex, but nobody else uses it at this point, since
2224                                 // we're shutting down, so there's no contention.
2225                                 encode_remaining_frames_as_p(encoding_frame_num, gop_start_display_frame_num, last_dts);
2226                                 return;
2227                         } else {
2228                                 frame = move(pending_video_frames[display_frame_num]);
2229                                 pending_video_frames.erase(display_frame_num);
2230                         }
2231                 }
2232
2233                 // Determine the dts of this frame.
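                // (dts must increase monotonically and never exceed pts:
                // reordered frames get dts = pts - pts_lag, while frames
                // without a fixed lag, signalled as pts_lag == -1, are simply
                // spaced one frame duration after the previous dts.)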
2234                 int64_t dts;
2235                 if (pts_lag == -1) {
2236                         assert(last_dts != -1);
2237                         dts = last_dts + (TIMEBASE / MAX_FPS);
2238                 } else {
2239                         dts = frame.pts - pts_lag;
2240                 }
2241                 last_dts = dts;
2242
2243                 encode_frame(frame, encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type, frame.pts, dts);
2244         }
2245 }
2246
2247 void H264EncoderImpl::encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts)
2248 {
2249         if (pending_video_frames.empty()) {
2250                 return;
2251         }
2252
2253         for (auto &pending_frame : pending_video_frames) {
2254                 int display_frame_num = pending_frame.first;
2255                 assert(display_frame_num > 0);
2256                 PendingFrame frame = move(pending_frame.second);
2257                 int64_t dts = last_dts + (TIMEBASE / MAX_FPS);
2258                 printf("Finalizing encode: Encoding leftover frame %d as P-frame instead of B-frame.\n", display_frame_num);
2259                 encode_frame(frame, encoding_frame_num++, display_frame_num, gop_start_display_frame_num, FRAME_P, frame.pts, dts);
2260                 last_dts = dts;
2261         }
2262
2263         if (global_flags.uncompressed_video_to_http ||
2264             global_flags.x264_video_to_http) {
2265                 // Add frames left in reorderer.
2266                 while (!reorderer->empty()) {
2267                         pair<int64_t, const uint8_t *> output_frame = reorderer->get_first_frame();
2268                         if (global_flags.uncompressed_video_to_http) {
2269                                 add_packet_for_uncompressed_frame(output_frame.first, output_frame.second);
2270                         } else {
2271                                 assert(global_flags.x264_video_to_http);
2272                                 x264_encoder->add_frame(output_frame.first, output_frame.second);
2273                         }
2274                 }
2275         }
2276 }
2277
2278 void H264EncoderImpl::add_packet_for_uncompressed_frame(int64_t pts, const uint8_t *data)
2279 {
2280         AVPacket pkt;
2281         memset(&pkt, 0, sizeof(pkt));
2282         pkt.buf = nullptr;
2283         pkt.data = const_cast<uint8_t *>(data);
2284         pkt.size = frame_width * frame_height * 2;
2285         pkt.stream_index = 0;
2286         pkt.flags = AV_PKT_FLAG_KEY;
2287         stream_mux->add_packet(pkt, pts, pts);
2288 }
2289
2290 namespace {
2291
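// Copy "height" rows of "src_width" bytes each from a tightly packed source
// into a destination whose rows are "dst_pitch" bytes apart. For instance,
// copying a 1280-byte-wide luma plane into a surface with a 1536-byte pitch
// copies 1280 bytes per row and skips the 256 bytes of per-row padding.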
2292 void memcpy_with_pitch(uint8_t *dst, const uint8_t *src, size_t src_width, size_t dst_pitch, size_t height)
2293 {
2294         if (src_width == dst_pitch) {
2295                 memcpy(dst, src, src_width * height);
2296         } else {
2297                 for (size_t y = 0; y < height; ++y) {
2298                         const uint8_t *sptr = src + y * src_width;
2299                         uint8_t *dptr = dst + y * dst_pitch;
2300                         memcpy(dptr, sptr, src_width);
2301                 }
2302         }
2303 }
2304
2305 }  // namespace
2306
2307 void H264EncoderImpl::encode_frame(H264EncoderImpl::PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
2308                                    int frame_type, int64_t pts, int64_t dts)
2309 {
2310         // Wait for the GPU to be done with the frame.
2311         GLenum sync_status;
2312         do {
2313                 sync_status = glClientWaitSync(frame.fence.get(), 0, 1000000000);
2314                 check_error();
2315         } while (sync_status == GL_TIMEOUT_EXPIRED);
2316         assert(sync_status != GL_WAIT_FAILED);
2317
2318         // Release back any input frames we needed to render this frame.
2319         frame.input_frames.clear();
2320
2321         GLSurface *surf = &gl_surfaces[display_frame_num % SURFACE_NUM];
2322         VAStatus va_status;
2323
2324         if (use_zerocopy) {
2325                 eglDestroyImageKHR(eglGetCurrentDisplay(), surf->y_egl_image);
2326                 eglDestroyImageKHR(eglGetCurrentDisplay(), surf->cbcr_egl_image);
2327                 va_status = vaReleaseBufferHandle(va_dpy, surf->surface_image.buf);
2328                 CHECK_VASTATUS(va_status, "vaReleaseBufferHandle");
2329         } else {
2330                 unsigned char *surface_p = nullptr;
2331                 vaMapBuffer(va_dpy, surf->surface_image.buf, (void **)&surface_p);
2332
2333                 unsigned char *va_y_ptr = (unsigned char *)surface_p + surf->surface_image.offsets[0];
2334                 memcpy_with_pitch(va_y_ptr, surf->y_ptr, frame_width, surf->surface_image.pitches[0], frame_height);
2335
2336                 unsigned char *va_cbcr_ptr = (unsigned char *)surface_p + surf->surface_image.offsets[1];
2337                 memcpy_with_pitch(va_cbcr_ptr, surf->cbcr_ptr, (frame_width / 2) * sizeof(uint16_t), surf->surface_image.pitches[1], frame_height / 2);
2338
2339                 va_status = vaUnmapBuffer(va_dpy, surf->surface_image.buf);
2340                 CHECK_VASTATUS(va_status, "vaUnmapBuffer");
2341
2342                 if (global_flags.uncompressed_video_to_http ||
2343                     global_flags.x264_video_to_http) {
2344                         // Add uncompressed video. (Note that pts == dts here.)
2345                         // Delay needs to match audio.
2346                         pair<int64_t, const uint8_t *> output_frame = reorderer->reorder_frame(pts + global_delay(), reinterpret_cast<uint8_t *>(surf->y_ptr));
2347                         if (output_frame.second != nullptr) {
2348                                 if (global_flags.uncompressed_video_to_http) {
2349                                         add_packet_for_uncompressed_frame(output_frame.first, output_frame.second);
2350                                 } else {
2351                                         assert(global_flags.x264_video_to_http);
2352                                         x264_encoder->add_frame(output_frame.first, output_frame.second);
2353                                 }
2354                         }
2355                 }
2356         }
2357
2358         va_status = vaDestroyImage(va_dpy, surf->surface_image.image_id);
2359         CHECK_VASTATUS(va_status, "vaDestroyImage");
2360
2361         // Schedule the frame for encoding.
2362         VASurfaceID va_surface = surf->src_surface;
2363         va_status = vaBeginPicture(va_dpy, context_id, va_surface);
2364         CHECK_VASTATUS(va_status, "vaBeginPicture");
2365
2366         if (frame_type == FRAME_IDR) {
2367                 render_sequence();
2368                 render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
2369                 if (h264_packedheader) {
2370                         render_packedsequence();
2371                         render_packedpicture();
2372                 }
2373         } else {
2374                 //render_sequence();
2375                 render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
2376         }
2377         render_slice(encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type);
2378
2379         va_status = vaEndPicture(va_dpy, context_id);
2380         CHECK_VASTATUS(va_status, "vaEndPicture");
2381
2382         // The encode is now in flight (the GPU job has been kicked off
2383         // asynchronously); hand the frame's metadata to the storage thread.
2384         storage_task tmp;
2385         tmp.display_order = display_frame_num;
2386         tmp.frame_type = frame_type;
2387         tmp.pts = pts;
2388         tmp.dts = dts;
2389         storage_task_enqueue(move(tmp));
2390
2391         update_ReferenceFrames(frame_type);
2392 }
2393
2394 // Proxy object.
2395 H264Encoder::H264Encoder(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
2396         : impl(new H264EncoderImpl(surface, va_display, width, height, httpd)) {}
2397
2398 // Must be defined here because unique_ptr<> destructor needs to know the impl.
2399 H264Encoder::~H264Encoder() {}
2400
2401 void H264Encoder::add_audio(int64_t pts, vector<float> audio)
2402 {
2403         impl->add_audio(pts, audio);
2404 }
2405
2406 bool H264Encoder::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
2407 {
2408         return impl->begin_frame(y_tex, cbcr_tex);
2409 }
2410
2411 RefCountedGLsync H264Encoder::end_frame(int64_t pts, const vector<RefCountedFrame> &input_frames)
2412 {
2413         return impl->end_frame(pts, input_frames);
2414 }
2415
2416 void H264Encoder::shutdown()
2417 {
2418         impl->shutdown();
2419 }
2420
2421 void H264Encoder::open_output_file(const std::string &filename)
2422 {
2423         impl->open_output_file(filename);
2424 }
2425
2426 void H264Encoder::close_output_file()
2427 {
2428         impl->close_output_file();
2429 }