2 #include "h264encode.h"
4 #include <movit/util.h>
5 #include <EGL/eglplatform.h>
11 #include <libavcodec/avcodec.h>
12 #include <libavformat/avformat.h>
13 #include <libavutil/channel_layout.h>
14 #include <libavutil/frame.h>
15 #include <libavutil/rational.h>
16 #include <libavutil/samplefmt.h>
18 #include <libdrm/drm_fourcc.h>
24 #include <va/va_drm.h>
25 #include <va/va_drmcommon.h>
26 #include <va/va_enc_h264.h>
27 #include <va/va_x11.h>
29 #include <condition_variable>
50 #define CHECK_VASTATUS(va_status, func) \
51 if (va_status != VA_STATUS_SUCCESS) { \
52 fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
56 #define BUFFER_OFFSET(i) ((char *)NULL + (i))
58 //#include "loadsurface.h"
60 #define NAL_REF_IDC_NONE 0
61 #define NAL_REF_IDC_LOW 1
62 #define NAL_REF_IDC_MEDIUM 2
63 #define NAL_REF_IDC_HIGH 3
71 #define SLICE_TYPE_P 0
72 #define SLICE_TYPE_B 1
73 #define SLICE_TYPE_I 2
74 #define IS_P_SLICE(type) (SLICE_TYPE_P == (type))
75 #define IS_B_SLICE(type) (SLICE_TYPE_B == (type))
76 #define IS_I_SLICE(type) (SLICE_TYPE_I == (type))
79 #define ENTROPY_MODE_CAVLC 0
80 #define ENTROPY_MODE_CABAC 1
82 #define PROFILE_IDC_BASELINE 66
83 #define PROFILE_IDC_MAIN 77
84 #define PROFILE_IDC_HIGH 100
86 #define BITSTREAM_ALLOCATE_STEPPING 4096
87 #define SURFACE_NUM 16 /* 16 surfaces for source YUV */
88 #define MAX_NUM_REF1 16 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
89 #define MAX_NUM_REF2 32 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
91 static constexpr unsigned int MaxFrameNum = (1 << 16);  // 2^Log2MaxFrameNum.
92 static constexpr unsigned int MaxPicOrderCntLsb = (1 << 8);  // 2^Log2MaxPicOrderCntLsb.
93 static constexpr unsigned int Log2MaxFrameNum = 16;
94 static constexpr unsigned int Log2MaxPicOrderCntLsb = 8;
95 static constexpr int rc_default_modes[] = { // Priority list of modes.
98 VA_RC_VBR_CONSTRAINED,
104 /* thread to save coded data */
105 #define SRC_SURFACE_FREE 0
106 #define SRC_SURFACE_IN_ENCODING 1
109 unsigned int *buffer;
111 int max_size_in_dword;
113 typedef struct __bitstream bitstream;
117 // H.264 video comes out in encoding order (e.g. with two B-frames:
118 // 0, 3, 1, 2, 6, 4, 5, etc.), but uncompressed video needs to
119 // come in the right order. Since we do everything, including waiting
120 // for the frames to come out of OpenGL, in encoding order, we need
121 // a reordering buffer for uncompressed frames so that they come out
122 // correctly. We go the super-lazy way of not making it understand
123 // anything about the true order (which introduces some extra latency,
124 // though); we know that we never need more than ip_period - 1 frames
125 // in the reorder buffer, and can just sort on that.
127 // The class also deals with keeping a freelist as needed.
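//
// A worked illustration (hypothetical pts values, queue_length == 2, i.e. two
// B-frames): frames arrive in encoding order with pts 0, 3, 1, 2, 6, 4, 5.
// The first call returns (-1, nullptr) while the queue fills; every call after
// that pops the smallest pts buffered so far, so the frames come back out as
// 0, 1, 2, 3, 4, 5 -- display order, delayed by one call.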
128 class FrameReorderer {
130 FrameReorderer(unsigned queue_length, int width, int height);
132 // Inserts the given frame, and returns the next frame to output with its pts, if any. Otherwise -1 and nullptr.
133 // Does _not_ take ownership of data; a copy is taken if needed.
134 // The returned pointer is valid until the next call to reorder_frame, or destruction.
135 // As a special case, if queue_length == 0, will just return pts and data (no reordering needed).
136 pair<int64_t, const uint8_t *> reorder_frame(int64_t pts, const uint8_t *data);
138 // The same as reorder_frame, but without inserting anything. Used to empty the queue.
139 pair<int64_t, const uint8_t *> get_first_frame();
141 bool empty() const { return frames.empty(); }
144 unsigned queue_length;
147 priority_queue<pair<int64_t, uint8_t *>> frames;
148 stack<uint8_t *> freelist; // Includes the last value returned from reorder_frame.
150 // Owns all the pointers. Normally, freelist and frames could do this themselves,
151 // except priority_queue doesn't work well with movable-only types.
152 vector<unique_ptr<uint8_t[]>> owner;
155 FrameReorderer::FrameReorderer(unsigned queue_length, int width, int height)
156 : queue_length(queue_length), width(width), height(height)
158 for (unsigned i = 0; i < queue_length; ++i) {
159 owner.emplace_back(new uint8_t[width * height * 2]);
160 freelist.push(owner.back().get());
164 pair<int64_t, const uint8_t *> FrameReorderer::reorder_frame(int64_t pts, const uint8_t *data)
166 if (queue_length == 0) {
167 return make_pair(pts, data);
170 assert(!freelist.empty());
171 uint8_t *storage = freelist.top();
173 memcpy(storage, data, width * height * 2);
174 frames.emplace(-pts, storage); // Invert pts to get smallest first.
176 if (frames.size() >= queue_length) {
177 return get_first_frame();
179 return make_pair(-1, nullptr);
183 pair<int64_t, const uint8_t *> FrameReorderer::get_first_frame()
185 assert(!frames.empty());
186 pair<int64_t, uint8_t *> storage = frames.top();
188 int64_t pts = storage.first;
189 freelist.push(storage.second);
190 return make_pair(-pts, storage.second); // Re-invert pts (see reorder_frame()).
193 class H264EncoderImpl {
195 H264EncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd);
197 void add_audio(int64_t pts, vector<float> audio);
198 bool begin_frame(GLuint *y_tex, GLuint *cbcr_tex);
199 RefCountedGLsync end_frame(int64_t pts, const vector<RefCountedFrame> &input_frames);
203 struct storage_task {
204 unsigned long long display_order;
209 struct PendingFrame {
210 RefCountedGLsync fence;
211 vector<RefCountedFrame> input_frames;
215 void encode_thread_func();
216 void encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts);
217 void add_packet_for_uncompressed_frame(int64_t pts, const uint8_t *data);
218 void encode_frame(PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
219 int frame_type, int64_t pts, int64_t dts);
220 void storage_task_thread();
221 void storage_task_enqueue(storage_task task);
222 void save_codeddata(storage_task task);
223 int render_packedsequence();
224 int render_packedpicture();
225 void render_packedslice();
226 int render_sequence();
227 int render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num);
228 void sps_rbsp(bitstream *bs);
229 void pps_rbsp(bitstream *bs);
230 int build_packed_pic_buffer(unsigned char **header_buffer);
231 int render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type);
232 void slice_header(bitstream *bs);
233 int build_packed_seq_buffer(unsigned char **header_buffer);
234 int build_packed_slice_buffer(unsigned char **header_buffer);
235 int init_va(const string &va_display);
237 VADisplay va_open_display(const string &va_display);
238 void va_close_display(VADisplay va_dpy);
240 int release_encode();
241 void update_ReferenceFrames(int frame_type);
242 int update_RefPicList(int frame_type);
244 bool is_shutdown = false;
248 thread encode_thread, storage_thread;
250 mutex storage_task_queue_mutex;
251 condition_variable storage_task_queue_changed;
252 int srcsurface_status[SURFACE_NUM]; // protected by storage_task_queue_mutex
253 queue<storage_task> storage_task_queue; // protected by storage_task_queue_mutex
254 bool storage_thread_should_quit = false; // protected by storage_task_queue_mutex
256 mutex frame_queue_mutex;
257 condition_variable frame_queue_nonempty;
258 bool encode_thread_should_quit = false; // under frame_queue_mutex
260 int current_storage_frame;
262 map<int, PendingFrame> pending_video_frames; // under frame_queue_mutex
263 map<int64_t, vector<float>> pending_audio_frames; // under frame_queue_mutex
266 AVCodecContext *context_audio;
267 AVFrame *audio_frame = nullptr;
269 unique_ptr<FrameReorderer> reorderer;
271 Display *x11_display = nullptr;
273 // Encoder parameters
275 VAProfile h264_profile = (VAProfile)~0;
276 VAConfigAttrib config_attrib[VAConfigAttribTypeMax];
277 int config_attrib_num = 0, enc_packed_header_idx;
280 VASurfaceID src_surface, ref_surface;
281 VABufferID coded_buf;
283 VAImage surface_image;
284 GLuint y_tex, cbcr_tex;
286 // Only if use_zerocopy == true.
287 EGLImage y_egl_image, cbcr_egl_image;
289 // Only if use_zerocopy == false.
291 uint8_t *y_ptr, *cbcr_ptr;
292 size_t y_offset, cbcr_offset;
294 GLSurface gl_surfaces[SURFACE_NUM];
296 VAConfigID config_id;
297 VAContextID context_id;
298 VAEncSequenceParameterBufferH264 seq_param;
299 VAEncPictureParameterBufferH264 pic_param;
300 VAEncSliceParameterBufferH264 slice_param;
301 VAPictureH264 CurrentCurrPic;
302 VAPictureH264 ReferenceFrames[MAX_NUM_REF1], RefPicList0_P[MAX_NUM_REF2], RefPicList0_B[MAX_NUM_REF2], RefPicList1_B[MAX_NUM_REF2];
304 // Static quality settings.
305 static constexpr unsigned int frame_bitrate = 15000000 / 60; // Doesn't really matter; only initial_qp does.
306 static constexpr unsigned int num_ref_frames = 2;
307 static constexpr int initial_qp = 15;
308 static constexpr int minimal_qp = 0;
309 static constexpr int intra_period = 30;
310 static constexpr int intra_idr_period = MAX_FPS; // About a second; more at lower frame rates. Not ideal.
312 // Quality settings that are meant to be static, but might be overridden
314 int constraint_set_flag = 0;
315 int h264_packedheader = 0; /* support pack header? */
316 int h264_maxref = (1<<16|1);
317 int h264_entropy_mode = 1; /* cabac */
321 unsigned int current_frame_num = 0;
322 unsigned int numShortTerm = 0;
326 int frame_width_mbaligned;
327 int frame_height_mbaligned;
330 // Supposedly vaRenderPicture() is supposed to destroy the buffer implicitly,
331 // but if we don't delete it here, we get leaks. The GStreamer implementation
333 static void render_picture_and_delete(VADisplay dpy, VAContextID context, VABufferID *buffers, int num_buffers)
335 VAStatus va_status = vaRenderPicture(dpy, context, buffers, num_buffers);
336 CHECK_VASTATUS(va_status, "vaRenderPicture");
338 for (int i = 0; i < num_buffers; ++i) {
339 va_status = vaDestroyBuffer(dpy, buffers[i]);
340 CHECK_VASTATUS(va_status, "vaDestroyBuffer");
345 va_swap32(unsigned int val)
347 unsigned char *pval = (unsigned char *)&val;
349 return ((pval[0] << 24) |
356 bitstream_start(bitstream *bs)
358 bs->max_size_in_dword = BITSTREAM_ALLOCATE_STEPPING;
359 bs->buffer = (unsigned int *)calloc(bs->max_size_in_dword * sizeof(int), 1);
364 bitstream_end(bitstream *bs)
366 int pos = (bs->bit_offset >> 5);
367 int bit_offset = (bs->bit_offset & 0x1f);
368 int bit_left = 32 - bit_offset;
371 bs->buffer[pos] = va_swap32((bs->buffer[pos] << bit_left));
376 bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits)
378 int pos = (bs->bit_offset >> 5);
379 int bit_offset = (bs->bit_offset & 0x1f);
380 int bit_left = 32 - bit_offset;
385 bs->bit_offset += size_in_bits;
387 if (bit_left > size_in_bits) {
388 bs->buffer[pos] = (bs->buffer[pos] << size_in_bits | val);
390 size_in_bits -= bit_left;
391 if (bit_left >= 32) {
392 bs->buffer[pos] = (val >> size_in_bits);
394 bs->buffer[pos] = (bs->buffer[pos] << bit_left) | (val >> size_in_bits);
396 bs->buffer[pos] = va_swap32(bs->buffer[pos]);
398 if (pos + 1 == bs->max_size_in_dword) {
399 bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING;
400 bs->buffer = (unsigned int *)realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int));
403 bs->buffer[pos + 1] = val;
408 bitstream_put_ue(bitstream *bs, unsigned int val)
410 int size_in_bits = 0;
418 bitstream_put_ui(bs, 0, size_in_bits - 1); // leading zero
419 bitstream_put_ui(bs, val, size_in_bits);
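// (For reference, this emits standard Exp-Golomb ue(v) codes:
// val 0 -> "1", 1 -> "010", 2 -> "011", 3 -> "00100", and so on.)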
423 bitstream_put_se(bitstream *bs, int val)
425 unsigned int new_val;
430 new_val = 2 * val - 1;
432 bitstream_put_ue(bs, new_val);
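// (This is the standard signed Exp-Golomb se(v) mapping: 0 -> 0, 1 -> 1,
// -1 -> 2, 2 -> 3, -2 -> 4, ..., with the result written as ue(v) above.)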
436 bitstream_byte_aligning(bitstream *bs, int bit)
438 int bit_offset = (bs->bit_offset & 0x7);
439 int bit_left = 8 - bit_offset;
445 assert(bit == 0 || bit == 1);
448 new_val = (1 << bit_left) - 1;
452 bitstream_put_ui(bs, new_val, bit_left);
456 rbsp_trailing_bits(bitstream *bs)
458 bitstream_put_ui(bs, 1, 1);
459 bitstream_byte_aligning(bs, 0);
462 static void nal_start_code_prefix(bitstream *bs)
464 bitstream_put_ui(bs, 0x00000001, 32);
467 static void nal_header(bitstream *bs, int nal_ref_idc, int nal_unit_type)
469 bitstream_put_ui(bs, 0, 1); /* forbidden_zero_bit: 0 */
470 bitstream_put_ui(bs, nal_ref_idc, 2);
471 bitstream_put_ui(bs, nal_unit_type, 5);
474 void H264EncoderImpl::sps_rbsp(bitstream *bs)
476 int profile_idc = PROFILE_IDC_BASELINE;
478 if (h264_profile == VAProfileH264High)
479 profile_idc = PROFILE_IDC_HIGH;
480 else if (h264_profile == VAProfileH264Main)
481 profile_idc = PROFILE_IDC_MAIN;
483 bitstream_put_ui(bs, profile_idc, 8); /* profile_idc */
484 bitstream_put_ui(bs, !!(constraint_set_flag & 1), 1); /* constraint_set0_flag */
485 bitstream_put_ui(bs, !!(constraint_set_flag & 2), 1); /* constraint_set1_flag */
486 bitstream_put_ui(bs, !!(constraint_set_flag & 4), 1); /* constraint_set2_flag */
487 bitstream_put_ui(bs, !!(constraint_set_flag & 8), 1); /* constraint_set3_flag */
488 bitstream_put_ui(bs, 0, 4); /* reserved_zero_4bits */
489 bitstream_put_ui(bs, seq_param.level_idc, 8); /* level_idc */
490 bitstream_put_ue(bs, seq_param.seq_parameter_set_id); /* seq_parameter_set_id */
492 if ( profile_idc == PROFILE_IDC_HIGH) {
493 bitstream_put_ue(bs, 1); /* chroma_format_idc = 1, 4:2:0 */
494 bitstream_put_ue(bs, 0); /* bit_depth_luma_minus8 */
495 bitstream_put_ue(bs, 0); /* bit_depth_chroma_minus8 */
496 bitstream_put_ui(bs, 0, 1); /* qpprime_y_zero_transform_bypass_flag */
497 bitstream_put_ui(bs, 0, 1); /* seq_scaling_matrix_present_flag */
500 bitstream_put_ue(bs, seq_param.seq_fields.bits.log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
501 bitstream_put_ue(bs, seq_param.seq_fields.bits.pic_order_cnt_type); /* pic_order_cnt_type */
503 if (seq_param.seq_fields.bits.pic_order_cnt_type == 0)
504 bitstream_put_ue(bs, seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4); /* log2_max_pic_order_cnt_lsb_minus4 */
509 bitstream_put_ue(bs, seq_param.max_num_ref_frames); /* num_ref_frames */
510 bitstream_put_ui(bs, 0, 1); /* gaps_in_frame_num_value_allowed_flag */
512 bitstream_put_ue(bs, seq_param.picture_width_in_mbs - 1); /* pic_width_in_mbs_minus1 */
513 bitstream_put_ue(bs, seq_param.picture_height_in_mbs - 1); /* pic_height_in_map_units_minus1 */
514 bitstream_put_ui(bs, seq_param.seq_fields.bits.frame_mbs_only_flag, 1); /* frame_mbs_only_flag */
516 if (!seq_param.seq_fields.bits.frame_mbs_only_flag) {
520 bitstream_put_ui(bs, seq_param.seq_fields.bits.direct_8x8_inference_flag, 1); /* direct_8x8_inference_flag */
521 bitstream_put_ui(bs, seq_param.frame_cropping_flag, 1); /* frame_cropping_flag */
523 if (seq_param.frame_cropping_flag) {
524 bitstream_put_ue(bs, seq_param.frame_crop_left_offset); /* frame_crop_left_offset */
525 bitstream_put_ue(bs, seq_param.frame_crop_right_offset); /* frame_crop_right_offset */
526 bitstream_put_ue(bs, seq_param.frame_crop_top_offset); /* frame_crop_top_offset */
527 bitstream_put_ue(bs, seq_param.frame_crop_bottom_offset); /* frame_crop_bottom_offset */
530 //if ( frame_bit_rate < 0 ) { //TODO EW: the vui header isn't correct
532 bitstream_put_ui(bs, 0, 1); /* vui_parameters_present_flag */
534 bitstream_put_ui(bs, 1, 1); /* vui_parameters_present_flag */
535 bitstream_put_ui(bs, 0, 1); /* aspect_ratio_info_present_flag */
536 bitstream_put_ui(bs, 0, 1); /* overscan_info_present_flag */
537 bitstream_put_ui(bs, 1, 1); /* video_signal_type_present_flag */
539 bitstream_put_ui(bs, 5, 3); /* video_format (5 = Unspecified) */
540 bitstream_put_ui(bs, 0, 1); /* video_full_range_flag */
541 bitstream_put_ui(bs, 1, 1); /* colour_description_present_flag */
543 bitstream_put_ui(bs, 1, 8); /* colour_primaries (1 = BT.709) */
544 bitstream_put_ui(bs, 2, 8); /* transfer_characteristics (2 = unspecified, since we use sRGB) */
545 bitstream_put_ui(bs, 6, 8); /* matrix_coefficients (6 = BT.601/SMPTE 170M) */
548 bitstream_put_ui(bs, 0, 1); /* chroma_loc_info_present_flag */
549 bitstream_put_ui(bs, 1, 1); /* timing_info_present_flag */
551 bitstream_put_ui(bs, 1, 32);  /* num_units_in_tick */
552 bitstream_put_ui(bs, TIMEBASE * 2, 32);  /* time_scale */
553 bitstream_put_ui(bs, 1, 1);  /* fixed_frame_rate_flag */
555 bitstream_put_ui(bs, 1, 1); /* nal_hrd_parameters_present_flag */
558 bitstream_put_ue(bs, 0); /* cpb_cnt_minus1 */
559 bitstream_put_ui(bs, 4, 4); /* bit_rate_scale */
560 bitstream_put_ui(bs, 6, 4); /* cpb_size_scale */
562 bitstream_put_ue(bs, frame_bitrate - 1); /* bit_rate_value_minus1[0] */
563 bitstream_put_ue(bs, frame_bitrate*8 - 1); /* cpb_size_value_minus1[0] */
564 bitstream_put_ui(bs, 1, 1); /* cbr_flag[0] */
566 bitstream_put_ui(bs, 23, 5); /* initial_cpb_removal_delay_length_minus1 */
567 bitstream_put_ui(bs, 23, 5); /* cpb_removal_delay_length_minus1 */
568 bitstream_put_ui(bs, 23, 5); /* dpb_output_delay_length_minus1 */
569 bitstream_put_ui(bs, 23, 5); /* time_offset_length */
571 bitstream_put_ui(bs, 0, 1); /* vcl_hrd_parameters_present_flag */
572 bitstream_put_ui(bs, 0, 1); /* low_delay_hrd_flag */
574 bitstream_put_ui(bs, 0, 1); /* pic_struct_present_flag */
575 bitstream_put_ui(bs, 0, 1); /* bitstream_restriction_flag */
578 rbsp_trailing_bits(bs); /* rbsp_trailing_bits */
582 void H264EncoderImpl::pps_rbsp(bitstream *bs)
584 bitstream_put_ue(bs, pic_param.pic_parameter_set_id); /* pic_parameter_set_id */
585 bitstream_put_ue(bs, pic_param.seq_parameter_set_id); /* seq_parameter_set_id */
587 bitstream_put_ui(bs, pic_param.pic_fields.bits.entropy_coding_mode_flag, 1); /* entropy_coding_mode_flag */
589 bitstream_put_ui(bs, 0, 1); /* pic_order_present_flag: 0 */
591 bitstream_put_ue(bs, 0); /* num_slice_groups_minus1 */
593 bitstream_put_ue(bs, pic_param.num_ref_idx_l0_active_minus1); /* num_ref_idx_l0_active_minus1 */
594 bitstream_put_ue(bs, pic_param.num_ref_idx_l1_active_minus1); /* num_ref_idx_l1_active_minus1 */
596 bitstream_put_ui(bs, pic_param.pic_fields.bits.weighted_pred_flag, 1); /* weighted_pred_flag: 0 */
597 bitstream_put_ui(bs, pic_param.pic_fields.bits.weighted_bipred_idc, 2); /* weighted_bipred_idc: 0 */
599 bitstream_put_se(bs, pic_param.pic_init_qp - 26); /* pic_init_qp_minus26 */
600 bitstream_put_se(bs, 0); /* pic_init_qs_minus26 */
601 bitstream_put_se(bs, 0); /* chroma_qp_index_offset */
603 bitstream_put_ui(bs, pic_param.pic_fields.bits.deblocking_filter_control_present_flag, 1); /* deblocking_filter_control_present_flag */
604 bitstream_put_ui(bs, 0, 1); /* constrained_intra_pred_flag */
605 bitstream_put_ui(bs, 0, 1); /* redundant_pic_cnt_present_flag */
608 bitstream_put_ui(bs, pic_param.pic_fields.bits.transform_8x8_mode_flag, 1); /* transform_8x8_mode_flag */
609 bitstream_put_ui(bs, 0, 1); /* pic_scaling_matrix_present_flag */
610 bitstream_put_se(bs, pic_param.second_chroma_qp_index_offset); /* second_chroma_qp_index_offset */
612 rbsp_trailing_bits(bs);
615 void H264EncoderImpl::slice_header(bitstream *bs)
617 int first_mb_in_slice = slice_param.macroblock_address;
619 bitstream_put_ue(bs, first_mb_in_slice); /* first_mb_in_slice: 0 */
620 bitstream_put_ue(bs, slice_param.slice_type); /* slice_type */
621 bitstream_put_ue(bs, slice_param.pic_parameter_set_id); /* pic_parameter_set_id: 0 */
622 bitstream_put_ui(bs, pic_param.frame_num, seq_param.seq_fields.bits.log2_max_frame_num_minus4 + 4); /* frame_num */
624 /* frame_mbs_only_flag == 1 */
625 if (!seq_param.seq_fields.bits.frame_mbs_only_flag) {
630 if (pic_param.pic_fields.bits.idr_pic_flag)
631 bitstream_put_ue(bs, slice_param.idr_pic_id); /* idr_pic_id: 0 */
633 if (seq_param.seq_fields.bits.pic_order_cnt_type == 0) {
634 bitstream_put_ui(bs, pic_param.CurrPic.TopFieldOrderCnt, seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 + 4);
635 /* pic_order_present_flag == 0 */
641 /* redundant_pic_cnt_present_flag == 0 */
643 if (IS_P_SLICE(slice_param.slice_type)) {
644 bitstream_put_ui(bs, slice_param.num_ref_idx_active_override_flag, 1); /* num_ref_idx_active_override_flag: */
646 if (slice_param.num_ref_idx_active_override_flag)
647 bitstream_put_ue(bs, slice_param.num_ref_idx_l0_active_minus1);
649 /* ref_pic_list_reordering */
650 bitstream_put_ui(bs, 0, 1); /* ref_pic_list_reordering_flag_l0: 0 */
651 } else if (IS_B_SLICE(slice_param.slice_type)) {
652 bitstream_put_ui(bs, slice_param.direct_spatial_mv_pred_flag, 1); /* direct_spatial_mv_pred: 1 */
654 bitstream_put_ui(bs, slice_param.num_ref_idx_active_override_flag, 1); /* num_ref_idx_active_override_flag: */
656 if (slice_param.num_ref_idx_active_override_flag) {
657 bitstream_put_ue(bs, slice_param.num_ref_idx_l0_active_minus1);
658 bitstream_put_ue(bs, slice_param.num_ref_idx_l1_active_minus1);
661 /* ref_pic_list_reordering */
662 bitstream_put_ui(bs, 0, 1); /* ref_pic_list_reordering_flag_l0: 0 */
663 bitstream_put_ui(bs, 0, 1); /* ref_pic_list_reordering_flag_l1: 0 */
666 if ((pic_param.pic_fields.bits.weighted_pred_flag &&
667 IS_P_SLICE(slice_param.slice_type)) ||
668 ((pic_param.pic_fields.bits.weighted_bipred_idc == 1) &&
669 IS_B_SLICE(slice_param.slice_type))) {
670 /* FIXME: fill weight/offset table */
674 /* dec_ref_pic_marking */
675 if (pic_param.pic_fields.bits.reference_pic_flag) { /* nal_ref_idc != 0 */
676 unsigned char no_output_of_prior_pics_flag = 0;
677 unsigned char long_term_reference_flag = 0;
678 unsigned char adaptive_ref_pic_marking_mode_flag = 0;
680 if (pic_param.pic_fields.bits.idr_pic_flag) {
681 bitstream_put_ui(bs, no_output_of_prior_pics_flag, 1); /* no_output_of_prior_pics_flag: 0 */
682 bitstream_put_ui(bs, long_term_reference_flag, 1); /* long_term_reference_flag: 0 */
684 bitstream_put_ui(bs, adaptive_ref_pic_marking_mode_flag, 1); /* adaptive_ref_pic_marking_mode_flag: 0 */
688 if (pic_param.pic_fields.bits.entropy_coding_mode_flag &&
689 !IS_I_SLICE(slice_param.slice_type))
690 bitstream_put_ue(bs, slice_param.cabac_init_idc); /* cabac_init_idc: 0 */
692 bitstream_put_se(bs, slice_param.slice_qp_delta); /* slice_qp_delta: 0 */
694 /* ignore for SP/SI */
696 if (pic_param.pic_fields.bits.deblocking_filter_control_present_flag) {
697 bitstream_put_ue(bs, slice_param.disable_deblocking_filter_idc); /* disable_deblocking_filter_idc: 0 */
699 if (slice_param.disable_deblocking_filter_idc != 1) {
700 bitstream_put_se(bs, slice_param.slice_alpha_c0_offset_div2); /* slice_alpha_c0_offset_div2: 2 */
701 bitstream_put_se(bs, slice_param.slice_beta_offset_div2); /* slice_beta_offset_div2: 2 */
705 if (pic_param.pic_fields.bits.entropy_coding_mode_flag) {
706 bitstream_byte_aligning(bs, 1);
710 int H264EncoderImpl::build_packed_pic_buffer(unsigned char **header_buffer)
714 bitstream_start(&bs);
715 nal_start_code_prefix(&bs);
716 nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS);
720 *header_buffer = (unsigned char *)bs.buffer;
721 return bs.bit_offset;
725 H264EncoderImpl::build_packed_seq_buffer(unsigned char **header_buffer)
729 bitstream_start(&bs);
730 nal_start_code_prefix(&bs);
731 nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS);
735 *header_buffer = (unsigned char *)bs.buffer;
736 return bs.bit_offset;
739 int H264EncoderImpl::build_packed_slice_buffer(unsigned char **header_buffer)
742 int is_idr = !!pic_param.pic_fields.bits.idr_pic_flag;
743 int is_ref = !!pic_param.pic_fields.bits.reference_pic_flag;
745 bitstream_start(&bs);
746 nal_start_code_prefix(&bs);
748 if (IS_I_SLICE(slice_param.slice_type)) {
749 nal_header(&bs, NAL_REF_IDC_HIGH, is_idr ? NAL_IDR : NAL_NON_IDR);
750 } else if (IS_P_SLICE(slice_param.slice_type)) {
751 nal_header(&bs, NAL_REF_IDC_MEDIUM, NAL_NON_IDR);
753 assert(IS_B_SLICE(slice_param.slice_type));
754 nal_header(&bs, is_ref ? NAL_REF_IDC_LOW : NAL_REF_IDC_NONE, NAL_NON_IDR);
760 *header_buffer = (unsigned char *)bs.buffer;
761 return bs.bit_offset;
766 Assume frame sequence is: Frame#0, #1, #2, ..., #M, ..., #X, ... (encoding order)
767 1) period between Frame #X and Frame #N = #X - #N
768 2) 0 means infinite for intra_period/intra_idr_period, and 0 is invalid for ip_period
769 3) intra_idr_period must be a multiple of intra_period (when intra_period > 0), and intra_period a multiple of ip_period
770 4) intra_period and intra_idr_period take precedence over ip_period
771 5) if ip_period > 1, intra_period and intra_idr_period are not the strict periods
772 of I/IDR frames; see the examples below
773 -------------------------------------------------------------------
774 intra_period intra_idr_period ip_period frame sequence (intra_period/intra_idr_period/ip_period)
775 0 ignored 1 IDRPPPPPPP ... (No IDR/I any more)
776 0 ignored >=2 IDR(PBB)(PBB)... (No IDR/I any more)
777 1 0 ignored IDRIIIIIII... (No IDR any more)
778 1 1 ignored IDR IDR IDR IDR...
779 1 >=2 ignored IDRII IDRII IDR... (1/3/ignore)
780 >=2 0 1 IDRPPP IPPP I... (3/0/1)
781 >=2 0 >=2 IDR(PBB)(PBB)(IBB) (6/0/3)
782 (PBB)(IBB)(PBB)(IBB)...
783 >=2 >=2 1 IDRPPPPP IPPPPP IPPPPP (6/18/1)
784 IDRPPPPP IPPPPP IPPPPP...
785 >=2 >=2 >=2 {IDR(PBB)(PBB)(IBB)(PBB)(IBB)(PBB)} (6/18/3)
786 {IDR(PBB)(PBB)(IBB)(PBB)(IBB)(PBB)}...
787 {IDR(PBB)(PBB)(IBB)(PBB)} (6/12/3)
788 {IDR(PBB)(PBB)(IBB)(PBB)}...
789 {IDR(PBB)(PBB)} (6/6/3)
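For example, with intra_period = 4, intra_idr_period = 0 and ip_period = 1,
the function below produces IDR P P P I P P P I ... -- an I frame every
fourth frame, and no further IDR frames.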
793 // General pts/dts strategy:
795 // Getting pts and dts right with variable frame rate (VFR) and B-frames can be a
796 // bit tricky. We assume first of all that the frame rate never goes _above_
797 // MAX_FPS, which gives us a frame period N. The decoder can always decode
798 // in at least this speed, as long as dts <= pts (the frame is not
799 // presented before it is decoded). Furthermore, we never have longer chains of
800 // B-frames than a fixed constant C. (In a B-frame chain, we say that the base
801 // I/P-frame has order O=0, the B-frame depending on it directly has order O=1,
802 // etc. The last frame in the chain, which no B-frames depend on, is the “tip”
803 // frame, with an order O <= C.)
805 // Many strategies are possible, but we establish these rules:
807 // - Tip frames have dts = pts - (C-O)*N.
808 // - Non-tip frames have dts = dts_last + N.
810 // An example, with C=2 and N=10 and the data flow showed with arrows:
813 // pts: 30 40 50 60 70 80
815 // dts: 10 30 20 60 50←40
820 // To show that this works fine also with irregular spacings, let's say that
821 // the third frame is delayed a bit (something earlier was dropped). Now the
822 // situation looks like this:
825 // pts: 30 40 80 90 100 110
827 // dts: 10 30 20 90 50←40
832 // The resetting on every tip frame makes sure dts never ends up lagging a lot
833 // behind pts, and the subtraction of (C-O)*N makes sure dts <= pts.
835 // In the output of this function, if <pts_lag> is >= 0, it means to reset the
836 // dts from the current pts minus <pts_lag>, while if it's -1, the frame is not
837 // a tip frame and should be given a dts based on the previous one.
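//
// A minimal sketch of how a caller might consume this output (assumed usage,
// not the actual call site; n, pts, dts and last_dts are hypothetical):
//
//   int display_order, frame_type, pts_lag;
//   encoding2display_order(n, intra_period, intra_idr_period, ip_period,
//                          &display_order, &frame_type, &pts_lag);
//   if (pts_lag >= 0) {
//       dts = pts - pts_lag;                  // Tip frame: reset dts.
//   } else {
//       dts = last_dts + TIMEBASE / MAX_FPS;  // Non-tip frame: previous dts + N.
//   }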
842 void encoding2display_order(
843 int encoding_order, int intra_period,
844 int intra_idr_period, int ip_period,
845 int *displaying_order,
846 int *frame_type, int *pts_lag)
848 int encoding_order_gop = 0;
852 if (intra_period == 1) { /* all are I/IDR frames */
853 *displaying_order = encoding_order;
854 if (intra_idr_period == 0)
855 *frame_type = (encoding_order == 0)?FRAME_IDR:FRAME_I;
857 *frame_type = (encoding_order % intra_idr_period == 0)?FRAME_IDR:FRAME_I;
861 if (intra_period == 0)
862 intra_idr_period = 0;
864 if (ip_period == 1) {
865 // No B-frames, sequence is like IDR PPPPP IPPPPP.
866 encoding_order_gop = (intra_idr_period == 0) ? encoding_order : (encoding_order % intra_idr_period);
867 *displaying_order = encoding_order;
869 if (encoding_order_gop == 0) { /* the first frame */
870 *frame_type = FRAME_IDR;
871 } else if (intra_period != 0 && /* have I frames */
872 encoding_order_gop >= 2 &&
873 (encoding_order_gop % intra_period == 0)) {
874 *frame_type = FRAME_I;
876 *frame_type = FRAME_P;
881 // We have B-frames. Sequence is like IDR (PBB)(PBB)(IBB)(PBB).
882 encoding_order_gop = (intra_idr_period == 0) ? encoding_order : (encoding_order % (intra_idr_period + 1));
883 *pts_lag = -1; // Most frames are not tip frames.
885 if (encoding_order_gop == 0) { /* the first frame */
886 *frame_type = FRAME_IDR;
887 *displaying_order = encoding_order;
888 // IDR frames are a special case; I honestly can't find the logic behind
889 // why this is the right thing, but it seems to line up nicely in practice :-)
890 *pts_lag = TIMEBASE / MAX_FPS;
891 } else if (((encoding_order_gop - 1) % ip_period) != 0) { /* B frames */
892 *frame_type = FRAME_B;
893 *displaying_order = encoding_order - 1;
894 if ((encoding_order_gop % ip_period) == 0) {
895 *pts_lag = 0; // Last B-frame.
897 } else if (intra_period != 0 && /* have I frames */
898 encoding_order_gop >= 2 &&
899 ((encoding_order_gop - 1) / ip_period % (intra_period / ip_period)) == 0) {
900 *frame_type = FRAME_I;
901 *displaying_order = encoding_order + ip_period - 1;
903 *frame_type = FRAME_P;
904 *displaying_order = encoding_order + ip_period - 1;
909 static const char *rc_to_string(int rc_mode)
922 case VA_RC_VBR_CONSTRAINED:
923 return "VBR_CONSTRAINED";
929 VADisplay H264EncoderImpl::va_open_display(const string &va_display)
931 if (va_display.empty()) {
932 x11_display = XOpenDisplay(NULL);
934 fprintf(stderr, "error: can't connect to X server!\n");
938 if (global_flags.uncompressed_video_to_http) {
939 fprintf(stderr, "Disabling zerocopy H.264 encoding due to --uncompressed_video_to_http.\n");
940 use_zerocopy = false;
942 return vaGetDisplay(x11_display);
943 } else if (va_display[0] != '/') {
944 x11_display = XOpenDisplay(va_display.c_str());
946 fprintf(stderr, "error: can't connect to X server!\n");
950 if (global_flags.uncompressed_video_to_http) {
951 fprintf(stderr, "Disabling zerocopy H.264 encoding due to --uncompressed_video_to_http.\n");
952 use_zerocopy = false;
954 return vaGetDisplay(x11_display);
956 drm_fd = open(va_display.c_str(), O_RDWR);
958 perror(va_display.c_str());
961 use_zerocopy = false;
962 return vaGetDisplayDRM(drm_fd);
966 void H264EncoderImpl::va_close_display(VADisplay va_dpy)
969 XCloseDisplay(x11_display);
970 x11_display = nullptr;
977 int H264EncoderImpl::init_va(const string &va_display)
979 VAProfile profile_list[]={VAProfileH264High, VAProfileH264Main, VAProfileH264Baseline, VAProfileH264ConstrainedBaseline};
980 VAEntrypoint *entrypoints;
981 int num_entrypoints, slice_entrypoint;
982 int support_encode = 0;
983 int major_ver, minor_ver;
987 va_dpy = va_open_display(va_display);
988 va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
989 CHECK_VASTATUS(va_status, "vaInitialize");
991 num_entrypoints = vaMaxNumEntrypoints(va_dpy);
992 entrypoints = (VAEntrypoint *)malloc(num_entrypoints * sizeof(*entrypoints));
994 fprintf(stderr, "error: failed to initialize VA entrypoints array\n");
998 /* use the highest profile */
999 for (i = 0; i < sizeof(profile_list)/sizeof(profile_list[0]); i++) {
1000 if ((h264_profile != ~0) && h264_profile != profile_list[i])
1003 h264_profile = profile_list[i];
1004 vaQueryConfigEntrypoints(va_dpy, h264_profile, entrypoints, &num_entrypoints);
1005 for (slice_entrypoint = 0; slice_entrypoint < num_entrypoints; slice_entrypoint++) {
1006 if (entrypoints[slice_entrypoint] == VAEntrypointEncSlice) {
1011 if (support_encode == 1)
1015 if (support_encode == 0) {
1016 printf("Can't find VAEntrypointEncSlice for H264 profiles. If you are using a non-Intel GPU\n");
1017 printf("but have one in your system, try launching Nageru with --va-display /dev/dri/renderD128\n");
1018 printf("to use VA-API against DRM instead of X11.\n");
1021 switch (h264_profile) {
1022 case VAProfileH264Baseline:
1024 constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
1025 h264_entropy_mode = 0;
1027 case VAProfileH264ConstrainedBaseline:
1028 constraint_set_flag |= (1 << 0 | 1 << 1); /* Annex A.2.2 */
1032 case VAProfileH264Main:
1033 constraint_set_flag |= (1 << 1); /* Annex A.2.2 */
1036 case VAProfileH264High:
1037 constraint_set_flag |= (1 << 3); /* Annex A.2.4 */
1040 h264_profile = VAProfileH264Baseline;
1042 constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
1047 VAConfigAttrib attrib[VAConfigAttribTypeMax];
1049 /* find out the format for the render target, and rate control mode */
1050 for (i = 0; i < VAConfigAttribTypeMax; i++)
1051 attrib[i].type = (VAConfigAttribType)i;
1053 va_status = vaGetConfigAttributes(va_dpy, h264_profile, VAEntrypointEncSlice,
1054 &attrib[0], VAConfigAttribTypeMax);
1055 CHECK_VASTATUS(va_status, "vaGetConfigAttributes");
1056 /* check the interested configattrib */
1057 if ((attrib[VAConfigAttribRTFormat].value & VA_RT_FORMAT_YUV420) == 0) {
1058 printf("Not find desired YUV420 RT format\n");
1061 config_attrib[config_attrib_num].type = VAConfigAttribRTFormat;
1062 config_attrib[config_attrib_num].value = VA_RT_FORMAT_YUV420;
1063 config_attrib_num++;
1066 if (attrib[VAConfigAttribRateControl].value != VA_ATTRIB_NOT_SUPPORTED) {
1067 int tmp = attrib[VAConfigAttribRateControl].value;
1069 if (rc_mode == -1 || !(rc_mode & tmp)) {
1070 if (rc_mode != -1) {
1071 printf("Warning: Don't support the specified RateControl mode: %s!!!, switch to ", rc_to_string(rc_mode));
1074 for (i = 0; i < sizeof(rc_default_modes) / sizeof(rc_default_modes[0]); i++) {
1075 if (rc_default_modes[i] & tmp) {
1076 rc_mode = rc_default_modes[i];
1082 config_attrib[config_attrib_num].type = VAConfigAttribRateControl;
1083 config_attrib[config_attrib_num].value = rc_mode;
1084 config_attrib_num++;
1088 if (attrib[VAConfigAttribEncPackedHeaders].value != VA_ATTRIB_NOT_SUPPORTED) {
1089 int tmp = attrib[VAConfigAttribEncPackedHeaders].value;
1091 h264_packedheader = 1;
1092 config_attrib[config_attrib_num].type = VAConfigAttribEncPackedHeaders;
1093 config_attrib[config_attrib_num].value = VA_ENC_PACKED_HEADER_NONE;
1095 if (tmp & VA_ENC_PACKED_HEADER_SEQUENCE) {
1096 config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_SEQUENCE;
1099 if (tmp & VA_ENC_PACKED_HEADER_PICTURE) {
1100 config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_PICTURE;
1103 if (tmp & VA_ENC_PACKED_HEADER_SLICE) {
1104 config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_SLICE;
1107 if (tmp & VA_ENC_PACKED_HEADER_MISC) {
1108 config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_MISC;
1111 enc_packed_header_idx = config_attrib_num;
1112 config_attrib_num++;
1115 if (attrib[VAConfigAttribEncInterlaced].value != VA_ATTRIB_NOT_SUPPORTED) {
1116 config_attrib[config_attrib_num].type = VAConfigAttribEncInterlaced;
1117 config_attrib[config_attrib_num].value = VA_ENC_PACKED_HEADER_NONE;
1118 config_attrib_num++;
1121 if (attrib[VAConfigAttribEncMaxRefFrames].value != VA_ATTRIB_NOT_SUPPORTED) {
1122 h264_maxref = attrib[VAConfigAttribEncMaxRefFrames].value;
1129 int H264EncoderImpl::setup_encode()
1132 VASurfaceID *tmp_surfaceid;
1133 int codedbuf_size, i;
1134 static VASurfaceID src_surface[SURFACE_NUM];
1135 static VASurfaceID ref_surface[SURFACE_NUM];
1137 va_status = vaCreateConfig(va_dpy, h264_profile, VAEntrypointEncSlice,
1138 &config_attrib[0], config_attrib_num, &config_id);
1139 CHECK_VASTATUS(va_status, "vaCreateConfig");
1141 /* create source surfaces */
1142 va_status = vaCreateSurfaces(va_dpy,
1143 VA_RT_FORMAT_YUV420, frame_width_mbaligned, frame_height_mbaligned,
1144 &src_surface[0], SURFACE_NUM,
1146 CHECK_VASTATUS(va_status, "vaCreateSurfaces");
1148 /* create reference surfaces */
1149 va_status = vaCreateSurfaces(va_dpy,
1150 VA_RT_FORMAT_YUV420, frame_width_mbaligned, frame_height_mbaligned,
1151 &ref_surface[0], SURFACE_NUM,
1153 CHECK_VASTATUS(va_status, "vaCreateSurfaces");
1155 tmp_surfaceid = (VASurfaceID *)calloc(2 * SURFACE_NUM, sizeof(VASurfaceID));
1156 memcpy(tmp_surfaceid, src_surface, SURFACE_NUM * sizeof(VASurfaceID));
1157 memcpy(tmp_surfaceid + SURFACE_NUM, ref_surface, SURFACE_NUM * sizeof(VASurfaceID));
1159 /* Create a context for this encode pipe */
1160 va_status = vaCreateContext(va_dpy, config_id,
1161 frame_width_mbaligned, frame_height_mbaligned,
1163 tmp_surfaceid, 2 * SURFACE_NUM,
1165 CHECK_VASTATUS(va_status, "vaCreateContext");
1166 free(tmp_surfaceid);
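// Size each coded buffer at roughly 400 bytes per 16x16 macroblock, which is
// more than the raw size of a macroblock and thus ample for one coded frame.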
1168 codedbuf_size = (frame_width_mbaligned * frame_height_mbaligned * 400) / (16*16);
1170 for (i = 0; i < SURFACE_NUM; i++) {
1171 /* Create the coded buffer once per surface. The other VA buffers are not
1172 * used again after vaRenderPicture, so the application can simply create
1173 * them anew for every frame, but the coded buffer has to be mapped and
1174 * accessed after vaRenderPicture/vaEndPicture, so VA will not maintain
1175 * it for us.
1177 va_status = vaCreateBuffer(va_dpy, context_id, VAEncCodedBufferType,
1178 codedbuf_size, 1, NULL, &gl_surfaces[i].coded_buf);
1179 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1182 /* create OpenGL objects */
1183 //glGenFramebuffers(SURFACE_NUM, fbos);
1185 for (i = 0; i < SURFACE_NUM; i++) {
1186 glGenTextures(1, &gl_surfaces[i].y_tex);
1187 glGenTextures(1, &gl_surfaces[i].cbcr_tex);
1189 if (!use_zerocopy) {
1191 glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].y_tex);
1192 glTexStorage2D(GL_TEXTURE_2D, 1, GL_R8, frame_width, frame_height);
1194 // Create CbCr image.
1195 glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].cbcr_tex);
1196 glTexStorage2D(GL_TEXTURE_2D, 1, GL_RG8, frame_width / 2, frame_height / 2);
1198 // Generate a PBO to read into. It doesn't necessarily fit 1:1 with the VA-API
1199 // buffers, due to potentially differing pitch.
1200 glGenBuffers(1, &gl_surfaces[i].pbo);
1201 glBindBuffer(GL_PIXEL_PACK_BUFFER, gl_surfaces[i].pbo);
1202 glBufferStorage(GL_PIXEL_PACK_BUFFER, frame_width * frame_height * 2, nullptr, GL_MAP_READ_BIT | GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT);
1203 uint8_t *ptr = (uint8_t *)glMapBufferRange(GL_PIXEL_PACK_BUFFER, 0, frame_width * frame_height * 2, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
1204 gl_surfaces[i].y_offset = 0;
1205 gl_surfaces[i].cbcr_offset = frame_width * frame_height;
1206 gl_surfaces[i].y_ptr = ptr + gl_surfaces[i].y_offset;
1207 gl_surfaces[i].cbcr_ptr = ptr + gl_surfaces[i].cbcr_offset;
1208 glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
1212 for (i = 0; i < SURFACE_NUM; i++) {
1213 gl_surfaces[i].src_surface = src_surface[i];
1214 gl_surfaces[i].ref_surface = ref_surface[i];
1220 // Given a list like 1 9 3 0 2 8 4 and a pivot element 3, will produce
1223 template<class T, class C>
1224 static void sort_two(T *begin, T *end, const T &pivot, const C &less_than)
1226 T *middle = partition(begin, end, [&](const T &elem) { return less_than(elem, pivot); });
1227 sort(begin, middle, [&](const T &a, const T &b) { return less_than(b, a); });
1228 sort(middle, end, less_than);
1231 void H264EncoderImpl::update_ReferenceFrames(int frame_type)
1235 if (frame_type == FRAME_B)
1238 CurrentCurrPic.flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
1240 if (numShortTerm > num_ref_frames)
1241 numShortTerm = num_ref_frames;
1242 for (i=numShortTerm-1; i>0; i--)
1243 ReferenceFrames[i] = ReferenceFrames[i-1];
1244 ReferenceFrames[0] = CurrentCurrPic;
1246 current_frame_num++;
1247 if (current_frame_num >= MaxFrameNum)
1248 current_frame_num = 0;
1252 int H264EncoderImpl::update_RefPicList(int frame_type)
1254 const auto descending_by_frame_idx = [](const VAPictureH264 &a, const VAPictureH264 &b) {
1255 return a.frame_idx > b.frame_idx;
1257 const auto ascending_by_top_field_order_cnt = [](const VAPictureH264 &a, const VAPictureH264 &b) {
1258 return a.TopFieldOrderCnt < b.TopFieldOrderCnt;
1260 const auto descending_by_top_field_order_cnt = [](const VAPictureH264 &a, const VAPictureH264 &b) {
1261 return a.TopFieldOrderCnt > b.TopFieldOrderCnt;
1264 if (frame_type == FRAME_P) {
1265 memcpy(RefPicList0_P, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
1266 sort(&RefPicList0_P[0], &RefPicList0_P[numShortTerm], descending_by_frame_idx);
1267 } else if (frame_type == FRAME_B) {
1268 memcpy(RefPicList0_B, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
1269 sort_two(&RefPicList0_B[0], &RefPicList0_B[numShortTerm], CurrentCurrPic, ascending_by_top_field_order_cnt);
1271 memcpy(RefPicList1_B, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
1272 sort_two(&RefPicList1_B[0], &RefPicList1_B[numShortTerm], CurrentCurrPic, descending_by_top_field_order_cnt);
1279 int H264EncoderImpl::render_sequence()
1281 VABufferID seq_param_buf, rc_param_buf, render_id[2];
1283 VAEncMiscParameterBuffer *misc_param;
1284 VAEncMiscParameterRateControl *misc_rate_ctrl;
1286 seq_param.level_idc = 41 /*SH_LEVEL_3*/;
1287 seq_param.picture_width_in_mbs = frame_width_mbaligned / 16;
1288 seq_param.picture_height_in_mbs = frame_height_mbaligned / 16;
1289 seq_param.bits_per_second = frame_bitrate;
1291 seq_param.intra_period = intra_period;
1292 seq_param.intra_idr_period = intra_idr_period;
1293 seq_param.ip_period = ip_period;
1295 seq_param.max_num_ref_frames = num_ref_frames;
1296 seq_param.seq_fields.bits.frame_mbs_only_flag = 1;
1297 seq_param.time_scale = TIMEBASE * 2;
1298 seq_param.num_units_in_tick = 1; /* Tc = num_units_in_tick / scale */
1299 seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = Log2MaxPicOrderCntLsb - 4;
1300 seq_param.seq_fields.bits.log2_max_frame_num_minus4 = Log2MaxFrameNum - 4;
1301 seq_param.seq_fields.bits.frame_mbs_only_flag = 1;
1302 seq_param.seq_fields.bits.chroma_format_idc = 1;
1303 seq_param.seq_fields.bits.direct_8x8_inference_flag = 1;
1305 if (frame_width != frame_width_mbaligned ||
1306 frame_height != frame_height_mbaligned) {
1307 seq_param.frame_cropping_flag = 1;
1308 seq_param.frame_crop_left_offset = 0;
1309 seq_param.frame_crop_right_offset = (frame_width_mbaligned - frame_width)/2;
1310 seq_param.frame_crop_top_offset = 0;
1311 seq_param.frame_crop_bottom_offset = (frame_height_mbaligned - frame_height)/2;
1314 va_status = vaCreateBuffer(va_dpy, context_id,
1315 VAEncSequenceParameterBufferType,
1316 sizeof(seq_param), 1, &seq_param, &seq_param_buf);
1317 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1319 va_status = vaCreateBuffer(va_dpy, context_id,
1320 VAEncMiscParameterBufferType,
1321 sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
1322 1, NULL, &rc_param_buf);
1323 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1325 vaMapBuffer(va_dpy, rc_param_buf, (void **)&misc_param);
1326 misc_param->type = VAEncMiscParameterTypeRateControl;
1327 misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data;
1328 memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl));
1329 misc_rate_ctrl->bits_per_second = frame_bitrate;
1330 misc_rate_ctrl->target_percentage = 66;
1331 misc_rate_ctrl->window_size = 1000;
1332 misc_rate_ctrl->initial_qp = initial_qp;
1333 misc_rate_ctrl->min_qp = minimal_qp;
1334 misc_rate_ctrl->basic_unit_size = 0;
1335 vaUnmapBuffer(va_dpy, rc_param_buf);
1337 render_id[0] = seq_param_buf;
1338 render_id[1] = rc_param_buf;
1340 render_picture_and_delete(va_dpy, context_id, &render_id[0], 2);
1345 static int calc_poc(int pic_order_cnt_lsb, int frame_type)
1347 static int PicOrderCntMsb_ref = 0, pic_order_cnt_lsb_ref = 0;
1348 int prevPicOrderCntMsb, prevPicOrderCntLsb;
1349 int PicOrderCntMsb, TopFieldOrderCnt;
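// This is the usual pic_order_cnt_type == 0 derivation. Worked example with
// hypothetical numbers (MaxPicOrderCntLsb == 256): if the previous reference
// had lsb 250 and msb 0, a new lsb of 2 means the lsb has wrapped, so
// PicOrderCntMsb becomes 256 and TopFieldOrderCnt = 256 + 2 = 258.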
1351 if (frame_type == FRAME_IDR)
1352 prevPicOrderCntMsb = prevPicOrderCntLsb = 0;
1354 prevPicOrderCntMsb = PicOrderCntMsb_ref;
1355 prevPicOrderCntLsb = pic_order_cnt_lsb_ref;
1358 if ((pic_order_cnt_lsb < prevPicOrderCntLsb) &&
1359 ((prevPicOrderCntLsb - pic_order_cnt_lsb) >= (int)(MaxPicOrderCntLsb / 2)))
1360 PicOrderCntMsb = prevPicOrderCntMsb + MaxPicOrderCntLsb;
1361 else if ((pic_order_cnt_lsb > prevPicOrderCntLsb) &&
1362 ((pic_order_cnt_lsb - prevPicOrderCntLsb) > (int)(MaxPicOrderCntLsb / 2)))
1363 PicOrderCntMsb = prevPicOrderCntMsb - MaxPicOrderCntLsb;
1365 PicOrderCntMsb = prevPicOrderCntMsb;
1367 TopFieldOrderCnt = PicOrderCntMsb + pic_order_cnt_lsb;
1369 if (frame_type != FRAME_B) {
1370 PicOrderCntMsb_ref = PicOrderCntMsb;
1371 pic_order_cnt_lsb_ref = pic_order_cnt_lsb;
1374 return TopFieldOrderCnt;
1377 int H264EncoderImpl::render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num)
1379 VABufferID pic_param_buf;
1383 pic_param.CurrPic.picture_id = gl_surfaces[display_frame_num % SURFACE_NUM].ref_surface;
1384 pic_param.CurrPic.frame_idx = current_frame_num;
1385 pic_param.CurrPic.flags = 0;
1386 pic_param.CurrPic.TopFieldOrderCnt = calc_poc((display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb, frame_type);
1387 pic_param.CurrPic.BottomFieldOrderCnt = pic_param.CurrPic.TopFieldOrderCnt;
1388 CurrentCurrPic = pic_param.CurrPic;
1390 memcpy(pic_param.ReferenceFrames, ReferenceFrames, numShortTerm*sizeof(VAPictureH264));
1391 for (i = numShortTerm; i < MAX_NUM_REF1; i++) {
1392 pic_param.ReferenceFrames[i].picture_id = VA_INVALID_SURFACE;
1393 pic_param.ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID;
1396 pic_param.pic_fields.bits.idr_pic_flag = (frame_type == FRAME_IDR);
1397 pic_param.pic_fields.bits.reference_pic_flag = (frame_type != FRAME_B);
1398 pic_param.pic_fields.bits.entropy_coding_mode_flag = h264_entropy_mode;
1399 pic_param.pic_fields.bits.deblocking_filter_control_present_flag = 1;
1400 pic_param.frame_num = current_frame_num;
1401 pic_param.coded_buf = gl_surfaces[display_frame_num % SURFACE_NUM].coded_buf;
1402 pic_param.last_picture = false; // FIXME
1403 pic_param.pic_init_qp = initial_qp;
1405 va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
1406 sizeof(pic_param), 1, &pic_param, &pic_param_buf);
1407 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1409 render_picture_and_delete(va_dpy, context_id, &pic_param_buf, 1);
1414 int H264EncoderImpl::render_packedsequence()
1416 VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1417 VABufferID packedseq_para_bufid, packedseq_data_bufid, render_id[2];
1418 unsigned int length_in_bits;
1419 unsigned char *packedseq_buffer = NULL;
1422 length_in_bits = build_packed_seq_buffer(&packedseq_buffer);
1424 packedheader_param_buffer.type = VAEncPackedHeaderSequence;
1426 packedheader_param_buffer.bit_length = length_in_bits; /*length_in_bits*/
1427 packedheader_param_buffer.has_emulation_bytes = 0;
1428 va_status = vaCreateBuffer(va_dpy,
1430 VAEncPackedHeaderParameterBufferType,
1431 sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1432 &packedseq_para_bufid);
1433 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1435 va_status = vaCreateBuffer(va_dpy,
1437 VAEncPackedHeaderDataBufferType,
1438 (length_in_bits + 7) / 8, 1, packedseq_buffer,
1439 &packedseq_data_bufid);
1440 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1442 render_id[0] = packedseq_para_bufid;
1443 render_id[1] = packedseq_data_bufid;
1444 render_picture_and_delete(va_dpy, context_id, render_id, 2);
1446 free(packedseq_buffer);
1452 int H264EncoderImpl::render_packedpicture()
1454 VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1455 VABufferID packedpic_para_bufid, packedpic_data_bufid, render_id[2];
1456 unsigned int length_in_bits;
1457 unsigned char *packedpic_buffer = NULL;
1460 length_in_bits = build_packed_pic_buffer(&packedpic_buffer);
1461 packedheader_param_buffer.type = VAEncPackedHeaderPicture;
1462 packedheader_param_buffer.bit_length = length_in_bits;
1463 packedheader_param_buffer.has_emulation_bytes = 0;
1465 va_status = vaCreateBuffer(va_dpy,
1467 VAEncPackedHeaderParameterBufferType,
1468 sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1469 &packedpic_para_bufid);
1470 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1472 va_status = vaCreateBuffer(va_dpy,
1474 VAEncPackedHeaderDataBufferType,
1475 (length_in_bits + 7) / 8, 1, packedpic_buffer,
1476 &packedpic_data_bufid);
1477 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1479 render_id[0] = packedpic_para_bufid;
1480 render_id[1] = packedpic_data_bufid;
1481 render_picture_and_delete(va_dpy, context_id, render_id, 2);
1483 free(packedpic_buffer);
1488 void H264EncoderImpl::render_packedslice()
1490 VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1491 VABufferID packedslice_para_bufid, packedslice_data_bufid, render_id[2];
1492 unsigned int length_in_bits;
1493 unsigned char *packedslice_buffer = NULL;
1496 length_in_bits = build_packed_slice_buffer(&packedslice_buffer);
1497 packedheader_param_buffer.type = VAEncPackedHeaderSlice;
1498 packedheader_param_buffer.bit_length = length_in_bits;
1499 packedheader_param_buffer.has_emulation_bytes = 0;
1501 va_status = vaCreateBuffer(va_dpy,
1503 VAEncPackedHeaderParameterBufferType,
1504 sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1505 &packedslice_para_bufid);
1506 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1508 va_status = vaCreateBuffer(va_dpy,
1510 VAEncPackedHeaderDataBufferType,
1511 (length_in_bits + 7) / 8, 1, packedslice_buffer,
1512 &packedslice_data_bufid);
1513 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1515 render_id[0] = packedslice_para_bufid;
1516 render_id[1] = packedslice_data_bufid;
1517 render_picture_and_delete(va_dpy, context_id, render_id, 2);
1519 free(packedslice_buffer);
1522 int H264EncoderImpl::render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type)
1524 VABufferID slice_param_buf;
1528 update_RefPicList(frame_type);
1530 /* one frame, one slice */
1531 slice_param.macroblock_address = 0;
1532 slice_param.num_macroblocks = frame_width_mbaligned * frame_height_mbaligned/(16*16); /* Measured by MB */
1533 slice_param.slice_type = (frame_type == FRAME_IDR)?2:frame_type;
1534 if (frame_type == FRAME_IDR) {
1535 if (encoding_frame_num != 0)
1536 ++slice_param.idr_pic_id;
1537 } else if (frame_type == FRAME_P) {
1538 int refpiclist0_max = h264_maxref & 0xffff;
1539 memcpy(slice_param.RefPicList0, RefPicList0_P, refpiclist0_max*sizeof(VAPictureH264));
1541 for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
1542 slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
1543 slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
1545 } else if (frame_type == FRAME_B) {
1546 int refpiclist0_max = h264_maxref & 0xffff;
1547 int refpiclist1_max = (h264_maxref >> 16) & 0xffff;
1549 memcpy(slice_param.RefPicList0, RefPicList0_B, refpiclist0_max*sizeof(VAPictureH264));
1550 for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
1551 slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
1552 slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
1555 memcpy(slice_param.RefPicList1, RefPicList1_B, refpiclist1_max*sizeof(VAPictureH264));
1556 for (i = refpiclist1_max; i < MAX_NUM_REF2; i++) {
1557 slice_param.RefPicList1[i].picture_id = VA_INVALID_SURFACE;
1558 slice_param.RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
1562 slice_param.slice_alpha_c0_offset_div2 = 0;
1563 slice_param.slice_beta_offset_div2 = 0;
1564 slice_param.direct_spatial_mv_pred_flag = 1;
1565 slice_param.pic_order_cnt_lsb = (display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb;
1568 if (h264_packedheader &&
1569 config_attrib[enc_packed_header_idx].value & VA_ENC_PACKED_HEADER_SLICE)
1570 render_packedslice();
1572 va_status = vaCreateBuffer(va_dpy, context_id, VAEncSliceParameterBufferType,
1573 sizeof(slice_param), 1, &slice_param, &slice_param_buf);
1574 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1576 render_picture_and_delete(va_dpy, context_id, &slice_param_buf, 1);
1583 void H264EncoderImpl::save_codeddata(storage_task task)
1585 VACodedBufferSegment *buf_list = NULL;
1590 const int64_t global_delay = int64_t(ip_period - 1) * (TIMEBASE / MAX_FPS); // So we never get negative dts.
1592 va_status = vaMapBuffer(va_dpy, gl_surfaces[task.display_order % SURFACE_NUM].coded_buf, (void **)(&buf_list));
1593 CHECK_VASTATUS(va_status, "vaMapBuffer");
1594 while (buf_list != NULL) {
1595 data.append(reinterpret_cast<const char *>(buf_list->buf), buf_list->size);
1596 buf_list = (VACodedBufferSegment *) buf_list->next;
1598 vaUnmapBuffer(va_dpy, gl_surfaces[task.display_order % SURFACE_NUM].coded_buf);
1603 memset(&pkt, 0, sizeof(pkt));
1605 pkt.data = reinterpret_cast<uint8_t *>(&data[0]);
1606 pkt.size = data.size();
1607 pkt.stream_index = 0;
1608 if (task.frame_type == FRAME_IDR) {
1609 pkt.flags = AV_PKT_FLAG_KEY;
1614 httpd->add_packet(pkt, task.pts + global_delay, task.dts + global_delay,
1615 global_flags.uncompressed_video_to_http ? HTTPD::DESTINATION_FILE_ONLY : HTTPD::DESTINATION_FILE_AND_HTTP);
1617 // Encode and add all audio frames up to and including the pts of this video frame.
1620 vector<float> audio;
1622 unique_lock<mutex> lock(frame_queue_mutex);
1623 frame_queue_nonempty.wait(lock, [this]{ return storage_thread_should_quit || !pending_audio_frames.empty(); });
1624 if (storage_thread_should_quit && pending_audio_frames.empty()) return;
1625 auto it = pending_audio_frames.begin();
1626 if (it->first > task.pts) break;
1627 audio_pts = it->first;
1628 audio = move(it->second);
1629 pending_audio_frames.erase(it);
1632 audio_frame->nb_samples = audio.size() / 2;
1633 audio_frame->format = AV_SAMPLE_FMT_S32;
1634 audio_frame->channel_layout = AV_CH_LAYOUT_STEREO;
1636 unique_ptr<int32_t[]> int_samples(new int32_t[audio.size()]);
1637 int ret = avcodec_fill_audio_frame(audio_frame, 2, AV_SAMPLE_FMT_S32, (const uint8_t*)int_samples.get(), audio.size() * sizeof(int32_t), 1);
1639 fprintf(stderr, "avcodec_fill_audio_frame() failed with %d\n", ret);
1642 for (int i = 0; i < audio_frame->nb_samples * 2; ++i) {
1643 if (audio[i] >= 1.0f) {
1644 int_samples[i] = 2147483647;
1645 } else if (audio[i] <= -1.0f) {
1646 int_samples[i] = -2147483647;
1648 int_samples[i] = lrintf(audio[i] * 2147483647.0f);
1653 av_init_packet(&pkt);
1657 avcodec_encode_audio2(context_audio, &pkt, audio_frame, &got_output);
1659 pkt.stream_index = 1;
1660 pkt.flags = AV_PKT_FLAG_KEY;
1661 httpd->add_packet(pkt, audio_pts + global_delay, audio_pts + global_delay, HTTPD::DESTINATION_FILE_AND_HTTP);
1663 // TODO: Delayed frames.
1664 av_frame_unref(audio_frame);
1665 av_free_packet(&pkt);
1666 if (audio_pts == task.pts) break;
1671 // Puts a finished frame's bookkeeping onto the storage task queue; storage_task_thread() will pick it up and save the coded data.
1672 void H264EncoderImpl::storage_task_enqueue(storage_task task)
1674 unique_lock<mutex> lock(storage_task_queue_mutex);
1675 storage_task_queue.push(move(task));
1676 storage_task_queue_changed.notify_all();
1679 void H264EncoderImpl::storage_task_thread()
1682 storage_task current;
1684 // wait until there's an encoded frame
1685 unique_lock<mutex> lock(storage_task_queue_mutex);
1686 storage_task_queue_changed.wait(lock, [this]{ return storage_thread_should_quit || !storage_task_queue.empty(); });
1687 if (storage_thread_should_quit && storage_task_queue.empty()) return;
1688 current = move(storage_task_queue.front());
1689 storage_task_queue.pop();
1694 // waits for data, then saves it to disk.
1695 va_status = vaSyncSurface(va_dpy, gl_surfaces[current.display_order % SURFACE_NUM].src_surface);
1696 CHECK_VASTATUS(va_status, "vaSyncSurface");
1697 save_codeddata(move(current));
1700 unique_lock<mutex> lock(storage_task_queue_mutex);
1701 srcsurface_status[current.display_order % SURFACE_NUM] = SRC_SURFACE_FREE;
1702 storage_task_queue_changed.notify_all();
1707 int H264EncoderImpl::release_encode()
1709 for (unsigned i = 0; i < SURFACE_NUM; i++) {
1710 vaDestroyBuffer(va_dpy, gl_surfaces[i].coded_buf);
1711 vaDestroySurfaces(va_dpy, &gl_surfaces[i].src_surface, 1);
1712 vaDestroySurfaces(va_dpy, &gl_surfaces[i].ref_surface, 1);
1714 if (!use_zerocopy) {
1715 glBindBuffer(GL_PIXEL_PACK_BUFFER, gl_surfaces[i].pbo);
1716 glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
1717 glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
1718 glDeleteBuffers(1, &gl_surfaces[i].pbo);
1720 glDeleteTextures(1, &gl_surfaces[i].y_tex);
1721 glDeleteTextures(1, &gl_surfaces[i].cbcr_tex);
1724 vaDestroyContext(va_dpy, context_id);
1725 vaDestroyConfig(va_dpy, config_id);
1730 int H264EncoderImpl::deinit_va()
1732 vaTerminate(va_dpy);
1734 va_close_display(va_dpy);
1740 H264EncoderImpl::H264EncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
1741 : current_storage_frame(0), surface(surface), httpd(httpd)
1743 AVCodec *codec_audio = avcodec_find_encoder(AUDIO_OUTPUT_CODEC);
1744 context_audio = avcodec_alloc_context3(codec_audio);
1745 context_audio->bit_rate = AUDIO_OUTPUT_BIT_RATE;
1746 context_audio->sample_rate = OUTPUT_FREQUENCY;
1747 context_audio->sample_fmt = AUDIO_OUTPUT_SAMPLE_FMT;
1748 context_audio->channels = 2;
1749 context_audio->channel_layout = AV_CH_LAYOUT_STEREO;
1750 context_audio->time_base = AVRational{1, TIMEBASE};
1751 if (avcodec_open2(context_audio, codec_audio, NULL) < 0) {
1752 fprintf(stderr, "Could not open codec\n");
1755 audio_frame = av_frame_alloc();
1757 frame_width = width;
1758 frame_height = height;
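// Round the frame size up to whole 16x16 macroblocks; e.g. 1920x1080 becomes 1920x1088.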
1759 frame_width_mbaligned = (frame_width + 15) & (~15);
1760 frame_height_mbaligned = (frame_height + 15) & (~15);
1764 if (global_flags.uncompressed_video_to_http) {
1765 reorderer.reset(new FrameReorderer(ip_period - 1, frame_width, frame_height));
1768 init_va(va_display);
1771 // No frames are ready yet.
1772 memset(srcsurface_status, SRC_SURFACE_FREE, sizeof(srcsurface_status));
1774 memset(&seq_param, 0, sizeof(seq_param));
1775 memset(&pic_param, 0, sizeof(pic_param));
1776 memset(&slice_param, 0, sizeof(slice_param));
1778 storage_thread = thread(&H264EncoderImpl::storage_task_thread, this);
1780 encode_thread = thread([this]{
1781 //SDL_GL_MakeCurrent(window, context);
1782 QOpenGLContext *context = create_context(this->surface);
1783 eglBindAPI(EGL_OPENGL_API);
1784 if (!make_current(context, this->surface)) {
1785 printf("display=%p surface=%p context=%p curr=%p err=%d\n", eglGetCurrentDisplay(), this->surface, context, eglGetCurrentContext(),
1789 encode_thread_func();
H264EncoderImpl::~H264EncoderImpl()
{
    shutdown();
    av_frame_free(&audio_frame);
    // TODO: Destroy context.
}
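
// begin_frame() hands out the Y and CbCr textures the mixer should render
// into. It first waits until the corresponding surface slot is no longer
// being encoded; when zero-copy is in use, the VA-API surface's planes are
// then imported as EGL images (dma-buf), so rendering goes directly into
// the encoder's input surface.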
bool H264EncoderImpl::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
{
    assert(!is_shutdown);
    {
        // Wait until this frame slot is done encoding.
        unique_lock<mutex> lock(storage_task_queue_mutex);
        if (srcsurface_status[current_storage_frame % SURFACE_NUM] != SRC_SURFACE_FREE) {
            fprintf(stderr, "Warning: Slot %d (for frame %d) is still encoding, rendering has to wait for H.264 encoder\n",
                current_storage_frame % SURFACE_NUM, current_storage_frame);
        }
        storage_task_queue_changed.wait(lock, [this]{ return storage_thread_should_quit || (srcsurface_status[current_storage_frame % SURFACE_NUM] == SRC_SURFACE_FREE); });
        srcsurface_status[current_storage_frame % SURFACE_NUM] = SRC_SURFACE_IN_ENCODING;
        if (storage_thread_should_quit) return false;
    }

    //*fbo = fbos[current_storage_frame % SURFACE_NUM];
    GLSurface *surf = &gl_surfaces[current_storage_frame % SURFACE_NUM];
    *y_tex = surf->y_tex;
    *cbcr_tex = surf->cbcr_tex;

    VAStatus va_status = vaDeriveImage(va_dpy, surf->src_surface, &surf->surface_image);
    CHECK_VASTATUS(va_status, "vaDeriveImage");

    if (use_zerocopy) {
        VABufferInfo buf_info;
        buf_info.mem_type = VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME;  // or VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM?
        va_status = vaAcquireBufferHandle(va_dpy, surf->surface_image.buf, &buf_info);
        CHECK_VASTATUS(va_status, "vaAcquireBufferHandle");

        // Create Y image.
        surf->y_egl_image = EGL_NO_IMAGE_KHR;
        EGLint y_attribs[] = {
            EGL_WIDTH, frame_width,
            EGL_HEIGHT, frame_height,
            EGL_LINUX_DRM_FOURCC_EXT, fourcc_code('R', '8', ' ', ' '),
            EGL_DMA_BUF_PLANE0_FD_EXT, EGLint(buf_info.handle),
            EGL_DMA_BUF_PLANE0_OFFSET_EXT, EGLint(surf->surface_image.offsets[0]),
            EGL_DMA_BUF_PLANE0_PITCH_EXT, EGLint(surf->surface_image.pitches[0]),
            EGL_NONE
        };

        surf->y_egl_image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, y_attribs);
        assert(surf->y_egl_image != EGL_NO_IMAGE_KHR);

        // Associate the Y image with a texture.
        glBindTexture(GL_TEXTURE_2D, *y_tex);
        glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, surf->y_egl_image);

        // Create CbCr image.
        surf->cbcr_egl_image = EGL_NO_IMAGE_KHR;
        EGLint cbcr_attribs[] = {
            EGL_WIDTH, frame_width,
            EGL_HEIGHT, frame_height,
            EGL_LINUX_DRM_FOURCC_EXT, fourcc_code('G', 'R', '8', '8'),
            EGL_DMA_BUF_PLANE0_FD_EXT, EGLint(buf_info.handle),
            EGL_DMA_BUF_PLANE0_OFFSET_EXT, EGLint(surf->surface_image.offsets[1]),
            EGL_DMA_BUF_PLANE0_PITCH_EXT, EGLint(surf->surface_image.pitches[1]),
            EGL_NONE
        };

        surf->cbcr_egl_image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, cbcr_attribs);
        assert(surf->cbcr_egl_image != EGL_NO_IMAGE_KHR);

        // Associate the CbCr image with a texture.
        glBindTexture(GL_TEXTURE_2D, *cbcr_tex);
        glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, surf->cbcr_egl_image);
    }

    return true;
}

void H264EncoderImpl::add_audio(int64_t pts, vector<float> audio)
{
    assert(!is_shutdown);
    {
        unique_lock<mutex> lock(frame_queue_mutex);
        pending_audio_frames[pts] = move(audio);
    }
    frame_queue_nonempty.notify_all();
}
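
// end_frame() finishes a frame: without zerocopy, the rendered textures are
// read back into the surface's PBO with glGetTexImage(). In either case a
// GL fence is created so the encode thread knows when the GPU is done, and
// the frame is queued for the encode thread together with its pts.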
RefCountedGLsync H264EncoderImpl::end_frame(int64_t pts, const vector<RefCountedFrame> &input_frames)
{
    assert(!is_shutdown);

    if (!use_zerocopy) {
        GLSurface *surf = &gl_surfaces[current_storage_frame % SURFACE_NUM];

        // Read the rendered frame back from the GPU into the surface's PBO.
        glPixelStorei(GL_PACK_ROW_LENGTH, 0);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, surf->pbo);

        glBindTexture(GL_TEXTURE_2D, surf->y_tex);
        glGetTexImage(GL_TEXTURE_2D, 0, GL_RED, GL_UNSIGNED_BYTE, BUFFER_OFFSET(surf->y_offset));

        glBindTexture(GL_TEXTURE_2D, surf->cbcr_tex);
        glGetTexImage(GL_TEXTURE_2D, 0, GL_RG, GL_UNSIGNED_BYTE, BUFFER_OFFSET(surf->cbcr_offset));

        glBindTexture(GL_TEXTURE_2D, 0);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);

        glMemoryBarrier(GL_TEXTURE_UPDATE_BARRIER_BIT | GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
    }

    RefCountedGLsync fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
    glFlush();  // Make the H.264 thread see the fence as soon as possible.

    {
        unique_lock<mutex> lock(frame_queue_mutex);
        pending_video_frames[current_storage_frame] = PendingFrame{ fence, input_frames, pts };
        ++current_storage_frame;
    }
    frame_queue_nonempty.notify_all();
    return fence;
}

void H264EncoderImpl::shutdown()
{
    if (is_shutdown) return;
    {
        unique_lock<mutex> lock(frame_queue_mutex);
        encode_thread_should_quit = true;
        frame_queue_nonempty.notify_all();
    }
    encode_thread.join();
    {
        unique_lock<mutex> lock(storage_task_queue_mutex);
        storage_thread_should_quit = true;
        frame_queue_nonempty.notify_all();
        storage_task_queue_changed.notify_all();
    }
    storage_thread.join();
    release_encode();
    deinit_va();
    is_shutdown = true;
}
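
// The encode thread. Frames are pulled out of pending_video_frames in
// encoding order (as given by encoding2display_order()), dts is derived
// from the pts lag, and each frame is handed to encode_frame(). On
// shutdown, any leftover B-frames are re-encoded as P-frames.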
void H264EncoderImpl::encode_thread_func()
{
    int64_t last_dts = -1;
    int gop_start_display_frame_num = 0;
    for (int encoding_frame_num = 0; ; ++encoding_frame_num) {
        PendingFrame frame;
        int pts_lag;
        int frame_type, display_frame_num;
        encoding2display_order(encoding_frame_num, intra_period, intra_idr_period, ip_period,
                               &display_frame_num, &frame_type, &pts_lag);
        if (frame_type == FRAME_IDR) {
            current_frame_num = 0;
            gop_start_display_frame_num = display_frame_num;
        }

        {
            unique_lock<mutex> lock(frame_queue_mutex);
            frame_queue_nonempty.wait(lock, [this, display_frame_num]{
                return encode_thread_should_quit || pending_video_frames.count(display_frame_num) != 0;
            });
            if (encode_thread_should_quit && pending_video_frames.count(display_frame_num) == 0) {
                // We have queued frames that were supposed to be B-frames,
                // but there will be no P-frame to encode them against. Encode them all
                // as P-frames instead. Note that this happens under the mutex,
                // but nobody else uses it at this point, since we're shutting down,
                // so there's no contention.
                encode_remaining_frames_as_p(encoding_frame_num, gop_start_display_frame_num, last_dts);
                return;
            } else {
                frame = move(pending_video_frames[display_frame_num]);
                pending_video_frames.erase(display_frame_num);
            }
        }

        // Determine the dts of this frame.
        int64_t dts;
        if (pts_lag == -1) {
            assert(last_dts != -1);
            dts = last_dts + (TIMEBASE / MAX_FPS);
        } else {
            dts = frame.pts - pts_lag;
        }
        last_dts = dts;

        encode_frame(frame, encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type, frame.pts, dts);
    }
}

void H264EncoderImpl::encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts)
{
    if (pending_video_frames.empty()) {
        return;
    }

    for (auto &pending_frame : pending_video_frames) {
        int display_frame_num = pending_frame.first;
        assert(display_frame_num > 0);
        PendingFrame frame = move(pending_frame.second);
        int64_t dts = last_dts + (TIMEBASE / MAX_FPS);
        printf("Finalizing encode: Encoding leftover frame %d as P-frame instead of B-frame.\n", display_frame_num);
        encode_frame(frame, encoding_frame_num++, display_frame_num, gop_start_display_frame_num, FRAME_P, frame.pts, dts);
        last_dts = dts;
    }

    if (global_flags.uncompressed_video_to_http) {
        // Add any frames still left in the reorderer.
        while (!reorderer->empty()) {
            pair<int64_t, const uint8_t *> output_frame = reorderer->get_first_frame();
            add_packet_for_uncompressed_frame(output_frame.first, output_frame.second);
        }
    }
}
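
// Sends one uncompressed frame to the HTTP stream. Note that the packet
// only borrows <data>; nothing is copied, and pts == dts for raw video.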
void H264EncoderImpl::add_packet_for_uncompressed_frame(int64_t pts, const uint8_t *data)
{
    AVPacket pkt;
    memset(&pkt, 0, sizeof(pkt));
    pkt.data = const_cast<uint8_t *>(data);
    pkt.size = frame_width * frame_height * 2;
    pkt.stream_index = 0;
    pkt.flags = AV_PKT_FLAG_KEY;
    httpd->add_packet(pkt, pts, pts, HTTPD::DESTINATION_HTTP_ONLY);
}
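
// Copies a tightly packed source image into a destination whose rows may
// have a larger pitch (as VA-API surfaces typically do).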
void memcpy_with_pitch(uint8_t *dst, const uint8_t *src, size_t src_width, size_t dst_pitch, size_t height)
{
    if (src_width == dst_pitch) {
        memcpy(dst, src, src_width * height);
    } else {
        for (size_t y = 0; y < height; ++y) {
            const uint8_t *sptr = src + y * src_width;
            uint8_t *dptr = dst + y * dst_pitch;
            memcpy(dptr, sptr, src_width);
        }
    }
}
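
// encode_frame() does the actual per-frame work: wait for the GL fence,
// hand the pixels to VA-API (by releasing the dma-buf handle in the zerocopy
// case, or by copying surf->y_ptr/surf->cbcr_ptr into the mapped VA image
// otherwise), render the parameter and slice buffers, kick off the encode
// with vaBeginPicture()/vaEndPicture(), and enqueue a storage task for the
// result.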
void H264EncoderImpl::encode_frame(H264EncoderImpl::PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
                                   int frame_type, int64_t pts, int64_t dts)
{
    // Wait for the GPU to be done with the frame.
    GLenum sync_status;
    do {
        sync_status = glClientWaitSync(frame.fence.get(), 0, 1000000000);
    } while (sync_status == GL_TIMEOUT_EXPIRED);
    assert(sync_status != GL_WAIT_FAILED);

    // Release back any input frames we needed to render this frame.
    frame.input_frames.clear();

    GLSurface *surf = &gl_surfaces[display_frame_num % SURFACE_NUM];
    VAStatus va_status;

    if (use_zerocopy) {
        eglDestroyImageKHR(eglGetCurrentDisplay(), surf->y_egl_image);
        eglDestroyImageKHR(eglGetCurrentDisplay(), surf->cbcr_egl_image);
        va_status = vaReleaseBufferHandle(va_dpy, surf->surface_image.buf);
        CHECK_VASTATUS(va_status, "vaReleaseBufferHandle");
    } else {
        unsigned char *surface_p = nullptr;
        vaMapBuffer(va_dpy, surf->surface_image.buf, (void **)&surface_p);

        unsigned char *va_y_ptr = (unsigned char *)surface_p + surf->surface_image.offsets[0];
        memcpy_with_pitch(va_y_ptr, surf->y_ptr, frame_width, surf->surface_image.pitches[0], frame_height);

        unsigned char *va_cbcr_ptr = (unsigned char *)surface_p + surf->surface_image.offsets[1];
        memcpy_with_pitch(va_cbcr_ptr, surf->cbcr_ptr, (frame_width / 2) * sizeof(uint16_t), surf->surface_image.pitches[1], frame_height / 2);

        va_status = vaUnmapBuffer(va_dpy, surf->surface_image.buf);
        CHECK_VASTATUS(va_status, "vaUnmapBuffer");

        if (global_flags.uncompressed_video_to_http) {
            // Add uncompressed video. (Note that pts == dts here.)
            const int64_t global_delay = int64_t(ip_period - 1) * (TIMEBASE / MAX_FPS);  // Needs to match audio.
            pair<int64_t, const uint8_t *> output_frame = reorderer->reorder_frame(pts + global_delay, reinterpret_cast<uint8_t *>(surf->y_ptr));
            if (output_frame.second != nullptr) {
                add_packet_for_uncompressed_frame(output_frame.first, output_frame.second);
            }
        }
    }

    va_status = vaDestroyImage(va_dpy, surf->surface_image.image_id);
    CHECK_VASTATUS(va_status, "vaDestroyImage");

    // Schedule the frame for encoding.
    VASurfaceID va_surface = surf->src_surface;
    va_status = vaBeginPicture(va_dpy, context_id, va_surface);
    CHECK_VASTATUS(va_status, "vaBeginPicture");

    if (frame_type == FRAME_IDR) {
        render_sequence();
        render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
        if (h264_packedheader) {
            render_packedsequence();
            render_packedpicture();
        }
    } else {
        //render_sequence();
        render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
    }
    render_slice(encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type);

    va_status = vaEndPicture(va_dpy, context_id);
    CHECK_VASTATUS(va_status, "vaEndPicture");

    // The encode job has now been kicked off (it completes asynchronously);
    // hand the frame over to the storage thread, which will wait for the
    // result and save the coded data.
    storage_task tmp;
    tmp.display_order = display_frame_num;
    tmp.frame_type = frame_type;
    tmp.pts = pts;
    tmp.dts = dts;
    storage_task_enqueue(move(tmp));

    update_ReferenceFrames(frame_type);
}

H264Encoder::H264Encoder(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
    : impl(new H264EncoderImpl(surface, va_display, width, height, httpd)) {}

// Must be defined here because unique_ptr<> destructor needs to know the impl.
H264Encoder::~H264Encoder() {}

void H264Encoder::add_audio(int64_t pts, vector<float> audio)
{
    impl->add_audio(pts, audio);
}

bool H264Encoder::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
{
    return impl->begin_frame(y_tex, cbcr_tex);
}

RefCountedGLsync H264Encoder::end_frame(int64_t pts, const vector<RefCountedFrame> &input_frames)
{
    return impl->end_frame(pts, input_frames);
}

void H264Encoder::shutdown()