2 #include "quicksync_encoder.h"
4 #include <movit/util.h>
5 #include <EGL/eglplatform.h>
11 #include <libavcodec/avcodec.h>
12 #include <libavformat/avformat.h>
13 #include <libavresample/avresample.h>
14 #include <libavutil/channel_layout.h>
15 #include <libavutil/frame.h>
16 #include <libavutil/rational.h>
17 #include <libavutil/samplefmt.h>
18 #include <libavutil/opt.h>
20 #include <libdrm/drm_fourcc.h>
26 #include <va/va_drm.h>
27 #include <va/va_drmcommon.h>
28 #include <va/va_enc_h264.h>
29 #include <va/va_x11.h>
31 #include <condition_variable>
47 #include "x264_encoder.h"
54 #define CHECK_VASTATUS(va_status, func) \
55 if (va_status != VA_STATUS_SUCCESS) { \
56 fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
60 #define BUFFER_OFFSET(i) ((char *)NULL + (i))
62 //#include "loadsurface.h"
64 #define NAL_REF_IDC_NONE 0
65 #define NAL_REF_IDC_LOW 1
66 #define NAL_REF_IDC_MEDIUM 2
67 #define NAL_REF_IDC_HIGH 3
75 #define SLICE_TYPE_P 0
76 #define SLICE_TYPE_B 1
77 #define SLICE_TYPE_I 2
78 #define IS_P_SLICE(type) (SLICE_TYPE_P == (type))
79 #define IS_B_SLICE(type) (SLICE_TYPE_B == (type))
80 #define IS_I_SLICE(type) (SLICE_TYPE_I == (type))
83 #define ENTROPY_MODE_CAVLC 0
84 #define ENTROPY_MODE_CABAC 1
86 #define PROFILE_IDC_BASELINE 66
87 #define PROFILE_IDC_MAIN 77
88 #define PROFILE_IDC_HIGH 100
90 #define BITSTREAM_ALLOCATE_STEPPING 4096
91 #define SURFACE_NUM 16 /* 16 surfaces for source YUV */
92 #define MAX_NUM_REF1 16 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
93 #define MAX_NUM_REF2 32 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
95 static constexpr unsigned int MaxFrameNum = (2<<16);
96 static constexpr unsigned int MaxPicOrderCntLsb = (2<<8);
97 static constexpr unsigned int Log2MaxFrameNum = 16;
98 static constexpr unsigned int Log2MaxPicOrderCntLsb = 8;
99 static constexpr int rc_default_modes[] = { // Priority list of modes.
102 VA_RC_VBR_CONSTRAINED,
108 /* thread to save coded data */
109 #define SRC_SURFACE_FREE 0
110 #define SRC_SURFACE_IN_ENCODING 1
113 unsigned int *buffer;
115 int max_size_in_dword;
117 typedef struct __bitstream bitstream;
121 // H.264 video comes out in encoding order (e.g. with two B-frames:
122 // 0, 3, 1, 2, 6, 4, 5, etc.), but uncompressed video needs to
123 // come in the right order. Since we do everything, including waiting
124 // for the frames to come out of OpenGL, in encoding order, we need
125 // a reordering buffer for uncompressed frames so that they come out
126 // correctly. We go the super-lazy way of not making it understand
127 // anything about the true order (which introduces some extra latency,
128 // though); we know that for N B-frames we need at most (N-1) frames
129 // in the reorder buffer, and can just sort on that.
131 // The class also deals with keeping a freelist as needed.
132 class FrameReorderer {
134 FrameReorderer(unsigned queue_length, int width, int height);
137 int64_t pts, duration;
140 // Invert to get the smallest pts first.
141 bool operator< (const Frame &other) const { return pts > other.pts; }
144 // Inserts the given frame, and returns the next frame to output (in pts order) with its pts, if any. Otherwise -1 and nullptr.
145 // Does _not_ take ownership of data; a copy is taken if needed.
146 // The returned pointer is valid until the next call to reorder_frame, or destruction.
147 // As a special case, if queue_length == 0, will just return pts and data (no reordering needed).
148 Frame reorder_frame(int64_t pts, int64_t duration, uint8_t *data);
150 // The same as reorder_frame, but without inserting anything. Used to empty the queue.
151 Frame get_first_frame();
153 bool empty() const { return frames.empty(); }
156 unsigned queue_length;
159 priority_queue<Frame> frames;
160 stack<uint8_t *> freelist; // Includes the last value returned from reorder_frame.
162 // Owns all the pointers. Normally, freelist and frames could do this themselves,
163 // except priority_queue doesn't work well with movable-only types.
164 vector<unique_ptr<uint8_t[]>> owner;
167 FrameReorderer::FrameReorderer(unsigned queue_length, int width, int height)
168 : queue_length(queue_length), width(width), height(height)
170 for (unsigned i = 0; i < queue_length; ++i) {
171 owner.emplace_back(new uint8_t[width * height * 2]);
172 freelist.push(owner.back().get());
176 FrameReorderer::Frame FrameReorderer::reorder_frame(int64_t pts, int64_t duration, uint8_t *data)
178 if (queue_length == 0) {
179 return Frame{pts, duration, data};
182 assert(!freelist.empty());
183 uint8_t *storage = freelist.top();
185 memcpy(storage, data, width * height * 2);
186 frames.push(Frame{pts, duration, storage});
188 if (frames.size() >= queue_length) {
189 return get_first_frame();
191 return Frame{-1, -1, nullptr};
195 FrameReorderer::Frame FrameReorderer::get_first_frame()
197 assert(!frames.empty());
198 Frame storage = frames.top();
200 freelist.push(storage.data);
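// Illustrative usage sketch (an added explanatory note; <frames_in_encoding_order>
// and InputFrame are hypothetical and do not exist in the encoder): frames go in
// in encoding order and come back out in presentation (pts) order.
//
//     FrameReorderer reorderer(/*queue_length=*/2, width, height);
//     for (const InputFrame &in : frames_in_encoding_order) {
//             FrameReorderer::Frame out = reorderer.reorder_frame(in.pts, in.duration, in.data);
//             if (out.pts != -1) {
//                     // <out> is the next frame in pts order; <out.data> is only
//                     // valid until the next reorder_frame() call.
//             }
//     }
//     while (!reorderer.empty()) {
//             FrameReorderer::Frame out = reorderer.get_first_frame();  // Drain at end-of-stream.
//             // ... consume <out> ...
//     }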
204 class QuickSyncEncoderImpl : public KeyFrameSignalReceiver {
206 QuickSyncEncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd);
207 ~QuickSyncEncoderImpl();
208 void add_audio(int64_t pts, vector<float> audio);
209 bool begin_frame(GLuint *y_tex, GLuint *cbcr_tex);
210 RefCountedGLsync end_frame(int64_t pts, int64_t duration, const vector<RefCountedFrame> &input_frames);
212 void open_output_file(const std::string &filename);
213 void close_output_file();
215 virtual void signal_keyframe() override {
216 stream_mux_writing_keyframes = true;
220 struct storage_task {
221 unsigned long long display_order;
224 int64_t pts, dts, duration;
226 struct PendingFrame {
227 RefCountedGLsync fence;
228 vector<RefCountedFrame> input_frames;
229 int64_t pts, duration;
232 // So we never get negative dts.
233 int64_t global_delay() const {
234 return int64_t(ip_period - 1) * (TIMEBASE / MAX_FPS);
237 void encode_thread_func();
238 void encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts);
239 void add_packet_for_uncompressed_frame(int64_t pts, int64_t duration, const uint8_t *data);
240 void encode_frame(PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
241 int frame_type, int64_t pts, int64_t dts, int64_t duration);
242 void storage_task_thread();
243 void encode_audio(const vector<float> &audio,
244 vector<float> *audio_queue,
247 AVAudioResampleContext *resampler,
248 const vector<Mux *> &muxes);
249 void encode_audio_one_frame(const float *audio,
250 size_t num_samples, // In each channel.
253 AVAudioResampleContext *resampler,
254 const vector<Mux *> &muxes);
255 void encode_last_audio(vector<float> *audio_queue,
258 AVAudioResampleContext *resampler,
259 const vector<Mux *> &muxes);
260 void encode_remaining_audio();
261 void storage_task_enqueue(storage_task task);
262 void save_codeddata(storage_task task);
263 int render_packedsequence();
264 int render_packedpicture();
265 void render_packedslice();
266 int render_sequence();
267 int render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num);
268 void sps_rbsp(bitstream *bs);
269 void pps_rbsp(bitstream *bs);
270 int build_packed_pic_buffer(unsigned char **header_buffer);
271 int render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type);
272 void slice_header(bitstream *bs);
273 int build_packed_seq_buffer(unsigned char **header_buffer);
274 int build_packed_slice_buffer(unsigned char **header_buffer);
275 int init_va(const string &va_display);
277 void enable_zerocopy_if_possible();
278 VADisplay va_open_display(const string &va_display);
279 void va_close_display(VADisplay va_dpy);
281 int release_encode();
282 void update_ReferenceFrames(int frame_type);
283 int update_RefPicList(int frame_type);
284 void open_output_stream();
285 void close_output_stream();
286 static int write_packet_thunk(void *opaque, uint8_t *buf, int buf_size);
287 int write_packet(uint8_t *buf, int buf_size);
289 bool is_shutdown = false;
293 thread encode_thread, storage_thread;
295 mutex storage_task_queue_mutex;
296 condition_variable storage_task_queue_changed;
297 int srcsurface_status[SURFACE_NUM]; // protected by storage_task_queue_mutex
298 queue<storage_task> storage_task_queue; // protected by storage_task_queue_mutex
299 bool storage_thread_should_quit = false; // protected by storage_task_queue_mutex
301 mutex frame_queue_mutex;
302 condition_variable frame_queue_nonempty;
303 bool encode_thread_should_quit = false; // under frame_queue_mutex
305 int current_storage_frame;
307 map<int, PendingFrame> pending_video_frames; // under frame_queue_mutex
308 map<int64_t, vector<float>> pending_audio_frames; // under frame_queue_mutex
309 int64_t last_audio_pts = 0; // The first pts after all audio we've encoded.
312 AVCodecContext *context_audio_file;
313 AVCodecContext *context_audio_stream = nullptr; // nullptr = don't code separate audio for stream.
315 AVAudioResampleContext *resampler_audio_file = nullptr;
316 AVAudioResampleContext *resampler_audio_stream = nullptr;
318 vector<float> audio_queue_file;
319 vector<float> audio_queue_stream;
321 unique_ptr<Mux> stream_mux; // To HTTP.
322 unique_ptr<Mux> file_mux; // To local disk.
324 // While the Mux object is being constructed, <stream_mux_writing_header> is true,
325 // and the header is being collected into stream_mux_header.
326 bool stream_mux_writing_header;
327 string stream_mux_header;
329 bool stream_mux_writing_keyframes = false;
331 AVFrame *audio_frame = nullptr;
333 unique_ptr<FrameReorderer> reorderer;
334 unique_ptr<X264Encoder> x264_encoder; // nullptr if not using x264.
336 Display *x11_display = nullptr;
338 // Encoder parameters
340 VAProfile h264_profile = (VAProfile)~0;
341 VAConfigAttrib config_attrib[VAConfigAttribTypeMax];
342 int config_attrib_num = 0, enc_packed_header_idx;
345 VASurfaceID src_surface, ref_surface;
346 VABufferID coded_buf;
348 VAImage surface_image;
349 GLuint y_tex, cbcr_tex;
351 // Only if use_zerocopy == true.
352 EGLImage y_egl_image, cbcr_egl_image;
354 // Only if use_zerocopy == false.
356 uint8_t *y_ptr, *cbcr_ptr;
357 size_t y_offset, cbcr_offset;
359 GLSurface gl_surfaces[SURFACE_NUM];
361 VAConfigID config_id;
362 VAContextID context_id;
363 VAEncSequenceParameterBufferH264 seq_param;
364 VAEncPictureParameterBufferH264 pic_param;
365 VAEncSliceParameterBufferH264 slice_param;
366 VAPictureH264 CurrentCurrPic;
367 VAPictureH264 ReferenceFrames[MAX_NUM_REF1], RefPicList0_P[MAX_NUM_REF2], RefPicList0_B[MAX_NUM_REF2], RefPicList1_B[MAX_NUM_REF2];
369 // Static quality settings.
370 static constexpr unsigned int frame_bitrate = 15000000 / 60; // Doesn't really matter; only initial_qp does.
371 static constexpr unsigned int num_ref_frames = 2;
372 static constexpr int initial_qp = 15;
373 static constexpr int minimal_qp = 0;
374 static constexpr int intra_period = 30;
375 static constexpr int intra_idr_period = MAX_FPS; // About a second; more at lower frame rates. Not ideal.
377 // Quality settings that are meant to be static, but might be overridden
379 int constraint_set_flag = 0;
380 int h264_packedheader = 0; /* support pack header? */
381 int h264_maxref = (1<<16|1);
382 int h264_entropy_mode = 1; /* cabac */
386 unsigned int current_frame_num = 0;
387 unsigned int numShortTerm = 0;
391 int frame_width_mbaligned;
392 int frame_height_mbaligned;
395 // vaRenderPicture() is supposedly meant to destroy the buffer implicitly,
396 // but if we don't delete it here, we get leaks. The GStreamer implementation
398 static void render_picture_and_delete(VADisplay dpy, VAContextID context, VABufferID *buffers, int num_buffers)
400 VAStatus va_status = vaRenderPicture(dpy, context, buffers, num_buffers);
401 CHECK_VASTATUS(va_status, "vaRenderPicture");
403 for (int i = 0; i < num_buffers; ++i) {
404 va_status = vaDestroyBuffer(dpy, buffers[i]);
405 CHECK_VASTATUS(va_status, "vaDestroyBuffer");
410 va_swap32(unsigned int val)
412 unsigned char *pval = (unsigned char *)&val;
414 return ((pval[0] << 24) |
421 bitstream_start(bitstream *bs)
423 bs->max_size_in_dword = BITSTREAM_ALLOCATE_STEPPING;
424 bs->buffer = (unsigned int *)calloc(bs->max_size_in_dword * sizeof(int), 1);
429 bitstream_end(bitstream *bs)
431 int pos = (bs->bit_offset >> 5);
432 int bit_offset = (bs->bit_offset & 0x1f);
433 int bit_left = 32 - bit_offset;
436 bs->buffer[pos] = va_swap32((bs->buffer[pos] << bit_left));
441 bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits)
443 int pos = (bs->bit_offset >> 5);
444 int bit_offset = (bs->bit_offset & 0x1f);
445 int bit_left = 32 - bit_offset;
450 bs->bit_offset += size_in_bits;
452 if (bit_left > size_in_bits) {
453 bs->buffer[pos] = (bs->buffer[pos] << size_in_bits | val);
455 size_in_bits -= bit_left;
456 if (bit_left >= 32) {
457 bs->buffer[pos] = (val >> size_in_bits);
459 bs->buffer[pos] = (bs->buffer[pos] << bit_left) | (val >> size_in_bits);
461 bs->buffer[pos] = va_swap32(bs->buffer[pos]);
463 if (pos + 1 == bs->max_size_in_dword) {
464 bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING;
465 bs->buffer = (unsigned int *)realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int));
468 bs->buffer[pos + 1] = val;
473 bitstream_put_ue(bitstream *bs, unsigned int val)
475 int size_in_bits = 0;
483 bitstream_put_ui(bs, 0, size_in_bits - 1); // leading zero
484 bitstream_put_ui(bs, val, size_in_bits);
488 bitstream_put_se(bitstream *bs, int val)
490 unsigned int new_val;
495 new_val = 2 * val - 1;
497 bitstream_put_ue(bs, new_val);
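// Reference note (added explanatory comment): ue(v) and se(v) are the Exp-Golomb
// codes from the H.264 spec. For example, ue(3) is written as the bits 00100
// (two leading zeroes, then value+1 = 4 in three bits), and se(-2) first maps -2
// to the unsigned code number 4 (positive v maps to 2v-1, non-positive v to -2v)
// and is then written as ue(4) = 00101.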
501 bitstream_byte_aligning(bitstream *bs, int bit)
503 int bit_offset = (bs->bit_offset & 0x7);
504 int bit_left = 8 - bit_offset;
510 assert(bit == 0 || bit == 1);
513 new_val = (1 << bit_left) - 1;
517 bitstream_put_ui(bs, new_val, bit_left);
521 rbsp_trailing_bits(bitstream *bs)
523 bitstream_put_ui(bs, 1, 1);
524 bitstream_byte_aligning(bs, 0);
527 static void nal_start_code_prefix(bitstream *bs)
529 bitstream_put_ui(bs, 0x00000001, 32);
532 static void nal_header(bitstream *bs, int nal_ref_idc, int nal_unit_type)
534 bitstream_put_ui(bs, 0, 1); /* forbidden_zero_bit: 0 */
535 bitstream_put_ui(bs, nal_ref_idc, 2);
536 bitstream_put_ui(bs, nal_unit_type, 5);
539 void QuickSyncEncoderImpl::sps_rbsp(bitstream *bs)
541 int profile_idc = PROFILE_IDC_BASELINE;
543 if (h264_profile == VAProfileH264High)
544 profile_idc = PROFILE_IDC_HIGH;
545 else if (h264_profile == VAProfileH264Main)
546 profile_idc = PROFILE_IDC_MAIN;
548 bitstream_put_ui(bs, profile_idc, 8); /* profile_idc */
549 bitstream_put_ui(bs, !!(constraint_set_flag & 1), 1); /* constraint_set0_flag */
550 bitstream_put_ui(bs, !!(constraint_set_flag & 2), 1); /* constraint_set1_flag */
551 bitstream_put_ui(bs, !!(constraint_set_flag & 4), 1); /* constraint_set2_flag */
552 bitstream_put_ui(bs, !!(constraint_set_flag & 8), 1); /* constraint_set3_flag */
553 bitstream_put_ui(bs, 0, 4); /* reserved_zero_4bits */
554 bitstream_put_ui(bs, seq_param.level_idc, 8); /* level_idc */
555 bitstream_put_ue(bs, seq_param.seq_parameter_set_id); /* seq_parameter_set_id */
557 if ( profile_idc == PROFILE_IDC_HIGH) {
558 bitstream_put_ue(bs, 1); /* chroma_format_idc = 1, 4:2:0 */
559 bitstream_put_ue(bs, 0); /* bit_depth_luma_minus8 */
560 bitstream_put_ue(bs, 0); /* bit_depth_chroma_minus8 */
561 bitstream_put_ui(bs, 0, 1); /* qpprime_y_zero_transform_bypass_flag */
562 bitstream_put_ui(bs, 0, 1); /* seq_scaling_matrix_present_flag */
565 bitstream_put_ue(bs, seq_param.seq_fields.bits.log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
566 bitstream_put_ue(bs, seq_param.seq_fields.bits.pic_order_cnt_type); /* pic_order_cnt_type */
568 if (seq_param.seq_fields.bits.pic_order_cnt_type == 0)
569 bitstream_put_ue(bs, seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4); /* log2_max_pic_order_cnt_lsb_minus4 */
574 bitstream_put_ue(bs, seq_param.max_num_ref_frames); /* num_ref_frames */
575 bitstream_put_ui(bs, 0, 1); /* gaps_in_frame_num_value_allowed_flag */
577 bitstream_put_ue(bs, seq_param.picture_width_in_mbs - 1); /* pic_width_in_mbs_minus1 */
578 bitstream_put_ue(bs, seq_param.picture_height_in_mbs - 1); /* pic_height_in_map_units_minus1 */
579 bitstream_put_ui(bs, seq_param.seq_fields.bits.frame_mbs_only_flag, 1); /* frame_mbs_only_flag */
581 if (!seq_param.seq_fields.bits.frame_mbs_only_flag) {
585 bitstream_put_ui(bs, seq_param.seq_fields.bits.direct_8x8_inference_flag, 1); /* direct_8x8_inference_flag */
586 bitstream_put_ui(bs, seq_param.frame_cropping_flag, 1); /* frame_cropping_flag */
588 if (seq_param.frame_cropping_flag) {
589 bitstream_put_ue(bs, seq_param.frame_crop_left_offset); /* frame_crop_left_offset */
590 bitstream_put_ue(bs, seq_param.frame_crop_right_offset); /* frame_crop_right_offset */
591 bitstream_put_ue(bs, seq_param.frame_crop_top_offset); /* frame_crop_top_offset */
592 bitstream_put_ue(bs, seq_param.frame_crop_bottom_offset); /* frame_crop_bottom_offset */
595 //if ( frame_bit_rate < 0 ) { //TODO EW: the vui header isn't correct
597 bitstream_put_ui(bs, 0, 1); /* vui_parameters_present_flag */
599 bitstream_put_ui(bs, 1, 1); /* vui_parameters_present_flag */
600 bitstream_put_ui(bs, 0, 1); /* aspect_ratio_info_present_flag */
601 bitstream_put_ui(bs, 0, 1); /* overscan_info_present_flag */
602 bitstream_put_ui(bs, 1, 1); /* video_signal_type_present_flag */
604 bitstream_put_ui(bs, 5, 3); /* video_format (5 = Unspecified) */
605 bitstream_put_ui(bs, 0, 1); /* video_full_range_flag */
606 bitstream_put_ui(bs, 1, 1); /* colour_description_present_flag */
608 bitstream_put_ui(bs, 1, 8); /* colour_primaries (1 = BT.709) */
609 bitstream_put_ui(bs, 2, 8); /* transfer_characteristics (2 = unspecified, since we use sRGB) */
610 bitstream_put_ui(bs, 6, 8); /* matrix_coefficients (6 = BT.601/SMPTE 170M) */
613 bitstream_put_ui(bs, 0, 1); /* chroma_loc_info_present_flag */
614 bitstream_put_ui(bs, 1, 1); /* timing_info_present_flag */
616 bitstream_put_ui(bs, 1, 32); /* num_units_in_tick */
617 bitstream_put_ui(bs, TIMEBASE * 2, 32); /* time_scale */
618 bitstream_put_ui(bs, 1, 1); /* fixed_frame_rate_flag */
620 bitstream_put_ui(bs, 1, 1); /* nal_hrd_parameters_present_flag */
623 bitstream_put_ue(bs, 0); /* cpb_cnt_minus1 */
624 bitstream_put_ui(bs, 4, 4); /* bit_rate_scale */
625 bitstream_put_ui(bs, 6, 4); /* cpb_size_scale */
627 bitstream_put_ue(bs, frame_bitrate - 1); /* bit_rate_value_minus1[0] */
628 bitstream_put_ue(bs, frame_bitrate*8 - 1); /* cpb_size_value_minus1[0] */
629 bitstream_put_ui(bs, 1, 1); /* cbr_flag[0] */
631 bitstream_put_ui(bs, 23, 5); /* initial_cpb_removal_delay_length_minus1 */
632 bitstream_put_ui(bs, 23, 5); /* cpb_removal_delay_length_minus1 */
633 bitstream_put_ui(bs, 23, 5); /* dpb_output_delay_length_minus1 */
634 bitstream_put_ui(bs, 23, 5); /* time_offset_length */
636 bitstream_put_ui(bs, 0, 1); /* vcl_hrd_parameters_present_flag */
637 bitstream_put_ui(bs, 0, 1); /* low_delay_hrd_flag */
639 bitstream_put_ui(bs, 0, 1); /* pic_struct_present_flag */
640 bitstream_put_ui(bs, 0, 1); /* bitstream_restriction_flag */
643 rbsp_trailing_bits(bs); /* rbsp_trailing_bits */
647 void QuickSyncEncoderImpl::pps_rbsp(bitstream *bs)
649 bitstream_put_ue(bs, pic_param.pic_parameter_set_id); /* pic_parameter_set_id */
650 bitstream_put_ue(bs, pic_param.seq_parameter_set_id); /* seq_parameter_set_id */
652 bitstream_put_ui(bs, pic_param.pic_fields.bits.entropy_coding_mode_flag, 1); /* entropy_coding_mode_flag */
654 bitstream_put_ui(bs, 0, 1); /* pic_order_present_flag: 0 */
656 bitstream_put_ue(bs, 0); /* num_slice_groups_minus1 */
658 bitstream_put_ue(bs, pic_param.num_ref_idx_l0_active_minus1); /* num_ref_idx_l0_active_minus1 */
659 bitstream_put_ue(bs, pic_param.num_ref_idx_l1_active_minus1); /* num_ref_idx_l1_active_minus1 */
661 bitstream_put_ui(bs, pic_param.pic_fields.bits.weighted_pred_flag, 1); /* weighted_pred_flag: 0 */
662 bitstream_put_ui(bs, pic_param.pic_fields.bits.weighted_bipred_idc, 2); /* weighted_bipred_idc: 0 */
664 bitstream_put_se(bs, pic_param.pic_init_qp - 26); /* pic_init_qp_minus26 */
665 bitstream_put_se(bs, 0); /* pic_init_qs_minus26 */
666 bitstream_put_se(bs, 0); /* chroma_qp_index_offset */
668 bitstream_put_ui(bs, pic_param.pic_fields.bits.deblocking_filter_control_present_flag, 1); /* deblocking_filter_control_present_flag */
669 bitstream_put_ui(bs, 0, 1); /* constrained_intra_pred_flag */
670 bitstream_put_ui(bs, 0, 1); /* redundant_pic_cnt_present_flag */
673 bitstream_put_ui(bs, pic_param.pic_fields.bits.transform_8x8_mode_flag, 1); /*transform_8x8_mode_flag */
674 bitstream_put_ui(bs, 0, 1); /* pic_scaling_matrix_present_flag */
675 bitstream_put_se(bs, pic_param.second_chroma_qp_index_offset ); /*second_chroma_qp_index_offset */
677 rbsp_trailing_bits(bs);
680 void QuickSyncEncoderImpl::slice_header(bitstream *bs)
682 int first_mb_in_slice = slice_param.macroblock_address;
684 bitstream_put_ue(bs, first_mb_in_slice); /* first_mb_in_slice: 0 */
685 bitstream_put_ue(bs, slice_param.slice_type); /* slice_type */
686 bitstream_put_ue(bs, slice_param.pic_parameter_set_id); /* pic_parameter_set_id: 0 */
687 bitstream_put_ui(bs, pic_param.frame_num, seq_param.seq_fields.bits.log2_max_frame_num_minus4 + 4); /* frame_num */
689 /* frame_mbs_only_flag == 1 */
690 if (!seq_param.seq_fields.bits.frame_mbs_only_flag) {
695 if (pic_param.pic_fields.bits.idr_pic_flag)
696 bitstream_put_ue(bs, slice_param.idr_pic_id); /* idr_pic_id: 0 */
698 if (seq_param.seq_fields.bits.pic_order_cnt_type == 0) {
699 bitstream_put_ui(bs, pic_param.CurrPic.TopFieldOrderCnt, seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 + 4);
700 /* pic_order_present_flag == 0 */
706 /* redundant_pic_cnt_present_flag == 0 */
708 if (IS_P_SLICE(slice_param.slice_type)) {
709 bitstream_put_ui(bs, slice_param.num_ref_idx_active_override_flag, 1); /* num_ref_idx_active_override_flag: */
711 if (slice_param.num_ref_idx_active_override_flag)
712 bitstream_put_ue(bs, slice_param.num_ref_idx_l0_active_minus1);
714 /* ref_pic_list_reordering */
715 bitstream_put_ui(bs, 0, 1); /* ref_pic_list_reordering_flag_l0: 0 */
716 } else if (IS_B_SLICE(slice_param.slice_type)) {
717 bitstream_put_ui(bs, slice_param.direct_spatial_mv_pred_flag, 1); /* direct_spatial_mv_pred: 1 */
719 bitstream_put_ui(bs, slice_param.num_ref_idx_active_override_flag, 1); /* num_ref_idx_active_override_flag: */
721 if (slice_param.num_ref_idx_active_override_flag) {
722 bitstream_put_ue(bs, slice_param.num_ref_idx_l0_active_minus1);
723 bitstream_put_ue(bs, slice_param.num_ref_idx_l1_active_minus1);
726 /* ref_pic_list_reordering */
727 bitstream_put_ui(bs, 0, 1); /* ref_pic_list_reordering_flag_l0: 0 */
728 bitstream_put_ui(bs, 0, 1); /* ref_pic_list_reordering_flag_l1: 0 */
731 if ((pic_param.pic_fields.bits.weighted_pred_flag &&
732 IS_P_SLICE(slice_param.slice_type)) ||
733 ((pic_param.pic_fields.bits.weighted_bipred_idc == 1) &&
734 IS_B_SLICE(slice_param.slice_type))) {
735 /* FIXME: fill weight/offset table */
739 /* dec_ref_pic_marking */
740 if (pic_param.pic_fields.bits.reference_pic_flag) { /* nal_ref_idc != 0 */
741 unsigned char no_output_of_prior_pics_flag = 0;
742 unsigned char long_term_reference_flag = 0;
743 unsigned char adaptive_ref_pic_marking_mode_flag = 0;
745 if (pic_param.pic_fields.bits.idr_pic_flag) {
746 bitstream_put_ui(bs, no_output_of_prior_pics_flag, 1); /* no_output_of_prior_pics_flag: 0 */
747 bitstream_put_ui(bs, long_term_reference_flag, 1); /* long_term_reference_flag: 0 */
749 bitstream_put_ui(bs, adaptive_ref_pic_marking_mode_flag, 1); /* adaptive_ref_pic_marking_mode_flag: 0 */
753 if (pic_param.pic_fields.bits.entropy_coding_mode_flag &&
754 !IS_I_SLICE(slice_param.slice_type))
755 bitstream_put_ue(bs, slice_param.cabac_init_idc); /* cabac_init_idc: 0 */
757 bitstream_put_se(bs, slice_param.slice_qp_delta); /* slice_qp_delta: 0 */
759 /* ignore for SP/SI */
761 if (pic_param.pic_fields.bits.deblocking_filter_control_present_flag) {
762 bitstream_put_ue(bs, slice_param.disable_deblocking_filter_idc); /* disable_deblocking_filter_idc: 0 */
764 if (slice_param.disable_deblocking_filter_idc != 1) {
765 bitstream_put_se(bs, slice_param.slice_alpha_c0_offset_div2); /* slice_alpha_c0_offset_div2: 2 */
766 bitstream_put_se(bs, slice_param.slice_beta_offset_div2); /* slice_beta_offset_div2: 2 */
770 if (pic_param.pic_fields.bits.entropy_coding_mode_flag) {
771 bitstream_byte_aligning(bs, 1);
775 int QuickSyncEncoderImpl::build_packed_pic_buffer(unsigned char **header_buffer)
779 bitstream_start(&bs);
780 nal_start_code_prefix(&bs);
781 nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS);
785 *header_buffer = (unsigned char *)bs.buffer;
786 return bs.bit_offset;
790 QuickSyncEncoderImpl::build_packed_seq_buffer(unsigned char **header_buffer)
794 bitstream_start(&bs);
795 nal_start_code_prefix(&bs);
796 nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS);
800 *header_buffer = (unsigned char *)bs.buffer;
801 return bs.bit_offset;
804 int QuickSyncEncoderImpl::build_packed_slice_buffer(unsigned char **header_buffer)
807 int is_idr = !!pic_param.pic_fields.bits.idr_pic_flag;
808 int is_ref = !!pic_param.pic_fields.bits.reference_pic_flag;
810 bitstream_start(&bs);
811 nal_start_code_prefix(&bs);
813 if (IS_I_SLICE(slice_param.slice_type)) {
814 nal_header(&bs, NAL_REF_IDC_HIGH, is_idr ? NAL_IDR : NAL_NON_IDR);
815 } else if (IS_P_SLICE(slice_param.slice_type)) {
816 nal_header(&bs, NAL_REF_IDC_MEDIUM, NAL_NON_IDR);
818 assert(IS_B_SLICE(slice_param.slice_type));
819 nal_header(&bs, is_ref ? NAL_REF_IDC_LOW : NAL_REF_IDC_NONE, NAL_NON_IDR);
825 *header_buffer = (unsigned char *)bs.buffer;
826 return bs.bit_offset;
831 Assume frame sequence is: Frame#0, #1, #2, ..., #N, ..., #X, ... (encoding order)
832 1) period between Frame #X and Frame #N = #X - #N
833 2) 0 means infinite for intra_period/intra_idr_period, and 0 is invalid for ip_period
834 3) intra_idr_period % intra_period (intra_period > 0) and intra_period % ip_period must be 0
835 4) intra_period and intra_idr_period take precedence over ip_period
836 5) if ip_period > 1, intra_period and intra_idr_period are not the strict periods
837 of I/IDR frames; see the examples below
838 -------------------------------------------------------------------
839   intra_period  intra_idr_period  ip_period   frame sequence (intra_period/intra_idr_period/ip_period)
840   0             ignored           1           IDRPPPPPPP ...                        (No IDR/I any more)
841   0             ignored           >=2         IDR(PBB)(PBB)...                      (No IDR/I any more)
842   1             0                 ignored     IDRIIIIIII...                         (No IDR any more)
843   1             1                 ignored     IDR IDR IDR IDR...
844   1             >=2               ignored     IDRII IDRII IDR...                    (1/3/ignore)
845   >=2           0                 1           IDRPPP IPPP I...                      (3/0/1)
846   >=2           0                 >=2         IDR(PBB)(PBB)(IBB)                    (6/0/3)
847                                               (PBB)(IBB)(PBB)(IBB)...
848   >=2           >=2               1           IDRPPPPP IPPPPP IPPPPP                (6/18/1)
849                                               IDRPPPPP IPPPPP IPPPPP...
850   >=2           >=2               >=2         {IDR(PBB)(PBB)(IBB)(PBB)(IBB)(PBB)}   (6/18/3)
851                                               {IDR(PBB)(PBB)(IBB)(PBB)(IBB)(PBB)}...
852                                               {IDR(PBB)(PBB)(IBB)(PBB)}             (6/12/3)
853                                               {IDR(PBB)(PBB)(IBB)(PBB)}...
854                                               {IDR(PBB)(PBB)}                       (6/6/3)
858 // General pts/dts strategy:
860 // Getting pts and dts right with variable frame rate (VFR) and B-frames can be a
861 // bit tricky. We assume first of all that the frame rate never goes _above_
862 // MAX_FPS, which gives us a frame period N. The decoder can always decode
863 // in at least this speed, as long as dts <= pts (the frame is not presented
864 // before it is decoded). Furthermore, we never have longer chains of
865 // B-frames than a fixed constant C. (In a B-frame chain, we say that the base
866 // I/P-frame has order O=0, the B-frame depending on it directly has order O=1,
867 // etc. The last frame in the chain, which no B-frames depend on, is the “tip”
868 // frame, with an order O <= C.)
870 // Many strategies are possible, but we establish these rules:
872 // - Tip frames have dts = pts - (C-O)*N.
873 // - Non-tip frames have dts = dts_last + N.
875 // An example, with C=2 and N=10 and the data flow showed with arrows:
878 // pts: 30 40 50 60 70 80
880 // dts: 10 30 20 60 50←40
885 // To show that this works fine also with irregular spacings, let's say that
886 // the third frame is delayed a bit (something earlier was dropped). Now the
887 // situation looks like this:
890 // pts: 30 40 80 90 100 110
892 // dts: 10 30 20 90 50←40
897 // The resetting on every tip frame makes sure dts never ends up lagging a lot
898 // behind pts, and the subtraction of (C-O)*N makes sure dts <= pts.
900 // In the output of this function, if <pts_lag> is >= 0, it means to reset the
901 // dts to the current pts minus <pts_lag>; if it's -1, the frame is not a tip
902 // frame and should get a dts based on the previous one (see the sketch below).
907 void encoding2display_order(
908 int encoding_order, int intra_period,
909 int intra_idr_period, int ip_period,
910 int *displaying_order,
911 int *frame_type, int *pts_lag)
913 int encoding_order_gop = 0;
917 if (intra_period == 1) { /* all are I/IDR frames */
918 *displaying_order = encoding_order;
919 if (intra_idr_period == 0)
920 *frame_type = (encoding_order == 0)?FRAME_IDR:FRAME_I;
922 *frame_type = (encoding_order % intra_idr_period == 0)?FRAME_IDR:FRAME_I;
926 if (intra_period == 0)
927 intra_idr_period = 0;
929 if (ip_period == 1) {
930 // No B-frames, sequence is like IDR PPPPP IPPPPP.
931 encoding_order_gop = (intra_idr_period == 0) ? encoding_order : (encoding_order % intra_idr_period);
932 *displaying_order = encoding_order;
934 if (encoding_order_gop == 0) { /* the first frame */
935 *frame_type = FRAME_IDR;
936 } else if (intra_period != 0 && /* have I frames */
937 encoding_order_gop >= 2 &&
938 (encoding_order_gop % intra_period == 0)) {
939 *frame_type = FRAME_I;
941 *frame_type = FRAME_P;
946 // We have B-frames. Sequence is like IDR (PBB)(PBB)(IBB)(PBB).
947 encoding_order_gop = (intra_idr_period == 0) ? encoding_order : (encoding_order % (intra_idr_period + 1));
948 *pts_lag = -1; // Most frames are not tip frames.
950 if (encoding_order_gop == 0) { /* the first frame */
951 *frame_type = FRAME_IDR;
952 *displaying_order = encoding_order;
953 // IDR frames are a special case; I honestly can't find the logic behind
954 // why this is the right thing, but it seems to line up nicely in practice :-)
955 *pts_lag = TIMEBASE / MAX_FPS;
956 } else if (((encoding_order_gop - 1) % ip_period) != 0) { /* B frames */
957 *frame_type = FRAME_B;
958 *displaying_order = encoding_order - 1;
959 if ((encoding_order_gop % ip_period) == 0) {
960 *pts_lag = 0; // Last B-frame.
962 } else if (intra_period != 0 && /* have I frames */
963 encoding_order_gop >= 2 &&
964 ((encoding_order_gop - 1) / ip_period % (intra_period / ip_period)) == 0) {
965 *frame_type = FRAME_I;
966 *displaying_order = encoding_order + ip_period - 1;
968 *frame_type = FRAME_P;
969 *displaying_order = encoding_order + ip_period - 1;
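// A minimal sketch (added for illustration only; nothing in the encoder calls it)
// of how a caller could combine the <pts_lag> output of encoding2display_order()
// with the two dts rules described above. <last_dts> is assumed to be the dts
// assigned to the previous frame in encoding order; TIMEBASE / MAX_FPS is the
// frame period N.
static int64_t dts_from_pts_lag(int64_t pts, int64_t last_dts, int pts_lag)
{
	if (pts_lag >= 0) {
		// Tip frame: reset dts relative to the current pts.
		return pts - pts_lag;
	} else {
		// Non-tip frame: just advance one frame period from the previous dts.
		return last_dts + TIMEBASE / MAX_FPS;
	}
}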
974 static const char *rc_to_string(int rc_mode)
987 case VA_RC_VBR_CONSTRAINED:
988 return "VBR_CONSTRAINED";
994 void QuickSyncEncoderImpl::enable_zerocopy_if_possible()
996 if (global_flags.uncompressed_video_to_http) {
997 fprintf(stderr, "Disabling zerocopy H.264 encoding due to --http-uncompressed-video.\n");
998 use_zerocopy = false;
999 } else if (global_flags.x264_video_to_http) {
1000 fprintf(stderr, "Disabling zerocopy H.264 encoding due to --http-x264-video.\n");
1001 use_zerocopy = false;
1003 use_zerocopy = true;
1007 VADisplay QuickSyncEncoderImpl::va_open_display(const string &va_display)
1009 if (va_display.empty()) {
1010 x11_display = XOpenDisplay(NULL);
1012 fprintf(stderr, "error: can't connect to X server!\n");
1015 enable_zerocopy_if_possible();
1016 return vaGetDisplay(x11_display);
1017 } else if (va_display[0] != '/') {
1018 x11_display = XOpenDisplay(va_display.c_str());
1020 fprintf(stderr, "error: can't connect to X server!\n");
1023 enable_zerocopy_if_possible();
1024 return vaGetDisplay(x11_display);
1026 drm_fd = open(va_display.c_str(), O_RDWR);
1028 perror(va_display.c_str());
1031 use_zerocopy = false;
1032 return vaGetDisplayDRM(drm_fd);
1036 void QuickSyncEncoderImpl::va_close_display(VADisplay va_dpy)
1039 XCloseDisplay(x11_display);
1040 x11_display = nullptr;
1047 int QuickSyncEncoderImpl::init_va(const string &va_display)
1049 VAProfile profile_list[]={VAProfileH264High, VAProfileH264Main, VAProfileH264Baseline, VAProfileH264ConstrainedBaseline};
1050 VAEntrypoint *entrypoints;
1051 int num_entrypoints, slice_entrypoint;
1052 int support_encode = 0;
1053 int major_ver, minor_ver;
1057 va_dpy = va_open_display(va_display);
1058 va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
1059 CHECK_VASTATUS(va_status, "vaInitialize");
1061 num_entrypoints = vaMaxNumEntrypoints(va_dpy);
1062 entrypoints = (VAEntrypoint *)malloc(num_entrypoints * sizeof(*entrypoints));
1064 fprintf(stderr, "error: failed to initialize VA entrypoints array\n");
1068 /* use the highest profile */
1069 for (i = 0; i < sizeof(profile_list)/sizeof(profile_list[0]); i++) {
1070 if ((h264_profile != ~0) && h264_profile != profile_list[i])
1073 h264_profile = profile_list[i];
1074 vaQueryConfigEntrypoints(va_dpy, h264_profile, entrypoints, &num_entrypoints);
1075 for (slice_entrypoint = 0; slice_entrypoint < num_entrypoints; slice_entrypoint++) {
1076 if (entrypoints[slice_entrypoint] == VAEntrypointEncSlice) {
1081 if (support_encode == 1)
1085 if (support_encode == 0) {
1086 printf("Can't find VAEntrypointEncSlice for H264 profiles. If you are using a non-Intel GPU\n");
1087 printf("but have one in your system, try launching Nageru with --va-display /dev/dri/renderD128\n");
1088 printf("to use VA-API against DRM instead of X11.\n");
1091 switch (h264_profile) {
1092 case VAProfileH264Baseline:
1094 constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
1095 h264_entropy_mode = 0;
1097 case VAProfileH264ConstrainedBaseline:
1098 constraint_set_flag |= (1 << 0 | 1 << 1); /* Annex A.2.2 */
1102 case VAProfileH264Main:
1103 constraint_set_flag |= (1 << 1); /* Annex A.2.2 */
1106 case VAProfileH264High:
1107 constraint_set_flag |= (1 << 3); /* Annex A.2.4 */
1110 h264_profile = VAProfileH264Baseline;
1112 constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
1117 VAConfigAttrib attrib[VAConfigAttribTypeMax];
1119 /* find out the format for the render target, and rate control mode */
1120 for (i = 0; i < VAConfigAttribTypeMax; i++)
1121 attrib[i].type = (VAConfigAttribType)i;
1123 va_status = vaGetConfigAttributes(va_dpy, h264_profile, VAEntrypointEncSlice,
1124 &attrib[0], VAConfigAttribTypeMax);
1125 CHECK_VASTATUS(va_status, "vaGetConfigAttributes");
1126 /* check the config attributes we are interested in */
1127 if ((attrib[VAConfigAttribRTFormat].value & VA_RT_FORMAT_YUV420) == 0) {
1128 printf("Did not find the desired YUV420 RT format.\n");
1131 config_attrib[config_attrib_num].type = VAConfigAttribRTFormat;
1132 config_attrib[config_attrib_num].value = VA_RT_FORMAT_YUV420;
1133 config_attrib_num++;
1136 if (attrib[VAConfigAttribRateControl].value != VA_ATTRIB_NOT_SUPPORTED) {
1137 int tmp = attrib[VAConfigAttribRateControl].value;
1139 if (rc_mode == -1 || !(rc_mode & tmp)) {
1140 if (rc_mode != -1) {
1141 printf("Warning: The specified RateControl mode (%s) is not supported; switching to ", rc_to_string(rc_mode));
1144 for (i = 0; i < sizeof(rc_default_modes) / sizeof(rc_default_modes[0]); i++) {
1145 if (rc_default_modes[i] & tmp) {
1146 rc_mode = rc_default_modes[i];
1152 config_attrib[config_attrib_num].type = VAConfigAttribRateControl;
1153 config_attrib[config_attrib_num].value = rc_mode;
1154 config_attrib_num++;
1158 if (attrib[VAConfigAttribEncPackedHeaders].value != VA_ATTRIB_NOT_SUPPORTED) {
1159 int tmp = attrib[VAConfigAttribEncPackedHeaders].value;
1161 h264_packedheader = 1;
1162 config_attrib[config_attrib_num].type = VAConfigAttribEncPackedHeaders;
1163 config_attrib[config_attrib_num].value = VA_ENC_PACKED_HEADER_NONE;
1165 if (tmp & VA_ENC_PACKED_HEADER_SEQUENCE) {
1166 config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_SEQUENCE;
1169 if (tmp & VA_ENC_PACKED_HEADER_PICTURE) {
1170 config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_PICTURE;
1173 if (tmp & VA_ENC_PACKED_HEADER_SLICE) {
1174 config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_SLICE;
1177 if (tmp & VA_ENC_PACKED_HEADER_MISC) {
1178 config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_MISC;
1181 enc_packed_header_idx = config_attrib_num;
1182 config_attrib_num++;
1185 if (attrib[VAConfigAttribEncInterlaced].value != VA_ATTRIB_NOT_SUPPORTED) {
1186 config_attrib[config_attrib_num].type = VAConfigAttribEncInterlaced;
1187 config_attrib[config_attrib_num].value = VA_ENC_PACKED_HEADER_NONE;
1188 config_attrib_num++;
1191 if (attrib[VAConfigAttribEncMaxRefFrames].value != VA_ATTRIB_NOT_SUPPORTED) {
1192 h264_maxref = attrib[VAConfigAttribEncMaxRefFrames].value;
1199 int QuickSyncEncoderImpl::setup_encode()
1202 VASurfaceID *tmp_surfaceid;
1203 int codedbuf_size, i;
1204 static VASurfaceID src_surface[SURFACE_NUM];
1205 static VASurfaceID ref_surface[SURFACE_NUM];
1207 va_status = vaCreateConfig(va_dpy, h264_profile, VAEntrypointEncSlice,
1208 &config_attrib[0], config_attrib_num, &config_id);
1209 CHECK_VASTATUS(va_status, "vaCreateConfig");
1211 /* create source surfaces */
1212 va_status = vaCreateSurfaces(va_dpy,
1213 VA_RT_FORMAT_YUV420, frame_width_mbaligned, frame_height_mbaligned,
1214 &src_surface[0], SURFACE_NUM,
1216 CHECK_VASTATUS(va_status, "vaCreateSurfaces");
1218 /* create reference surfaces */
1219 va_status = vaCreateSurfaces(va_dpy,
1220 VA_RT_FORMAT_YUV420, frame_width_mbaligned, frame_height_mbaligned,
1221 &ref_surface[0], SURFACE_NUM,
1223 CHECK_VASTATUS(va_status, "vaCreateSurfaces");
1225 tmp_surfaceid = (VASurfaceID *)calloc(2 * SURFACE_NUM, sizeof(VASurfaceID));
1226 memcpy(tmp_surfaceid, src_surface, SURFACE_NUM * sizeof(VASurfaceID));
1227 memcpy(tmp_surfaceid + SURFACE_NUM, ref_surface, SURFACE_NUM * sizeof(VASurfaceID));
1229 /* Create a context for this encode pipe */
1230 va_status = vaCreateContext(va_dpy, config_id,
1231 frame_width_mbaligned, frame_height_mbaligned,
1233 tmp_surfaceid, 2 * SURFACE_NUM,
1235 CHECK_VASTATUS(va_status, "vaCreateContext");
1236 free(tmp_surfaceid);
1238 codedbuf_size = (frame_width_mbaligned * frame_height_mbaligned * 400) / (16*16);
1240 for (i = 0; i < SURFACE_NUM; i++) {
1241 /* Create the coded buffer once per surface. Unlike the other VA buffers, which
1242 * are consumed by vaRenderPicture and can simply be recreated for every frame,
1243 * the coded buffer has to be mapped and read back after
1244 * vaRenderPicture/vaEndPicture, so VA will not maintain it for us and we keep
1245 * it around for the whole lifetime of the encoder
1247 va_status = vaCreateBuffer(va_dpy, context_id, VAEncCodedBufferType,
1248 codedbuf_size, 1, NULL, &gl_surfaces[i].coded_buf);
1249 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1252 /* create OpenGL objects */
1253 //glGenFramebuffers(SURFACE_NUM, fbos);
1255 for (i = 0; i < SURFACE_NUM; i++) {
1256 glGenTextures(1, &gl_surfaces[i].y_tex);
1257 glGenTextures(1, &gl_surfaces[i].cbcr_tex);
1259 if (!use_zerocopy) {
1261 glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].y_tex);
1262 glTexStorage2D(GL_TEXTURE_2D, 1, GL_R8, frame_width, frame_height);
1264 // Create CbCr image.
1265 glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].cbcr_tex);
1266 glTexStorage2D(GL_TEXTURE_2D, 1, GL_RG8, frame_width / 2, frame_height / 2);
1268 // Generate a PBO to read into. It doesn't necessarily fit 1:1 with the VA-API
1269 // buffers, due to potentially differing pitch.
1270 glGenBuffers(1, &gl_surfaces[i].pbo);
1271 glBindBuffer(GL_PIXEL_PACK_BUFFER, gl_surfaces[i].pbo);
1272 glBufferStorage(GL_PIXEL_PACK_BUFFER, frame_width * frame_height * 2, nullptr, GL_MAP_READ_BIT | GL_MAP_WRITE_BIT | GL_MAP_PERSISTENT_BIT);
1273 uint8_t *ptr = (uint8_t *)glMapBufferRange(GL_PIXEL_PACK_BUFFER, 0, frame_width * frame_height * 2, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
1274 gl_surfaces[i].y_offset = 0;
1275 gl_surfaces[i].cbcr_offset = frame_width * frame_height;
1276 gl_surfaces[i].y_ptr = ptr + gl_surfaces[i].y_offset;
1277 gl_surfaces[i].cbcr_ptr = ptr + gl_surfaces[i].cbcr_offset;
1278 glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
1282 for (i = 0; i < SURFACE_NUM; i++) {
1283 gl_surfaces[i].src_surface = src_surface[i];
1284 gl_surfaces[i].ref_surface = ref_surface[i];
1290 // Given a list like 1 9 3 0 2 8 4 and a pivot element 3, will produce 2 1 0 3 4 8 9 (elements less than the pivot sorted descending, then the rest sorted ascending).
1293 template<class T, class C>
1294 static void sort_two(T *begin, T *end, const T &pivot, const C &less_than)
1296 T *middle = partition(begin, end, [&](const T &elem) { return less_than(elem, pivot); });
1297 sort(begin, middle, [&](const T &a, const T &b) { return less_than(b, a); });
1298 sort(middle, end, less_than);
1301 void QuickSyncEncoderImpl::update_ReferenceFrames(int frame_type)
1305 if (frame_type == FRAME_B)
1308 CurrentCurrPic.flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
1310 if (numShortTerm > num_ref_frames)
1311 numShortTerm = num_ref_frames;
1312 for (i=numShortTerm-1; i>0; i--)
1313 ReferenceFrames[i] = ReferenceFrames[i-1];
1314 ReferenceFrames[0] = CurrentCurrPic;
1316 current_frame_num++;
1317 if (current_frame_num > MaxFrameNum)
1318 current_frame_num = 0;
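// Worked example for update_ReferenceFrames() (added note): with
// num_ref_frames == 2 this keeps a two-entry sliding window, so after encoding
// display frames I0, P1, P2 the list is {P2, P1}; B-frames return early above
// and are never added as references.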
1322 int QuickSyncEncoderImpl::update_RefPicList(int frame_type)
1324 const auto descending_by_frame_idx = [](const VAPictureH264 &a, const VAPictureH264 &b) {
1325 return a.frame_idx > b.frame_idx;
1327 const auto ascending_by_top_field_order_cnt = [](const VAPictureH264 &a, const VAPictureH264 &b) {
1328 return a.TopFieldOrderCnt < b.TopFieldOrderCnt;
1330 const auto descending_by_top_field_order_cnt = [](const VAPictureH264 &a, const VAPictureH264 &b) {
1331 return a.TopFieldOrderCnt > b.TopFieldOrderCnt;
1334 if (frame_type == FRAME_P) {
1335 memcpy(RefPicList0_P, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
1336 sort(&RefPicList0_P[0], &RefPicList0_P[numShortTerm], descending_by_frame_idx);
1337 } else if (frame_type == FRAME_B) {
1338 memcpy(RefPicList0_B, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
1339 sort_two(&RefPicList0_B[0], &RefPicList0_B[numShortTerm], CurrentCurrPic, ascending_by_top_field_order_cnt);
1341 memcpy(RefPicList1_B, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
1342 sort_two(&RefPicList1_B[0], &RefPicList1_B[numShortTerm], CurrentCurrPic, descending_by_top_field_order_cnt);
1349 int QuickSyncEncoderImpl::render_sequence()
1351 VABufferID seq_param_buf, rc_param_buf, render_id[2];
1353 VAEncMiscParameterBuffer *misc_param;
1354 VAEncMiscParameterRateControl *misc_rate_ctrl;
1356 seq_param.level_idc = 41 /*SH_LEVEL_3*/;
1357 seq_param.picture_width_in_mbs = frame_width_mbaligned / 16;
1358 seq_param.picture_height_in_mbs = frame_height_mbaligned / 16;
1359 seq_param.bits_per_second = frame_bitrate;
1361 seq_param.intra_period = intra_period;
1362 seq_param.intra_idr_period = intra_idr_period;
1363 seq_param.ip_period = ip_period;
1365 seq_param.max_num_ref_frames = num_ref_frames;
1366 seq_param.seq_fields.bits.frame_mbs_only_flag = 1;
1367 seq_param.time_scale = TIMEBASE * 2;
1368 seq_param.num_units_in_tick = 1; /* Tc = num_units_in_tick / scale */
1369 seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = Log2MaxPicOrderCntLsb - 4;
1370 seq_param.seq_fields.bits.log2_max_frame_num_minus4 = Log2MaxFrameNum - 4;
1371 seq_param.seq_fields.bits.frame_mbs_only_flag = 1;
1372 seq_param.seq_fields.bits.chroma_format_idc = 1;
1373 seq_param.seq_fields.bits.direct_8x8_inference_flag = 1;
1375 if (frame_width != frame_width_mbaligned ||
1376 frame_height != frame_height_mbaligned) {
1377 seq_param.frame_cropping_flag = 1;
1378 seq_param.frame_crop_left_offset = 0;
1379 seq_param.frame_crop_right_offset = (frame_width_mbaligned - frame_width)/2;
1380 seq_param.frame_crop_top_offset = 0;
1381 seq_param.frame_crop_bottom_offset = (frame_height_mbaligned - frame_height)/2;
1384 va_status = vaCreateBuffer(va_dpy, context_id,
1385 VAEncSequenceParameterBufferType,
1386 sizeof(seq_param), 1, &seq_param, &seq_param_buf);
1387 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1389 va_status = vaCreateBuffer(va_dpy, context_id,
1390 VAEncMiscParameterBufferType,
1391 sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
1392 1, NULL, &rc_param_buf);
1393 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1395 vaMapBuffer(va_dpy, rc_param_buf, (void **)&misc_param);
1396 misc_param->type = VAEncMiscParameterTypeRateControl;
1397 misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data;
1398 memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl));
1399 misc_rate_ctrl->bits_per_second = frame_bitrate;
1400 misc_rate_ctrl->target_percentage = 66;
1401 misc_rate_ctrl->window_size = 1000;
1402 misc_rate_ctrl->initial_qp = initial_qp;
1403 misc_rate_ctrl->min_qp = minimal_qp;
1404 misc_rate_ctrl->basic_unit_size = 0;
1405 vaUnmapBuffer(va_dpy, rc_param_buf);
1407 render_id[0] = seq_param_buf;
1408 render_id[1] = rc_param_buf;
1410 render_picture_and_delete(va_dpy, context_id, &render_id[0], 2);
1415 static int calc_poc(int pic_order_cnt_lsb, int frame_type)
1417 static int PicOrderCntMsb_ref = 0, pic_order_cnt_lsb_ref = 0;
1418 int prevPicOrderCntMsb, prevPicOrderCntLsb;
1419 int PicOrderCntMsb, TopFieldOrderCnt;
1421 if (frame_type == FRAME_IDR)
1422 prevPicOrderCntMsb = prevPicOrderCntLsb = 0;
1424 prevPicOrderCntMsb = PicOrderCntMsb_ref;
1425 prevPicOrderCntLsb = pic_order_cnt_lsb_ref;
1428 if ((pic_order_cnt_lsb < prevPicOrderCntLsb) &&
1429 ((prevPicOrderCntLsb - pic_order_cnt_lsb) >= (int)(MaxPicOrderCntLsb / 2)))
1430 PicOrderCntMsb = prevPicOrderCntMsb + MaxPicOrderCntLsb;
1431 else if ((pic_order_cnt_lsb > prevPicOrderCntLsb) &&
1432 ((pic_order_cnt_lsb - prevPicOrderCntLsb) > (int)(MaxPicOrderCntLsb / 2)))
1433 PicOrderCntMsb = prevPicOrderCntMsb - MaxPicOrderCntLsb;
1435 PicOrderCntMsb = prevPicOrderCntMsb;
1437 TopFieldOrderCnt = PicOrderCntMsb + pic_order_cnt_lsb;
1439 if (frame_type != FRAME_B) {
1440 PicOrderCntMsb_ref = PicOrderCntMsb;
1441 pic_order_cnt_lsb_ref = pic_order_cnt_lsb;
1444 return TopFieldOrderCnt;
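// Worked example for calc_poc() (added note): with MaxPicOrderCntLsb = 512, if
// the previous reference frame had pic_order_cnt_lsb = 510 and
// PicOrderCntMsb = 0, a new non-IDR frame with pic_order_cnt_lsb = 4 triggers
// the wraparound branch (510 - 4 >= 256), so PicOrderCntMsb becomes 512 and the
// function returns TopFieldOrderCnt = 512 + 4 = 516.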
1447 int QuickSyncEncoderImpl::render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num)
1449 VABufferID pic_param_buf;
1453 pic_param.CurrPic.picture_id = gl_surfaces[display_frame_num % SURFACE_NUM].ref_surface;
1454 pic_param.CurrPic.frame_idx = current_frame_num;
1455 pic_param.CurrPic.flags = 0;
1456 pic_param.CurrPic.TopFieldOrderCnt = calc_poc((display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb, frame_type);
1457 pic_param.CurrPic.BottomFieldOrderCnt = pic_param.CurrPic.TopFieldOrderCnt;
1458 CurrentCurrPic = pic_param.CurrPic;
1460 memcpy(pic_param.ReferenceFrames, ReferenceFrames, numShortTerm*sizeof(VAPictureH264));
1461 for (i = numShortTerm; i < MAX_NUM_REF1; i++) {
1462 pic_param.ReferenceFrames[i].picture_id = VA_INVALID_SURFACE;
1463 pic_param.ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID;
1466 pic_param.pic_fields.bits.idr_pic_flag = (frame_type == FRAME_IDR);
1467 pic_param.pic_fields.bits.reference_pic_flag = (frame_type != FRAME_B);
1468 pic_param.pic_fields.bits.entropy_coding_mode_flag = h264_entropy_mode;
1469 pic_param.pic_fields.bits.deblocking_filter_control_present_flag = 1;
1470 pic_param.frame_num = current_frame_num;
1471 pic_param.coded_buf = gl_surfaces[display_frame_num % SURFACE_NUM].coded_buf;
1472 pic_param.last_picture = false; // FIXME
1473 pic_param.pic_init_qp = initial_qp;
1475 va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
1476 sizeof(pic_param), 1, &pic_param, &pic_param_buf);
1477 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1479 render_picture_and_delete(va_dpy, context_id, &pic_param_buf, 1);
1484 int QuickSyncEncoderImpl::render_packedsequence()
1486 VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1487 VABufferID packedseq_para_bufid, packedseq_data_bufid, render_id[2];
1488 unsigned int length_in_bits;
1489 unsigned char *packedseq_buffer = NULL;
1492 length_in_bits = build_packed_seq_buffer(&packedseq_buffer);
1494 packedheader_param_buffer.type = VAEncPackedHeaderSequence;
1496 packedheader_param_buffer.bit_length = length_in_bits; /*length_in_bits*/
1497 packedheader_param_buffer.has_emulation_bytes = 0;
1498 va_status = vaCreateBuffer(va_dpy,
1500 VAEncPackedHeaderParameterBufferType,
1501 sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1502 &packedseq_para_bufid);
1503 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1505 va_status = vaCreateBuffer(va_dpy,
1507 VAEncPackedHeaderDataBufferType,
1508 (length_in_bits + 7) / 8, 1, packedseq_buffer,
1509 &packedseq_data_bufid);
1510 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1512 render_id[0] = packedseq_para_bufid;
1513 render_id[1] = packedseq_data_bufid;
1514 render_picture_and_delete(va_dpy, context_id, render_id, 2);
1516 free(packedseq_buffer);
1522 int QuickSyncEncoderImpl::render_packedpicture()
1524 VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1525 VABufferID packedpic_para_bufid, packedpic_data_bufid, render_id[2];
1526 unsigned int length_in_bits;
1527 unsigned char *packedpic_buffer = NULL;
1530 length_in_bits = build_packed_pic_buffer(&packedpic_buffer);
1531 packedheader_param_buffer.type = VAEncPackedHeaderPicture;
1532 packedheader_param_buffer.bit_length = length_in_bits;
1533 packedheader_param_buffer.has_emulation_bytes = 0;
1535 va_status = vaCreateBuffer(va_dpy,
1537 VAEncPackedHeaderParameterBufferType,
1538 sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1539 &packedpic_para_bufid);
1540 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1542 va_status = vaCreateBuffer(va_dpy,
1544 VAEncPackedHeaderDataBufferType,
1545 (length_in_bits + 7) / 8, 1, packedpic_buffer,
1546 &packedpic_data_bufid);
1547 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1549 render_id[0] = packedpic_para_bufid;
1550 render_id[1] = packedpic_data_bufid;
1551 render_picture_and_delete(va_dpy, context_id, render_id, 2);
1553 free(packedpic_buffer);
1558 void QuickSyncEncoderImpl::render_packedslice()
1560 VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
1561 VABufferID packedslice_para_bufid, packedslice_data_bufid, render_id[2];
1562 unsigned int length_in_bits;
1563 unsigned char *packedslice_buffer = NULL;
1566 length_in_bits = build_packed_slice_buffer(&packedslice_buffer);
1567 packedheader_param_buffer.type = VAEncPackedHeaderSlice;
1568 packedheader_param_buffer.bit_length = length_in_bits;
1569 packedheader_param_buffer.has_emulation_bytes = 0;
1571 va_status = vaCreateBuffer(va_dpy,
1573 VAEncPackedHeaderParameterBufferType,
1574 sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
1575 &packedslice_para_bufid);
1576 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1578 va_status = vaCreateBuffer(va_dpy,
1580 VAEncPackedHeaderDataBufferType,
1581 (length_in_bits + 7) / 8, 1, packedslice_buffer,
1582 &packedslice_data_bufid);
1583 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1585 render_id[0] = packedslice_para_bufid;
1586 render_id[1] = packedslice_data_bufid;
1587 render_picture_and_delete(va_dpy, context_id, render_id, 2);
1589 free(packedslice_buffer);
1592 int QuickSyncEncoderImpl::render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type)
1594 VABufferID slice_param_buf;
1598 update_RefPicList(frame_type);
1600 /* one frame, one slice */
1601 slice_param.macroblock_address = 0;
1602 slice_param.num_macroblocks = frame_width_mbaligned * frame_height_mbaligned / (16*16); /* measured in macroblocks */
1603 slice_param.slice_type = (frame_type == FRAME_IDR) ? 2 : frame_type; /* 2 = I slice */
1604 if (frame_type == FRAME_IDR) {
1605 if (encoding_frame_num != 0)
1606 ++slice_param.idr_pic_id;
1607 } else if (frame_type == FRAME_P) {
1608 int refpiclist0_max = h264_maxref & 0xffff;
1609 memcpy(slice_param.RefPicList0, RefPicList0_P, refpiclist0_max*sizeof(VAPictureH264));
1611 for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
1612 slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
1613 slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
1615 } else if (frame_type == FRAME_B) {
1616 int refpiclist0_max = h264_maxref & 0xffff;
1617 int refpiclist1_max = (h264_maxref >> 16) & 0xffff;
1619 memcpy(slice_param.RefPicList0, RefPicList0_B, refpiclist0_max*sizeof(VAPictureH264));
1620 for (i = refpiclist0_max; i < MAX_NUM_REF2; i++) {
1621 slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
1622 slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
1625 memcpy(slice_param.RefPicList1, RefPicList1_B, refpiclist1_max*sizeof(VAPictureH264));
1626 for (i = refpiclist1_max; i < MAX_NUM_REF2; i++) {
1627 slice_param.RefPicList1[i].picture_id = VA_INVALID_SURFACE;
1628 slice_param.RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
1632 slice_param.slice_alpha_c0_offset_div2 = 0;
1633 slice_param.slice_beta_offset_div2 = 0;
1634 slice_param.direct_spatial_mv_pred_flag = 1;
1635 slice_param.pic_order_cnt_lsb = (display_frame_num - gop_start_display_frame_num) % MaxPicOrderCntLsb;
1638 if (h264_packedheader &&
1639 config_attrib[enc_packed_header_idx].value & VA_ENC_PACKED_HEADER_SLICE)
1640 render_packedslice();
1642 va_status = vaCreateBuffer(va_dpy, context_id, VAEncSliceParameterBufferType,
1643 sizeof(slice_param), 1, &slice_param, &slice_param_buf);
1644 CHECK_VASTATUS(va_status, "vaCreateBuffer");
1646 render_picture_and_delete(va_dpy, context_id, &slice_param_buf, 1);
1653 void QuickSyncEncoderImpl::save_codeddata(storage_task task)
1655 VACodedBufferSegment *buf_list = NULL;
1660 va_status = vaMapBuffer(va_dpy, gl_surfaces[task.display_order % SURFACE_NUM].coded_buf, (void **)(&buf_list));
1661 CHECK_VASTATUS(va_status, "vaMapBuffer");
1662 while (buf_list != NULL) {
1663 data.append(reinterpret_cast<const char *>(buf_list->buf), buf_list->size);
1664 buf_list = (VACodedBufferSegment *) buf_list->next;
1666 vaUnmapBuffer(va_dpy, gl_surfaces[task.display_order % SURFACE_NUM].coded_buf);
1671 memset(&pkt, 0, sizeof(pkt));
1673 pkt.data = reinterpret_cast<uint8_t *>(&data[0]);
1674 pkt.size = data.size();
1675 pkt.stream_index = 0;
1676 if (task.frame_type == FRAME_IDR) {
1677 pkt.flags = AV_PKT_FLAG_KEY;
1681 pkt.duration = task.duration;
1683 file_mux->add_packet(pkt, task.pts + global_delay(), task.dts + global_delay());
1685 if (!global_flags.uncompressed_video_to_http &&
1686 !global_flags.x264_video_to_http) {
1687 stream_mux->add_packet(pkt, task.pts + global_delay(), task.dts + global_delay());
1690 // Encode and add all audio frames up to and including the pts of this video frame.
1693 vector<float> audio;
1695 unique_lock<mutex> lock(frame_queue_mutex);
1696 frame_queue_nonempty.wait(lock, [this]{ return storage_thread_should_quit || !pending_audio_frames.empty(); });
1697 if (storage_thread_should_quit && pending_audio_frames.empty()) return;
1698 auto it = pending_audio_frames.begin();
1699 if (it->first > task.pts) break;
1700 audio_pts = it->first;
1701 audio = move(it->second);
1702 pending_audio_frames.erase(it);
1705 if (context_audio_stream) {
1706 encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, resampler_audio_file, { file_mux.get() });
1707 encode_audio(audio, &audio_queue_stream, audio_pts, context_audio_stream, resampler_audio_stream, { stream_mux.get() });
1709 encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, resampler_audio_file, { stream_mux.get(), file_mux.get() });
1711 last_audio_pts = audio_pts + audio.size() * TIMEBASE / (OUTPUT_FREQUENCY * 2);
1713 if (audio_pts == task.pts) break;
1717 void QuickSyncEncoderImpl::encode_audio(
1718 const vector<float> &audio,
1719 vector<float> *audio_queue,
1721 AVCodecContext *ctx,
1722 AVAudioResampleContext *resampler,
1723 const vector<Mux *> &muxes)
1725 if (ctx->frame_size == 0) {
1726 // No queueing needed.
1727 assert(audio_queue->empty());
1728 assert(audio.size() % 2 == 0);
1729 encode_audio_one_frame(&audio[0], audio.size() / 2, audio_pts, ctx, resampler, muxes);
1733 int64_t sample_offset = audio_queue->size();
1735 audio_queue->insert(audio_queue->end(), audio.begin(), audio.end());
1737 for (sample_num = 0;
1738 sample_num + ctx->frame_size * 2 <= audio_queue->size();
1739 sample_num += ctx->frame_size * 2) {
1740 int64_t adjusted_audio_pts = audio_pts + (int64_t(sample_num) - sample_offset) * TIMEBASE / (OUTPUT_FREQUENCY * 2);
1741 encode_audio_one_frame(&(*audio_queue)[sample_num],
1748 audio_queue->erase(audio_queue->begin(), audio_queue->begin() + sample_num);
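// For example (illustrative numbers): with ctx->frame_size == 1024 and 5000
// interleaved stereo floats queued (2500 sample pairs), the loop above encodes
// two codec frames of 1024 pairs each (4096 floats) and leaves the remaining
// 904 floats queued until more audio arrives.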
1751 void QuickSyncEncoderImpl::encode_audio_one_frame(
1755 AVCodecContext *ctx,
1756 AVAudioResampleContext *resampler,
1757 const vector<Mux *> &muxes)
1759 audio_frame->pts = audio_pts + global_delay();
1760 audio_frame->nb_samples = num_samples;
1761 audio_frame->channel_layout = AV_CH_LAYOUT_STEREO;
1762 audio_frame->format = ctx->sample_fmt;
1763 audio_frame->sample_rate = OUTPUT_FREQUENCY;
1765 if (av_samples_alloc(audio_frame->data, nullptr, 2, num_samples, ctx->sample_fmt, 0) < 0) {
1766 fprintf(stderr, "Could not allocate %ld samples.\n", num_samples);
1770 if (avresample_convert(resampler, audio_frame->data, 0, num_samples,
1771 (uint8_t **)&audio, 0, num_samples) < 0) {
1772 fprintf(stderr, "Audio conversion failed.\n");
1777 av_init_packet(&pkt);
1781 avcodec_encode_audio2(ctx, &pkt, audio_frame, &got_output);
1783 pkt.stream_index = 1;
1785 for (Mux *mux : muxes) {
1786 mux->add_packet(pkt, pkt.pts, pkt.dts);
1790 av_freep(&audio_frame->data[0]);
1792 av_frame_unref(audio_frame);
1793 av_free_packet(&pkt);
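// Flush whatever is left in the sample queue (the final frame may be short), then
// drain delayed packets from codecs that advertise AV_CODEC_CAP_DELAY.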
1796 void QuickSyncEncoderImpl::encode_last_audio(
1797 vector<float> *audio_queue,
1799 AVCodecContext *ctx,
1800 AVAudioResampleContext *resampler,
1801 const vector<Mux *> &muxes)
1803 if (!audio_queue->empty()) {
1804 // Last frame can be whatever size we want.
1805 assert(audio_queue->size() % 2 == 0);
1806 encode_audio_one_frame(&(*audio_queue)[0], audio_queue->size() / 2, audio_pts, ctx, resampler, muxes);
1807 audio_queue->clear();
1810 if (ctx->codec->capabilities & AV_CODEC_CAP_DELAY) {
1811 // Collect any delayed frames.
1815 av_init_packet(&pkt);
1818 avcodec_encode_audio2(ctx, &pkt, nullptr, &got_output);
1819 if (!got_output) break;
1821 pkt.stream_index = 1;
1823 for (Mux *mux : muxes) {
1824 mux->add_packet(pkt, pkt.pts, pkt.dts);
1826 av_free_packet(&pkt);
// Hand a newly kicked-off encode over to the storage thread by pushing it onto its queue.
1832 void QuickSyncEncoderImpl::storage_task_enqueue(storage_task task)
1834 unique_lock<mutex> lock(storage_task_queue_mutex);
1835 storage_task_queue.push(move(task));
1836 storage_task_queue_changed.notify_all();
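// Storage thread main loop: pop tasks off the queue, wait for VA-API to finish
// encoding each frame, save the coded data, and mark the source surface free again.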
1839 void QuickSyncEncoderImpl::storage_task_thread()
1842 storage_task current;
1844 // wait until there's an encoded frame
1845 unique_lock<mutex> lock(storage_task_queue_mutex);
1846 storage_task_queue_changed.wait(lock, [this]{ return storage_thread_should_quit || !storage_task_queue.empty(); });
1847 if (storage_thread_should_quit && storage_task_queue.empty()) return;
1848 current = move(storage_task_queue.front());
1849 storage_task_queue.pop();
// Wait for the encode of this frame to finish, then save the coded data to disk.
1855 va_status = vaSyncSurface(va_dpy, gl_surfaces[current.display_order % SURFACE_NUM].src_surface);
1856 CHECK_VASTATUS(va_status, "vaSyncSurface");
1857 save_codeddata(move(current));
1860 unique_lock<mutex> lock(storage_task_queue_mutex);
1861 srcsurface_status[current.display_order % SURFACE_NUM] = SRC_SURFACE_FREE;
1862 storage_task_queue_changed.notify_all();
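// Tear down the per-surface VA buffers/surfaces and GL objects, then the VA context and config.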
1867 int QuickSyncEncoderImpl::release_encode()
1869 for (unsigned i = 0; i < SURFACE_NUM; i++) {
1870 vaDestroyBuffer(va_dpy, gl_surfaces[i].coded_buf);
1871 vaDestroySurfaces(va_dpy, &gl_surfaces[i].src_surface, 1);
1872 vaDestroySurfaces(va_dpy, &gl_surfaces[i].ref_surface, 1);
1874 if (!use_zerocopy) {
1875 glBindBuffer(GL_PIXEL_PACK_BUFFER, gl_surfaces[i].pbo);
1876 glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
1877 glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
1878 glDeleteBuffers(1, &gl_surfaces[i].pbo);
1880 glDeleteTextures(1, &gl_surfaces[i].y_tex);
1881 glDeleteTextures(1, &gl_surfaces[i].cbcr_tex);
1884 vaDestroyContext(va_dpy, context_id);
1885 vaDestroyConfig(va_dpy, config_id);
1890 int QuickSyncEncoderImpl::deinit_va()
1892 vaTerminate(va_dpy);
1894 va_close_display(va_dpy);
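// Set up one audio encoder context plus a matching float -> codec-sample-format
// resampler. Called once for the file mux and, when a separate stream audio codec
// is configured, once more for the stream mux.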
1901 void init_audio_encoder(const string &codec_name, int bit_rate, AVCodecContext **ctx, AVAudioResampleContext **resampler)
1903 AVCodec *codec_audio = avcodec_find_encoder_by_name(codec_name.c_str());
1904 if (codec_audio == nullptr) {
1905 fprintf(stderr, "ERROR: Could not find codec '%s'\n", codec_name.c_str());
1909 AVCodecContext *context_audio = avcodec_alloc_context3(codec_audio);
1910 context_audio->bit_rate = bit_rate;
1911 context_audio->sample_rate = OUTPUT_FREQUENCY;
1912 context_audio->sample_fmt = codec_audio->sample_fmts[0];
1913 context_audio->channels = 2;
1914 context_audio->channel_layout = AV_CH_LAYOUT_STEREO;
1915 context_audio->time_base = AVRational{1, TIMEBASE};
1916 context_audio->flags |= CODEC_FLAG_GLOBAL_HEADER;
1917 if (avcodec_open2(context_audio, codec_audio, NULL) < 0) {
1918 fprintf(stderr, "Could not open codec '%s'\n", codec_name.c_str());
1922 *ctx = context_audio;
1924 *resampler = avresample_alloc_context();
1925 if (*resampler == nullptr) {
1926 fprintf(stderr, "Allocating resampler failed.\n");
1930 av_opt_set_int(*resampler, "in_channel_layout", AV_CH_LAYOUT_STEREO, 0);
1931 av_opt_set_int(*resampler, "out_channel_layout", AV_CH_LAYOUT_STEREO, 0);
1932 av_opt_set_int(*resampler, "in_sample_rate", OUTPUT_FREQUENCY, 0);
1933 av_opt_set_int(*resampler, "out_sample_rate", OUTPUT_FREQUENCY, 0);
1934 av_opt_set_int(*resampler, "in_sample_fmt", AV_SAMPLE_FMT_FLT, 0);
1935 av_opt_set_int(*resampler, "out_sample_fmt", context_audio->sample_fmt, 0);
1937 if (avresample_open(*resampler) < 0) {
1938 fprintf(stderr, "Could not open resample context.\n");
1945 QuickSyncEncoderImpl::QuickSyncEncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
1946 : current_storage_frame(0), surface(surface), httpd(httpd), frame_width(width), frame_height(height)
1948 init_audio_encoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE, &context_audio_file, &resampler_audio_file);
1950 if (!global_flags.stream_audio_codec_name.empty()) {
1951 init_audio_encoder(global_flags.stream_audio_codec_name,
1952 global_flags.stream_audio_codec_bitrate, &context_audio_stream, &resampler_audio_stream);
1955 frame_width_mbaligned = (frame_width + 15) & (~15);
1956 frame_height_mbaligned = (frame_height + 15) & (~15);
1958 open_output_stream();
1960 audio_frame = av_frame_alloc();
1964 if (global_flags.uncompressed_video_to_http ||
1965 global_flags.x264_video_to_http) {
1966 reorderer.reset(new FrameReorderer(ip_period - 1, frame_width, frame_height));
1968 if (global_flags.x264_video_to_http) {
1969 x264_encoder.reset(new X264Encoder(stream_mux.get()));
1972 init_va(va_display);
1975 // No frames are ready yet.
1976 memset(srcsurface_status, SRC_SURFACE_FREE, sizeof(srcsurface_status));
1978 memset(&seq_param, 0, sizeof(seq_param));
1979 memset(&pic_param, 0, sizeof(pic_param));
1980 memset(&slice_param, 0, sizeof(slice_param));
1982 storage_thread = thread(&QuickSyncEncoderImpl::storage_task_thread, this);
1984 encode_thread = thread([this]{
1985 //SDL_GL_MakeCurrent(window, context);
1986 QOpenGLContext *context = create_context(this->surface);
1987 eglBindAPI(EGL_OPENGL_API);
1988 if (!make_current(context, this->surface)) {
printf("display=%p surface=%p context=%p curr=%p err=%d\n", eglGetCurrentDisplay(), this->surface, context, eglGetCurrentContext(), eglGetError());
1993 encode_thread_func();
1997 QuickSyncEncoderImpl::~QuickSyncEncoderImpl()
2000 av_frame_free(&audio_frame);
2001 avresample_free(&resampler_audio_file);
2002 avresample_free(&resampler_audio_stream);
2003 avcodec_free_context(&context_audio_file);
2004 avcodec_free_context(&context_audio_stream);
2005 close_output_stream();
2008 bool QuickSyncEncoderImpl::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
2010 assert(!is_shutdown);
2012 // Wait until this frame slot is done encoding.
2013 unique_lock<mutex> lock(storage_task_queue_mutex);
2014 if (srcsurface_status[current_storage_frame % SURFACE_NUM] != SRC_SURFACE_FREE) {
2015 fprintf(stderr, "Warning: Slot %d (for frame %d) is still encoding, rendering has to wait for H.264 encoder\n",
2016 current_storage_frame % SURFACE_NUM, current_storage_frame);
2018 storage_task_queue_changed.wait(lock, [this]{ return storage_thread_should_quit || (srcsurface_status[current_storage_frame % SURFACE_NUM] == SRC_SURFACE_FREE); });
2019 srcsurface_status[current_storage_frame % SURFACE_NUM] = SRC_SURFACE_IN_ENCODING;
2020 if (storage_thread_should_quit) return false;
2023 //*fbo = fbos[current_storage_frame % SURFACE_NUM];
2024 GLSurface *surf = &gl_surfaces[current_storage_frame % SURFACE_NUM];
2025 *y_tex = surf->y_tex;
2026 *cbcr_tex = surf->cbcr_tex;
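// Zerocopy path: derive a VAImage from the source surface, export its buffer as a
// DMA-BUF, and wrap the Y and CbCr planes in EGL images bound to the returned
// textures, so the mixer renders directly into the surface the encoder reads.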
2028 VAStatus va_status = vaDeriveImage(va_dpy, surf->src_surface, &surf->surface_image);
2029 CHECK_VASTATUS(va_status, "vaDeriveImage");
2032 VABufferInfo buf_info;
2033 buf_info.mem_type = VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME; // or VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM?
2034 va_status = vaAcquireBufferHandle(va_dpy, surf->surface_image.buf, &buf_info);
2035 CHECK_VASTATUS(va_status, "vaAcquireBufferHandle");
2038 surf->y_egl_image = EGL_NO_IMAGE_KHR;
2039 EGLint y_attribs[] = {
2040 EGL_WIDTH, frame_width,
2041 EGL_HEIGHT, frame_height,
2042 EGL_LINUX_DRM_FOURCC_EXT, fourcc_code('R', '8', ' ', ' '),
2043 EGL_DMA_BUF_PLANE0_FD_EXT, EGLint(buf_info.handle),
2044 EGL_DMA_BUF_PLANE0_OFFSET_EXT, EGLint(surf->surface_image.offsets[0]),
2045 EGL_DMA_BUF_PLANE0_PITCH_EXT, EGLint(surf->surface_image.pitches[0]),
2049 surf->y_egl_image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, y_attribs);
2050 assert(surf->y_egl_image != EGL_NO_IMAGE_KHR);
// Associate the Y image with a texture.
2053 glBindTexture(GL_TEXTURE_2D, *y_tex);
2054 glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, surf->y_egl_image);
2056 // Create CbCr image.
2057 surf->cbcr_egl_image = EGL_NO_IMAGE_KHR;
2058 EGLint cbcr_attribs[] = {
2059 EGL_WIDTH, frame_width,
2060 EGL_HEIGHT, frame_height,
2061 EGL_LINUX_DRM_FOURCC_EXT, fourcc_code('G', 'R', '8', '8'),
2062 EGL_DMA_BUF_PLANE0_FD_EXT, EGLint(buf_info.handle),
2063 EGL_DMA_BUF_PLANE0_OFFSET_EXT, EGLint(surf->surface_image.offsets[1]),
2064 EGL_DMA_BUF_PLANE0_PITCH_EXT, EGLint(surf->surface_image.pitches[1]),
2068 surf->cbcr_egl_image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, cbcr_attribs);
2069 assert(surf->cbcr_egl_image != EGL_NO_IMAGE_KHR);
// Associate the CbCr image with a texture.
2072 glBindTexture(GL_TEXTURE_2D, *cbcr_tex);
2073 glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, surf->cbcr_egl_image);
2079 void QuickSyncEncoderImpl::add_audio(int64_t pts, vector<float> audio)
2081 assert(!is_shutdown);
2083 unique_lock<mutex> lock(frame_queue_mutex);
2084 pending_audio_frames[pts] = move(audio);
2086 frame_queue_nonempty.notify_all();
2089 RefCountedGLsync QuickSyncEncoderImpl::end_frame(int64_t pts, int64_t duration, const vector<RefCountedFrame> &input_frames)
2091 assert(!is_shutdown);
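// Without zerocopy, read the rendered Y and CbCr textures back into this slot's PBO;
// encode_frame() later copies them from there into the VA surface.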
2093 if (!use_zerocopy) {
2094 GLSurface *surf = &gl_surfaces[current_storage_frame % SURFACE_NUM];
2096 glPixelStorei(GL_PACK_ROW_LENGTH, 0);
2099 glBindBuffer(GL_PIXEL_PACK_BUFFER, surf->pbo);
2102 glBindTexture(GL_TEXTURE_2D, surf->y_tex);
2104 glGetTexImage(GL_TEXTURE_2D, 0, GL_RED, GL_UNSIGNED_BYTE, BUFFER_OFFSET(surf->y_offset));
2107 glBindTexture(GL_TEXTURE_2D, surf->cbcr_tex);
2109 glGetTexImage(GL_TEXTURE_2D, 0, GL_RG, GL_UNSIGNED_BYTE, BUFFER_OFFSET(surf->cbcr_offset));
2112 glBindTexture(GL_TEXTURE_2D, 0);
2114 glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
2117 glMemoryBarrier(GL_TEXTURE_UPDATE_BARRIER_BIT | GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
2121 RefCountedGLsync fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
2123 glFlush(); // Make the H.264 thread see the fence as soon as possible.
2127 unique_lock<mutex> lock(frame_queue_mutex);
2128 pending_video_frames[current_storage_frame] = PendingFrame{ fence, input_frames, pts, duration };
2129 ++current_storage_frame;
2131 frame_queue_nonempty.notify_all();
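// Shut down in two stages: stop the encode thread first (flushing any leftover frames
// as P-frames), then the storage thread, and finally encode whatever audio is pending.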
2135 void QuickSyncEncoderImpl::shutdown()
2142 unique_lock<mutex> lock(frame_queue_mutex);
2143 encode_thread_should_quit = true;
2144 frame_queue_nonempty.notify_all();
2146 encode_thread.join();
2147 x264_encoder.reset();
2149 unique_lock<mutex> lock(storage_task_queue_mutex);
2150 storage_thread_should_quit = true;
2151 frame_queue_nonempty.notify_all();
2152 storage_task_queue_changed.notify_all();
2154 storage_thread.join();
2155 encode_remaining_audio();
2162 void QuickSyncEncoderImpl::open_output_file(const std::string &filename)
2164 AVFormatContext *avctx = avformat_alloc_context();
2165 avctx->oformat = av_guess_format(NULL, filename.c_str(), NULL);
2166 assert(filename.size() < sizeof(avctx->filename) - 1);
2167 strcpy(avctx->filename, filename.c_str());
2169 string url = "file:" + filename;
2170 int ret = avio_open2(&avctx->pb, url.c_str(), AVIO_FLAG_WRITE, &avctx->interrupt_callback, NULL);
2172 char tmp[AV_ERROR_MAX_STRING_SIZE];
2173 fprintf(stderr, "%s: avio_open2() failed: %s\n", filename.c_str(), av_make_error_string(tmp, sizeof(tmp), ret));
2177 file_mux.reset(new Mux(avctx, frame_width, frame_height, Mux::CODEC_H264, context_audio_file->codec, TIMEBASE, DEFAULT_AUDIO_OUTPUT_BIT_RATE, nullptr));
2180 void QuickSyncEncoderImpl::close_output_file()
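// The stream mux never touches a file; it writes through a custom AVIO buffer into
// write_packet_thunk(), which hands the bytes to the HTTP server.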
2185 void QuickSyncEncoderImpl::open_output_stream()
2187 AVFormatContext *avctx = avformat_alloc_context();
2188 AVOutputFormat *oformat = av_guess_format(global_flags.stream_mux_name.c_str(), nullptr, nullptr);
2189 assert(oformat != nullptr);
2190 avctx->oformat = oformat;
2195 if (global_flags.stream_audio_codec_name.empty()) {
2196 codec_name = AUDIO_OUTPUT_CODEC_NAME;
2197 bit_rate = DEFAULT_AUDIO_OUTPUT_BIT_RATE;
2199 codec_name = global_flags.stream_audio_codec_name;
2200 bit_rate = global_flags.stream_audio_codec_bitrate;
2203 uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
2204 avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, &QuickSyncEncoderImpl::write_packet_thunk, nullptr);
2206 Mux::Codec video_codec;
2207 if (global_flags.uncompressed_video_to_http) {
2208 video_codec = Mux::CODEC_NV12;
2210 video_codec = Mux::CODEC_H264;
2213 avctx->flags = AVFMT_FLAG_CUSTOM_IO;
2214 AVCodec *codec_audio = avcodec_find_encoder_by_name(codec_name.c_str());
2215 if (codec_audio == nullptr) {
2216 fprintf(stderr, "ERROR: Could not find codec '%s'\n", codec_name.c_str());
2220 int time_base = global_flags.stream_coarse_timebase ? COARSE_TIMEBASE : TIMEBASE;
2221 stream_mux_writing_header = true;
2222 stream_mux.reset(new Mux(avctx, frame_width, frame_height, video_codec, codec_audio, time_base, bit_rate, this));
2223 stream_mux_writing_header = false;
2224 httpd->set_header(stream_mux_header);
2225 stream_mux_header.clear();
2228 void QuickSyncEncoderImpl::close_output_stream()
2233 int QuickSyncEncoderImpl::write_packet_thunk(void *opaque, uint8_t *buf, int buf_size)
2235 QuickSyncEncoderImpl *h264_encoder = (QuickSyncEncoderImpl *)opaque;
2236 return h264_encoder->write_packet(buf, buf_size);
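// Bytes coming back from the stream mux. While the muxer writes its header we collect
// the bytes in stream_mux_header so HTTPD can replay them to new clients; afterwards
// they are forwarded to HTTPD directly.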
2239 int QuickSyncEncoderImpl::write_packet(uint8_t *buf, int buf_size)
2241 if (stream_mux_writing_header) {
2242 stream_mux_header.append((char *)buf, buf_size);
2244 httpd->add_data((char *)buf, buf_size, stream_mux_writing_keyframes);
2245 stream_mux_writing_keyframes = false;
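// Encode thread main loop: map encoding order back to display order, wait for the
// corresponding rendered frame, derive its dts from the pts lag, and encode it.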
2250 void QuickSyncEncoderImpl::encode_thread_func()
2252 int64_t last_dts = -1;
2253 int gop_start_display_frame_num = 0;
2254 for (int encoding_frame_num = 0; ; ++encoding_frame_num) {
2257 int frame_type, display_frame_num;
2258 encoding2display_order(encoding_frame_num, intra_period, intra_idr_period, ip_period,
2259 &display_frame_num, &frame_type, &pts_lag);
2260 if (frame_type == FRAME_IDR) {
2262 current_frame_num = 0;
2263 gop_start_display_frame_num = display_frame_num;
2267 unique_lock<mutex> lock(frame_queue_mutex);
2268 frame_queue_nonempty.wait(lock, [this, display_frame_num]{
2269 return encode_thread_should_quit || pending_video_frames.count(display_frame_num) != 0;
2271 if (encode_thread_should_quit && pending_video_frames.count(display_frame_num) == 0) {
// We have queued frames that were supposed to be B-frames,
// but there will be no P-frame to encode them against. Encode them all
// as P-frames instead. Note that this happens under the mutex,
// but nobody else uses it at this point, since we're shutting down,
// so there's no contention.
2277 encode_remaining_frames_as_p(encoding_frame_num, gop_start_display_frame_num, last_dts);
2280 frame = move(pending_video_frames[display_frame_num]);
2281 pending_video_frames.erase(display_frame_num);
2285 // Determine the dts of this frame.
2287 if (pts_lag == -1) {
2288 assert(last_dts != -1);
2289 dts = last_dts + (TIMEBASE / MAX_FPS);
2291 dts = frame.pts - pts_lag;
2295 encode_frame(frame, encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type, frame.pts, dts, frame.duration);
2299 void QuickSyncEncoderImpl::encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts)
2301 if (pending_video_frames.empty()) {
2305 for (auto &pending_frame : pending_video_frames) {
2306 int display_frame_num = pending_frame.first;
2307 assert(display_frame_num > 0);
2308 PendingFrame frame = move(pending_frame.second);
2309 int64_t dts = last_dts + (TIMEBASE / MAX_FPS);
2310 printf("Finalizing encode: Encoding leftover frame %d as P-frame instead of B-frame.\n", display_frame_num);
2311 encode_frame(frame, encoding_frame_num++, display_frame_num, gop_start_display_frame_num, FRAME_P, frame.pts, dts, frame.duration);
2315 if (global_flags.uncompressed_video_to_http ||
2316 global_flags.x264_video_to_http) {
2317 // Add frames left in reorderer.
2318 while (!reorderer->empty()) {
2319 FrameReorderer::Frame output_frame = reorderer->get_first_frame();
2320 if (global_flags.uncompressed_video_to_http) {
2321 add_packet_for_uncompressed_frame(output_frame.pts, output_frame.duration, output_frame.data);
2323 assert(global_flags.x264_video_to_http);
2324 x264_encoder->add_frame(output_frame.pts, output_frame.duration, output_frame.data);
2330 void QuickSyncEncoderImpl::encode_remaining_audio()
2332 // This really ought to be empty by now, but just to be sure...
2333 for (auto &pending_frame : pending_audio_frames) {
2334 int64_t audio_pts = pending_frame.first;
2335 vector<float> audio = move(pending_frame.second);
2337 if (context_audio_stream) {
2338 encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, resampler_audio_file, { file_mux.get() });
2339 encode_audio(audio, &audio_queue_stream, audio_pts, context_audio_stream, resampler_audio_stream, { stream_mux.get() });
2341 encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, resampler_audio_file, { stream_mux.get(), file_mux.get() });
2343 last_audio_pts = audio_pts + audio.size() * TIMEBASE / (OUTPUT_FREQUENCY * 2);
2345 pending_audio_frames.clear();
2347 // Encode any leftover audio in the queues, and also any delayed frames.
2348 if (context_audio_stream) {
2349 encode_last_audio(&audio_queue_file, last_audio_pts, context_audio_file, resampler_audio_file, { file_mux.get() });
2350 encode_last_audio(&audio_queue_stream, last_audio_pts, context_audio_stream, resampler_audio_stream, { stream_mux.get() });
2352 encode_last_audio(&audio_queue_file, last_audio_pts, context_audio_file, resampler_audio_file, { stream_mux.get(), file_mux.get() });
2356 void QuickSyncEncoderImpl::add_packet_for_uncompressed_frame(int64_t pts, int64_t duration, const uint8_t *data)
2359 memset(&pkt, 0, sizeof(pkt));
2361 pkt.data = const_cast<uint8_t *>(data);
2362 pkt.size = frame_width * frame_height * 2;
2363 pkt.stream_index = 0;
2364 pkt.flags = AV_PKT_FLAG_KEY;
2365 pkt.duration = duration;
2366 stream_mux->add_packet(pkt, pts, pts);
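// Copy src_width bytes per row into a destination with a (possibly larger) pitch;
// degenerates to a single memcpy when the pitch equals the source width.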
2371 void memcpy_with_pitch(uint8_t *dst, const uint8_t *src, size_t src_width, size_t dst_pitch, size_t height)
2373 if (src_width == dst_pitch) {
2374 memcpy(dst, src, src_width * height);
2376 for (size_t y = 0; y < height; ++y) {
2377 const uint8_t *sptr = src + y * src_width;
2378 uint8_t *dptr = dst + y * dst_pitch;
2379 memcpy(dptr, sptr, src_width);
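// Encode one frame: wait for the GPU fence, get the pixels into the VA surface
// (releasing the zerocopy handles, or copying from the PBO otherwise), optionally
// feed the raw frame to the uncompressed/x264 HTTP path, issue the VA-API encode
// calls, and enqueue a storage task so the coded data gets saved and muxed.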
2386 void QuickSyncEncoderImpl::encode_frame(QuickSyncEncoderImpl::PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
2387 int frame_type, int64_t pts, int64_t dts, int64_t duration)
2389 // Wait for the GPU to be done with the frame.
2392 sync_status = glClientWaitSync(frame.fence.get(), 0, 1000000000);
2394 } while (sync_status == GL_TIMEOUT_EXPIRED);
2395 assert(sync_status != GL_WAIT_FAILED);
2397 // Release back any input frames we needed to render this frame.
2398 frame.input_frames.clear();
2400 GLSurface *surf = &gl_surfaces[display_frame_num % SURFACE_NUM];
2404 eglDestroyImageKHR(eglGetCurrentDisplay(), surf->y_egl_image);
2405 eglDestroyImageKHR(eglGetCurrentDisplay(), surf->cbcr_egl_image);
2406 va_status = vaReleaseBufferHandle(va_dpy, surf->surface_image.buf);
2407 CHECK_VASTATUS(va_status, "vaReleaseBufferHandle");
2409 unsigned char *surface_p = nullptr;
2410 vaMapBuffer(va_dpy, surf->surface_image.buf, (void **)&surface_p);
2412 unsigned char *va_y_ptr = (unsigned char *)surface_p + surf->surface_image.offsets[0];
2413 memcpy_with_pitch(va_y_ptr, surf->y_ptr, frame_width, surf->surface_image.pitches[0], frame_height);
2415 unsigned char *va_cbcr_ptr = (unsigned char *)surface_p + surf->surface_image.offsets[1];
2416 memcpy_with_pitch(va_cbcr_ptr, surf->cbcr_ptr, (frame_width / 2) * sizeof(uint16_t), surf->surface_image.pitches[1], frame_height / 2);
2418 va_status = vaUnmapBuffer(va_dpy, surf->surface_image.buf);
2419 CHECK_VASTATUS(va_status, "vaUnmapBuffer");
2421 if (global_flags.uncompressed_video_to_http ||
2422 global_flags.x264_video_to_http) {
2423 // Add uncompressed video. (Note that pts == dts here.)
2424 // Delay needs to match audio.
2425 FrameReorderer::Frame output_frame = reorderer->reorder_frame(pts + global_delay(), duration, reinterpret_cast<uint8_t *>(surf->y_ptr));
2426 if (output_frame.data != nullptr) {
2427 if (global_flags.uncompressed_video_to_http) {
2428 add_packet_for_uncompressed_frame(output_frame.pts, output_frame.duration, output_frame.data);
2430 assert(global_flags.x264_video_to_http);
2431 x264_encoder->add_frame(output_frame.pts, output_frame.duration, output_frame.data);
2437 va_status = vaDestroyImage(va_dpy, surf->surface_image.image_id);
2438 CHECK_VASTATUS(va_status, "vaDestroyImage");
2440 // Schedule the frame for encoding.
2441 VASurfaceID va_surface = surf->src_surface;
2442 va_status = vaBeginPicture(va_dpy, context_id, va_surface);
2443 CHECK_VASTATUS(va_status, "vaBeginPicture");
2445 if (frame_type == FRAME_IDR) {
2447 render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
2448 if (h264_packedheader) {
2449 render_packedsequence();
2450 render_packedpicture();
2453 //render_sequence();
2454 render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
2456 render_slice(encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type);
2458 va_status = vaEndPicture(va_dpy, context_id);
2459 CHECK_VASTATUS(va_status, "vaEndPicture");
// The encode has now been kicked off (it runs asynchronously in the driver);
// hand the frame over to the storage thread, which will wait for it and mux the result.
2464 tmp.display_order = display_frame_num;
2465 tmp.frame_type = frame_type;
2468 tmp.duration = duration;
2469 storage_task_enqueue(move(tmp));
2471 update_ReferenceFrames(frame_type);
2475 QuickSyncEncoder::QuickSyncEncoder(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
2476 : impl(new QuickSyncEncoderImpl(surface, va_display, width, height, httpd)) {}
2478 // Must be defined here because unique_ptr<> destructor needs to know the impl.
2479 QuickSyncEncoder::~QuickSyncEncoder() {}
2481 void QuickSyncEncoder::add_audio(int64_t pts, vector<float> audio)
2483 impl->add_audio(pts, audio);
2486 bool QuickSyncEncoder::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
2488 return impl->begin_frame(y_tex, cbcr_tex);
2491 RefCountedGLsync QuickSyncEncoder::end_frame(int64_t pts, int64_t duration, const vector<RefCountedFrame> &input_frames)
2493 return impl->end_frame(pts, duration, input_frames);
2496 void QuickSyncEncoder::shutdown()
2501 void QuickSyncEncoder::open_output_file(const std::string &filename)
2503 impl->open_output_file(filename);
2506 void QuickSyncEncoder::close_output_file()
2508 impl->close_output_file();