#include "flags.h"
#include "mux.h"
#include "print_latency.h"
+#include "quicksync_encoder_impl.h"
#include "ref_counted_frame.h"
#include "timebase.h"
#include "x264_encoder.h"
#define PROFILE_IDC_HIGH 100
#define BITSTREAM_ALLOCATE_STEPPING 4096
-#define SURFACE_NUM 16 /* 16 surfaces for source YUV */
-#define MAX_NUM_REF1 16 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
-#define MAX_NUM_REF2 32 // Seemingly a hardware-fixed value, not related to SURFACE_NUM
static constexpr unsigned int MaxFrameNum = (2<<16);
static constexpr unsigned int MaxPicOrderCntLsb = (2<<8);
#define SRC_SURFACE_FREE 0
#define SRC_SURFACE_IN_ENCODING 1
// Minimal in-memory bitstream writer, used when hand-building the packed
// H.264 headers (SPS/PPS/slice headers) that are fed to VA-API.
//
// NOTE: Renamed from "__bitstream" — identifiers containing a double
// underscore are reserved to the implementation in C++ ([lex.name]), so the
// struct now carries the typedef'd name directly. All users already refer to
// it as "bitstream" (see e.g. sps_rbsp()/pps_rbsp() below), so the interface
// is unchanged.
struct bitstream {
	unsigned int *buffer;    // Backing storage; presumably grown in BITSTREAM_ALLOCATE_STEPPING steps (see macro above) — TODO confirm at the allocation site.
	int bit_offset;          // Current write position, in bits from the start of 'buffer'.
	int max_size_in_dword;   // Allocated capacity of 'buffer', in 32-bit words.
};
-
using namespace std;
// H.264 video comes out in encoding order (e.g. with two B-frames:
// 0, 3, 1, 2, 6, 4, 5, etc.), but uncompressed video needs to
// come in the right order. Since we do everything, including waiting
// for the frames to come out of OpenGL, in encoding order, we need
// a reordering buffer for uncompressed frames so that they come out
// correctly. We go the super-lazy way of not making it understand
// anything about the true order (which introduces some extra latency,
// though); we know that for N B-frames we need at most (N-1) frames
// in the reorder buffer, and can just sort on that.
//
// The class also deals with keeping a freelist of frame buffers as needed;
// 'owner' below holds the actual allocations.
class FrameReorderer {
public:
	// queue_length: how many frames to hold back (0 = passthrough, no copy).
	// width/height: frame dimensions; each buffer is width * height * 2 bytes
	// (i.e., 2 bytes per pixel — presumably packed 4:2:2 YUV; confirm at the
	// call site).
	FrameReorderer(unsigned queue_length, int width, int height);

	struct Frame {
		int64_t pts, duration;
		uint8_t *data;                // Points into 'owner' storage (or the caller's data when queue_length == 0).
		ReceivedTimestamps received_ts;

		// Invert to get the smallest pts first out of the priority queue.
		bool operator< (const Frame &other) const { return pts > other.pts; }
	};

	// Returns the next frame to insert with its pts, if any. Otherwise -1 and nullptr.
	// Does _not_ take ownership of data; a copy is taken if needed.
	// The returned pointer is valid until the next call to reorder_frame, or destruction.
	// As a special case, if queue_length == 0, will just return pts and data (no reordering needed).
	Frame reorder_frame(int64_t pts, int64_t duration, uint8_t *data, const ReceivedTimestamps &received_ts);

	// The same as reorder_frame, but without inserting anything. Used to empty the queue.
	Frame get_first_frame();

	// True if no frames are currently held back.
	bool empty() const { return frames.empty(); }

private:
	unsigned queue_length;
	int width, height;

	priority_queue<Frame> frames;    // Held-back frames, smallest pts on top (see Frame::operator<).
	stack<uint8_t *> freelist; // Includes the last value returned from reorder_frame.

	// Owns all the pointers. Normally, freelist and frames could do this themselves,
	// except priority_queue doesn't work well with movable-only types.
	vector<unique_ptr<uint8_t[]>> owner;
};
-
-FrameReorderer::FrameReorderer(unsigned queue_length, int width, int height)
- : queue_length(queue_length), width(width), height(height)
-{
- for (unsigned i = 0; i < queue_length; ++i) {
- owner.emplace_back(new uint8_t[width * height * 2]);
- freelist.push(owner.back().get());
- }
-}
-
-FrameReorderer::Frame FrameReorderer::reorder_frame(int64_t pts, int64_t duration, uint8_t *data, const ReceivedTimestamps &received_ts)
-{
- if (queue_length == 0) {
- return Frame{pts, duration, data, received_ts};
- }
-
- assert(!freelist.empty());
- uint8_t *storage = freelist.top();
- freelist.pop();
- memcpy(storage, data, width * height * 2);
- frames.push(Frame{pts, duration, storage, received_ts});
-
- if (frames.size() >= queue_length) {
- return get_first_frame();
- } else {
- return Frame{-1, -1, nullptr, steady_clock::time_point::min(), steady_clock::time_point::min()};
- }
-}
-
-FrameReorderer::Frame FrameReorderer::get_first_frame()
-{
- assert(!frames.empty());
- Frame storage = frames.top();
- frames.pop();
- freelist.push(storage.data);
- return storage;
-}
-
// Implementation class behind QuickSyncEncoder (pimpl). Encodes H.264 via
// VA-API (Intel Quick Sync), muxes the result to local disk (file_mux), and
// can additionally hand frames — uncompressed or via an external X264Encoder —
// to the HTTP stream mux (stream_mux).
class QuickSyncEncoderImpl {
public:
	QuickSyncEncoderImpl(const std::string &filename, movit::ResourcePool *resource_pool, QSurface *surface, const string &va_display, int width, int height, AVOutputFormat *oformat, X264Encoder *x264_encoder, DiskSpaceEstimator *disk_space_estimator);
	~QuickSyncEncoderImpl();
	void add_audio(int64_t pts, vector<float> audio);
	bool begin_frame(GLuint *y_tex, GLuint *cbcr_tex);
	RefCountedGLsync end_frame(int64_t pts, int64_t duration, const vector<RefCountedFrame> &input_frames);
	void shutdown();
	void release_gl_resources();
	void set_stream_mux(Mux *mux)
	{
		stream_mux = mux;
	}

	// So we never get negative dts.
	int64_t global_delay() const {
		return int64_t(ip_period - 1) * (TIMEBASE / MAX_FPS);
	}

private:
	// A unit of work for the storage thread: identifies an encoded frame to
	// read back and mux, together with the audio belonging to it.
	struct storage_task {
		unsigned long long display_order;
		int frame_type;
		vector<float> audio;
		int64_t pts, dts, duration;
		ReceivedTimestamps received_ts;
	};
	// A frame queued on the GPU ('fence' signals render completion) that has
	// not yet been submitted to the hardware encoder. input_frames keeps the
	// source frames alive until encoding no longer needs them.
	struct PendingFrame {
		RefCountedGLsync fence;
		vector<RefCountedFrame> input_frames;
		int64_t pts, duration;
	};

	void open_output_file(const std::string &filename);
	void encode_thread_func();
	void encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts);
	void add_packet_for_uncompressed_frame(int64_t pts, int64_t duration, const uint8_t *data);
	void encode_frame(PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
	                  int frame_type, int64_t pts, int64_t dts, int64_t duration);
	void storage_task_thread();
	void storage_task_enqueue(storage_task task);
	void save_codeddata(storage_task task);
	// Helpers for building VA-API parameter buffers and hand-packed
	// (bit-exact) H.264 headers.
	int render_packedsequence();
	int render_packedpicture();
	void render_packedslice();
	int render_sequence();
	int render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num);
	void sps_rbsp(bitstream *bs);
	void pps_rbsp(bitstream *bs);
	int build_packed_pic_buffer(unsigned char **header_buffer);
	int render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type);
	void slice_header(bitstream *bs);
	int build_packed_seq_buffer(unsigned char **header_buffer);
	int build_packed_slice_buffer(unsigned char **header_buffer);
	int init_va(const string &va_display);
	int deinit_va();
	void enable_zerocopy_if_possible();
	VADisplay va_open_display(const string &va_display);
	void va_close_display(VADisplay va_dpy);
	int setup_encode();
	void release_encode();
	void update_ReferenceFrames(int frame_type);
	int update_RefPicList(int frame_type);

	bool is_shutdown = false;
	bool has_released_gl_resources = false;
	bool use_zerocopy;  // Whether we can encode straight from GL textures via EGLImage (see GLSurface below); set by enable_zerocopy_if_possible().
	int drm_fd = -1;

	thread encode_thread, storage_thread;

	mutex storage_task_queue_mutex;
	condition_variable storage_task_queue_changed;
	int srcsurface_status[SURFACE_NUM]; // protected by storage_task_queue_mutex
	queue<storage_task> storage_task_queue; // protected by storage_task_queue_mutex
	bool storage_thread_should_quit = false; // protected by storage_task_queue_mutex

	mutex frame_queue_mutex;
	condition_variable frame_queue_nonempty;
	bool encode_thread_should_quit = false; // under frame_queue_mutex

	int current_storage_frame;

	map<int, PendingFrame> pending_video_frames; // under frame_queue_mutex
	movit::ResourcePool *resource_pool;
	QSurface *surface;

	unique_ptr<AudioEncoder> file_audio_encoder;

	unique_ptr<FrameReorderer> reorderer;
	X264Encoder *x264_encoder; // nullptr if not using x264.

	Mux* stream_mux = nullptr; // To HTTP.
	unique_ptr<Mux> file_mux; // To local disk.

	Display *x11_display = nullptr;

	// Encoder parameters
	VADisplay va_dpy;
	VAProfile h264_profile = (VAProfile)~0;
	VAConfigAttrib config_attrib[VAConfigAttribTypeMax];
	int config_attrib_num = 0, enc_packed_header_idx;

	// Per-surface GPU/VA-API state; indexed by display_frame_num % SURFACE_NUM.
	struct GLSurface {
		VASurfaceID src_surface, ref_surface;
		VABufferID coded_buf;

		VAImage surface_image;
		GLuint y_tex, cbcr_tex;

		// Only if use_zerocopy == true.
		EGLImage y_egl_image, cbcr_egl_image;

		// Only if use_zerocopy == false.
		GLuint pbo;
		uint8_t *y_ptr, *cbcr_ptr;
		size_t y_offset, cbcr_offset;
	};
	GLSurface gl_surfaces[SURFACE_NUM];

	VAConfigID config_id;
	VAContextID context_id;
	VAEncSequenceParameterBufferH264 seq_param;
	VAEncPictureParameterBufferH264 pic_param;
	VAEncSliceParameterBufferH264 slice_param;
	VAPictureH264 CurrentCurrPic;
	VAPictureH264 ReferenceFrames[MAX_NUM_REF1], RefPicList0_P[MAX_NUM_REF2], RefPicList0_B[MAX_NUM_REF2], RefPicList1_B[MAX_NUM_REF2];

	// Static quality settings.
	static constexpr unsigned int frame_bitrate = 15000000 / 60; // Doesn't really matter; only initial_qp does.
	static constexpr unsigned int num_ref_frames = 2;
	static constexpr int initial_qp = 15;
	static constexpr int minimal_qp = 0;
	static constexpr int intra_period = 30;
	static constexpr int intra_idr_period = MAX_FPS; // About a second; more at lower frame rates. Not ideal.

	// Quality settings that are meant to be static, but might be overridden
	// by the profile.
	int constraint_set_flag = 0;
	int h264_packedheader = 0; /* support pack header? */
	int h264_maxref = (1<<16|1);
	int h264_entropy_mode = 1; /* cabac */
	int ip_period = 3;

	int rc_mode = -1;
	unsigned int current_frame_num = 0;
	unsigned int numShortTerm = 0;

	int frame_width;
	int frame_height;
	int frame_width_mbaligned;
	int frame_height_mbaligned;

	DiskSpaceEstimator *disk_space_estimator;
};
-
// vaRenderPicture() is supposedly meant to destroy the buffer implicitly,
// but if we don't delete it here, we get leaks. The GStreamer implementation
// does the same.
//print_input();
- if (global_flags.uncompressed_video_to_http ||
- global_flags.x264_video_to_http) {
- reorderer.reset(new FrameReorderer(ip_period - 1, frame_width, frame_height));
- }
if (global_flags.x264_video_to_http) {
assert(x264_encoder != nullptr);
} else {
{
unique_lock<mutex> lock(frame_queue_mutex);
- pending_video_frames[current_storage_frame] = PendingFrame{ fence, input_frames, pts, duration };
+ pending_video_frames.push(PendingFrame{ fence, input_frames, pts, duration });
++current_storage_frame;
}
frame_queue_nonempty.notify_all();
{
int64_t last_dts = -1;
int gop_start_display_frame_num = 0;
- for (int encoding_frame_num = 0; ; ++encoding_frame_num) {
+ for (int display_frame_num = 0; ; ++display_frame_num) {
+ // Wait for the frame to be in the queue. Note that this only means
+ // we started rendering it.
PendingFrame frame;
- int pts_lag;
- int frame_type, display_frame_num;
- encoding2display_order(encoding_frame_num, intra_period, intra_idr_period, ip_period,
- &display_frame_num, &frame_type, &pts_lag);
- if (frame_type == FRAME_IDR) {
- numShortTerm = 0;
- current_frame_num = 0;
- gop_start_display_frame_num = display_frame_num;
- }
-
{
unique_lock<mutex> lock(frame_queue_mutex);
- frame_queue_nonempty.wait(lock, [this, display_frame_num]{
- return encode_thread_should_quit || pending_video_frames.count(display_frame_num) != 0;
+ frame_queue_nonempty.wait(lock, [this]{
+ return encode_thread_should_quit || !pending_video_frames.empty();
});
- if (encode_thread_should_quit && pending_video_frames.count(display_frame_num) == 0) {
- // We have queued frames that were supposed to be B-frames,
- // but will be no P-frame to encode them against. Encode them all
- // as P-frames instead. Note that this happens under the mutex,
+ if (encode_thread_should_quit && pending_video_frames.empty()) {
+ // We may have queued frames left in the reorder buffer
+ // that were supposed to be B-frames, but have no P-frame
+ // to be encoded against. If so, encode them all as
+ // P-frames instead. Note that this happens under the mutex,
// but nobody else uses it at this point, since we're shutting down,
// so there's no contention.
- encode_remaining_frames_as_p(encoding_frame_num, gop_start_display_frame_num, last_dts);
+ encode_remaining_frames_as_p(quicksync_encoding_frame_num, gop_start_display_frame_num, last_dts);
return;
} else {
- frame = move(pending_video_frames[display_frame_num]);
- pending_video_frames.erase(display_frame_num);
+ frame = move(pending_video_frames.front());
+ pending_video_frames.pop();
}
}
- // Determine the dts of this frame.
- int64_t dts;
- if (pts_lag == -1) {
- assert(last_dts != -1);
- dts = last_dts + (TIMEBASE / MAX_FPS);
- } else {
- dts = frame.pts - pts_lag;
- }
- last_dts = dts;
+ // Pass the frame on to x264 (or uncompressed to HTTP) as needed.
+ // Note that this implicitly waits for the frame to be done rendering.
+ pass_frame(frame, display_frame_num, frame.pts, frame.duration);
+ reorder_buffer[display_frame_num] = move(frame);
+
+ // Now encode as many QuickSync frames as we can using the frames we have available.
+ // (It could be zero, or it could be multiple.) FIXME: make a function.
+ for ( ;; ) {
+ int pts_lag;
+ int frame_type, quicksync_display_frame_num;
+ encoding2display_order(quicksync_encoding_frame_num, intra_period, intra_idr_period, ip_period,
+ &quicksync_display_frame_num, &frame_type, &pts_lag);
+ if (!reorder_buffer.count(quicksync_display_frame_num)) {
+ break;
+ }
+ frame = move(reorder_buffer[quicksync_display_frame_num]);
+ reorder_buffer.erase(quicksync_display_frame_num);
+
+ if (frame_type == FRAME_IDR) {
+ numShortTerm = 0;
+ current_frame_num = 0;
+ gop_start_display_frame_num = quicksync_display_frame_num;
+ }
- encode_frame(frame, encoding_frame_num, display_frame_num, gop_start_display_frame_num, frame_type, frame.pts, dts, frame.duration);
+ // Determine the dts of this frame.
+ int64_t dts;
+ if (pts_lag == -1) {
+ assert(last_dts != -1);
+ dts = last_dts + (TIMEBASE / MAX_FPS);
+ } else {
+ dts = frame.pts - pts_lag;
+ }
+ last_dts = dts;
+
+ encode_frame(frame, quicksync_encoding_frame_num, quicksync_display_frame_num, gop_start_display_frame_num, frame_type, frame.pts, dts, frame.duration);
+ ++quicksync_encoding_frame_num;
+ }
}
}
void QuickSyncEncoderImpl::encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts)
{
- if (pending_video_frames.empty()) {
+ if (reorder_buffer.empty()) {
return;
}
- for (auto &pending_frame : pending_video_frames) {
+ for (auto &pending_frame : reorder_buffer) {
int display_frame_num = pending_frame.first;
assert(display_frame_num > 0);
PendingFrame frame = move(pending_frame.second);
encode_frame(frame, encoding_frame_num++, display_frame_num, gop_start_display_frame_num, FRAME_P, frame.pts, dts, frame.duration);
last_dts = dts;
}
-
- if (global_flags.uncompressed_video_to_http ||
- global_flags.x264_video_to_http) {
- // Add frames left in reorderer.
- while (!reorderer->empty()) {
- FrameReorderer::Frame output_frame = reorderer->get_first_frame();
- if (global_flags.uncompressed_video_to_http) {
- add_packet_for_uncompressed_frame(output_frame.pts, output_frame.duration, output_frame.data);
- } else {
- assert(global_flags.x264_video_to_http);
- x264_encoder->add_frame(output_frame.pts, output_frame.duration, output_frame.data, output_frame.received_ts);
- }
- }
- }
}
void QuickSyncEncoderImpl::add_packet_for_uncompressed_frame(int64_t pts, int64_t duration, const uint8_t *data)
} // namespace
-void QuickSyncEncoderImpl::encode_frame(QuickSyncEncoderImpl::PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
- int frame_type, int64_t pts, int64_t dts, int64_t duration)
+void QuickSyncEncoderImpl::pass_frame(QuickSyncEncoderImpl::PendingFrame frame, int display_frame_num, int64_t pts, int64_t duration)
{
// Wait for the GPU to be done with the frame.
GLenum sync_status;
} while (sync_status == GL_TIMEOUT_EXPIRED);
assert(sync_status != GL_WAIT_FAILED);
- // Find min and max timestamp of all input frames that have a timestamp.
- steady_clock::time_point min_ts = steady_clock::time_point::max(), max_ts = steady_clock::time_point::min();
- for (const RefCountedFrame &input_frame : frame.input_frames) {
- if (input_frame && input_frame->received_timestamp > steady_clock::time_point::min()) {
- min_ts = min(min_ts, input_frame->received_timestamp);
- max_ts = max(max_ts, input_frame->received_timestamp);
- }
- }
- const ReceivedTimestamps received_ts{ min_ts, max_ts };
-
+ ReceivedTimestamps received_ts = find_received_timestamp(frame.input_frames);
static int frameno = 0;
print_latency("Current mixer latency (video inputs → ready for encode):",
- received_ts, (frame_type == FRAME_B), &frameno);
+ received_ts, false, &frameno);
// Release back any input frames we needed to render this frame.
frame.input_frames.clear();
+ GLSurface *surf = &gl_surfaces[display_frame_num % SURFACE_NUM];
+ uint8_t *data = reinterpret_cast<uint8_t *>(surf->y_ptr);
+ if (global_flags.uncompressed_video_to_http) {
+ add_packet_for_uncompressed_frame(pts, duration, data);
+ } else if (global_flags.x264_video_to_http) {
+ x264_encoder->add_frame(pts, duration, data, received_ts);
+ }
+}
+
+void QuickSyncEncoderImpl::encode_frame(QuickSyncEncoderImpl::PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
+ int frame_type, int64_t pts, int64_t dts, int64_t duration)
+{
+ const ReceivedTimestamps received_ts = find_received_timestamp(frame.input_frames);
+
GLSurface *surf = &gl_surfaces[display_frame_num % SURFACE_NUM];
VAStatus va_status;
va_status = vaReleaseBufferHandle(va_dpy, surf->surface_image.buf);
CHECK_VASTATUS(va_status, "vaReleaseBufferHandle");
} else {
+ // Upload the frame to VA-API.
unsigned char *surface_p = nullptr;
vaMapBuffer(va_dpy, surf->surface_image.buf, (void **)&surface_p);
va_status = vaUnmapBuffer(va_dpy, surf->surface_image.buf);
CHECK_VASTATUS(va_status, "vaUnmapBuffer");
-
- if (global_flags.uncompressed_video_to_http ||
- global_flags.x264_video_to_http) {
- // Add uncompressed video. (Note that pts == dts here.)
- // Delay needs to match audio.
- FrameReorderer::Frame output_frame = reorderer->reorder_frame(pts + global_delay(), duration, reinterpret_cast<uint8_t *>(surf->y_ptr), received_ts);
- if (output_frame.data != nullptr) {
- if (global_flags.uncompressed_video_to_http) {
- add_packet_for_uncompressed_frame(output_frame.pts, output_frame.duration, output_frame.data);
- } else {
- assert(global_flags.x264_video_to_http);
- x264_encoder->add_frame(output_frame.pts, output_frame.duration, output_frame.data, output_frame.received_ts);
- }
- }
- }
}
va_status = vaDestroyImage(va_dpy, surf->surface_image.image_id);