#include "decklink_capture.h"
#include "defs.h"
#include "flags.h"
-#include "h264encode.h"
+#include "quicksync_encode.h"
#include "pbo_frame_allocator.h"
#include "ref_counted_gl_sync.h"
#include "timebase.h"
display_chain->set_dither_bits(0); // Don't bother.
display_chain->finalize();
- h264_encoder.reset(new H264Encoder(h264_encoder_surface, global_flags.va_display, WIDTH, HEIGHT, &httpd));
- h264_encoder->open_output_file(generate_local_dump_filename(/*frame=*/0).c_str());
+ quicksync_encoder.reset(new QuickSyncEncoder(h264_encoder_surface, global_flags.va_display, WIDTH, HEIGHT, &httpd));
+ quicksync_encoder->open_output_file(generate_local_dump_filename(/*frame=*/0).c_str());
- // Start listening for clients only once H264Encoder has written its header, if any.
+ // Start listening for clients only once QuickSyncEncoder has written its header, if any.
httpd.start(9095);
cards[card_index].capture->stop_dequeue_thread();
}
- h264_encoder.reset(nullptr);
+ quicksync_encoder.reset(nullptr);
}
void Mixer::configure_card(unsigned card_index, const QSurfaceFormat &format, CaptureInterface *capture)
if (should_cut.exchange(false)) { // Test and clear.
string filename = generate_local_dump_filename(frame);
printf("Starting new recording: %s\n", filename.c_str());
- h264_encoder->close_output_file();
- h264_encoder->shutdown();
- h264_encoder.reset(new H264Encoder(h264_encoder_surface, global_flags.va_display, WIDTH, HEIGHT, &httpd));
- h264_encoder->open_output_file(filename.c_str());
+ quicksync_encoder->close_output_file();
+ quicksync_encoder->shutdown();
+ quicksync_encoder.reset(new QuickSyncEncoder(h264_encoder_surface, global_flags.va_display, WIDTH, HEIGHT, &httpd));
+ quicksync_encoder->open_output_file(filename.c_str());
}
#if 0
//theme_main_chain.chain->enable_phase_timing(true);
GLuint y_tex, cbcr_tex;
- bool got_frame = h264_encoder->begin_frame(&y_tex, &cbcr_tex);
+ bool got_frame = quicksync_encoder->begin_frame(&y_tex, &cbcr_tex);
assert(got_frame);
// Render main chain.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
const int64_t av_delay = TIMEBASE / 10; // Corresponds to the fixed delay in resampling_queue.h. TODO: Make less hard-coded.
- RefCountedGLsync fence = h264_encoder->end_frame(pts_int + av_delay, duration, theme_main_chain.input_frames);
+ RefCountedGLsync fence = quicksync_encoder->end_frame(pts_int + av_delay, duration, theme_main_chain.input_frames);
// The live frame just shows the RGBA texture we just rendered.
// It owns rgba_tex now.
}
// And finally add them to the output.
- h264_encoder->add_audio(frame_pts_int, move(samples_out));
+ quicksync_encoder->add_audio(frame_pts_int, move(samples_out));
}
void Mixer::subsample_chroma(GLuint src_tex, GLuint dst_tex)
//#include "sysdeps.h"
-#include "h264encode.h"
+#include "quicksync_encode.h"
#include <movit/util.h>
#include <EGL/eglplatform.h>
return storage;
}
-class H264EncoderImpl : public KeyFrameSignalReceiver {
+class QuickSyncEncoderImpl : public KeyFrameSignalReceiver {
public:
- H264EncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd);
- ~H264EncoderImpl();
+ QuickSyncEncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd);
+ ~QuickSyncEncoderImpl();
void add_audio(int64_t pts, vector<float> audio);
bool begin_frame(GLuint *y_tex, GLuint *cbcr_tex);
RefCountedGLsync end_frame(int64_t pts, int64_t duration, const vector<RefCountedFrame> &input_frames);
bitstream_put_ui(bs, nal_unit_type, 5);
}
-void H264EncoderImpl::sps_rbsp(bitstream *bs)
+void QuickSyncEncoderImpl::sps_rbsp(bitstream *bs)
{
int profile_idc = PROFILE_IDC_BASELINE;
}
-void H264EncoderImpl::pps_rbsp(bitstream *bs)
+void QuickSyncEncoderImpl::pps_rbsp(bitstream *bs)
{
bitstream_put_ue(bs, pic_param.pic_parameter_set_id); /* pic_parameter_set_id */
bitstream_put_ue(bs, pic_param.seq_parameter_set_id); /* seq_parameter_set_id */
rbsp_trailing_bits(bs);
}
-void H264EncoderImpl::slice_header(bitstream *bs)
+void QuickSyncEncoderImpl::slice_header(bitstream *bs)
{
int first_mb_in_slice = slice_param.macroblock_address;
}
}
-int H264EncoderImpl::build_packed_pic_buffer(unsigned char **header_buffer)
+int QuickSyncEncoderImpl::build_packed_pic_buffer(unsigned char **header_buffer)
{
bitstream bs;
}
int
-H264EncoderImpl::build_packed_seq_buffer(unsigned char **header_buffer)
+QuickSyncEncoderImpl::build_packed_seq_buffer(unsigned char **header_buffer)
{
bitstream bs;
return bs.bit_offset;
}
-int H264EncoderImpl::build_packed_slice_buffer(unsigned char **header_buffer)
+int QuickSyncEncoderImpl::build_packed_slice_buffer(unsigned char **header_buffer)
{
bitstream bs;
int is_idr = !!pic_param.pic_fields.bits.idr_pic_flag;
}
}
-void H264EncoderImpl::enable_zerocopy_if_possible()
+void QuickSyncEncoderImpl::enable_zerocopy_if_possible()
{
if (global_flags.uncompressed_video_to_http) {
fprintf(stderr, "Disabling zerocopy H.264 encoding due to --http-uncompressed-video.\n");
}
}
-VADisplay H264EncoderImpl::va_open_display(const string &va_display)
+VADisplay QuickSyncEncoderImpl::va_open_display(const string &va_display)
{
if (va_display.empty()) {
x11_display = XOpenDisplay(NULL);
}
}
-void H264EncoderImpl::va_close_display(VADisplay va_dpy)
+void QuickSyncEncoderImpl::va_close_display(VADisplay va_dpy)
{
if (x11_display) {
XCloseDisplay(x11_display);
}
}
-int H264EncoderImpl::init_va(const string &va_display)
+int QuickSyncEncoderImpl::init_va(const string &va_display)
{
VAProfile profile_list[]={VAProfileH264High, VAProfileH264Main, VAProfileH264Baseline, VAProfileH264ConstrainedBaseline};
VAEntrypoint *entrypoints;
return 0;
}
-int H264EncoderImpl::setup_encode()
+int QuickSyncEncoderImpl::setup_encode()
{
VAStatus va_status;
VASurfaceID *tmp_surfaceid;
sort(middle, end, less_than);
}
-void H264EncoderImpl::update_ReferenceFrames(int frame_type)
+void QuickSyncEncoderImpl::update_ReferenceFrames(int frame_type)
{
int i;
}
-int H264EncoderImpl::update_RefPicList(int frame_type)
+int QuickSyncEncoderImpl::update_RefPicList(int frame_type)
{
const auto descending_by_frame_idx = [](const VAPictureH264 &a, const VAPictureH264 &b) {
return a.frame_idx > b.frame_idx;
}
-int H264EncoderImpl::render_sequence()
+int QuickSyncEncoderImpl::render_sequence()
{
VABufferID seq_param_buf, rc_param_buf, render_id[2];
VAStatus va_status;
return TopFieldOrderCnt;
}
-int H264EncoderImpl::render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num)
+int QuickSyncEncoderImpl::render_picture(int frame_type, int display_frame_num, int gop_start_display_frame_num)
{
VABufferID pic_param_buf;
VAStatus va_status;
return 0;
}
-int H264EncoderImpl::render_packedsequence()
+int QuickSyncEncoderImpl::render_packedsequence()
{
VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
VABufferID packedseq_para_bufid, packedseq_data_bufid, render_id[2];
}
-int H264EncoderImpl::render_packedpicture()
+int QuickSyncEncoderImpl::render_packedpicture()
{
VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
VABufferID packedpic_para_bufid, packedpic_data_bufid, render_id[2];
return 0;
}
-void H264EncoderImpl::render_packedslice()
+void QuickSyncEncoderImpl::render_packedslice()
{
VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
VABufferID packedslice_para_bufid, packedslice_data_bufid, render_id[2];
free(packedslice_buffer);
}
-int H264EncoderImpl::render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type)
+int QuickSyncEncoderImpl::render_slice(int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num, int frame_type)
{
VABufferID slice_param_buf;
VAStatus va_status;
-void H264EncoderImpl::save_codeddata(storage_task task)
+void QuickSyncEncoderImpl::save_codeddata(storage_task task)
{
VACodedBufferSegment *buf_list = NULL;
VAStatus va_status;
}
}
-void H264EncoderImpl::encode_audio(
+void QuickSyncEncoderImpl::encode_audio(
const vector<float> &audio,
vector<float> *audio_queue,
int64_t audio_pts,
audio_queue->erase(audio_queue->begin(), audio_queue->begin() + sample_num);
}
-void H264EncoderImpl::encode_audio_one_frame(
+void QuickSyncEncoderImpl::encode_audio_one_frame(
const float *audio,
size_t num_samples,
int64_t audio_pts,
av_free_packet(&pkt);
}
-void H264EncoderImpl::encode_last_audio(
+void QuickSyncEncoderImpl::encode_last_audio(
vector<float> *audio_queue,
int64_t audio_pts,
AVCodecContext *ctx,
}
// this is weird. but it seems to put a new frame onto the queue
-void H264EncoderImpl::storage_task_enqueue(storage_task task)
+void QuickSyncEncoderImpl::storage_task_enqueue(storage_task task)
{
unique_lock<mutex> lock(storage_task_queue_mutex);
storage_task_queue.push(move(task));
storage_task_queue_changed.notify_all();
}
-void H264EncoderImpl::storage_task_thread()
+void QuickSyncEncoderImpl::storage_task_thread()
{
for ( ;; ) {
storage_task current;
}
}
-int H264EncoderImpl::release_encode()
+int QuickSyncEncoderImpl::release_encode()
{
for (unsigned i = 0; i < SURFACE_NUM; i++) {
vaDestroyBuffer(va_dpy, gl_surfaces[i].coded_buf);
return 0;
}
-int H264EncoderImpl::deinit_va()
+int QuickSyncEncoderImpl::deinit_va()
{
vaTerminate(va_dpy);
} // namespace
-H264EncoderImpl::H264EncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
+QuickSyncEncoderImpl::QuickSyncEncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
: current_storage_frame(0), surface(surface), httpd(httpd), frame_width(width), frame_height(height)
{
init_audio_encoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE, &context_audio_file, &resampler_audio_file);
memset(&pic_param, 0, sizeof(pic_param));
memset(&slice_param, 0, sizeof(slice_param));
- storage_thread = thread(&H264EncoderImpl::storage_task_thread, this);
+ storage_thread = thread(&QuickSyncEncoderImpl::storage_task_thread, this);
encode_thread = thread([this]{
//SDL_GL_MakeCurrent(window, context);
});
}
-H264EncoderImpl::~H264EncoderImpl()
+QuickSyncEncoderImpl::~QuickSyncEncoderImpl()
{
shutdown();
av_frame_free(&audio_frame);
close_output_stream();
}
-bool H264EncoderImpl::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
+bool QuickSyncEncoderImpl::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
{
assert(!is_shutdown);
{
return true;
}
-void H264EncoderImpl::add_audio(int64_t pts, vector<float> audio)
+void QuickSyncEncoderImpl::add_audio(int64_t pts, vector<float> audio)
{
assert(!is_shutdown);
{
frame_queue_nonempty.notify_all();
}
-RefCountedGLsync H264EncoderImpl::end_frame(int64_t pts, int64_t duration, const vector<RefCountedFrame> &input_frames)
+RefCountedGLsync QuickSyncEncoderImpl::end_frame(int64_t pts, int64_t duration, const vector<RefCountedFrame> &input_frames)
{
assert(!is_shutdown);
return fence;
}
-void H264EncoderImpl::shutdown()
+void QuickSyncEncoderImpl::shutdown()
{
if (is_shutdown) {
return;
is_shutdown = true;
}
-void H264EncoderImpl::open_output_file(const std::string &filename)
+void QuickSyncEncoderImpl::open_output_file(const std::string &filename)
{
AVFormatContext *avctx = avformat_alloc_context();
avctx->oformat = av_guess_format(NULL, filename.c_str(), NULL);
file_mux.reset(new Mux(avctx, frame_width, frame_height, Mux::CODEC_H264, context_audio_file->codec, TIMEBASE, DEFAULT_AUDIO_OUTPUT_BIT_RATE, nullptr));
}
-void H264EncoderImpl::close_output_file()
+void QuickSyncEncoderImpl::close_output_file()
{
file_mux.reset();
}
-void H264EncoderImpl::open_output_stream()
+void QuickSyncEncoderImpl::open_output_stream()
{
AVFormatContext *avctx = avformat_alloc_context();
AVOutputFormat *oformat = av_guess_format(global_flags.stream_mux_name.c_str(), nullptr, nullptr);
}
uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
- avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, &H264EncoderImpl::write_packet_thunk, nullptr);
+ avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, &QuickSyncEncoderImpl::write_packet_thunk, nullptr);
Mux::Codec video_codec;
if (global_flags.uncompressed_video_to_http) {
stream_mux_header.clear();
}
-void H264EncoderImpl::close_output_stream()
+void QuickSyncEncoderImpl::close_output_stream()
{
stream_mux.reset();
}
-int H264EncoderImpl::write_packet_thunk(void *opaque, uint8_t *buf, int buf_size)
+// AVIOContext write callback; 'opaque' carries the encoder instance (set in avio_alloc_context).
+int QuickSyncEncoderImpl::write_packet_thunk(void *opaque, uint8_t *buf, int buf_size)
{
- H264EncoderImpl *h264_encoder = (H264EncoderImpl *)opaque;
- return h264_encoder->write_packet(buf, buf_size);
+ QuickSyncEncoderImpl *quicksync_encoder = (QuickSyncEncoderImpl *)opaque;
+ return quicksync_encoder->write_packet(buf, buf_size);
}
-int H264EncoderImpl::write_packet(uint8_t *buf, int buf_size)
+int QuickSyncEncoderImpl::write_packet(uint8_t *buf, int buf_size)
{
if (stream_mux_writing_header) {
stream_mux_header.append((char *)buf, buf_size);
return buf_size;
}
-void H264EncoderImpl::encode_thread_func()
+void QuickSyncEncoderImpl::encode_thread_func()
{
int64_t last_dts = -1;
int gop_start_display_frame_num = 0;
}
}
-void H264EncoderImpl::encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts)
+void QuickSyncEncoderImpl::encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts)
{
if (pending_video_frames.empty()) {
return;
}
}
-void H264EncoderImpl::encode_remaining_audio()
+void QuickSyncEncoderImpl::encode_remaining_audio()
{
// This really ought to be empty by now, but just to be sure...
for (auto &pending_frame : pending_audio_frames) {
}
}
-void H264EncoderImpl::add_packet_for_uncompressed_frame(int64_t pts, int64_t duration, const uint8_t *data)
+void QuickSyncEncoderImpl::add_packet_for_uncompressed_frame(int64_t pts, int64_t duration, const uint8_t *data)
{
AVPacket pkt;
memset(&pkt, 0, sizeof(pkt));
} // namespace
-void H264EncoderImpl::encode_frame(H264EncoderImpl::PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
+void QuickSyncEncoderImpl::encode_frame(QuickSyncEncoderImpl::PendingFrame frame, int encoding_frame_num, int display_frame_num, int gop_start_display_frame_num,
int frame_type, int64_t pts, int64_t dts, int64_t duration)
{
// Wait for the GPU to be done with the frame.
}
// Proxy object.
-H264Encoder::H264Encoder(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
- : impl(new H264EncoderImpl(surface, va_display, width, height, httpd)) {}
+QuickSyncEncoder::QuickSyncEncoder(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
+ : impl(new QuickSyncEncoderImpl(surface, va_display, width, height, httpd)) {}
// Must be defined here because unique_ptr<> destructor needs to know the impl.
-H264Encoder::~H264Encoder() {}
+QuickSyncEncoder::~QuickSyncEncoder() {}
-void H264Encoder::add_audio(int64_t pts, vector<float> audio)
+void QuickSyncEncoder::add_audio(int64_t pts, vector<float> audio)
{
impl->add_audio(pts, audio);
}
-bool H264Encoder::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
+bool QuickSyncEncoder::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
{
return impl->begin_frame(y_tex, cbcr_tex);
}
-RefCountedGLsync H264Encoder::end_frame(int64_t pts, int64_t duration, const vector<RefCountedFrame> &input_frames)
+RefCountedGLsync QuickSyncEncoder::end_frame(int64_t pts, int64_t duration, const vector<RefCountedFrame> &input_frames)
{
return impl->end_frame(pts, duration, input_frames);
}
-void H264Encoder::shutdown()
+void QuickSyncEncoder::shutdown()
{
impl->shutdown();
}
-void H264Encoder::open_output_file(const std::string &filename)
+void QuickSyncEncoder::open_output_file(const std::string &filename)
{
impl->open_output_file(filename);
}
-void H264Encoder::close_output_file()
+void QuickSyncEncoder::close_output_file()
{
impl->close_output_file();
}