#include "context.h"
#include "defs.h"
#include "flags.h"
-#include "httpd.h"
#include "mux.h"
#include "timebase.h"
#include "x264_encoder.h"
return storage;
}
-class QuickSyncEncoderImpl : public KeyFrameSignalReceiver {
+class QuickSyncEncoderImpl {
public:
- QuickSyncEncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd);
+ QuickSyncEncoderImpl(QSurface *surface, const string &va_display, int width, int height, Mux *stream_mux);
~QuickSyncEncoderImpl();
void add_audio(int64_t pts, vector<float> audio);
bool begin_frame(GLuint *y_tex, GLuint *cbcr_tex);
void open_output_file(const std::string &filename);
void close_output_file();
- virtual void signal_keyframe() override {
- stream_mux_writing_keyframes = true;
- }
-
private:
struct storage_task {
unsigned long long display_order;
int release_encode();
void update_ReferenceFrames(int frame_type);
int update_RefPicList(int frame_type);
- void open_output_stream();
- void close_output_stream();
- static int write_packet_thunk(void *opaque, uint8_t *buf, int buf_size);
- int write_packet(uint8_t *buf, int buf_size);
bool is_shutdown = false;
bool use_zerocopy;
vector<float> audio_queue_file;
vector<float> audio_queue_stream;
- unique_ptr<Mux> stream_mux; // To HTTP.
+ Mux* stream_mux; // To HTTP.
unique_ptr<Mux> file_mux; // To local disk.
- // While Mux object is constructing, <stream_mux_writing_header> is true,
- // and the header is being collected into stream_mux_header.
- bool stream_mux_writing_header;
- string stream_mux_header;
-
- bool stream_mux_writing_keyframes = false;
-
AVFrame *audio_frame = nullptr;
- HTTPD *httpd;
unique_ptr<FrameReorderer> reorderer;
unique_ptr<X264Encoder> x264_encoder; // nullptr if not using x264.
if (context_audio_stream) {
encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, resampler_audio_file, { file_mux.get() });
- encode_audio(audio, &audio_queue_stream, audio_pts, context_audio_stream, resampler_audio_stream, { stream_mux.get() });
+ encode_audio(audio, &audio_queue_stream, audio_pts, context_audio_stream, resampler_audio_stream, { stream_mux });
} else {
- encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, resampler_audio_file, { stream_mux.get(), file_mux.get() });
+ encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, resampler_audio_file, { stream_mux, file_mux.get() });
}
last_audio_pts = audio_pts + audio.size() * TIMEBASE / (OUTPUT_FREQUENCY * 2);
} // namespace
-QuickSyncEncoderImpl::QuickSyncEncoderImpl(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
- : current_storage_frame(0), surface(surface), httpd(httpd), frame_width(width), frame_height(height)
+QuickSyncEncoderImpl::QuickSyncEncoderImpl(QSurface *surface, const string &va_display, int width, int height, Mux *stream_mux)
+ : current_storage_frame(0), surface(surface), stream_mux(stream_mux), frame_width(width), frame_height(height)
{
init_audio_encoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE, &context_audio_file, &resampler_audio_file);
frame_width_mbaligned = (frame_width + 15) & (~15);
frame_height_mbaligned = (frame_height + 15) & (~15);
- open_output_stream();
-
audio_frame = av_frame_alloc();
//print_input();
reorderer.reset(new FrameReorderer(ip_period - 1, frame_width, frame_height));
}
if (global_flags.x264_video_to_http) {
- x264_encoder.reset(new X264Encoder(stream_mux.get()));
+ x264_encoder.reset(new X264Encoder(stream_mux));
}
init_va(va_display);
avresample_free(&resampler_audio_stream);
avcodec_free_context(&context_audio_file);
avcodec_free_context(&context_audio_stream);
- close_output_stream();
}
bool QuickSyncEncoderImpl::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
file_mux.reset();
}
-void QuickSyncEncoderImpl::open_output_stream()
-{
- AVFormatContext *avctx = avformat_alloc_context();
- AVOutputFormat *oformat = av_guess_format(global_flags.stream_mux_name.c_str(), nullptr, nullptr);
- assert(oformat != nullptr);
- avctx->oformat = oformat;
-
- string codec_name;
- int bit_rate;
-
- if (global_flags.stream_audio_codec_name.empty()) {
- codec_name = AUDIO_OUTPUT_CODEC_NAME;
- bit_rate = DEFAULT_AUDIO_OUTPUT_BIT_RATE;
- } else {
- codec_name = global_flags.stream_audio_codec_name;
- bit_rate = global_flags.stream_audio_codec_bitrate;
- }
-
- uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
- avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, &QuickSyncEncoderImpl::write_packet_thunk, nullptr);
-
- Mux::Codec video_codec;
- if (global_flags.uncompressed_video_to_http) {
- video_codec = Mux::CODEC_NV12;
- } else {
- video_codec = Mux::CODEC_H264;
- }
-
- avctx->flags = AVFMT_FLAG_CUSTOM_IO;
- AVCodec *codec_audio = avcodec_find_encoder_by_name(codec_name.c_str());
- if (codec_audio == nullptr) {
- fprintf(stderr, "ERROR: Could not find codec '%s'\n", codec_name.c_str());
- exit(1);
- }
-
- int time_base = global_flags.stream_coarse_timebase ? COARSE_TIMEBASE : TIMEBASE;
- stream_mux_writing_header = true;
- stream_mux.reset(new Mux(avctx, frame_width, frame_height, video_codec, codec_audio, time_base, bit_rate, this));
- stream_mux_writing_header = false;
- httpd->set_header(stream_mux_header);
- stream_mux_header.clear();
-}
-
-void QuickSyncEncoderImpl::close_output_stream()
-{
- stream_mux.reset();
-}
-
-int QuickSyncEncoderImpl::write_packet_thunk(void *opaque, uint8_t *buf, int buf_size)
-{
- QuickSyncEncoderImpl *h264_encoder = (QuickSyncEncoderImpl *)opaque;
- return h264_encoder->write_packet(buf, buf_size);
-}
-
-int QuickSyncEncoderImpl::write_packet(uint8_t *buf, int buf_size)
-{
- if (stream_mux_writing_header) {
- stream_mux_header.append((char *)buf, buf_size);
- } else {
- httpd->add_data((char *)buf, buf_size, stream_mux_writing_keyframes);
- stream_mux_writing_keyframes = false;
- }
- return buf_size;
-}
-
void QuickSyncEncoderImpl::encode_thread_func()
{
int64_t last_dts = -1;
if (context_audio_stream) {
encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, resampler_audio_file, { file_mux.get() });
- encode_audio(audio, &audio_queue_stream, audio_pts, context_audio_stream, resampler_audio_stream, { stream_mux.get() });
+ encode_audio(audio, &audio_queue_stream, audio_pts, context_audio_stream, resampler_audio_stream, { stream_mux });
} else {
- encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, resampler_audio_file, { stream_mux.get(), file_mux.get() });
+ encode_audio(audio, &audio_queue_file, audio_pts, context_audio_file, resampler_audio_file, { stream_mux, file_mux.get() });
}
last_audio_pts = audio_pts + audio.size() * TIMEBASE / (OUTPUT_FREQUENCY * 2);
}
// Encode any leftover audio in the queues, and also any delayed frames.
if (context_audio_stream) {
encode_last_audio(&audio_queue_file, last_audio_pts, context_audio_file, resampler_audio_file, { file_mux.get() });
- encode_last_audio(&audio_queue_stream, last_audio_pts, context_audio_stream, resampler_audio_stream, { stream_mux.get() });
+ encode_last_audio(&audio_queue_stream, last_audio_pts, context_audio_stream, resampler_audio_stream, { stream_mux });
} else {
- encode_last_audio(&audio_queue_file, last_audio_pts, context_audio_file, resampler_audio_file, { stream_mux.get(), file_mux.get() });
+ encode_last_audio(&audio_queue_file, last_audio_pts, context_audio_file, resampler_audio_file, { stream_mux, file_mux.get() });
}
}
}
// Proxy object.
-QuickSyncEncoder::QuickSyncEncoder(QSurface *surface, const string &va_display, int width, int height, HTTPD *httpd)
- : impl(new QuickSyncEncoderImpl(surface, va_display, width, height, httpd)) {}
+QuickSyncEncoder::QuickSyncEncoder(QSurface *surface, const string &va_display, int width, int height, Mux *stream_mux)
+ : impl(new QuickSyncEncoderImpl(surface, va_display, width, height, stream_mux)) {}
// Must be defined here because unique_ptr<> destructor needs to know the impl.
QuickSyncEncoder::~QuickSyncEncoder() {}
#include "video_encoder.h"
+#include <assert.h>
+
#include <string>
#include "defs.h"
+#include "flags.h"
+#include "httpd.h"
+#include "timebase.h"
#include "quicksync_encoder.h"
using namespace std;
VideoEncoder::VideoEncoder(QSurface *surface, const std::string &va_display, int width, int height, HTTPD *httpd)
	: surface(surface), va_display(va_display), width(width), height(height), httpd(httpd)
{
-	quicksync_encoder.reset(new QuickSyncEncoder(surface, va_display, width, height, httpd));
+	// The stream mux must exist before the QuickSyncEncoder is constructed,
+	// since the encoder now writes into the mux instead of talking to the
+	// HTTPD directly (see the changed QuickSyncEncoder constructor signature).
+	open_output_stream();
+
+	quicksync_encoder.reset(new QuickSyncEncoder(surface, va_display, width, height, stream_mux.get()));
	quicksync_encoder->open_output_file(generate_local_dump_filename(/*frame=*/0).c_str());
}
VideoEncoder::~VideoEncoder()
{
	quicksync_encoder.reset(nullptr);
+	// Tear down the stream mux only after the encoder that writes into it
+	// is gone (reverse of construction order in the constructor).
+	close_output_stream();
}
void VideoEncoder::do_cut(int frame)
printf("Starting new recording: %s\n", filename.c_str());
quicksync_encoder->close_output_file();
quicksync_encoder->shutdown();
- quicksync_encoder.reset(new QuickSyncEncoder(surface, va_display, width, height, httpd));
+ quicksync_encoder.reset(new QuickSyncEncoder(surface, va_display, width, height, stream_mux.get()));
quicksync_encoder->open_output_file(filename.c_str());
}
{
return quicksync_encoder->end_frame(pts, duration, input_frames);
}
+
+// Sets up the mux for the HTTP output stream: allocates an AVFormatContext
+// with a custom AVIO write callback (write_packet_thunk), picks the stream
+// audio codec and bit rate from the command-line flags (falling back to the
+// compiled-in defaults), then hands the collected mux header to the HTTPD so
+// that newly connecting clients receive it first.
+void VideoEncoder::open_output_stream()
+{
+	AVFormatContext *avctx = avformat_alloc_context();
+	AVOutputFormat *oformat = av_guess_format(global_flags.stream_mux_name.c_str(), nullptr, nullptr);
+	assert(oformat != nullptr);
+	avctx->oformat = oformat;
+
+	string codec_name;
+	int bit_rate;
+
+	if (global_flags.stream_audio_codec_name.empty()) {
+		codec_name = AUDIO_OUTPUT_CODEC_NAME;
+		bit_rate = DEFAULT_AUDIO_OUTPUT_BIT_RATE;
+	} else {
+		codec_name = global_flags.stream_audio_codec_name;
+		bit_rate = global_flags.stream_audio_codec_bitrate;
+	}
+
+	// <buf> becomes owned by the AVIOContext; <this> is the opaque pointer
+	// handed back to write_packet_thunk. Write-only (read callback is nullptr).
+	uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
+	avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, &VideoEncoder::write_packet_thunk, nullptr);
+
+	Mux::Codec video_codec;
+	if (global_flags.uncompressed_video_to_http) {
+		video_codec = Mux::CODEC_NV12;
+	} else {
+		video_codec = Mux::CODEC_H264;
+	}
+
+	// NOTE(review): plain assignment clobbers any flags already set by
+	// avformat_alloc_context(); |= AVFMT_FLAG_CUSTOM_IO would be safer, but
+	// this matches the behavior of the code this was moved from.
+	avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+	AVCodec *codec_audio = avcodec_find_encoder_by_name(codec_name.c_str());
+	if (codec_audio == nullptr) {
+		fprintf(stderr, "ERROR: Could not find codec '%s'\n", codec_name.c_str());
+		exit(1);
+	}
+
+	int time_base = global_flags.stream_coarse_timebase ? COARSE_TIMEBASE : TIMEBASE;
+	// While the Mux constructor runs, every write_packet() call carries part
+	// of the stream header; collect it into stream_mux_header instead of
+	// sending it to clients directly.
+	stream_mux_writing_header = true;
+	stream_mux.reset(new Mux(avctx, width, height, video_codec, codec_audio, time_base, bit_rate, this));
+	stream_mux_writing_header = false;
+	httpd->set_header(stream_mux_header);
+	stream_mux_header.clear();
+}
+
+// Destroys the stream mux; the Mux destructor is responsible for finalizing
+// and freeing the underlying AVFormatContext. Called from ~VideoEncoder()
+// after the QuickSyncEncoder writing into the mux has been destroyed.
+void VideoEncoder::close_output_stream()
+{
+	stream_mux.reset();
+}
+
+// Static trampoline for the AVIOContext write callback; <opaque> is the
+// VideoEncoder* registered in open_output_stream(). Returns the number of
+// bytes consumed, as required by the avio write_packet contract.
+int VideoEncoder::write_packet_thunk(void *opaque, uint8_t *buf, int buf_size)
+{
+	VideoEncoder *video_encoder = (VideoEncoder *)opaque;
+	return video_encoder->write_packet(buf, buf_size);
+}
+
+// Receives muxed bytes destined for the HTTP stream. While the Mux is being
+// constructed (stream_mux_writing_header == true), the bytes are accumulated
+// into stream_mux_header; afterwards they are forwarded to the HTTPD, tagged
+// with whether a keyframe has started since the last packet.
+// NOTE(review): this patch deletes QuickSyncEncoderImpl::signal_keyframe(),
+// which was the only visible place that set stream_mux_writing_keyframes to
+// true — confirm VideoEncoder gains an equivalent keyframe signal (e.g. via
+// the <this> passed to the Mux constructor), or stream clients will never
+// see a packet flagged as a keyframe start.
+int VideoEncoder::write_packet(uint8_t *buf, int buf_size)
+{
+	if (stream_mux_writing_header) {
+		stream_mux_header.append((char *)buf, buf_size);
+	} else {
+		httpd->add_data((char *)buf, buf_size, stream_mux_writing_keyframes);
+		stream_mux_writing_keyframes = false;
+	}
+	return buf_size;
+}
+