//#include "sysdeps.h"
#include "quicksync_encoder.h"

#include <assert.h>

#include <movit/resource_pool.h>
#include <movit/util.h>

#include <EGL/eglplatform.h>
#include <X11/X.h>
class QuickSyncEncoderImpl {
public:
- QuickSyncEncoderImpl(const std::string &filename, QSurface *surface, const string &va_display, int width, int height, Mux *stream_mux);
+ QuickSyncEncoderImpl(const std::string &filename, movit::ResourcePool *resource_pool, QSurface *surface, const string &va_display, int width, int height, AVOutputFormat *oformat, AudioEncoder *stream_audio_encoder, X264Encoder *x264_encoder);
~QuickSyncEncoderImpl();
void add_audio(int64_t pts, vector<float> audio);
bool begin_frame(GLuint *y_tex, GLuint *cbcr_tex);
RefCountedGLsync end_frame(int64_t pts, int64_t duration, const vector<RefCountedFrame> &input_frames);
void shutdown();
+ void release_gl_resources();
+ void set_stream_mux(Mux *mux)
+ {
+ stream_mux = mux;
+ }
private:
struct storage_task {
VADisplay va_open_display(const string &va_display);
void va_close_display(VADisplay va_dpy);
int setup_encode();
- int release_encode();
+ void release_encode();
void update_ReferenceFrames(int frame_type);
int update_RefPicList(int frame_type);
bool is_shutdown = false;
+ bool has_released_gl_resources = false;
bool use_zerocopy;
int drm_fd = -1;
map<int, PendingFrame> pending_video_frames; // under frame_queue_mutex
map<int64_t, vector<float>> pending_audio_frames; // under frame_queue_mutex
+ movit::ResourcePool *resource_pool;
QSurface *surface;
unique_ptr<AudioEncoder> file_audio_encoder;
- unique_ptr<AudioEncoder> stream_audio_encoder;
-
- Mux* stream_mux; // To HTTP.
- unique_ptr<Mux> file_mux; // To local disk.
+ AudioEncoder *stream_audio_encoder;
unique_ptr<FrameReorderer> reorderer;
- unique_ptr<X264Encoder> x264_encoder; // nullptr if not using x264.
+ X264Encoder *x264_encoder; // nullptr if not using x264.
+
+ Mux* stream_mux = nullptr; // To HTTP.
+ unique_ptr<Mux> file_mux; // To local disk.
Display *x11_display = nullptr;
VAStatus va_status;
VASurfaceID *tmp_surfaceid;
int codedbuf_size, i;
- static VASurfaceID src_surface[SURFACE_NUM];
- static VASurfaceID ref_surface[SURFACE_NUM];
+ VASurfaceID src_surface[SURFACE_NUM];
+ VASurfaceID ref_surface[SURFACE_NUM];
va_status = vaCreateConfig(va_dpy, h264_profile, VAEntrypointEncSlice,
&config_attrib[0], config_attrib_num, &config_id);
//glGenFramebuffers(SURFACE_NUM, fbos);
for (i = 0; i < SURFACE_NUM; i++) {
- glGenTextures(1, &gl_surfaces[i].y_tex);
- glGenTextures(1, &gl_surfaces[i].cbcr_tex);
-
- if (!use_zerocopy) {
- // Create Y image.
- glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].y_tex);
- glTexStorage2D(GL_TEXTURE_2D, 1, GL_R8, frame_width, frame_height);
-
- // Create CbCr image.
- glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].cbcr_tex);
- glTexStorage2D(GL_TEXTURE_2D, 1, GL_RG8, frame_width / 2, frame_height / 2);
+ if (use_zerocopy) {
+ gl_surfaces[i].y_tex = resource_pool->create_2d_texture(GL_R8, 1, 1);
+ gl_surfaces[i].cbcr_tex = resource_pool->create_2d_texture(GL_RG8, 1, 1);
+ } else {
+ gl_surfaces[i].y_tex = resource_pool->create_2d_texture(GL_R8, frame_width, frame_height);
+ gl_surfaces[i].cbcr_tex = resource_pool->create_2d_texture(GL_RG8, frame_width / 2, frame_height / 2);
// Generate a PBO to read into. It doesn't necessarily fit 1:1 with the VA-API
// buffers, due to potentially differing pitch.
}
file_audio_encoder->encode_audio(audio, audio_pts + global_delay());
- if (stream_audio_encoder) {
- stream_audio_encoder->encode_audio(audio, audio_pts + global_delay());
- }
+ stream_audio_encoder->encode_audio(audio, audio_pts + global_delay());
if (audio_pts == task.pts) break;
}
}
}
-int QuickSyncEncoderImpl::release_encode()
+void QuickSyncEncoderImpl::release_encode()
{
for (unsigned i = 0; i < SURFACE_NUM; i++) {
vaDestroyBuffer(va_dpy, gl_surfaces[i].coded_buf);
vaDestroySurfaces(va_dpy, &gl_surfaces[i].src_surface, 1);
vaDestroySurfaces(va_dpy, &gl_surfaces[i].ref_surface, 1);
+ }
+
+ vaDestroyContext(va_dpy, context_id);
+ vaDestroyConfig(va_dpy, config_id);
+}
+void QuickSyncEncoderImpl::release_gl_resources()
+{
+ assert(is_shutdown);
+ if (has_released_gl_resources) {
+ return;
+ }
+
+ for (unsigned i = 0; i < SURFACE_NUM; i++) {
if (!use_zerocopy) {
glBindBuffer(GL_PIXEL_PACK_BUFFER, gl_surfaces[i].pbo);
glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
glDeleteBuffers(1, &gl_surfaces[i].pbo);
}
- glDeleteTextures(1, &gl_surfaces[i].y_tex);
- glDeleteTextures(1, &gl_surfaces[i].cbcr_tex);
+ resource_pool->release_2d_texture(gl_surfaces[i].y_tex);
+ resource_pool->release_2d_texture(gl_surfaces[i].cbcr_tex);
}
- vaDestroyContext(va_dpy, context_id);
- vaDestroyConfig(va_dpy, config_id);
-
- return 0;
+ has_released_gl_resources = true;
}
int QuickSyncEncoderImpl::deinit_va()
} // namespace
-QuickSyncEncoderImpl::QuickSyncEncoderImpl(const std::string &filename, QSurface *surface, const string &va_display, int width, int height, Mux *stream_mux)
- : current_storage_frame(0), surface(surface), stream_mux(stream_mux), frame_width(width), frame_height(height)
+QuickSyncEncoderImpl::QuickSyncEncoderImpl(const std::string &filename, movit::ResourcePool *resource_pool, QSurface *surface, const string &va_display, int width, int height, AVOutputFormat *oformat, AudioEncoder *stream_audio_encoder, X264Encoder *x264_encoder)
+ : current_storage_frame(0), resource_pool(resource_pool), surface(surface), stream_audio_encoder(stream_audio_encoder), x264_encoder(x264_encoder), frame_width(width), frame_height(height)
{
- file_audio_encoder.reset(new AudioEncoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE));
+ file_audio_encoder.reset(new AudioEncoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE, oformat));
open_output_file(filename);
file_audio_encoder->add_mux(file_mux.get());
- if (global_flags.stream_audio_codec_name.empty()) {
- file_audio_encoder->add_mux(stream_mux);
- } else {
- stream_audio_encoder.reset(new AudioEncoder(global_flags.stream_audio_codec_name, global_flags.stream_audio_codec_bitrate));
- stream_audio_encoder->add_mux(stream_mux);
- }
frame_width_mbaligned = (frame_width + 15) & (~15);
frame_height_mbaligned = (frame_height + 15) & (~15);
reorderer.reset(new FrameReorderer(ip_period - 1, frame_width, frame_height));
}
if (global_flags.x264_video_to_http) {
- x264_encoder.reset(new X264Encoder(stream_mux));
+ assert(x264_encoder != nullptr);
+ } else {
+ assert(x264_encoder == nullptr);
}
init_va(va_display);
exit(1);
}
encode_thread_func();
+ delete_context(context);
});
}
QuickSyncEncoderImpl::~QuickSyncEncoderImpl()
{
	// Stop the encoder (joins the worker threads and finalizes output);
	// shutdown() is a no-op if it has already been called explicitly.
	shutdown();
	// Then free the GL textures/PBOs; also a no-op if already done.
	release_gl_resources();
}
bool QuickSyncEncoderImpl::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
frame_queue_nonempty.notify_all();
}
encode_thread.join();
- x264_encoder.reset();
{
unique_lock<mutex> lock(storage_task_queue_mutex);
storage_thread_should_quit = true;
exit(1);
}
- file_mux.reset(new Mux(avctx, frame_width, frame_height, Mux::CODEC_H264, file_audio_encoder->get_codec(), TIMEBASE, DEFAULT_AUDIO_OUTPUT_BIT_RATE, nullptr));
+ string video_extradata = ""; // FIXME: See other comment about global headers.
+ file_mux.reset(new Mux(avctx, frame_width, frame_height, Mux::CODEC_H264, video_extradata, file_audio_encoder->get_ctx(), TIMEBASE, nullptr));
}
void QuickSyncEncoderImpl::encode_thread_func()
pending_audio_frames.clear();
// Encode any leftover audio in the queues, and also any delayed frames.
+ // Note: stream_audio_encoder is not owned by us, so don't call encode_last_audio().
file_audio_encoder->encode_last_audio();
- if (stream_audio_encoder) {
- stream_audio_encoder->encode_last_audio();
- }
}
void QuickSyncEncoderImpl::add_packet_for_uncompressed_frame(int64_t pts, int64_t duration, const uint8_t *data)
CHECK_VASTATUS(va_status, "vaBeginPicture");
if (frame_type == FRAME_IDR) {
+ // FIXME: If the mux wants global headers, we should not put the
+ // SPS/PPS before each IDR frame, but rather put it into the
+ // codec extradata (formatted differently?).
render_sequence();
render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
if (h264_packedheader) {
}
// Proxy object.
-QuickSyncEncoder::QuickSyncEncoder(const std::string &filename, QSurface *surface, const string &va_display, int width, int height, Mux *stream_mux)
- : impl(new QuickSyncEncoderImpl(filename, surface, va_display, width, height, stream_mux)) {}
+QuickSyncEncoder::QuickSyncEncoder(const std::string &filename, movit::ResourcePool *resource_pool, QSurface *surface, const string &va_display, int width, int height, AVOutputFormat *oformat, AudioEncoder *stream_audio_encoder, X264Encoder *x264_encoder)
+ : impl(new QuickSyncEncoderImpl(filename, resource_pool, surface, va_display, width, height, oformat, stream_audio_encoder, x264_encoder)) {}
// Must be defined here, out of line, because the unique_ptr<> destructor
// needs QuickSyncEncoderImpl to be a complete type.
QuickSyncEncoder::~QuickSyncEncoder() {}
{
impl->shutdown();
}
+
+void QuickSyncEncoder::set_stream_mux(Mux *mux)
+{
+ impl->set_stream_mux(mux);
+}
+