X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=quicksync_encoder.cpp;h=001901873b7fd641a43183f0c7a2b15b45e46ed8;hb=ab03e5e6f24b1651b4ca7df95e20aa5786939209;hp=b2316dab550235b8ac9c6f5a2ec944b94d9c1823;hpb=8e3e40723b72bbf0c233167cee60b4959d6b08e7;p=nageru

diff --git a/quicksync_encoder.cpp b/quicksync_encoder.cpp
index b2316da..0019018 100644
--- a/quicksync_encoder.cpp
+++ b/quicksync_encoder.cpp
@@ -1,6 +1,7 @@
 //#include "sysdeps.h"
 #include "quicksync_encoder.h"
 
+#include 
 #include 
 #include 
 #include 
@@ -193,14 +194,17 @@ FrameReorderer::Frame FrameReorderer::get_first_frame()
 
 class QuickSyncEncoderImpl {
 public:
-	QuickSyncEncoderImpl(QSurface *surface, const string &va_display, int width, int height, Mux *stream_mux);
+	QuickSyncEncoderImpl(const std::string &filename, movit::ResourcePool *resource_pool, QSurface *surface, const string &va_display, int width, int height, AVOutputFormat *oformat, AudioEncoder *stream_audio_encoder, X264Encoder *x264_encoder);
 	~QuickSyncEncoderImpl();
 	void add_audio(int64_t pts, vector audio);
 	bool begin_frame(GLuint *y_tex, GLuint *cbcr_tex);
 	RefCountedGLsync end_frame(int64_t pts, int64_t duration, const vector &input_frames);
 	void shutdown();
-	void open_output_file(const std::string &filename);
-	void close_output_file();
+	void release_gl_resources();
+	void set_stream_mux(Mux *mux)
+	{
+		stream_mux = mux;
+	}
 
 private:
 	struct storage_task {
@@ -220,6 +224,7 @@ private:
 		return int64_t(ip_period - 1) * (TIMEBASE / MAX_FPS);
 	}
 
+	void open_output_file(const std::string &filename);
 	void encode_thread_func();
 	void encode_remaining_frames_as_p(int encoding_frame_num, int gop_start_display_frame_num, int64_t last_dts);
 	void add_packet_for_uncompressed_frame(int64_t pts, int64_t duration, const uint8_t *data);
@@ -247,11 +252,12 @@ private:
 	VADisplay va_open_display(const string &va_display);
 	void va_close_display(VADisplay va_dpy);
 	int setup_encode();
-	int release_encode();
+	void release_encode();
 	void update_ReferenceFrames(int frame_type);
 	int update_RefPicList(int frame_type);
 
 	bool is_shutdown = false;
+	bool has_released_gl_resources = false;
 	bool use_zerocopy;
 	int drm_fd = -1;
 
@@ -271,16 +277,17 @@ private:
 	map pending_video_frames; // under frame_queue_mutex
 	map> pending_audio_frames; // under frame_queue_mutex
 
+	movit::ResourcePool *resource_pool;
 	QSurface *surface;
 
 	unique_ptr file_audio_encoder;
-	unique_ptr stream_audio_encoder;
-
-	Mux* stream_mux; // To HTTP.
-	unique_ptr file_mux; // To local disk.
+	AudioEncoder *stream_audio_encoder;
 
 	unique_ptr reorderer;
-	unique_ptr x264_encoder; // nullptr if not using x264.
+	X264Encoder *x264_encoder; // nullptr if not using x264.
+
+	Mux* stream_mux = nullptr; // To HTTP.
+	unique_ptr file_mux; // To local disk.
 
 	Display *x11_display = nullptr;
 
@@ -1150,8 +1157,8 @@ int QuickSyncEncoderImpl::setup_encode()
 	VAStatus va_status;
 	VASurfaceID *tmp_surfaceid;
 	int codedbuf_size, i;
-	static VASurfaceID src_surface[SURFACE_NUM];
-	static VASurfaceID ref_surface[SURFACE_NUM];
+	VASurfaceID src_surface[SURFACE_NUM];
+	VASurfaceID ref_surface[SURFACE_NUM];
 
 	va_status = vaCreateConfig(va_dpy, h264_profile, VAEntrypointEncSlice,
 			&config_attrib[0], config_attrib_num, &config_id);
@@ -1202,17 +1209,12 @@ int QuickSyncEncoderImpl::setup_encode()
 	//glGenFramebuffers(SURFACE_NUM, fbos);
 
 	for (i = 0; i < SURFACE_NUM; i++) {
-		glGenTextures(1, &gl_surfaces[i].y_tex);
-		glGenTextures(1, &gl_surfaces[i].cbcr_tex);
-
-		if (!use_zerocopy) {
-			// Create Y image.
-			glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].y_tex);
-			glTexStorage2D(GL_TEXTURE_2D, 1, GL_R8, frame_width, frame_height);
-
-			// Create CbCr image.
-			glBindTexture(GL_TEXTURE_2D, gl_surfaces[i].cbcr_tex);
-			glTexStorage2D(GL_TEXTURE_2D, 1, GL_RG8, frame_width / 2, frame_height / 2);
+		if (use_zerocopy) {
+			gl_surfaces[i].y_tex = resource_pool->create_2d_texture(GL_R8, 1, 1);
+			gl_surfaces[i].cbcr_tex = resource_pool->create_2d_texture(GL_RG8, 1, 1);
+		} else {
+			gl_surfaces[i].y_tex = resource_pool->create_2d_texture(GL_R8, frame_width, frame_height);
+			gl_surfaces[i].cbcr_tex = resource_pool->create_2d_texture(GL_RG8, frame_width / 2, frame_height / 2);
 
 			// Generate a PBO to read into. It doesn't necessarily fit 1:1 with the VA-API
 			// buffers, due to potentially differing pitch.
@@ -1652,9 +1654,7 @@ void QuickSyncEncoderImpl::save_codeddata(storage_task task)
 		}
 
 		file_audio_encoder->encode_audio(audio, audio_pts + global_delay());
-		if (stream_audio_encoder) {
-			stream_audio_encoder->encode_audio(audio, audio_pts + global_delay());
-		}
+		stream_audio_encoder->encode_audio(audio, audio_pts + global_delay());
 
 		if (audio_pts == task.pts) break;
 	}
@@ -1697,27 +1697,37 @@ void QuickSyncEncoderImpl::storage_task_thread()
 	}
 }
 
-int QuickSyncEncoderImpl::release_encode()
+void QuickSyncEncoderImpl::release_encode()
 {
 	for (unsigned i = 0; i < SURFACE_NUM; i++) {
 		vaDestroyBuffer(va_dpy, gl_surfaces[i].coded_buf);
 		vaDestroySurfaces(va_dpy, &gl_surfaces[i].src_surface, 1);
 		vaDestroySurfaces(va_dpy, &gl_surfaces[i].ref_surface, 1);
+	}
+
+	vaDestroyContext(va_dpy, context_id);
+	vaDestroyConfig(va_dpy, config_id);
+}
+
+void QuickSyncEncoderImpl::release_gl_resources()
+{
+	assert(is_shutdown);
+	if (has_released_gl_resources) {
+		return;
+	}
+	for (unsigned i = 0; i < SURFACE_NUM; i++) {
 		if (!use_zerocopy) {
 			glBindBuffer(GL_PIXEL_PACK_BUFFER, gl_surfaces[i].pbo);
 			glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
 			glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
 			glDeleteBuffers(1, &gl_surfaces[i].pbo);
 		}
 
-		glDeleteTextures(1, &gl_surfaces[i].y_tex);
-		glDeleteTextures(1, &gl_surfaces[i].cbcr_tex);
+		resource_pool->release_2d_texture(gl_surfaces[i].y_tex);
+		resource_pool->release_2d_texture(gl_surfaces[i].cbcr_tex);
 	}
 
-	vaDestroyContext(va_dpy, context_id);
-	vaDestroyConfig(va_dpy, config_id);
-
-	return 0;
+	has_released_gl_resources = true;
 }
 
 int QuickSyncEncoderImpl::deinit_va()
@@ -1733,15 +1743,12 @@ namespace {
 
 }  // namespace
 
-QuickSyncEncoderImpl::QuickSyncEncoderImpl(QSurface *surface, const string &va_display, int width, int height, Mux *stream_mux)
-	: current_storage_frame(0), surface(surface), stream_mux(stream_mux), frame_width(width), frame_height(height)
+QuickSyncEncoderImpl::QuickSyncEncoderImpl(const std::string &filename, movit::ResourcePool *resource_pool, QSurface *surface, const string &va_display, int width, int height, AVOutputFormat *oformat, AudioEncoder *stream_audio_encoder, X264Encoder *x264_encoder)
+	: current_storage_frame(0), resource_pool(resource_pool), surface(surface), stream_audio_encoder(stream_audio_encoder), x264_encoder(x264_encoder), frame_width(width), frame_height(height)
 {
-	if (global_flags.stream_audio_codec_name.empty()) {
-		file_audio_encoder.reset(new AudioEncoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE, { file_mux.get(), stream_mux }));
-	} else {
-		file_audio_encoder.reset(new AudioEncoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE, { file_mux.get() }));
-		stream_audio_encoder.reset(new AudioEncoder(global_flags.stream_audio_codec_name, global_flags.stream_audio_codec_bitrate, { stream_mux }));
-	}
+	file_audio_encoder.reset(new AudioEncoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE, oformat));
+	open_output_file(filename);
+	file_audio_encoder->add_mux(file_mux.get());
 
 	frame_width_mbaligned = (frame_width + 15) & (~15);
 	frame_height_mbaligned = (frame_height + 15) & (~15);
@@ -1753,7 +1760,9 @@ QuickSyncEncoderImpl::QuickSyncEncoderImpl(QSurface *surface, const string &va_d
 		reorderer.reset(new FrameReorderer(ip_period - 1, frame_width, frame_height));
 	}
 	if (global_flags.x264_video_to_http) {
-		x264_encoder.reset(new X264Encoder(stream_mux));
+		assert(x264_encoder != nullptr);
+	} else {
+		assert(x264_encoder == nullptr);
 	}
 
 	init_va(va_display);
@@ -1778,12 +1787,14 @@ QuickSyncEncoderImpl::QuickSyncEncoderImpl(QSurface *surface, const string &va_d
 			exit(1);
 		}
 		encode_thread_func();
+		delete_context(context);
 	});
 }
 
 QuickSyncEncoderImpl::~QuickSyncEncoderImpl()
 {
 	shutdown();
+	release_gl_resources();
 }
 
 bool QuickSyncEncoderImpl::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
@@ -1925,7 +1936,6 @@ void QuickSyncEncoderImpl::shutdown()
 		frame_queue_nonempty.notify_all();
 	}
 	encode_thread.join();
-	x264_encoder.reset();
 	{
 		unique_lock lock(storage_task_queue_mutex);
 		storage_thread_should_quit = true;
@@ -1937,6 +1947,7 @@ void QuickSyncEncoderImpl::shutdown()
 
 	release_encode();
 	deinit_va();
+	file_mux.reset();
 	is_shutdown = true;
 }
 
@@ -1955,12 +1966,8 @@ void QuickSyncEncoderImpl::open_output_file(const std::string &filename)
 		exit(1);
 	}
 
-	file_mux.reset(new Mux(avctx, frame_width, frame_height, Mux::CODEC_H264, file_audio_encoder->get_codec(), TIMEBASE, DEFAULT_AUDIO_OUTPUT_BIT_RATE, nullptr));
-}
-
-void QuickSyncEncoderImpl::close_output_file()
-{
-	file_mux.reset();
+	string video_extradata = ""; // FIXME: See other comment about global headers.
+	file_mux.reset(new Mux(avctx, frame_width, frame_height, Mux::CODEC_H264, video_extradata, file_audio_encoder->get_ctx(), TIMEBASE, nullptr));
 }
 
 void QuickSyncEncoderImpl::encode_thread_func()
@@ -2058,10 +2065,8 @@ void QuickSyncEncoderImpl::encode_remaining_audio()
 	pending_audio_frames.clear();
 
 	// Encode any leftover audio in the queues, and also any delayed frames.
+	// Note: stream_audio_encoder is not owned by us, so don't call encode_last_audio().
 	file_audio_encoder->encode_last_audio();
-	if (stream_audio_encoder) {
-		stream_audio_encoder->encode_last_audio();
-	}
 }
 
 void QuickSyncEncoderImpl::add_packet_for_uncompressed_frame(int64_t pts, int64_t duration, const uint8_t *data)
@@ -2154,6 +2159,9 @@ void QuickSyncEncoderImpl::encode_frame(QuickSyncEncoderImpl::PendingFrame frame
 	CHECK_VASTATUS(va_status, "vaBeginPicture");
 
 	if (frame_type == FRAME_IDR) {
+		// FIXME: If the mux wants global headers, we should not put the
+		// SPS/PPS before each IDR frame, but rather put it into the
+		// codec extradata (formatted differently?).
 		render_sequence();
 		render_picture(frame_type, display_frame_num, gop_start_display_frame_num);
 		if (h264_packedheader) {
@@ -2183,8 +2191,8 @@ void QuickSyncEncoderImpl::encode_frame(QuickSyncEncoderImpl::PendingFrame frame
 }
 
 // Proxy object.
-QuickSyncEncoder::QuickSyncEncoder(QSurface *surface, const string &va_display, int width, int height, Mux *stream_mux)
-	: impl(new QuickSyncEncoderImpl(surface, va_display, width, height, stream_mux)) {}
+QuickSyncEncoder::QuickSyncEncoder(const std::string &filename, movit::ResourcePool *resource_pool, QSurface *surface, const string &va_display, int width, int height, AVOutputFormat *oformat, AudioEncoder *stream_audio_encoder, X264Encoder *x264_encoder)
+	: impl(new QuickSyncEncoderImpl(filename, resource_pool, surface, va_display, width, height, oformat, stream_audio_encoder, x264_encoder)) {}
 
 // Must be defined here because unique_ptr<> destructor needs to know the impl.
 QuickSyncEncoder::~QuickSyncEncoder() {}
@@ -2209,12 +2217,8 @@ void QuickSyncEncoder::shutdown()
 	impl->shutdown();
 }
 
-void QuickSyncEncoder::open_output_file(const std::string &filename)
+void QuickSyncEncoder::set_stream_mux(Mux *mux)
 {
-	impl->open_output_file(filename);
+	impl->set_stream_mux(mux);
 }
 
-void QuickSyncEncoder::close_output_file()
-{
-	impl->close_output_file();
-}
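
Caller-side sketch (not part of the diff above, and not code from the nageru repository): with the new constructor, the encoder opens the local recording itself from `filename`/`oformat` and owns only the file mux, while the HTTP stream mux, the stream AudioEncoder and the optional X264Encoder stay owned by the caller and are merely borrowed. Every name outside the QuickSyncEncoder API shown in the diff (the wrapper function, `stream_mux`, and the av_guess_format() suggestion) is an assumption for illustration only.

#include <memory>
#include <string>

#include "quicksync_encoder.h"

// Hypothetical wrapper; assumes the caller already owns the stream-side objects.
std::unique_ptr<QuickSyncEncoder> create_quicksync_encoder(
	const std::string &filename, movit::ResourcePool *resource_pool, QSurface *surface,
	const std::string &va_display, int width, int height,
	AVOutputFormat *oformat,             // e.g. av_guess_format("nut", nullptr, nullptr); container is assumed
	AudioEncoder *stream_audio_encoder,  // borrowed, not owned by the encoder
	X264Encoder *x264_encoder)           // must be nullptr unless global_flags.x264_video_to_http is set
{
	return std::unique_ptr<QuickSyncEncoder>(new QuickSyncEncoder(
		filename, resource_pool, surface, va_display, width, height,
		oformat, stream_audio_encoder, x264_encoder));
}

// The HTTP mux is typically created later (once the stream header is known),
// so it is handed over explicitly instead of being a constructor argument:
//
//   encoder->set_stream_mux(stream_mux.get());  // stream_mux: assumed caller-owned Mux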