Fix a leak in the H.264 encoder.
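
The leak being fixed: per the comments added below, vaRenderPicture() is supposed to
consume the parameter buffers it is handed, but in practice the
VAEncPictureParameterBuffer and VAEncSliceParameterBuffer handles were never released,
and the data of the AVPacket produced by the audio encoder was never freed either.
The diff therefore adds an explicit vaDestroyBuffer() after each vaRenderPicture()
call (as the GStreamer VA-API code also does) and an av_free_packet() once the packet
has been handed off. A minimal sketch of the VA-API pattern, using a hypothetical
helper that is not part of the actual file:

    #include <va/va.h>

    // Render one parameter buffer, then destroy it explicitly; relying on
    // vaRenderPicture() alone leaks the buffer handle in practice.
    static bool render_and_destroy(VADisplay dpy, VAContextID ctx, VABufferID buf)
    {
        VAStatus va_status = vaRenderPicture(dpy, ctx, &buf, 1);
        if (va_status != VA_STATUS_SUCCESS)
            return false;
        return vaDestroyBuffer(dpy, buf) == VA_STATUS_SUCCESS;
    }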

diff --git a/h264encode.cpp b/h264encode.cpp
index 55ed9a18caa362264e836f161645e8cfad33c601..a16a5ea0b376dc4ed5678a48b7c4417ef433533e 100644
--- a/h264encode.cpp
+++ b/h264encode.cpp
@@ -7,11 +7,11 @@
 #include <assert.h>
 #include <epoxy/egl.h>
 #include <libavcodec/avcodec.h>
-#include <libavformat/avio.h>
-#include <libavutil/mathematics.h>
+#include <libavutil/channel_layout.h>
+#include <libavutil/frame.h>
 #include <libavutil/rational.h>
+#include <libavutil/samplefmt.h>
 #include <libdrm/drm_fourcc.h>
-#include <stdint.h>
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
 #include <va/va_enc_h264.h>
 #include <va/va_x11.h>
 #include <condition_variable>
+#include <cstdint>
+#include <memory>
 #include <mutex>
 #include <queue>
 #include <string>
 #include <thread>
+#include <utility>
 
 #include "context.h"
+#include "defs.h"
 #include "httpd.h"
 #include "timebase.h"
 
@@ -110,7 +114,7 @@ static  int frame_width = 176;
 static  int frame_height = 144;
 static  int frame_width_mbaligned;
 static  int frame_height_mbaligned;
-static  int frame_rate = 60;
+static  int frame_rate = FPS;
 static  unsigned int frame_bitrate = 0;
 static  unsigned int frame_slices = 1;
 static  double frame_size = 0;
@@ -118,7 +122,7 @@ static  int initial_qp = 15;
 //static  int initial_qp = 28;
 static  int minimal_qp = 0;
 static  int intra_period = 30;
-static  int intra_idr_period = 60;
+static  int intra_idr_period = FPS;
 static  int ip_period = 3;
 static  int rc_mode = -1;
 static  int rc_default_modes[] = {
@@ -340,7 +344,17 @@ static void sps_rbsp(bitstream *bs)
         bitstream_put_ui(bs, 1, 1); /* vui_parameters_present_flag */
         bitstream_put_ui(bs, 0, 1); /* aspect_ratio_info_present_flag */
         bitstream_put_ui(bs, 0, 1); /* overscan_info_present_flag */
-        bitstream_put_ui(bs, 0, 1); /* video_signal_type_present_flag */
+        bitstream_put_ui(bs, 1, 1); /* video_signal_type_present_flag */
+        {
+            bitstream_put_ui(bs, 5, 3);  /* video_format (5 = Unspecified) */
+            bitstream_put_ui(bs, 0, 1);  /* video_full_range_flag */
+            bitstream_put_ui(bs, 1, 1);  /* colour_description_present_flag */
+            {
+                bitstream_put_ui(bs, 1, 8);  /* colour_primaries (1 = BT.709) */
+                bitstream_put_ui(bs, 2, 8);  /* transfer_characteristics (2 = unspecified, since we use sRGB) */
+                bitstream_put_ui(bs, 6, 8);  /* matrix_coefficients (6 = BT.601/SMPTE 170M) */
+            }
+        }
         bitstream_put_ui(bs, 0, 1); /* chroma_loc_info_present_flag */
         bitstream_put_ui(bs, 1, 1); /* timing_info_present_flag */
         {
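
The SPS now signals colour information in the VUI instead of leaving it entirely
unspecified: BT.709 primaries, unspecified transfer characteristics, BT.601
(SMPTE 170M) matrix coefficients, and limited range (video_full_range_flag = 0).
For reference, these are the corresponding libavutil enums an FFmpeg-based consumer
would report; the helper below is only an illustration and is not part of
h264encode.cpp:

    #include <libavutil/frame.h>
    #include <libavutil/pixfmt.h>

    // Tag a frame with the same colour description the SPS above writes
    // (the enum values match the H.264 VUI code points one-to-one).
    static void tag_frame_colors(AVFrame *frame)
    {
        frame->color_primaries = AVCOL_PRI_BT709;        // colour_primaries = 1
        frame->color_trc       = AVCOL_TRC_UNSPECIFIED;  // transfer_characteristics = 2
        frame->colorspace      = AVCOL_SPC_SMPTE170M;    // matrix_coefficients = 6
    }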
@@ -1455,6 +1469,12 @@ static int render_picture(void)
     va_status = vaRenderPicture(va_dpy, context_id, &pic_param_buf, 1);
     CHECK_VASTATUS(va_status, "vaRenderPicture");
 
+    // Supposedly vaRenderPicture() is supposed to destroy the buffer implicitly,
+    // but if we don't delete it here, we get leaks. The GStreamer implementation
+    // does the same.
+    va_status = vaDestroyBuffer(va_dpy, pic_param_buf);
+    CHECK_VASTATUS(va_status, "vaDestroyBuffer");
+
     return 0;
 }
 
@@ -1621,11 +1641,17 @@ static int render_slice(void)
 
     va_status = vaCreateBuffer(va_dpy, context_id, VAEncSliceParameterBufferType,
                                sizeof(slice_param), 1, &slice_param, &slice_param_buf);
-    CHECK_VASTATUS(va_status, "vaCreateBuffer");;
+    CHECK_VASTATUS(va_status, "vaCreateBuffer");
 
     va_status = vaRenderPicture(va_dpy, context_id, &slice_param_buf, 1);
     CHECK_VASTATUS(va_status, "vaRenderPicture");
-    
+
+    // Supposedly vaRenderPicture() is supposed to destroy the buffer implicitly,
+    // but if we don't delete it here, we get leaks. The GStreamer implementation
+    // does the same.
+    va_status = vaDestroyBuffer(va_dpy, slice_param_buf);
+    CHECK_VASTATUS(va_status, "vaDestroyBuffer");
+
     return 0;
 }
 
@@ -1684,14 +1710,19 @@ int H264Encoder::save_codeddata(storage_task task)
 
         AVFrame *frame = avcodec_alloc_frame();
         frame->nb_samples = audio.size() / 2;
-        frame->format = AV_SAMPLE_FMT_FLT;
+        frame->format = AV_SAMPLE_FMT_S32;
         frame->channel_layout = AV_CH_LAYOUT_STEREO;
 
-        unique_ptr<float[]> planar_samples(new float[audio.size()]);
-        avcodec_fill_audio_frame(frame, 2, AV_SAMPLE_FMT_FLTP, (const uint8_t*)planar_samples.get(), audio.size() * sizeof(float), 0);
-        for (int i = 0; i < frame->nb_samples; ++i) {
-            planar_samples[i] = audio[i * 2 + 0];
-            planar_samples[i + frame->nb_samples] = audio[i * 2 + 1];
+        unique_ptr<int32_t[]> int_samples(new int32_t[audio.size()]);
+        avcodec_fill_audio_frame(frame, 2, AV_SAMPLE_FMT_S32, (const uint8_t*)int_samples.get(), audio.size() * sizeof(int32_t), 0);
+        for (int i = 0; i < frame->nb_samples * 2; ++i) {
+            if (audio[i] >= 1.0f) {
+                int_samples[i] = 2147483647;
+            } else if (audio[i] <= -1.0f) {
+                int_samples[i] = -2147483647;
+            } else {
+                int_samples[i] = lrintf(audio[i] * 2147483647.0f);
+            }
         }
 
         AVPacket pkt;
@@ -1706,6 +1737,7 @@ int H264Encoder::save_codeddata(storage_task task)
         }
         // TODO: Delayed frames.
         avcodec_free_frame(&frame);
+        av_free_packet(&pkt);
     }
 
 #if 0
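
Besides switching the sample format, this also removes a mismatch in the old code,
which set frame->format to AV_SAMPLE_FMT_FLT but filled the buffer as
AV_SAMPLE_FMT_FLTP. With AV_SAMPLE_FMT_S32 the frame is packed (interleaved), so the
conversion loop can walk the left/right samples in their original interleaved order
instead of splitting them into per-channel planes. A standalone check of that
property (not part of the patch):

    #include <assert.h>
    #include <libavutil/samplefmt.h>

    // AV_SAMPLE_FMT_FLTP is planar (one buffer per channel); AV_SAMPLE_FMT_S32
    // is packed (channels interleaved in one buffer), which is why the new loop
    // no longer has to deinterleave.
    static void check_sample_layouts()
    {
        assert(av_sample_fmt_is_planar(AV_SAMPLE_FMT_FLTP) == 1);
        assert(av_sample_fmt_is_planar(AV_SAMPLE_FMT_S32) == 0);
    }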
@@ -1819,11 +1851,11 @@ static int print_input()
 H264Encoder::H264Encoder(QSurface *surface, int width, int height, HTTPD *httpd)
        : current_storage_frame(0), surface(surface), httpd(httpd)
 {
-       AVCodec *codec_audio = avcodec_find_encoder(AV_CODEC_ID_MP3);
+       AVCodec *codec_audio = avcodec_find_encoder(AUDIO_OUTPUT_CODEC);
        context_audio = avcodec_alloc_context3(codec_audio);
-       context_audio->bit_rate = 256000;
-       context_audio->sample_rate = 48000;
-       context_audio->sample_fmt = AV_SAMPLE_FMT_FLTP;
+       context_audio->bit_rate = AUDIO_OUTPUT_BIT_RATE;
+       context_audio->sample_rate = OUTPUT_FREQUENCY;
+       context_audio->sample_fmt = AUDIO_OUTPUT_SAMPLE_FMT;
        context_audio->channels = 2;
        context_audio->channel_layout = AV_CH_LAYOUT_STEREO;
        context_audio->time_base = AVRational{1, TIMEBASE};
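
The hard-coded audio and frame-rate parameters move into constants from defs.h.
Judging from the literals they replace in this diff, the relevant entries presumably
look something like the sketch below; AUDIO_OUTPUT_SAMPLE_FMT is an assumption based
on the S32 conversion in save_codeddata(), and AUDIO_OUTPUT_CODEC is not visible here:

    // Hypothetical reconstruction of the defs.h constants used above, inferred
    // from the literal values this diff replaces; not the actual file.
    #define FPS 60
    #define OUTPUT_FREQUENCY 48000
    #define AUDIO_OUTPUT_BIT_RATE 256000
    #define AUDIO_OUTPUT_SAMPLE_FMT AV_SAMPLE_FMT_S32   // assumption; matches the S32 path in save_codeddata()
    // AUDIO_OUTPUT_CODEC is not shown in this diff (previously AV_CODEC_ID_MP3 was hard-coded).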
@@ -1855,7 +1887,7 @@ H264Encoder::H264Encoder(QSurface *surface, int width, int height, HTTPD *httpd)
 
        copy_thread = std::thread([this]{
                //SDL_GL_MakeCurrent(window, context);
-               QOpenGLContext *context = create_context();
+               QOpenGLContext *context = create_context(this->surface);
                eglBindAPI(EGL_OPENGL_API);
                if (!make_current(context, this->surface)) {
                        printf("display=%p surface=%p context=%p curr=%p err=%d\n", eglGetCurrentDisplay(), this->surface, context, eglGetCurrentContext(),