#include "video_encoder.h"

#include <assert.h>
#include <stdio.h>
#include <time.h>
#include <unistd.h>

#include <mutex>
#include <string>
#include <thread>

#include "defs.h"
#include "flags.h"
#include "httpd.h"
#include "timebase.h"
#include "quicksync_encoder.h"
#include "x264_encoder.h"

using namespace std;
using namespace movit;

namespace {

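// Construct the filename for a new local disk dump, based on the current
// local time and the frame number of the cut.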
string generate_local_dump_filename(int frame)
{
        time_t now = time(NULL);
        tm now_tm;
        localtime_r(&now, &now_tm);

        char timestamp[256];
        strftime(timestamp, sizeof(timestamp), "%F-%T%z", &now_tm);

        // Use the frame number to disambiguate between two cuts starting
        // on the same second.
        char filename[256];
        snprintf(filename, sizeof(filename), "%s%s-f%02d%s",
                LOCAL_DUMP_PREFIX, timestamp, frame % 100, LOCAL_DUMP_SUFFIX);
        return filename;
}

}  // namespace

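// Set up the audio encoder for the stream (either the default codec or the one
// given on the command line), optionally an x264 encoder for the stream video,
// a QuickSync encoder writing to the first local dump file, and finally the
// stream mux that feeds the HTTP output.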
VideoEncoder::VideoEncoder(ResourcePool *resource_pool, QSurface *surface, const std::string &va_display, int width, int height, HTTPD *httpd, DiskSpaceEstimator *disk_space_estimator)
        : resource_pool(resource_pool), surface(surface), va_display(va_display), width(width), height(height), httpd(httpd), disk_space_estimator(disk_space_estimator)
{
        oformat = av_guess_format(global_flags.stream_mux_name.c_str(), nullptr, nullptr);
        assert(oformat != nullptr);
        if (global_flags.stream_audio_codec_name.empty()) {
                stream_audio_encoder.reset(new AudioEncoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE, oformat));
        } else {
                stream_audio_encoder.reset(new AudioEncoder(global_flags.stream_audio_codec_name, global_flags.stream_audio_codec_bitrate, oformat));
        }
        if (global_flags.x264_video_to_http) {
                x264_encoder.reset(new X264Encoder(oformat));
        }

        string filename = generate_local_dump_filename(/*frame=*/0);
        quicksync_encoder.reset(new QuickSyncEncoder(filename, resource_pool, surface, va_display, width, height, oformat, x264_encoder.get(), disk_space_estimator));

        open_output_stream();
        stream_audio_encoder->add_mux(stream_mux.get());
        quicksync_encoder->set_stream_mux(stream_mux.get());
        if (global_flags.x264_video_to_http) {
                x264_encoder->set_mux(stream_mux.get());
        }
}

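// Shut down the current QuickSync encoder, then wait until no encoders are
// left in shutdown before letting the destructor finish.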
VideoEncoder::~VideoEncoder()
{
        quicksync_encoder.reset(nullptr);
        while (quicksync_encoders_in_shutdown.load() > 0) {
                usleep(10000);
        }
}

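// Cut the local disk recording over to a fresh file. The HTTP stream keeps
// running; its mux is only plugged briefly while the old encoder shuts down.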
void VideoEncoder::do_cut(int frame)
{
        string filename = generate_local_dump_filename(frame);
        printf("Starting new recording: %s\n", filename.c_str());

        // Do the shutdown of the old encoder in a separate thread, since it can
        // take some time (it needs to wait for all the frames in the queue to be
        // done encoding, for one) and we are running on the main mixer thread.
        // However, since both encoders could then be sending packets at the same
        // time, pts could arrive out of order at the stream mux, so we need to
        // plug it until the shutdown is complete.
        stream_mux->plug();
        lock_guard<mutex> lock(qs_mu);
        QuickSyncEncoder *old_encoder = quicksync_encoder.release();  // When we go C++14, we can use move capture instead.
        thread([old_encoder, this]{
                old_encoder->shutdown();
                stream_mux->unplug();

                // We cannot delete the encoder here, as this thread has no OpenGL context.
                // We'll deal with it in begin_frame().
                lock_guard<mutex> lock(qs_mu);
                qs_needing_cleanup.emplace_back(old_encoder);
        }).detach();

        quicksync_encoder.reset(new QuickSyncEncoder(filename, resource_pool, surface, va_display, width, height, oformat, x264_encoder.get(), disk_space_estimator));
        quicksync_encoder->set_stream_mux(stream_mux.get());
}

void VideoEncoder::change_x264_bitrate(unsigned rate_kbit)
{
        x264_encoder->change_bitrate(rate_kbit);
}

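// Hand the same uncompressed audio to both encoders: the QuickSync encoder
// gets it for the local dump file, while the stream audio encoder encodes it
// for the HTTP stream, with the pts offset by the QuickSync encoder's global
// delay.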
void VideoEncoder::add_audio(int64_t pts, std::vector<float> audio)
{
        lock_guard<mutex> lock(qs_mu);
        quicksync_encoder->add_audio(pts, audio);
        stream_audio_encoder->encode_audio(audio, pts + quicksync_encoder->global_delay());
}

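// begin_frame() and end_frame() forward to the current QuickSync encoder under
// the lock, so a concurrent do_cut() cannot swap the encoder out from under us.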
bool VideoEncoder::begin_frame(GLuint *y_tex, GLuint *cbcr_tex)
{
        lock_guard<mutex> lock(qs_mu);
        qs_needing_cleanup.clear();  // We have an OpenGL context here and are called regularly, so this is a good place to delete old encoders.
        return quicksync_encoder->begin_frame(y_tex, cbcr_tex);
}

RefCountedGLsync VideoEncoder::end_frame(int64_t pts, int64_t duration, const std::vector<RefCountedFrame> &input_frames)
{
        lock_guard<mutex> lock(qs_mu);
        return quicksync_encoder->end_frame(pts, duration, input_frames);
}

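// Set up the stream mux on top of an AVFormatContext with custom I/O, so that
// everything the mux writes ends up in write_packet2() (and from there in the
// HTTPD) instead of in a file.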
void VideoEncoder::open_output_stream()
{
        AVFormatContext *avctx = avformat_alloc_context();
        avctx->oformat = oformat;

        uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
        avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
        avctx->pb->write_data_type = &VideoEncoder::write_packet2_thunk;
        avctx->pb->ignore_boundary_point = 1;

        Mux::Codec video_codec;
        if (global_flags.uncompressed_video_to_http) {
                video_codec = Mux::CODEC_NV12;
        } else {
                video_codec = Mux::CODEC_H264;
        }

        avctx->flags = AVFMT_FLAG_CUSTOM_IO;

        string video_extradata;
        if (global_flags.x264_video_to_http) {
                video_extradata = x264_encoder->get_global_headers();
        }

        int time_base = global_flags.stream_coarse_timebase ? COARSE_TIMEBASE : TIMEBASE;
        stream_mux.reset(new Mux(avctx, width, height, video_codec, video_extradata, stream_audio_encoder->get_codec_parameters().get(), time_base,
                /*write_callback=*/nullptr));
}

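// Static trampoline for the AVIO write callback; recovers the VideoEncoder
// instance from the opaque pointer and forwards to write_packet2().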
int VideoEncoder::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
{
        VideoEncoder *video_encoder = (VideoEncoder *)opaque;
        return video_encoder->write_packet2(buf, buf_size, type, time);
}

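// Receive muxed data from the stream mux. Header bytes are accumulated and
// installed as the HTTP stream header via HTTPD::set_header(); everything else
// is forwarded to the HTTPD as stream data, flagging whether this chunk starts
// at a sync point.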
int VideoEncoder::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
{
        if (type == AVIO_DATA_MARKER_HEADER) {
                stream_mux_header.append((char *)buf, buf_size);
                httpd->set_header(stream_mux_header);
        } else {
                httpd->add_data((char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT);
        }
        return buf_size;
}