Factor out rewinding code in its own member function.
[nageru] / ffmpeg_capture.cpp
#include "ffmpeg_capture.h"

#include <assert.h>
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/imgutils.h>
#include <libavutil/mem.h>
#include <libavutil/pixfmt.h>
#include <libswscale/swscale.h>
}

#include <chrono>
#include <cstdint>
#include <utility>
#include <vector>

#include "bmusb/bmusb.h"
#include "ffmpeg_raii.h"
#include "flags.h"
#include "image_input.h"

#define FRAME_SIZE (8 << 20)  // 8 MB.

using namespace std;
using namespace std::chrono;
using namespace bmusb;

namespace {

steady_clock::time_point compute_frame_start(int64_t frame_pts, int64_t pts_origin, const AVRational &video_timebase, const steady_clock::time_point &origin, double rate)
{
        const duration<double> pts((frame_pts - pts_origin) * double(video_timebase.num) / double(video_timebase.den));
        return origin + duration_cast<steady_clock::duration>(pts / rate);
}
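
// Worked example (with made-up numbers): given a 1/90000 video timebase,
// pts_origin = 0 and rate = 1.0, a frame with pts = 270000 is scheduled
// (270000 - 0) * 1/90000 = 3.0 seconds after <origin> on the steady clock.
// Playing at rate = 2.0 halves that to 1.5 seconds, since the pts duration
// is divided by the playback rate before being added to <origin>.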

}  // namespace

FFmpegCapture::FFmpegCapture(const string &filename, unsigned width, unsigned height)
        : filename(filename), width(width), height(height)
{
        // Not really used for anything.
        description = "Video: " + filename;
}

FFmpegCapture::~FFmpegCapture()
{
        if (has_dequeue_callbacks) {
                dequeue_cleanup_callback();
        }
}

void FFmpegCapture::configure_card()
{
        if (video_frame_allocator == nullptr) {
                owned_video_frame_allocator.reset(new MallocFrameAllocator(FRAME_SIZE, NUM_QUEUED_VIDEO_FRAMES));
                set_video_frame_allocator(owned_video_frame_allocator.get());
        }
        if (audio_frame_allocator == nullptr) {
                owned_audio_frame_allocator.reset(new MallocFrameAllocator(65536, NUM_QUEUED_AUDIO_FRAMES));
                set_audio_frame_allocator(owned_audio_frame_allocator.get());
        }
}
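
// Rough sizing note: FRAME_SIZE is 8 MB per queued video frame, which fits a
// 1920x1080 RGBA frame (1920 * 1080 * 4 ≈ 7.9 MiB) with a little headroom,
// but not e.g. 4K RGBA. The 64 kB audio buffers are more than enough, since
// no audio samples are actually produced below.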

void FFmpegCapture::start_bm_capture()
{
        if (running) {
                return;
        }
        running = true;
        producer_thread_should_quit.unquit();
        producer_thread = thread(&FFmpegCapture::producer_thread_func, this);
}

void FFmpegCapture::stop_dequeue_thread()
{
        if (!running) {
                return;
        }
        running = false;
        producer_thread_should_quit.quit();
        producer_thread.join();
}

std::map<uint32_t, VideoMode> FFmpegCapture::get_available_video_modes() const
{
        // Note: This will never really be shown in the UI.
        VideoMode mode;

        char buf[256];
        snprintf(buf, sizeof(buf), "%ux%u", width, height);
        mode.name = buf;

        mode.autodetect = false;
        mode.width = width;
        mode.height = height;
        mode.frame_rate_num = 60;
        mode.frame_rate_den = 1;
        mode.interlaced = false;

        return {{ 0, mode }};
}

void FFmpegCapture::producer_thread_func()
{
        char thread_name[16];
        snprintf(thread_name, sizeof(thread_name), "FFmpeg_C_%d", card_index);
        pthread_setname_np(pthread_self(), thread_name);

        while (!producer_thread_should_quit.should_quit()) {
                string pathname = search_for_file(filename);
                if (pathname.empty()) {
                        fprintf(stderr, "%s not found, sleeping one second and trying again...\n", filename.c_str());
                        producer_thread_should_quit.sleep_for(seconds(1));
                        continue;
                }
                if (!play_video(pathname)) {
                        // Error.
                        fprintf(stderr, "Error when playing %s, sleeping one second and trying again...\n", pathname.c_str());
                        producer_thread_should_quit.sleep_for(seconds(1));
                        continue;
                }

                // Probably just EOF, will exit the loop above on next test.
        }

        if (has_dequeue_callbacks) {
                dequeue_cleanup_callback();
                has_dequeue_callbacks = false;
        }
}
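
// A typical caller-side lifecycle might look roughly like the sketch below;
// the actual call sites live elsewhere in Nageru and are not part of this
// file, and set_frame_callback() is inherited from bmusb::CaptureInterface:
//
//   FFmpegCapture capture("/path/to/file.mp4", 1280, 720);
//   capture.set_frame_callback(...);
//   capture.configure_card();
//   capture.start_bm_capture();
//   ...
//   capture.stop_dequeue_thread();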

bool FFmpegCapture::play_video(const string &pathname)
{
        auto format_ctx = avformat_open_input_unique(pathname.c_str(), nullptr, nullptr);
        if (format_ctx == nullptr) {
                fprintf(stderr, "%s: Error opening file\n", pathname.c_str());
                return false;
        }

        if (avformat_find_stream_info(format_ctx.get(), nullptr) < 0) {
                fprintf(stderr, "%s: Error finding stream info\n", pathname.c_str());
                return false;
        }

        int video_stream_index = -1, audio_stream_index = -1;
        AVRational video_timebase{ 1, 1 };
        for (unsigned i = 0; i < format_ctx->nb_streams; ++i) {
                if (format_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO &&
                    video_stream_index == -1) {
                        video_stream_index = i;
                        video_timebase = format_ctx->streams[i]->time_base;
                }
                if (format_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO &&
                    audio_stream_index == -1) {
                        audio_stream_index = i;
                }
        }
        if (video_stream_index == -1) {
                fprintf(stderr, "%s: No video stream found\n", pathname.c_str());
                return false;
        }

        const AVCodecParameters *codecpar = format_ctx->streams[video_stream_index]->codecpar;
        AVCodecContextWithDeleter codec_ctx = avcodec_alloc_context3_unique(nullptr);
        if (avcodec_parameters_to_context(codec_ctx.get(), codecpar) < 0) {
                fprintf(stderr, "%s: Cannot fill codec parameters\n", pathname.c_str());
                return false;
        }
        AVCodec *codec = avcodec_find_decoder(codecpar->codec_id);
        if (codec == nullptr) {
                fprintf(stderr, "%s: Cannot find decoder\n", pathname.c_str());
                return false;
        }
        if (avcodec_open2(codec_ctx.get(), codec, nullptr) < 0) {
                fprintf(stderr, "%s: Cannot open decoder\n", pathname.c_str());
                return false;
        }
        unique_ptr<AVCodecContext, decltype(avcodec_close)*> codec_ctx_cleanup(
                codec_ctx.get(), avcodec_close);

        internal_rewind();
        double rate = 1.0;

        unique_ptr<SwsContext, decltype(sws_freeContext)*> sws_ctx(nullptr, sws_freeContext);
        int sws_last_width = -1, sws_last_height = -1;

        // Main loop.
        while (!producer_thread_should_quit.should_quit()) {
                // Process any queued commands from other threads.
                vector<QueuedCommand> commands;
                {
                        lock_guard<mutex> lock(queue_mu);
                        swap(commands, command_queue);
                }
                for (const QueuedCommand &cmd : commands) {
                        switch (cmd.command) {
                        case QueuedCommand::REWIND:
                                if (av_seek_frame(format_ctx.get(), /*stream_index=*/-1, /*timestamp=*/0, /*flags=*/0) < 0) {
                                        fprintf(stderr, "%s: Rewind failed, stopping play.\n", pathname.c_str());
                                }
                                internal_rewind();
                                break;

                        case QueuedCommand::CHANGE_RATE:
                                start = next_frame_start;
                                pts_origin = last_pts;
                                rate = cmd.new_rate;
                                break;
                        }
                }
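
                // The commands handled above are pushed onto command_queue (under
                // queue_mu) by other threads; the member functions that enqueue them
                // are declared in ffmpeg_capture.h and not shown in this file.
                // Conceptually, queueing a rewind might look roughly like this
                // (the QueuedCommand field order is assumed here):
                //
                //   lock_guard<mutex> lock(queue_mu);
                //   command_queue.push_back(QueuedCommand{ QueuedCommand::REWIND });
                //
                // The producer thread then picks the command up at the top of its
                // next iteration of this loop.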

                // Read packets until we have a frame or there are none left.
                int frame_finished = 0;
                AVFrameWithDeleter frame = av_frame_alloc_unique();
                bool eof = false;
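                // This loop uses FFmpeg's send/receive decoding API:
                // avcodec_send_packet() feeds compressed packets to the decoder, and
                // avcodec_receive_frame() returns 0 once a decoded frame is available,
                // or AVERROR(EAGAIN) if the decoder needs more input first.
                // Packets from non-video streams are simply skipped.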
                do {
                        AVPacket pkt;
                        unique_ptr<AVPacket, decltype(av_packet_unref)*> pkt_cleanup(
                                &pkt, av_packet_unref);
                        av_init_packet(&pkt);
                        pkt.data = nullptr;
                        pkt.size = 0;
                        if (av_read_frame(format_ctx.get(), &pkt) == 0) {
                                if (pkt.stream_index != video_stream_index) {
                                        // Ignore audio for now.
                                        continue;
                                }
                                if (avcodec_send_packet(codec_ctx.get(), &pkt) < 0) {
                                        fprintf(stderr, "%s: Cannot send packet to codec.\n", pathname.c_str());
                                        return false;
                                }
                        } else {
                                eof = true;  // Or error, but ignore that for the time being.
                        }

                        int err = avcodec_receive_frame(codec_ctx.get(), frame.get());
                        if (err == 0) {
                                frame_finished = true;
                                break;
                        } else if (err != AVERROR(EAGAIN)) {
                                fprintf(stderr, "%s: Cannot receive frame from codec.\n", pathname.c_str());
                                return false;
                        }
                } while (!eof);

                if (!frame_finished) {
                        // EOF. Loop back to the start if we can.
                        if (av_seek_frame(format_ctx.get(), /*stream_index=*/-1, /*timestamp=*/0, /*flags=*/0) < 0) {
                                fprintf(stderr, "%s: Rewind failed, not looping.\n", pathname.c_str());
                                return true;
                        }
                        internal_rewind();
                        continue;
                }

                if (sws_ctx == nullptr || sws_last_width != frame->width || sws_last_height != frame->height) {
                        sws_ctx.reset(
                                sws_getContext(frame->width, frame->height, (AVPixelFormat)frame->format,
                                        width, height, AV_PIX_FMT_RGBA,
                                        SWS_BICUBIC, nullptr, nullptr, nullptr));
                        sws_last_width = frame->width;
                        sws_last_height = frame->height;
                }
                if (sws_ctx == nullptr) {
                        fprintf(stderr, "%s: Could not create scaler context\n", pathname.c_str());
                        return false;
                }

                VideoFormat video_format;
                video_format.width = width;
                video_format.height = height;
                video_format.stride = width * 4;
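                // The nominal frame rate is derived from the packet duration:
                // frame_rate_nom / frame_rate_den works out to
                // timebase.den / (pkt_duration * timebase.num) frames per second.
                // E.g., a 1/90000 timebase and a packet duration of 3600 ticks gives
                // 90000 / 3600 = 25 fps. If the container does not provide a
                // per-packet duration, pkt_duration is 0 and frame_rate_den comes
                // out as zero.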
                video_format.frame_rate_nom = video_timebase.den;
                video_format.frame_rate_den = av_frame_get_pkt_duration(frame.get()) * video_timebase.num;
                video_format.has_signal = true;
                video_format.is_connected = true;

                next_frame_start = compute_frame_start(frame->pts, pts_origin, video_timebase, start, rate);
                last_pts = frame->pts;

                FrameAllocator::Frame video_frame = video_frame_allocator->alloc_frame();
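                // sws_scale() below writes into the bmusb-allocated buffer as a single
                // packed RGBA plane: pic_data[0] points at the destination pixels and
                // linesizes[0] is the byte stride (width * 4); the remaining plane
                // entries stay null/zero because AV_PIX_FMT_RGBA has no separate
                // chroma planes.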
                if (video_frame.data != nullptr) {
                        uint8_t *pic_data[4] = { video_frame.data, nullptr, nullptr, nullptr };
                        int linesizes[4] = { int(video_format.stride), 0, 0, 0 };
                        sws_scale(sws_ctx.get(), frame->data, frame->linesize, 0, frame->height, pic_data, linesizes);
                        video_frame.len = video_format.stride * height;
                        video_frame.received_timestamp = next_frame_start;
                }

                FrameAllocator::Frame audio_frame;
                AudioFormat audio_format;
                audio_format.bits_per_sample = 32;
                audio_format.num_channels = 8;

                producer_thread_should_quit.sleep_until(next_frame_start);
                frame_callback(timecode++,
                        video_frame, 0, video_format,
                        audio_frame, 0, audio_format);
        }
        return true;
}

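// Resets the pts and wall-clock origins so that the next decoded frame (whose
// pts is measured relative to pts_origin) is scheduled at "now"; used both when
// a file starts playing and after any seek back to the beginning.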
void FFmpegCapture::internal_rewind()
{
        pts_origin = last_pts = 0;
        start = next_frame_start = steady_clock::now();
}