// nageru: ffmpeg_capture.cpp (from the commit "Add a Lua function to rewind the video").
#include "ffmpeg_capture.h"

#include <assert.h>
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/imgutils.h>
#include <libavutil/mem.h>
#include <libavutil/pixfmt.h>
#include <libswscale/swscale.h>
}

#include <chrono>
#include <cstdint>
#include <utility>
#include <vector>

#include "bmusb/bmusb.h"
#include "ffmpeg_raii.h"
#include "flags.h"
#include "image_input.h"

#define FRAME_SIZE (8 << 20)  // 8 MB.

using namespace std;
using namespace std::chrono;
using namespace bmusb;

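// FFmpegCapture decodes a video file with FFmpeg and presents it to the rest
// of Nageru as if it were a capture card. <width> and <height> give the output
// resolution; every decoded frame is scaled to that size before delivery.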
FFmpegCapture::FFmpegCapture(const string &filename, unsigned width, unsigned height)
        : filename(filename), width(width), height(height)
{
        // Not really used for anything.
        description = "Video: " + filename;
}

FFmpegCapture::~FFmpegCapture()
{
        if (has_dequeue_callbacks) {
                dequeue_cleanup_callback();
        }
}

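// Install default malloc-backed frame allocators for video and audio,
// unless the caller has already provided its own.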
void FFmpegCapture::configure_card()
{
        if (video_frame_allocator == nullptr) {
                owned_video_frame_allocator.reset(new MallocFrameAllocator(FRAME_SIZE, NUM_QUEUED_VIDEO_FRAMES));
                set_video_frame_allocator(owned_video_frame_allocator.get());
        }
        if (audio_frame_allocator == nullptr) {
                owned_audio_frame_allocator.reset(new MallocFrameAllocator(65536, NUM_QUEUED_AUDIO_FRAMES));
                set_audio_frame_allocator(owned_audio_frame_allocator.get());
        }
}

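// The "capture" is driven by a producer thread that decodes the file and
// pushes frames through the regular frame callback; the bmusb-style method
// names are kept for consistency with the other capture interfaces.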
void FFmpegCapture::start_bm_capture()
{
        if (running) {
                return;
        }
        running = true;
        producer_thread_should_quit = false;
        producer_thread = thread(&FFmpegCapture::producer_thread_func, this);
}

void FFmpegCapture::stop_dequeue_thread()
{
        if (!running) {
                return;
        }
        running = false;
        producer_thread_should_quit = true;
        producer_thread.join();
}

std::map<uint32_t, VideoMode> FFmpegCapture::get_available_video_modes() const
{
        // Note: This will never really be shown in the UI.
        VideoMode mode;

        char buf[256];
        snprintf(buf, sizeof(buf), "%ux%u", width, height);
        mode.name = buf;

        mode.autodetect = false;
        mode.width = width;
        mode.height = height;
        mode.frame_rate_num = 60;
        mode.frame_rate_den = 1;
        mode.interlaced = false;

        return {{ 0, mode }};
}

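// Producer thread: wait for the file to appear, play it with play_video(),
// and retry after a short sleep on errors or if the file is missing,
// until we are asked to quit.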
void FFmpegCapture::producer_thread_func()
{
        char thread_name[16];
        snprintf(thread_name, sizeof(thread_name), "FFmpeg_C_%d", card_index);
        pthread_setname_np(pthread_self(), thread_name);

        while (!producer_thread_should_quit) {
                string pathname = search_for_file(filename);
                if (pathname.empty()) {
                        fprintf(stderr, "%s not found, sleeping one second and trying again...\n", filename.c_str());
                        sleep(1);
                        continue;
                }
                if (!play_video(pathname)) {
                        // Error.
                        fprintf(stderr, "Error when playing %s, sleeping one second and trying again...\n", pathname.c_str());
                        sleep(1);
                        continue;
                }

                // Probably just EOF, will exit the loop above on next test.
        }

        if (has_dequeue_callbacks) {
                dequeue_cleanup_callback();
                has_dequeue_callbacks = false;
        }
}

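// Open <pathname>, decode its first video stream, scale each frame to the
// configured output resolution (RGBA) and hand it to the frame callback,
// paced in real time against the stream's timestamps. Returns false on
// error; true means we stopped cleanly (quit request, or EOF where rewinding
// the file failed).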
bool FFmpegCapture::play_video(const string &pathname)
{
        auto format_ctx = avformat_open_input_unique(pathname.c_str(), nullptr, nullptr);
        if (format_ctx == nullptr) {
                fprintf(stderr, "%s: Error opening file\n", pathname.c_str());
                return false;
        }

        if (avformat_find_stream_info(format_ctx.get(), nullptr) < 0) {
                fprintf(stderr, "%s: Error finding stream info\n", pathname.c_str());
                return false;
        }

        int video_stream_index = -1, audio_stream_index = -1;
        AVRational video_timebase{ 1, 1 };
        for (unsigned i = 0; i < format_ctx->nb_streams; ++i) {
                if (format_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO &&
                    video_stream_index == -1) {
                        video_stream_index = i;
                        video_timebase = format_ctx->streams[i]->time_base;
                }
                if (format_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO &&
                    audio_stream_index == -1) {
                        audio_stream_index = i;
                }
        }
        if (video_stream_index == -1) {
                fprintf(stderr, "%s: No video stream found\n", pathname.c_str());
                return false;
        }

        const AVCodecParameters *codecpar = format_ctx->streams[video_stream_index]->codecpar;
        AVCodecContextWithDeleter codec_ctx = avcodec_alloc_context3_unique(nullptr);
        if (avcodec_parameters_to_context(codec_ctx.get(), codecpar) < 0) {
                fprintf(stderr, "%s: Cannot fill codec parameters\n", pathname.c_str());
                return false;
        }
        AVCodec *codec = avcodec_find_decoder(codecpar->codec_id);
        if (codec == nullptr) {
                fprintf(stderr, "%s: Cannot find decoder\n", pathname.c_str());
                return false;
        }
        if (avcodec_open2(codec_ctx.get(), codec, nullptr) < 0) {
                fprintf(stderr, "%s: Cannot open decoder\n", pathname.c_str());
                return false;
        }
        unique_ptr<AVCodecContext, decltype(avcodec_close)*> codec_ctx_cleanup(
                codec_ctx.get(), avcodec_close);

        steady_clock::time_point start = steady_clock::now();

        unique_ptr<SwsContext, decltype(sws_freeContext)*> sws_ctx(nullptr, sws_freeContext);
        int sws_last_width = -1, sws_last_height = -1;

        // Main loop.
        while (!producer_thread_should_quit) {
                // Process any queued commands from other threads.
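                // Currently the only command is REWIND (presumably queued by the
                // new Lua rewind binding), which seeks back to the start of the
                // file and resets the pacing clock.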
                vector<QueuedCommand> commands;
                {
                        lock_guard<mutex> lock(queue_mu);
                        swap(commands, command_queue);
                }
                for (const QueuedCommand &cmd : commands) {
                        if (cmd.command == QueuedCommand::REWIND) {
                                if (av_seek_frame(format_ctx.get(), /*stream_index=*/-1, /*timestamp=*/0, /*flags=*/0) < 0) {
                                        fprintf(stderr, "%s: Rewind failed, stopping play.\n", pathname.c_str());
                                }
                                start = steady_clock::now();
                                continue;
                        }
                }

                // Read packets until we have a frame or there are none left.
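                // This uses the decoupled decode API: packets from the video stream
                // are fed to avcodec_send_packet(), and avcodec_receive_frame()
                // returns AVERROR(EAGAIN) until the decoder has enough input to
                // produce a frame.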
                int frame_finished = 0;
                AVFrameWithDeleter frame = av_frame_alloc_unique();
                bool eof = false;
                do {
                        AVPacket pkt;
                        unique_ptr<AVPacket, decltype(av_packet_unref)*> pkt_cleanup(
                                &pkt, av_packet_unref);
                        av_init_packet(&pkt);
                        pkt.data = nullptr;
                        pkt.size = 0;
                        if (av_read_frame(format_ctx.get(), &pkt) == 0) {
                                if (pkt.stream_index != video_stream_index) {
                                        // Ignore audio for now.
                                        continue;
                                }
                                if (avcodec_send_packet(codec_ctx.get(), &pkt) < 0) {
                                        fprintf(stderr, "%s: Cannot send packet to codec.\n", pathname.c_str());
                                        return false;
                                }
                        } else {
                                eof = true;  // Or error, but ignore that for the time being.
                        }

                        int err = avcodec_receive_frame(codec_ctx.get(), frame.get());
                        if (err == 0) {
                                frame_finished = true;
                                break;
                        } else if (err != AVERROR(EAGAIN)) {
                                fprintf(stderr, "%s: Cannot receive frame from codec.\n", pathname.c_str());
                                return false;
                        }
                } while (!eof);

                if (!frame_finished) {
                        // EOF. Loop back to the start if we can.
                        if (av_seek_frame(format_ctx.get(), /*stream_index=*/-1, /*timestamp=*/0, /*flags=*/0) < 0) {
                                fprintf(stderr, "%s: Rewind failed, not looping.\n", pathname.c_str());
                                return true;
                        }
                        start = steady_clock::now();
                        continue;
                }

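                // (Re)create the scaler on the first frame, or whenever the source
                // resolution changes mid-stream. Output is always RGBA at the
                // configured width and height.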
                if (sws_ctx == nullptr || sws_last_width != frame->width || sws_last_height != frame->height) {
                        sws_ctx.reset(
                                sws_getContext(frame->width, frame->height, (AVPixelFormat)frame->format,
                                        width, height, AV_PIX_FMT_RGBA,
                                        SWS_BICUBIC, nullptr, nullptr, nullptr));
                        sws_last_width = frame->width;
                        sws_last_height = frame->height;
                }
                if (sws_ctx == nullptr) {
                        fprintf(stderr, "%s: Could not create scaler context\n", pathname.c_str());
                        return false;
                }

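                // Derive a per-frame rate from the stream time base and this
                // frame's packet duration, so variable-frame-rate input still
                // reports something sensible downstream.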
                VideoFormat video_format;
                video_format.width = width;
                video_format.height = height;
                video_format.stride = width * 4;
                video_format.frame_rate_nom = video_timebase.den;
                video_format.frame_rate_den = av_frame_get_pkt_duration(frame.get()) * video_timebase.num;
                video_format.has_signal = true;
                video_format.is_connected = true;

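                // Convert the frame's PTS into a wall-clock deadline relative to
                // when playback (or the last rewind) started, so frames are
                // delivered at roughly real-time speed.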
                const duration<double> pts(frame->pts * double(video_timebase.num) / double(video_timebase.den));
                const steady_clock::time_point frame_start = start + duration_cast<steady_clock::duration>(pts);

                FrameAllocator::Frame video_frame = video_frame_allocator->alloc_frame();
                if (video_frame.data != nullptr) {
                        uint8_t *pic_data[4] = { video_frame.data, nullptr, nullptr, nullptr };
                        int linesizes[4] = { int(video_format.stride), 0, 0, 0 };
                        sws_scale(sws_ctx.get(), frame->data, frame->linesize, 0, frame->height, pic_data, linesizes);
                        video_frame.len = video_format.stride * height;
                        video_frame.received_timestamp = frame_start;
                }

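                // Audio is not decoded yet; pass an empty audio frame with a fixed
                // dummy format so the callback signature matches the other cards.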
                FrameAllocator::Frame audio_frame;
                AudioFormat audio_format;
                audio_format.bits_per_sample = 32;
                audio_format.num_channels = 8;

                this_thread::sleep_until(frame_start);
                frame_callback(timecode++,
                        video_frame, 0, video_format,
                        audio_frame, 0, audio_format);
        }
        return true;
}