QuittableSleeper should_quit;
MuxMetrics stream_mux_metrics;
+//unsigned frameno = 0;
+// Wall-clock time (Unix seconds) corresponding to pts 0 of the input;
+// taken from argv in main(), used to sync Bodet scoreboard messages to the stream.
+double video_start_time = 0;
+// Nominal NTSC frame rate (59.94); only referenced by commented-out code below.
+double fps = 60.0 / 1.001;
+// One timestamped message from the Bodet scoreboard log.
+struct BodetMsg {
+	double t;         // absolute capture time (Unix seconds)
+	std::string msg;  // decoded payload bytes
+};
+std::vector<BodetMsg> bodet_msgs;  // all messages, in capture order
+unsigned cur_msg = 0;              // index of the next unconsumed message
+// Overlay state pushed into the CEF-rendered scoreboard page.
+string team1, team2, team1color, team2color;
+int score1 = 0, score2 = 0, bodet_clock = 0;  // bodet_clock in seconds; 0 = clock hidden
+std::string output_filename = "out.mp4";
+
namespace {
int write_packet(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
	avctx->oformat = const_cast<decltype(avctx->oformat)>(oformat); // const_cast is a hack to work in FFmpeg both before and after 5.0.
	uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
-	avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, httpd, nullptr, nullptr, nullptr);
-	avctx->pb->write_data_type = &write_packet;
-	avctx->pb->ignore_boundary_point = 1;
-	avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+	//avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, httpd, nullptr, nullptr, nullptr);
+	//avctx->pb->write_data_type = &write_packet;
+	//avctx->pb->ignore_boundary_point = 1;
+	//avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+	// NOTE(review): output now goes straight to a local file instead of the HTTP
+	// stream mux. The av_malloc()ed MUX_BUFFER_SIZE buffer above is no longer
+	// consumed by avio_alloc_context() and therefore leaks, and avio_open()'s
+	// return value is unchecked — TODO confirm and handle open failure.
+	avio_open(&avctx->pb, output_filename.c_str(), AVIO_FLAG_WRITE);
	string video_extradata = x264_encoder->get_global_headers();
	return mux;
}
+uint8_t *vfd = nullptr;  // NV12 output buffer for convert_stuff(); allocated lazily, never freed
+uint8_t cefimg[1280 * 720 * 4];  // latest CEF overlay frame, packed BGRA
+// NOTE(review): SwsContext is used here before <libswscale/swscale.h> is
+// included below — presumably an earlier include already declares it; verify.
+SwsContext *sws = nullptr;  // lazily-created BGRA -> NV12 scaler
+
+extern "C" {
+#include <libswscale/swscale.h>
+}
+
+// Converts one packed-BGRA frame into the global NV12 buffer `vfd` via swscale.
+// The scaler and buffer are created lazily from the first frame's dimensions;
+// a mid-stream resolution change is not handled (sws/vfd are never recreated).
+void convert_stuff(const VideoFormat &video_format, const uint8_t *ptr)
+{
+	if (sws == nullptr) {
+		sws = sws_getContext(video_format.width, video_format.height, AV_PIX_FMT_BGRA,
+			video_format.width, video_format.height, AV_PIX_FMT_NV12,
+			SWS_BICUBIC, nullptr, nullptr, nullptr);
+		// NV12 needs 1.5 bytes/pixel; 2x is allocated, so the tail is slack.
+		vfd = new uint8_t[video_format.width * video_format.height * 2];
+	}
+
+	// Single packed source plane, width * 4 bytes per row (BGRA).
+	uint8_t *src_pic_data[4] = { nullptr, nullptr, nullptr, nullptr };
+	int src_linesizes[4] = { 0, 0, 0, 0 };
+	src_pic_data[0] = (uint8_t *)ptr;
+	src_linesizes[0] = video_format.width * 4;
+
+	// NV12 destination: plane 0 = luma, plane 1 = interleaved chroma,
+	// both `width` bytes per row.
+	uint8_t *dst_pic_data[4] = { nullptr, nullptr, nullptr, nullptr };
+	int dst_linesizes[4] = { 0, 0, 0, 0 };
+	dst_pic_data[0] = vfd;
+	dst_linesizes[0] = video_format.width;
+	dst_pic_data[1] = vfd + video_format.width * video_format.height;
+	dst_linesizes[1] = video_format.width;
+
+	sws_scale(sws, src_pic_data, src_linesizes, 0, video_format.height, dst_pic_data, dst_linesizes);
+}
+
+#include <cef_app.h>
+#include <cef_browser.h>
+#include <cef_client.h>
+#include "nageru_cef_app.h"
+
+// Guards browser/browser_ready; recursive because CEF callbacks can re-enter.
+recursive_mutex browser_mutex;
+// 0 = not loaded, 1 = page loaded (OnLoadEnd), 2 = first frame painted.
+int browser_ready = 0;
+
+// Minimal off-screen CEF client: copies painted frames into cefimg and starts
+// scoreboard playback once the page has loaded.
+class KaeruCEFClient : public CefClient, public CefRenderHandler, public CefLoadHandler
+{
+public:
+	KaeruCEFClient() {}
+
+	CefRefPtr<CefRenderHandler> GetRenderHandler() override
+	{
+		return this;
+	}
+
+	CefRefPtr<CefLoadHandler> GetLoadHandler() override
+	{
+		return this;
+	}
+
+	// CefRenderHandler.
+
+	void OnPaint(CefRefPtr<CefBrowser> browser, PaintElementType type, const RectList &dirtyRects, const void *buffer, int width, int height) override
+	{
+		// fprintf(stderr, "onpaint %dx%d\n", width, height);
+		// NOTE(review): assumes width * height <= 1280 * 720 (GetViewRect pins
+		// the view, but nothing re-checks here), and the write into cefimg is
+		// unsynchronized with the blend in add_cef() — confirm.
+		memcpy(cefimg, buffer, width * height * 4); // FIXME lock?
+
+		lock_guard<recursive_mutex> lock(browser_mutex);
+		if (browser_ready == 1)
+			browser_ready = 2;
+	}
+
+	void GetViewRect(CefRefPtr<CefBrowser> browser, CefRect &rect) override
+	{
+		fprintf(stderr, "getviewrect\n");
+		rect = CefRect(0, 0, 1280, 720);
+	}
+
+	// CefLoadHandler.
+
+	void OnLoadEnd(CefRefPtr<CefBrowser> browser, CefRefPtr<CefFrame> frame, int httpStatusCode) override
+	{
+		fprintf(stderr, "onload\n");
+
+		CefString script_url("<theme eval>");
+		int start_line = 1;
+		browser->GetMainFrame()->ExecuteJavaScript("play();", script_url, start_line);
+
+		lock_guard<recursive_mutex> lock(browser_mutex);
+		browser_ready = 1;
+	}
+
+private:
+	// NOTE(review): unused and never initialized — looks like a leftover from
+	// CEFCapture; candidate for removal.
+	CEFCapture *parent;
+
+	IMPLEMENT_REFCOUNTING(KaeruCEFClient);
+};
+
+CefRefPtr<NageruCefApp> cef_app;
+CefRefPtr<CefBrowser> browser;
+unique_ptr<KaeruCEFClient> cef_client;
+
+	// Returns the numeric value of an ASCII digit; any other character
+	// (typically the space padding in Bodet fields) counts as 0.
+	int parse_digit(char ch)
+	{
+		if (ch >= '0' && ch <= '9') {
+			return ch - '0';
+		}
+		return 0;
+	}
+
+	// Parses a two-character, zero/space-padded decimal field (e.g. "07" -> 7).
+	int parse_clock(char ch1, char ch2)
+	{
+		int s1 = parse_digit(ch1);
+		int s2 = parse_digit(ch2);
+		return s1 * 10 + s2;
+	}
+
+
+	// Parses a three-character score field where unused positions are spaces;
+	// the spaces are squeezed out before atoi(), hence "weird".
+	int parse_score_weird(char ch1, char ch2, char ch3)
+	{
+		char str[4];
+		char *ptr = str;
+		if (ch1 != ' ') *ptr++ = ch1;
+		if (ch2 != ' ') *ptr++ = ch2;
+		if (ch3 != ' ') *ptr++ = ch3;
+		*ptr++ = 0;
+
+		return atoi(str);
+	}
+
+// Drives the CEF-rendered scoreboard and alpha-blends it onto one video frame.
+//
+// data: packed BGRA pixels for the frame. NOTE(review): the blend loop below is
+// hard-coded to 1280x720 (matching GetViewRect()); width/height are ignored, so
+// any other frame size reads/writes out of bounds — TODO use width/height.
+// video_pts/video_timebase: used only to compute the frame's absolute time,
+// which decides how many queued Bodet messages to feed into the page.
+//
+// Lazily creates the off-screen browser on first call, then busy-waits until
+// the page has loaded and painted at least once (browser_ready >= 2).
+void add_cef(uint8_t *data, unsigned width, unsigned height, int64_t video_pts, AVRational video_timebase)
+{
+	if (cef_client == nullptr) {
+		cef_client.reset(new KaeruCEFClient);
+
+		//cef_app.reset(new NageruCefApp);
+		cef_app->initialize_cef();
+
+		CefPostTask(TID_UI, new CEFTaskAdapter([&]{
+			lock_guard<recursive_mutex> lock(browser_mutex);
+
+			CefBrowserSettings browser_settings;
+			// browser_settings.web_security = cef_state_t::STATE_DISABLED;
+			browser_settings.webgl = cef_state_t::STATE_DISABLED;
+			browser_settings.windowless_frame_rate = 60.00;
+
+			CefWindowInfo window_info;
+			window_info.SetAsWindowless(0);
+			browser = CefBrowserHost::CreateBrowserSync(window_info, cef_client.get(), "file:///home/sesse/dev/ultimatescore/score.html", browser_settings, nullptr, nullptr);
+		}));
+	}
+
+	{
+		CefString script_url("<theme eval>");
+		int start_line = 1;
+		char buf[256];
+
+		int old_bodet_clock = bodet_clock;
+
+		//fprintf(stderr, "video_pts=%ld timebase = %ld/%ld\n", video_pts, video_timebase.num, video_timebase.den);
+		//double cur_time = video_start_time + video_pts * double(video_timebase.num) / double(video_timebase.den);
+		double cur_time = video_start_time + video_pts / double(TIMEBASE);
+		//double cur_time = video_start_time + (frameno++) / fps;
+		// Replay all scoreboard messages up to this frame's time into the
+		// current clock/score state.
+		while (cur_msg < bodet_msgs.size() && cur_time > bodet_msgs[cur_msg].t) {
+			const string &m = bodet_msgs[cur_msg].msg;
+			// "G10" frames carry clock + both scores. Indices up to m[13] are
+			// read below, so require at least 14 bytes; the old ">= 10" check
+			// allowed out-of-bounds operator[] reads on short messages.
+			if (m.size() >= 14 && m[0] == 'G' && m[1] == '1' && m[2] == '0') {
+				int min = parse_clock(m[4], m[5]);
+				int sec = parse_clock(m[6], m[7]);
+				bodet_clock = min * 60 + sec;
+				score1 = parse_score_weird(m[8], m[9], m[10]);
+				score2 = parse_score_weird(m[11], m[12], m[13]);
+			}
+			++cur_msg;
+		}
+
+		// Build the JSON-ish update script for the scoreboard page.
+		string str = "update('{";
+		snprintf(buf, 256, "\"score1\": %d", score1);
+		str += buf;
+		snprintf(buf, 256, ",\"score2\": %d", score2);
+		str += buf;
+
+		// Debug aid: show a wall clock (UTC+2) as team1.
+		if (false) {
+			int doh = uint64_t(cur_time + 7200) % 86400;
+			snprintf(buf, 256, "%02d:%02d:%02d", doh / 3600, (doh % 3600) / 60, doh % 60);
+			team1 = buf;
+		}
+
+		str += ",\"team1\": \"" + team1 + "\"";
+		str += ",\"team2\": \"" + team2 + "\"";
+		str += ",\"team1color\": \"" + team1color + "\"";
+		str += ",\"team2color\": \"" + team2color + "\"";
+		str += "}');setteams();setcolors();setscore();";
+
+		snprintf(buf, 256, "update_given_clock(%d,'clock');", bodet_clock);
+		str += buf;
+
+		// Show/hide the clock only on 0 <-> nonzero transitions.
+		if (old_bodet_clock == 0 && bodet_clock != 0) {
+			str += "showclock();";
+		} else if (old_bodet_clock != 0 && bodet_clock == 0) {
+			str += "hideclock();";
+		}
+
+		//printf("%s\n", str.c_str());
+
+		// Block until the page is ready, then inject the update.
+		bool ok = false;
+		do {
+			browser_mutex.lock();
+			if (browser_ready >= 2) {
+				browser->GetMainFrame()->ExecuteJavaScript(str, script_url, start_line);
+				browser_mutex.unlock();
+				ok = true;
+			} else {
+				browser_mutex.unlock();
+				printf("Waiting for CEF...\n");
+				usleep(100000);
+			}
+		} while (!ok);
+	}
+
+	// Source-over blend of the overlay (straight alpha) onto the frame, BGRA.
+	unsigned char r0, g0, b0;
+	unsigned char a1, r1, g1, b1;
+	unsigned char *sptr = cefimg;
+	unsigned char *dptr = data;
+	for (int i = 0; i < 1280 * 720; ++i) {
+		//a0 = dptr[3];
+		r0 = dptr[2];
+		g0 = dptr[1];
+		b0 = dptr[0];
+
+		a1 = sptr[3];
+		r1 = sptr[2];
+		g1 = sptr[1];
+		b1 = sptr[0];
+
+		unsigned a = 255;
+		unsigned r = r0 + ((r1 - r0) * a1) / 255;
+		unsigned g = g0 + ((g1 - g0) * a1) / 255;
+		unsigned b = b0 + ((b1 - b0) * a1) / 255;
+
+		sptr += 4;
+		*dptr++ = b;
+		*dptr++ = g;
+		*dptr++ = r;
+		*dptr++ = a;
+	}
+	//memcpy(data, cefimg, 1280*720*4);
+}
+
+// Crop window (absolute Unix seconds): only frames with
+// crop_start <= t < crop_end are encoded; everything else is skipped.
+double crop_start = 0.0;
+double crop_end = HUGE_VAL;
+
+bool within(double t)
+{
+	return t >= crop_start && t < crop_end;
+}
+
+string last_ts;  // last HH:MM:SS line printed, so each second is logged once
+
+// Rebase offsets applied when the capture switches to the next input file,
+// keeping output pts monotonic across file boundaries.
+int64_t video_pts_offset = 0, audio_pts_offset = 0;
+int64_t next_video_pts = 0, next_audio_pts = 0;
+
+// Timebase of the first input file; later files are rescaled into it.
+AVRational prev_video_timebase{0, 0};
+AVRational prev_audio_timebase{0, 0};
+
+// Per-frame callback from FFmpegCapture. Splices several input files into one
+// continuous output: rescales pts into the first file's timebase, rebases pts
+// when they jump backwards (= next file), drops frames outside the crop
+// window, blends the CEF overlay in, converts to NV12 and feeds x264.
void video_frame_callback(FFmpegCapture *video, X264Encoder *x264_encoder, AudioEncoder *audio_encoder,
                          int64_t video_pts, AVRational video_timebase,
                          int64_t audio_pts, AVRational audio_timebase,
                          FrameAllocator::Frame video_frame, size_t video_offset, VideoFormat video_format,
                          FrameAllocator::Frame audio_frame, size_t audio_offset, AudioFormat audio_format)
{
-	if (video_pts >= 0 && video_frame.len > 0) {
+	// Our splicing wants consistent timebases...
+	if (video_pts >= 0) {
+		if (prev_video_timebase.den == 0) {
+			prev_video_timebase = video_timebase;
+		}
+		video_pts = av_rescale_q(video_pts, video_timebase, prev_video_timebase);
+		video_timebase = prev_video_timebase;
+	}
+	if (audio_pts >= 0) {
+		if (prev_audio_timebase.den == 0) {
+			prev_audio_timebase = audio_timebase;
+		}
+		audio_pts = av_rescale_q(audio_pts, audio_timebase, prev_audio_timebase);
+		audio_timebase = prev_audio_timebase;
+	}
+
+	if (video_pts >= 0)
+		video_pts += video_pts_offset;
+	if (audio_pts >= 0)
+		audio_pts += audio_pts_offset;
+	// A pts below the predicted next_*_pts means the capture moved on to the
+	// next input file; rebase both streams there so output pts stay monotonic.
+	// NOTE(review): %ld for int64_t is LP64-only; PRId64 would be portable.
+	if ((video_pts >= 0 && video_pts < next_video_pts) || (audio_pts >= 0 && audio_pts < next_audio_pts)) {
+		printf("=== next file (%ld < %ld || %ld < %ld) ===\n", video_pts, next_video_pts, audio_pts, next_audio_pts);
+		if (video_pts >= 0)
+			video_pts -= video_pts_offset;
+		if (audio_pts >= 0)
+			audio_pts -= audio_pts_offset;
+		video_pts_offset = next_video_pts;
+		audio_pts_offset = next_audio_pts;
+		if (video_pts >= 0)
+			video_pts += video_pts_offset;
+		if (audio_pts >= 0)
+			audio_pts += audio_pts_offset;
+	}
+
+	double cur_video_time = video_start_time + video_pts * double(video_timebase.num) / double(video_timebase.den);
+	double cur_audio_time = video_start_time + audio_pts * double(audio_timebase.num) / double(audio_timebase.den);
+	// Once-per-second progress log (HH:MM:SS; the +7200 shifts to UTC+2).
+	char ts[256];
+	{
+		int doh = uint64_t(cur_video_time + 7200) % 86400;
+		snprintf(ts, 256, "%02d:%02d:%02d", doh / 3600, (doh % 3600) / 60, doh % 60);
+	}
+	if (ts != last_ts) {
+		if (!within(cur_video_time)) {
+			printf("%s [skip]\n", ts);
+		} else {
+			if (cur_msg < bodet_msgs.size()) {
+				printf("%s %s\n", ts, bodet_msgs[cur_msg].msg.c_str());
+			} else {
+				printf("%s\n", ts);
+			}
+		}
+		last_ts = ts;
+	}
+
+	// Past the crop window: nothing more to encode.
+	if (video_pts >= 0 && cur_video_time > crop_end) {
+		printf("=== sending quit signal ===\n");
+		should_quit.quit();
+	}
+
+	if (video_pts >= 0 && video_frame.len > 0 && within(cur_video_time)) {
+		// (This ReceivedTimestamps ts shadows the char ts[256] above.)
		ReceivedTimestamps ts;
		ts.ts.push_back(steady_clock::now());
+		//next_video_pts = video_pts + av_rescale_q(1, AVRational{ 1001, 60000 }, video_timebase);
+
+		// I hate Matroska timestamps and their inaccuracy...
+		// (Predict the next pts with generous slack so timestamp jitter is not
+		// mistaken for a file switch.)
+		next_video_pts = video_pts + av_rescale_q(1, AVRational{ 500, 60000 }, video_timebase);
+
		video_pts = av_rescale_q(video_pts, video_timebase, AVRational{ 1, TIMEBASE });
		int64_t frame_duration = int64_t(TIMEBASE) * video_format.frame_rate_den / video_format.frame_rate_nom;
-		x264_encoder->add_frame(video_pts, frame_duration, video->get_current_frame_ycbcr_format().luma_coefficients, video_frame.data + video_offset, ts);
+		// team1 == "nocef" disables the overlay entirely (debugging aid).
+		if (team1 != "nocef") {
+			add_cef(video_frame.data + video_offset, video_format.width, video_format.height, video_pts, video_timebase);
+		}
+		convert_stuff(video_format, video_frame.data + video_offset);
+		x264_encoder->add_frame(video_pts, frame_duration, video->get_current_frame_ycbcr_format().luma_coefficients, vfd, ts);
+		//} else {
+		//	x264_encoder->add_frame(video_pts, frame_duration, video->get_current_frame_ycbcr_format().luma_coefficients, video_frame.data + video_offset, ts);
+		//}
		global_basic_stats->update(frame_num++, /*dropped_frames=*/0);
	}
-	if (audio_frame.len > 0) {
+	if (audio_frame.len > 0 && within(cur_audio_time)) {
		// FFmpegCapture takes care of this for us.
		assert(audio_format.num_channels == 2);
		assert(audio_format.sample_rate == OUTPUT_FREQUENCY);
	} else {
		assert(false);
	}
+	// NOTE(review): num_samples/float_samples are defined in context elided
+	// from this hunk; next_audio_pts is updated even when the frame is outside
+	// the crop window — confirm that is intended.
+	//next_audio_pts = audio_pts + av_rescale_q(num_samples / 2, AVRational{ 1, OUTPUT_FREQUENCY }, audio_timebase);
+	// Matroska hack...
+	next_audio_pts = audio_pts + av_rescale_q(num_samples / 4, AVRational{ 1, OUTPUT_FREQUENCY }, audio_timebase);
	audio_pts = av_rescale_q(audio_pts, audio_timebase, AVRational{ 1, TIMEBASE });
	audio_encoder->encode_audio(float_samples, audio_pts);
	}
	should_quit.quit();
}
+// Decodes one uppercase hex digit; a space counts as 0.
+// NOTE(review): lowercase 'a'-'f' are not handled and would fall into the
+// final branch, yielding garbage — presumably the log writer always emits
+// uppercase; verify.
+int hex(char ch)
+{
+	if (ch == ' ') {
+		return 0;
+	} else if (ch >= 'A' && ch <= 'F') {
+		return 10 + (ch - 'A');
+	} else {
+		return ch - '0';
+	}
+}
+
+char msgbuf[65536];  // raw line buffer for the Bodet message log (hex payload)
+
+// Parses "YYYY-MM-DD HH:MM:SS" (interpreted in the local time zone) into
+// Unix seconds. Returns an unspecified value if the string does not match.
+double parse_time(const char *str)
+{
+	// Zero-initialize: strptime() only fills in the fields it parses, and
+	// mktime() would otherwise read garbage (notably tm_isdst, which could
+	// shift the result by an hour).
+	struct tm tm{};
+	tm.tm_isdst = -1;  // let mktime() determine DST itself
+	strptime(str, "%Y-%m-%d %H:%M:%S", &tm);
+	return mktime(&tm);
+}
+
+
+// Splits `str` on `delim`, skipping empty tokens (consecutive, leading and
+// trailing delimiters produce no empty strings).
+std::vector<std::string> split(const std::string& str, char delim) {
+	std::vector<std::string> strings;
+	size_t start;
+	size_t end = 0;
+	while ((start = str.find_first_not_of(delim, end)) != std::string::npos) {
+		end = str.find(delim, start);
+		strings.push_back(str.substr(start, end - start));
+	}
+	return strings;
+}
+
int main(int argc, char *argv[])
{
+	// CEF helper processes re-exec this binary; hand control to CEF
+	// immediately in that case.
+	CefMainArgs main_args(argc, argv);
+	cef_app = CefRefPtr<NageruCefApp>(new NageruCefApp());
+	int err = CefExecuteProcess(main_args, cef_app.get(), nullptr);
+	if (err >= 0) {
+		return err;
+	}
+
+	// CEF wants to use GLib for its main loop, which interferes with Qt's use of it.
+	// The alternative is trying to integrate CEF into Qt's main loop, but that requires
+	// fairly extensive cross-thread communication and that parts of CEF runs on Qt's UI
+	// thread.
+	setenv("QT_NO_GLIB", "1", 0);
+
	parse_flags(PROGRAM_KAERU, argc, argv);
-	if (optind + 1 != argc) {
-		usage(PROGRAM_KAERU);
-		abort();
+
+	// argv[optind] .. argv[optind + 6] (input, start time, message log, team
+	// names and colors) are dereferenced unconditionally below, so keep a
+	// minimal argument-count check now that the old strict one is gone.
+	if (argc < optind + 7) {
+		usage(PROGRAM_KAERU);
+		abort();
+	}
+
+	video_start_time = atof(argv[optind + 1]);
+	team1 = argv[optind + 3];
+	team2 = argv[optind + 4];
+	team1color = argv[optind + 5];
+	team2color = argv[optind + 6];
+	if (argc > optind + 7) crop_start = parse_time(argv[optind + 7]);
+	if (argc > optind + 8) crop_end = parse_time(argv[optind + 8]);
+	if (argc > optind + 9) output_filename = argv[optind + 9];
+	//printf("crop= %f %f\n", crop_start, crop_end);
+	//exit(1);
+
+	// Load the Bodet message log: lines of "<time>,<hex payload>".
+	FILE *msgfp = fopen(argv[optind + 2], "r");
+	if (msgfp == nullptr) {
+		perror(argv[optind + 2]);
+		exit(1);
+	}
+	while (!feof(msgfp)) {
+		double t;
+		// The field width keeps fscanf() from overflowing msgbuf.
+		if (fscanf(msgfp, "%lf,%65535s", &t, msgbuf) != 2) break;
+		BodetMsg bm;
+		bm.t = t;
+		if (t < video_start_time) {
+			continue;
+		}
+		// Decode the hex payload; the first byte pair (i == 0) is skipped —
+		// presumably a framing byte, confirm against the log format.
+		const size_t num_bytes = strlen(msgbuf) / 2;
+		for (unsigned i = 1; i < num_bytes; ++i) {
+			bm.msg.push_back(hex(msgbuf[i * 2]) * 16 + hex(msgbuf[i * 2 + 1]));
+		}
+		bodet_msgs.push_back(bm);
+		printf("%.3f %s\n", t, bm.msg.c_str());
	}
+	fclose(msgfp);
+
	global_flags.max_num_cards = 1; // For latency metrics.
#if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(58, 9, 100)
}
	global_x264_encoder = x264_encoder.get();
-	FFmpegCapture video(argv[optind], global_flags.width, global_flags.height);
-	video.set_pixel_format(FFmpegCapture::PixelFormat_NV12);
+	// Multiple inputs are colon-separated and spliced in video_frame_callback().
+	vector<string> filenames = split(argv[optind], ':');
+
+	FFmpegCapture video(filenames, global_flags.width, global_flags.height);
+	// BGRA so the CEF overlay can be blended before NV12 conversion.
+	video.set_pixel_format(bmusb::PixelFormat_8BitBGRA);
	if (global_flags.transcode_video) {
		video.set_frame_callback(bind(video_frame_callback, &video, x264_encoder.get(), audio_encoder.get(), _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11));
	} else {