git.sesse.net Git - nageru/commitdiff
A hack to make Kaeru overlay CEF scores on a transcoded file. Probably not useful...
authorSteinar H. Gunderson <sgunderson@bigfoot.com>
Thu, 14 Apr 2022 12:46:09 +0000 (14:46 +0200)
committerSteinar H. Gunderson <sgunderson@bigfoot.com>
Thu, 14 Apr 2022 12:46:09 +0000 (14:46 +0200)
meson.build
nageru/cef_capture.cpp
nageru/ffmpeg_capture.cpp
nageru/ffmpeg_capture.h
nageru/kaeru.cpp
nageru/nageru_cef_app.cpp

index bb37e716efa7e89d1742261f03046224dcfd1819..69759cb43baf35b6ecdedb46fcc1f4482a8d37ca 100644 (file)
@@ -128,7 +128,7 @@ if have_cef
        endif
 
        cef_libs = ['libEGL.so', 'libGLESv2.so', 'snapshot_blob.bin', 'v8_context_snapshot.bin']
-       cef_resources = ['cef.pak', 'cef_100_percent.pak', 'cef_200_percent.pak', 'cef_extensions.pak', 'devtools_resources.pak']
+       cef_resources = ['chrome_100_percent.pak', 'chrome_200_percent.pak', 'resources.pak']
        if not get_option('cef_no_icudtl')
                cef_resources += ['icudtl.dat']
        endif
@@ -264,7 +264,7 @@ executable('nageru', 'nageru/main.cpp',
 meson.add_install_script('nageru/scripts/setup_nageru_symlink.sh')
 
 # Kaeru executable.
-executable('kaeru', 'nageru/kaeru.cpp',
+executable('kaeru', ['nageru/kaeru.cpp', 'nageru/nageru_cef_app.cpp'],
        dependencies: [nageru_deps, kaeru_extra_deps],
        include_directories: nageru_include_dirs,
        link_with: [stream, aux, kaeru_link_with],
index b9baa0e001cd073912d2679e235feed4503576da..f60fa5c3acabc8d8e50520bad0f6b7d17e70f25b 100644 (file)
@@ -183,8 +183,8 @@ void CEFCapture::start_bm_capture()
                lock_guard<recursive_mutex> lock(browser_mutex);
 
                CefBrowserSettings browser_settings;
-               browser_settings.web_security = cef_state_t::STATE_DISABLED;
-               browser_settings.webgl = cef_state_t::STATE_ENABLED;
+               //browser_settings.web_security = cef_state_t::STATE_DISABLED;
+               browser_settings.webgl = cef_state_t::STATE_DISABLED;
                browser_settings.windowless_frame_rate = max_fps;
 
                CefWindowInfo window_info;
index a9ad73d8d7ee0948744111bfaffc451159c25e9f..984ce5e8b8add69da4b0a9fd8a68d232c7ab6b41 100644 (file)
@@ -258,8 +258,9 @@ RGBTriplet get_neutral_color(AVDictionary *metadata)
 }  // namespace
 
 FFmpegCapture::FFmpegCapture(const string &filename, unsigned width, unsigned height)
-       : filename(filename), width(width), height(height), video_timebase{1, 1}
+       : width(width), height(height), video_timebase{1, 1}
 {
+       filenames.push_back(filename);
        description = "Video: " + filename;
 
        last_frame = steady_clock::now();
@@ -267,6 +268,17 @@ FFmpegCapture::FFmpegCapture(const string &filename, unsigned width, unsigned he
        avformat_network_init();  // In case someone wants this.
 }
 
+FFmpegCapture::FFmpegCapture(const std::vector<std::string> &filenames, unsigned width, unsigned height)
+       : filenames(filenames), width(width), height(height), video_timebase{1, 1}
+{
+       description = "Video: " + filenames[0];
+
+       last_frame = steady_clock::now();
+       play_once = true;
+
+       avformat_network_init();  // In case someone wants this.
+}
+
 #ifdef HAVE_SRT
 FFmpegCapture::FFmpegCapture(int srt_sock, const string &stream_id)
        : srt_sock(srt_sock),
@@ -357,11 +369,14 @@ void FFmpegCapture::producer_thread_func()
        snprintf(thread_name, sizeof(thread_name), "FFmpeg_C_%d", card_index);
        pthread_setname_np(pthread_self(), thread_name);
 
+       printf("CAP\n");
        while (!producer_thread_should_quit.should_quit()) {
                string filename_copy;
+               printf("CAP %zu LEFT\n", filenames.size());
                {
                        lock_guard<mutex> lock(filename_mu);
-                       filename_copy = filename;
+                       filename_copy = filenames.front();
+                       filenames.erase(filenames.begin());
                }
 
                string pathname;
@@ -370,6 +385,7 @@ void FFmpegCapture::producer_thread_func()
                } else {
                        pathname = description;
                }
+               printf("CAP %s\n", pathname.c_str());
                if (pathname.empty()) {
                        send_disconnected_frame();
                        if (play_once) {
@@ -391,7 +407,7 @@ void FFmpegCapture::producer_thread_func()
                        continue;
                }
 
-               if (play_once) {
+               if (play_once && filenames.empty()) {
                        send_disconnected_frame();
                        break;
                }
@@ -547,7 +563,7 @@ bool FFmpegCapture::play_video(const string &pathname)
        // “whatever goes”, so we don't get VDPAU or CUDA here without enumerating
        // through several different types. VA-API will do for now.
        AVBufferRef *hw_device_ctx = nullptr;
-       if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, nullptr, nullptr, 0) < 0) {
+       if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, nullptr, nullptr, 0) < 0 || true) {
                fprintf(stderr, "Failed to initialize VA-API for FFmpeg acceleration. Decoding video in software.\n");
        } else {
                video_codec_ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
@@ -616,7 +632,7 @@ bool FFmpegCapture::play_video(const string &pathname)
                                // so don't try).
                                return true;
                        }
-                       if (av_seek_frame(format_ctx.get(), /*stream_index=*/-1, /*timestamp=*/0, /*flags=*/0) < 0) {
+                       if (true || av_seek_frame(format_ctx.get(), /*stream_index=*/-1, /*timestamp=*/0, /*flags=*/0) < 0) {
                                fprintf(stderr, "%s: Rewind failed, not looping.\n", pathname.c_str());
                                return true;
                        }
index 2ab9481aad6d351e5597f86cc0df3315a39d6606..0823ce09bdb345d1884efc506abbd7cc9a456e35 100644 (file)
@@ -57,6 +57,7 @@ class FFmpegCapture : public bmusb::CaptureInterface
 {
 public:
        FFmpegCapture(const std::string &filename, unsigned width, unsigned height);
+       FFmpegCapture(const std::vector<std::string> &filenames, unsigned width, unsigned height);
 #ifdef HAVE_SRT
        // Takes ownership of the SRT client socket.
        FFmpegCapture(int srt_sock, const std::string &stream_id);
@@ -90,13 +91,13 @@ public:
        std::string get_filename() const
        {
                std::lock_guard<std::mutex> lock(filename_mu);
-               return filename;
+               return filenames[0];
        }
 
        void change_filename(const std::string &new_filename)
        {
                std::lock_guard<std::mutex> lock(filename_mu);
-               filename = new_filename;
+               //filename = new_filename;
                should_interrupt = true;
        }
 
@@ -275,7 +276,8 @@ private:
        inline unsigned frame_height(const AVFrame *frame) const;
 
        mutable std::mutex filename_mu;
-       std::string description, filename;
+       std::string description;
+       std::vector<std::string> filenames;
        int srt_sock = -1;
        uint16_t timecode = 0;
        unsigned width, height;  // 0 means keep input size.
index 9ff672d0f16ab946f1b7e03dce85f068af67f01d..419a3d149f9275dcf59968d1320d8d6d896281c5 100644 (file)
@@ -35,6 +35,19 @@ BasicStats *global_basic_stats = nullptr;
 QuittableSleeper should_quit;
 MuxMetrics stream_mux_metrics;
 
+//unsigned frameno = 0;
+double video_start_time = 0;
+double fps = 60.0 / 1.001;
+struct BodetMsg {
+       double t;
+       std::string msg;
+};
+std::vector<BodetMsg> bodet_msgs;
+unsigned cur_msg = 0;
+string team1, team2, team1color, team2color;
+int score1 = 0, score2 = 0, bodet_clock = 0;
+std::string output_filename = "out.mp4";
+
 namespace {
 
 int write_packet(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
@@ -69,10 +82,11 @@ unique_ptr<Mux> create_mux(HTTPD *httpd, const AVOutputFormat *oformat, X264Enco
        avctx->oformat = const_cast<decltype(avctx->oformat)>(oformat);  // const_cast is a hack to work in FFmpeg both before and after 5.0.
 
        uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
-       avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, httpd, nullptr, nullptr, nullptr);
-       avctx->pb->write_data_type = &write_packet;
-       avctx->pb->ignore_boundary_point = 1;
-       avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+       //avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, httpd, nullptr, nullptr, nullptr);
+       //avctx->pb->write_data_type = &write_packet;
+       //avctx->pb->ignore_boundary_point = 1;
+       //avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+       avio_open(&avctx->pb, output_filename.c_str(), AVIO_FLAG_WRITE);
 
        string video_extradata = x264_encoder->get_global_headers();
 
@@ -89,6 +103,262 @@ unique_ptr<Mux> create_mux(HTTPD *httpd, const AVOutputFormat *oformat, X264Enco
        return mux;
 }
 
+uint8_t *vfd = nullptr;
+uint8_t cefimg[1280 * 720 * 4];
+SwsContext *sws = nullptr;
+
+extern "C" {
+#include <libswscale/swscale.h>
+}
+
+void convert_stuff(const VideoFormat &video_format, const uint8_t *ptr)
+{
+       if (sws == nullptr) {
+               sws = sws_getContext(video_format.width, video_format.height, AV_PIX_FMT_BGRA,
+                       video_format.width, video_format.height, AV_PIX_FMT_NV12,
+                       SWS_BICUBIC, nullptr, nullptr, nullptr);
+               vfd = new uint8_t[video_format.width * video_format.height * 2];
+       }
+
+        uint8_t *src_pic_data[4] = { nullptr, nullptr, nullptr, nullptr };
+        int src_linesizes[4] = { 0, 0, 0, 0 };
+       src_pic_data[0] = (uint8_t *)ptr;
+       src_linesizes[0] = video_format.width * 4;
+
+        uint8_t *dst_pic_data[4] = { nullptr, nullptr, nullptr, nullptr };
+        int dst_linesizes[4] = { 0, 0, 0, 0 };
+       dst_pic_data[0] = vfd;
+       dst_linesizes[0] = video_format.width;
+       dst_pic_data[1] = vfd + video_format.width * video_format.height;
+       dst_linesizes[1] = video_format.width;
+
+       sws_scale(sws, src_pic_data, src_linesizes, 0, video_format.height, dst_pic_data, dst_linesizes);
+}
+
+#include <cef_app.h>
+#include <cef_browser.h>
+#include <cef_client.h>
+#include "nageru_cef_app.h"
+
+recursive_mutex browser_mutex;
+int browser_ready = 0;
+
+class KaeruCEFClient : public CefClient, public CefRenderHandler, public CefLoadHandler
+{
+public:
+        KaeruCEFClient() {}
+
+        CefRefPtr<CefRenderHandler> GetRenderHandler() override
+        {
+                return this;
+        }
+
+        CefRefPtr<CefLoadHandler> GetLoadHandler() override
+        {
+                return this;
+        }
+
+        // CefRenderHandler.
+
+        void OnPaint(CefRefPtr<CefBrowser> browser, PaintElementType type, const RectList &dirtyRects, const void *buffer, int width, int height) override
+       {
+               // fprintf(stderr, "onpaint %dx%d\n", width, height);
+               memcpy(cefimg, buffer, width * height * 4);  // FIXME lock?
+
+               lock_guard<recursive_mutex> lock(browser_mutex);
+               if (browser_ready == 1)
+                       browser_ready = 2;
+       }
+
+        void GetViewRect(CefRefPtr<CefBrowser> browser, CefRect &rect) override
+       {
+               fprintf(stderr, "getviewrect\n");
+               rect = CefRect(0, 0, 1280, 720);
+       }
+
+        // CefLoadHandler.
+
+        void OnLoadEnd(CefRefPtr<CefBrowser> browser, CefRefPtr<CefFrame> frame, int httpStatusCode) override
+       {
+               fprintf(stderr, "onload\n");
+
+               CefString script_url("<theme eval>");
+               int start_line = 1;
+               browser->GetMainFrame()->ExecuteJavaScript("play();", script_url, start_line);
+
+               lock_guard<recursive_mutex> lock(browser_mutex);
+               browser_ready = 1;
+       }
+
+private:
+        CEFCapture *parent;
+
+        IMPLEMENT_REFCOUNTING(KaeruCEFClient);
+};
+
+CefRefPtr<NageruCefApp> cef_app;
+CefRefPtr<CefBrowser> browser;
+unique_ptr<KaeruCEFClient> cef_client;
+
+  int parse_digit(char ch)
+  {
+          if (ch >= '0' && ch <= '9') {
+                  return ch - '0';
+          }
+          return 0;
+  }
+  
+  int parse_clock(char ch1, char ch2)
+  {
+          int s1 = parse_digit(ch1);
+          int s2 = parse_digit(ch2);
+          return s1 * 10 + s2;
+  }
+  
+
+  int parse_score_weird(char ch1, char ch2, char ch3)
+  {
+          char str[4];
+          char *ptr = str;
+          if (ch1 != ' ') *ptr++ = ch1;
+          if (ch2 != ' ') *ptr++ = ch2;
+          if (ch3 != ' ') *ptr++ = ch3;
+          *ptr++ = 0;
+  
+          return atoi(str);
+  }
+
+void add_cef(uint8_t *data, unsigned width, unsigned height, int64_t video_pts, AVRational video_timebase)
+{
+       if (cef_client == nullptr) {
+               cef_client.reset(new KaeruCEFClient);
+
+               //cef_app.reset(new NageruCefApp);
+               cef_app->initialize_cef();
+
+               CefPostTask(TID_UI, new CEFTaskAdapter([&]{
+                       lock_guard<recursive_mutex> lock(browser_mutex);
+
+                       CefBrowserSettings browser_settings;
+                       // browser_settings.web_security = cef_state_t::STATE_DISABLED;
+                       browser_settings.webgl = cef_state_t::STATE_DISABLED;
+                       browser_settings.windowless_frame_rate = 60.00;
+
+                       CefWindowInfo window_info;
+                       window_info.SetAsWindowless(0);
+                       browser = CefBrowserHost::CreateBrowserSync(window_info, cef_client.get(), "file:///home/sesse/dev/ultimatescore/score.html", browser_settings, nullptr, nullptr);
+               }));
+       }
+
+       {
+               CefString script_url("<theme eval>");
+               int start_line = 1;
+               char buf[256];
+
+               int old_bodet_clock = bodet_clock;
+
+               //fprintf(stderr, "video_pts=%ld  timebase = %ld/%ld\n", video_pts, video_timebase.num, video_timebase.den);
+               //double cur_time = video_start_time + video_pts * double(video_timebase.num) / double(video_timebase.den);
+               double cur_time = video_start_time + video_pts / double(TIMEBASE);
+               //double cur_time = video_start_time + (frameno++) / fps;
+               while (cur_msg < bodet_msgs.size() && cur_time > bodet_msgs[cur_msg].t) {
+                       const string &m = bodet_msgs[cur_msg].msg;
+                       if (m.size() >= 10 && m[0] == 'G' && m[1] == '1' && m[2] == '0') {
+                               int min = parse_clock(m[4], m[5]);
+                               int sec = parse_clock(m[6], m[7]);
+                               bodet_clock = min * 60 + sec;
+                               score1 = parse_score_weird(m[8], m[9], m[10]);
+                               score2 = parse_score_weird(m[11], m[12], m[13]);
+                       }
+                       ++cur_msg;
+               }
+
+               string str = "update('{";
+               snprintf(buf, 256, "\"score1\": %d", score1);
+               str += buf;
+               snprintf(buf, 256, ",\"score2\": %d", score2);
+               str += buf;
+
+               if (false) {
+                       int doh = uint64_t(cur_time + 7200) % 86400;
+                       snprintf(buf, 256, "%02d:%02d:%02d", doh / 3600, (doh % 3600) / 60, doh % 60);
+                       team1 = buf;
+               }
+
+               str += ",\"team1\": \"" + team1 + "\"";
+               str += ",\"team2\": \"" + team2 + "\"";
+               str += ",\"team1color\": \"" + team1color + "\"";
+               str += ",\"team2color\": \"" + team2color + "\"";
+               str += "}');setteams();setcolors();setscore();";
+
+               snprintf(buf, 256, "update_given_clock(%d,'clock');", bodet_clock);
+               str += buf;
+
+               if (old_bodet_clock == 0 && bodet_clock != 0) {
+                       str += "showclock();";
+               } else if (old_bodet_clock != 0 && bodet_clock == 0) {
+                       str += "hideclock();";
+               }
+
+               //printf("%s\n", str.c_str());
+
+               bool ok = false;
+               do {
+                       browser_mutex.lock();
+                       if (browser_ready >= 2) {
+                               browser->GetMainFrame()->ExecuteJavaScript(str, script_url, start_line);
+                               browser_mutex.unlock();
+                               ok = true;
+                       } else {
+                               browser_mutex.unlock();
+                               printf("Waiting for CEF...\n");
+                               usleep(100000);
+                       }
+               } while (!ok);
+       }
+
+       unsigned char r0, g0, b0;
+       unsigned char a1, r1, g1, b1;
+       unsigned char *sptr = cefimg;
+       unsigned char *dptr = data;
+       for (int i = 0; i < 1280 * 720; ++i) {
+               //a0 = dptr[3];
+               r0 = dptr[2];
+               g0 = dptr[1];
+               b0 = dptr[0];
+
+               a1 = sptr[3];
+               r1 = sptr[2];
+               g1 = sptr[1];
+               b1 = sptr[0];
+               
+               unsigned a = 255;
+               unsigned r = r0 + ((r1 - r0) * a1) / 255;
+               unsigned g = g0 + ((g1 - g0) * a1) / 255;
+               unsigned b = b0 + ((b1 - b0) * a1) / 255;
+       
+               sptr += 4;      
+               *dptr++ = b;
+               *dptr++ = g;
+               *dptr++ = r;
+               *dptr++ = a;
+       }
+       //memcpy(data, cefimg, 1280*720*4);
+}
+
+double crop_start = 0.0;
+double crop_end = HUGE_VAL;
+
+bool within(double t)
+{
+       return t >= crop_start && t < crop_end;
+}
+
+string last_ts;
+
+int64_t video_pts_offset = 0, audio_pts_offset = 0;
+int64_t next_video_pts = 0, next_audio_pts = 0;
+
 void video_frame_callback(FFmpegCapture *video, X264Encoder *x264_encoder, AudioEncoder *audio_encoder,
                           int64_t video_pts, AVRational video_timebase,
                           int64_t audio_pts, AVRational audio_timebase,
@@ -96,16 +366,67 @@ void video_frame_callback(FFmpegCapture *video, X264Encoder *x264_encoder, Audio
                          FrameAllocator::Frame video_frame, size_t video_offset, VideoFormat video_format,
                          FrameAllocator::Frame audio_frame, size_t audio_offset, AudioFormat audio_format)
 {
-       if (video_pts >= 0 && video_frame.len > 0) {
+       if (video_pts >= 0)
+               video_pts += video_pts_offset;
+       if (audio_pts >= 0)
+               audio_pts += audio_pts_offset;
+       if ((video_pts >= 0 && video_pts < next_video_pts) || (audio_pts >= 0 && audio_pts < next_audio_pts)) {
+               printf("=== next file (%ld < %ld || %ld < %ld) ===\n", video_pts, next_video_pts, audio_pts, next_audio_pts);
+               if (video_pts >= 0)
+                       video_pts -= video_pts_offset;
+               if (audio_pts >= 0)
+                       audio_pts -= audio_pts_offset;
+               video_pts_offset = next_video_pts;
+               audio_pts_offset = next_audio_pts;
+               if (video_pts >= 0)
+                       video_pts += video_pts_offset;
+               if (audio_pts >= 0)
+                       audio_pts += audio_pts_offset;
+       }
+
+       double cur_video_time = video_start_time + video_pts * double(video_timebase.num) / double(video_timebase.den);
+       double cur_audio_time = video_start_time + audio_pts * double(audio_timebase.num) / double(audio_timebase.den);
+       char ts[256];
+       {
+               int doh = uint64_t(cur_video_time + 7200) % 86400;
+               snprintf(ts, 256, "%02d:%02d:%02d", doh / 3600, (doh % 3600) / 60, doh % 60);
+       }
+       if (ts != last_ts) {
+               if (!within(cur_video_time)) {
+                       printf("%s [skip]\n", ts);
+               } else {
+                       if (cur_msg < bodet_msgs.size()) {
+                               printf("%s %s\n", ts, bodet_msgs[cur_msg].msg.c_str());
+                       } else {
+                               printf("%s\n", ts);
+                       }
+               }
+               last_ts = ts;
+       }
+
+       if (video_pts >= 0 && cur_video_time > crop_end) {
+               printf("=== sending quit signal ===\n");
+               should_quit.quit();
+       }
+
+       if (video_pts >= 0 && video_frame.len > 0 && within(cur_video_time)) {
                ReceivedTimestamps ts;
                ts.ts.push_back(steady_clock::now());
 
+               next_video_pts = video_pts + av_rescale_q(1, AVRational{ 1001, 60000 }, video_timebase);
                video_pts = av_rescale_q(video_pts, video_timebase, AVRational{ 1, TIMEBASE });
                int64_t frame_duration = int64_t(TIMEBASE) * video_format.frame_rate_den / video_format.frame_rate_nom;
-               x264_encoder->add_frame(video_pts, frame_duration, video->get_current_frame_ycbcr_format().luma_coefficients, video_frame.data + video_offset, ts);
+               if (team1 != "nocef") {
+                       add_cef(video_frame.data + video_offset, video_format.width, video_format.height, video_pts, video_timebase);
+               }
+               convert_stuff(video_format, video_frame.data + video_offset);
+               x264_encoder->add_frame(video_pts, frame_duration, video->get_current_frame_ycbcr_format().luma_coefficients, vfd, ts);
+               //} else {
+               //      x264_encoder->add_frame(video_pts, frame_duration, video->get_current_frame_ycbcr_format().luma_coefficients, video_frame.data + video_offset, ts);
+               //}
                global_basic_stats->update(frame_num++, /*dropped_frames=*/0);
        }
-       if (audio_frame.len > 0) {
+       if (audio_frame.len > 0 && within(cur_audio_time)) {
                // FFmpegCapture takes care of this for us.
                assert(audio_format.num_channels == 2);
                assert(audio_format.sample_rate == OUTPUT_FREQUENCY);
@@ -130,6 +451,7 @@ void video_frame_callback(FFmpegCapture *video, X264Encoder *x264_encoder, Audio
                } else {
                        assert(false);
                }
+               next_audio_pts = audio_pts + av_rescale_q(num_samples / 2, AVRational{ 1, OUTPUT_FREQUENCY }, audio_timebase);
                audio_pts = av_rescale_q(audio_pts, audio_timebase, AVRational{ 1, TIMEBASE });
                audio_encoder->encode_audio(float_samples, audio_pts);
         }
@@ -208,13 +530,83 @@ void request_quit(int signal)
        should_quit.quit();
 }
 
+int hex(char ch)
+{
+       if (ch == ' ') {
+               return 0;
+       } else if (ch >= 'A' && ch <= 'F') {
+               return 10 + (ch - 'A');
+       } else {
+               return ch - '0';
+       }
+}
+               
+char msgbuf[65536];
+
+double parse_time(const char *str)
+{
+       struct tm tm;
+       strptime(str, "%Y-%m-%d %H:%M:%S", &tm);
+       return mktime(&tm);
+}
+
+
+std::vector<std::string> split(const std::string& str, char delim) {
+    std::vector<std::string> strings;
+    size_t start;
+    size_t end = 0;
+    while ((start = str.find_first_not_of(delim, end)) != std::string::npos) {
+        end = str.find(delim, start);
+        strings.push_back(str.substr(start, end - start));
+    }
+    return strings;
+}
+
 int main(int argc, char *argv[])
 {
+       CefMainArgs main_args(argc, argv);
+       cef_app = CefRefPtr<NageruCefApp>(new NageruCefApp());
+       int err = CefExecuteProcess(main_args, cef_app.get(), nullptr);
+       if (err >= 0) {
+               return err;
+       }
+
+       // CEF wants to use GLib for its main loop, which interferes with Qt's use of it.
+       // The alternative is trying to integrate CEF into Qt's main loop, but that requires
+       // fairly extensive cross-thread communication and that parts of CEF runs on Qt's UI
+       // thread.
+       setenv("QT_NO_GLIB", "1", 0);
+
        parse_flags(PROGRAM_KAERU, argc, argv);
-       if (optind + 1 != argc) {
-               usage(PROGRAM_KAERU);
-               abort();
+
+       video_start_time = atof(argv[optind + 1]);
+       team1 = argv[optind + 3];
+       team2 = argv[optind + 4];
+       team1color = argv[optind + 5];
+       team2color = argv[optind + 6];
+       if (argc > optind + 7) crop_start = parse_time(argv[optind + 7]);
+       if (argc > optind + 8) crop_end = parse_time(argv[optind + 8]);
+       if (argc > optind + 9) output_filename = argv[optind + 9];
+       //printf("crop= %f %f\n", crop_start, crop_end);
+       //exit(1);
+
+       FILE *msgfp = fopen(argv[optind + 2], "r");
+       while (!feof(msgfp)) {
+               double t;
+               if (fscanf(msgfp, "%lf,%s", &t, msgbuf) != 2) break;
+               BodetMsg bm;
+               bm.t = t;
+               if (t < video_start_time) {
+                       continue;
+               }
+               for (unsigned i = 1; i < strlen(msgbuf) / 2; ++i) {
+                       bm.msg.push_back(hex(msgbuf[i * 2]) * 16 + hex(msgbuf[i * 2 + 1]));
+               }
+               bodet_msgs.push_back(bm);
+               printf("%.3f %s\n", t, bm.msg.c_str());
        }
+       fclose(msgfp);
+
        global_flags.max_num_cards = 1;  // For latency metrics.
 
 #if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(58, 9, 100)
@@ -244,8 +636,10 @@ int main(int argc, char *argv[])
        }
        global_x264_encoder = x264_encoder.get();
 
-       FFmpegCapture video(argv[optind], global_flags.width, global_flags.height);
-       video.set_pixel_format(FFmpegCapture::PixelFormat_NV12);
+       vector<string> filenames = split(argv[optind], ':');    
+
+       FFmpegCapture video(filenames, global_flags.width, global_flags.height);
+       video.set_pixel_format(bmusb::PixelFormat_8BitBGRA);
        if (global_flags.transcode_video) {
                video.set_frame_callback(bind(video_frame_callback, &video, x264_encoder.get(), audio_encoder.get(), _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11));
        } else {
index 877635daf79acd19b2424d022b1707acfb6385f5..5361b301fabaac310b120223527e6e4f3d88ede1 100644 (file)
@@ -15,6 +15,7 @@ void NageruCefApp::OnBeforeCommandLineProcessing(
 {
        command_line->AppendSwitch("disable-gpu");
        command_line->AppendSwitch("disable-gpu-compositing");
+       command_line->AppendSwitch("disable-software-rasterizer");
        command_line->AppendSwitch("enable-begin-frame-scheduling");
 
        // https://bitbucket.org/chromiumembedded/cef/issues/2717/xmlhttprequest-empty-responsetext