Implement basic support for CEF.
author Steinar H. Gunderson <sgunderson@bigfoot.com>
Sat, 24 Feb 2018 17:49:36 +0000 (18:49 +0100)
committer Steinar H. Gunderson <sgunderson@bigfoot.com>
Mon, 26 Feb 2018 17:06:16 +0000 (18:06 +0100)
Having CEF directly in the process is less cumbersome than calling out
to CasparCG over a pipe, and typically yields higher performance.

Makefile
cef_capture.cpp [new file with mode: 0644]
cef_capture.h [new file with mode: 0644]
main.cpp
mixer.cpp
mixer.h
nageru_cef_app.cpp [new file with mode: 0644]
nageru_cef_app.h [new file with mode: 0644]
theme.cpp
theme.h

diff --git a/Makefile b/Makefile
index 3ee9a795c57fbe65380243a50b2db99f2d9fa2b3..af5937c60ceac7f95e38fc29a3902341fd3a93dd 100644 (file)
--- a/Makefile
+++ b/Makefile
@@ -6,6 +6,12 @@ PKG_MODULES := Qt5Core Qt5Gui Qt5Widgets Qt5OpenGLExtensions Qt5OpenGL Qt5PrintS
 CXXFLAGS ?= -O2 -g -Wall  # Will be overridden by environment.
 CXXFLAGS += -std=gnu++11 -fPIC $(shell pkg-config --cflags $(PKG_MODULES)) -pthread -DMOVIT_SHADER_DIR=\"$(shell pkg-config --variable=shaderdir movit)\" -Idecklink/
 
+CEF_DIR=/home/sesse/nmu/cef_binary_3.3282.1734.g8f26fe0_linux64
+CEF_BUILD_TYPE=Release
+CEF_LIBS=$(CEF_DIR)/$(CEF_BUILD_TYPE)/libcef.so $(CEF_DIR)/libcef_dll_wrapper/libcef_dll_wrapper.a
+CPPFLAGS += -DHAVE_CEF=1 -I$(CEF_DIR) -I$(CEF_DIR)/include
+LDFLAGS += -L$(CEF_DIR)/$(CEF_BUILD_TYPE) -Wl,-rpath $(CEF_DIR)/$(CEF_BUILD_TYPE)
+
 ifeq ($(EMBEDDED_BMUSB),yes)
   CPPFLAGS += -Ibmusb/
 else
@@ -41,6 +47,9 @@ endif
 # FFmpeg input
 OBJS += ffmpeg_capture.o
 
+# CEF input
+OBJS += nageru_cef_app.o cef_capture.o
+
 # Benchmark program.
 BM_OBJS = benchmark_audio_mixer.o $(AUDIO_MIXER_OBJS) flags.o metrics.o
 
@@ -59,8 +68,8 @@ BM_OBJS = benchmark_audio_mixer.o $(AUDIO_MIXER_OBJS) flags.o metrics.o
 
 all: nageru kaeru benchmark_audio_mixer
 
-nageru: $(OBJS)
-       $(CXX) -o $@ $^ $(LDFLAGS) $(LDLIBS)
+nageru: $(OBJS) $(CEF_LIBS)
+       $(CXX) -o $@ $^ $(LDFLAGS) $(LDLIBS) $(CEF_LIBS)
 kaeru: $(KAERU_OBJS)
        $(CXX) -o $@ $^ $(LDFLAGS) $(LDLIBS)
 benchmark_audio_mixer: $(BM_OBJS)
@@ -79,6 +88,12 @@ midi_mapper.o: midi_mapping.pb.h
 midi_mapping_dialog.o: ui_midi_mapping.h midi_mapping.pb.h
 mixer.o: json.pb.h
 
+$(CEF_DIR)/libcef_dll_wrapper/libcef_dll_wrapper.a: $(CEF_DIR)/Makefile
+       cd $(CEF_DIR) && $(MAKE) libcef_dll_wrapper
+
+$(CEF_DIR)/Makefile:
+       cd $(CEF_DIR) && cmake .
+
 DEPS=$(OBJS:.o=.d) $(BM_OBJS:.o=.d) $(KAERU_OBJS:.o=.d)
 -include $(DEPS)
 
diff --git a/cef_capture.cpp b/cef_capture.cpp
new file mode 100644 (file)
index 0000000..c05c295
--- /dev/null
@@ -0,0 +1,152 @@
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+#include <chrono>
+#include <memory>
+#include <string>
+
+#include "cef_capture.h"
+#include "nageru_cef_app.h"
+
+#undef CHECK
+#include <cef_app.h>
+#include <cef_browser.h>
+#include <cef_client.h>
+
+#include "bmusb/bmusb.h"
+
+using namespace std;
+using namespace std::chrono;
+using namespace bmusb;
+
+extern CefRefPtr<NageruCefApp> cef_app;
+
+CEFCapture::CEFCapture(const string &url, unsigned width, unsigned height)
+       : cef_client(new NageruCEFClient(width, height, this)),
+         width(width),
+         height(height),
+         start_url(url)
+{
+       char buf[256];
+       snprintf(buf, sizeof(buf), "CEF card %d", card_index + 1);
+       description = buf;
+}
+
+CEFCapture::~CEFCapture()
+{
+       if (has_dequeue_callbacks) {
+               dequeue_cleanup_callback();
+       }
+}
+
+void CEFCapture::OnPaint(const void *buffer, int width, int height)
+{
+       steady_clock::time_point timestamp = steady_clock::now();
+
+       VideoFormat video_format;
+       video_format.width = width;
+       video_format.height = height;
+       video_format.stride = width * 4;
+       video_format.frame_rate_nom = 60;  // FIXME
+       video_format.frame_rate_den = 1;
+       video_format.has_signal = true;
+       video_format.is_connected = true;
+
+       FrameAllocator::Frame video_frame = video_frame_allocator->alloc_frame();
+       if (video_frame.data != nullptr) {
+               assert(video_frame.size >= unsigned(width * height * 4));
+               assert(!video_frame.interleaved);
+               memcpy(video_frame.data, buffer, width * height * 4);
+               video_frame.len = video_format.stride * height;
+               video_frame.received_timestamp = timestamp;
+       }
+       frame_callback(timecode++,
+               video_frame, 0, video_format,
+               FrameAllocator::Frame(), 0, AudioFormat());
+}
+
+#define FRAME_SIZE (8 << 20)  // 8 MB.
+
+void CEFCapture::configure_card()
+{
+       if (video_frame_allocator == nullptr) {
+               owned_video_frame_allocator.reset(new MallocFrameAllocator(FRAME_SIZE, NUM_QUEUED_VIDEO_FRAMES));
+               set_video_frame_allocator(owned_video_frame_allocator.get());
+       }
+}
+
+void CEFCapture::start_bm_capture()
+{
+       cef_app->initialize_cef();
+
+       CefBrowserSettings browser_settings;
+       browser_settings.web_security = cef_state_t::STATE_DISABLED;
+       browser_settings.webgl = cef_state_t::STATE_ENABLED;
+       browser_settings.windowless_frame_rate = 60;
+
+       CefWindowInfo window_info;
+       window_info.SetAsWindowless(0);
+       CefBrowserHost::CreateBrowser(window_info, cef_client, start_url, browser_settings, nullptr);
+}
+
+void CEFCapture::stop_dequeue_thread()
+{
+       lock_guard<mutex> lock(browser_mutex);
+       cef_app->close_browser(browser);
+       browser = nullptr;  // Or unref_cef() will be sad.
+       cef_app->unref_cef();
+}
+
+std::map<uint32_t, VideoMode> CEFCapture::get_available_video_modes() const
+{
+       VideoMode mode;
+
+       char buf[256];
+       snprintf(buf, sizeof(buf), "%ux%u", width, height);
+       mode.name = buf;
+
+       mode.autodetect = false;
+       mode.width = width;
+       mode.height = height;
+       mode.frame_rate_num = 60;  // FIXME
+       mode.frame_rate_den = 1;
+       mode.interlaced = false;
+
+       return {{ 0, mode }};
+}
+
+std::map<uint32_t, std::string> CEFCapture::get_available_video_inputs() const
+{
+       return {{ 0, "HTML video input" }};
+}
+
+std::map<uint32_t, std::string> CEFCapture::get_available_audio_inputs() const
+{
+       return {{ 0, "Fake HTML audio input (silence)" }};
+}
+
+void CEFCapture::set_video_mode(uint32_t video_mode_id)
+{
+       assert(video_mode_id == 0);
+}
+
+void CEFCapture::set_video_input(uint32_t video_input_id)
+{
+       assert(video_input_id == 0);
+}
+
+void CEFCapture::set_audio_input(uint32_t audio_input_id)
+{
+       assert(audio_input_id == 0);
+}
+
+void NageruCEFClient::OnPaint(CefRefPtr<CefBrowser> browser, PaintElementType type, const RectList &dirtyRects, const void *buffer, int width, int height)
+{
+       parent->OnPaint(buffer, width, height);
+}
+
+bool NageruCEFClient::GetViewRect(CefRefPtr<CefBrowser> browser, CefRect &rect)
+{
+       rect = CefRect(0, 0, width, height);
+       return true;
+}
diff --git a/cef_capture.h b/cef_capture.h
new file mode 100644 (file)
index 0000000..dc74b30
--- /dev/null
@@ -0,0 +1,165 @@
+#ifndef _CEF_CAPTURE_H
+#define _CEF_CAPTURE_H 1
+
+// CEFCapture represents a single CEF virtual capture card (usually, there would only
+// be one globally), similar to FFmpegCapture. It owns a CefBrowser, which calls
+// OnPaint() back every time it has a frame. Note that it runs asynchronously;
+// there's no way to get frame-perfect sync.
+
+#include <assert.h>
+#include <stdint.h>
+
+#include <condition_variable>
+#include <functional>
+#include <map>
+#include <memory>
+#include <mutex>
+#include <set>
+#include <string>
+#include <thread>
+
+#undef CHECK
+#include <cef_client.h>
+#include <cef_base.h>
+#include <cef_render_handler.h>
+
+#include <bmusb/bmusb.h>
+
+class CefBrowser;
+class CefRect;
+class CEFCapture;
+
+// A helper class that receives CEF callbacks (OnPaint() etc.) on behalf of
+// CEFCapture, so that CEFCapture does not need to become CEF-refcounted itself.
+class NageruCEFClient : public CefClient, public CefRenderHandler
+{
+public:
+       NageruCEFClient(int width, int height, CEFCapture *parent)
+               : width(width), height(height), parent(parent) {}
+
+       CefRefPtr<CefRenderHandler> GetRenderHandler() override
+       {
+               return this;
+       }
+
+       void OnPaint(CefRefPtr<CefBrowser> browser, PaintElementType type, const RectList &dirtyRects, const void *buffer, int width, int height) override;
+
+       bool GetViewRect(CefRefPtr<CefBrowser> browser, CefRect &rect);
+
+private:
+       int width, height;
+       CEFCapture *parent;
+
+       IMPLEMENT_REFCOUNTING(NageruCEFClient);
+};
+
+class CEFCapture : public bmusb::CaptureInterface
+{
+public:
+       CEFCapture(const std::string &url, unsigned width, unsigned height);
+       ~CEFCapture();
+
+       void set_card_index(int card_index)
+       {
+               this->card_index = card_index;
+       }
+
+       int get_card_index() const
+       {
+               return card_index;
+       }
+
+       void OnPaint(const void *buffer, int width, int height);
+
+       // CaptureInterface.
+       void set_video_frame_allocator(bmusb::FrameAllocator *allocator) override
+       {
+               video_frame_allocator = allocator;
+               if (owned_video_frame_allocator.get() != allocator) {
+                       owned_video_frame_allocator.reset();
+               }
+       }
+
+       bmusb::FrameAllocator *get_video_frame_allocator() override
+       {
+               return video_frame_allocator;
+       }
+
+       // Does not take ownership.
+       void set_audio_frame_allocator(bmusb::FrameAllocator *allocator) override
+       {
+       }
+
+       bmusb::FrameAllocator *get_audio_frame_allocator() override
+       {
+               return nullptr;
+       }
+
+       void set_frame_callback(bmusb::frame_callback_t callback) override
+       {
+               frame_callback = callback;
+       }
+
+       void set_dequeue_thread_callbacks(std::function<void()> init, std::function<void()> cleanup) override
+       {
+               dequeue_init_callback = init;
+               dequeue_cleanup_callback = cleanup;
+               has_dequeue_callbacks = true;
+       }
+
+       std::string get_description() const override
+       {
+               return description;
+       }
+
+       void configure_card() override;
+       void start_bm_capture() override;
+       void stop_dequeue_thread() override;
+       bool get_disconnected() const override { return false; }
+
+       std::set<bmusb::PixelFormat> get_available_pixel_formats() const override
+       {
+               return std::set<bmusb::PixelFormat>{ bmusb::PixelFormat_8BitBGRA };
+       }
+
+       void set_pixel_format(bmusb::PixelFormat pixel_format) override
+       {
+               assert(pixel_format == bmusb::PixelFormat_8BitBGRA);
+       }
+
+       bmusb::PixelFormat get_current_pixel_format() const
+       {
+               return bmusb::PixelFormat_8BitBGRA;
+       }
+
+       std::map<uint32_t, bmusb::VideoMode> get_available_video_modes() const override;
+       void set_video_mode(uint32_t video_mode_id) override;
+       uint32_t get_current_video_mode() const override { return 0; }
+
+       std::map<uint32_t, std::string> get_available_video_inputs() const override;
+       void set_video_input(uint32_t video_input_id) override;
+       uint32_t get_current_video_input() const override { return 0; }
+
+       std::map<uint32_t, std::string> get_available_audio_inputs() const override;
+       void set_audio_input(uint32_t audio_input_id) override;
+       uint32_t get_current_audio_input() const override { return 0; }
+
+private:
+       CefRefPtr<NageruCEFClient> cef_client;
+       unsigned width, height;
+       int card_index = -1;
+
+       bool has_dequeue_callbacks = false;
+       std::function<void()> dequeue_init_callback = nullptr;
+       std::function<void()> dequeue_cleanup_callback = nullptr;
+
+       bmusb::FrameAllocator *video_frame_allocator = nullptr;
+       std::unique_ptr<bmusb::FrameAllocator> owned_video_frame_allocator;
+       bmusb::frame_callback_t frame_callback = nullptr;
+
+       std::string description, start_url;
+
+       std::mutex browser_mutex;
+       CefRefPtr<CefBrowser> browser;  // Under <browser_mutex>; used by stop_dequeue_thread().
+
+       int timecode = 0;
+};
+
+#endif  // !defined(_CEF_CAPTURE_H)
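
The header above exposes CEFCapture through bmusb's generic CaptureInterface. As a rough
sketch (not part of this commit), driving such a card by hand could look like the following;
the helper name and URL are made up, it assumes the global cef_app from main.cpp has been
created, and the callback parameter list follows bmusb's frame_callback_t:

#include <stdio.h>
#include <chrono>
#include <thread>

#include "cef_capture.h"

using namespace bmusb;

// Hypothetical stand-alone driver (illustration only); inside Nageru,
// Mixer::configure_card() and the mixer thread do the equivalent wiring.
void run_cef_card_for_ten_seconds()
{
        CEFCapture capture("https://example.com/overlay.html", 1280, 720);
        capture.set_card_index(0);
        capture.configure_card();  // Sets up the owned MallocFrameAllocator.

        capture.set_frame_callback([](uint16_t timecode,
                                      FrameAllocator::Frame video_frame, size_t video_offset, VideoFormat video_format,
                                      FrameAllocator::Frame audio_frame, size_t audio_offset, AudioFormat audio_format) {
                // One call per OnPaint() from CEF; BGRA pixels are in video_frame.data.
                printf("frame %d: %ux%u\n", timecode, video_format.width, video_format.height);
                if (video_frame.owner != nullptr) {
                        video_frame.owner->release_frame(video_frame);  // Hand the buffer back to the allocator.
                }
        });

        capture.start_bm_capture();     // Refs CEF and opens the browser.
        std::this_thread::sleep_for(std::chrono::seconds(10));
        capture.stop_dequeue_thread();  // Closes the browser and unrefs CEF.
}
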
diff --git a/main.cpp b/main.cpp
index afac03ed4d3ea81e4c2c813d3bb0b119a097ef07..65eade924ecc15e559c655e9fa47a9864ac65bdb 100644 (file)
--- a/main.cpp
+++ b/main.cpp
@@ -13,15 +13,40 @@ extern "C" {
 #include <QSurfaceFormat>
 #include <string>
 
+#include <cef_app.h>
+#include <cef_browser.h>
+#include <cef_client.h>
+#include <cef_version.h>
+
 #include "basic_stats.h"
+#include "nageru_cef_app.h"
 #include "context.h"
 #include "flags.h"
 #include "image_input.h"
 #include "mainwindow.h"
 #include "mixer.h"
 
+CefRefPtr<NageruCefApp> cef_app;
+
 int main(int argc, char *argv[])
 {
+#ifdef HAVE_CEF
+       // Let CEF have first priority on parsing the command line, because we might be
+       // launched as a CEF sub-process.
+       CefMainArgs main_args(argc, argv);
+       cef_app = CefRefPtr<NageruCefApp>(new NageruCefApp());
+       int err = CefExecuteProcess(main_args, cef_app.get(), nullptr);
+       if (err >= 0) {
+               return err;
+       }
+
+       // CEF wants to use GLib for its main loop, which interferes with Qt's use of it.
+       // The alternative is trying to integrate CEF into Qt's main loop, but that requires
+       // fairly extensive cross-thread communication and that parts of CEF run on Qt's UI
+       // thread.
+       setenv("QT_NO_GLIB", "1", 0);
+#endif
+
        parse_flags(PROGRAM_NAGERU, argc, argv);
 
        if (global_flags.va_display.empty() ||
diff --git a/mixer.cpp b/mixer.cpp
index 55604c15115a0d7d1c84f032ba3a540ebce43df2..557452db91cb9e7176b7df51ebecedb7c286c355 100644 (file)
--- a/mixer.cpp
+++ b/mixer.cpp
@@ -33,6 +33,7 @@
 #include "basic_stats.h"
 #include "bmusb/bmusb.h"
 #include "bmusb/fake_capture.h"
+#include "cef_capture.h"
 #include "chroma_subsampler.h"
 #include "context.h"
 #include "decklink_capture.h"
@@ -427,10 +428,22 @@ Mixer::Mixer(const QSurfaceFormat &format, unsigned num_cards)
        }
        num_video_inputs = video_inputs.size();
 
+       // Same, for HTML inputs.
+       std::vector<CEFCapture *> html_inputs = theme->get_html_inputs();
+       for (unsigned html_card_index = 0; html_card_index < html_inputs.size(); ++card_index, ++html_card_index) {
+               if (card_index >= MAX_VIDEO_CARDS) {
+                       fprintf(stderr, "ERROR: Not enough card slots available for the HTML inputs the theme requested.\n");
+                       exit(1);
+               }
+               configure_card(card_index, html_inputs[html_card_index], CardType::CEF_INPUT, /*output=*/nullptr);
+               html_inputs[html_card_index]->set_card_index(card_index);
+       }
+       num_html_inputs = html_inputs.size();
+
        BMUSBCapture::set_card_connected_callback(bind(&Mixer::bm_hotplug_add, this, _1));
        BMUSBCapture::start_bm_thread();
 
-       for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+       for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
                cards[card_index].queue_length_policy.reset(card_index);
        }
 
@@ -480,7 +493,7 @@ Mixer::~Mixer()
 {
        BMUSBCapture::stop_bm_thread();
 
-       for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+       for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
                {
                        unique_lock<mutex> lock(card_mutex);
                        cards[card_index].should_quit = true;  // Unblock thread.
@@ -514,6 +527,8 @@ void Mixer::configure_card(unsigned card_index, CaptureInterface *capture, CardT
        PixelFormat pixel_format;
        if (card_type == CardType::FFMPEG_INPUT) {
                pixel_format = capture->get_current_pixel_format();
+       } else if (card_type == CardType::CEF_INPUT) {
+               pixel_format = PixelFormat_8BitBGRA;
        } else if (global_flags.ten_bit_input) {
                pixel_format = PixelFormat_10BitYCbCr;
        } else {
@@ -578,6 +593,9 @@ void Mixer::configure_card(unsigned card_index, CaptureInterface *capture, CardT
        case CardType::FFMPEG_INPUT:
                labels.emplace_back("cardtype", "ffmpeg");
                break;
+       case CardType::CEF_INPUT:
+               labels.emplace_back("cardtype", "cef");
+               break;
        default:
                assert(false);
        }
@@ -947,7 +965,7 @@ void Mixer::thread_func()
 
        // Start the actual capture. (We don't want to do it before we're actually ready
        // to process output frames.)
-       for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+       for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
                if (int(card_index) != output_card_index) {
                        cards[card_index].capture->start_bm_capture();
                }
@@ -988,7 +1006,7 @@ void Mixer::thread_func()
 
                handle_hotplugged_cards();
 
-               for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+               for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
                        if (card_index == master_card_index || !has_new_frame[card_index]) {
                                continue;
                        }
@@ -1009,7 +1027,7 @@ void Mixer::thread_func()
                        continue;
                }
 
-               for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+               for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
                        if (!has_new_frame[card_index] || new_frames[card_index].frame->len == 0)
                                continue;
 
@@ -1154,7 +1172,7 @@ start:
                goto start;
        }
 
-       for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+       for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
                CaptureCard *card = &cards[card_index];
                if (card->new_frames.empty()) {  // Starvation.
                        ++card->metric_input_duped_frames;
@@ -1176,7 +1194,7 @@ start:
                output_jitter_history.frame_arrived(output_frame_info.frame_timestamp, output_frame_info.frame_duration, output_frame_info.dropped_frames);
        }
 
-       for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+       for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
                CaptureCard *card = &cards[card_index];
                if (has_new_frame[card_index] &&
                    !input_card_is_master_clock(card_index, master_card_index) &&
@@ -1310,8 +1328,11 @@ void Mixer::render_one_frame(int64_t duration)
        //theme_main_chain.chain->enable_phase_timing(true);
 
        // The theme can't (or at least shouldn't!) call connect_signal() on
-       // each FFmpeg input, so we'll do it here.
-       for (const pair<LiveInputWrapper *, FFmpegCapture *> &conn : theme->get_signal_connections()) {
+       // each FFmpeg or CEF input, so we'll do it here.
+       for (const pair<LiveInputWrapper *, FFmpegCapture *> &conn : theme->get_video_signal_connections()) {
+               conn.first->connect_signal_raw(conn.second->get_card_index(), input_state);
+       }
+       for (const pair<LiveInputWrapper *, CEFCapture *> &conn : theme->get_html_signal_connections()) {
                conn.first->connect_signal_raw(conn.second->get_card_index(), input_state);
        }
 
diff --git a/mixer.h b/mixer.h
index aa3baac2fff99e90b1113f61af65e7f82761f42e..940dc1308c04445c166b3956aeda6b36cbaa3287 100644 (file)
--- a/mixer.h
+++ b/mixer.h
@@ -409,7 +409,8 @@ private:
        enum class CardType {
                LIVE_CARD,
                FAKE_CAPTURE,
-               FFMPEG_INPUT
+               FFMPEG_INPUT,
+               CEF_INPUT,
        };
        void configure_card(unsigned card_index, bmusb::CaptureInterface *capture, CardType card_type, DeckLinkOutput *output);
        void set_output_card_internal(int card_index);  // Should only be called from the mixer thread.
@@ -432,7 +433,7 @@ private:
        std::pair<std::string, std::string> get_channel_color_http(unsigned channel_idx);
 
        HTTPD httpd;
-       unsigned num_cards, num_video_inputs;
+       unsigned num_cards, num_video_inputs, num_html_inputs;
 
        QSurface *mixer_surface, *h264_encoder_surface, *decklink_output_surface;
        std::unique_ptr<movit::ResourcePool> resource_pool;
diff --git a/nageru_cef_app.cpp b/nageru_cef_app.cpp
new file mode 100644 (file)
index 0000000..bd8b9d6
--- /dev/null
@@ -0,0 +1,76 @@
+#include <cef_app.h>
+#include <cef_browser.h>
+#include <cef_client.h>
+#include <cef_version.h>
+#include <QTimer>
+#include <QWidget>
+
+#include "nageru_cef_app.h"
+
+using namespace std;
+
+void NageruCefApp::OnBeforeCommandLineProcessing(
+       const CefString& process_type,
+       CefRefPtr<CefCommandLine> command_line)
+{
+       command_line->AppendSwitch("disable-gpu");
+       command_line->AppendSwitch("disable-gpu-compositing");
+       command_line->AppendSwitch("enable-begin-frame-scheduling");
+}
+
+void NageruCefApp::OnBrowserDestroyed(CefRefPtr<CefBrowser> browser)
+{
+       lock_guard<mutex> lock(cef_mutex);
+       pending_browsers.erase(browser.get());
+       browser_closed_cond.notify_all();
+}
+
+void NageruCefApp::initialize_cef()
+{
+       unique_lock<mutex> lock(cef_mutex);
+       if (cef_thread_refcount++ == 0) {
+               cef_thread = thread(&NageruCefApp::cef_thread_func, this);
+       }
+       cef_initialized_cond.wait(lock, [this]{ return cef_initialized; });
+}
+
+void NageruCefApp::close_browser(CefRefPtr<CefBrowser> browser)
+{
+       unique_lock<mutex> lock(cef_mutex);
+       CefBrowser *raw_ptr = browser.get();
+       pending_browsers.insert(raw_ptr);
+       browser->GetHost()->CloseBrowser(/*force_close=*/true);
+       browser = nullptr;
+       browser_closed_cond.wait(lock, [this, raw_ptr]{ return pending_browsers.count(raw_ptr) == 0; });
+}
+
+void NageruCefApp::unref_cef()
+{
+       unique_lock<mutex> lock(cef_mutex);
+       if (--cef_thread_refcount == 0) {
+               CefPostTask(TID_UI, new CEFTaskAdapter(&CefQuitMessageLoop));
+               cef_thread.join();
+       }
+}
+
+void NageruCefApp::cef_thread_func()
+{
+       CefMainArgs main_args;
+       CefSettings settings;
+       //settings.log_severity = LOGSEVERITY_VERBOSE;
+       settings.windowless_rendering_enabled = true;
+       settings.no_sandbox = true;
+       settings.command_line_args_disabled = false;
+       CefInitialize(main_args, settings, this, nullptr);
+
+       {
+               lock_guard<mutex> lock(cef_mutex);
+               cef_initialized = true;
+       }
+       cef_initialized_cond.notify_all();
+
+       CefRunMessageLoop();
+
+       CefShutdown();
+}
+
diff --git a/nageru_cef_app.h b/nageru_cef_app.h
new file mode 100644 (file)
index 0000000..dff9844
--- /dev/null
@@ -0,0 +1,97 @@
+#ifndef _NAGERU_CEF_APP_H
+#define _NAGERU_CEF_APP_H 1
+
+// NageruCefApp deals with global state around CEF, in particular the global
+// CEF event loop. CEF is pretty picky about which threads everything runs on;
+// in particular, the documentation says CefExecuteProcess, CefInitialize and
+// CefRunMessageLoop must all be called on the main thread (i.e., the first thread
+// created). However, Qt wants to run _its_ event loop on this thread, too,
+// and integrating the two has proved problematic (see also the comment in
+// main.cpp). It seems that as long as you don't have two GLib loops running,
+// it's completely fine in practice to have a separate thread for the main loop
+// (running CefInitialize, CefRunMessageLoop, and finally CefShutdown).
+// Many other tasks (like most things related to interacting with browsers)
+// have to be run from the message loop, but that's fine; CEF gives us tools
+// to post tasks to it.
+
+#include <stdio.h>
+
+#include <cef_app.h>
+#include <cef_browser.h>
+#include <cef_client.h>
+#include <cef_version.h>
+
+#include <atomic>
+#include <condition_variable>
+#include <functional>
+#include <mutex>
+#include <unordered_set>
+#include <thread>
+#include <vector>
+
+// Takes in arbitrary lambdas and converts them to something CefPostTask() will accept.
+class CEFTaskAdapter : public CefTask
+{
+public:
+       CEFTaskAdapter(const std::function<void()>& func)
+               : func(func) {}
+       void Execute() override { func(); }
+
+private:
+       std::function<void()> func;
+
+       IMPLEMENT_REFCOUNTING(CEFTaskAdapter);
+};
+
+// Runs and stops the CEF event loop, and also performs some startup tasks.
+class NageruCefApp : public CefApp, public CefRenderProcessHandler, public CefBrowserProcessHandler {
+public:
+       NageruCefApp() {}
+
+       // Starts up the CEF main loop if it is not already running, and blocks until
+       // CEF is properly initialized. You can call initialize_cef() multiple times;
+       // each call increases the refcount.
+       void initialize_cef();
+
+       // Decreases the refcount; if it reaches zero, shuts down the main loop
+       // and uninitializes CEF.
+       void unref_cef();
+
+       // Closes the given browser, and blocks until it is done closing.
+       //
+       // NOTE: We can't call unref_cef() from close_browser(), since
+       // CefRefPtr<T> does not support move semantics, so it would have a
+       // refcount of either zero or two going into close_browser (not one,
+       // as it should). The latter means the caller would hold on to an extra
+       // reference to the browser (which triggers an assert failure), and the
+       // former would mean that the browser gets deleted before it's closed.
+       void close_browser(CefRefPtr<CefBrowser> browser);
+
+       CefRefPtr<CefRenderProcessHandler> GetRenderProcessHandler() override
+       {
+               return this;
+       }
+
+       CefRefPtr<CefBrowserProcessHandler> GetBrowserProcessHandler() override
+       {
+               return this;
+       }
+
+       void OnBeforeCommandLineProcessing(const CefString& process_type, CefRefPtr<CefCommandLine> command_line);
+
+       void OnBrowserDestroyed(CefRefPtr<CefBrowser> browser) override;
+
+private:
+       void cef_thread_func();
+
+       std::thread cef_thread;
+       std::mutex cef_mutex;
+       int cef_thread_refcount = 0;  // Under <cef_mutex>.
+       bool cef_initialized = false;  // Under <cef_mutex>.
+       std::condition_variable cef_initialized_cond;
+       std::unordered_set<CefBrowser *> pending_browsers;  // Under <cef_mutex>.
+       std::condition_variable browser_closed_cond;
+
+       IMPLEMENT_REFCOUNTING(NageruCefApp);
+};
+
+#endif  // !defined(_NAGERU_CEF_APP_H)
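
As the comment at the top of this header notes, most browser interaction has to happen on
the CEF UI thread; CEFTaskAdapter is the bridge for that, mirroring how unref_cef() posts
CefQuitMessageLoop above. A small hypothetical helper (not part of this commit) showing the
same pattern for an arbitrary lambda:

#include <string>

#include <cef_browser.h>
#include <cef_task.h>

#include "nageru_cef_app.h"

// Illustration only: navigate an already-created browser to a new URL.
// The lambda is wrapped in a CEFTaskAdapter so that it runs on the CEF UI thread.
void load_url_on_cef_thread(CefRefPtr<CefBrowser> browser, const std::string &url)
{
        CefPostTask(TID_UI, new CEFTaskAdapter([browser, url] {
                browser->GetMainFrame()->LoadURL(url);
        }));
}
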
diff --git a/theme.cpp b/theme.cpp
index c03776b4b8e0a8b53bbb9ec04f625bff74a707c5..d43cc3299c529f759fcbd692fa98fec74465aabe 100644 (file)
--- a/theme.cpp
+++ b/theme.cpp
@@ -28,6 +28,7 @@
 #include <utility>
 
 #include "defs.h"
+#include "cef_capture.h"
 #include "ffmpeg_capture.h"
 #include "flags.h"
 #include "image_input.h"
@@ -227,7 +228,29 @@ int EffectChain_add_video_input(lua_State* L)
        if (ret == 1) {
                Theme *theme = get_theme_updata(L);
                LiveInputWrapper **live_input = (LiveInputWrapper **)lua_touserdata(L, -1);
-               theme->register_signal_connection(*live_input, *capture);
+               theme->register_video_signal_connection(*live_input, *capture);
+       }
+       return ret;
+}
+
+int EffectChain_add_html_input(lua_State* L)
+{
+       assert(lua_gettop(L) == 2);
+       Theme *theme = get_theme_updata(L);
+       EffectChain *chain = (EffectChain *)luaL_checkudata(L, 1, "EffectChain");
+       CEFCapture **capture = (CEFCapture **)luaL_checkudata(L, 2, "HTMLInput");
+
+       // These need to be nonowned, so that the LiveInputWrapper still exists
+       // and can feed frames to the right EffectChain even if the Lua code
+       // doesn't care about the object anymore. (If we change this, we'd need
+       // to also unregister the signal connection on __gc.)
+       int ret = wrap_lua_object_nonowned<LiveInputWrapper>(
+               L, "LiveInputWrapper", theme, chain, (*capture)->get_current_pixel_format(),
+               /*override_bounce=*/false, /*deinterlace=*/false);
+       if (ret == 1) {
+               Theme *theme = get_theme_updata(L);
+               LiveInputWrapper **live_input = (LiveInputWrapper **)lua_touserdata(L, -1);
+               theme->register_html_signal_connection(*live_input, *capture);
        }
        return ret;
 }
@@ -386,6 +409,27 @@ int VideoInput_get_signal_num(lua_State* L)
        return 1;
 }
 
+int HTMLInput_new(lua_State* L)
+{
+       assert(lua_gettop(L) == 1);
+       string url = checkstdstring(L, 1);
+       int ret = wrap_lua_object_nonowned<CEFCapture>(L, "HTMLInput", url, global_flags.width, global_flags.height);
+       if (ret == 1) {
+               CEFCapture **capture = (CEFCapture **)lua_touserdata(L, -1);
+               Theme *theme = get_theme_updata(L);
+               theme->register_html_input(*capture);
+       }
+       return ret;
+}
+
+int HTMLInput_get_signal_num(lua_State* L)
+{
+       assert(lua_gettop(L) == 1);
+       CEFCapture **video_input = (CEFCapture **)luaL_checkudata(L, 1, "HTMLInput");
+       lua_pushnumber(L, -1 - (*video_input)->get_card_index());
+       return 1;
+}
+
 int WhiteBalanceEffect_new(lua_State* L)
 {
        assert(lua_gettop(L) == 0);
@@ -566,6 +610,7 @@ const luaL_Reg EffectChain_funcs[] = {
        { "__gc", EffectChain_gc },
        { "add_live_input", EffectChain_add_live_input },
        { "add_video_input", EffectChain_add_video_input },
+       { "add_html_input", EffectChain_add_html_input },
        { "add_effect", EffectChain_add_effect },
        { "finalize", EffectChain_finalize },
        { NULL, NULL }
@@ -593,6 +638,13 @@ const luaL_Reg VideoInput_funcs[] = {
        { NULL, NULL }
 };
 
+const luaL_Reg HTMLInput_funcs[] = {
+       // TODO: reload, set_url, execute_javascript, perhaps change_framerate?
+       { "new", HTMLInput_new },
+       { "get_signal_num", HTMLInput_get_signal_num },
+       { NULL, NULL }
+};
+
 const luaL_Reg WhiteBalanceEffect_funcs[] = {
        { "new", WhiteBalanceEffect_new },
        { "set_float", Effect_set_float },
@@ -906,6 +958,7 @@ Theme::Theme(const string &filename, const vector<string> &search_dirs, Resource
        register_class("LiveInputWrapper", LiveInputWrapper_funcs); 
        register_class("ImageInput", ImageInput_funcs);
        register_class("VideoInput", VideoInput_funcs);
+       register_class("HTMLInput", HTMLInput_funcs);
        register_class("WhiteBalanceEffect", WhiteBalanceEffect_funcs);
        register_class("ResampleEffect", ResampleEffect_funcs);
        register_class("PaddingEffect", PaddingEffect_funcs);
diff --git a/theme.h b/theme.h
index e1c3ed1baa0baa0820705a6c4428ea8d95d9c2d0..c2b93c2301b98d2b092bb9bae03b6c240e826503 100644 (file)
--- a/theme.h
+++ b/theme.h
@@ -15,6 +15,7 @@
 #include "ref_counted_frame.h"
 #include "tweaked_inputs.h"
 
+class CEFCapture;
 class FFmpegCapture;
 class LiveInputWrapper;
 struct InputState;
@@ -68,14 +69,35 @@ public:
                return video_inputs;
        }
 
-       void register_signal_connection(LiveInputWrapper *live_input, FFmpegCapture *capture)
+       // Should be called as part of HTMLInput.new() only.
+       void register_html_input(CEFCapture *capture)
        {
-               signal_connections.emplace_back(live_input, capture);
+               html_inputs.push_back(capture);
        }
 
-       std::vector<std::pair<LiveInputWrapper *, FFmpegCapture *>> get_signal_connections() const
+       std::vector<CEFCapture *> get_html_inputs() const
        {
-               return signal_connections;
+               return html_inputs;
+       }
+
+       void register_video_signal_connection(LiveInputWrapper *live_input, FFmpegCapture *capture)
+       {
+               video_signal_connections.emplace_back(live_input, capture);
+       }
+
+       std::vector<std::pair<LiveInputWrapper *, FFmpegCapture *>> get_video_signal_connections() const
+       {
+               return video_signal_connections;
+       }
+
+       void register_html_signal_connection(LiveInputWrapper *live_input, CEFCapture *capture)
+       {
+               html_signal_connections.emplace_back(live_input, capture);
+       }
+
+       std::vector<std::pair<LiveInputWrapper *, CEFCapture *>> get_html_signal_connections() const
+       {
+               return html_signal_connections;
        }
 
 private:
@@ -93,7 +115,9 @@ private:
        std::map<int, int> signal_to_card_mapping;  // Protected by <map_m>.
 
        std::vector<FFmpegCapture *> video_inputs;
-       std::vector<std::pair<LiveInputWrapper *, FFmpegCapture *>> signal_connections;
+       std::vector<std::pair<LiveInputWrapper *, FFmpegCapture *>> video_signal_connections;
+       std::vector<CEFCapture *> html_inputs;
+       std::vector<std::pair<LiveInputWrapper *, CEFCapture *>> html_signal_connections;
 
        friend class LiveInputWrapper;
 };