OBJS += chroma_subsampler.o v210_converter.o mixer.o basic_stats.o metrics.o pbo_frame_allocator.o context.o ref_counted_frame.o theme.o httpd.o flags.o image_input.o alsa_output.o disk_space_estimator.o print_latency.o timecode_renderer.o tweaked_inputs.o $(AUDIO_MIXER_OBJS)
# Streaming and encoding objects
-OBJS += quicksync_encoder.o x264_encoder.o x264_dynamic.o x264_speed_control.o video_encoder.o metacube2.o mux.o audio_encoder.o ffmpeg_raii.o ffmpeg_util.o
+OBJS += quicksync_encoder.o x264_encoder.o x264_dynamic.o x264_speed_control.o video_encoder.o metacube2.o mux.o audio_encoder.o ffmpeg_raii.o ffmpeg_util.o json.pb.o
# DeckLink
OBJS += decklink_capture.o decklink_util.o decklink_output.o decklink/DeckLinkAPIDispatch.o
-KAERU_OBJS = kaeru.o x264_encoder.o mux.o basic_stats.o metrics.o flags.o audio_encoder.o x264_speed_control.o print_latency.o x264_dynamic.o ffmpeg_raii.o ref_counted_frame.o ffmpeg_capture.o ffmpeg_util.o httpd.o metacube2.o
+KAERU_OBJS = kaeru.o x264_encoder.o mux.o basic_stats.o metrics.o flags.o audio_encoder.o x264_speed_control.o print_latency.o x264_dynamic.o ffmpeg_raii.o ref_counted_frame.o ffmpeg_capture.o ffmpeg_util.o httpd.o json.pb.o metacube2.o
# bmusb
ifeq ($(EMBEDDED_BMUSB),yes)
mainwindow.o: midi_mapping.pb.h
midi_mapper.o: midi_mapping.pb.h
midi_mapping_dialog.o: ui_midi_mapping.h midi_mapping.pb.h
+httpd.o: json.pb.h
DEPS=$(OBJS:.o=.d) $(BM_OBJS:.o=.d) $(KAERU_OBJS:.o=.d)
-include $(DEPS)
MHD_destroy_response(response); // Only decreases the refcount; actual free is after the request is done.
return ret;
}
+ if (endpoints.count(url)) {
+ pair<string, string> contents_and_type = endpoints[url]();
+ MHD_Response *response = MHD_create_response_from_buffer(
+ contents_and_type.first.size(), &contents_and_type.first[0], MHD_RESPMEM_MUST_COPY);
+ MHD_add_response_header(response, "Content-type", contents_and_type.second.c_str());
+ int ret = MHD_queue_response(connection, MHD_HTTP_OK, response);
+ MHD_destroy_response(response); // Only decreases the refcount; actual free is after the request is done.
+ return ret;
+ }
HTTPD::Stream *stream = new HTTPD::Stream(this, framing);
stream->add_data(header.data(), header.size(), Stream::DATA_TYPE_HEADER);
#include <atomic>
#include <condition_variable>
#include <deque>
+#include <functional>
#include <mutex>
#include <set>
#include <string>
+#include <unordered_map>
+#include <utility>
struct MHD_Connection;
struct MHD_Daemon;
class HTTPD {
public:
+ // Returns a pair of content and content-type.
+ using EndpointCallback = std::function<std::pair<std::string, std::string>()>;
+
HTTPD();
~HTTPD();
header = data;
}
+	// Should be called before start(); the endpoints map has no mutex
+	// protecting it, so it must not be mutated once request threads can run.
+ void add_endpoint(const std::string &url, const EndpointCallback &callback) {
+ endpoints[url] = callback;
+ }
+
void start(int port);
void add_data(const char *buf, size_t size, bool keyframe);
int64_t get_num_connected_clients() const {
MHD_Daemon *mhd = nullptr;
std::mutex streams_mutex;
std::set<Stream *> streams; // Not owned.
+ std::unordered_map<std::string, EndpointCallback> endpoints;
std::string header;
// Metrics.
--- /dev/null
+// Messages used to produce JSON (it's the simplest way we can create valid
+// JSON without pulling in an external JSON library).
+
+// Top-level list of all exported channels; serialized with
+// MessageToJsonString() and served from the /channels HTTP endpoint.
+message Channels {
+	repeated Channel channel = 1;
+}
+
+// One mixer channel as exposed over HTTP. All fields are filled in from the
+// theme (see Mixer::get_channels_json()).
+message Channel {
+	required int32 index = 1;   // Channel number, as used by the theme.
+	required string name = 2;   // Human-readable name from Theme::get_channel_name().
+	required string color = 3;  // Display color from Theme::get_channel_color().
+}
#include "v210_converter.h"
#include "video_encoder.h"
+#undef Status
+#include <google/protobuf/util/json_util.h>
+#include "json.pb.h"
+
class IDeckLink;
class QOpenGLContext;
// Must be instantiated after VideoEncoder has initialized global_flags.use_zerocopy.
theme.reset(new Theme(global_flags.theme_filename, global_flags.theme_dirs, resource_pool.get(), num_cards));
+ httpd.add_endpoint("/channels", bind(&Mixer::get_channels_json, this));
+ for (int channel_idx = 2; channel_idx < theme->get_num_channels(); ++channel_idx) {
+ char url[256];
+ snprintf(url, sizeof(url), "/channels/%d/color", channel_idx);
+ httpd.add_endpoint(url, bind(&Mixer::get_channel_color_http, this, unsigned(channel_idx)));
+ }
+
// Start listening for clients only once VideoEncoder has written its header, if any.
httpd.start(global_flags.http_port);
#endif
}
+// Builds a JSON description of all theme channels, using protobuf's JSON
+// serializer so we don't need an external JSON library.
+// Returns the (contents, MIME type) pair expected by HTTPD endpoint callbacks.
+pair<string, string> Mixer::get_channels_json()
+{
+	Channels ret;
+	// Channels 0 and 1 are skipped; presumably they are the built-in
+	// live/preview outputs rather than selectable inputs -- TODO confirm.
+	for (int channel_idx = 2; channel_idx < theme->get_num_channels(); ++channel_idx) {
+		Channel *channel = ret.add_channel();
+		channel->set_index(channel_idx);
+		channel->set_name(theme->get_channel_name(channel_idx));
+		channel->set_color(theme->get_channel_color(channel_idx));
+	}
+	string contents;
+	google::protobuf::util::MessageToJsonString(ret, &contents);  // Ignore any errors.
+	// RFC 8259 registers application/json for JSON; "text/json" is nonstandard.
+	return make_pair(contents, "application/json");
+}
+
+// Endpoint callback: serve the theme's color string for the given channel
+// as plain text.
+pair<string, string> Mixer::get_channel_color_http(unsigned channel_idx)
+{
+	string color = theme->get_channel_color(channel_idx);
+	return make_pair(color, "text/plain");
+}
Mixer::OutputFrameInfo Mixer::get_one_frame_from_each_card(unsigned master_card_index, bool master_card_is_output, CaptureCard::NewFrame new_frames[MAX_VIDEO_CARDS], bool has_new_frame[MAX_VIDEO_CARDS])
{
void release_display_frame(DisplayFrame *frame);
double pts() { return double(pts_int) / TIMEBASE; }
void trim_queue(CaptureCard *card, size_t safe_queue_length);
+ std::pair<std::string, std::string> get_channels_json();
+ std::pair<std::string, std::string> get_channel_color_http(unsigned channel_idx);
HTTPD httpd;
unsigned num_cards, num_video_inputs;