+#include "httpd.h"
+
#include <assert.h>
+#include <byteswap.h>
+#include <endian.h>
#include <microhttpd.h>
+#include <netinet/in.h>
#include <stdio.h>
-#include <stdlib.h>
#include <string.h>
-
-extern "C" {
-#include <libavcodec/avcodec.h>
-#include <libavutil/channel_layout.h>
-#include <libavutil/mathematics.h>
-#include <libavutil/mem.h>
-#include <libavutil/pixfmt.h>
-#include <libavutil/rational.h>
-#include <libavutil/samplefmt.h>
-}
-
-#include <vector>
-
-#include "httpd.h"
+#include <sys/time.h>
+#include <time.h>
+#include <memory>
#include "defs.h"
-#include "timebase.h"
+#include "metacube2.h"
+#include "metrics.h"
struct MHD_Connection;
struct MHD_Response;
using namespace std;
-HTTPD::HTTPD(int width, int height)
-	: width(width), height(height)
-{
-}
-
-void HTTPD::start(int port)
+// Default constructor. HTTPD no longer carries video geometry (the
+// in-process mux is gone); it only registers the connected-clients
+// gauge with the global metrics registry.
+HTTPD::HTTPD()
{
-	MHD_start_daemon(MHD_USE_THREAD_PER_CONNECTION | MHD_USE_POLL_INTERNALLY | MHD_USE_DUAL_STACK,
-	                 port,
-	                 nullptr, nullptr,
-	                 &answer_to_connection_thunk, this,
-	                 MHD_OPTION_NOTIFY_COMPLETED, &request_completed_thunk, this,
-	                 MHD_OPTION_END);
+	global_metrics.add("num_connected_clients", &metric_num_connected_clients, Metrics::TYPE_GAUGE);
}
-void HTTPD::add_packet(const AVPacket &pkt, int64_t pts, int64_t dts)
+// Destructor. Ordering matters: quiesce first so no new connections
+// arrive, then wake every per-connection reader thread via
+// Stream::stop() so that MHD_stop_daemon() can join them without
+// blocking forever on a reader stuck in its condition-variable wait.
+HTTPD::~HTTPD()
{
-	unique_lock<mutex> lock(streams_mutex);
-	for (Stream *stream : streams) {
-		stream->add_packet(pkt, pts, dts);
-	}
-	if (file_mux) {
-		file_mux->add_packet(pkt, pts, dts);
+	if (mhd) {
+		MHD_quiesce_daemon(mhd);
+		for (Stream *stream : streams) {
+			stream->stop();
+		}
+		MHD_stop_daemon(mhd);
	}
}
-void HTTPD::open_output_file(const string &filename)
+// Starts the HTTP server on the given port (IPv4+IPv6, one thread per
+// connection). The daemon handle is stored in 'mhd' so the destructor
+// can shut it down cleanly. Failure is a warning, not fatal.
+void HTTPD::start(int port)
{
-	AVFormatContext *avctx = avformat_alloc_context();
-	avctx->oformat = av_guess_format(NULL, filename.c_str(), NULL);
-	assert(filename.size() < sizeof(avctx->filename) - 1);
-	strcpy(avctx->filename, filename.c_str());
-
-	string url = "file:" + filename;
-	int ret = avio_open2(&avctx->pb, url.c_str(), AVIO_FLAG_WRITE, &avctx->interrupt_callback, NULL);
-	if (ret < 0) {
-		char tmp[AV_ERROR_MAX_STRING_SIZE];
-		fprintf(stderr, "%s: avio_open2() failed: %s\n", filename.c_str(), av_make_error_string(tmp, sizeof(tmp), ret));
-		exit(1);
+	// NOTE(review): MHD_OPTION_NOTIFY_COMPLETED is passed with a nullptr
+	// callback now that request_completed_thunk is gone — per-stream
+	// cleanup happens in free_stream() when the response is destroyed.
+	// Consider dropping the option entirely instead of passing nullptr.
+	mhd = MHD_start_daemon(MHD_USE_THREAD_PER_CONNECTION | MHD_USE_POLL_INTERNALLY | MHD_USE_DUAL_STACK,
+	                       port,
+	                       nullptr, nullptr,
+	                       &answer_to_connection_thunk, this,
+	                       MHD_OPTION_NOTIFY_COMPLETED, nullptr, this,
+	                       MHD_OPTION_END);
+	if (mhd == nullptr) {
+		fprintf(stderr, "Warning: Could not open HTTP server. (Port already in use?)\n");
	}
-
-	file_mux.reset(new Mux(avctx, width, height));
}
-void HTTPD::close_output_file()
+// Fans one chunk of pre-encoded stream bytes out to every connected
+// client under the streams mutex. 'keyframe' marks chunks that are
+// safe stream-start points (clients begin sending only from one).
+void HTTPD::add_data(const char *buf, size_t size, bool keyframe)
{
-	file_mux.reset();
+	unique_lock<mutex> lock(streams_mutex);
+	for (Stream *stream : streams) {
+		stream->add_data(buf, size, keyframe ? Stream::DATA_TYPE_KEYFRAME : Stream::DATA_TYPE_OTHER);
+	}
}
int HTTPD::answer_to_connection_thunk(void *cls, MHD_Connection *connection,
                                      const char *version, const char *upload_data,
                                      size_t *upload_data_size, void **con_cls)
{
+	// NOTE(review): this hunk has elided context — the signature shown is
+	// the thunk's, yet the body below uses 'url' and the 'header' member,
+	// which must come from lines not visible in this patch. Verify against
+	// the full file before relying on this view.
-	AVOutputFormat *oformat = av_guess_format(STREAM_MUX_NAME, nullptr, nullptr);
-	assert(oformat != nullptr);
-	HTTPD::Stream *stream = new HTTPD::Stream(oformat, width, height);
+	// See if the URL ends in “.metacube”.
+	HTTPD::Stream::Framing framing;
+	if (strstr(url, ".metacube") == url + strlen(url) - strlen(".metacube")) {
+		framing = HTTPD::Stream::FRAMING_METACUBE;
+	} else {
+		framing = HTTPD::Stream::FRAMING_RAW;
+	}
+
+	// Prometheus-style metrics endpoint; answered inline, not streamed.
+	if (strcmp(url, "/metrics") == 0) {
+		string contents = global_metrics.serialize();
+		MHD_Response *response = MHD_create_response_from_buffer(
+			contents.size(), &contents[0], MHD_RESPMEM_MUST_COPY);
+		MHD_add_response_header(response, "Content-type", "text/plain");
+		int ret = MHD_queue_response(connection, MHD_HTTP_OK, response);
+		MHD_destroy_response(response);  // Only decreases the refcount; actual free is after the request is done.
+		return ret;
+	}
+
+	// Every other URL is a stream client: register it, and seed it with
+	// the stream header so it can start decoding from the next keyframe.
+	HTTPD::Stream *stream = new HTTPD::Stream(this, framing);
+	stream->add_data(header.data(), header.size(), Stream::DATA_TYPE_HEADER);
	{
		unique_lock<mutex> lock(streams_mutex);
		streams.insert(stream);
	}
+	++metric_num_connected_clients;
	*con_cls = stream;
	// Does not strictly have to be equal to MUX_BUFFER_SIZE.
	MHD_Response *response = MHD_create_response_from_callback(
		(size_t)-1, MUX_BUFFER_SIZE, &HTTPD::Stream::reader_callback_thunk, stream, &HTTPD::free_stream);
+	// TODO: Content-type?
+	if (framing == HTTPD::Stream::FRAMING_METACUBE) {
+		MHD_add_response_header(response, "Content-encoding", "metacube");
+	}
+
	int ret = MHD_queue_response(connection, MHD_HTTP_OK, response);
-	//MHD_destroy_response(response);
+	MHD_destroy_response(response);  // Only decreases the refcount; actual free is after the request is done.
	return ret;
}
+// Called by libmicrohttpd as the response's "crfc" free callback when the
+// client disconnects; replaces the old NOTIFY_COMPLETED handler below.
void HTTPD::free_stream(void *cls)
{
-	// FIXME: When is this actually called, if ever?
-	// Also, shouldn't we remove it from streams?
	HTTPD::Stream *stream = (HTTPD::Stream *)cls;
-	delete stream;
-}
-
-void HTTPD::request_completed_thunk(void *cls, struct MHD_Connection *connection, void **con_cls, enum MHD_RequestTerminationCode toe)
-{
-	HTTPD *httpd = (HTTPD *)cls;
-	return httpd->request_completed(connection, con_cls, toe);
-}
-
-void HTTPD::request_completed(struct MHD_Connection *connection, void **con_cls, enum MHD_RequestTerminationCode toe)
-{
-	if (con_cls == nullptr) {
-		// Request was never set up.
-		return;
-	}
-	HTTPD::Stream *stream = (HTTPD::Stream *)*con_cls;
+	HTTPD *httpd = stream->get_parent();
	{
-		unique_lock<mutex> lock(streams_mutex);
+		unique_lock<mutex> lock(httpd->streams_mutex);
+		// NOTE(review): 'stream' is deleted before streams.erase(stream).
+		// erase() only compares the pointer value, but using an invalidated
+		// pointer is formally undefined behavior — safer to erase first,
+		// then delete.
		delete stream;
-		streams.erase(stream);
+		httpd->streams.erase(stream);
	}
-}
-
-HTTPD::Mux::Mux(AVFormatContext *avctx, int width, int height)
-	: avctx(avctx)
-{
-	AVCodec *codec_video = avcodec_find_encoder(AV_CODEC_ID_H264);
-	avstream_video = avformat_new_stream(avctx, codec_video);
-	if (avstream_video == nullptr) {
-		fprintf(stderr, "avformat_new_stream() failed\n");
-		exit(1);
-	}
-	avstream_video->time_base = AVRational{1, TIMEBASE};
-	avstream_video->codec->codec_type = AVMEDIA_TYPE_VIDEO;
-	avstream_video->codec->codec_id = AV_CODEC_ID_H264;
-	avstream_video->codec->width = width;
-	avstream_video->codec->height = height;
-	avstream_video->codec->time_base = AVRational{1, TIMEBASE};
-	avstream_video->codec->ticks_per_frame = 1; // or 2?
-
-	// Colorspace details. Closely correspond to settings in EffectChain_finalize,
-	// as noted in each comment.
-	// Note that the H.264 stream also contains this information and depending on the
-	// mux, this might simply get ignored. See sps_rbsp().
-	avstream_video->codec->color_primaries = AVCOL_PRI_BT709; // RGB colorspace (inout_format.color_space).
-	avstream_video->codec->color_trc = AVCOL_TRC_UNSPECIFIED; // Gamma curve (inout_format.gamma_curve).
-	avstream_video->codec->colorspace = AVCOL_SPC_SMPTE170M; // YUV colorspace (output_ycbcr_format.luma_coefficients).
-	avstream_video->codec->color_range = AVCOL_RANGE_MPEG; // Full vs. limited range (output_ycbcr_format.full_range).
-	avstream_video->codec->chroma_sample_location = AVCHROMA_LOC_LEFT; // Chroma sample location. See chroma_offset_0[] in Mixer::subsample_chroma().
-	avstream_video->codec->field_order = AV_FIELD_PROGRESSIVE;
-	if (avctx->oformat->flags & AVFMT_GLOBALHEADER) {
-		avstream_video->codec->flags = AV_CODEC_FLAG_GLOBAL_HEADER;
-	}
-
-	AVCodec *codec_audio = avcodec_find_encoder(AUDIO_OUTPUT_CODEC);
-	avstream_audio = avformat_new_stream(avctx, codec_audio);
-	if (avstream_audio == nullptr) {
-		fprintf(stderr, "avformat_new_stream() failed\n");
-		exit(1);
-	}
-	avstream_audio->time_base = AVRational{1, TIMEBASE};
-	avstream_audio->codec->bit_rate = AUDIO_OUTPUT_BIT_RATE;
-	avstream_audio->codec->sample_rate = OUTPUT_FREQUENCY;
-	avstream_audio->codec->sample_fmt = AUDIO_OUTPUT_SAMPLE_FMT;
-	avstream_audio->codec->channels = 2;
-	avstream_audio->codec->channel_layout = AV_CH_LAYOUT_STEREO;
-	avstream_audio->codec->time_base = AVRational{1, TIMEBASE};
-	if (avctx->oformat->flags & AVFMT_GLOBALHEADER) {
-		avstream_audio->codec->flags = AV_CODEC_FLAG_GLOBAL_HEADER;
-	}
-
-	AVDictionary *options = NULL;
-	vector<pair<string, string>> opts = MUX_OPTS;
-	for (pair<string, string> opt : opts) {
-		av_dict_set(&options, opt.first.c_str(), opt.second.c_str(), 0);
-	}
-	if (avformat_write_header(avctx, &options) < 0) {
-		fprintf(stderr, "avformat_write_header() failed\n");
-		exit(1);
-	}
-}
-
-HTTPD::Mux::~Mux()
-{
-	av_write_trailer(avctx);
-	av_free(avctx->pb);
-	avformat_free_context(avctx);
-}
-
-void HTTPD::Mux::add_packet(const AVPacket &pkt, int64_t pts, int64_t dts)
-{
-	if (!seen_keyframe && !(pkt.stream_index == 0 && (pkt.flags & AV_PKT_FLAG_KEY))) {
-		// Wait until we see the first (video) key frame.
-		return;
-	}
-	seen_keyframe = true;
-
-	AVPacket pkt_copy;
-	av_copy_packet(&pkt_copy, &pkt);
-	if (pkt.stream_index == 0) {
-		pkt_copy.pts = av_rescale_q(pts, AVRational{1, TIMEBASE}, avstream_video->time_base);
-		pkt_copy.dts = av_rescale_q(dts, AVRational{1, TIMEBASE}, avstream_video->time_base);
-	} else if (pkt.stream_index == 1) {
-		pkt_copy.pts = av_rescale_q(pts, AVRational{1, TIMEBASE}, avstream_audio->time_base);
-		pkt_copy.dts = av_rescale_q(dts, AVRational{1, TIMEBASE}, avstream_audio->time_base);
-	} else {
-		assert(false);
-	}
-
-	if (av_interleaved_write_frame(avctx, &pkt_copy) < 0) {
-		fprintf(stderr, "av_interleaved_write_frame() failed\n");
-		exit(1);
-	}
-
-	av_packet_unref(&pkt_copy);
-}
-
-HTTPD::Stream::Stream(AVOutputFormat *oformat, int width, int height)
-{
-	AVFormatContext *avctx = avformat_alloc_context();
-	avctx->oformat = oformat;
-	uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
-	avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, &HTTPD::Stream::write_packet_thunk, nullptr);
-	avctx->flags = AVFMT_FLAG_CUSTOM_IO;
-
-	mux.reset(new Mux(avctx, width, height));
+	// The client is gone; update the Prometheus gauge.
+	--httpd->metric_num_connected_clients;
}
ssize_t HTTPD::Stream::reader_callback_thunk(void *cls, uint64_t pos, char *buf, size_t max)
ssize_t HTTPD::Stream::reader_callback(uint64_t pos, char *buf, size_t max)
{
+	// NOTE(review): context is elided in this hunk — the thunk body above
+	// and the declarations of 's' and 'len' (presumably the front element
+	// of buffered_data and its remaining length) are not shown here.
	unique_lock<mutex> lock(buffer_mutex);
-	has_buffered_data.wait(lock, [this]{ return !buffered_data.empty(); });
+	// Also wake on should_quit so Stream::stop() can unblock a reader
+	// with no pending data during shutdown; returning 0 ends the response.
+	has_buffered_data.wait(lock, [this]{ return should_quit || !buffered_data.empty(); });
+	if (should_quit) {
+		return 0;
+	}
	ssize_t ret = 0;
	while (max > 0 && !buffered_data.empty()) {
		if (max >= len) {
			// Consume the entire (rest of the) string.
			memcpy(buf, s.data() + used_of_buffered_data, len);
+			// Advance the write position — previously, successive queued
+			// chunks copied in one callback overwrote each other at buf[0].
+			buf += len;
			ret += len;
			max -= len;
			buffered_data.pop_front();
		} else {
			// We don't need the entire string; just use the first part of it.
			memcpy(buf, s.data() + used_of_buffered_data, max);
+			buf += max;
			used_of_buffered_data += max;
			ret += max;
			max = 0;
	return ret;
}
-void HTTPD::Stream::add_packet(const AVPacket &pkt, int64_t pts, int64_t dts)
+// Appends one data chunk to this client's outgoing queue, applying
+// Metacube2 framing when requested, and wakes the reader thread.
+// Non-keyframe data is dropped until the first keyframe has been seen,
+// so the client always starts at a decodable point.
+void HTTPD::Stream::add_data(const char *buf, size_t buf_size, HTTPD::Stream::DataType data_type)
{
-	mux->add_packet(pkt, pts, dts);
-}
+	if (buf_size == 0) {
+		return;
+	}
+	if (data_type == DATA_TYPE_KEYFRAME) {
+		seen_keyframe = true;
+	} else if (data_type == DATA_TYPE_OTHER && !seen_keyframe) {
+		// Start sending only once we see a keyframe.
+		return;
+	}
-int HTTPD::Stream::write_packet_thunk(void *opaque, uint8_t *buf, int buf_size)
-{
-	HTTPD::Stream *stream = (HTTPD::Stream *)opaque;
-	return stream->write_packet(buf, buf_size);
+	unique_lock<mutex> lock(buffer_mutex);
+
+	// Prepend a Metacube2 block header (sync marker, payload size, flags,
+	// CRC) — all multi-byte fields in network byte order.
+	if (framing == FRAMING_METACUBE) {
+		metacube2_block_header hdr;
+		memcpy(hdr.sync, METACUBE2_SYNC, sizeof(hdr.sync));
+		hdr.size = htonl(buf_size);
+		int flags = 0;
+		if (data_type == DATA_TYPE_HEADER) {
+			flags |= METACUBE_FLAGS_HEADER;
+		} else if (data_type == DATA_TYPE_OTHER) {
+			flags |= METACUBE_FLAGS_NOT_SUITABLE_FOR_STREAM_START;
+		}
+		hdr.flags = htons(flags);
+		hdr.csum = htons(metacube2_compute_crc(&hdr));
+		buffered_data.emplace_back((char *)&hdr, sizeof(hdr));
+	}
+	buffered_data.emplace_back(buf, buf_size);
+
+	// Send a Metacube2 timestamp every keyframe.
+	if (framing == FRAMING_METACUBE && data_type == DATA_TYPE_KEYFRAME) {
+		timespec now;
+		clock_gettime(CLOCK_REALTIME, &now);
+
+		metacube2_timestamp_packet packet;
+		packet.type = htobe64(METACUBE_METADATA_TYPE_ENCODER_TIMESTAMP);
+		packet.tv_sec = htobe64(now.tv_sec);
+		packet.tv_nsec = htobe64(now.tv_nsec);
+
+		metacube2_block_header hdr;
+		memcpy(hdr.sync, METACUBE2_SYNC, sizeof(hdr.sync));
+		hdr.size = htonl(sizeof(packet));
+		hdr.flags = htons(METACUBE_FLAGS_METADATA);
+		hdr.csum = htons(metacube2_compute_crc(&hdr));
+		buffered_data.emplace_back((char *)&hdr, sizeof(hdr));
+		buffered_data.emplace_back((char *)&packet, sizeof(packet));
+	}
+
+	has_buffered_data.notify_all();
}
-int HTTPD::Stream::write_packet(uint8_t *buf, int buf_size)
+// Asks the reader callback to terminate: sets should_quit under the
+// buffer lock and wakes any thread blocked in reader_callback(), so
+// the per-connection thread can exit during shutdown.
+void HTTPD::Stream::stop()
{
	unique_lock<mutex> lock(buffer_mutex);
-	buffered_data.emplace_back((char *)buf, buf_size);
-	has_buffered_data.notify_all();
-	return buf_size;
+	should_quit = true;
+	has_buffered_data.notify_all();
}
-