#include "httpd.h"
#include "defs.h"
+#include "flags.h"
#include "timebase.h"
struct MHD_Connection;
MHD_OPTION_END);
}
-void HTTPD::add_packet(const AVPacket &pkt, int64_t pts, int64_t dts)
+void HTTPD::add_packet(const AVPacket &pkt, int64_t pts, int64_t dts, PacketDestination destination)
{
unique_lock<mutex> lock(streams_mutex);
- for (Stream *stream : streams) {
- stream->add_packet(pkt, pts, dts);
+ if (destination != DESTINATION_FILE_ONLY) {
+ for (Stream *stream : streams) {
+ stream->add_packet(pkt, pts, dts);
+ }
}
- if (file_mux) {
+ if (file_mux && destination != DESTINATION_HTTP_ONLY) {
file_mux->add_packet(pkt, pts, dts);
}
}
exit(1);
}
- file_mux.reset(new Mux(avctx, width, height));
+ file_mux.reset(new Mux(avctx, width, height, Mux::CODEC_H264));
}
void HTTPD::close_output_file()
}
}
// NOTE(review): this span is an unapplied diff hunk ('+'/'-' prefixed lines),
// and the tail of the constructor (muxer options, header write) is elided
// from this view — code kept byte-identical, annotations only.
//
// Mux constructor: registers one video stream (H.264, or raw NV12 when
// <video_codec> == CODEC_NV12) and one stereo audio stream on <avctx>.
// All stream timestamps are in TIMEBASE units.
-HTTPD::Mux::Mux(AVFormatContext *avctx, int width, int height)
+HTTPD::Mux::Mux(AVFormatContext *avctx, int width, int height, Codec video_codec)
: avctx(avctx)
{
// Pick the encoder matching the requested output codec.
- AVCodec *codec_video = avcodec_find_encoder(AV_CODEC_ID_H264);
+ AVCodec *codec_video = avcodec_find_encoder((video_codec == CODEC_H264) ? AV_CODEC_ID_H264 : AV_CODEC_ID_RAWVIDEO);
avstream_video = avformat_new_stream(avctx, codec_video);
// NOTE(review): the error path prints but does not return/exit here
// (the exit is presumably in the elided part of this hunk); as written,
// a null avstream_video would be dereferenced just below — confirm.
if (avstream_video == nullptr) {
fprintf(stderr, "avformat_new_stream() failed\n");
}
avstream_video->time_base = AVRational{1, TIMEBASE};
avstream_video->codec->codec_type = AVMEDIA_TYPE_VIDEO;
- avstream_video->codec->codec_id = AV_CODEC_ID_H264;
+ if (video_codec == CODEC_H264) {
+ avstream_video->codec->codec_id = AV_CODEC_ID_H264;
+ } else {
+ assert(video_codec == CODEC_NV12);
+ avstream_video->codec->codec_id = AV_CODEC_ID_RAWVIDEO;
// Raw video needs an explicit FourCC tag so readers know the pixel format.
+ avstream_video->codec->codec_tag = avcodec_pix_fmt_to_codec_tag(AV_PIX_FMT_NV12);
+ }
avstream_video->codec->width = width;
avstream_video->codec->height = height;
avstream_video->codec->time_base = AVRational{1, TIMEBASE};
avstream_video->codec->color_range = AVCOL_RANGE_MPEG; // Full vs. limited range (output_ycbcr_format.full_range).
avstream_video->codec->chroma_sample_location = AVCHROMA_LOC_LEFT; // Chroma sample location. See chroma_offset_0[] in Mixer::subsample_chroma().
avstream_video->codec->field_order = AV_FIELD_PROGRESSIVE;
// Containers that demand AVFMT_GLOBALHEADER want codec extradata out-of-band.
// NOTE(review): plain '=' clobbers any previously set codec flags; '|=' is
// the usual idiom — confirm no flags are set before this point.
+ if (avctx->oformat->flags & AVFMT_GLOBALHEADER) {
+ avstream_video->codec->flags = AV_CODEC_FLAG_GLOBAL_HEADER;
+ }
AVCodec *codec_audio = avcodec_find_encoder(AUDIO_OUTPUT_CODEC);
avstream_audio = avformat_new_stream(avctx, codec_audio);
avstream_audio->codec->channels = 2;
avstream_audio->codec->channel_layout = AV_CH_LAYOUT_STEREO;
avstream_audio->codec->time_base = AVRational{1, TIMEBASE};
// Same global-header handling (and same '=' vs '|=' caveat) for audio.
+ if (avctx->oformat->flags & AVFMT_GLOBALHEADER) {
+ avstream_audio->codec->flags = AV_CODEC_FLAG_GLOBAL_HEADER;
+ }
AVDictionary *options = NULL;
vector<pair<string, string>> opts = MUX_OPTS;
HTTPD::Mux::~Mux()
{
	// Flush pending packets and write the container trailer before teardown.
	av_write_trailer(avctx);
	// The AVIOContext and its buffer are allocated manually (av_malloc +
	// avio_alloc_context — see HTTPD::Stream::Stream), and
	// avformat_free_context() does not release custom-I/O contexts, so free
	// them explicitly to avoid leaking on every mux teardown.
	// NOTE(review): assumes avctx->pb is always a custom AVIO context here —
	// confirm no code path opens it with avio_open() instead.
	av_free(avctx->pb->buffer);
	av_free(avctx->pb);
	avformat_free_context(avctx);
}
// Writes one packet into this mux, dropping everything until the first video
// key frame so that the output begins at a decodable point.
// NOTE(review): unapplied diff hunk; the middle of the body (timestamp
// rescale + av_interleaved_write_frame call) is elided from this view —
// code kept byte-identical, annotations only.
void HTTPD::Mux::add_packet(const AVPacket &pkt, int64_t pts, int64_t dts)
{
// Stream index 0 is video (it is created first in the constructor), so only
// a video key frame unblocks output.
+ if (!seen_keyframe && !(pkt.stream_index == 0 && (pkt.flags & AV_PKT_FLAG_KEY))) {
+ // Wait until we see the first (video) key frame.
+ return;
+ }
+ seen_keyframe = true;
+
// Work on a copy so the caller's packet (and its timestamps) stay untouched.
// NOTE(review): av_copy_packet() is deprecated in newer FFmpeg;
// av_packet_ref() is the modern equivalent — worth migrating when convenient.
AVPacket pkt_copy;
av_copy_packet(&pkt_copy, &pkt);
if (pkt.stream_index == 0) {
fprintf(stderr, "av_interleaved_write_frame() failed\n");
exit(1);
}
+
// Release the copy's payload; av_copy_packet() gave it its own reference.
+ av_packet_unref(&pkt_copy);
}
// NOTE(review): unapplied diff hunk; the start of the constructor body
// (opening brace, AVFormatContext allocation) is elided from this view —
// code kept byte-identical, annotations only.
//
// HTTP stream constructor: builds a custom-I/O mux whose serialized output
// is pushed to connected HTTP clients through write_packet_thunk.
HTTPD::Stream::Stream(AVOutputFormat *oformat, int width, int height)
avctx->oformat = oformat;
// The mux serializes into this buffer; write_packet_thunk drains it.
uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, &HTTPD::Stream::write_packet_thunk, nullptr);
+
// --uncompressed-video-to-http sends raw NV12 frames instead of H.264.
+ Mux::Codec video_codec;
+ if (global_flags.uncompressed_video_to_http) {
+ video_codec = Mux::CODEC_NV12;
+ } else {
+ video_codec = Mux::CODEC_H264;
+ }
+
// NOTE(review): '=' overwrites any default avctx flags; '|=' would be the
// safer idiom — confirm this is intentional.
avctx->flags = AVFMT_FLAG_CUSTOM_IO;
- mux.reset(new Mux(avctx, width, height));
+ mux.reset(new Mux(avctx, width, height, video_codec));
}
// NOTE(review): the hunk jumps from this thunk's signature straight into the
// interior of a buffered-data copy loop (presumably reader_callback); the
// intervening code is elided from this view — annotations only.
ssize_t HTTPD::Stream::reader_callback_thunk(void *cls, uint64_t pos, char *buf, size_t max)
if (max >= len) {
// Consume the entire (rest of the) string.
memcpy(buf, s.data() + used_of_buffered_data, len);
// Advance the output cursor — without this, each successive string would
// overwrite the previous one at the start of <buf>.
+ buf += len;
ret += len;
max -= len;
buffered_data.pop_front();
} else {
// We don't need the entire string; just use the first part of it.
memcpy(buf, s.data() + used_of_buffered_data, max);
// Same cursor advance on the partial-copy branch.
+ buf += max;
used_of_buffered_data += max;
ret += max;
max = 0;