From 9a5e0639486a26685ea623c7c85e9ae87f8ae656 Mon Sep 17 00:00:00 2001
From: "Steinar H. Gunderson"
Date: Mon, 20 Aug 2018 23:43:29 +0200
Subject: [PATCH] Move stream generation into a new class VideoStream, which
 will also soon deal with the GPU.

---
 Makefile         |   2 +-
 player.cpp       |  84 +-----------------------------
 player.h         |  11 +---
 video_stream.cpp | 129 +++++++++++++++++++++++++++++++++++++++++++++++
 video_stream.h   |  55 ++++++++++++++++++++
 5 files changed, 189 insertions(+), 92 deletions(-)
 create mode 100644 video_stream.cpp
 create mode 100644 video_stream.h

diff --git a/Makefile b/Makefile
index 305516e..8e9edd2 100644
--- a/Makefile
+++ b/Makefile
@@ -10,7 +10,7 @@ OBJS_WITH_MOC = mainwindow.o jpeg_frame_view.o clip_list.o
 OBJS += $(OBJS_WITH_MOC)
 OBJS += $(OBJS_WITH_MOC:.o=.moc.o)
 
-OBJS += ffmpeg_raii.o main.o player.o httpd.o mux.o metacube2.o
+OBJS += ffmpeg_raii.o main.o player.o httpd.o mux.o metacube2.o video_stream.o
 
 %.o: %.cpp
 	$(CXX) -MMD -MP $(CPPFLAGS) $(CXXFLAGS) -o $@ -c $<
diff --git a/player.cpp b/player.cpp
index e399891..1533977 100644
--- a/player.cpp
+++ b/player.cpp
@@ -7,11 +7,6 @@
 
 #include 
 
-extern "C" {
-#include 
-#include 
-}
-
 #include "clip_list.h"
 #include "defs.h"
 #include "ffmpeg_raii.h"
@@ -27,29 +22,6 @@ extern mutex frame_mu;
 extern vector<int64_t> frames[MAX_STREAMS];
 extern HTTPD *global_httpd;
 
-namespace {
-
-string read_file(const string &filename)
-{
-	FILE *fp = fopen(filename.c_str(), "rb");
-	if (fp == nullptr) {
-		perror(filename.c_str());
-		return "";
-	}
-
-	fseek(fp, 0, SEEK_END);
-	long len = ftell(fp);
-	rewind(fp);
-
-	string ret;
-	ret.resize(len);
-	fread(&ret[0], len, 1, fp);
-	fclose(fp);
-	return ret;
-}
-
-}  // namespace
-
 void Player::thread_func()
 {
 	for ( ;; ) {
@@ -114,13 +86,7 @@ void Player::thread_func()
 
 		// FIXME: Vaguely less crazy pts, perhaps.
 		double pts_float = fmod(duration<double>(next_frame_start.time_since_epoch()).count(), 86400.0f);
 		int64_t pts = lrint(pts_float * TIMEBASE);
-		string jpeg = read_file(filename_for_frame(stream_idx, next_pts));
-		AVPacket pkt;
-		av_init_packet(&pkt);
-		pkt.stream_index = 0;
-		pkt.data = (uint8_t *)jpeg.data();
-		pkt.size = jpeg.size();
-		stream_mux->add_packet(pkt, pts, pts);
+		video_stream.schedule_original_frame(pts, stream_idx, next_pts);
 
 		{
@@ -136,7 +102,7 @@ void Player::thread_func()
 Player::Player(JPEGFrameView *destination)
 	: destination(destination)
 {
-	open_output_stream();
+	video_stream.start();
 
 	thread(&Player::thread_func, this).detach();
 }
@@ -197,49 +163,3 @@ void Player::override_angle(unsigned stream_idx)
 	}
 	destination->setFrame(stream_idx, *it);
 }
-
-void Player::open_output_stream()
-{
-	AVFormatContext *avctx = avformat_alloc_context();
-	avctx->oformat = av_guess_format("nut", nullptr, nullptr);
-
-	uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
-	avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
-	avctx->pb->write_data_type = &Player::write_packet2_thunk;
-	avctx->pb->ignore_boundary_point = 1;
-
-	Mux::Codec video_codec = Mux::CODEC_MJPEG;
-
-	avctx->flags = AVFMT_FLAG_CUSTOM_IO;
-
-	string video_extradata;
-
-	constexpr int width = 1280, height = 720;  // Doesn't matter for MJPEG.
-	stream_mux.reset(new Mux(avctx, width, height, video_codec, video_extradata, /*audio_codec_parameters=*/nullptr, COARSE_TIMEBASE,
-		/*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));
-}
-
-int Player::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
-{
-	Player *player = (Player *)opaque;
-	return player->write_packet2(buf, buf_size, type, time);
-}
-
-int Player::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
-{
-	if (type == AVIO_DATA_MARKER_SYNC_POINT || type == AVIO_DATA_MARKER_BOUNDARY_POINT) {
-		seen_sync_markers = true;
-	} else if (type == AVIO_DATA_MARKER_UNKNOWN && !seen_sync_markers) {
-		// We don't know if this is a keyframe or not (the muxer could
-		// avoid marking it), so we just have to make the best of it.
-		type = AVIO_DATA_MARKER_SYNC_POINT;
-	}
-
-	if (type == AVIO_DATA_MARKER_HEADER) {
-		stream_mux_header.append((char *)buf, buf_size);
-		global_httpd->set_header(stream_mux_header);
-	} else {
-		global_httpd->add_data((char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
-	}
-	return buf_size;
-}
diff --git a/player.h b/player.h
index ab8f078..3321a30 100644
--- a/player.h
+++ b/player.h
@@ -2,17 +2,13 @@
 #define _PLAYER_H 1
 
 #include "clip_list.h"
+#include "video_stream.h"
 
 #include 
 #include 
 #include 
 
-extern "C" {
-#include 
-}
-
 class JPEGFrameView;
-class Mux;
 
 class Player {
 public:
@@ -45,10 +41,7 @@ private:
 	bool playing = false;  // Under queue_state_mu.
 	int override_stream_idx = -1;  // Under queue_state_mu.
 
-	// For streaming.
-	std::unique_ptr<Mux> stream_mux;  // To HTTP.
-	std::string stream_mux_header;
-	bool seen_sync_markers = false;
+	VideoStream video_stream;
 };
 
 #endif  // !defined(_PLAYER_H)
diff --git a/video_stream.cpp b/video_stream.cpp
new file mode 100644
index 0000000..65f52c0
--- /dev/null
+++ b/video_stream.cpp
@@ -0,0 +1,129 @@
+#include "video_stream.h"
+
+extern "C" {
+#include 
+#include 
+}
+
+#include "httpd.h"
+#include "jpeg_frame_view.h"
+#include "mux.h"
+#include "player.h"
+
+using namespace std;
+
+extern HTTPD *global_httpd;
+
+namespace {
+
+string read_file(const string &filename)
+{
+	FILE *fp = fopen(filename.c_str(), "rb");
+	if (fp == nullptr) {
+		perror(filename.c_str());
+		return "";
+	}
+
+	fseek(fp, 0, SEEK_END);
+	long len = ftell(fp);
+	rewind(fp);
+
+	string ret;
+	ret.resize(len);
+	fread(&ret[0], len, 1, fp);
+	fclose(fp);
+	return ret;
+}
+
+}  // namespace
+
+void VideoStream::start()
+{
+	AVFormatContext *avctx = avformat_alloc_context();
+	avctx->oformat = av_guess_format("nut", nullptr, nullptr);
+
+	uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
+	avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
+	avctx->pb->write_data_type = &VideoStream::write_packet2_thunk;
+	avctx->pb->ignore_boundary_point = 1;
+
+	Mux::Codec video_codec = Mux::CODEC_MJPEG;
+
+	avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+
+	string video_extradata;
+
+	constexpr int width = 1280, height = 720;  // Doesn't matter for MJPEG.
+	stream_mux.reset(new Mux(avctx, width, height, video_codec, video_extradata, /*audio_codec_parameters=*/nullptr, COARSE_TIMEBASE,
+		/*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));
+
+
+	encode_thread = thread(&VideoStream::encode_thread_func, this);
+}
+
+void VideoStream::stop()
+{
+	encode_thread.join();
+}
+
+void VideoStream::schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts)
+{
+	unique_lock<mutex> lock(queue_lock);
+	QueuedFrame qf;
+	qf.output_pts = output_pts;
+	qf.stream_idx = stream_idx;
+	qf.input_first_pts = input_pts;
+	frame_queue.push_back(qf);
+	queue_nonempty.notify_all();
+}
+
+void VideoStream::encode_thread_func()
+{
+	for ( ;; ) {
+		QueuedFrame qf;
+		{
+			unique_lock<mutex> lock(queue_lock);
+			queue_nonempty.wait(lock, [this]{
+				return !frame_queue.empty();
+			});
+			qf = frame_queue.front();
+			frame_queue.pop_front();
+		}
+
+		if (qf.type == QueuedFrame::ORIGINAL) {
+			string jpeg = read_file(filename_for_frame(qf.stream_idx, qf.input_first_pts));
+			AVPacket pkt;
+			av_init_packet(&pkt);
+			pkt.stream_index = 0;
+			pkt.data = (uint8_t *)jpeg.data();
+			pkt.size = jpeg.size();
+			stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+		}
+	}
+}
+
+int VideoStream::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
+{
+	VideoStream *video_stream = (VideoStream *)opaque;
+	return video_stream->write_packet2(buf, buf_size, type, time);
+}
+
+int VideoStream::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
+{
+	if (type == AVIO_DATA_MARKER_SYNC_POINT || type == AVIO_DATA_MARKER_BOUNDARY_POINT) {
+		seen_sync_markers = true;
+	} else if (type == AVIO_DATA_MARKER_UNKNOWN && !seen_sync_markers) {
+		// We don't know if this is a keyframe or not (the muxer could
+		// avoid marking it), so we just have to make the best of it.
+		type = AVIO_DATA_MARKER_SYNC_POINT;
+	}
+
+	if (type == AVIO_DATA_MARKER_HEADER) {
+		stream_mux_header.append((char *)buf, buf_size);
+		global_httpd->set_header(stream_mux_header);
+	} else {
+		global_httpd->add_data((char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
+	}
+	return buf_size;
+}
+
diff --git a/video_stream.h b/video_stream.h
new file mode 100644
index 0000000..3a8a2f2
--- /dev/null
+++ b/video_stream.h
@@ -0,0 +1,55 @@
+#ifndef _VIDEO_STREAM_H
+#define _VIDEO_STREAM_H 1
+
+#include 
+#include 
+
+extern "C" {
+#include 
+}
+
+#include 
+#include 
+#include 
+#include 
+#include 
+
+class Mux;
+
+class VideoStream {
+public:
+	void start();
+	void stop();
+
+	void schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts);
+	void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha);
+
+private:
+	void encode_thread_func();
+	std::thread encode_thread;
+
+	static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
+	int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
+
+	struct QueuedFrame {
+		int64_t output_pts;
+		enum Type { ORIGINAL, INTERPOLATED } type;
+		unsigned stream_idx;
+		int64_t input_first_pts;  // The only pts for original frames.
+
+		// For interpolated frames only.
+		int64_t input_second_pts;
+		float alpha;
+		GLuint flow_tex;
+		GLuint fence;  // Set when the flow is done computing.
+	};
+	std::deque<QueuedFrame> frame_queue;  // Under <queue_lock>.
+	std::mutex queue_lock;
+	std::condition_variable queue_nonempty;
+
+	std::unique_ptr<Mux> stream_mux;  // To HTTP.
+	std::string stream_mux_header;
+	bool seen_sync_markers = false;
+};
+
+#endif  // !defined(_VIDEO_STREAM_H)
-- 
2.39.2
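
A note on the new API: the header above declares VideoStream::schedule_interpolated_frame() but this commit does not implement it, and schedule_original_frame() never assigns QueuedFrame::type even though encode_thread_func() reads it. The sketch below is illustrative only and not part of the patch; it shows how the interpolated variant might enqueue work under the same queue_lock/queue_nonempty pair, setting the type field explicitly and leaving the GPU fields (flow_tex, fence) untouched, since flow computation is deferred to a later commit.

// Illustrative sketch only; not part of this commit. Mirrors
// schedule_original_frame(), but marks the frame as INTERPOLATED and
// carries the second input pts plus the blend factor. The GPU fields
// (flow_tex, fence) are intentionally not set here.
void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx,
                                              int64_t input_first_pts, int64_t input_second_pts,
                                              float alpha)
{
	unique_lock<mutex> lock(queue_lock);
	QueuedFrame qf;
	qf.type = QueuedFrame::INTERPOLATED;
	qf.output_pts = output_pts;
	qf.stream_idx = stream_idx;
	qf.input_first_pts = input_first_pts;
	qf.input_second_pts = input_second_pts;
	qf.alpha = alpha;
	frame_queue.push_back(qf);
	queue_nonempty.notify_all();
}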