OBJS += $(OBJS_WITH_MOC)
OBJS += $(OBJS_WITH_MOC:.o=.moc.o)
-OBJS += ffmpeg_raii.o main.o player.o httpd.o mux.o metacube2.o
+OBJS += ffmpeg_raii.o main.o player.o httpd.o mux.o metacube2.o video_stream.o
%.o: %.cpp
$(CXX) -MMD -MP $(CPPFLAGS) $(CXXFLAGS) -o $@ -c $<
#include <stdio.h>
-extern "C" {
-#include <libavformat/avformat.h>
-#include <libavformat/avio.h>
-}
-
#include "clip_list.h"
#include "defs.h"
#include "ffmpeg_raii.h"
extern vector<int64_t> frames[MAX_STREAMS];
extern HTTPD *global_httpd;
-namespace {
-
-string read_file(const string &filename)
-{
- FILE *fp = fopen(filename.c_str(), "rb");
- if (fp == nullptr) {
- perror(filename.c_str());
- return "";
- }
-
- fseek(fp, 0, SEEK_END);
- long len = ftell(fp);
- rewind(fp);
-
- string ret;
- ret.resize(len);
- fread(&ret[0], len, 1, fp);
- fclose(fp);
- return ret;
-}
-
-} // namespace
-
void Player::thread_func()
{
for ( ;; ) {
// FIXME: Vaguely less crazy pts, perhaps.
double pts_float = fmod(duration<double>(next_frame_start.time_since_epoch()).count(), 86400.0f);
int64_t pts = lrint(pts_float * TIMEBASE);
- string jpeg = read_file(filename_for_frame(stream_idx, next_pts));
- AVPacket pkt;
- av_init_packet(&pkt);
- pkt.stream_index = 0;
- pkt.data = (uint8_t *)jpeg.data();
- pkt.size = jpeg.size();
- stream_mux->add_packet(pkt, pts, pts);
+ video_stream.schedule_original_frame(pts, stream_idx, next_pts);
}
{
Player::Player(JPEGFrameView *destination)
: destination(destination)
{
- open_output_stream();
+ video_stream.start();
thread(&Player::thread_func, this).detach();
}
}
destination->setFrame(stream_idx, *it);
}
-
-void Player::open_output_stream()
-{
- AVFormatContext *avctx = avformat_alloc_context();
- avctx->oformat = av_guess_format("nut", nullptr, nullptr);
-
- uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
- avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
- avctx->pb->write_data_type = &Player::write_packet2_thunk;
- avctx->pb->ignore_boundary_point = 1;
-
- Mux::Codec video_codec = Mux::CODEC_MJPEG;
-
- avctx->flags = AVFMT_FLAG_CUSTOM_IO;
-
- string video_extradata;
-
- constexpr int width = 1280, height = 720; // Doesn't matter for MJPEG.
- stream_mux.reset(new Mux(avctx, width, height, video_codec, video_extradata, /*audio_codec_parameters=*/nullptr, COARSE_TIMEBASE,
- /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));
-}
-
-int Player::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
-{
- Player *player = (Player *)opaque;
- return player->write_packet2(buf, buf_size, type, time);
-}
-
-int Player::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
-{
- if (type == AVIO_DATA_MARKER_SYNC_POINT || type == AVIO_DATA_MARKER_BOUNDARY_POINT) {
- seen_sync_markers = true;
- } else if (type == AVIO_DATA_MARKER_UNKNOWN && !seen_sync_markers) {
- // We don't know if this is a keyframe or not (the muxer could
- // avoid marking it), so we just have to make the best of it.
- type = AVIO_DATA_MARKER_SYNC_POINT;
- }
-
- if (type == AVIO_DATA_MARKER_HEADER) {
- stream_mux_header.append((char *)buf, buf_size);
- global_httpd->set_header(stream_mux_header);
- } else {
- global_httpd->add_data((char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
- }
- return buf_size;
-}
#define _PLAYER_H 1
#include "clip_list.h"
+#include "video_stream.h"
#include <condition_variable>
#include <functional>
#include <mutex>
-extern "C" {
-#include <libavformat/avio.h>
-}
-
class JPEGFrameView;
-class Mux;
class Player {
public:
bool playing = false; // Under queue_state_mu.
int override_stream_idx = -1; // Under queue_state_mu.
- // For streaming.
- std::unique_ptr<Mux> stream_mux; // To HTTP.
- std::string stream_mux_header;
- bool seen_sync_markers = false;
+ VideoStream video_stream;
};
#endif // !defined(_PLAYER_H)
--- /dev/null
+#include "video_stream.h"
+
+extern "C" {
+#include <libavformat/avformat.h>
+#include <libavformat/avio.h>
+}
+
+#include "httpd.h"
+#include "jpeg_frame_view.h"
+#include "mux.h"
+#include "player.h"
+
+using namespace std;
+
+extern HTTPD *global_httpd;
+
+namespace {
+
+// Slurp an entire file into memory. On any failure (open, seek, tell or a
+// short read), prints the error with perror() and returns the empty string;
+// callers treat an empty JPEG as "nothing to mux".
+string read_file(const string &filename)
+{
+	FILE *fp = fopen(filename.c_str(), "rb");
+	if (fp == nullptr) {
+		perror(filename.c_str());
+		return "";
+	}
+
+	// Determine the file size by seeking to the end.
+	if (fseek(fp, 0, SEEK_END) != 0) {
+		perror(filename.c_str());
+		fclose(fp);
+		return "";
+	}
+	long len = ftell(fp);
+	if (len < 0) {
+		// ftell() failed; resize(-1) below would be catastrophic.
+		perror(filename.c_str());
+		fclose(fp);
+		return "";
+	}
+	rewind(fp);
+
+	string ret;
+	ret.resize(len);
+	if (len > 0 && fread(&ret[0], len, 1, fp) != 1) {
+		// Short read; do not return a partially-filled buffer.
+		perror(filename.c_str());
+		fclose(fp);
+		return "";
+	}
+	fclose(fp);
+	return ret;
+}
+
+}  // namespace
+
+// Set up the NUT mux that feeds the HTTP output stream, then spawn the
+// encode thread. Must be called once before any schedule_*() call.
+void VideoStream::start()
+{
+	AVFormatContext *avctx = avformat_alloc_context();
+	avctx->oformat = av_guess_format("nut", nullptr, nullptr);
+
+	// Custom I/O: everything the muxer writes is routed through
+	// write_packet2_thunk() (and on to the HTTPD) instead of to a file.
+	uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
+	avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
+	avctx->pb->write_data_type = &VideoStream::write_packet2_thunk;
+	avctx->pb->ignore_boundary_point = 1;
+
+	Mux::Codec video_codec = Mux::CODEC_MJPEG;
+
+	avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+
+	// MJPEG carries all parameters in-band, so no codec extradata is needed.
+	string video_extradata;
+
+	constexpr int width = 1280, height = 720;  // Doesn't matter for MJPEG.
+	stream_mux.reset(new Mux(avctx, width, height, video_codec, video_extradata, /*audio_codec_parameters=*/nullptr, COARSE_TIMEBASE,
+		/*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));
+
+
+	encode_thread = thread(&VideoStream::encode_thread_func, this);
+}
+
+// Wait for the encode thread to finish.
+// NOTE(review): encode_thread_func() loops forever with no quit condition,
+// so this join() will block indefinitely — a should_quit flag (set here,
+// checked in the encode loop, with a queue_nonempty wakeup) is needed before
+// stop() can actually terminate. TODO.
+void VideoStream::stop()
+{
+	encode_thread.join();
+}
+
+// Queue an already-encoded (original) JPEG frame, identified by
+// (stream_idx, input_pts), for output at <output_pts>.
+// Thread-safe; wakes the encode thread.
+void VideoStream::schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts)
+{
+	unique_lock<mutex> lock(queue_lock);
+	QueuedFrame qf;
+	// Bug fix: <type> was left uninitialized, but encode_thread_func()
+	// branches on it (undefined behavior; frames could be dropped).
+	qf.type = QueuedFrame::ORIGINAL;
+	qf.output_pts = output_pts;
+	qf.stream_idx = stream_idx;
+	qf.input_first_pts = input_pts;
+	frame_queue.push_back(qf);
+	queue_nonempty.notify_all();
+}
+
+// Encode-thread main loop: pop scheduled frames off the queue in order and
+// hand them to the muxer. Runs until the process exits (no quit condition
+// yet — see stop()).
+void VideoStream::encode_thread_func()
+{
+	for ( ;; ) {
+		QueuedFrame qf;
+		{
+			// Block until at least one frame has been scheduled.
+			unique_lock<mutex> lock(queue_lock);
+			queue_nonempty.wait(lock, [this]{
+				return !frame_queue.empty();
+			});
+			qf = frame_queue.front();
+			frame_queue.pop_front();
+		}
+
+		if (qf.type == QueuedFrame::ORIGINAL) {
+			// Original frame: the JPEG already exists on disk, so just wrap
+			// the raw bytes in a packet and mux them out unchanged.
+			string jpeg = read_file(filename_for_frame(qf.stream_idx, qf.input_first_pts));
+			AVPacket pkt;
+			av_init_packet(&pkt);
+			pkt.stream_index = 0;
+			pkt.data = (uint8_t *)jpeg.data();
+			pkt.size = jpeg.size();
+			stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+		}
+		// INTERPOLATED frames are currently dropped (not implemented yet).
+	}
+}
+
+// Static trampoline for the avio write callback; <opaque> is the VideoStream
+// pointer registered in start().
+int VideoStream::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
+{
+	VideoStream *video_stream = (VideoStream *)opaque;
+	return video_stream->write_packet2(buf, buf_size, type, time);
+}
+
+// Receive muxed bytes from the avio context and forward them to the HTTP
+// server: the mux header is accumulated and registered once, everything else
+// is streamed as data, with sync points marked so clients can join mid-stream.
+int VideoStream::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
+{
+	if (type == AVIO_DATA_MARKER_SYNC_POINT || type == AVIO_DATA_MARKER_BOUNDARY_POINT) {
+		seen_sync_markers = true;
+	} else if (type == AVIO_DATA_MARKER_UNKNOWN && !seen_sync_markers) {
+		// We don't know if this is a keyframe or not (the muxer could
+		// avoid marking it), so we just have to make the best of it.
+		type = AVIO_DATA_MARKER_SYNC_POINT;
+	}
+
+	if (type == AVIO_DATA_MARKER_HEADER) {
+		stream_mux_header.append((char *)buf, buf_size);
+		global_httpd->set_header(stream_mux_header);
+	} else {
+		global_httpd->add_data((char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
+	}
+	return buf_size;
+}
+
--- /dev/null
+#ifndef _VIDEO_STREAM_H
+#define _VIDEO_STREAM_H 1
+
+#include <stdint.h>
+#include <epoxy/gl.h>
+
+extern "C" {
+#include <libavformat/avio.h>
+}
+
+#include <condition_variable>
+#include <deque>
+#include <mutex>
+#include <string>
+#include <thread>
+
+class Mux;
+
+// Output video stream to HTTP: frames are scheduled into a queue, and an
+// encode thread muxes them (as MJPEG in a NUT container) and pushes the
+// resulting bytes to the global HTTPD.
+class VideoStream {
+public:
+	void start();
+	void stop();
+
+	// Queue a frame that already exists as a JPEG on disk.
+	void schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts);
+	// Queue a frame to be interpolated between two input frames.
+	// NOTE(review): declared only — no definition in video_stream.cpp yet.
+	void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha);
+
+private:
+	void encode_thread_func();
+	std::thread encode_thread;
+
+	// avio write callback (static trampoline + member implementation).
+	static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
+	int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
+
+	// One scheduled output frame; producers fill it in under <queue_lock>,
+	// the encode thread consumes it.
+	struct QueuedFrame {
+		int64_t output_pts;
+		enum Type { ORIGINAL, INTERPOLATED } type;
+		unsigned stream_idx;
+		int64_t input_first_pts; // The only pts for original frames.
+
+		// For interpolated frames only.
+		int64_t input_second_pts;
+		float alpha;
+		GLuint flow_tex;
+		GLuint fence; // Set when the flow is done computing.
+		// NOTE(review): an OpenGL fence is a GLsync (pointer type), not a
+		// GLuint object name — confirm before the interpolation path lands.
+	};
+	std::deque<QueuedFrame> frame_queue; // Under <queue_lock>.
+	std::mutex queue_lock;
+	std::condition_variable queue_nonempty;
+
+	std::unique_ptr<Mux> stream_mux; // To HTTP.
+	std::string stream_mux_header;
+	bool seen_sync_markers = false;
+};
+
+#endif // !defined(_VIDEO_STREAM_H)