{
}
-void DiskSpaceEstimator::report_write(const std::string &filename, uint64_t pts)
+void DiskSpaceEstimator::report_write(const std::string &filename, size_t bytes, uint64_t pts)
{
// Reject points that are out-of-order (happens with B-frames).
if (!measure_points.empty() && pts <= measure_points.back().pts) {
measure_points.pop_front();
}
- struct stat st;
- if (stat(filename.c_str(), &st) == -1) {
- perror(filename.c_str());
- return;
- }
-
- total_size += st.st_size;
+ total_size += bytes;
struct statfs fst;
if (statfs(filename.c_str(), &fst) == -1) {
typedef std::function<void(off_t free_bytes, double estimated_seconds_left)> callback_t;
DiskSpaceEstimator(callback_t callback);
- // Report that a video frame with the given pts has just been written
- // to the given file, so the estimator should stat the file and see
- // by big it is. (The file is assumed to hold only that single frame,
- // unlike in Nageru, where it is a growing file.)
+ // Report that a video frame with the given pts and size has just been
+ // written (possibly appended) to the given file.
//
- // If the filename changed since last time, the estimation is reset.
// <pts> is taken to be in TIMEBASE units (see timebase.h).
- void report_write(const std::string &filename, uint64_t pts);
+ void report_write(const std::string &filename, size_t bytes, uint64_t pts);
private:
static constexpr int64_t window_length = 30 * TIMEBASE;
--- /dev/null
+syntax = "proto3";
+
+// Used as header before each frame in a .frames file:
+//
+// 1. "Ftbifrm0" (8 bytes, ASCII -- note that no byte repeats)
+// 2. Length of upcoming FrameHeaderProto (uint32, binary, big endian)
+// 3. The FrameHeaderProto itself
+// 4. The actual frame
+
+message FrameHeaderProto {
+	int32 stream_idx = 1;  // Which input/camera the frame came from.
+	int64 pts = 2;  // Presentation timestamp, in TIMEBASE units (see timebase.h).
+	int64 file_size = 3; // In bytes of compressed frame.
+}
--- /dev/null
+#ifndef _FRAME_ON_DISK_H
+#define _FRAME_ON_DISK_H 1
+
+#include <mutex>
+#include <string>
+#include <vector>
+
+#include <stdint.h>
+#include <sys/types.h>  // For off_t; do not rely on transitive includes.
+
+#include "defs.h"
+
+// Protects the frame database (<frames> and <frame_filenames>) below.
+extern std::mutex frame_mu;
+
+// Metadata for one compressed frame stored somewhere in a .frames file.
+struct FrameOnDisk {
+	int64_t pts = -1;  // -1 means empty.
+	off_t offset;  // Byte offset of the frame data within the file.
+	unsigned filename_idx;  // Index into frame_filenames.
+	uint32_t size;  // In bytes. Not using size_t saves a few bytes per frame; we can have very many frames.
+};
+extern std::vector<FrameOnDisk> frames[MAX_STREAMS];  // Under frame_mu.
+extern std::vector<std::string> frame_filenames;  // Under frame_mu.
+
+// Read the frame's compressed bytes back from disk.
+std::string read_frame(FrameOnDisk frame);
+
+#endif  // !defined(_FRAME_ON_DISK_H)
namespace {
// Just an arbitrary order for std::map.
-struct JPEGIDLexicalOrder
+struct FrameOnDiskLexicalOrder
{
- bool operator() (const JPEGID &a, const JPEGID &b) const
+ bool operator() (const FrameOnDisk &a, const FrameOnDisk &b) const
{
- if (a.stream_idx != b.stream_idx)
- return a.stream_idx < b.stream_idx;
- return a.pts < b.pts;
+ if (a.pts != b.pts)
+ return a.pts < b.pts;
+ if (a.offset != b.offset)
+ return a.offset < b.offset;
+ if (a.filename_idx != b.filename_idx)
+ return a.filename_idx < b.filename_idx;
+ assert(a.size == b.size);
+ return false;
}
};
JPEGFrameView *destination;
// For actual decodes (only if frame below is nullptr).
- JPEGID primary, secondary;
+ FrameOnDisk primary, secondary;
float fade_alpha; // Irrelevant if secondary.stream_idx == -1.
// Already-decoded frames are also sent through PendingDecode,
thread JPEGFrameView::jpeg_decoder_thread;
mutex cache_mu;
-map<JPEGID, LRUFrame, JPEGIDLexicalOrder> cache; // Under cache_mu.
+map<FrameOnDisk, LRUFrame, FrameOnDiskLexicalOrder> cache; // Under cache_mu.
size_t cache_bytes_used = 0; // Under cache_mu.
condition_variable any_pending_decodes;
deque<PendingDecode> pending_decodes; // Under cache_mu.
}
}
-shared_ptr<Frame> decode_jpeg_with_cache(JPEGID id, CacheMissBehavior cache_miss_behavior, bool *did_decode)
+shared_ptr<Frame> decode_jpeg_with_cache(FrameOnDisk frame_spec, CacheMissBehavior cache_miss_behavior, bool *did_decode)
{
*did_decode = false;
{
unique_lock<mutex> lock(cache_mu);
- auto it = cache.find(id);
+ auto it = cache.find(frame_spec);
if (it != cache.end()) {
it->second.last_used = event_counter++;
return it->second.frame;
}
*did_decode = true;
- shared_ptr<Frame> frame = decode_jpeg(filename_for_frame(id.stream_idx, id.pts));
+ shared_ptr<Frame> frame = decode_jpeg(read_frame(frame_spec));
unique_lock<mutex> lock(cache_mu);
cache_bytes_used += frame_size(*frame);
- cache[id] = LRUFrame{ frame, event_counter++ };
+ cache[frame_spec] = LRUFrame{ frame, event_counter++ };
if (cache_bytes_used > size_t(CACHE_SIZE_MB) * 1024 * 1024) {
prune_cache();
shared_ptr<Frame> primary_frame, secondary_frame;
bool drop = false;
for (int subframe_idx = 0; subframe_idx < 2; ++subframe_idx) {
- const JPEGID &id = (subframe_idx == 0 ? decode.primary : decode.secondary);
- if (id.stream_idx == (unsigned)-1) {
+ const FrameOnDisk &frame_spec = (subframe_idx == 0 ? decode.primary : decode.secondary);
+ if (frame_spec.pts == -1) {
// No secondary frame.
continue;
}
bool found_in_cache;
- shared_ptr<Frame> frame = decode_jpeg_with_cache(id, cache_miss_behavior, &found_in_cache);
+ shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, cache_miss_behavior, &found_in_cache);
if (frame == nullptr) {
assert(cache_miss_behavior == RETURN_NULLPTR_IF_NOT_IN_CACHE);
{
}
-void JPEGFrameView::setFrame(unsigned stream_idx, int64_t pts, int secondary_stream_idx, int64_t secondary_pts, float fade_alpha)
+void JPEGFrameView::setFrame(unsigned stream_idx, FrameOnDisk frame, FrameOnDisk secondary_frame, float fade_alpha)
{
- if (secondary_stream_idx != -1) assert(secondary_pts != -1);
current_stream_idx = stream_idx; // TODO: Does this interact with fades?
unique_lock<mutex> lock(cache_mu);
PendingDecode decode;
- decode.primary = JPEGID{ stream_idx, pts };
- decode.secondary = JPEGID{ (unsigned)secondary_stream_idx, secondary_pts };
+ decode.primary = frame;
+ decode.secondary = secondary_frame;
decode.fade_alpha = fade_alpha;
decode.destination = this;
pending_decodes.push_back(decode);
#ifndef _JPEG_FRAME_VIEW_H
#define _JPEG_FRAME_VIEW_H 1
+#include "frame_on_disk.h"
#include "jpeg_frame.h"
#include "ycbcr_converter.h"
#include <stdint.h>
#include <thread>
-struct JPEGID {
- unsigned stream_idx;
- int64_t pts;
-};
enum CacheMissBehavior {
DECODE_IF_NOT_IN_CACHE,
RETURN_NULLPTR_IF_NOT_IN_CACHE
};
-std::string filename_for_frame(unsigned stream_idx, int64_t pts);
std::shared_ptr<Frame> decode_jpeg(const std::string &filename);
-std::shared_ptr<Frame> decode_jpeg_with_cache(JPEGID id, CacheMissBehavior cache_miss_behavior, bool *did_decode);
+std::shared_ptr<Frame> decode_jpeg_with_cache(FrameOnDisk id, CacheMissBehavior cache_miss_behavior, bool *did_decode);
class JPEGFrameView : public QGLWidget {
Q_OBJECT
public:
JPEGFrameView(QWidget *parent);
- void setFrame(unsigned stream_idx, int64_t pts, int secondary_stream_idx = -1, int64_t secondary_pts = -1, float fade_alpha = 0.0f);
+ void setFrame(unsigned stream_idx, FrameOnDisk frame, FrameOnDisk secondary_frame = {}, float fade_alpha = 0.0f);
void setFrame(std::shared_ptr<Frame> frame);
void mousePressEvent(QMouseEvent *event) override;
#include <assert.h>
+#include <arpa/inet.h>
#include <atomic>
#include <chrono>
#include <condition_variable>
#include "disk_space_estimator.h"
#include "ffmpeg_raii.h"
#include "flags.h"
+#include "frame_on_disk.h"
+#include "frame.pb.h"
#include "httpd.h"
#include "mainwindow.h"
#include "player.h"
#include "vaapi_jpeg_decoder.h"
#include <QApplication>
+#include <QGLFormat>
+#include <QSurfaceFormat>
#include <movit/init.h>
#include <movit/util.h>
using namespace std;
using namespace std::chrono;
+constexpr char frame_magic[] = "Ftbifrm0";
+constexpr size_t frame_magic_len = 8;
+
mutex RefCountedGLsync::fence_lock;
atomic<bool> should_quit{false};
// TODO: Replace by some sort of GUI control, I guess.
int64_t current_pts = 0;
-string filename_for_frame(unsigned stream_idx, int64_t pts)
+struct FrameFile {
+ FILE *fp = nullptr;
+ int filename_idx;
+ size_t frames_written_so_far = 0;
+};
+std::map<int, FrameFile> open_frame_files;
+
+mutex frame_mu;
+vector<FrameOnDisk> frames[MAX_STREAMS]; // Under frame_mu.
+vector<string> frame_filenames; // Under frame_mu.
+
+namespace {
+
+// Write one compressed frame (with magic + length + FrameHeaderProto header,
+// see frame.proto) to the per-stream .frames file, rotating to a new file
+// every 1000 frames. Registers the frame in the global frame database and
+// returns its on-disk metadata. Exits on any I/O error.
+FrameOnDisk write_frame(int stream_idx, int64_t pts, const uint8_t *data, size_t size)
{
-	char filename[256];
-	snprintf(filename, sizeof(filename), "%s/frames/cam%d-pts%09ld.jpeg",
-		global_flags.working_directory.c_str(), stream_idx, pts);
-	return filename;
+	// Open a new .frames file for this stream if we do not have one already.
+	if (open_frame_files.count(stream_idx) == 0) {
+		char filename[256];
+		snprintf(filename, sizeof(filename), "%s/frames/cam%d-pts%09ld.frames",
+			global_flags.working_directory.c_str(), stream_idx, pts);
+		FILE *fp = fopen(filename, "wb");
+		if (fp == nullptr) {
+			perror(filename);
+			exit(1);
+		}
+
+		lock_guard<mutex> lock(frame_mu);
+		int filename_idx = frame_filenames.size();
+		frame_filenames.push_back(filename);
+		open_frame_files[stream_idx] = FrameFile{ fp, filename_idx, 0 };
+	}
+
+	FrameFile &file = open_frame_files[stream_idx];
+	string filename;
+	{
+		lock_guard<mutex> lock(frame_mu);
+		filename = frame_filenames[file.filename_idx];
+	}
+
+	FrameHeaderProto hdr;
+	hdr.set_stream_idx(stream_idx);
+	hdr.set_pts(pts);
+	hdr.set_file_size(size);
+
+	string serialized;
+	if (!hdr.SerializeToString(&serialized)) {
+		fprintf(stderr, "Frame header serialization failed.\n");
+		exit(1);
+	}
+	uint32_t len = htonl(serialized.size());
+
+	if (fwrite(frame_magic, frame_magic_len, 1, file.fp) != 1) {
+		perror("fwrite");
+		exit(1);
+	}
+	if (fwrite(&len, sizeof(len), 1, file.fp) != 1) {
+		perror("fwrite");
+		exit(1);
+	}
+	if (fwrite(serialized.data(), serialized.size(), 1, file.fp) != 1) {
+		perror("fwrite");
+		exit(1);
+	}
+	off_t offset = ftell(file.fp);
+	if (fwrite(data, size, 1, file.fp) != 1) {
+		perror("fwrite");
+		exit(1);
+	}
+	fflush(file.fp); // No fsync(), though. We can accept losing a few frames.
+	global_disk_space_estimator->report_write(filename, frame_magic_len + sizeof(len) + serialized.size() + size, pts);
+
+	// Fill in the returned metadata _before_ we potentially close the file
+	// below; erasing from open_frame_files invalidates <file>, so reading
+	// file.filename_idx after the erase would be a use-after-free.
+	FrameOnDisk frame;
+	frame.pts = pts;
+	frame.filename_idx = file.filename_idx;
+	frame.offset = offset;
+	frame.size = size;
+
+	if (++file.frames_written_so_far >= 1000) {
+		// Start a new file next time.
+		if (fclose(file.fp) != 0) {
+			perror("fclose");
+			exit(1);
+		}
+		open_frame_files.erase(stream_idx);  // NOTE: Invalidates <file>.
+
+		// TODO: Write to SQLite.
+	}
+
+	{
+		lock_guard<mutex> lock(frame_mu);
+		assert(stream_idx < MAX_STREAMS);
+		frames[stream_idx].push_back(frame);
+	}
+
+	return frame;
}
-mutex frame_mu;
-vector<int64_t> frames[MAX_STREAMS];
+} // namespace
+
HTTPD *global_httpd;
void load_existing_frames();
return ret;
}
+// Scan an existing .frames file and register every frame found in it into
+// the global frame database. Resynchronizes on the frame magic, so garbage
+// or truncated data in the file is skipped (with a warning) rather than
+// aborting the load.
+void load_frame_file(const char *filename, unsigned filename_idx)
+{
+	// TODO: Look up in the SQLite database.
+
+	FILE *fp = fopen(filename, "rb");
+	if (fp == nullptr) {
+		perror(filename);
+		exit(1);
+	}
+
+	size_t magic_offset = 0;
+	size_t skipped_bytes = 0;
+	while (!feof(fp) && !ferror(fp)) {
+		int ch = getc(fp);
+		if (ch == -1) {
+			break;
+		}
+		if (ch != frame_magic[magic_offset++]) {
+			// Resynchronize. Since no byte repeats within the magic,
+			// the mismatching byte can itself only start a new match
+			// if it equals the first magic byte; retry it as such
+			// instead of dropping it.
+			skipped_bytes += magic_offset;
+			magic_offset = 0;
+			if (ch == frame_magic[0]) {
+				--skipped_bytes;
+				magic_offset = 1;
+			}
+			continue;
+		}
+		if (magic_offset < frame_magic_len) {
+			// Still reading the magic (hopefully).
+			continue;
+		}
+
+		// OK, found the magic. Try to parse the frame header.
+		magic_offset = 0;
+
+		if (skipped_bytes > 0) {
+			fprintf(stderr, "WARNING: %s: Skipped %zu garbage bytes in the middle.\n",
+				filename, skipped_bytes);
+			skipped_bytes = 0;
+		}
+
+		uint32_t len;
+		if (fread(&len, sizeof(len), 1, fp) != 1) {
+			fprintf(stderr, "WARNING: %s: Short read when getting length.\n", filename);
+			break;
+		}
+
+		string serialized;
+		serialized.resize(ntohl(len));
+		if (fread(&serialized[0], serialized.size(), 1, fp) != 1) {
+			fprintf(stderr, "WARNING: %s: Short read when reading frame header (%zu bytes).\n", filename, serialized.size());
+			break;
+		}
+
+		FrameHeaderProto hdr;
+		if (!hdr.ParseFromString(serialized)) {
+			fprintf(stderr, "WARNING: %s: Corrupted frame header.\n", filename);
+			continue;
+		}
+
+		FrameOnDisk frame;
+		frame.pts = hdr.pts();
+		frame.offset = ftell(fp);
+		frame.filename_idx = filename_idx;
+		frame.size = hdr.file_size();
+
+		// We only index the frame here; skip past its actual data.
+		if (fseek(fp, frame.offset + frame.size, SEEK_SET) == -1) {
+			fprintf(stderr, "WARNING: %s: Could not seek past frame (probably truncated).\n", filename);
+			continue;
+		}
+
+		if (hdr.stream_idx() >= 0 && hdr.stream_idx() < MAX_STREAMS) {
+			frames[hdr.stream_idx()].push_back(frame);
+			start_pts = max(start_pts, hdr.pts());
+		}
+	}
+
+	if (skipped_bytes > 0) {
+		fprintf(stderr, "WARNING: %s: Skipped %zu garbage bytes at the end.\n",
+			filename, skipped_bytes);
+	}
+
+	fclose(fp);  // Was previously leaked.
+}
+
void load_existing_frames()
{
string frame_dir = global_flags.working_directory + "/frames";
break;
}
- int stream_idx;
- int64_t pts;
- if (sscanf(de->d_name, "cam%d-pts%ld.jpeg", &stream_idx, &pts) == 2 &&
- stream_idx >= 0 && stream_idx < MAX_STREAMS) {
- frames[stream_idx].push_back(pts);
- start_pts = max(start_pts, pts);
+ if (de->d_type == DT_REG) {
+ string filename = frame_dir + "/" + de->d_name;
+ load_frame_file(filename.c_str(), frame_filenames.size());
+ frame_filenames.push_back(filename);
}
}
}
for (int stream_idx = 0; stream_idx < MAX_STREAMS; ++stream_idx) {
- sort(frames[stream_idx].begin(), frames[stream_idx].end());
+ sort(frames[stream_idx].begin(), frames[stream_idx].end(),
+ [](const auto &a, const auto &b) { return a.pts < b.pts; });
}
}
//fprintf(stderr, "Got a frame from camera %d, pts = %ld, size = %d\n",
// pkt.stream_index, pts, pkt.size);
- string filename = filename_for_frame(pkt.stream_index, pts);
- FILE *fp = fopen(filename.c_str(), "wb");
- if (fp == nullptr) {
- perror(filename.c_str());
- exit(1);
- }
- fwrite(pkt.data, pkt.size, 1, fp);
- fclose(fp);
-
- global_disk_space_estimator->report_write(filename, pts);
+ FrameOnDisk frame = write_frame(pkt.stream_index, pts, pkt.data, pkt.size);
- post_to_main_thread([pkt, pts] {
+ post_to_main_thread([pkt, frame] {
if (pkt.stream_index == 0) {
- global_mainwindow->ui->input1_display->setFrame(pkt.stream_index, pts);
+ global_mainwindow->ui->input1_display->setFrame(pkt.stream_index, frame);
} else if (pkt.stream_index == 1) {
- global_mainwindow->ui->input2_display->setFrame(pkt.stream_index, pts);
+ global_mainwindow->ui->input2_display->setFrame(pkt.stream_index, frame);
} else if (pkt.stream_index == 2) {
- global_mainwindow->ui->input3_display->setFrame(pkt.stream_index, pts);
+ global_mainwindow->ui->input3_display->setFrame(pkt.stream_index, frame);
} else if (pkt.stream_index == 3) {
- global_mainwindow->ui->input4_display->setFrame(pkt.stream_index, pts);
+ global_mainwindow->ui->input4_display->setFrame(pkt.stream_index, frame);
}
});
- assert(pkt.stream_index < MAX_STREAMS);
- frames[pkt.stream_index].push_back(pts);
-
if (last_pts != -1 && global_flags.slow_down_input) {
this_thread::sleep_for(microseconds((pts - last_pts) * 1000000 / TIMEBASE));
}
return 0;
}
+
+// Read the compressed bytes of the given frame back from its .frames file,
+// using the offset/size recorded in <frame>. Exits on any I/O error.
+string read_frame(FrameOnDisk frame)
+{
+	string filename;
+	{
+		// Only the filename lookup needs frame_mu; the actual file
+		// I/O below happens outside the lock.
+		lock_guard<mutex> lock(frame_mu);
+		filename = frame_filenames[frame.filename_idx];
+	}
+
+	// TODO: cache the open file handles
+	FILE *fp = fopen(filename.c_str(), "rb");
+	if (fp == nullptr) {
+		perror(filename.c_str());
+		exit(1);
+	}
+	if (fseek(fp, frame.offset, SEEK_SET) == -1) {
+		perror("fseek");
+		exit(1);
+	}
+
+	string str;
+	str.resize(frame.size);
+	if (fread(&str[0], frame.size, 1, fp) != 1) {
+		perror("fread");
+		exit(1);
+	}
+
+	fclose(fp);
+	return str;
+}
#include "clip_list.h"
#include "disk_space_estimator.h"
#include "flags.h"
+#include "frame_on_disk.h"
#include "player.h"
#include "post_to_main_thread.h"
#include "timebase.h"
static PlayList *playlist_clips;
extern int64_t current_pts;
-extern mutex frame_mu;
-extern vector<int64_t> frames[MAX_STREAMS];
MainWindow::MainWindow()
: ui(new Ui::MainWindow),
lock_guard<mutex> lock(frame_mu);
if (frames[stream_idx].empty())
return;
- auto it = lower_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts);
+ auto it = lower_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts,
+ [](const FrameOnDisk &frame, int64_t pts) { return frame.pts < pts; });
if (it != frames[stream_idx].end()) {
- pts = *it;
+ pts = it->pts;
}
} else {
assert(rounding == FIRST_AT_OR_AFTER);
lock_guard<mutex> lock(frame_mu);
if (frames[stream_idx].empty())
return;
- auto it = upper_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts - 1);
+ auto it = upper_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts - 1,
+ [](int64_t pts, const FrameOnDisk &frame) { return pts < frame.pts; });
if (it != frames[stream_idx].end()) {
- pts = *it;
+ pts = it->pts;
}
}
gen = generator(protoc, \
output : ['@BASENAME@.pb.cc', '@BASENAME@.pb.h'],
arguments : ['--proto_path=@CURRENT_SOURCE_DIR@', '--cpp_out=@BUILD_DIR@', '@INPUT@'])
-proto_generated = gen.process('state.proto')
+proto_generated = gen.process('state.proto', 'frame.proto')
# Preprocess Qt as needed.
moc_files = qt5.preprocess(
#include "context.h"
#include "defs.h"
#include "ffmpeg_raii.h"
+#include "frame_on_disk.h"
#include "httpd.h"
#include "jpeg_frame_view.h"
#include "mux.h"
using namespace std;
using namespace std::chrono;
-extern mutex frame_mu;
-extern vector<int64_t> frames[MAX_STREAMS];
extern HTTPD *global_httpd;
void Player::thread_func(bool also_output_to_stream)
// Find the first frame such that frame.pts <= in_pts.
auto it = lower_bound(frames[stream_idx].begin(),
frames[stream_idx].end(),
- in_pts_origin);
+ in_pts_origin,
+ [](const FrameOnDisk &frame, int64_t pts) { return frame.pts < pts; });
if (it != frames[stream_idx].end()) {
- in_pts_origin = *it;
+ in_pts_origin = it->pts;
}
}
int64_t in_pts_for_progress = in_pts, in_pts_secondary_for_progress = -1;
int primary_stream_idx = stream_idx;
+ FrameOnDisk secondary_frame;
int secondary_stream_idx = -1;
- int64_t secondary_pts = -1;
- int64_t in_pts_secondary = -1;
float fade_alpha = 0.0f;
if (got_next_clip && time_left_this_clip <= next_clip_fade_time) {
secondary_stream_idx = next_clip.stream_idx;
- in_pts_secondary = lrint(next_clip.pts_in + (next_clip_fade_time - time_left_this_clip) * TIMEBASE * speed);
+ int64_t in_pts_secondary = lrint(next_clip.pts_in + (next_clip_fade_time - time_left_this_clip) * TIMEBASE * speed);
in_pts_secondary_for_progress = in_pts_secondary;
fade_alpha = 1.0f - time_left_this_clip / next_clip_fade_time;
fade_alpha = 1.0f - fade_alpha;
}
- int64_t in_pts_lower, in_pts_upper;
- bool ok = find_surrounding_frames(in_pts_secondary, secondary_stream_idx, &in_pts_lower, &in_pts_upper);
+ FrameOnDisk frame_lower, frame_upper;
+ bool ok = find_surrounding_frames(in_pts_secondary, secondary_stream_idx, &frame_lower, &frame_upper);
if (ok) {
- secondary_pts = in_pts_lower;
- } else {
- secondary_stream_idx = -1;
+ secondary_frame = frame_lower;
}
}
progress_callback(progress);
}
- int64_t in_pts_lower, in_pts_upper;
- bool ok = find_surrounding_frames(in_pts, primary_stream_idx, &in_pts_lower, &in_pts_upper);
+ FrameOnDisk frame_lower, frame_upper;
+ bool ok = find_surrounding_frames(in_pts, primary_stream_idx, &frame_lower, &frame_upper);
if (!ok) {
break;
}
}
}
- if (in_pts_lower == in_pts_upper) {
- auto display_func = [this, primary_stream_idx, in_pts_lower, secondary_stream_idx, secondary_pts, fade_alpha]{
- destination->setFrame(primary_stream_idx, in_pts_lower, secondary_stream_idx, secondary_pts, fade_alpha);
+ if (frame_lower.pts == frame_upper.pts) {
+ auto display_func = [this, primary_stream_idx, frame_lower, secondary_frame, fade_alpha]{
+ destination->setFrame(primary_stream_idx, frame_lower, secondary_frame, fade_alpha);
};
if (video_stream == nullptr) {
display_func();
if (secondary_stream_idx == -1) {
video_stream->schedule_original_frame(
next_frame_start, pts, display_func, QueueSpotHolder(this),
- primary_stream_idx, in_pts_lower);
+ frame_lower);
} else {
- assert(secondary_pts != -1);
+ assert(secondary_frame.pts != -1);
video_stream->schedule_faded_frame(next_frame_start, pts, display_func,
- QueueSpotHolder(this), primary_stream_idx, in_pts_lower,
- secondary_stream_idx, secondary_pts, fade_alpha);
+ QueueSpotHolder(this), frame_lower,
+ secondary_frame, fade_alpha);
}
}
continue;
// (ie., move less than 1% of an _output_ frame), do so.
// TODO: Snap secondary (fade-to) clips in the same fashion.
bool snapped = false;
- for (int64_t snap_pts : { in_pts_lower, in_pts_upper }) {
+ for (int64_t snap_pts : { frame_lower.pts, frame_upper.pts }) {
double snap_pts_as_frameno = (snap_pts - in_pts_origin) * output_framerate / TIMEBASE / speed;
if (fabs(snap_pts_as_frameno - frameno) < 0.01) {
- auto display_func = [this, primary_stream_idx, snap_pts, secondary_stream_idx, secondary_pts, fade_alpha]{
- destination->setFrame(primary_stream_idx, snap_pts, secondary_stream_idx, secondary_pts, fade_alpha);
+ FrameOnDisk snap_frame = frame_lower;
+ snap_frame.pts = snap_pts;
+ auto display_func = [this, primary_stream_idx, snap_frame, secondary_frame, fade_alpha]{
+ destination->setFrame(primary_stream_idx, snap_frame, secondary_frame, fade_alpha);
};
if (video_stream == nullptr) {
display_func();
if (secondary_stream_idx == -1) {
video_stream->schedule_original_frame(
next_frame_start, pts, display_func,
- QueueSpotHolder(this), primary_stream_idx, snap_pts);
+ QueueSpotHolder(this), snap_frame);
} else {
- assert(secondary_pts != -1);
+ assert(secondary_frame.pts != -1);
video_stream->schedule_faded_frame(
next_frame_start, pts, display_func, QueueSpotHolder(this),
- primary_stream_idx, snap_pts, secondary_stream_idx, secondary_pts, fade_alpha);
+ snap_frame, secondary_frame, fade_alpha);
}
}
in_pts_origin += snap_pts - in_pts;
continue;
}
- double alpha = double(in_pts - in_pts_lower) / (in_pts_upper - in_pts_lower);
+ double alpha = double(in_pts - frame_lower.pts) / (frame_upper.pts - frame_lower.pts);
if (video_stream == nullptr) {
// Previews don't do any interpolation.
assert(secondary_stream_idx == -1);
- destination->setFrame(primary_stream_idx, in_pts_lower);
+ destination->setFrame(primary_stream_idx, frame_lower);
} else {
auto display_func = [this](shared_ptr<Frame> frame) {
destination->setFrame(frame);
};
video_stream->schedule_interpolated_frame(
next_frame_start, pts, display_func, QueueSpotHolder(this),
- primary_stream_idx, in_pts_lower, in_pts_upper, alpha,
- secondary_stream_idx, secondary_pts, fade_alpha);
+ frame_lower, frame_upper, alpha,
+ secondary_frame, fade_alpha);
}
}
}
// Find the frame immediately before and after this point.
-bool Player::find_surrounding_frames(int64_t pts, int stream_idx, int64_t *pts_lower, int64_t *pts_upper)
+bool Player::find_surrounding_frames(int64_t pts, int stream_idx, FrameOnDisk *frame_lower, FrameOnDisk *frame_upper)
{
lock_guard<mutex> lock(frame_mu);
// Find the first frame such that frame.pts >= pts.
auto it = lower_bound(frames[stream_idx].begin(),
frames[stream_idx].end(),
- pts);
+ pts,
+ [](const FrameOnDisk &frame, int64_t pts) { return frame.pts < pts; });
if (it == frames[stream_idx].end()) {
return false;
}
- *pts_upper = *it;
+ *frame_upper = *it;
// Find the last frame such that in_pts <= frame.pts (if any).
if (it == frames[stream_idx].begin()) {
- *pts_lower = *it;
+ *frame_lower = *it;
} else {
- *pts_lower = *(it - 1);
+ *frame_lower = *(it - 1);
}
- assert(pts >= *pts_lower);
- assert(pts <= *pts_upper);
+ assert(pts >= frame_lower->pts);
+ assert(pts <= frame_upper->pts);
return true;
}
}
lock_guard<mutex> lock(frame_mu);
- auto it = upper_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts_out);
+ auto it = upper_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts_out,
+ [](int64_t pts, const FrameOnDisk &frame) { return pts < frame.pts; });
if (it == frames[stream_idx].end()) {
return;
}
#define _PLAYER_H 1
#include "clip_list.h"
+#include "frame_on_disk.h"
#include "queue_spot_holder.h"
extern "C" {
// Find the frame immediately before and after this point.
// Returns false if pts is after the last frame.
- bool find_surrounding_frames(int64_t pts, int stream_idx, int64_t *pts_lower, int64_t *pts_upper);
+ bool find_surrounding_frames(int64_t pts, int stream_idx, FrameOnDisk *frame_lower, FrameOnDisk *frame_upper);
JPEGFrameView *destination;
done_callback_func done_callback;
VABufferID buf;
};
-shared_ptr<Frame> decode_jpeg_vaapi(const string &filename)
+shared_ptr<Frame> decode_jpeg_vaapi(const string &jpeg)
{
jpeg_decompress_struct dinfo;
jpeg_error_mgr jerr;
jpeg_create_decompress(&dinfo);
JPEGDestroyer destroy_dinfo(&dinfo);
- FILE *fp = fopen(filename.c_str(), "rb");
- if (fp == nullptr) {
- perror(filename.c_str());
- exit(1);
- }
- jpeg_stdio_src(&dinfo, fp);
-
+ jpeg_mem_src(&dinfo, reinterpret_cast<const unsigned char *>(jpeg.data()), jpeg.size());
jpeg_read_header(&dinfo, true);
- // Read the data that comes after the header. VA-API will destuff and all for us.
- std::string str((const char *)dinfo.src->next_input_byte, dinfo.src->bytes_in_buffer);
- while (!feof(fp)) {
- char buf[4096];
- size_t ret = fread(buf, 1, sizeof(buf), fp);
- str.append(buf, ret);
- }
- fclose(fp);
-
if (dinfo.num_components != 3) {
fprintf(stderr, "Not a color JPEG. (%d components, Y=%dx%d, Cb=%dx%d, Cr=%dx%d)\n",
dinfo.num_components,
// Slice parameters (metadata about the slice).
VASliceParameterBufferJPEGBaseline parms;
memset(&parms, 0, sizeof(parms));
- parms.slice_data_size = str.size();
+ parms.slice_data_size = dinfo.src->bytes_in_buffer;
parms.slice_data_offset = 0;
parms.slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
parms.slice_horizontal_position = 0;
CHECK_VASTATUS_RET(va_status, "vaCreateBuffer");
VABufferDestroyer destroy_slice_param(va_dpy->va_dpy, slice_param_buffer);
- // The actual data.
+ // The actual data. VA-API will destuff and all for us.
VABufferID data_buffer;
- va_status = vaCreateBuffer(va_dpy->va_dpy, config_id, VASliceDataBufferType, str.size(), 1, &str[0], &data_buffer);
+ va_status = vaCreateBuffer(va_dpy->va_dpy, config_id, VASliceDataBufferType, dinfo.src->bytes_in_buffer, 1, const_cast<unsigned char *>(dinfo.src->next_input_byte), &data_buffer);
CHECK_VASTATUS_RET(va_status, "vaCreateBuffer");
VABufferDestroyer destroy_data(va_dpy->va_dpy, data_buffer);
std::string get_usable_va_display();
void init_jpeg_vaapi();
-std::shared_ptr<Frame> decode_jpeg_vaapi(const std::string &filename);
+std::shared_ptr<Frame> decode_jpeg_vaapi(const std::string &jpeg);
extern bool vaapi_jpeg_decoding_usable;
extern HTTPD *global_httpd;
-namespace {
-
-string read_file(const string &filename)
-{
- FILE *fp = fopen(filename.c_str(), "rb");
- if (fp == nullptr) {
- perror(filename.c_str());
- return "";
- }
-
- fseek(fp, 0, SEEK_END);
- long len = ftell(fp);
- rewind(fp);
-
- string ret;
- ret.resize(len);
- fread(&ret[0], len, 1, fp);
- fclose(fp);
- return ret;
-}
-
-} // namespace
-
struct VectorDestinationManager {
jpeg_destination_mgr pub;
std::vector<uint8_t> dest;
void VideoStream::schedule_original_frame(steady_clock::time_point local_pts,
int64_t output_pts, function<void()> &&display_func,
QueueSpotHolder &&queue_spot_holder,
- unsigned stream_idx, int64_t input_pts)
+ FrameOnDisk frame)
{
- fprintf(stderr, "output_pts=%ld original input_pts=%ld\n", output_pts, input_pts);
+ fprintf(stderr, "output_pts=%ld original input_pts=%ld\n", output_pts, frame.pts);
// Preload the file from disk, so that the encoder thread does not get stalled.
// TODO: Consider sending it through the queue instead.
- (void)read_file(filename_for_frame(stream_idx, input_pts));
+ (void)read_frame(frame);
QueuedFrame qf;
qf.local_pts = local_pts;
qf.type = QueuedFrame::ORIGINAL;
qf.output_pts = output_pts;
- qf.stream_idx = stream_idx;
- qf.input_first_pts = input_pts;
+ qf.frame1 = frame;
qf.display_func = move(display_func);
qf.queue_spot_holder = move(queue_spot_holder);
void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64_t output_pts,
function<void()> &&display_func,
QueueSpotHolder &&queue_spot_holder,
- unsigned stream_idx, int64_t input_pts, int secondary_stream_idx,
- int64_t secondary_input_pts, float fade_alpha)
+ FrameOnDisk frame1_spec, FrameOnDisk frame2_spec,
+ float fade_alpha)
{
- fprintf(stderr, "output_pts=%ld faded input_pts=%ld,%ld fade_alpha=%.2f\n", output_pts, input_pts, secondary_input_pts, fade_alpha);
+ fprintf(stderr, "output_pts=%ld faded input_pts=%ld,%ld fade_alpha=%.2f\n", output_pts, frame1_spec.pts, frame2_spec.pts, fade_alpha);
// Get the temporary OpenGL resources we need for doing the fade.
// (We share these with interpolated frames, which is slightly
bool did_decode;
- JPEGID jpeg_id1;
- jpeg_id1.stream_idx = stream_idx;
- jpeg_id1.pts = input_pts;
- shared_ptr<Frame> frame1 = decode_jpeg_with_cache(jpeg_id1, DECODE_IF_NOT_IN_CACHE, &did_decode);
-
- JPEGID jpeg_id2;
- jpeg_id2.stream_idx = secondary_stream_idx;
- jpeg_id2.pts = secondary_input_pts;
- shared_ptr<Frame> frame2 = decode_jpeg_with_cache(jpeg_id2, DECODE_IF_NOT_IN_CACHE, &did_decode);
+ shared_ptr<Frame> frame1 = decode_jpeg_with_cache(frame1_spec, DECODE_IF_NOT_IN_CACHE, &did_decode);
+ shared_ptr<Frame> frame2 = decode_jpeg_with_cache(frame2_spec, DECODE_IF_NOT_IN_CACHE, &did_decode);
ycbcr_semiplanar_converter->prepare_chain_for_fade(frame1, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, 1280, 720);
qf.local_pts = local_pts;
qf.type = QueuedFrame::FADED;
qf.output_pts = output_pts;
- qf.stream_idx = stream_idx;
- qf.input_first_pts = input_pts;
+ qf.frame1 = frame1_spec;
qf.display_func = move(display_func);
qf.queue_spot_holder = move(queue_spot_holder);
- qf.secondary_stream_idx = secondary_stream_idx;
- qf.secondary_input_pts = secondary_input_pts;
+ qf.secondary_frame = frame2_spec;
// Subsample and split Cb/Cr.
chroma_subsampler->subsample_chroma(resources->fade_cbcr_output_tex, 1280, 720, resources->cb_tex, resources->cr_tex);
void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts,
int64_t output_pts, function<void(shared_ptr<Frame>)> &&display_func,
QueueSpotHolder &&queue_spot_holder,
- unsigned stream_idx, int64_t input_first_pts,
- int64_t input_second_pts, float alpha,
- int secondary_stream_idx, int64_t secondary_input_pts,
- float fade_alpha)
+ FrameOnDisk frame1, FrameOnDisk frame2,
+ float alpha, FrameOnDisk secondary_frame, float fade_alpha)
{
- if (secondary_stream_idx != -1) {
- fprintf(stderr, "output_pts=%ld interpolated input_pts1=%ld input_pts2=%ld alpha=%.3f secondary_pts=%ld fade_alpha=%.2f\n", output_pts, input_first_pts, input_second_pts, alpha, secondary_input_pts, fade_alpha);
+ if (secondary_frame.pts != -1) {
+ fprintf(stderr, "output_pts=%ld interpolated input_pts1=%ld input_pts2=%ld alpha=%.3f secondary_pts=%ld fade_alpha=%.2f\n", output_pts, frame1.pts, frame2.pts, alpha, secondary_frame.pts, fade_alpha);
} else {
- fprintf(stderr, "output_pts=%ld interpolated input_pts1=%ld input_pts2=%ld alpha=%.3f\n", output_pts, input_first_pts, input_second_pts, alpha);
+ fprintf(stderr, "output_pts=%ld interpolated input_pts1=%ld input_pts2=%ld alpha=%.3f\n", output_pts, frame1.pts, frame2.pts, alpha);
}
// Get the temporary OpenGL resources we need for doing the interpolation.
}
QueuedFrame qf;
- qf.type = (secondary_stream_idx == -1) ? QueuedFrame::INTERPOLATED : QueuedFrame::FADED_INTERPOLATED;
+ qf.type = (secondary_frame.pts == -1) ? QueuedFrame::INTERPOLATED : QueuedFrame::FADED_INTERPOLATED;
qf.output_pts = output_pts;
- qf.stream_idx = stream_idx;
qf.display_decoded_func = move(display_func);
qf.queue_spot_holder = move(queue_spot_holder);
qf.local_pts = local_pts;
// Convert frame0 and frame1 to OpenGL textures.
for (size_t frame_no = 0; frame_no < 2; ++frame_no) {
- JPEGID jpeg_id;
- jpeg_id.stream_idx = stream_idx;
- jpeg_id.pts = frame_no == 1 ? input_second_pts : input_first_pts;
+ FrameOnDisk frame_spec = frame_no == 1 ? frame2 : frame1;
bool did_decode;
- shared_ptr<Frame> frame = decode_jpeg_with_cache(jpeg_id, DECODE_IF_NOT_IN_CACHE, &did_decode);
+ shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, DECODE_IF_NOT_IN_CACHE, &did_decode);
ycbcr_converter->prepare_chain_for_conversion(frame)->render_to_fbo(resources->input_fbos[frame_no], 1280, 720);
}
qf.flow_tex = compute_flow->exec(resources->gray_tex, DISComputeFlow::FORWARD_AND_BACKWARD, DISComputeFlow::DO_NOT_RESIZE_FLOW);
check_error();
- if (secondary_stream_idx != -1) {
+ if (secondary_frame.pts != -1) {
// Fade. First kick off the interpolation.
tie(qf.output_tex, ignore) = interpolate_no_split->exec(resources->input_tex, resources->gray_tex, qf.flow_tex, 1280, 720, alpha);
check_error();
// Now decode the image we are fading against.
- JPEGID jpeg_id;
- jpeg_id.stream_idx = secondary_stream_idx;
- jpeg_id.pts = secondary_input_pts;
bool did_decode;
- shared_ptr<Frame> frame2 = decode_jpeg_with_cache(jpeg_id, DECODE_IF_NOT_IN_CACHE, &did_decode);
+ shared_ptr<Frame> frame2 = decode_jpeg_with_cache(secondary_frame, DECODE_IF_NOT_IN_CACHE, &did_decode);
// Then fade against it, putting it into the fade Y' and CbCr textures.
ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, 1280, 720);
glPixelStorei(GL_PACK_ROW_LENGTH, 0);
glBindBuffer(GL_PIXEL_PACK_BUFFER, resources->pbo);
check_error();
- if (secondary_stream_idx != -1) {
+ if (secondary_frame.pts != -1) {
glGetTextureImage(resources->fade_y_output_tex, 0, GL_RED, GL_UNSIGNED_BYTE, 1280 * 720 * 4, BUFFER_OFFSET(0));
} else {
glGetTextureImage(qf.output_tex, 0, GL_RED, GL_UNSIGNED_BYTE, 1280 * 720 * 4, BUFFER_OFFSET(0));
if (qf.type == QueuedFrame::ORIGINAL) {
// Send the JPEG frame on, unchanged.
- string jpeg = read_file(filename_for_frame(qf.stream_idx, qf.input_first_pts));
+ string jpeg = read_frame(qf.frame1);
AVPacket pkt;
av_init_packet(&pkt);
pkt.stream_index = 0;
#include <libavformat/avio.h>
}
+#include "frame_on_disk.h"
#include "jpeg_frame_view.h"
#include "ref_counted_gl_sync.h"
#include "queue_spot_holder.h"
void schedule_original_frame(std::chrono::steady_clock::time_point,
int64_t output_pts, std::function<void()> &&display_func,
QueueSpotHolder &&queue_spot_holder,
- unsigned stream_idx, int64_t input_pts);
+ FrameOnDisk frame);
void schedule_faded_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
std::function<void()> &&display_func,
QueueSpotHolder &&queue_spot_holder,
- unsigned stream_idx, int64_t input_pts, int secondary_stream_idx,
- int64_t secondary_input_pts, float fade_alpha);
+ FrameOnDisk frame1, FrameOnDisk frame2,
+ float fade_alpha);
void schedule_interpolated_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
std::function<void(std::shared_ptr<Frame>)> &&display_func,
QueueSpotHolder &&queue_spot_holder,
- unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts,
- float alpha, int secondary_stream_idx = -1, int64_t secondary_inputs_pts = -1,
- float fade_alpha = 0.0f); // -1 = no secondary frame.
+ FrameOnDisk frame1, FrameOnDisk frame2,
+ float alpha, FrameOnDisk secondary_frame = {}, // Empty = no secondary (fade) frame.
+ float fade_alpha = 0.0f);
void schedule_refresh_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
std::function<void()> &&display_func,
QueueSpotHolder &&queue_spot_holder);
int64_t output_pts;
enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED, REFRESH } type;
- unsigned stream_idx;
- int64_t input_first_pts; // The only pts for original frames.
+ FrameOnDisk frame1; // The only frame needed for ORIGINAL-type frames.
// For fades only (including fades against interpolated frames).
- int secondary_stream_idx = -1;
- int64_t secondary_input_pts;
+ FrameOnDisk secondary_frame;
// For interpolated frames only.
- int64_t input_second_pts;
+ FrameOnDisk frame2;
float alpha;
BorrowedInterpolatedFrameResources resources;
RefCountedGLsync fence; // Set when the interpolated image is read back to the CPU.
GLuint flow_tex, output_tex, cbcr_tex; // Released in the receiving thread; not really used for anything else.
- JPEGID id;
+ FrameOnDisk id;
std::function<void()> display_func; // Called when the image is done decoding.
std::function<void(std::shared_ptr<Frame>)> display_decoded_func; // Same, except for INTERPOLATED and FADED_INTERPOLATED.