git.sesse.net Git - nageru/commitdiff
Change from file-per-frame to multiple frames per file.
author    Steinar H. Gunderson <sgunderson@bigfoot.com>
          Thu, 22 Nov 2018 23:48:15 +0000 (00:48 +0100)
committer Steinar H. Gunderson <sgunderson@bigfoot.com>
          Sun, 25 Nov 2018 22:36:26 +0000 (23:36 +0100)
This puts a lot less strain on the file system, and should hopefully
also make for much more sequential I/O, which is important when using
non-SSDs.

15 files changed:
disk_space_estimator.cpp
disk_space_estimator.h
frame.proto [new file with mode: 0644]
frame_on_disk.h [new file with mode: 0644]
jpeg_frame_view.cpp
jpeg_frame_view.h
main.cpp
mainwindow.cpp
meson.build
player.cpp
player.h
vaapi_jpeg_decoder.cpp
vaapi_jpeg_decoder.h
video_stream.cpp
video_stream.h

diff --git a/disk_space_estimator.cpp b/disk_space_estimator.cpp
index 5ce507020d58b6918049757f6d533cce02b7a069..69de4d5578d24877664ff45ddb70b5972911e3fe 100644 (file)
--- a/disk_space_estimator.cpp
+++ b/disk_space_estimator.cpp
@@ -12,7 +12,7 @@ DiskSpaceEstimator::DiskSpaceEstimator(DiskSpaceEstimator::callback_t callback)
 {
 }
 
-void DiskSpaceEstimator::report_write(const std::string &filename, uint64_t pts)
+void DiskSpaceEstimator::report_write(const std::string &filename, size_t bytes, uint64_t pts)
 {
        // Reject points that are out-of-order (happens with B-frames).
        if (!measure_points.empty() && pts <= measure_points.back().pts) {
@@ -24,13 +24,7 @@ void DiskSpaceEstimator::report_write(const std::string &filename, uint64_t pts)
                measure_points.pop_front();
        }
 
-       struct stat st;
-       if (stat(filename.c_str(), &st) == -1) {
-               perror(filename.c_str());
-               return;
-       }
-
-       total_size += st.st_size;
+       total_size += bytes;
 
        struct statfs fst;
        if (statfs(filename.c_str(), &fst) == -1) {
diff --git a/disk_space_estimator.h b/disk_space_estimator.h
index a86ee2e486a799a8de3530488265aa89693d6bdb..f02cb9c616f804003818e522ff4aa001eb72ff8c 100644 (file)
--- a/disk_space_estimator.h
+++ b/disk_space_estimator.h
@@ -22,14 +22,11 @@ public:
        typedef std::function<void(off_t free_bytes, double estimated_seconds_left)> callback_t;
        DiskSpaceEstimator(callback_t callback);
 
-       // Report that a video frame with the given pts has just been written
-       // to the given file, so the estimator should stat the file and see
-       // by big it is. (The file is assumed to hold only that single frame,
-       // unlike in Nageru, where it is a growing file.)
+       // Report that a video frame with the given pts and size has just been
+       // written (possibly appended) to the given file.
        //
-       // If the filename changed since last time, the estimation is reset.
        // <pts> is taken to be in TIMEBASE units (see timebase.h).
-       void report_write(const std::string &filename, uint64_t pts);
+       void report_write(const std::string &filename, size_t bytes, uint64_t pts);
 
 private:
        static constexpr int64_t window_length = 30 * TIMEBASE;
diff --git a/frame.proto b/frame.proto
new file mode 100644 (file)
index 0000000..766b3c1
--- /dev/null
+++ b/frame.proto
@@ -0,0 +1,14 @@
+syntax = "proto3";
+
+// Used as header before each frame in a .frames file:
+//
+//  1. "Ftbifrm0" (8 bytes, ASCII -- note that no byte repeats)
+//  2. Length of upcoming FrameHeaderProto (uint32, binary, big endian)
+//  3. The FrameHeaderProto itself
+//  4. The actual frame
+
+message FrameHeaderProto {
+       int32 stream_idx = 1;
+       int64 pts = 2;
+       int64 file_size = 3;  // In bytes of compressed frame.
+}
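
The comment block above fully specifies the on-disk framing: 8-byte magic, big-endian length, serialized FrameHeaderProto, then the compressed frame. As a rough illustration (not part of this commit), a reader for a single record could look like the sketch below. read_one_record and its reduced error handling are invented for the example, and it assumes the frame.pb.h generated from frame.proto is on the include path.

#include <arpa/inet.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#include <string>

#include "frame.pb.h"

// Reads one "Ftbifrm0" record from the current file position.
// Returns false if the stream is not positioned at a valid record.
bool read_one_record(FILE *fp, FrameHeaderProto *hdr, std::string *jpeg)
{
	char magic[8];
	if (fread(magic, sizeof(magic), 1, fp) != 1 ||
	    memcmp(magic, "Ftbifrm0", sizeof(magic)) != 0) {
		return false;
	}

	uint32_t len;  // Length of the serialized header, big endian.
	if (fread(&len, sizeof(len), 1, fp) != 1) {
		return false;
	}

	std::string serialized(ntohl(len), '\0');
	if (fread(&serialized[0], serialized.size(), 1, fp) != 1 ||
	    !hdr->ParseFromString(serialized)) {
		return false;
	}

	// The compressed frame follows immediately; its size is in the header.
	jpeg->resize(hdr->file_size());
	return fread(&(*jpeg)[0], jpeg->size(), 1, fp) == 1;
}

load_frame_file() in main.cpp below does essentially the same thing, but additionally resynchronizes on the magic, so a truncated or partially corrupted file only loses the affected frames.
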
diff --git a/frame_on_disk.h b/frame_on_disk.h
new file mode 100644 (file)
index 0000000..a0f46ad
--- /dev/null
+++ b/frame_on_disk.h
@@ -0,0 +1,24 @@
+#ifndef _FRAME_ON_DISK_H
+#define _FRAME_ON_DISK_H 1
+
+#include <mutex>
+#include <string>
+#include <vector>
+
+#include <stdint.h>
+
+#include "defs.h"
+
+extern std::mutex frame_mu;
+struct FrameOnDisk {
+        int64_t pts = -1;  // -1 means empty.
+        off_t offset;
+        unsigned filename_idx;
+        uint32_t size;  // Not using size_t saves a few bytes; we can have so many frames.
+};
+extern std::vector<FrameOnDisk> frames[MAX_STREAMS];  // Under frame_mu.
+extern std::vector<std::string> frame_filenames;  // Under frame_mu.
+
+std::string read_frame(FrameOnDisk frame);
+
+#endif  // !defined(_FRAME_ON_DISK_H)
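
For reference (also not part of the commit), the (filename_idx, offset, size) triple in FrameOnDisk is all that is needed to get the compressed bytes back. The patch's read_frame() in main.cpp opens the file with fopen()/fseek(); the sketch below does the same lookup with pread() purely to keep the illustration short, and read_frame_sketch is an invented name.

#include <fcntl.h>
#include <unistd.h>

#include <string>

#include "frame_on_disk.h"

// Fetch the compressed frame referenced by a FrameOnDisk entry.
std::string read_frame_sketch(const FrameOnDisk &frame)
{
	std::string filename;
	{
		std::lock_guard<std::mutex> lock(frame_mu);
		filename = frame_filenames[frame.filename_idx];
	}

	std::string jpeg(frame.size, '\0');
	int fd = open(filename.c_str(), O_RDONLY);
	if (fd == -1 ||
	    pread(fd, &jpeg[0], frame.size, frame.offset) != ssize_t(frame.size)) {
		jpeg.clear();  // Missing or truncated file.
	}
	if (fd != -1) {
		close(fd);
	}
	return jpeg;
}
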
diff --git a/jpeg_frame_view.cpp b/jpeg_frame_view.cpp
index f960d4b606ea4149c80044d724d74e4936d20750..be53688a03ee33bca3bb50688030978eb411ade2 100644 (file)
--- a/jpeg_frame_view.cpp
+++ b/jpeg_frame_view.cpp
@@ -30,13 +30,18 @@ using namespace std;
 namespace {
 
 // Just an arbitrary order for std::map.
-struct JPEGIDLexicalOrder
+struct FrameOnDiskLexicalOrder
 {
-       bool operator() (const JPEGID &a, const JPEGID &b) const
+       bool operator() (const FrameOnDisk &a, const FrameOnDisk &b) const
        {
-               if (a.stream_idx != b.stream_idx)
-                       return a.stream_idx < b.stream_idx;
-               return a.pts < b.pts;
+               if (a.pts != b.pts)
+                       return a.pts < b.pts;
+               if (a.offset != b.offset)
+                       return a.offset < b.offset;
+               if (a.filename_idx != b.filename_idx)
+                       return a.filename_idx < b.filename_idx;
+               assert(a.size == b.size);
+               return false;
        }
 };
 
@@ -56,7 +61,7 @@ struct PendingDecode {
        JPEGFrameView *destination;
 
        // For actual decodes (only if frame below is nullptr).
-       JPEGID primary, secondary;
+       FrameOnDisk primary, secondary;
        float fade_alpha;  // Irrelevant if secondary.stream_idx == -1.
 
        // Already-decoded frames are also sent through PendingDecode,
@@ -69,7 +74,7 @@ struct PendingDecode {
 
 thread JPEGFrameView::jpeg_decoder_thread;
 mutex cache_mu;
-map<JPEGID, LRUFrame, JPEGIDLexicalOrder> cache;  // Under cache_mu.
+map<FrameOnDisk, LRUFrame, FrameOnDiskLexicalOrder> cache;  // Under cache_mu.
 size_t cache_bytes_used = 0;  // Under cache_mu.
 condition_variable any_pending_decodes;
 deque<PendingDecode> pending_decodes;  // Under cache_mu.
@@ -202,12 +207,12 @@ void prune_cache()
        }
 }
 
-shared_ptr<Frame> decode_jpeg_with_cache(JPEGID id, CacheMissBehavior cache_miss_behavior, bool *did_decode)
+shared_ptr<Frame> decode_jpeg_with_cache(FrameOnDisk frame_spec, CacheMissBehavior cache_miss_behavior, bool *did_decode)
 {
        *did_decode = false;
        {
                unique_lock<mutex> lock(cache_mu);
-               auto it = cache.find(id);
+               auto it = cache.find(frame_spec);
                if (it != cache.end()) {
                        it->second.last_used = event_counter++;
                        return it->second.frame;
@@ -219,11 +224,11 @@ shared_ptr<Frame> decode_jpeg_with_cache(JPEGID id, CacheMissBehavior cache_miss
        }
 
        *did_decode = true;
-       shared_ptr<Frame> frame = decode_jpeg(filename_for_frame(id.stream_idx, id.pts));
+       shared_ptr<Frame> frame = decode_jpeg(read_frame(frame_spec));
 
        unique_lock<mutex> lock(cache_mu);
        cache_bytes_used += frame_size(*frame);
-       cache[id] = LRUFrame{ frame, event_counter++ };
+       cache[frame_spec] = LRUFrame{ frame, event_counter++ };
 
        if (cache_bytes_used > size_t(CACHE_SIZE_MB) * 1024 * 1024) {
                prune_cache();
@@ -269,14 +274,14 @@ void jpeg_decoder_thread_func()
                shared_ptr<Frame> primary_frame, secondary_frame;
                bool drop = false;
                for (int subframe_idx = 0; subframe_idx < 2; ++subframe_idx) {
-                       const JPEGID &id = (subframe_idx == 0 ? decode.primary : decode.secondary);
-                       if (id.stream_idx == (unsigned)-1) {
+                       const FrameOnDisk &frame_spec = (subframe_idx == 0 ? decode.primary : decode.secondary);
+                       if (frame_spec.pts == -1) {
                                // No secondary frame.
                                continue;
                        }
 
                        bool found_in_cache;
-                       shared_ptr<Frame> frame = decode_jpeg_with_cache(id, cache_miss_behavior, &found_in_cache);
+                       shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, cache_miss_behavior, &found_in_cache);
 
                        if (frame == nullptr) {
                                assert(cache_miss_behavior == RETURN_NULLPTR_IF_NOT_IN_CACHE);
@@ -318,15 +323,14 @@ JPEGFrameView::JPEGFrameView(QWidget *parent)
 {
 }
 
-void JPEGFrameView::setFrame(unsigned stream_idx, int64_t pts, int secondary_stream_idx, int64_t secondary_pts, float fade_alpha)
+void JPEGFrameView::setFrame(unsigned stream_idx, FrameOnDisk frame, FrameOnDisk secondary_frame, float fade_alpha)
 {
-       if (secondary_stream_idx != -1) assert(secondary_pts != -1);
        current_stream_idx = stream_idx;  // TODO: Does this interact with fades?
 
        unique_lock<mutex> lock(cache_mu);
        PendingDecode decode;
-       decode.primary = JPEGID{ stream_idx, pts };
-       decode.secondary = JPEGID{ (unsigned)secondary_stream_idx, secondary_pts };
+       decode.primary = frame;
+       decode.secondary = secondary_frame;
        decode.fade_alpha = fade_alpha;
        decode.destination = this;
        pending_decodes.push_back(decode);
diff --git a/jpeg_frame_view.h b/jpeg_frame_view.h
index 65a6608991ce8337616afdaf3d7f6f12a64d9362..bc36283774e987c69dd14692210722ac4d10bca8 100644 (file)
--- a/jpeg_frame_view.h
+++ b/jpeg_frame_view.h
@@ -1,6 +1,7 @@
 #ifndef _JPEG_FRAME_VIEW_H
 #define _JPEG_FRAME_VIEW_H 1
 
+#include "frame_on_disk.h"
 #include "jpeg_frame.h"
 #include "ycbcr_converter.h"
 
 #include <stdint.h>
 #include <thread>
 
-struct JPEGID {
-       unsigned stream_idx;
-       int64_t pts;
-};
 enum CacheMissBehavior {
        DECODE_IF_NOT_IN_CACHE,
        RETURN_NULLPTR_IF_NOT_IN_CACHE
 };
 
-std::string filename_for_frame(unsigned stream_idx, int64_t pts);
 std::shared_ptr<Frame> decode_jpeg(const std::string &filename);
-std::shared_ptr<Frame> decode_jpeg_with_cache(JPEGID id, CacheMissBehavior cache_miss_behavior, bool *did_decode);
+std::shared_ptr<Frame> decode_jpeg_with_cache(FrameOnDisk id, CacheMissBehavior cache_miss_behavior, bool *did_decode);
 
 class JPEGFrameView : public QGLWidget {
        Q_OBJECT
@@ -33,7 +29,7 @@ class JPEGFrameView : public QGLWidget {
 public:
        JPEGFrameView(QWidget *parent);
 
-       void setFrame(unsigned stream_idx, int64_t pts, int secondary_stream_idx = -1, int64_t secondary_pts = -1, float fade_alpha = 0.0f);
+       void setFrame(unsigned stream_idx, FrameOnDisk frame, FrameOnDisk secondary_frame = {}, float fade_alpha = 0.0f);
        void setFrame(std::shared_ptr<Frame> frame);
 
        void mousePressEvent(QMouseEvent *event) override;
diff --git a/main.cpp b/main.cpp
index 1d29735f92a2180f8734e4793571a15befbe50c8..e04af2251240379c4288d4e72f7ab2ea670d42a0 100644 (file)
--- a/main.cpp
+++ b/main.cpp
@@ -1,4 +1,5 @@
 #include <assert.h>
+#include <arpa/inet.h>
 #include <atomic>
 #include <chrono>
 #include <condition_variable>
@@ -24,6 +25,8 @@ extern "C" {
 #include "disk_space_estimator.h"
 #include "ffmpeg_raii.h"
 #include "flags.h"
+#include "frame_on_disk.h"
+#include "frame.pb.h"
 #include "httpd.h"
 #include "mainwindow.h"
 #include "player.h"
@@ -34,12 +37,17 @@ extern "C" {
 #include "vaapi_jpeg_decoder.h"
 
 #include <QApplication>
+#include <QGLFormat>
+#include <QSurfaceFormat>
 #include <movit/init.h>
 #include <movit/util.h>
 
 using namespace std;
 using namespace std::chrono;
 
+constexpr char frame_magic[] = "Ftbifrm0";
+constexpr size_t frame_magic_len = 8;
+
 mutex RefCountedGLsync::fence_lock;
 atomic<bool> should_quit{false};
 
@@ -48,16 +56,104 @@ int64_t start_pts = -1;
 // TODO: Replace by some sort of GUI control, I guess.
 int64_t current_pts = 0;
 
-string filename_for_frame(unsigned stream_idx, int64_t pts)
+struct FrameFile {
+       FILE *fp = nullptr;
+       int filename_idx;
+       size_t frames_written_so_far = 0;
+};
+std::map<int, FrameFile> open_frame_files;
+
+mutex frame_mu;
+vector<FrameOnDisk> frames[MAX_STREAMS];  // Under frame_mu.
+vector<string> frame_filenames;  // Under frame_mu.
+
+namespace {
+
+FrameOnDisk write_frame(int stream_idx, int64_t pts, const uint8_t *data, size_t size)
 {
-       char filename[256];
-       snprintf(filename, sizeof(filename), "%s/frames/cam%d-pts%09ld.jpeg",
-               global_flags.working_directory.c_str(), stream_idx, pts);
-       return filename;
+       if (open_frame_files.count(stream_idx) == 0) {
+               char filename[256];
+               snprintf(filename, sizeof(filename), "%s/frames/cam%d-pts%09ld.frames",
+                       global_flags.working_directory.c_str(), stream_idx, pts);
+               FILE *fp = fopen(filename, "wb");
+               if (fp == nullptr) {
+                       perror(filename);
+                       exit(1);
+               }
+
+               lock_guard<mutex> lock(frame_mu);
+               int filename_idx = frame_filenames.size();
+               frame_filenames.push_back(filename);
+               open_frame_files[stream_idx] = FrameFile{ fp, filename_idx, 0 };
+       }
+
+       FrameFile &file = open_frame_files[stream_idx];
+       string filename;
+       {
+               lock_guard<mutex> lock(frame_mu);
+               filename = frame_filenames[file.filename_idx];
+       }
+
+       FrameHeaderProto hdr;
+       hdr.set_stream_idx(stream_idx);
+       hdr.set_pts(pts);
+       hdr.set_file_size(size);
+
+       string serialized;
+       if (!hdr.SerializeToString(&serialized)) {
+               fprintf(stderr, "Frame header serialization failed.\n");
+               exit(1);
+       }
+       uint32_t len = htonl(serialized.size());
+
+       if (fwrite(frame_magic, frame_magic_len, 1, file.fp) != 1) {
+               perror("fwrite");
+               exit(1);
+       }
+       if (fwrite(&len, sizeof(len), 1, file.fp) != 1) {
+               perror("fwrite");
+               exit(1);
+       }
+       if (fwrite(serialized.data(), serialized.size(), 1, file.fp) != 1) {
+               perror("fwrite");
+               exit(1);
+       }
+       off_t offset = ftell(file.fp);
+       if (fwrite(data, size, 1, file.fp) != 1) {
+               perror("fwrite");
+               exit(1);
+       }
+       fflush(file.fp);  // No fsync(), though. We can accept losing a few frames.
+       global_disk_space_estimator->report_write(filename, 8 + sizeof(len) + serialized.size() + size, pts);
+
+       if (++file.frames_written_so_far >= 1000) {
+               // Start a new file next time.
+               if (fclose(file.fp) != 0) {
+                       perror("fclose");
+                       exit(1);
+               }
+               open_frame_files.erase(stream_idx);
+
+               // TODO: Write to SQLite.
+       }
+
+       FrameOnDisk frame;
+       frame.pts = pts;
+       frame.filename_idx = file.filename_idx;
+       frame.offset = offset;
+       frame.size = size;
+
+       {
+               lock_guard<mutex> lock(frame_mu);
+               assert(stream_idx < MAX_STREAMS);
+               frames[stream_idx].push_back(frame);
+       }
+
+       return frame;
 }
 
-mutex frame_mu;
-vector<int64_t> frames[MAX_STREAMS];
+} // namespace
+
 HTTPD *global_httpd;
 
 void load_existing_frames();
@@ -144,6 +240,84 @@ int main(int argc, char **argv)
        return ret;
 }
 
+void load_frame_file(const char *filename, unsigned filename_idx)
+{
+       // TODO: Look up in the SQLite database.
+
+       FILE *fp = fopen(filename, "rb");
+       if (fp == nullptr) {
+               perror(filename);
+               exit(1);
+       }
+
+       size_t magic_offset = 0;
+       size_t skipped_bytes = 0;
+       while (!feof(fp) && !ferror(fp)) {
+               int ch = getc(fp);
+               if (ch == -1) {
+                       break;
+               }
+               if (ch != frame_magic[magic_offset++]) {
+                       skipped_bytes += magic_offset;
+                       magic_offset = 0;
+                       continue;
+               }
+               if (magic_offset < frame_magic_len) {
+                       // Still reading the magic (hopefully).
+                       continue;
+               }
+
+               // OK, found the magic. Try to parse the frame header.
+               magic_offset = 0;
+
+               if (skipped_bytes > 0)  {
+                       fprintf(stderr, "WARNING: %s: Skipped %zu garbage bytes in the middle.\n",
+                               filename, skipped_bytes);
+                       skipped_bytes = 0;
+               }
+
+               uint32_t len;
+               if (fread(&len, sizeof(len), 1, fp) != 1) {
+                       fprintf(stderr, "WARNING: %s: Short read when getting length.\n", filename);
+                       break;
+               }
+
+               string serialized;
+               serialized.resize(ntohl(len));
+               if (fread(&serialized[0], serialized.size(), 1, fp) != 1) {
+                       fprintf(stderr, "WARNING: %s: Short read when reading frame header (%zu bytes).\n", filename, serialized.size());
+                       break;
+               }
+
+               FrameHeaderProto hdr;
+               if (!hdr.ParseFromString(serialized)) {
+                       fprintf(stderr, "WARNING: %s: Corrupted frame header.\n", filename);
+                       continue;
+               }
+
+               FrameOnDisk frame;
+               frame.pts = hdr.pts();
+               frame.offset = ftell(fp);
+               frame.filename_idx = filename_idx;
+               frame.size = hdr.file_size();
+
+               if (fseek(fp, frame.offset + frame.size, SEEK_SET) == -1) {
+                       fprintf(stderr, "WARNING: %s: Could not seek past frame (probably truncated).\n", filename);
+                       continue;
+               }
+
+               if (hdr.stream_idx() >= 0 && hdr.stream_idx() < MAX_STREAMS) {
+                       frames[hdr.stream_idx()].push_back(frame);
+                       start_pts = max(start_pts, hdr.pts());
+               }
+       }
+
+       if (skipped_bytes > 0) {
+               fprintf(stderr, "WARNING: %s: Skipped %zu garbage bytes at the end.\n",
+                       filename, skipped_bytes);
+       }
+}
+
 void load_existing_frames()
 {
        string frame_dir = global_flags.working_directory + "/frames";
@@ -165,12 +339,10 @@ void load_existing_frames()
                        break;
                }
 
-               int stream_idx;
-               int64_t pts;
-               if (sscanf(de->d_name, "cam%d-pts%ld.jpeg", &stream_idx, &pts) == 2 &&
-                   stream_idx >= 0 && stream_idx < MAX_STREAMS) {
-                       frames[stream_idx].push_back(pts);
-                       start_pts = max(start_pts, pts);
+               if (de->d_type == DT_REG) {
+                       string filename = frame_dir + "/" + de->d_name;
+                       load_frame_file(filename.c_str(), frame_filenames.size());
+                       frame_filenames.push_back(filename);
                }
        }
 
@@ -184,7 +356,8 @@ void load_existing_frames()
        }
 
        for (int stream_idx = 0; stream_idx < MAX_STREAMS; ++stream_idx) {
-               sort(frames[stream_idx].begin(), frames[stream_idx].end());
+               sort(frames[stream_idx].begin(), frames[stream_idx].end(),
+                       [](const auto &a, const auto &b) { return a.pts < b.pts; });
        }
 }
 
@@ -225,32 +398,20 @@ int record_thread_func()
 
                //fprintf(stderr, "Got a frame from camera %d, pts = %ld, size = %d\n",
                //      pkt.stream_index, pts, pkt.size);
-               string filename = filename_for_frame(pkt.stream_index, pts);
-               FILE *fp = fopen(filename.c_str(), "wb");
-               if (fp == nullptr) {
-                       perror(filename.c_str());
-                       exit(1);
-               }
-               fwrite(pkt.data, pkt.size, 1, fp);
-               fclose(fp);
-
-               global_disk_space_estimator->report_write(filename, pts);
+               FrameOnDisk frame = write_frame(pkt.stream_index, pts, pkt.data, pkt.size);
 
-               post_to_main_thread([pkt, pts] {
+               post_to_main_thread([pkt, frame] {
                        if (pkt.stream_index == 0) {
-                               global_mainwindow->ui->input1_display->setFrame(pkt.stream_index, pts);
+                               global_mainwindow->ui->input1_display->setFrame(pkt.stream_index, frame);
                        } else if (pkt.stream_index == 1) {
-                               global_mainwindow->ui->input2_display->setFrame(pkt.stream_index, pts);
+                               global_mainwindow->ui->input2_display->setFrame(pkt.stream_index, frame);
                        } else if (pkt.stream_index == 2) {
-                               global_mainwindow->ui->input3_display->setFrame(pkt.stream_index, pts);
+                               global_mainwindow->ui->input3_display->setFrame(pkt.stream_index, frame);
                        } else if (pkt.stream_index == 3) {
-                               global_mainwindow->ui->input4_display->setFrame(pkt.stream_index, pts);
+                               global_mainwindow->ui->input4_display->setFrame(pkt.stream_index, frame);
                        }
                });
 
-               assert(pkt.stream_index < MAX_STREAMS);
-               frames[pkt.stream_index].push_back(pts);
-
                if (last_pts != -1 && global_flags.slow_down_input) {
                        this_thread::sleep_for(microseconds((pts - last_pts) * 1000000 / TIMEBASE));
                }
@@ -260,3 +421,33 @@ int record_thread_func()
 
        return 0;
 }
+
+string read_frame(FrameOnDisk frame)
+{
+       string filename;
+       {
+               lock_guard<mutex> lock(frame_mu);
+               filename = frame_filenames[frame.filename_idx];
+       }
+
+       // TODO: cache the open file handles
+       FILE *fp = fopen(filename.c_str(), "rb");
+       if (fp == nullptr) {
+               perror(filename.c_str());
+               exit(1);
+       }
+       if (fseek(fp, frame.offset, SEEK_SET) == -1) {
+               perror("fseek");
+               exit(1);
+       }
+
+       string str;
+       str.resize(frame.size);
+       if (fread(&str[0], frame.size, 1, fp) != 1) {
+               perror("fread");
+               exit(1);
+       }
+
+       fclose(fp);
+       return str;
+}
diff --git a/mainwindow.cpp b/mainwindow.cpp
index 13a9bd5516204473c88661c2775164c6bf9351b6..dd3c82764537bc23cb588a1dc9978e54db21b667 100644 (file)
--- a/mainwindow.cpp
+++ b/mainwindow.cpp
@@ -3,6 +3,7 @@
 #include "clip_list.h"
 #include "disk_space_estimator.h"
 #include "flags.h"
+#include "frame_on_disk.h"
 #include "player.h"
 #include "post_to_main_thread.h"
 #include "timebase.h"
@@ -25,8 +26,6 @@ static ClipList *cliplist_clips;
 static PlayList *playlist_clips;
 
 extern int64_t current_pts;
-extern mutex frame_mu;
-extern vector<int64_t> frames[MAX_STREAMS];
 
 MainWindow::MainWindow()
        : ui(new Ui::MainWindow),
@@ -641,18 +640,20 @@ void MainWindow::preview_single_frame(int64_t pts, unsigned stream_idx, MainWind
                lock_guard<mutex> lock(frame_mu);
                if (frames[stream_idx].empty())
                        return;
-               auto it = lower_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts);
+               auto it = lower_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts,
+                       [](const FrameOnDisk &frame, int64_t pts) { return frame.pts < pts; });
                if (it != frames[stream_idx].end()) {
-                       pts = *it;
+                       pts = it->pts;
                }
        } else {
                assert(rounding == FIRST_AT_OR_AFTER);
                lock_guard<mutex> lock(frame_mu);
                if (frames[stream_idx].empty())
                        return;
-               auto it = upper_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts - 1);
+               auto it = upper_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts - 1,
+                       [](int64_t pts, const FrameOnDisk &frame) { return pts < frame.pts; });
                if (it != frames[stream_idx].end()) {
-                       pts = *it;
+                       pts = it->pts;
                }
        }
 
diff --git a/meson.build b/meson.build
index 18f9272619bfcaa2bf77faa95e69c196db581648..874cb787ad6c36b256c13ca13fc719cf879ad12c 100644 (file)
--- a/meson.build
+++ b/meson.build
@@ -30,7 +30,7 @@ add_global_arguments('-DMOVIT_SHADER_DIR="' + r.stdout().strip() + '"', language
 gen = generator(protoc, \
   output    : ['@BASENAME@.pb.cc', '@BASENAME@.pb.h'],
   arguments : ['--proto_path=@CURRENT_SOURCE_DIR@', '--cpp_out=@BUILD_DIR@', '@INPUT@'])
-proto_generated = gen.process('state.proto')
+proto_generated = gen.process('state.proto', 'frame.proto')
 
 # Preprocess Qt as needed.
 moc_files = qt5.preprocess(
diff --git a/player.cpp b/player.cpp
index 6fbd1bdd04d7dd8e75c37f7e2667321611202bef..b0e862dd89e2b74a7ad6b46524399228f3cca076 100644 (file)
--- a/player.cpp
+++ b/player.cpp
@@ -4,6 +4,7 @@
 #include "context.h"
 #include "defs.h"
 #include "ffmpeg_raii.h"
+#include "frame_on_disk.h"
 #include "httpd.h"
 #include "jpeg_frame_view.h"
 #include "mux.h"
@@ -22,8 +23,6 @@
 using namespace std;
 using namespace std::chrono;
 
-extern mutex frame_mu;
-extern vector<int64_t> frames[MAX_STREAMS];
 extern HTTPD *global_httpd;
 
 void Player::thread_func(bool also_output_to_stream)
@@ -102,9 +101,10 @@ got_clip:
                        // Find the first frame such that frame.pts <= in_pts.
                        auto it = lower_bound(frames[stream_idx].begin(),
                                frames[stream_idx].end(),
-                               in_pts_origin);
+                               in_pts_origin,
+                               [](const FrameOnDisk &frame, int64_t pts) { return frame.pts < pts; });
                        if (it != frames[stream_idx].end()) {
-                               in_pts_origin = *it;
+                               in_pts_origin = it->pts;
                        }
                }
 
@@ -147,13 +147,12 @@ got_clip:
                        int64_t in_pts_for_progress = in_pts, in_pts_secondary_for_progress = -1;
 
                        int primary_stream_idx = stream_idx;
+                       FrameOnDisk secondary_frame;
                        int secondary_stream_idx = -1;
-                       int64_t secondary_pts = -1;
-                       int64_t in_pts_secondary = -1;
                        float fade_alpha = 0.0f;
                        if (got_next_clip && time_left_this_clip <= next_clip_fade_time) {
                                secondary_stream_idx = next_clip.stream_idx;
-                               in_pts_secondary = lrint(next_clip.pts_in + (next_clip_fade_time - time_left_this_clip) * TIMEBASE * speed);
+                               int64_t in_pts_secondary = lrint(next_clip.pts_in + (next_clip_fade_time - time_left_this_clip) * TIMEBASE * speed);
                                in_pts_secondary_for_progress = in_pts_secondary;
                                fade_alpha = 1.0f - time_left_this_clip / next_clip_fade_time;
 
@@ -165,12 +164,10 @@ got_clip:
                                        fade_alpha = 1.0f - fade_alpha;
                                }
 
-                               int64_t in_pts_lower, in_pts_upper;
-                               bool ok = find_surrounding_frames(in_pts_secondary, secondary_stream_idx, &in_pts_lower, &in_pts_upper);
+                               FrameOnDisk frame_lower, frame_upper;
+                               bool ok = find_surrounding_frames(in_pts_secondary, secondary_stream_idx, &frame_lower, &frame_upper);
                                if (ok) {
-                                       secondary_pts = in_pts_lower;
-                               } else {
-                                       secondary_stream_idx = -1;
+                                       secondary_frame = frame_lower;
                                }
                        }
 
@@ -188,8 +185,8 @@ got_clip:
                                progress_callback(progress);
                        }
 
-                       int64_t in_pts_lower, in_pts_upper;
-                       bool ok = find_surrounding_frames(in_pts, primary_stream_idx, &in_pts_lower, &in_pts_upper);
+                       FrameOnDisk frame_lower, frame_upper;
+                       bool ok = find_surrounding_frames(in_pts, primary_stream_idx, &frame_lower, &frame_upper);
                        if (!ok) {
                                break;
                        }
@@ -231,9 +228,9 @@ got_clip:
                                }
                        }
 
-                       if (in_pts_lower == in_pts_upper) {
-                               auto display_func = [this, primary_stream_idx, in_pts_lower, secondary_stream_idx, secondary_pts, fade_alpha]{
-                                       destination->setFrame(primary_stream_idx, in_pts_lower, secondary_stream_idx, secondary_pts, fade_alpha);
+                       if (frame_lower.pts == frame_upper.pts) {
+                               auto display_func = [this, primary_stream_idx, frame_lower, secondary_frame, fade_alpha]{
+                                       destination->setFrame(primary_stream_idx, frame_lower, secondary_frame, fade_alpha);
                                };
                                if (video_stream == nullptr) {
                                        display_func();
@@ -241,12 +238,12 @@ got_clip:
                                        if (secondary_stream_idx == -1) {
                                                video_stream->schedule_original_frame(
                                                        next_frame_start, pts, display_func, QueueSpotHolder(this),
-                                                       primary_stream_idx, in_pts_lower);
+                                                       frame_lower);
                                        } else {
-                                               assert(secondary_pts != -1);
+                                               assert(secondary_frame.pts != -1);
                                                video_stream->schedule_faded_frame(next_frame_start, pts, display_func,
-                                                       QueueSpotHolder(this), primary_stream_idx, in_pts_lower,
-                                                       secondary_stream_idx, secondary_pts, fade_alpha);
+                                                       QueueSpotHolder(this), frame_lower,
+                                                       secondary_frame, fade_alpha);
                                        }
                                }
                                continue;
@@ -256,11 +253,13 @@ got_clip:
                        // (ie., move less than 1% of an _output_ frame), do so.
                        // TODO: Snap secondary (fade-to) clips in the same fashion.
                        bool snapped = false;
-                       for (int64_t snap_pts : { in_pts_lower, in_pts_upper }) {
+                       for (int64_t snap_pts : { frame_lower.pts, frame_upper.pts }) {
                                double snap_pts_as_frameno = (snap_pts - in_pts_origin) * output_framerate / TIMEBASE / speed;
                                if (fabs(snap_pts_as_frameno - frameno) < 0.01) {
-                                       auto display_func = [this, primary_stream_idx, snap_pts, secondary_stream_idx, secondary_pts, fade_alpha]{
-                                               destination->setFrame(primary_stream_idx, snap_pts, secondary_stream_idx, secondary_pts, fade_alpha);
+                                       FrameOnDisk snap_frame = frame_lower;
+                                       snap_frame.pts = snap_pts;
+                                       auto display_func = [this, primary_stream_idx, snap_frame, secondary_frame, fade_alpha]{
+                                               destination->setFrame(primary_stream_idx, snap_frame, secondary_frame, fade_alpha);
                                        };
                                        if (video_stream == nullptr) {
                                                display_func();
@@ -268,12 +267,12 @@ got_clip:
                                                if (secondary_stream_idx == -1) {
                                                        video_stream->schedule_original_frame(
                                                                next_frame_start, pts, display_func,
-                                                               QueueSpotHolder(this), primary_stream_idx, snap_pts);
+                                                               QueueSpotHolder(this), snap_frame);
                                                } else {
-                                                       assert(secondary_pts != -1);
+                                                       assert(secondary_frame.pts != -1);
                                                        video_stream->schedule_faded_frame(
                                                                next_frame_start, pts, display_func, QueueSpotHolder(this),
-                                                               primary_stream_idx, snap_pts, secondary_stream_idx, secondary_pts, fade_alpha);
+                                                               snap_frame, secondary_frame, fade_alpha);
                                                }
                                        }
                                        in_pts_origin += snap_pts - in_pts;
@@ -291,20 +290,20 @@ got_clip:
                                continue;
                        }
 
-                       double alpha = double(in_pts - in_pts_lower) / (in_pts_upper - in_pts_lower);
+                       double alpha = double(in_pts - frame_lower.pts) / (frame_upper.pts - frame_lower.pts);
 
                        if (video_stream == nullptr) {
                                // Previews don't do any interpolation.
                                assert(secondary_stream_idx == -1);
-                               destination->setFrame(primary_stream_idx, in_pts_lower);
+                               destination->setFrame(primary_stream_idx, frame_lower);
                        } else {
                                auto display_func = [this](shared_ptr<Frame> frame) {
                                        destination->setFrame(frame);
                                };
                                video_stream->schedule_interpolated_frame(
                                        next_frame_start, pts, display_func, QueueSpotHolder(this),
-                                       primary_stream_idx, in_pts_lower, in_pts_upper, alpha,
-                                       secondary_stream_idx, secondary_pts, fade_alpha);
+                                       frame_lower, frame_upper, alpha,
+                                       secondary_frame, fade_alpha);
                        }
                }
 
@@ -346,27 +345,28 @@ got_clip:
 }
 
 // Find the frame immediately before and after this point.
-bool Player::find_surrounding_frames(int64_t pts, int stream_idx, int64_t *pts_lower, int64_t *pts_upper)
+bool Player::find_surrounding_frames(int64_t pts, int stream_idx, FrameOnDisk *frame_lower, FrameOnDisk *frame_upper)
 {
        lock_guard<mutex> lock(frame_mu);
 
        // Find the first frame such that frame.pts >= pts.
        auto it = lower_bound(frames[stream_idx].begin(),
                frames[stream_idx].end(),
-               pts);
+               pts,
+               [](const FrameOnDisk &frame, int64_t pts) { return frame.pts < pts; });
        if (it == frames[stream_idx].end()) {
                return false;
        }
-       *pts_upper = *it;
+       *frame_upper = *it;
 
        // Find the last frame such that in_pts <= frame.pts (if any).
        if (it == frames[stream_idx].begin()) {
-               *pts_lower = *it;
+               *frame_lower = *it;
        } else {
-               *pts_lower = *(it - 1);
+               *frame_lower = *(it - 1);
        }
-       assert(pts >= *pts_lower);
-       assert(pts <= *pts_upper);
+       assert(pts >= frame_lower->pts);
+       assert(pts <= frame_upper->pts);
        return true;
 }
 
@@ -428,7 +428,8 @@ void Player::override_angle(unsigned stream_idx)
        }
 
        lock_guard<mutex> lock(frame_mu);
-       auto it = upper_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts_out);
+       auto it = upper_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts_out,
+               [](int64_t pts, const FrameOnDisk &frame) { return pts < frame.pts; });
        if (it == frames[stream_idx].end()) {
                return;
        }
diff --git a/player.h b/player.h
index aa50adee0eaaec686cb0cef91b6ed4d591e96aa1..c7f8e070a71a0ce697e1c21edd14727a3892762f 100644 (file)
--- a/player.h
+++ b/player.h
@@ -2,6 +2,7 @@
 #define _PLAYER_H 1
 
 #include "clip_list.h"
+#include "frame_on_disk.h"
 #include "queue_spot_holder.h"
 
 extern "C" {
@@ -52,7 +53,7 @@ private:
 
        // Find the frame immediately before and after this point.
        // Returns false if pts is after the last frame.
-       bool find_surrounding_frames(int64_t pts, int stream_idx, int64_t *pts_lower, int64_t *pts_upper);
+       bool find_surrounding_frames(int64_t pts, int stream_idx, FrameOnDisk *frame_lower, FrameOnDisk *frame_upper);
 
        JPEGFrameView *destination;
        done_callback_func done_callback;
diff --git a/vaapi_jpeg_decoder.cpp b/vaapi_jpeg_decoder.cpp
index 1a3b64eb326b0c05731f1f439b6dd9df15d539b7..12db78b194a52075ca8fc578d5af6589698e2df7 100644 (file)
--- a/vaapi_jpeg_decoder.cpp
+++ b/vaapi_jpeg_decoder.cpp
@@ -326,7 +326,7 @@ private:
        VABufferID buf;
 };
 
-shared_ptr<Frame> decode_jpeg_vaapi(const string &filename)
+shared_ptr<Frame> decode_jpeg_vaapi(const string &jpeg)
 {
        jpeg_decompress_struct dinfo;
        jpeg_error_mgr jerr;
@@ -334,24 +334,9 @@ shared_ptr<Frame> decode_jpeg_vaapi(const string &filename)
        jpeg_create_decompress(&dinfo);
        JPEGDestroyer destroy_dinfo(&dinfo);
 
-       FILE *fp = fopen(filename.c_str(), "rb");
-       if (fp == nullptr) {
-               perror(filename.c_str());
-               exit(1);
-       }
-       jpeg_stdio_src(&dinfo, fp);
-
+       jpeg_mem_src(&dinfo, reinterpret_cast<const unsigned char *>(jpeg.data()), jpeg.size());
        jpeg_read_header(&dinfo, true);
 
-       // Read the data that comes after the header. VA-API will destuff and all for us.
-       std::string str((const char *)dinfo.src->next_input_byte, dinfo.src->bytes_in_buffer);
-       while (!feof(fp)) {
-               char buf[4096];
-               size_t ret = fread(buf, 1, sizeof(buf), fp);
-               str.append(buf, ret);
-       }
-       fclose(fp);
-
        if (dinfo.num_components != 3) {
                fprintf(stderr, "Not a color JPEG. (%d components, Y=%dx%d, Cb=%dx%d, Cr=%dx%d)\n",
                        dinfo.num_components,
@@ -455,7 +440,7 @@ shared_ptr<Frame> decode_jpeg_vaapi(const string &filename)
        // Slice parameters (metadata about the slice).
        VASliceParameterBufferJPEGBaseline parms;
        memset(&parms, 0, sizeof(parms));
-       parms.slice_data_size = str.size();
+       parms.slice_data_size = dinfo.src->bytes_in_buffer;
        parms.slice_data_offset = 0;
        parms.slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
        parms.slice_horizontal_position = 0;
@@ -482,9 +467,9 @@ shared_ptr<Frame> decode_jpeg_vaapi(const string &filename)
        CHECK_VASTATUS_RET(va_status, "vaCreateBuffer");
        VABufferDestroyer destroy_slice_param(va_dpy->va_dpy, slice_param_buffer);
 
-       // The actual data.
+       // The actual data. VA-API will destuff and all for us.
        VABufferID data_buffer;
-       va_status = vaCreateBuffer(va_dpy->va_dpy, config_id, VASliceDataBufferType, str.size(), 1, &str[0], &data_buffer);
+       va_status = vaCreateBuffer(va_dpy->va_dpy, config_id, VASliceDataBufferType, dinfo.src->bytes_in_buffer, 1, const_cast<unsigned char *>(dinfo.src->next_input_byte), &data_buffer);
        CHECK_VASTATUS_RET(va_status, "vaCreateBuffer");
        VABufferDestroyer destroy_data(va_dpy->va_dpy, data_buffer);
 
diff --git a/vaapi_jpeg_decoder.h b/vaapi_jpeg_decoder.h
index 2009a567fc343bceb1ab3ce581dce147a946efaf..4182cfc3d0405c66208093d0de540d360861f83e 100644 (file)
--- a/vaapi_jpeg_decoder.h
+++ b/vaapi_jpeg_decoder.h
@@ -19,7 +19,7 @@ std::unique_ptr<VADisplayWithCleanup> va_open_display(const std::string &va_disp
 std::string get_usable_va_display();
 
 void init_jpeg_vaapi();
-std::shared_ptr<Frame> decode_jpeg_vaapi(const std::string &filename);
+std::shared_ptr<Frame> decode_jpeg_vaapi(const std::string &jpeg);
 
 extern bool vaapi_jpeg_decoding_usable;
 
diff --git a/video_stream.cpp b/video_stream.cpp
index c580b722e8e3717c80cfdb93afafc809ba7e8c97..a9ed0610e4518729e9aaa9614bb98ab0083d33ce 100644 (file)
--- a/video_stream.cpp
+++ b/video_stream.cpp
@@ -26,29 +26,6 @@ using namespace std::chrono;
 
 extern HTTPD *global_httpd;
 
-namespace {
-
-string read_file(const string &filename)
-{
-       FILE *fp = fopen(filename.c_str(), "rb");
-       if (fp == nullptr) {
-               perror(filename.c_str());
-               return "";
-       }
-
-       fseek(fp, 0, SEEK_END);
-       long len = ftell(fp);
-       rewind(fp);
-
-       string ret;
-       ret.resize(len);
-       fread(&ret[0], len, 1, fp);
-       fclose(fp);
-       return ret;
-}
-
-}  // namespace
-
 struct VectorDestinationManager {
        jpeg_destination_mgr pub;
        std::vector<uint8_t> dest;
@@ -311,20 +288,19 @@ void VideoStream::clear_queue()
 void VideoStream::schedule_original_frame(steady_clock::time_point local_pts,
                                           int64_t output_pts, function<void()> &&display_func,
                                           QueueSpotHolder &&queue_spot_holder,
-                                          unsigned stream_idx, int64_t input_pts)
+                                          FrameOnDisk frame)
 {
-       fprintf(stderr, "output_pts=%ld  original      input_pts=%ld\n", output_pts, input_pts);
+       fprintf(stderr, "output_pts=%ld  original      input_pts=%ld\n", output_pts, frame.pts);
 
        // Preload the file from disk, so that the encoder thread does not get stalled.
        // TODO: Consider sending it through the queue instead.
-       (void)read_file(filename_for_frame(stream_idx, input_pts));
+       (void)read_frame(frame);
 
        QueuedFrame qf;
        qf.local_pts = local_pts;
        qf.type = QueuedFrame::ORIGINAL;
        qf.output_pts = output_pts;
-       qf.stream_idx = stream_idx;
-       qf.input_first_pts = input_pts;
+       qf.frame1 = frame;
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
 
@@ -336,10 +312,10 @@ void VideoStream::schedule_original_frame(steady_clock::time_point local_pts,
 void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64_t output_pts,
                                        function<void()> &&display_func,
                                        QueueSpotHolder &&queue_spot_holder,
-                                       unsigned stream_idx, int64_t input_pts, int secondary_stream_idx,
-                                       int64_t secondary_input_pts, float fade_alpha)
+                                       FrameOnDisk frame1_spec, FrameOnDisk frame2_spec,
+                                       float fade_alpha)
 {
-       fprintf(stderr, "output_pts=%ld  faded         input_pts=%ld,%ld  fade_alpha=%.2f\n", output_pts, input_pts, secondary_input_pts, fade_alpha);
+       fprintf(stderr, "output_pts=%ld  faded         input_pts=%ld,%ld  fade_alpha=%.2f\n", output_pts, frame1_spec.pts, frame2_spec.pts, fade_alpha);
 
        // Get the temporary OpenGL resources we need for doing the fade.
        // (We share these with interpolated frames, which is slightly
@@ -358,15 +334,8 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
 
        bool did_decode;
 
-       JPEGID jpeg_id1;
-       jpeg_id1.stream_idx = stream_idx;
-       jpeg_id1.pts = input_pts;
-       shared_ptr<Frame> frame1 = decode_jpeg_with_cache(jpeg_id1, DECODE_IF_NOT_IN_CACHE, &did_decode);
-
-       JPEGID jpeg_id2;
-       jpeg_id2.stream_idx = secondary_stream_idx;
-       jpeg_id2.pts = secondary_input_pts;
-       shared_ptr<Frame> frame2 = decode_jpeg_with_cache(jpeg_id2, DECODE_IF_NOT_IN_CACHE, &did_decode);
+       shared_ptr<Frame> frame1 = decode_jpeg_with_cache(frame1_spec, DECODE_IF_NOT_IN_CACHE, &did_decode);
+       shared_ptr<Frame> frame2 = decode_jpeg_with_cache(frame2_spec, DECODE_IF_NOT_IN_CACHE, &did_decode);
 
        ycbcr_semiplanar_converter->prepare_chain_for_fade(frame1, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, 1280, 720);
 
@@ -374,13 +343,11 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
        qf.local_pts = local_pts;
        qf.type = QueuedFrame::FADED;
        qf.output_pts = output_pts;
-       qf.stream_idx = stream_idx;
-       qf.input_first_pts = input_pts;
+       qf.frame1 = frame1_spec;
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
 
-       qf.secondary_stream_idx = secondary_stream_idx;
-       qf.secondary_input_pts = secondary_input_pts;
+       qf.secondary_frame = frame2_spec;
 
        // Subsample and split Cb/Cr.
        chroma_subsampler->subsample_chroma(resources->fade_cbcr_output_tex, 1280, 720, resources->cb_tex, resources->cr_tex);
@@ -413,15 +380,13 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
 void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts,
                                               int64_t output_pts, function<void(shared_ptr<Frame>)> &&display_func,
                                               QueueSpotHolder &&queue_spot_holder,
-                                              unsigned stream_idx, int64_t input_first_pts,
-                                              int64_t input_second_pts, float alpha,
-                                              int secondary_stream_idx, int64_t secondary_input_pts,
-                                              float fade_alpha)
+                                              FrameOnDisk frame1, FrameOnDisk frame2,
+                                              float alpha, FrameOnDisk secondary_frame, float fade_alpha)
 {
-       if (secondary_stream_idx != -1) {
-               fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f  secondary_pts=%ld  fade_alpha=%.2f\n", output_pts, input_first_pts, input_second_pts, alpha, secondary_input_pts, fade_alpha);
+       if (secondary_frame.pts != -1) {
+               fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f  secondary_pts=%ld  fade_alpha=%.2f\n", output_pts, frame1.pts, frame2.pts, alpha, secondary_frame.pts, fade_alpha);
        } else {
-               fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f\n", output_pts, input_first_pts, input_second_pts, alpha);
+               fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f\n", output_pts, frame1.pts, frame2.pts, alpha);
        }
 
        // Get the temporary OpenGL resources we need for doing the interpolation.
@@ -437,9 +402,8 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
        }
 
        QueuedFrame qf;
-       qf.type = (secondary_stream_idx == -1) ? QueuedFrame::INTERPOLATED : QueuedFrame::FADED_INTERPOLATED;
+       qf.type = (secondary_frame.pts == -1) ? QueuedFrame::INTERPOLATED : QueuedFrame::FADED_INTERPOLATED;
        qf.output_pts = output_pts;
-       qf.stream_idx = stream_idx;
        qf.display_decoded_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.local_pts = local_pts;
@@ -448,11 +412,9 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
 
        // Convert frame0 and frame1 to OpenGL textures.
        for (size_t frame_no = 0; frame_no < 2; ++frame_no) {
-               JPEGID jpeg_id;
-               jpeg_id.stream_idx = stream_idx;
-               jpeg_id.pts = frame_no == 1 ? input_second_pts : input_first_pts;
+               FrameOnDisk frame_spec = frame_no == 1 ? frame2 : frame1;
                bool did_decode;
-               shared_ptr<Frame> frame = decode_jpeg_with_cache(jpeg_id, DECODE_IF_NOT_IN_CACHE, &did_decode);
+               shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, DECODE_IF_NOT_IN_CACHE, &did_decode);
                ycbcr_converter->prepare_chain_for_conversion(frame)->render_to_fbo(resources->input_fbos[frame_no], 1280, 720);
        }
 
@@ -465,17 +427,14 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
        qf.flow_tex = compute_flow->exec(resources->gray_tex, DISComputeFlow::FORWARD_AND_BACKWARD, DISComputeFlow::DO_NOT_RESIZE_FLOW);
        check_error();
 
-       if (secondary_stream_idx != -1) {
+       if (secondary_frame.pts != -1) {
                // Fade. First kick off the interpolation.
                tie(qf.output_tex, ignore) = interpolate_no_split->exec(resources->input_tex, resources->gray_tex, qf.flow_tex, 1280, 720, alpha);
                check_error();
 
                // Now decode the image we are fading against.
-               JPEGID jpeg_id;
-               jpeg_id.stream_idx = secondary_stream_idx;
-               jpeg_id.pts = secondary_input_pts;
                bool did_decode;
-               shared_ptr<Frame> frame2 = decode_jpeg_with_cache(jpeg_id, DECODE_IF_NOT_IN_CACHE, &did_decode);
+               shared_ptr<Frame> frame2 = decode_jpeg_with_cache(secondary_frame, DECODE_IF_NOT_IN_CACHE, &did_decode);
 
                // Then fade against it, putting it into the fade Y' and CbCr textures.
                ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, 1280, 720);
@@ -500,7 +459,7 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
        glPixelStorei(GL_PACK_ROW_LENGTH, 0);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, resources->pbo);
        check_error();
-       if (secondary_stream_idx != -1) {
+       if (secondary_frame.pts != -1) {
                glGetTextureImage(resources->fade_y_output_tex, 0, GL_RED, GL_UNSIGNED_BYTE, 1280 * 720 * 4, BUFFER_OFFSET(0));
        } else {
                glGetTextureImage(qf.output_tex, 0, GL_RED, GL_UNSIGNED_BYTE, 1280 * 720 * 4, BUFFER_OFFSET(0));
@@ -607,7 +566,7 @@ void VideoStream::encode_thread_func()
 
                if (qf.type == QueuedFrame::ORIGINAL) {
                        // Send the JPEG frame on, unchanged.
-                       string jpeg = read_file(filename_for_frame(qf.stream_idx, qf.input_first_pts));
+                       string jpeg = read_frame(qf.frame1);
                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
diff --git a/video_stream.h b/video_stream.h
index 22ae26a42b195f9f93338fccbd559687eeeacf27..6cc9471befde29a25aecdc163d0956ebc21f6a37 100644 (file)
--- a/video_stream.h
+++ b/video_stream.h
@@ -8,6 +8,7 @@ extern "C" {
 #include <libavformat/avio.h>
 }
 
+#include "frame_on_disk.h"
 #include "jpeg_frame_view.h"
 #include "ref_counted_gl_sync.h"
 #include "queue_spot_holder.h"
@@ -44,18 +45,18 @@ public:
        void schedule_original_frame(std::chrono::steady_clock::time_point,
                                     int64_t output_pts, std::function<void()> &&display_func,
                                     QueueSpotHolder &&queue_spot_holder,
-                                    unsigned stream_idx, int64_t input_pts);
+                                    FrameOnDisk frame);
        void schedule_faded_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
                                  std::function<void()> &&display_func,
                                  QueueSpotHolder &&queue_spot_holder,
-                                 unsigned stream_idx, int64_t input_pts, int secondary_stream_idx,
-                                 int64_t secondary_input_pts, float fade_alpha);
+                                 FrameOnDisk frame1, FrameOnDisk frame2,
+                                 float fade_alpha);
        void schedule_interpolated_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
                                  std::function<void(std::shared_ptr<Frame>)> &&display_func,
                                  QueueSpotHolder &&queue_spot_holder,
-                                 unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts,
-                                 float alpha, int secondary_stream_idx = -1, int64_t secondary_inputs_pts = -1,
-                                 float fade_alpha = 0.0f);  // -1 = no secondary frame.
+                                 FrameOnDisk frame1, FrameOnDisk frame2,
+                                 float alpha, FrameOnDisk secondary_frame = {},  // Empty = no secondary (fade) frame.
+                                 float fade_alpha = 0.0f);
        void schedule_refresh_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
                                    std::function<void()> &&display_func,
                                    QueueSpotHolder &&queue_spot_holder);
@@ -105,20 +106,18 @@ private:
 
                int64_t output_pts;
                enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED, REFRESH } type;
-               unsigned stream_idx;
-               int64_t input_first_pts;  // The only pts for original frames.
+               FrameOnDisk frame1;  // The only frame for original frames.
 
                // For fades only (including fades against interpolated frames).
-               int secondary_stream_idx = -1;
-               int64_t secondary_input_pts;
+               FrameOnDisk secondary_frame;
 
                // For interpolated frames only.
-               int64_t input_second_pts;
+               FrameOnDisk frame2;
                float alpha;
                BorrowedInterpolatedFrameResources resources;
                RefCountedGLsync fence;  // Set when the interpolated image is read back to the CPU.
                GLuint flow_tex, output_tex, cbcr_tex;  // Released in the receiving thread; not really used for anything else.
-               JPEGID id;
+               FrameOnDisk id;
 
                std::function<void()> display_func;  // Called when the image is done decoding.
                std::function<void(std::shared_ptr<Frame>)> display_decoded_func;  // Same, except for INTERPOLATED and FADED_INTERPOLATED.