git.sesse.net Git - nageru/commitdiff
Some clang-formatting of Futatabi.
author Steinar H. Gunderson <sgunderson@bigfoot.com>
Fri, 28 Dec 2018 10:56:15 +0000 (11:56 +0100)
committer Steinar H. Gunderson <sgunderson@bigfoot.com>
Fri, 28 Dec 2018 10:56:52 +0000 (11:56 +0100)
26 files changed:
futatabi/chroma_subsampler.cpp
futatabi/clip_list.h
futatabi/db.cpp
futatabi/db.h
futatabi/export.cpp
futatabi/export.h
futatabi/flags.cpp
futatabi/flags.h
futatabi/flow.cpp
futatabi/flow.h
futatabi/flow_main.cpp
futatabi/frame_on_disk.cpp
futatabi/frame_on_disk.h
futatabi/jpeg_destroyer.h
futatabi/jpeg_frame.h
futatabi/jpeg_frame_view.cpp
futatabi/main.cpp
futatabi/mainwindow.cpp
futatabi/mainwindow.h
futatabi/player.cpp
futatabi/player.h
futatabi/queue_spot_holder.h
futatabi/vaapi_jpeg_decoder.cpp
futatabi/video_stream.cpp
futatabi/video_stream.h
futatabi/ycbcr_converter.cpp

index d064bc7c1d821d29bf9de86fc0194de0cfddba78..d38edacb35f527542d730ae9c3c6e39184af8700 100644 (file)
@@ -1,10 +1,10 @@
 #include "chroma_subsampler.h"
 
+#include "embedded_files.h"
+
 #include <movit/util.h>
 #include <string>
 
-#include "embedded_files.h"
-
 #define BUFFER_OFFSET(i) ((char *)nullptr + (i))
 
 using namespace std;
index 6bd0fbb1274f0cc2b4270270af4768f68bfd4301..af3d1f524c42084ab77815f91cb4e30552cf5d07 100644 (file)
@@ -5,8 +5,8 @@
 #include "state.pb.h"
 
 #include <QAbstractTableModel>
-#include <stdint.h>
 #include <map>
+#include <stdint.h>
 #include <string>
 #include <vector>
 
index fb6694be3a2f2c729edefedcd3d923a9a4ca2209..1ad17cbfece33368742f5e08da29e8072156edb7 100644 (file)
@@ -17,19 +17,23 @@ DB::DB(const string &filename)
 
        sqlite3_exec(db, R"(
                CREATE TABLE IF NOT EXISTS state (state BLOB);
-       )", nullptr, nullptr, nullptr);  // Ignore errors.
+       )",
+                    nullptr, nullptr, nullptr);  // Ignore errors.
 
        sqlite3_exec(db, R"(
                CREATE TABLE IF NOT EXISTS settings (settings BLOB);
-       )", nullptr, nullptr, nullptr);  // Ignore errors.
+       )",
+                    nullptr, nullptr, nullptr);  // Ignore errors.
 
        sqlite3_exec(db, R"(
                DROP TABLE file;
-       )", nullptr, nullptr, nullptr);  // Ignore errors.
+       )",
+                    nullptr, nullptr, nullptr);  // Ignore errors.
 
        sqlite3_exec(db, R"(
                DROP TABLE frame;
-       )", nullptr, nullptr, nullptr);  // Ignore errors.
+       )",
+                    nullptr, nullptr, nullptr);  // Ignore errors.
 
        sqlite3_exec(db, R"(
                CREATE TABLE IF NOT EXISTS filev2 (
@@ -38,7 +42,8 @@ DB::DB(const string &filename)
                        size BIGINT NOT NULL,
                        frames BLOB NOT NULL
                );
-       )", nullptr, nullptr, nullptr);  // Ignore errors.
+       )",
+                    nullptr, nullptr, nullptr);  // Ignore errors.
 
        sqlite3_exec(db, "PRAGMA journal_mode=WAL", nullptr, nullptr, nullptr);  // Ignore errors.
        sqlite3_exec(db, "PRAGMA synchronous=NORMAL", nullptr, nullptr, nullptr);  // Ignore errors.
@@ -342,7 +347,8 @@ void DB::clean_unused_frame_files(const vector<string> &used_filenames)
 
        ret = sqlite3_exec(db, R"(
                CREATE TEMPORARY TABLE used_filenames ( filename VARCHAR NOT NULL PRIMARY KEY )
-       )", nullptr, nullptr, nullptr);
+       )",
+                          nullptr, nullptr, nullptr);
 
        if (ret != SQLITE_OK) {
                fprintf(stderr, "CREATE TEMPORARY TABLE: %s\n", sqlite3_errmsg(db));
@@ -381,7 +387,8 @@ void DB::clean_unused_frame_files(const vector<string> &used_filenames)
 
        ret = sqlite3_exec(db, R"(
                DELETE FROM filev2 WHERE filename NOT IN ( SELECT filename FROM used_filenames )
-       )", nullptr, nullptr, nullptr);
+       )",
+                          nullptr, nullptr, nullptr);
 
        if (ret != SQLITE_OK) {
                fprintf(stderr, "DELETE: %s\n", sqlite3_errmsg(db));
@@ -390,7 +397,8 @@ void DB::clean_unused_frame_files(const vector<string> &used_filenames)
 
        ret = sqlite3_exec(db, R"(
                DROP TABLE used_filenames
-       )", nullptr, nullptr, nullptr);
+       )",
+                          nullptr, nullptr, nullptr);
 
        if (ret != SQLITE_OK) {
                fprintf(stderr, "DROP TABLE: %s\n", sqlite3_errmsg(db));
index c0c3623975c7c10d1d67ccc36aa19a9bfc197484..59ca733a1d32c568a62624d62b6acfa4bb13d21e 100644 (file)
@@ -1,14 +1,13 @@
 #ifndef DB_H
 #define DB_H 1
 
+#include "frame_on_disk.h"
 #include "state.pb.h"
 
 #include <sqlite3.h>
 #include <string>
 #include <vector>
 
-#include "frame_on_disk.h"
-
 class DB {
 public:
        explicit DB(const std::string &filename);
index f3717409bff5235c47e7cfe91196eed80896826d..dce83c34a34b1503ef276800c31930b2602ea89b 100644 (file)
@@ -1,6 +1,7 @@
+#include "export.h"
+
 #include "clip_list.h"
 #include "defs.h"
-#include "export.h"
 #include "flags.h"
 #include "frame_on_disk.h"
 #include "player.h"
@@ -9,11 +10,9 @@
 
 #include <QMessageBox>
 #include <QProgressDialog>
-
 #include <future>
-#include <vector>
-
 #include <unistd.h>
+#include <vector>
 
 extern "C" {
 #include <libavformat/avformat.h>
@@ -100,14 +99,14 @@ void export_multitrack_clip(const string &filename, const Clip &clip)
 
        // Create the streams. Note that some of them could be without frames
        // (we try to maintain the stream indexes in the export).
-       vector<AVStream *> video_streams; 
+       vector<AVStream *> video_streams;
        for (unsigned stream_idx = 0; stream_idx <= last_stream_idx; ++stream_idx) {
                AVStream *avstream_video = avformat_new_stream(avctx, nullptr);
                if (avstream_video == nullptr) {
                        fprintf(stderr, "avformat_new_stream() failed\n");
                        exit(1);
                }
-               avstream_video->time_base = AVRational{1, TIMEBASE};
+               avstream_video->time_base = AVRational{ 1, TIMEBASE };
                avstream_video->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
                avstream_video->codecpar->codec_id = AV_CODEC_ID_MJPEG;
                avstream_video->codecpar->width = global_flags.width;  // Might be wrong, but doesn't matter all that much.
@@ -165,8 +164,8 @@ void export_multitrack_clip(const string &filename, const Clip &clip)
                        }
                }
                string jpeg = readers[first_frame_stream_idx].read_frame(first_frame);
-               int64_t scaled_pts = av_rescale_q(first_frame.pts, AVRational{1, TIMEBASE},
-                       video_streams[first_frame_stream_idx]->time_base);
+               int64_t scaled_pts = av_rescale_q(first_frame.pts, AVRational{ 1, TIMEBASE },
+                                                 video_streams[first_frame_stream_idx]->time_base);
                buffered_jpegs.emplace_back(BufferedJPEG{ scaled_pts, first_frame_stream_idx, std::move(jpeg) });
                if (buffered_jpegs.size() >= 1000) {
                        if (!write_buffered_jpegs(avctx, buffered_jpegs)) {
@@ -183,7 +182,7 @@ void export_multitrack_clip(const string &filename, const Clip &clip)
                if (progress.wasCanceled()) {
                        unlink(filename.c_str());
                        return;
-                }
+               }
        }
 
        if (!write_buffered_jpegs(avctx, buffered_jpegs)) {
@@ -224,11 +223,11 @@ void export_interpolated_clip(const string &filename, const vector<Clip> &clips)
        progress.setMaximum(100000);
        progress.setValue(0);
 
-       double total_length = compute_time_left(clips, {{0, 0.0}});
+       double total_length = compute_time_left(clips, { { 0, 0.0 } });
 
        promise<void> done_promise;
        future<void> done = done_promise.get_future();
-       std::atomic<double> current_value{0.0};
+       std::atomic<double> current_value{ 0.0 };
        size_t clip_idx = 0;
 
        Player player(/*destination=*/nullptr, Player::FILE_STREAM_OUTPUT, closer.release());
@@ -237,7 +236,7 @@ void export_interpolated_clip(const string &filename, const vector<Clip> &clips)
                        done_promise.set_value();
                }
        });
-       player.set_progress_callback([&current_value, &clips, total_length] (const std::map<size_t, double> &player_progress) {
+       player.set_progress_callback([&current_value, &clips, total_length](const std::map<size_t, double> &player_progress) {
                current_value = 1.0 - compute_time_left(clips, player_progress) / total_length;
        });
        player.play(clips);
index 0397749b8b04299b888e89cfcbabd1bf75b8dadf..ae21a7de35f40ee33ef7720de39e548448269341 100644 (file)
@@ -1,6 +1,8 @@
 #ifndef _EXPORT_H
 #define _EXPORT_H 1
 
+#include "clip_list.h"
+
 #include <string>
 #include <vector>
 
index aefde273aa64fdbc32504ff2658e743faa6a09d9..211509890d69a93ba212152fe2985063b4f9e210 100644 (file)
@@ -4,7 +4,6 @@
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
-
 #include <utility>
 
 using namespace std;
@@ -43,7 +42,7 @@ void usage()
        fprintf(stderr, "      --tally-url URL             URL to get tally color from (polled every 100 ms)\n");
 }
 
-void parse_flags(int argc, char * const argv[])
+void parse_flags(int argc, char *const argv[])
 {
        static const option long_options[] = {
                { "help", no_argument, 0, OPTION_HELP },
@@ -58,7 +57,7 @@ void parse_flags(int argc, char * const argv[])
                { "cue-point-padding", required_argument, 0, OPTION_CUE_POINT_PADDING },
                { 0, 0, 0, 0 }
        };
-       for ( ;; ) {
+       for (;;) {
                int option_index = 0;
                int c = getopt_long(argc, argv, "w:h:r:q:d:", long_options, &option_index);
 
index 6796c147bc2b65da3f3047b43b563844b316c846..1e0284bcd8c3ff9d08e545ec91b5348de4276ba3 100644 (file)
@@ -1,10 +1,10 @@
 #ifndef _FLAGS_H
 #define _FLAGS_H
 
-#include <string>
-
 #include "defs.h"
 
+#include <string>
+
 struct Flags {
        int width = 1280, height = 720;
        std::string stream_source;
@@ -26,6 +26,6 @@ extern Flags global_flags;
 extern int flow_initialized_interpolation_quality;
 
 void usage();
-void parse_flags(int argc, char * const argv[]);
+void parse_flags(int argc, char *const argv[]);
 
 #endif  // !defined(_FLAGS_H)
index a10d83b11f725e19b11e86cab888fd60e25aa3f2..90317da70a5ab62d4b72a9036f374cb24a50628f 100644 (file)
@@ -4,8 +4,8 @@
 
 #include "embedded_files.h"
 #include "gpu_timers.h"
-#include "util.h"
 #include "shared/read_file.h"
+#include "util.h"
 
 #include <algorithm>
 #include <assert.h>
@@ -43,7 +43,7 @@ GLuint vertex_vbo;
 int find_num_levels(int width, int height)
 {
        int levels = 1;
-       for (int w = width, h = height; w > 1 || h > 1; ) {
+       for (int w = width, h = height; w > 1 || h > 1;) {
                w >>= 1;
                h >>= 1;
                ++levels;
@@ -98,7 +98,7 @@ GLuint link_program(GLuint vs_obj, GLuint fs_obj)
        GLint success;
        glGetProgramiv(program, GL_LINK_STATUS, &success);
        if (success == GL_FALSE) {
-               GLchar error_log[1024] = {0};
+               GLchar error_log[1024] = { 0 };
                glGetProgramInfoLog(program, 1024, nullptr, error_log);
                fprintf(stderr, "Error linking program: %s\n", error_log);
                exit(1);
@@ -269,8 +269,8 @@ void Densify::exec(GLuint tex_view, GLuint flow_tex, GLuint dense_flow_tex, int
        bind_sampler(densify_program, uniform_flow_tex, 1, flow_tex, nearest_sampler);
 
        glProgramUniform2f(densify_program, uniform_patch_size,
-               float(op.patch_size_pixels) / level_width,
-               float(op.patch_size_pixels) / level_height);
+                          float(op.patch_size_pixels) / level_width,
+                          float(op.patch_size_pixels) / level_height);
 
        glViewport(0, 0, level_width, level_height);
        glEnable(GL_BLEND);
@@ -919,7 +919,8 @@ Interpolate::Interpolate(const OperatingPoint &op, bool split_ycbcr_output)
        : flow_level(op.finest_level),
          split_ycbcr_output(split_ycbcr_output),
          splat(op),
-         blend(split_ycbcr_output) {
+         blend(split_ycbcr_output)
+{
        // Set up the vertex data that will be shared between all passes.
        float vertices[] = {
                0.0f, 1.0f,
index 08b25904356ab61265835e46cf219105c5ae88d2..760a5f31d58dd70b9f04ecbda70377db7e9ee689 100644 (file)
@@ -90,20 +90,24 @@ public:
        void render_to(const std::array<GLuint, num_elements> &textures);
 
        // Convenience wrappers.
-       void render_to(GLuint texture0) {
-               render_to({{texture0}});
+       void render_to(GLuint texture0)
+       {
+               render_to({ { texture0 } });
        }
 
-       void render_to(GLuint texture0, GLuint texture1) {
-               render_to({{texture0, texture1}});
+       void render_to(GLuint texture0, GLuint texture1)
+       {
+               render_to({ { texture0, texture1 } });
        }
 
-       void render_to(GLuint texture0, GLuint texture1, GLuint texture2) {
-               render_to({{texture0, texture1, texture2}});
+       void render_to(GLuint texture0, GLuint texture1, GLuint texture2)
+       {
+               render_to({ { texture0, texture1, texture2 } });
        }
 
-       void render_to(GLuint texture0, GLuint texture1, GLuint texture2, GLuint texture3) {
-               render_to({{texture0, texture1, texture2, texture3}});
+       void render_to(GLuint texture0, GLuint texture1, GLuint texture2, GLuint texture3)
+       {
+               render_to({ { texture0, texture1, texture2, texture3 } });
        }
 
 private:
@@ -118,20 +122,24 @@ public:
        void render_to(GLuint depth_rb, const std::array<GLuint, num_elements> &textures);
 
        // Convenience wrappers.
-       void render_to(GLuint depth_rb, GLuint texture0) {
-               render_to(depth_rb, {{texture0}});
+       void render_to(GLuint depth_rb, GLuint texture0)
+       {
+               render_to(depth_rb, { { texture0 } });
        }
 
-       void render_to(GLuint depth_rb, GLuint texture0, GLuint texture1) {
-               render_to(depth_rb, {{texture0, texture1}});
+       void render_to(GLuint depth_rb, GLuint texture0, GLuint texture1)
+       {
+               render_to(depth_rb, { { texture0, texture1 } });
        }
 
-       void render_to(GLuint depth_rb, GLuint texture0, GLuint texture1, GLuint texture2) {
-               render_to(depth_rb, {{texture0, texture1, texture2}});
+       void render_to(GLuint depth_rb, GLuint texture0, GLuint texture1, GLuint texture2)
+       {
+               render_to(depth_rb, { { texture0, texture1, texture2 } });
        }
 
-       void render_to(GLuint depth_rb, GLuint texture0, GLuint texture1, GLuint texture2, GLuint texture3) {
-               render_to(depth_rb, {{texture0, texture1, texture2, texture3}});
+       void render_to(GLuint depth_rb, GLuint texture0, GLuint texture1, GLuint texture2, GLuint texture3)
+       {
+               render_to(depth_rb, { { texture0, texture1, texture2, texture3 } });
        }
 
 private:
index dc82d229ca8b121bcb92496b7a7114ac8b06a1a6..ef481a04b4ba8e7360214d442cdd3e0f05d44d2b 100644 (file)
@@ -177,7 +177,7 @@ struct RGBAType {
        static constexpr int num_channels = 4;
 };
 
-template <class Type>
+template<class Type>
 void finish_one_read(GLuint width, GLuint height)
 {
        using T = typename Type::type;
@@ -203,7 +203,7 @@ void finish_one_read(GLuint width, GLuint height)
        }
 }
 
-template <class Type>
+template<class Type>
 void schedule_read(GLuint tex, GLuint width, GLuint height, const char *filename0, const char *filename1, const char *flow_filename, const char *ppm_filename)
 {
        using T = typename Type::type;
@@ -233,7 +233,7 @@ void compute_flow_only(int argc, char **argv, int optind)
 
        if (width1 != width2 || height1 != height2) {
                fprintf(stderr, "Image dimensions don't match (%dx%d versus %dx%d)\n",
-                       width1, height1, width2, height2);
+                       width1, height1, width2, height2);
                exit(1);
        }
 
@@ -297,7 +297,7 @@ void compute_flow_only(int argc, char **argv, int optind)
                GLuint tex0 = load_texture(filename0, &width, &height, WITHOUT_MIPMAPS);
                if (width != width1 || height != height1) {
                        fprintf(stderr, "%s: Image dimensions don't match (%dx%d versus %dx%d)\n",
-                               filename0, width, height, width1, height1);
+                               filename0, width, height, width1, height1);
                        exit(1);
                }
                glCopyImageSubData(tex0, GL_TEXTURE_2D, 0, 0, 0, 0, image_tex, GL_TEXTURE_2D_ARRAY, 0, 0, 0, 0, width1, height1, 1);
@@ -306,7 +306,7 @@ void compute_flow_only(int argc, char **argv, int optind)
                GLuint tex1 = load_texture(filename1, &width, &height, WITHOUT_MIPMAPS);
                if (width != width1 || height != height1) {
                        fprintf(stderr, "%s: Image dimensions don't match (%dx%d versus %dx%d)\n",
-                               filename1, width, height, width1, height1);
+                               filename1, width, height, width1, height1);
                        exit(1);
                }
                glCopyImageSubData(tex1, GL_TEXTURE_2D, 0, 0, 0, 0, image_tex, GL_TEXTURE_2D_ARRAY, 0, 0, 0, 1, width1, height1, 1);
@@ -346,7 +346,7 @@ void interpolate_image(int argc, char **argv, int optind)
 
        if (width1 != width2 || height1 != height2) {
                fprintf(stderr, "Image dimensions don't match (%dx%d versus %dx%d)\n",
-                       width1, height1, width2, height2);
+                       width1, height1, width2, height2);
                exit(1);
        }
 
@@ -428,7 +428,7 @@ int main(int argc, char **argv)
 
        enable_timing = true;
 
-       for ( ;; ) {
+       for (;;) {
                int option_index = 0;
                int c = getopt_long(argc, argv, "s:i:g:", long_options, &option_index);
 
@@ -480,10 +480,10 @@ int main(int argc, char **argv)
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 5);
        // SDL_GL_SetAttribute(SDL_GL_CONTEXT_FLAGS, SDL_GL_CONTEXT_DEBUG_FLAG);
        window = SDL_CreateWindow("OpenGL window",
-               SDL_WINDOWPOS_UNDEFINED,
-               SDL_WINDOWPOS_UNDEFINED,
-               64, 64,
-               SDL_WINDOW_OPENGL | SDL_WINDOW_HIDDEN);
+                                 SDL_WINDOWPOS_UNDEFINED,
+                                 SDL_WINDOWPOS_UNDEFINED,
+                                 64, 64,
+                                 SDL_WINDOW_OPENGL | SDL_WINDOW_HIDDEN);
        SDL_GLContext context = SDL_GL_CreateContext(window);
        assert(context != nullptr);
 
index 34fcd38ef768394edb59d4e08ea9796fd2451de1..c3e3b4db496e4902e7239f61b2d53c98b57e7825 100644 (file)
@@ -1,12 +1,12 @@
-#include <fcntl.h>
-#include <unistd.h>
+#include "frame_on_disk.h"
+
+#include "shared/metrics.h"
 
 #include <atomic>
 #include <chrono>
+#include <fcntl.h>
 #include <mutex>
-
-#include "frame_on_disk.h"
-#include "shared/metrics.h"
+#include <unistd.h>
 
 using namespace std;
 using namespace std::chrono;
@@ -16,10 +16,10 @@ namespace {
 // There can be multiple FrameReader classes, so make all the metrics static.
 once_flag frame_metrics_inited;
 
-atomic<int64_t> metric_frame_opened_files{0};
-atomic<int64_t> metric_frame_closed_files{0};
-atomic<int64_t> metric_frame_read_bytes{0};
-atomic<int64_t> metric_frame_read_frames{0};
+atomic<int64_t> metric_frame_opened_files{ 0 };
+atomic<int64_t> metric_frame_closed_files{ 0 };
+atomic<int64_t> metric_frame_read_bytes{ 0 };
+atomic<int64_t> metric_frame_read_frames{ 0 };
 
 Summary metric_frame_read_time_seconds;
 
@@ -27,13 +27,13 @@ Summary metric_frame_read_time_seconds;
 
 FrameReader::FrameReader()
 {
-       call_once(frame_metrics_inited, []{
+       call_once(frame_metrics_inited, [] {
                global_metrics.add("frame_opened_files", &metric_frame_opened_files);
                global_metrics.add("frame_closed_files", &metric_frame_closed_files);
                global_metrics.add("frame_read_bytes", &metric_frame_read_bytes);
                global_metrics.add("frame_read_frames", &metric_frame_read_frames);
 
-               vector<double> quantiles{0.01, 0.1, 0.25, 0.5, 0.75, 0.9, 0.99};
+               vector<double> quantiles{ 0.01, 0.1, 0.25, 0.5, 0.75, 0.9, 0.99 };
                metric_frame_read_time_seconds.init(quantiles, 60.0);
                global_metrics.add("frame_read_time_seconds", &metric_frame_read_time_seconds);
        });
index 7807c52b4a69cdb2a08819bb10636f9e59bdc44e..f74cb86f717161a9b6a9ffab8575594a5a8548b4 100644 (file)
@@ -1,21 +1,20 @@
 #ifndef _FRAME_ON_DISK_H
 #define _FRAME_ON_DISK_H 1
 
+#include "defs.h"
+
 #include <algorithm>
 #include <mutex>
+#include <stdint.h>
 #include <string>
 #include <vector>
 
-#include <stdint.h>
-
-#include "defs.h"
-
 extern std::mutex frame_mu;
 struct FrameOnDisk {
-        int64_t pts = -1;  // -1 means empty.
-        off_t offset;
-        unsigned filename_idx;
-        uint32_t size;  // Not using size_t saves a few bytes; we can have so many frames.
+       int64_t pts = -1;  // -1 means empty.
+       off_t offset;
+       unsigned filename_idx;
+       uint32_t size;  // Not using size_t saves a few bytes; we can have so many frames.
 };
 extern std::vector<FrameOnDisk> frames[MAX_STREAMS];  // Under frame_mu.
 extern std::vector<std::string> frame_filenames;  // Under frame_mu.
@@ -49,14 +48,14 @@ inline std::vector<FrameOnDisk>::iterator
 find_last_frame_before(std::vector<FrameOnDisk> &frames, int64_t pts_origin)
 {
        return std::lower_bound(frames.begin(), frames.end(), pts_origin,
-               [](const FrameOnDisk &frame, int64_t pts) { return frame.pts < pts; });
+                               [](const FrameOnDisk &frame, int64_t pts) { return frame.pts < pts; });
 }
 
 inline std::vector<FrameOnDisk>::iterator
 find_first_frame_at_or_after(std::vector<FrameOnDisk> &frames, int64_t pts_origin)
 {
        return std::upper_bound(frames.begin(), frames.end(), pts_origin - 1,
-               [](int64_t pts, const FrameOnDisk &frame) { return pts < frame.pts; });
+                               [](int64_t pts, const FrameOnDisk &frame) { return pts < frame.pts; });
 }
 
 #endif  // !defined(_FRAME_ON_DISK_H)
index 5fc5c955e687ef0ec88bd8219a7f352ca9f8b89e..69f0ba3f83bed94d78a0bfb9304e9f63bca9a31d 100644 (file)
@@ -8,7 +8,8 @@ public:
        JPEGDestroyer(jpeg_decompress_struct *dinfo)
                : dinfo(dinfo) {}
 
-       ~JPEGDestroyer() {
+       ~JPEGDestroyer()
+       {
                jpeg_destroy_decompress(dinfo);
        }
 
index eb73e13fec90d564f3695f198d1282c613120edf..edc7381663e0f60ab4909b200c7d495c595d1b64 100644 (file)
@@ -6,11 +6,11 @@
 struct Frame {
        bool is_semiplanar = false;
        std::unique_ptr<uint8_t[]> y;
-       std::unique_ptr<uint8_t[]> cb, cr; // For planar.
+       std::unique_ptr<uint8_t[]> cb, cr;  // For planar.
        std::unique_ptr<uint8_t[]> cbcr;  // For semiplanar.
        unsigned width, height;
        unsigned chroma_subsampling_x, chroma_subsampling_y;
        unsigned pitch_y, pitch_chroma;
 };
 
-#endif   // !defined(_JPEG_FRAME_H)
+#endif  // !defined(_JPEG_FRAME_H)
index 1924a543ff017346930f0c2d8c605253555e5728..c9b8090544b8313f7cf769812580fa4e11e0fd39 100644 (file)
@@ -33,9 +33,8 @@ using namespace std;
 namespace {
 
 // Just an arbitrary order for std::map.
-struct FrameOnDiskLexicalOrder
-{
-       bool operator() (const FrameOnDisk &a, const FrameOnDisk &b) const
+struct FrameOnDiskLexicalOrder {
+       bool operator()(const FrameOnDisk &a, const FrameOnDisk &b) const
        {
                if (a.pts != b.pts)
                        return a.pts < b.pts;
@@ -75,15 +74,15 @@ struct PendingDecode {
 
 // There can be multiple JPEGFrameView instances, so make all the metrics static.
 once_flag jpeg_metrics_inited;
-atomic<int64_t> metric_jpeg_cache_used_bytes{0};  // Same value as cache_bytes_used.
-atomic<int64_t> metric_jpeg_cache_limit_bytes{size_t(CACHE_SIZE_MB) * 1024 * 1024};
-atomic<int64_t> metric_jpeg_cache_given_up_frames{0};
-atomic<int64_t> metric_jpeg_cache_hit_frames{0};
-atomic<int64_t> metric_jpeg_cache_miss_frames{0};
-atomic<int64_t> metric_jpeg_software_decode_frames{0};
-atomic<int64_t> metric_jpeg_software_fail_frames{0};
-atomic<int64_t> metric_jpeg_vaapi_decode_frames{0};
-atomic<int64_t> metric_jpeg_vaapi_fail_frames{0};
+atomic<int64_t> metric_jpeg_cache_used_bytes{ 0 };  // Same value as cache_bytes_used.
+atomic<int64_t> metric_jpeg_cache_limit_bytes{ size_t(CACHE_SIZE_MB) * 1024 * 1024 };
+atomic<int64_t> metric_jpeg_cache_given_up_frames{ 0 };
+atomic<int64_t> metric_jpeg_cache_hit_frames{ 0 };
+atomic<int64_t> metric_jpeg_cache_miss_frames{ 0 };
+atomic<int64_t> metric_jpeg_software_decode_frames{ 0 };
+atomic<int64_t> metric_jpeg_software_fail_frames{ 0 };
+atomic<int64_t> metric_jpeg_vaapi_decode_frames{ 0 };
+atomic<int64_t> metric_jpeg_vaapi_fail_frames{ 0 };
 
 }  // namespace
 
@@ -93,7 +92,7 @@ map<FrameOnDisk, LRUFrame, FrameOnDiskLexicalOrder> cache;  // Under cache_mu.
 size_t cache_bytes_used = 0;  // Under cache_mu.
 condition_variable any_pending_decodes;
 deque<PendingDecode> pending_decodes;  // Under cache_mu.
-atomic<size_t> event_counter{0};
+atomic<size_t> event_counter{ 0 };
 extern QGLWidget *global_share_widget;
 extern atomic<bool> should_quit;
 
@@ -114,24 +113,24 @@ shared_ptr<Frame> decode_jpeg(const string &jpeg)
 
        jpeg_decompress_struct dinfo;
        JPEGWrapErrorManager error_mgr(&dinfo);
-       if (!error_mgr.run([&dinfo]{ jpeg_create_decompress(&dinfo); })) {
+       if (!error_mgr.run([&dinfo] { jpeg_create_decompress(&dinfo); })) {
                return get_black_frame();
        }
        JPEGDestroyer destroy_dinfo(&dinfo);
 
-       if (!error_mgr.run([&dinfo, &jpeg]{
-               jpeg_mem_src(&dinfo, reinterpret_cast<const unsigned char *>(jpeg.data()), jpeg.size());
-               jpeg_read_header(&dinfo, true);
-       })) {
+       if (!error_mgr.run([&dinfo, &jpeg] {
+                   jpeg_mem_src(&dinfo, reinterpret_cast<const unsigned char *>(jpeg.data()), jpeg.size());
+                   jpeg_read_header(&dinfo, true);
+           })) {
                return get_black_frame();
        }
 
        if (dinfo.num_components != 3) {
                fprintf(stderr, "Not a color JPEG. (%d components, Y=%dx%d, Cb=%dx%d, Cr=%dx%d)\n",
-                       dinfo.num_components,
-                       dinfo.comp_info[0].h_samp_factor, dinfo.comp_info[0].v_samp_factor,
-                       dinfo.comp_info[1].h_samp_factor, dinfo.comp_info[1].v_samp_factor,
-                       dinfo.comp_info[2].h_samp_factor, dinfo.comp_info[2].v_samp_factor);
+                       dinfo.num_components,
+                       dinfo.comp_info[0].h_samp_factor, dinfo.comp_info[0].v_samp_factor,
+                       dinfo.comp_info[1].h_samp_factor, dinfo.comp_info[1].v_samp_factor,
+                       dinfo.comp_info[2].h_samp_factor, dinfo.comp_info[2].v_samp_factor);
                return get_black_frame();
        }
        if (dinfo.comp_info[0].h_samp_factor != dinfo.max_h_samp_factor ||
@@ -141,16 +140,16 @@ shared_ptr<Frame> decode_jpeg(const string &jpeg)
            (dinfo.max_h_samp_factor % dinfo.comp_info[1].h_samp_factor) != 0 ||
            (dinfo.max_v_samp_factor % dinfo.comp_info[1].v_samp_factor) != 0) {  // No 2:3 subsampling or other weirdness.
                fprintf(stderr, "Unsupported subsampling scheme. (Y=%dx%d, Cb=%dx%d, Cr=%dx%d)\n",
-                       dinfo.comp_info[0].h_samp_factor, dinfo.comp_info[0].v_samp_factor,
-                       dinfo.comp_info[1].h_samp_factor, dinfo.comp_info[1].v_samp_factor,
-                       dinfo.comp_info[2].h_samp_factor, dinfo.comp_info[2].v_samp_factor);
+                       dinfo.comp_info[0].h_samp_factor, dinfo.comp_info[0].v_samp_factor,
+                       dinfo.comp_info[1].h_samp_factor, dinfo.comp_info[1].v_samp_factor,
+                       dinfo.comp_info[2].h_samp_factor, dinfo.comp_info[2].v_samp_factor);
                exit(1);
        }
        dinfo.raw_data_out = true;
 
-       if (!error_mgr.run([&dinfo]{
-               jpeg_start_decompress(&dinfo);
-       })) {
+       if (!error_mgr.run([&dinfo] {
+                   jpeg_start_decompress(&dinfo);
+           })) {
                return get_black_frame();
        }
 
@@ -177,21 +176,21 @@ shared_ptr<Frame> decode_jpeg(const string &jpeg)
        frame->pitch_chroma = chroma_width_blocks * DCTSIZE;
 
        if (!error_mgr.run([&dinfo, &frame, v_mcu_size, mcu_height_blocks] {
-               JSAMPROW yptr[v_mcu_size], cbptr[v_mcu_size], crptr[v_mcu_size];
-               JSAMPARRAY data[3] = { yptr, cbptr, crptr };
-               for (unsigned y = 0; y < mcu_height_blocks; ++y) {
-                       // NOTE: The last elements of cbptr/crptr will be unused for vertically subsampled chroma.
-                       for (unsigned yy = 0; yy < v_mcu_size; ++yy) {
-                               yptr[yy] = frame->y.get() + (y * DCTSIZE * dinfo.max_v_samp_factor + yy) * frame->pitch_y;
-                               cbptr[yy] = frame->cb.get() + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * frame->pitch_chroma;
-                               crptr[yy] = frame->cr.get() + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * frame->pitch_chroma;
-                       }
-
-                       jpeg_read_raw_data(&dinfo, data, v_mcu_size);
-               }
-
-               (void)jpeg_finish_decompress(&dinfo);
-       })) {
+                   JSAMPROW yptr[v_mcu_size], cbptr[v_mcu_size], crptr[v_mcu_size];
+                   JSAMPARRAY data[3] = { yptr, cbptr, crptr };
+                   for (unsigned y = 0; y < mcu_height_blocks; ++y) {
+                           // NOTE: The last elements of cbptr/crptr will be unused for vertically subsampled chroma.
+                           for (unsigned yy = 0; yy < v_mcu_size; ++yy) {
+                                   yptr[yy] = frame->y.get() + (y * DCTSIZE * dinfo.max_v_samp_factor + yy) * frame->pitch_y;
+                                   cbptr[yy] = frame->cb.get() + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * frame->pitch_chroma;
+                                   crptr[yy] = frame->cr.get() + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * frame->pitch_chroma;
+                           }
+
+                           jpeg_read_raw_data(&dinfo, data, v_mcu_size);
+                   }
+
+                   (void)jpeg_finish_decompress(&dinfo);
+           })) {
                return get_black_frame();
        }
 
@@ -203,7 +202,8 @@ void prune_cache()
 {
        // Assumes cache_mu is held.
        int64_t bytes_still_to_remove = cache_bytes_used - (size_t(CACHE_SIZE_MB) * 1024 * 1024) * 9 / 10;
-       if (bytes_still_to_remove <= 0) return;
+       if (bytes_still_to_remove <= 0)
+               return;
 
        vector<pair<size_t, size_t>> lru_timestamps_and_size;
        for (const auto &key_and_value : cache) {
@@ -218,10 +218,11 @@ void prune_cache()
        for (const pair<size_t, size_t> &it : lru_timestamps_and_size) {
                lru_cutoff_point = it.first;
                bytes_still_to_remove -= it.second;
-               if (bytes_still_to_remove <= 0) break;
+               if (bytes_still_to_remove <= 0)
+                       break;
        }
 
-       for (auto it = cache.begin(); it != cache.end(); ) {
+       for (auto it = cache.begin(); it != cache.end();) {
                if (it->second.last_used <= lru_cutoff_point) {
                        cache_bytes_used -= frame_size(*it->second.frame);
                        metric_jpeg_cache_used_bytes = cache_bytes_used;
@@ -323,7 +324,7 @@ void JPEGFrameView::jpeg_decoder_thread_func()
                                ++num_decoded;
                                if (num_decoded % 1000 == 0) {
                                        fprintf(stderr, "Decoded %zu images, dropped %zu (%.2f%% dropped)\n",
-                                               num_decoded, num_dropped, (100.0 * num_dropped) / (num_decoded + num_dropped));
+                                               num_decoded, num_dropped, (100.0 * num_dropped) / (num_decoded + num_dropped));
                                }
                        }
                        if (subframe_idx == 0) {
@@ -351,16 +352,16 @@ void JPEGFrameView::shutdown()
 JPEGFrameView::JPEGFrameView(QWidget *parent)
        : QGLWidget(parent, global_share_widget)
 {
-       call_once(jpeg_metrics_inited, []{
+       call_once(jpeg_metrics_inited, [] {
                global_metrics.add("jpeg_cache_used_bytes", &metric_jpeg_cache_used_bytes, Metrics::TYPE_GAUGE);
                global_metrics.add("jpeg_cache_limit_bytes", &metric_jpeg_cache_limit_bytes, Metrics::TYPE_GAUGE);
-               global_metrics.add("jpeg_cache_frames", {{ "action", "given_up" }}, &metric_jpeg_cache_given_up_frames);
-               global_metrics.add("jpeg_cache_frames", {{ "action", "hit" }}, &metric_jpeg_cache_hit_frames);
-               global_metrics.add("jpeg_cache_frames", {{ "action", "miss" }}, &metric_jpeg_cache_miss_frames);
-               global_metrics.add("jpeg_decode_frames", {{ "decoder", "software" }, { "result", "decode" }}, &metric_jpeg_software_decode_frames);
-               global_metrics.add("jpeg_decode_frames", {{ "decoder", "software" }, { "result", "fail" }}, &metric_jpeg_software_fail_frames);
-               global_metrics.add("jpeg_decode_frames", {{ "decoder", "vaapi" }, { "result", "decode" }}, &metric_jpeg_vaapi_decode_frames);
-               global_metrics.add("jpeg_decode_frames", {{ "decoder", "vaapi" }, { "result", "fail" }}, &metric_jpeg_vaapi_fail_frames);
+               global_metrics.add("jpeg_cache_frames", { { "action", "given_up" } }, &metric_jpeg_cache_given_up_frames);
+               global_metrics.add("jpeg_cache_frames", { { "action", "hit" } }, &metric_jpeg_cache_hit_frames);
+               global_metrics.add("jpeg_cache_frames", { { "action", "miss" } }, &metric_jpeg_cache_miss_frames);
+               global_metrics.add("jpeg_decode_frames", { { "decoder", "software" }, { "result", "decode" } }, &metric_jpeg_software_decode_frames);
+               global_metrics.add("jpeg_decode_frames", { { "decoder", "software" }, { "result", "fail" } }, &metric_jpeg_software_fail_frames);
+               global_metrics.add("jpeg_decode_frames", { { "decoder", "vaapi" }, { "result", "decode" } }, &metric_jpeg_vaapi_decode_frames);
+               global_metrics.add("jpeg_decode_frames", { { "decoder", "vaapi" }, { "result", "fail" } }, &metric_jpeg_vaapi_fail_frames);
        });
 }
 
index 3b6f8e3044b804d637bff98f026b1795db9aaace..c13e19694783e3e2b178ce2f28ef5b6e8fcb3702 100644 (file)
@@ -1,5 +1,5 @@
-#include <assert.h>
 #include <arpa/inet.h>
+#include <assert.h>
 #include <atomic>
 #include <chrono>
 #include <condition_variable>
@@ -21,27 +21,27 @@ extern "C" {
 }
 
 #include "clip_list.h"
-#include "shared/context.h"
 #include "defs.h"
-#include "shared/disk_space_estimator.h"
-#include "shared/ffmpeg_raii.h"
 #include "flags.h"
-#include "frame_on_disk.h"
 #include "frame.pb.h"
-#include "shared/httpd.h"
+#include "frame_on_disk.h"
 #include "mainwindow.h"
 #include "player.h"
+#include "shared/context.h"
+#include "shared/disk_space_estimator.h"
+#include "shared/ffmpeg_raii.h"
+#include "shared/httpd.h"
+#include "shared/metrics.h"
 #include "shared/post_to_main_thread.h"
 #include "shared/ref_counted_gl_sync.h"
 #include "shared/timebase.h"
-#include "shared/metrics.h"
 #include "ui_mainwindow.h"
 #include "vaapi_jpeg_decoder.h"
 
 #include <QApplication>
 #include <QGLFormat>
-#include <QSurfaceFormat>
 #include <QProgressDialog>
+#include <QSurfaceFormat>
 #include <movit/init.h>
 #include <movit/util.h>
 
@@ -52,7 +52,7 @@ constexpr char frame_magic[] = "Ftbifrm0";
 constexpr size_t frame_magic_len = 8;
 
 mutex RefCountedGLsync::fence_lock;
-atomic<bool> should_quit{false};
+atomic<bool> should_quit{ false };
 
 int64_t start_pts = -1;
 
@@ -70,7 +70,7 @@ mutex frame_mu;
 vector<FrameOnDisk> frames[MAX_STREAMS];  // Under frame_mu.
 vector<string> frame_filenames;  // Under frame_mu.
 
-atomic<int64_t> metric_received_frames[MAX_STREAMS]{{0}};
+atomic<int64_t> metric_received_frames[MAX_STREAMS]{ { 0 } };
 Summary metric_received_frame_size_bytes;
 
 namespace {
@@ -80,7 +80,7 @@ FrameOnDisk write_frame(int stream_idx, int64_t pts, const uint8_t *data, size_t
        if (open_frame_files.count(stream_idx) == 0) {
                char filename[256];
                snprintf(filename, sizeof(filename), "%s/frames/cam%d-pts%09ld.frames",
-                       global_flags.working_directory.c_str(), stream_idx, pts);
+                        global_flags.working_directory.c_str(), stream_idx, pts);
                FILE *fp = fopen(filename, "wb");
                if (fp == nullptr) {
                        perror(filename);
@@ -182,7 +182,7 @@ FrameOnDisk write_frame(int stream_idx, int64_t pts, const uint8_t *data, size_t
        return frame;
 }
 
-} // namespace
+}  // namespace
 
 HTTPD *global_httpd;
 
@@ -320,9 +320,9 @@ void load_frame_file(const char *filename, const string &basename, unsigned file
                // OK, found the magic. Try to parse the frame header.
                magic_offset = 0;
 
-               if (skipped_bytes > 0)  {
+               if (skipped_bytes > 0) {
                        fprintf(stderr, "WARNING: %s: Skipped %zu garbage bytes in the middle.\n",
-                               filename, skipped_bytes);
+                               filename, skipped_bytes);
                        skipped_bytes = 0;
                }
 
@@ -369,7 +369,7 @@ void load_frame_file(const char *filename, const string &basename, unsigned file
 
        if (skipped_bytes > 0) {
                fprintf(stderr, "WARNING: %s: Skipped %zu garbage bytes at the end.\n",
-                       filename, skipped_bytes);
+                       filename, skipped_bytes);
        }
 
        off_t size = ftell(fp);
@@ -401,7 +401,7 @@ void load_existing_frames()
        }
 
        vector<string> frame_basenames;
-       for ( ;; ) {
+       for (;;) {
                errno = 0;
                dirent *de = readdir(dir);
                if (de == nullptr) {
@@ -451,7 +451,7 @@ void load_existing_frames()
 
        for (int stream_idx = 0; stream_idx < MAX_STREAMS; ++stream_idx) {
                sort(frames[stream_idx].begin(), frames[stream_idx].end(),
-                       [](const auto &a, const auto &b) { return a.pts < b.pts; });
+                    [](const auto &a, const auto &b) { return a.pts < b.pts; });
        }
 
        db.clean_unused_frame_files(frame_basenames);
@@ -460,7 +460,7 @@ void load_existing_frames()
 void record_thread_func()
 {
        for (unsigned i = 0; i < MAX_STREAMS; ++i) {
-               global_metrics.add("received_frames", {{ "stream", to_string(i) }}, &metric_received_frames[i]);
+               global_metrics.add("received_frames", { { "stream", to_string(i) } }, &metric_received_frames[i]);
        }
        global_metrics.add("received_frame_size_bytes", &metric_received_frame_size_bytes);
 
@@ -486,7 +486,7 @@ void record_thread_func()
 
                while (!should_quit.load()) {
                        AVPacket pkt;
-                       unique_ptr<AVPacket, decltype(av_packet_unref)*> pkt_cleanup(
+                       unique_ptr<AVPacket, decltype(av_packet_unref) *> pkt_cleanup(
                                &pkt, av_packet_unref);
                        av_init_packet(&pkt);
                        pkt.data = nullptr;
index 0537f0099688bda5b31d39bbca94750d7b32ee24..6553c8c56190e6f75571eee2d53e628563e2bd5c 100644 (file)
@@ -1,12 +1,12 @@
 #include "mainwindow.h"
 
-#include "shared/aboutdialog.h"
 #include "clip_list.h"
 #include "export.h"
-#include "shared/disk_space_estimator.h"
 #include "flags.h"
 #include "frame_on_disk.h"
 #include "player.h"
+#include "shared/aboutdialog.h"
+#include "shared/disk_space_estimator.h"
 #include "shared/post_to_main_thread.h"
 #include "shared/timebase.h"
 #include "ui_mainwindow.h"
@@ -175,17 +175,17 @@ MainWindow::MainWindow()
        });
 
        // TODO: support drag-and-drop.
-       connect(ui->playlist_move_up_btn, &QPushButton::clicked, [this]{ playlist_move(-1); });
-       connect(ui->playlist_move_down_btn, &QPushButton::clicked, [this]{ playlist_move(1); });
+       connect(ui->playlist_move_up_btn, &QPushButton::clicked, [this] { playlist_move(-1); });
+       connect(ui->playlist_move_down_btn, &QPushButton::clicked, [this] { playlist_move(1); });
 
        connect(ui->playlist->selectionModel(), &QItemSelectionModel::selectionChanged,
-               this, &MainWindow::playlist_selection_changed);
+               this, &MainWindow::playlist_selection_changed);
        playlist_selection_changed();  // First time set-up.
 
        preview_player.reset(new Player(ui->preview_display, Player::NO_STREAM_OUTPUT));
        live_player.reset(new Player(ui->live_display, Player::HTTPD_STREAM_OUTPUT));
-       live_player->set_done_callback([this]{
-               post_to_main_thread([this]{
+       live_player->set_done_callback([this] {
+               post_to_main_thread([this] {
                        live_player_clip_done();
                });
        });
@@ -202,7 +202,7 @@ MainWindow::MainWindow()
        ui->undo_action->setEnabled(true);
 
        connect(ui->clip_list->selectionModel(), &QItemSelectionModel::currentChanged,
-               this, &MainWindow::clip_list_selection_changed);
+               this, &MainWindow::clip_list_selection_changed);
 
        // Find out how many cameras we have in the existing frames;
        // if none, we start with two cameras.
@@ -255,7 +255,7 @@ void MainWindow::change_num_cameras()
                QShortcut *shortcut = new QShortcut(QKeySequence(Qt::Key_1 + i), this);
                connect(shortcut, &QShortcut::activated, preview_btn, &QPushButton::click);
 
-               connect(preview_btn, &QPushButton::clicked, [this, i]{ preview_angle_clicked(i); });
+               connect(preview_btn, &QPushButton::clicked, [this, i] { preview_angle_clicked(i); });
        }
 
        cliplist_clips->change_num_cameras(num_cameras);
@@ -336,7 +336,7 @@ void MainWindow::preview_clicked()
                if (selected->hasSelection()) {
                        QModelIndex index = selected->currentIndex();
                        const Clip &clip = *playlist_clips->clip(index.row());
-                       preview_player->play({clip});
+                       preview_player->play({ clip });
                        return;
                }
        }
@@ -346,7 +346,7 @@ void MainWindow::preview_clicked()
 
        QItemSelectionModel *selected = ui->clip_list->selectionModel();
        if (!selected->hasSelection()) {
-               preview_player->play({*cliplist_clips->back()});
+               preview_player->play({ *cliplist_clips->back() });
                return;
        }
 
@@ -357,7 +357,7 @@ void MainWindow::preview_clicked()
        } else {
                clip.stream_idx = ui->preview_display->get_stream_idx();
        }
-       preview_player->play({clip});
+       preview_player->play({ clip });
 }
 
 void MainWindow::preview_angle_clicked(unsigned stream_idx)
@@ -495,7 +495,7 @@ void MainWindow::play_clicked()
                clips.push_back(*playlist_clips->clip(row));
        }
        live_player->play(clips);
-       playlist_clips->set_progress({{ start_row, 0.0f }});
+       playlist_clips->set_progress({ { start_row, 0.0f } });
        playlist_clips->set_currently_playing(start_row, 0.0f);
        playlist_selection_changed();
 
@@ -510,7 +510,7 @@ void MainWindow::stop_clicked()
        size_t last_row = playlist_clips->size() - 1;
        playlist_clips->set_currently_playing(last_row, 0.0f);
        live_player_index_to_row.clear();
-       live_player->play({fake_clip});
+       live_player->play({ fake_clip });
 }
 
 void MainWindow::live_player_clip_done()
@@ -521,7 +521,7 @@ void MainWindow::live_player_clip_done()
                playlist_clips->set_progress({});
                playlist_clips->set_currently_playing(-1, 0.0f);
        } else {
-               playlist_clips->set_progress({{ row + 1, 0.0f }});
+               playlist_clips->set_progress({ { row + 1, 0.0f } });
                playlist_clips->set_currently_playing(row + 1, 0.0f);
        }
        ui->stop_btn->setEnabled(false);
@@ -759,7 +759,8 @@ bool MainWindow::eventFilter(QObject *watched, QEvent *event)
                }
                int column = destination->columnAt(wheel->x());
                int row = destination->rowAt(wheel->y());
-               if (column == -1 || row == -1) return false;
+               if (column == -1 || row == -1)
+                       return false;
 
                // Only adjust pts with the wheel if the given row is selected.
                if (!destination->hasFocus() ||
@@ -770,8 +771,7 @@ bool MainWindow::eventFilter(QObject *watched, QEvent *event)
                currently_deferring_model_changes = true;
                {
                        current_change_id = (watched == ui->clip_list->viewport()) ? "cliplist:" : "playlist:";
-                       ClipProxy clip = (watched == ui->clip_list->viewport()) ?
-                               cliplist_clips->mutable_clip(row) : playlist_clips->mutable_clip(row);
+                       ClipProxy clip = (watched == ui->clip_list->viewport()) ? cliplist_clips->mutable_clip(row) : playlist_clips->mutable_clip(row);
                        if (watched == ui->playlist->viewport()) {
                                stream_idx = clip->stream_idx;
                        }
@@ -841,7 +841,7 @@ void MainWindow::preview_single_frame(int64_t pts, unsigned stream_idx, MainWind
        Clip fake_clip;
        fake_clip.pts_in = pts;
        fake_clip.pts_out = pts + 1;
-       preview_player->play({fake_clip});
+       preview_player->play({ fake_clip });
 }
 
 void MainWindow::playlist_selection_changed()
@@ -863,7 +863,7 @@ void MainWindow::playlist_selection_changed()
                for (size_t row = 0; row < playlist_clips->size(); ++row) {
                        clips.push_back(*playlist_clips->clip(row));
                }
-               double remaining = compute_time_left(clips, {{selected->selectedRows().front().row(), 0.0}});
+               double remaining = compute_time_left(clips, { { selected->selectedRows().front().row(), 0.0 } });
                set_output_status(format_duration(remaining) + " ready");
        }
 }
@@ -1040,7 +1040,8 @@ void MainWindow::quality_toggled(int quality, bool checked)
                msgbox.setText(QString::fromStdString(
                        "The interpolation quality for the main output cannot be changed at runtime, "
                        "except being turned completely off; it will take effect for exported files "
-                       "only until next restart. The live output quality thus remains at " + to_string(flow_initialized_interpolation_quality) + "."));
+                       "only until next restart. The live output quality thus remains at " +
+                       to_string(flow_initialized_interpolation_quality) + "."));
                msgbox.exec();
        }
 
@@ -1075,9 +1076,10 @@ void MainWindow::set_output_status(const string &status)
        queue_status = status;
 }
 
-pair<string, string> MainWindow::get_queue_status() const {
+pair<string, string> MainWindow::get_queue_status() const
+{
        lock_guard<mutex> lock(queue_status_mu);
-       return {queue_status, "text/plain"};
+       return { queue_status, "text/plain" };
 }
 
 void MainWindow::display_frame(unsigned stream_idx, const FrameOnDisk &frame)
@@ -1087,7 +1089,7 @@ void MainWindow::display_frame(unsigned stream_idx, const FrameOnDisk &frame)
                return;
        }
        if (stream_idx >= num_cameras) {
-               post_to_main_thread_and_wait([this, stream_idx]{
+               post_to_main_thread_and_wait([this, stream_idx] {
                        num_cameras = stream_idx + 1;
                        change_num_cameras();
                });
@@ -1095,7 +1097,7 @@ void MainWindow::display_frame(unsigned stream_idx, const FrameOnDisk &frame)
        displays[stream_idx].display->setFrame(stream_idx, frame);
 }
 
-template <class Model>
+template<class Model>
 void MainWindow::replace_model(QTableView *view, Model **model, Model *new_model)
 {
        QItemSelectionModel *old_selection_model = view->selectionModel();
@@ -1117,7 +1119,7 @@ void MainWindow::tally_received()
        unsigned time_to_next_tally_ms;
        if (http_reply->error()) {
                fprintf(stderr, "HTTP get of '%s' failed: %s\n", global_flags.tally_url.c_str(),
-                       http_reply->errorString().toStdString().c_str());
+                       http_reply->errorString().toStdString().c_str());
                ui->live_frame->setStyleSheet("");
                time_to_next_tally_ms = 1000;
        } else {
index 669fc0e9e29e810c2ef27f91d9a9bfa8ccf3050d..afc16ff4c68c41f8cca8ff9a04ba38256f0fe369 100644 (file)
@@ -5,15 +5,15 @@
 #include "db.h"
 #include "state.pb.h"
 
-#include <deque>
-#include <memory>
-#include <mutex>
 #include <QLabel>
 #include <QMainWindow>
 #include <QNetworkAccessManager>
+#include <deque>
+#include <memory>
+#include <mutex>
 #include <stdbool.h>
-#include <sys/types.h>
 #include <string>
+#include <sys/types.h>
 #include <utility>
 
 namespace Ui {
@@ -53,7 +53,8 @@ private:
        int64_t scrub_pts_origin;
 
        // Which element (e.g. pts_in on clip 4) we are scrubbing.
-       enum ScrubType { SCRUBBING_CLIP_LIST, SCRUBBING_PLAYLIST } scrub_type;
+       enum ScrubType { SCRUBBING_CLIP_LIST,
+                        SCRUBBING_PLAYLIST } scrub_type;
        int scrub_row;
        int scrub_column;
 
@@ -115,7 +116,8 @@ private:
        void state_changed(const StateProto &state);  // Called post-filtering.
        void save_settings();
 
-       enum Rounding { FIRST_AT_OR_AFTER, LAST_BEFORE };
+       enum Rounding { FIRST_AT_OR_AFTER,
+                       LAST_BEFORE };
        void preview_single_frame(int64_t pts, unsigned stream_idx, Rounding rounding);
 
        // Also covers when the playlist itself changes.
@@ -139,7 +141,7 @@ private:
 
        void highlight_camera_input(int stream_idx);
 
-       template <class Model>
+       template<class Model>
        void replace_model(QTableView *view, Model **model, Model *new_model);
 
        void start_tally();
index 4f110f9222fe621c7d55874a2e0bdfafdcd9feca..f342ac57bb60bf8acf682bcff86b02c86dc99314 100644 (file)
@@ -1,13 +1,13 @@
 #include "player.h"
 
 #include "clip_list.h"
-#include "shared/context.h"
 #include "defs.h"
-#include "shared/ffmpeg_raii.h"
 #include "flags.h"
 #include "frame_on_disk.h"
-#include "shared/httpd.h"
 #include "jpeg_frame_view.h"
+#include "shared/context.h"
+#include "shared/ffmpeg_raii.h"
+#include "shared/httpd.h"
 #include "shared/metrics.h"
 #include "shared/mux.h"
 #include "shared/timebase.h"
@@ -142,12 +142,11 @@ void Player::play_playlist_once()
                        steady_clock::duration time_behind = steady_clock::now() - next_frame_start;
                        if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(200)) {
                                fprintf(stderr, "WARNING: %ld ms behind, dropping a frame (no matter the type).\n",
-                                       lrint(1e3 * duration<double>(time_behind).count()));
+                                       lrint(1e3 * duration<double>(time_behind).count()));
                                ++metric_dropped_unconditional_frame;
                                continue;
                        }
 
-
                        // pts not affected by the swapping below.
                        int64_t in_pts_for_progress = in_pts, in_pts_secondary_for_progress = -1;
 
@@ -180,7 +179,7 @@ void Player::play_playlist_once()
 
                        if (progress_callback != nullptr) {
                                // NOTE: None of this will take into account any snapping done below.
-                               map<size_t, double> progress{{ clip_idx, calc_progress(clip, in_pts_for_progress) }};
+                               map<size_t, double> progress{ { clip_idx, calc_progress(clip, in_pts_for_progress) } };
                                if (next_clip != nullptr && time_left_this_clip <= next_clip_fade_time) {
                                        progress[clip_idx + 1] = calc_progress(*next_clip, in_pts_secondary_for_progress);
                                }
@@ -198,7 +197,7 @@ void Player::play_playlist_once()
                                unique_lock<mutex> lock(queue_state_mu);
                                if (video_stream == nullptr) {
                                        // No queue, just wait until the right time and then show the frame.
-                                       new_clip_changed.wait_until(lock, next_frame_start, [this]{
+                                       new_clip_changed.wait_until(lock, next_frame_start, [this] {
                                                return should_quit || new_clip_ready || override_stream_idx != -1;
                                        });
                                        if (should_quit) {
@@ -211,7 +210,7 @@ void Player::play_playlist_once()
                                        //
                                        // In this case, we don't sleep until next_frame_start; the displaying is
                                        // done by the queue.
-                                       new_clip_changed.wait(lock, [this]{
+                                       new_clip_changed.wait(lock, [this] {
                                                if (num_queued_frames < max_queued_frames) {
                                                        return true;
                                                }
@@ -253,7 +252,7 @@ void Player::play_playlist_once()
                        for (FrameOnDisk snap_frame : { frame_lower, frame_upper }) {
                                if (fabs(snap_frame.pts - in_pts) < pts_snap_tolerance) {
                                        display_single_frame(primary_stream_idx, snap_frame, secondary_stream_idx,
-                                                            secondary_frame, fade_alpha, next_frame_start, /*snapped=*/true);
+                                                            secondary_frame, fade_alpha, next_frame_start, /*snapped=*/true);
                                        in_pts_origin += snap_frame.pts - in_pts;
                                        snapped = true;
                                        break;
@@ -279,7 +278,7 @@ void Player::play_playlist_once()
                        // decorrelated with no common factor, of course (e.g. 12.345 â†’ 34.567, which we should
                        // really never see in practice).
                        for (double fraction : { 1.0 / 2.0, 1.0 / 3.0, 2.0 / 3.0, 1.0 / 4.0, 3.0 / 4.0,
-                                                1.0 / 5.0, 2.0 / 5.0, 3.0 / 5.0, 4.0 / 5.0 }) {
+                                                1.0 / 5.0, 2.0 / 5.0, 3.0 / 5.0, 4.0 / 5.0 }) {
                                double subsnap_pts = frame_lower.pts + fraction * (frame_upper.pts - frame_lower.pts);
                                if (fabs(subsnap_pts - in_pts) < pts_snap_tolerance) {
                                        in_pts_origin += lrint(subsnap_pts) - in_pts;
@@ -290,7 +289,7 @@ void Player::play_playlist_once()
 
                        if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(100)) {
                                fprintf(stderr, "WARNING: %ld ms behind, dropping an interpolated frame.\n",
-                                       lrint(1e3 * duration<double>(time_behind).count()));
+                                       lrint(1e3 * duration<double>(time_behind).count()));
                                ++metric_dropped_interpolated_frame;
                                continue;
                        }
@@ -335,7 +334,7 @@ void Player::play_playlist_once()
 
 void Player::display_single_frame(int primary_stream_idx, const FrameOnDisk &primary_frame, int secondary_stream_idx, const FrameOnDisk &secondary_frame, double fade_alpha, steady_clock::time_point frame_start, bool snapped)
 {
-       auto display_func = [this, primary_stream_idx, primary_frame, secondary_frame, fade_alpha]{
+       auto display_func = [this, primary_stream_idx, primary_frame, secondary_frame, fade_alpha] {
                if (destination != nullptr) {
                        destination->setFrame(primary_stream_idx, primary_frame, secondary_frame, fade_alpha);
                }
@@ -362,8 +361,8 @@ void Player::display_single_frame(int primary_stream_idx, const FrameOnDisk &pri
                                ++metric_faded_frame;
                        }
                        video_stream->schedule_faded_frame(frame_start, pts, display_func,
-                               QueueSpotHolder(this), primary_frame,
-                               secondary_frame, fade_alpha);
+                                                          QueueSpotHolder(this), primary_frame,
+                                                          secondary_frame, fade_alpha);
                }
        }
        last_pts_played = primary_frame.pts;
@@ -398,15 +397,15 @@ Player::Player(JPEGFrameView *destination, Player::StreamOutput stream_output, A
        player_thread = thread(&Player::thread_func, this, file_avctx);
 
        if (stream_output == HTTPD_STREAM_OUTPUT) {
-               global_metrics.add("http_output_frames", {{ "type", "original" }, { "reason", "edge_frame_or_no_interpolation" }}, &metric_original_frame);
-               global_metrics.add("http_output_frames", {{ "type", "faded" }, { "reason", "edge_frame_or_no_interpolation" }}, &metric_faded_frame);
-               global_metrics.add("http_output_frames", {{ "type", "original" }, { "reason", "snapped" }}, &metric_original_snapped_frame);
-               global_metrics.add("http_output_frames", {{ "type", "faded" }, { "reason", "snapped" }}, &metric_faded_snapped_frame);
-               global_metrics.add("http_output_frames", {{ "type", "interpolated" }}, &metric_interpolated_frame);
-               global_metrics.add("http_output_frames", {{ "type", "interpolated_faded" }}, &metric_interpolated_faded_frame);
-               global_metrics.add("http_output_frames", {{ "type", "refresh" }}, &metric_refresh_frame);
-               global_metrics.add("http_dropped_frames", {{ "type", "interpolated" }}, &metric_dropped_interpolated_frame);
-               global_metrics.add("http_dropped_frames", {{ "type", "unconditional" }}, &metric_dropped_unconditional_frame);
+               global_metrics.add("http_output_frames", { { "type", "original" }, { "reason", "edge_frame_or_no_interpolation" } }, &metric_original_frame);
+               global_metrics.add("http_output_frames", { { "type", "faded" }, { "reason", "edge_frame_or_no_interpolation" } }, &metric_faded_frame);
+               global_metrics.add("http_output_frames", { { "type", "original" }, { "reason", "snapped" } }, &metric_original_snapped_frame);
+               global_metrics.add("http_output_frames", { { "type", "faded" }, { "reason", "snapped" } }, &metric_faded_snapped_frame);
+               global_metrics.add("http_output_frames", { { "type", "interpolated" } }, &metric_interpolated_frame);
+               global_metrics.add("http_output_frames", { { "type", "interpolated_faded" } }, &metric_interpolated_faded_frame);
+               global_metrics.add("http_output_frames", { { "type", "refresh" } }, &metric_refresh_frame);
+               global_metrics.add("http_dropped_frames", { { "type", "interpolated" } }, &metric_dropped_interpolated_frame);
+               global_metrics.add("http_dropped_frames", { { "type", "unconditional" } }, &metric_dropped_unconditional_frame);
        }
 }
 
index 27676e3f9480e750e97427e86ce748e38fb44197..549b1f388bc8d5717db64a3a0858cec1c96c648a 100644 (file)
@@ -63,7 +63,7 @@ private:
        bool find_surrounding_frames(int64_t pts, int stream_idx, FrameOnDisk *frame_lower, FrameOnDisk *frame_upper);
 
        std::thread player_thread;
-       std::atomic<bool> should_quit{false};
+       std::atomic<bool> should_quit{ false };
 
        JPEGFrameView *destination;
        done_callback_func done_callback;
@@ -71,7 +71,7 @@ private:
 
        std::mutex queue_state_mu;
        std::condition_variable new_clip_changed;
-       std::vector<Clip> queued_clip_list;   // Under queue_state_mu.
+       std::vector<Clip> queued_clip_list;  // Under queue_state_mu.
        bool new_clip_ready = false;  // Under queue_state_mu.
        bool playing = false;  // Under queue_state_mu.
        int override_stream_idx = -1;  // Under queue_state_mu.
@@ -79,15 +79,15 @@ private:
 
        std::unique_ptr<VideoStream> video_stream;  // Can be nullptr.
 
-       std::atomic<int64_t> metric_dropped_interpolated_frame{0};
-       std::atomic<int64_t> metric_dropped_unconditional_frame{0};
-       std::atomic<int64_t> metric_faded_frame{0};
-       std::atomic<int64_t> metric_faded_snapped_frame{0};
-       std::atomic<int64_t> metric_original_frame{0};
-       std::atomic<int64_t> metric_original_snapped_frame{0};
-       std::atomic<int64_t> metric_refresh_frame{0};
-       std::atomic<int64_t> metric_interpolated_frame{0};
-       std::atomic<int64_t> metric_interpolated_faded_frame{0};
+       std::atomic<int64_t> metric_dropped_interpolated_frame{ 0 };
+       std::atomic<int64_t> metric_dropped_unconditional_frame{ 0 };
+       std::atomic<int64_t> metric_faded_frame{ 0 };
+       std::atomic<int64_t> metric_faded_snapped_frame{ 0 };
+       std::atomic<int64_t> metric_original_frame{ 0 };
+       std::atomic<int64_t> metric_original_snapped_frame{ 0 };
+       std::atomic<int64_t> metric_refresh_frame{ 0 };
+       std::atomic<int64_t> metric_interpolated_frame{ 0 };
+       std::atomic<int64_t> metric_interpolated_faded_frame{ 0 };
 
        // under queue_state_mu. Part of this instead of VideoStream so that we own
        // its lock and can sleep on it.
index b9dee062ba8231c27d4b7f5a08204a998119c29c..8d47d4cbdf1eaa775e101a0c283ec5da1b5299fd 100644 (file)
@@ -13,23 +13,30 @@ public:
 
 class QueueSpotHolder {
 public:
-       QueueSpotHolder() : queue(nullptr) {}
+       QueueSpotHolder()
+               : queue(nullptr) {}
 
-       explicit QueueSpotHolder(QueueInterface *queue) : queue(queue) {
+       explicit QueueSpotHolder(QueueInterface *queue)
+               : queue(queue)
+       {
                queue->take_queue_spot();
        }
 
-       QueueSpotHolder(QueueSpotHolder &&other) : queue(other.queue) {
+       QueueSpotHolder(QueueSpotHolder &&other)
+               : queue(other.queue)
+       {
                other.queue = nullptr;
        }
 
-       QueueSpotHolder &operator=(QueueSpotHolder &&other) {
+       QueueSpotHolder &operator=(QueueSpotHolder &&other)
+       {
                queue = other.queue;
                other.queue = nullptr;
                return *this;
        }
 
-       ~QueueSpotHolder() {
+       ~QueueSpotHolder()
+       {
                if (queue != nullptr) {
                        queue->release_queue_spot();
                }
@@ -40,7 +47,7 @@ public:
        QueueSpotHolder &operator=(QueueSpotHolder &) = delete;
 
 private:
-       QueueInterface *queue;  
+       QueueInterface *queue;
 };
 
-#endif // !defined(_QUEUE_SPOT_HOLDER)
+#endif  // !defined(_QUEUE_SPOT_HOLDER)
index cec70da4e6ce20d5df76fbb791e97fac442c03a6..1d0f771cddb2d2f27d414b7393320df44799b51b 100644 (file)
@@ -38,17 +38,17 @@ struct VAResources {
 static list<VAResources> va_resources_freelist;
 static mutex va_resources_mutex;
 
-#define CHECK_VASTATUS(va_status, func)                                 \
-    if (va_status != VA_STATUS_SUCCESS) {                               \
-        fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
-        exit(1);                                                        \
-    }
-
-#define CHECK_VASTATUS_RET(va_status, func)                             \
-    if (va_status != VA_STATUS_SUCCESS) {                               \
-        fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
-        return nullptr;                                                 \
-    }
+#define CHECK_VASTATUS(va_status, func) \
+       if (va_status != VA_STATUS_SUCCESS) { \
+               fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
+               exit(1); \
+       }
+
+#define CHECK_VASTATUS_RET(va_status, func) \
+       if (va_status != VA_STATUS_SUCCESS) { \
+               fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
+               return nullptr; \
+       }
 
 // From libjpeg (although it's of course identical between implementations).
 static const int jpeg_natural_order[DCTSIZE2] = {
@@ -81,8 +81,8 @@ VAResources get_va_resources(unsigned width, unsigned height)
        ret.height = height;
 
        VAStatus va_status = vaCreateSurfaces(va_dpy->va_dpy, VA_RT_FORMAT_YUV422,
-               width, height,
-               &ret.surface, 1, nullptr, 0);
+                                             width, height,
+                                             &ret.surface, 1, nullptr, 0);
        CHECK_VASTATUS(va_status, "vaCreateSurfaces");
 
        va_status = vaCreateContext(va_dpy->va_dpy, config_id, width, height, 0, &ret.surface, 1, &ret.context);
@@ -246,14 +246,14 @@ string get_usable_va_display()
        glob_t g;
        int err = glob("/dev/dri/renderD*", 0, nullptr, &g);
        if (err != 0) {
-               fprintf(stderr, "Couldn't list render nodes (%s) when trying to autodetect a replacement.\n", strerror(errno));
+               fprintf(stderr, "Couldn't list render nodes (%s) when trying to autodetect a replacement.\n", strerror(errno));
        } else {
                for (size_t i = 0; i < g.gl_pathc; ++i) {
                        string path = g.gl_pathv[i];
                        va_dpy = try_open_va(path, nullptr);
                        if (va_dpy != nullptr) {
                                fprintf(stderr, "Autodetected %s as a suitable replacement; using it.\n",
-                                       path.c_str());
+                                       path.c_str());
                                globfree(&g);
                                if (need_env_reset) {
                                        unsetenv("LIBVA_MESSAGING_LEVEL");
@@ -285,7 +285,7 @@ void init_jpeg_vaapi()
        VAConfigAttrib attr = { VAConfigAttribRTFormat, VA_RT_FORMAT_YUV422 };
 
        VAStatus va_status = vaCreateConfig(va_dpy->va_dpy, VAProfileJPEGBaseline, VAEntrypointVLD,
-               &attr, 1, &config_id);
+                                           &attr, 1, &config_id);
        CHECK_VASTATUS(va_status, "vaCreateConfig");
 
        int num_formats = vaMaxNumImageFormats(va_dpy->va_dpy);
@@ -317,7 +317,8 @@ public:
        VABufferDestroyer(VADisplay dpy, VABufferID buf)
                : dpy(dpy), buf(buf) {}
 
-       ~VABufferDestroyer() {
+       ~VABufferDestroyer()
+       {
                VAStatus va_status = vaDestroyBuffer(dpy, buf);
                CHECK_VASTATUS(va_status, "vaDestroyBuffer");
        }
@@ -343,10 +344,10 @@ shared_ptr<Frame> decode_jpeg_vaapi(const string &jpeg)
 
        if (dinfo.num_components != 3) {
                fprintf(stderr, "Not a color JPEG. (%d components, Y=%dx%d, Cb=%dx%d, Cr=%dx%d)\n",
-                       dinfo.num_components,
-                       dinfo.comp_info[0].h_samp_factor, dinfo.comp_info[0].v_samp_factor,
-                       dinfo.comp_info[1].h_samp_factor, dinfo.comp_info[1].v_samp_factor,
-                       dinfo.comp_info[2].h_samp_factor, dinfo.comp_info[2].v_samp_factor);
+                       dinfo.num_components,
+                       dinfo.comp_info[0].h_samp_factor, dinfo.comp_info[0].v_samp_factor,
+                       dinfo.comp_info[1].h_samp_factor, dinfo.comp_info[1].v_samp_factor,
+                       dinfo.comp_info[2].h_samp_factor, dinfo.comp_info[2].v_samp_factor);
                return nullptr;
        }
        if (dinfo.comp_info[0].h_samp_factor != 2 ||
@@ -355,9 +356,9 @@ shared_ptr<Frame> decode_jpeg_vaapi(const string &jpeg)
            dinfo.comp_info[2].h_samp_factor != 1 ||
            dinfo.comp_info[2].v_samp_factor != dinfo.comp_info[0].v_samp_factor) {
                fprintf(stderr, "Not 4:2:2. (Y=%dx%d, Cb=%dx%d, Cr=%dx%d)\n",
-                       dinfo.comp_info[0].h_samp_factor, dinfo.comp_info[0].v_samp_factor,
-                       dinfo.comp_info[1].h_samp_factor, dinfo.comp_info[1].v_samp_factor,
-                       dinfo.comp_info[2].h_samp_factor, dinfo.comp_info[2].v_samp_factor);
+                       dinfo.comp_info[0].h_samp_factor, dinfo.comp_info[0].v_samp_factor,
+                       dinfo.comp_info[1].h_samp_factor, dinfo.comp_info[1].v_samp_factor,
+                       dinfo.comp_info[2].h_samp_factor, dinfo.comp_info[2].v_samp_factor);
                return nullptr;
        }
 
index 7c2f8f90464c3af487c119bf8962830ae299b831..66323b88749afc17a7142c51cc7b2e017ee61758 100644 (file)
@@ -6,14 +6,14 @@ extern "C" {
 }
 
 #include "chroma_subsampler.h"
-#include "shared/context.h"
 #include "flags.h"
 #include "flow.h"
-#include "shared/httpd.h"
 #include "jpeg_frame_view.h"
 #include "movit/util.h"
-#include "shared/mux.h"
 #include "player.h"
+#include "shared/context.h"
+#include "shared/httpd.h"
+#include "shared/mux.h"
 #include "util.h"
 #include "ycbcr_converter.h"
 
@@ -264,7 +264,7 @@ void VideoStream::start()
 
        size_t width = global_flags.width, height = global_flags.height;  // Doesn't matter for MJPEG.
        mux.reset(new Mux(avctx, width, height, Mux::CODEC_MJPEG, /*video_extradata=*/"", /*audio_codec_parameters=*/nullptr,
-               AVCOL_SPC_BT709, COARSE_TIMEBASE, /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));
+                         AVCOL_SPC_BT709, COARSE_TIMEBASE, /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));
 
        encode_thread = thread(&VideoStream::encode_thread_func, this);
 }
@@ -585,7 +585,7 @@ void VideoStream::encode_thread_func()
                        unique_lock<mutex> lock(queue_lock);
 
                        // Wait until we have a frame to play.
-                       queue_changed.wait(lock, [this]{
+                       queue_changed.wait(lock, [this] {
                                return !frame_queue.empty() || should_quit;
                        });
                        if (should_quit) {
@@ -599,7 +599,7 @@ void VideoStream::encode_thread_func()
                        if (output_fast_forward) {
                                aborted = frame_queue.empty() || frame_queue.front().local_pts != frame_start;
                        } else {
-                               aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start]{
+                               aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start] {
                                        return frame_queue.empty() || frame_queue.front().local_pts != frame_start;
                                });
                        }
index 05bd7a7b9453c46228ce66eb08362223ddeb40c3..906cd77af9224056e6524c3a01a7c91ab0e061bf 100644 (file)
@@ -11,8 +11,8 @@ extern "C" {
 
 #include "frame_on_disk.h"
 #include "jpeg_frame_view.h"
-#include "shared/ref_counted_gl_sync.h"
 #include "queue_spot_holder.h"
+#include "shared/ref_counted_gl_sync.h"
 
 #include <atomic>
 #include <chrono>
@@ -54,11 +54,11 @@ public:
                                  FrameOnDisk frame1, FrameOnDisk frame2,
                                  float fade_alpha);
        void schedule_interpolated_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
-                                 std::function<void(std::shared_ptr<Frame>)> &&display_func,
-                                 QueueSpotHolder &&queue_spot_holder,
-                                 FrameOnDisk frame1, FrameOnDisk frame2,
-                                 float alpha, FrameOnDisk secondary_frame = {},  // Empty = no secondary (fade) frame.
-                                 float fade_alpha = 0.0f);
+                                        std::function<void(std::shared_ptr<Frame>)> &&display_func,
+                                        QueueSpotHolder &&queue_spot_holder,
+                                        FrameOnDisk frame1, FrameOnDisk frame2,
+                                        float alpha, FrameOnDisk secondary_frame = {},  // Empty = no secondary (fade) frame.
+                                        float fade_alpha = 0.0f);
        void schedule_refresh_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
                                    std::function<void()> &&display_func,
                                    QueueSpotHolder &&queue_spot_holder);
@@ -68,7 +68,7 @@ private:
 
        void encode_thread_func();
        std::thread encode_thread;
-       std::atomic<bool> should_quit{false};
+       std::atomic<bool> should_quit{ false };
 
        static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
        int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
@@ -96,7 +96,7 @@ private:
        static constexpr size_t num_interpolate_slots = 15;  // Should be larger than Player::max_queued_frames, or we risk mass-dropping frames.
 
        struct IFRReleaser {
-               void operator() (InterpolatedFrameResources *ifr) const
+               void operator()(InterpolatedFrameResources *ifr) const
                {
                        if (ifr != nullptr) {
                                std::lock_guard<std::mutex> lock(ifr->owner->queue_lock);
index 689de5c5afb4df9e55eed8e5014a3dfcf53d6827..ef402a57d3926a974317db8e3b0bb52faafc1f7a 100644 (file)
@@ -84,10 +84,10 @@ YCbCrConverter::YCbCrConverter(YCbCrConverter::OutputMode output_mode, ResourceP
                        fade_chain.chain.reset(new EffectChain(global_flags.width, global_flags.height, resource_pool));
                        fade_chain.input[0] = (movit::YCbCrInput *)fade_chain.chain->add_input(
                                new YCbCrInput(inout_format, ycbcr_format, global_flags.width, global_flags.height,
-                                       first_input_is_semiplanar ? YCBCR_INPUT_SPLIT_Y_AND_CBCR : YCBCR_INPUT_PLANAR));
+                                              first_input_is_semiplanar ? YCBCR_INPUT_SPLIT_Y_AND_CBCR : YCBCR_INPUT_PLANAR));
                        fade_chain.input[1] = (movit::YCbCrInput *)fade_chain.chain->add_input(
                                new YCbCrInput(inout_format, ycbcr_format, global_flags.width, global_flags.height,
-                                       second_input_is_semiplanar ? YCBCR_INPUT_SPLIT_Y_AND_CBCR : YCBCR_INPUT_PLANAR));
+                                              second_input_is_semiplanar ? YCBCR_INPUT_SPLIT_Y_AND_CBCR : YCBCR_INPUT_PLANAR));
                        fade_chain.mix_effect = (movit::MixEffect *)fade_chain.chain->add_effect(
                                new MixEffect, fade_chain.input[0], fade_chain.input[1]);
                        setup_outputs(output_mode, inout_format, ycbcr_output_format, fade_chain.chain.get());
@@ -105,12 +105,12 @@ YCbCrConverter::YCbCrConverter(YCbCrConverter::OutputMode output_mode, ResourceP
                ycbcr_format.chroma_subsampling_x = 1;
                fade_chain.input[0] = (movit::YCbCrInput *)fade_chain.chain->add_input(
                        new YCbCrInput(inout_format, ycbcr_format, global_flags.width, global_flags.height,
-                               YCBCR_INPUT_INTERLEAVED));
+                                      YCBCR_INPUT_INTERLEAVED));
 
                ycbcr_format.chroma_subsampling_x = 2;
                fade_chain.input[1] = (movit::YCbCrInput *)fade_chain.chain->add_input(
                        new YCbCrInput(inout_format, ycbcr_format, global_flags.width, global_flags.height,
-                               second_input_is_semiplanar ? YCBCR_INPUT_SPLIT_Y_AND_CBCR : YCBCR_INPUT_PLANAR));
+                                      second_input_is_semiplanar ? YCBCR_INPUT_SPLIT_Y_AND_CBCR : YCBCR_INPUT_PLANAR));
 
                fade_chain.mix_effect = (movit::MixEffect *)fade_chain.chain->add_effect(
                        new MixEffect, fade_chain.input[0], fade_chain.input[1]);