git.sesse.net Git - nageru/commitdiff
Make Futatabi copy Exif data for interpolated frames.
author Steinar H. Gunderson <sgunderson@bigfoot.com>
Tue, 11 Feb 2020 17:15:24 +0000 (18:15 +0100)
committer Steinar H. Gunderson <sgunderson@bigfoot.com>
Tue, 11 Feb 2020 17:16:31 +0000 (18:16 +0100)
This means that interpolated frames also get the right white balance,
so now only faded frames get none.

futatabi/jpeg_frame.h
futatabi/jpeg_frame_view.cpp
futatabi/vaapi_jpeg_decoder.cpp
futatabi/video_stream.cpp
futatabi/video_stream.h

futatabi/jpeg_frame.h
index edc7381663e0f60ab4909b200c7d495c595d1b64..6fd0d4b9d05cfb685495ae8e86287b61efb09f4f 100644 (file)
@@ -2,6 +2,7 @@
 #define _JPEG_FRAME_H 1
 
 #include <memory>
+#include <string>
 
 struct Frame {
        bool is_semiplanar = false;
@@ -11,6 +12,7 @@ struct Frame {
        unsigned width, height;
        unsigned chroma_subsampling_x, chroma_subsampling_y;
        unsigned pitch_y, pitch_chroma;
+       std::string exif_data;
 };
 
 #endif  // !defined(_JPEG_FRAME_H)
futatabi/jpeg_frame_view.cpp
index 943b3e15ac5858886472c1866371debc9e6eb733..d471634932da364c3e4a80229948b351511c6913 100644 (file)
@@ -109,6 +109,8 @@ shared_ptr<Frame> decode_jpeg(const string &jpeg)
                return get_black_frame();
        }
 
+       jpeg_save_markers(&dinfo, JPEG_APP0 + 1, 0xFFFF);
+
        if (dinfo.num_components != 3) {
                fprintf(stderr, "Not a color JPEG. (%d components, Y=%dx%d, Cb=%dx%d, Cr=%dx%d)\n",
                        dinfo.num_components,
@@ -159,6 +161,14 @@ shared_ptr<Frame> decode_jpeg(const string &jpeg)
        frame->pitch_y = luma_width_blocks * DCTSIZE;
        frame->pitch_chroma = chroma_width_blocks * DCTSIZE;
 
+       if (dinfo.marker_list != nullptr &&
+           dinfo.marker_list->marker == JPEG_APP0 + 1 &&
+           dinfo.marker_list->data_length >= 4 &&
+           memcmp(dinfo.marker_list->data, "Exif", 4) == 0) {
+               frame->exif_data.assign(reinterpret_cast<char *>(dinfo.marker_list->data),
+                       dinfo.marker_list->data_length);
+       }
+
        if (!error_mgr.run([&dinfo, &frame, v_mcu_size, mcu_height_blocks] {
                    JSAMPROW yptr[v_mcu_size], cbptr[v_mcu_size], crptr[v_mcu_size];
                    JSAMPARRAY data[3] = { yptr, cbptr, crptr };
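
The same marker-saving pattern appears in both decoders. As a standalone illustration
(a minimal sketch, not the commit's code: it assumes a libjpeg decompress object
"dinfo" that has already been created and attached to its data source, needs
<jpeglib.h>, <cstring> and <string>, and walks the whole saved-marker list rather
than checking only the first entry as the hunks above and below do):

        // Ask libjpeg to retain APP1 markers (where Exif data lives), up to 64 kB each.
        // This must be set up before jpeg_read_header().
        jpeg_save_markers(&dinfo, JPEG_APP0 + 1, 0xFFFF);
        jpeg_read_header(&dinfo, /*require_image=*/TRUE);

        // After the header is read, the saved markers are available as a linked list.
        // Keep the payload of the first APP1 marker whose body starts with "Exif".
        std::string exif_data;
        for (jpeg_saved_marker_ptr m = dinfo.marker_list; m != nullptr; m = m->next) {
                if (m->marker == JPEG_APP0 + 1 && m->data_length >= 4 &&
                    memcmp(m->data, "Exif", 4) == 0) {
                        exif_data.assign(reinterpret_cast<const char *>(m->data), m->data_length);
                        break;
                }
        }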
futatabi/vaapi_jpeg_decoder.cpp
index d18a8735c11a23853ea6109b340c031dfee2a19c..f34654d508504434e81fbcc816e12b3a8aba95a5 100644 (file)
@@ -337,6 +337,8 @@ shared_ptr<Frame> decode_jpeg_vaapi(const string &jpeg)
        }
        JPEGDestroyer destroy_dinfo(&dinfo);
 
+       jpeg_save_markers(&dinfo, JPEG_APP0 + 1, 0xFFFF);
+
        jpeg_mem_src(&dinfo, reinterpret_cast<const unsigned char *>(jpeg.data()), jpeg.size());
        if (!error_mgr.run([&dinfo] { jpeg_read_header(&dinfo, true); })) {
                return nullptr;
@@ -566,6 +568,14 @@ shared_ptr<Frame> decode_jpeg_vaapi(const string &jpeg)
        frame->pitch_y = dinfo.image_width;
        frame->pitch_chroma = dinfo.image_width / 2;
 
+       if (dinfo.marker_list != nullptr &&
+           dinfo.marker_list->marker == JPEG_APP0 + 1 &&
+           dinfo.marker_list->data_length >= 4 &&
+           memcmp(dinfo.marker_list->data, "Exif", 4) == 0) {
+               frame->exif_data.assign(reinterpret_cast<char *>(dinfo.marker_list->data),
+                       dinfo.marker_list->data_length);
+       }
+
        va_status = vaUnmapBuffer(va_dpy->va_dpy, resources.image.buf);
        CHECK_VASTATUS_RET(va_status, "vaUnmapBuffer");
 
futatabi/video_stream.cpp
index 3cd56a022401ac8cec245d8ac29f36ebb35c1894..9a120b51344a70b6b4744519ee07f328c50f7b3b 100644 (file)
@@ -79,7 +79,7 @@ struct VectorDestinationManager {
 };
 static_assert(std::is_standard_layout<VectorDestinationManager>::value, "");
 
-string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height)
+string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height, const string exif_data)
 {
        VectorDestinationManager dest;
 
@@ -112,6 +112,10 @@ string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t
        // (and nothing else).
        jpeg_write_marker(&cinfo, JPEG_COM, (const JOCTET *)"CS=ITU601", strlen("CS=ITU601"));
 
+       if (!exif_data.empty()) {
+               jpeg_write_marker(&cinfo, JPEG_APP0 + 1, (const JOCTET *)exif_data.data(), exif_data.size());
+       }
+
        JSAMPROW yptr[8], cbptr[8], crptr[8];
        JSAMPARRAY data[3] = { yptr, cbptr, crptr };
        for (unsigned y = 0; y < height; y += 8) {
@@ -234,7 +238,7 @@ VideoStream::VideoStream(AVFormatContext *file_avctx)
        unique_ptr<uint8_t[]> cb_or_cr(new uint8_t[(global_flags.width / 2) * global_flags.height]);
        memset(y.get(), 16, global_flags.width * global_flags.height);
        memset(cb_or_cr.get(), 128, (global_flags.width / 2) * global_flags.height);
-       last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), global_flags.width, global_flags.height);
+       last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), global_flags.width, global_flags.height, /*exif_data=*/"");
 
        if (file_avctx != nullptr) {
                with_subtitles = Mux::WITHOUT_SUBTITLES;
@@ -481,6 +485,9 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
                bool did_decode;
                shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
                ycbcr_converter->prepare_chain_for_conversion(frame)->render_to_fbo(resources->input_fbos[frame_no], global_flags.width, global_flags.height);
+               if (frame_no == 1) {
+                       qf.exif_data = frame->exif_data;  // Use the white point from the last frame.
+               }
        }
 
        glGenerateTextureMipmap(resources->input_tex);
@@ -705,7 +712,7 @@ void VideoStream::encode_thread_func()
                        shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
 
                        // Now JPEG encode it, and send it on to the stream.
-                       string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
+                       string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, move(frame->exif_data));
 
                        AVPacket pkt;
                        av_init_packet(&pkt);
@@ -727,7 +734,7 @@ void VideoStream::encode_thread_func()
                        }
 
                        // Now JPEG encode it, and send it on to the stream.
-                       string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
+                       string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, move(qf.exif_data));
                        if (qf.flow_tex != 0) {
                                compute_flow->release_texture(qf.flow_tex);
                        }
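
On the encode side the stored payload is handed straight back to libjpeg, so the
re-encoded (interpolated) JPEG carries the same Exif block, and hence the same white
point, as its source frame. A minimal sketch of that call in isolation (assuming a
libjpeg compress object "cinfo"; note that jpeg_write_marker() must be called after
jpeg_start_compress() and before the first jpeg_write_scanlines()):

        // Re-emit the original Exif block unchanged as an APP1 marker.
        if (!exif_data.empty()) {
                jpeg_write_marker(&cinfo, JPEG_APP0 + 1,
                                  reinterpret_cast<const JOCTET *>(exif_data.data()),
                                  exif_data.size());
        }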
futatabi/video_stream.h
index 12e8ed123a532db2ae3726790356dd43ecd816b2..a6215e963e384829c5ca8c2c8753dffb6d28fa57 100644 (file)
@@ -142,6 +142,7 @@ private:
                std::function<void(std::shared_ptr<Frame>)> display_decoded_func;  // Same, except for INTERPOLATED and FADED_INTERPOLATED.
 
                std::string subtitle;  // Blank for none.
+               std::string exif_data;  // Blank for none.
 
                // Audio, in stereo interleaved 32-bit PCM. If empty and not of type SILENCE, one frame's worth of silence samples
                // is synthesized.