X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=futatabi%2Fvideo_stream.cpp;h=d7d9fd04bcc97b0a8f3a56dbb467daf70996f49c;hb=f9024d141398e69e7b4011becd3ebbe37eaa1776;hp=3cd56a022401ac8cec245d8ac29f36ebb35c1894;hpb=058330fb422a66d22c9b4e213afeec3ade903a0d;p=nageru

diff --git a/futatabi/video_stream.cpp b/futatabi/video_stream.cpp
index 3cd56a0..d7d9fd0 100644
--- a/futatabi/video_stream.cpp
+++ b/futatabi/video_stream.cpp
@@ -6,6 +6,7 @@ extern "C" {
 }
 
 #include "chroma_subsampler.h"
+#include "exif_parser.h"
 #include "flags.h"
 #include "flow.h"
 #include "jpeg_frame_view.h"
@@ -22,6 +23,7 @@ extern "C" {
 #include <epoxy/glx.h>
 #include <unistd.h>
 
+using namespace movit;
 using namespace std;
 using namespace std::chrono;
 
@@ -79,7 +81,7 @@ struct VectorDestinationManager {
 };
 static_assert(std::is_standard_layout<VectorDestinationManager>::value, "");
 
-string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height)
+string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height, const string exif_data)
 {
 	VectorDestinationManager dest;
 
@@ -112,6 +114,10 @@ string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t
 	// (and nothing else).
 	jpeg_write_marker(&cinfo, JPEG_COM, (const JOCTET *)"CS=ITU601", strlen("CS=ITU601"));
 
+	if (!exif_data.empty()) {
+		jpeg_write_marker(&cinfo, JPEG_APP0 + 1, (const JOCTET *)exif_data.data(), exif_data.size());
+	}
+
 	JSAMPROW yptr[8], cbptr[8], crptr[8];
 	JSAMPARRAY data[3] = { yptr, cbptr, crptr };
 	for (unsigned y = 0; y < height; y += 8) {
@@ -234,7 +240,7 @@ VideoStream::VideoStream(AVFormatContext *file_avctx)
 	unique_ptr<uint8_t[]> cb_or_cr(new uint8_t[(global_flags.width / 2) * global_flags.height]);
 	memset(y.get(), 16, global_flags.width * global_flags.height);
 	memset(cb_or_cr.get(), 128, (global_flags.width / 2) * global_flags.height);
-	last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), global_flags.width, global_flags.height);
+	last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), global_flags.width, global_flags.height, /*exif_data=*/"");
 
 	if (file_avctx != nullptr) {
 		with_subtitles = Mux::WITHOUT_SUBTITLES;
@@ -481,6 +487,9 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
 		bool did_decode;
 		shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
 		ycbcr_converter->prepare_chain_for_conversion(frame)->render_to_fbo(resources->input_fbos[frame_no], global_flags.width, global_flags.height);
+		if (frame_no == 1) {
+			qf.exif_data = frame->exif_data;  // Use the white point from the last frame.
+		}
 	}
 
 	glGenerateTextureMipmap(resources->input_tex);
@@ -519,12 +528,16 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
 		shared_ptr<Frame> frame2 = decode_jpeg_with_cache(secondary_frame, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
 
 		// Then fade against it, putting it into the fade Y' and CbCr textures.
-		ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
+		RGBTriplet neutral_color = get_neutral_color(qf.exif_data);
+		ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, neutral_color, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
 
 		// Subsample and split Cb/Cr.
 		chroma_subsampler->subsample_chroma(resources->fade_cbcr_output_tex, global_flags.width, global_flags.height, resources->cb_tex, resources->cr_tex);
 
 		interpolate_no_split->release_texture(qf.output_tex);
+
+		// We already applied the white balance, so don't have the client redo it.
+		qf.exif_data.clear();
 	} else {
 		tie(qf.output_tex, qf.cbcr_tex) = interpolate->exec(resources->input_tex, resources->gray_tex, flow_tex, global_flags.width, global_flags.height, alpha);
 		check_error();
@@ -703,9 +716,10 @@ void VideoStream::encode_thread_func()
 			glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
 
 			shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
+			assert(frame->exif_data.empty());
 
 			// Now JPEG encode it, and send it on to the stream.
-			string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
+			string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, /*exif_data=*/"");
 
 			AVPacket pkt;
 			av_init_packet(&pkt);
@@ -727,7 +741,7 @@ void VideoStream::encode_thread_func()
 			}
 
 			// Now JPEG encode it, and send it on to the stream.
-			string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
+			string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, move(qf.exif_data));
 			if (qf.flow_tex != 0) {
 				compute_flow->release_texture(qf.flow_tex);
 			}
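
Note on the one libjpeg call this patch leans on: jpeg_write_marker() with JPEG_APP0 + 1 emits an APP1 segment, which is where Exif payloads live; for readers to recognize the blob as Exif, it must begin with the "Exif\0\0" identifier followed by a TIFF header, and it must be written after jpeg_start_compress() but before the first scanline. Below is a minimal, self-contained sketch of that marker write in isolation, assuming plain libjpeg. write_jpeg_with_exif, its grayscale input, and the exif_blob argument are hypothetical illustration, not code from the patch:

#include <cstdio>
#include <cstdint>
#include <string>
#include <jpeglib.h>

void write_jpeg_with_exif(const char *filename, const std::string &exif_blob,
                          const uint8_t *gray, unsigned width, unsigned height)
{
	jpeg_compress_struct cinfo;
	jpeg_error_mgr jerr;
	cinfo.err = jpeg_std_error(&jerr);
	jpeg_create_compress(&cinfo);

	FILE *fp = fopen(filename, "wb");
	if (fp == nullptr) {
		jpeg_destroy_compress(&cinfo);
		return;
	}
	jpeg_stdio_dest(&cinfo, fp);

	cinfo.image_width = width;
	cinfo.image_height = height;
	cinfo.input_components = 1;
	cinfo.in_color_space = JCS_GRAYSCALE;
	jpeg_set_defaults(&cinfo);
	jpeg_start_compress(&cinfo, /*write_all_tables=*/TRUE);

	// Markers go after jpeg_start_compress() and before the first scanline;
	// JPEG_APP0 + 1 selects the APP1 segment, where Exif data lives.
	if (!exif_blob.empty()) {
		jpeg_write_marker(&cinfo, JPEG_APP0 + 1,
		                  (const JOCTET *)exif_blob.data(), exif_blob.size());
	}

	while (cinfo.next_scanline < cinfo.image_height) {
		JSAMPROW row = const_cast<uint8_t *>(gray) + cinfo.next_scanline * width;
		jpeg_write_scanlines(&cinfo, &row, 1);
	}

	jpeg_finish_compress(&cinfo);
	jpeg_destroy_compress(&cinfo);
	fclose(fp);
}

The placement mirrors the patch, which emits the Exif marker immediately after its existing JPEG_COM color-space marker: interpolated frames pass the first frame's Exif blob through to encode_jpeg() so clients can still apply the white point, while the fade path bakes the correction into the GPU chain (via get_neutral_color) and clears qf.exif_data so clients do not apply it twice.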