X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=futatabi%2Fvideo_stream.cpp;h=d7d9fd04bcc97b0a8f3a56dbb467daf70996f49c;hb=e15251d2787cb8e6b677af801de6180e55171763;hp=9a120b51344a70b6b4744519ee07f328c50f7b3b;hpb=32b87c91cf51d730ff5abc8347884219918fad66;p=nageru

diff --git a/futatabi/video_stream.cpp b/futatabi/video_stream.cpp
index 9a120b5..d7d9fd0 100644
--- a/futatabi/video_stream.cpp
+++ b/futatabi/video_stream.cpp
@@ -6,6 +6,7 @@ extern "C" {
 }
 
 #include "chroma_subsampler.h"
+#include "exif_parser.h"
 #include "flags.h"
 #include "flow.h"
 #include "jpeg_frame_view.h"
@@ -22,6 +23,7 @@ extern "C" {
 #include
 #include
 
+using namespace movit;
 using namespace std;
 using namespace std::chrono;
 
@@ -526,12 +528,16 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
 		shared_ptr<Frame> frame2 = decode_jpeg_with_cache(secondary_frame, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
 
 		// Then fade against it, putting it into the fade Y' and CbCr textures.
-		ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
+		RGBTriplet neutral_color = get_neutral_color(qf.exif_data);
+		ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, neutral_color, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
 
 		// Subsample and split Cb/Cr.
 		chroma_subsampler->subsample_chroma(resources->fade_cbcr_output_tex, global_flags.width, global_flags.height, resources->cb_tex, resources->cr_tex);
 
 		interpolate_no_split->release_texture(qf.output_tex);
+
+		// We already applied the white balance, so don't have the client redo it.
+		qf.exif_data.clear();
 	} else {
 		tie(qf.output_tex, qf.cbcr_tex) = interpolate->exec(resources->input_tex, resources->gray_tex, flow_tex, global_flags.width, global_flags.height, alpha);
 		check_error();
@@ -710,9 +716,10 @@ void VideoStream::encode_thread_func()
 		glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
 
 		shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
+		assert(frame->exif_data.empty());
 
 		// Now JPEG encode it, and send it on to the stream.
-		string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, move(frame->exif_data));
+		string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, /*exif_data=*/"");
 
 		AVPacket pkt;
 		av_init_packet(&pkt);
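Note (not part of the commit): the faded path clears qf.exif_data because a Movit-based viewer is expected to look up the Exif-derived neutral color itself and apply white balance on display; once the server has baked the correction into the faded frame via prepare_chain_for_fade_from_texture(), the client must see an empty Exif blob so it does not correct twice. Below is a minimal sketch of such a client-side hook, assuming get_neutral_color() from exif_parser.h takes the Exif blob as a string and returns pure white for empty input, and that the viewer wires the result into Movit's WhiteBalanceEffect; the helper name add_white_balance and the exact wiring are assumptions, not Futatabi's actual jpeg_frame_view code.

// Sketch only: how a Movit-based viewer might consume the Exif-derived
// neutral color.  add_white_balance() is a hypothetical helper, not the
// actual Futatabi client code.
#include <assert.h>
#include <string>

#include <movit/effect_chain.h>
#include <movit/white_balance_effect.h>

#include "exif_parser.h"  // get_neutral_color(), as used in the diff above.

void add_white_balance(movit::EffectChain *chain, const std::string &exif_data)
{
	// For frames where the server already baked in the white balance,
	// exif_data has been cleared, so this is assumed to return pure
	// white (1, 1, 1) and the correction becomes a no-op.
	movit::RGBTriplet neutral = get_neutral_color(exif_data);

	movit::WhiteBalanceEffect *wb = new movit::WhiteBalanceEffect;
	bool ok = wb->set_vec3("neutral_color", (float *)&neutral);
	assert(ok);
	chain->add_effect(wb);  // The chain takes ownership of the effect.
}

Passing neutral_color into prepare_chain_for_fade_from_texture() on the server side, as the diff does, applies the same correction during the fade itself, which is why the Exif data can then be dropped and why encode_thread_func() can assert it is empty before JPEG-encoding.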