}
#include "chroma_subsampler.h"
+#include "exif_parser.h"
#include "flags.h"
#include "flow.h"
#include "jpeg_frame_view.h"
#include "player.h"
#include "shared/context.h"
#include "shared/httpd.h"
+#include "shared/metrics.h"
#include "shared/shared_defs.h"
#include "shared/mux.h"
#include "util.h"
#include <jpeglib.h>
#include <unistd.h>
+using namespace movit;
using namespace std;
using namespace std::chrono;
+namespace {
+
+// Process-wide timing metrics for the output video stream.
+// Initialization/registration happens exactly once, guarded by
+// call_once(video_metrics_inited, ...) in the VideoStream constructor.
+once_flag video_metrics_inited;
+// Time spent JPEG-encoding one output frame (measured in encode_jpeg()).
+Summary metric_jpeg_encode_time_seconds;
+// Wall time from GL fence creation (qf.fence_created) until the CPU
+// observed the finished fade/interpolation result, respectively.
+Summary metric_fade_latency_seconds;
+Summary metric_interpolation_latency_seconds;
+// Time spent blocked in glClientWaitSync() waiting for the GPU,
+// for the fade and interpolation paths respectively.
+Summary metric_fade_fence_wait_time_seconds;
+Summary metric_interpolation_fence_wait_time_seconds;
+
+} // namespace
+
extern HTTPD *global_httpd;
struct VectorDestinationManager {
};
static_assert(std::is_standard_layout<VectorDestinationManager>::value, "");
-string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height)
+string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height, const string exif_data)
{
+ steady_clock::time_point start = steady_clock::now();
VectorDestinationManager dest;
jpeg_compress_struct cinfo;
// (and nothing else).
jpeg_write_marker(&cinfo, JPEG_COM, (const JOCTET *)"CS=ITU601", strlen("CS=ITU601"));
+ if (!exif_data.empty()) {
+ jpeg_write_marker(&cinfo, JPEG_APP0 + 1, (const JOCTET *)exif_data.data(), exif_data.size());
+ }
+
JSAMPROW yptr[8], cbptr[8], crptr[8];
JSAMPARRAY data[3] = { yptr, cbptr, crptr };
for (unsigned y = 0; y < height; y += 8) {
jpeg_finish_compress(&cinfo);
jpeg_destroy_compress(&cinfo);
+ steady_clock::time_point stop = steady_clock::now();
+ metric_jpeg_encode_time_seconds.count_event(duration<double>(stop - start).count());
+
return move(dest.dest);
}
VideoStream::VideoStream(AVFormatContext *file_avctx)
: avctx(file_avctx), output_fast_forward(file_avctx != nullptr)
{
+ call_once(video_metrics_inited, [] {
+ vector<double> quantiles{ 0.01, 0.1, 0.25, 0.5, 0.75, 0.9, 0.99 };
+ metric_jpeg_encode_time_seconds.init(quantiles, 60.0);
+ global_metrics.add("jpeg_encode_time_seconds", &metric_jpeg_encode_time_seconds);
+ metric_fade_fence_wait_time_seconds.init(quantiles, 60.0);
+ global_metrics.add("fade_fence_wait_time_seconds", &metric_fade_fence_wait_time_seconds);
+ metric_interpolation_fence_wait_time_seconds.init(quantiles, 60.0);
+ global_metrics.add("interpolation_fence_wait_time_seconds", &metric_interpolation_fence_wait_time_seconds);
+ metric_fade_latency_seconds.init(quantiles, 60.0);
+ global_metrics.add("fade_latency_seconds", &metric_fade_latency_seconds);
+ metric_interpolation_latency_seconds.init(quantiles, 60.0);
+ global_metrics.add("interpolation_latency_seconds", &metric_interpolation_latency_seconds);
+ });
+
ycbcr_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_DUAL_YCBCR, /*resource_pool=*/nullptr));
ycbcr_semiplanar_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_SEMIPLANAR, /*resource_pool=*/nullptr));
unique_ptr<uint8_t[]> cb_or_cr(new uint8_t[(global_flags.width / 2) * global_flags.height]);
memset(y.get(), 16, global_flags.width * global_flags.height);
memset(cb_or_cr.get(), 128, (global_flags.width / 2) * global_flags.height);
- last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), global_flags.width, global_flags.height);
+ last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), global_flags.width, global_flags.height, /*exif_data=*/"");
if (file_avctx != nullptr) {
with_subtitles = Mux::WITHOUT_SUBTITLES;
// Set a fence we can wait for to make sure the CPU sees the read.
glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
check_error();
+ qf.fence_created = steady_clock::now();
qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
check_error();
qf.resources = move(resources);
bool did_decode;
shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
ycbcr_converter->prepare_chain_for_conversion(frame)->render_to_fbo(resources->input_fbos[frame_no], global_flags.width, global_flags.height);
+ if (frame_no == 1) {
+ qf.exif_data = frame->exif_data; // Use the white point from the last frame.
+ }
}
glGenerateTextureMipmap(resources->input_tex);
shared_ptr<Frame> frame2 = decode_jpeg_with_cache(secondary_frame, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
// Then fade against it, putting it into the fade Y' and CbCr textures.
- ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
+ RGBTriplet neutral_color = get_neutral_color(qf.exif_data);
+ ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, neutral_color, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
// Subsample and split Cb/Cr.
chroma_subsampler->subsample_chroma(resources->fade_cbcr_output_tex, global_flags.width, global_flags.height, resources->cb_tex, resources->cr_tex);
interpolate_no_split->release_texture(qf.output_tex);
+
+ // We already applied the white balance, so don't have the client redo it.
+ qf.exif_data.clear();
} else {
tie(qf.output_tex, qf.cbcr_tex) = interpolate->exec(resources->input_tex, resources->gray_tex, flow_tex, global_flags.width, global_flags.height, alpha);
check_error();
// Set a fence we can wait for to make sure the CPU sees the read.
glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
check_error();
+ qf.fence_created = steady_clock::now();
qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
check_error();
qf.resources = move(resources);
add_audio_or_silence(qf);
} else if (qf.type == QueuedFrame::FADED) {
+ steady_clock::time_point start = steady_clock::now();
glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
+ steady_clock::time_point stop = steady_clock::now();
+ metric_fade_fence_wait_time_seconds.count_event(duration<double>(stop - start).count());
+ metric_fade_latency_seconds.count_event(duration<double>(stop - qf.fence_created).count());
shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
+ assert(frame->exif_data.empty());
// Now JPEG encode it, and send it on to the stream.
- string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
+ string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, /*exif_data=*/"");
AVPacket pkt;
av_init_packet(&pkt);
add_audio_or_silence(qf);
} else if (qf.type == QueuedFrame::INTERPOLATED || qf.type == QueuedFrame::FADED_INTERPOLATED) {
+ steady_clock::time_point start = steady_clock::now();
glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
+ steady_clock::time_point stop = steady_clock::now();
+ metric_interpolation_fence_wait_time_seconds.count_event(duration<double>(stop - start).count());
+ metric_interpolation_latency_seconds.count_event(duration<double>(stop - qf.fence_created).count());
// Send it on to display.
shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
}
// Now JPEG encode it, and send it on to the stream.
- string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
+ string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, move(qf.exif_data));
if (qf.flow_tex != 0) {
compute_flow->release_texture(qf.flow_tex);
}
type = AVIO_DATA_MARKER_SYNC_POINT;
}
+ HTTPD::StreamID stream_id{ HTTPD::MAIN_STREAM, 0 };
if (type == AVIO_DATA_MARKER_HEADER) {
stream_mux_header.append((char *)buf, buf_size);
- global_httpd->set_header(HTTPD::MAIN_STREAM, stream_mux_header);
+ global_httpd->set_header(stream_id, stream_mux_header);
} else {
- global_httpd->add_data(HTTPD::MAIN_STREAM, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
+ global_httpd->add_data(stream_id, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
}
return buf_size;
}