return av_get_pix_fmt(best_format);
}
-YCbCrFormat decode_ycbcr_format(const AVPixFmtDescriptor *desc, const AVFrame *frame, bool is_mjpeg)
+YCbCrFormat decode_ycbcr_format(const AVPixFmtDescriptor *desc, const AVFrame *frame, bool is_mjpeg, AVColorSpace *last_colorspace, AVChromaLocation *last_chroma_location)
{
YCbCrFormat format;
AVColorSpace colorspace = frame->colorspace;
format.luma_coefficients = (frame->height >= 720 ? YCBCR_REC_709 : YCBCR_REC_601);
break;
default:
- fprintf(stderr, "Unknown Y'CbCr coefficient enum %d from FFmpeg; choosing Rec. 709.\n",
- colorspace);
+ if (colorspace != *last_colorspace) {
+ fprintf(stderr, "Unknown Y'CbCr coefficient enum %d from FFmpeg; choosing Rec. 709.\n",
+ colorspace);
+ }
format.luma_coefficients = YCBCR_REC_709;
break;
}
+ *last_colorspace = colorspace;
format.full_range = is_full_range(desc);
format.num_levels = 1 << desc->comp[0].depth;
format.cb_y_position = 1.0;
break;
default:
- fprintf(stderr, "Unknown chroma location coefficient enum %d from FFmpeg; choosing center.\n",
- frame->chroma_location);
+ if (frame->chroma_location != *last_chroma_location) {
+ fprintf(stderr, "Unknown chroma location coefficient enum %d from FFmpeg; choosing center.\n",
+ frame->chroma_location);
+ }
format.cb_x_position = 0.5;
format.cb_y_position = 0.5;
break;
}
+ *last_chroma_location = frame->chroma_location;
if (is_mjpeg && !format.full_range) {
// Limited-range MJPEG is only detected by FFmpeg whenever a special
dequeue_cleanup_callback();
}
swr_free(&resampler);
+#ifdef HAVE_SRT
+ if (srt_sock != -1) {
+ srt_close(srt_sock);
+ }
+#endif
}
void FFmpegCapture::configure_card()
} else {
last_modified = buf.st_mtim;
}
+ last_colorspace = static_cast<AVColorSpace>(-1);
+ last_chroma_location = static_cast<AVChromaLocation>(-1);
AVFormatContextWithCloser format_ctx;
if (srt_sock == -1) {
}
VideoFormat video_format = construct_video_format(frame.get(), video_timebase);
+ if (video_format.frame_rate_nom == 0 || video_format.frame_rate_den == 0) {
+ // Invalid frame rate; try constructing it from the previous frame length.
+ // (This is especially important if we are the master card, for SRT,
+ // since it affects audio. Not all senders have good timebases
+ // (e.g., Larix rounds first to timebase 1000 and then multiplies by
+ // 90 from there, it seems), but it's much better to have an oscillating
+ value than just locking at 60.)
+ if (last_pts != 0 && frame->pts > last_pts) {
+ int64_t pts_diff = frame->pts - last_pts;
+ video_format.frame_rate_nom = video_timebase.den;
+ video_format.frame_rate_den = video_timebase.num * pts_diff;
+ } else {
+ video_format.frame_rate_nom = 60;
+ video_format.frame_rate_den = 1;
+ }
+ }
UniqueFrame video_frame = make_video_frame(frame.get(), pathname, &error);
if (error) {
return false;
}
video_format.frame_rate_nom = video_timebase.den;
video_format.frame_rate_den = frame->pkt_duration * video_timebase.num;
- if (video_format.frame_rate_nom == 0 || video_format.frame_rate_den == 0) {
- // Invalid frame rate.
- video_format.frame_rate_nom = 60;
- video_format.frame_rate_den = 1;
- }
video_format.has_signal = true;
video_format.is_connected = true;
return video_format;
video_frame->len = (frame_width(frame) * 2) * frame_height(frame);
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(sws_dst_format);
- current_frame_ycbcr_format = decode_ycbcr_format(desc, frame, is_mjpeg);
+ current_frame_ycbcr_format = decode_ycbcr_format(desc, frame, is_mjpeg, &last_colorspace, &last_chroma_location);
} else {
assert(pixel_format == bmusb::PixelFormat_8BitYCbCrPlanar);
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(sws_dst_format);
video_frame->len = frame_width(frame) * frame_height(frame) + 2 * chroma_width * chroma_height;
- current_frame_ycbcr_format = decode_ycbcr_format(desc, frame, is_mjpeg);
+ current_frame_ycbcr_format = decode_ycbcr_format(desc, frame, is_mjpeg, &last_colorspace, &last_chroma_location);
}
sws_scale(sws_ctx.get(), frame->data, frame->linesize, 0, frame->height, pic_data, linesizes);