cbcr_width != userdata->last_cbcr_width[field] ||
cbcr_height != userdata->last_cbcr_height[field];
const bool recreate_v210_texture =
- global_flags.ten_bit_input &&
+ global_flags.bit_depth > 8 &&
(first || v210_width != userdata->last_v210_width[field] || height != userdata->last_height[field]);
if (recreate_main_texture) {
ycbcr_format.luma_coefficients = YCBCR_REC_601;
}
ycbcr_format.full_range = false;
- ycbcr_format.num_levels = 1 << global_flags.x264_bit_depth;
+ ycbcr_format.num_levels = 1 << global_flags.bit_depth;
ycbcr_format.cb_x_position = 0.0f;
ycbcr_format.cr_x_position = 0.0f;
ycbcr_format.cb_y_position = 0.5f;
// Display chain; shows the live output produced by the main chain (or rather, a copy of it).
display_chain.reset(new EffectChain(global_flags.width, global_flags.height, resource_pool.get()));
check_error();
- GLenum type = global_flags.x264_bit_depth > 8 ? GL_UNSIGNED_SHORT : GL_UNSIGNED_BYTE;
+ GLenum type = global_flags.bit_depth > 8 ? GL_UNSIGNED_SHORT : GL_UNSIGNED_BYTE;
display_input = new YCbCrInput(inout_format, ycbcr_format, global_flags.width, global_flags.height, YCBCR_INPUT_SPLIT_Y_AND_CBCR, type);
display_chain->add_input(display_input);
display_chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
chroma_subsampler.reset(new ChromaSubsampler(resource_pool.get()));
- if (global_flags.ten_bit_input) {
+ if (global_flags.bit_depth > 8) {
if (!v210Converter::has_hardware_support()) {
fprintf(stderr, "ERROR: --ten-bit-input requires support for OpenGL compute shaders\n");
fprintf(stderr, " (OpenGL 4.3, or GL_ARB_compute_shader + GL_ARB_shader_image_load_store).\n");
v210_converter->precompile_shader(3840);
v210_converter->precompile_shader(4096);
}
- if (global_flags.ten_bit_output) {
+ if (global_flags.bit_depth > 8) {
if (!v210Converter::has_hardware_support()) {
fprintf(stderr, "ERROR: --ten-bit-output requires support for OpenGL compute shaders\n");
fprintf(stderr, " (OpenGL 4.3, or GL_ARB_compute_shader + GL_ARB_shader_image_load_store).\n");
pixel_format = capture->get_current_pixel_format();
} else if (card_type == CardType::CEF_INPUT) {
pixel_format = PixelFormat_8BitBGRA;
- } else if (global_flags.ten_bit_input) {
+ } else if (global_flags.bit_depth > 8) {
pixel_format = PixelFormat_10BitYCbCr;
} else {
pixel_format = PixelFormat_8BitYCbCr;
if (is_active) {
card->capture->set_frame_callback(bind(&Mixer::bm_frame, this, card_index, _1, _2, _3, _4, _5, _6, _7));
if (card->frame_allocator == nullptr) {
- card->frame_allocator.reset(new PBOFrameAllocator(pixel_format, 8 << 20, global_flags.width, global_flags.height, card_index, mjpeg_encoder.get())); // 8 MB.
+ card->frame_allocator.reset(new PBOFrameAllocator(pixel_format, FRAME_SIZE, global_flags.width, global_flags.height, card_index, mjpeg_encoder.get()));
} else {
// The format could have changed, but we cannot reset the allocator
// and create a new one from scratch, since there may be allocated
// any old ones as they come back. This takes the mutex while
// allocating, but nothing should really be sending frames in there
// right now anyway (start_bm_capture() has not been called yet).
- card->frame_allocator->reconfigure(pixel_format, 8 << 20, global_flags.width, global_flags.height, card_index, mjpeg_encoder.get());
+ card->frame_allocator->reconfigure(pixel_format, FRAME_SIZE, global_flags.width, global_flags.height, card_index, mjpeg_encoder.get());
}
card->capture->set_video_frame_allocator(card->frame_allocator.get());
if (card->surface == nullptr) {
// (Could be nonintegral, but resampling will save us then.)
const int silence_samples = OUTPUT_FREQUENCY * video_format.frame_rate_den / video_format.frame_rate_nom;
- if (dropped_frames > MAX_FPS * 2) {
+ if (dropped_frames > TYPICAL_FPS * 2) {
fprintf(stderr, "%s lost more than two seconds (or time code jumping around; from 0x%04x to 0x%04x), resetting resampler\n",
description_for_card(card_index).c_str(), card->last_timecode, timecode);
audio_mixer->reset_resampler(device);
output_ycbcr_format.chroma_subsampling_y = 1;
output_ycbcr_format.luma_coefficients = ycbcr_output_coefficients;
output_ycbcr_format.full_range = false;
- output_ycbcr_format.num_levels = 1 << global_flags.x264_bit_depth;
+ output_ycbcr_format.num_levels = 1 << global_flags.bit_depth;
chain->change_ycbcr_output_format(output_ycbcr_format);
// Render main chain. If we're using zerocopy Quick Sync encoding
GLuint y_tex, cbcr_full_tex, cbcr_tex;
GLuint y_copy_tex, cbcr_copy_tex = 0;
GLuint y_display_tex, cbcr_display_tex;
- GLenum y_type = (global_flags.x264_bit_depth > 8) ? GL_R16 : GL_R8;
- GLenum cbcr_type = (global_flags.x264_bit_depth > 8) ? GL_RG16 : GL_RG8;
+ GLenum y_type = (global_flags.bit_depth > 8) ? GL_R16 : GL_R8;
+ GLenum cbcr_type = (global_flags.bit_depth > 8) ? GL_RG16 : GL_RG8;
const bool is_zerocopy = video_encoder->is_zerocopy();
if (is_zerocopy) {
cbcr_full_tex = resource_pool->create_2d_texture(cbcr_type, global_flags.width, global_flags.height);
addr.sin6_family = AF_INET6;
addr.sin6_port = htons(global_flags.srt_port);
- int err = srt_bind(sock, (sockaddr *)&addr, sizeof(addr));
+ // Clear IPV6ONLY before bind, so that this v6 socket presumably also
+ // accepts IPv4 clients via mapped addresses (dual-stack) — confirm
+ // against the SRT SRTO_IPV6ONLY documentation for this platform.
+ int zero = 0;
+ int err = srt_setsockopt(sock, /*level=*/0, SRTO_IPV6ONLY, &zero, sizeof(zero));
+ if (err != 0) {
+ fprintf(stderr, "srt_setsockopt(SRTO_IPV6ONLY): %s\n", srt_getlasterror_str());
+ abort();
+ }
+ err = srt_bind(sock, (sockaddr *)&addr, sizeof(addr));
if (err != 0) {
fprintf(stderr, "srt_bind: %s\n", srt_getlasterror_str());
abort();