return value;
}
-static void set_luma_transfer( struct SwsContext *context, int is_709, int no_scale )
+static void set_luma_transfer( struct SwsContext *context, int yuv_std, int no_scale )
{
int *coefficients;
int range;
// Don't change these from defaults unless explicitly told to.
if ( no_scale )
range = 1;
- if ( is_709 )
+ if ( yuv_std == 709 )
coefficients = sws_getCoefficients( SWS_CS_ITU709 );
+ else if ( yuv_std == 240 )
+ coefficients = sws_getCoefficients( SWS_CS_SMPTE240M );
sws_setColorspaceDetails( context, coefficients, range, coefficients, range,
brightness, contrast, saturation );
}
}
static void av_convert_image( uint8_t *out, uint8_t *in, int out_fmt, int in_fmt,
- int width, int height, int is_709, int no_scale )
+ int width, int height, int yuv_std, int no_scale )
{
AVPicture input;
AVPicture output;
#ifdef SWSCALE
struct SwsContext *context = sws_getContext( width, height, in_fmt,
width, height, out_fmt, flags, NULL, NULL, NULL);
- set_luma_transfer( context, is_709, no_scale );
+ set_luma_transfer( context, yuv_std, no_scale );
sws_scale( context, input.data, input.linesize, 0, height,
output.data, output.linesize);
sws_freeContext( context );
}
// Update the output
- int is_709 = output_format == mlt_image_yuv422 && width * height > 750000;
+ int yuv_std = mlt_properties_get_int( properties, "yuv_std" );
int no_scale_luma = 0;
if ( *format == mlt_image_yuv422 && mlt_properties_get_int( properties, "skip_luma_scale" )
&& ( output_format == mlt_image_rgb24 || output_format == mlt_image_rgb24a ) )
no_scale_luma = 1;
mlt_properties_set( properties, "skip_luma_scale", NULL );
}
- av_convert_image( output, *image, out_fmt, in_fmt, width, height, is_709, no_scale_luma );
+ av_convert_image( output, *image, out_fmt, in_fmt, width, height, yuv_std, no_scale_luma );
*image = output;
*format = output_format;
mlt_properties_set_data( properties, "image", output, size, mlt_pool_release, NULL );
unsigned int invalid_pts_counter;
double resample_factor;
mlt_cache image_cache;
+ int yuv_std;
#ifdef VDPAU
struct
{
mlt_properties_set( meta_media, key, avcodec_get_pix_fmt_name( codec_context->pix_fmt ) );
snprintf( key, sizeof(key), "meta.media.%d.codec.sample_aspect_ratio", i );
mlt_properties_set_double( meta_media, key, av_q2d( codec_context->sample_aspect_ratio ) );
+ snprintf( key, sizeof(key), "meta.media.%d.codec.colorspace", i );
+ mlt_properties_set_int( meta_media, key, codec_context->colorspace );
break;
case CODEC_TYPE_AUDIO:
if ( *audio_index < 0 )
this->resample_factor = 1.0;
}
-#ifdef SWSCALE
-static void set_luma_transfer( struct SwsContext *context, int is_709, int no_scale )
+static void set_luma_transfer( struct SwsContext *context, int yuv_std, int no_scale )
{
int *coefficients;
int range;
// Don't change these from defaults unless explicitly told to.
if ( no_scale )
range = 1;
- if ( is_709 )
+ if ( yuv_std == 709 )
coefficients = sws_getCoefficients( SWS_CS_ITU709 );
+ else if ( yuv_std == 240 )
+ coefficients = sws_getCoefficients( SWS_CS_SMPTE240M );
sws_setColorspaceDetails( context, coefficients, range, coefficients, range,
brightness, contrast, saturation );
}
}
-#endif
-static inline void convert_image( AVFrame *frame, uint8_t *buffer, int pix_fmt, mlt_image_format *format, int width, int height )
+static inline void convert_image( AVFrame *frame, uint8_t *buffer, int pix_fmt,
+ mlt_image_format *format, int width, int height, int yuv_std )
{
#ifdef SWSCALE
- int is_709 = *format == mlt_image_yuv422 && width * height > 750000;
int luma = 0;
int flags = SWS_BILINEAR | SWS_ACCURATE_RND;
width, height, PIX_FMT_RGBA, flags, NULL, NULL, NULL);
AVPicture output;
avpicture_fill( &output, buffer, PIX_FMT_RGBA, width, height );
- set_luma_transfer( context, is_709, luma );
+ set_luma_transfer( context, yuv_std, luma );
sws_scale( context, frame->data, frame->linesize, 0, height,
output.data, output.linesize);
sws_freeContext( context );
output.linesize[0] = width;
output.linesize[1] = width >> 1;
output.linesize[2] = width >> 1;
- set_luma_transfer( context, is_709, luma );
+ set_luma_transfer( context, yuv_std, luma );
sws_scale( context, frame->data, frame->linesize, 0, height,
output.data, output.linesize);
sws_freeContext( context );
AVPicture output;
avpicture_fill( &output, buffer, PIX_FMT_RGB24, width, height );
luma = 1;
- set_luma_transfer( context, is_709, luma );
+ set_luma_transfer( context, yuv_std, luma );
sws_scale( context, frame->data, frame->linesize, 0, height,
output.data, output.linesize);
sws_freeContext( context );
AVPicture output;
avpicture_fill( &output, buffer, PIX_FMT_RGBA, width, height );
luma = 1;
- set_luma_transfer( context, is_709, luma );
+ set_luma_transfer( context, yuv_std, luma );
sws_scale( context, frame->data, frame->linesize, 0, height,
output.data, output.linesize);
sws_freeContext( context );
width, height, PIX_FMT_YUYV422, flags | SWS_FULL_CHR_H_INP, NULL, NULL, NULL);
AVPicture output;
avpicture_fill( &output, buffer, PIX_FMT_YUYV422, width, height );
- is_709 = width * height > 750000;
- set_luma_transfer( context, is_709, luma );
+ set_luma_transfer( context, yuv_std, luma );
sws_scale( context, frame->data, frame->linesize, 0, height,
output.data, output.linesize);
sws_freeContext( context );
picture.linesize[0] = codec_context->width;
picture.linesize[1] = codec_context->width / 2;
picture.linesize[2] = codec_context->width / 2;
- convert_image( (AVFrame*) &picture, *buffer, PIX_FMT_YUV420P, format, *width, *height );
+ convert_image( (AVFrame*) &picture, *buffer,
+ PIX_FMT_YUV420P, format, *width, *height, this->yuv_std );
}
else
#endif
- convert_image( this->av_frame, *buffer, codec_context->pix_fmt, format, *width, *height );
+ convert_image( this->av_frame, *buffer, codec_context->pix_fmt,
+ format, *width, *height, this->yuv_std );
}
else
mlt_frame_get_image( frame, buffer, format, width, height, writable );
VdpStatus status = vdp_surface_get_bits( render->surface, dest_format, planes, pitches );
if ( status == VDP_STATUS_OK )
{
- convert_image( (AVFrame*) &picture, *buffer, PIX_FMT_YUV420P, format, *width, *height );
+ convert_image( (AVFrame*) &picture, *buffer, PIX_FMT_YUV420P,
+ format, *width, *height, this->yuv_std );
}
else
{
}
else
#endif
- convert_image( this->av_frame, *buffer, codec_context->pix_fmt, format, *width, *height );
+ convert_image( this->av_frame, *buffer, codec_context->pix_fmt,
+ format, *width, *height, this->yuv_std );
this->top_field_first |= this->av_frame->top_field_first;
this->current_position = int_position;
this->got_picture = 1;
mlt_properties_set_double( properties, "source_fps", source_fps );
else
mlt_properties_set_double( properties, "source_fps", mlt_producer_get_fps( this->parent ) );
+
+ // Set the YUV colorspace from an explicit "force_yuv_std" property override,
+ // or detect it from the codec context's colorspace field.
+ this->yuv_std = mlt_properties_get_int( properties, "force_yuv_std" );
+ if ( ! this->yuv_std )
+ {
+ switch ( this->video_codec->colorspace )
+ {
+ case AVCOL_SPC_BT709:
+ this->yuv_std = 709;
+ break;
+ case AVCOL_SPC_BT470BG:
+ case AVCOL_SPC_SMPTE170M:
+ this->yuv_std = 601;
+ break;
+ case AVCOL_SPC_SMPTE240M:
+ this->yuv_std = 240;
+ break;
+ default:
+ this->yuv_std = this->video_codec->width * this->video_codec->height > 750000 ? 709 : 601;
+ break;
+ }
+ }
+ // Expose the chosen colorspace to applications via the "yuv_std" property.
+ mlt_properties_set_int( properties, "yuv_std", this->yuv_std );
}
return this->video_codec && this->video_index > -1;
}
mlt_properties_set_int( frame_properties, "real_width", this->video_codec->width );
mlt_properties_set_int( frame_properties, "real_height", this->video_codec->height );
mlt_properties_set_double( frame_properties, "aspect_ratio", aspect_ratio );
+ mlt_properties_set_int( frame_properties, "yuv_std", this->yuv_std );
// Workaround 1088 encodings missing cropping info.
if ( this->video_codec->height == 1088 && mlt_profile_dar( mlt_service_profile( MLT_PRODUCER_SERVICE( producer ) ) ) == 16.0/9.0 )