libavcodec's vorbis encoder is also experimental.
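
For context, here is a minimal sketch (not part of this commit) of how the consumer
properties this diff deals with might be driven from the MLT C API, following the
usual MLT consumer "hello world" loop. The profile, file names and the vorbis/ogv
codec choice are illustrative assumptions; "acodec" and "mlt_image_format" are the
properties handled in the hunks below.

    /* Illustrative sketch only -- not part of this commit. */
    #include <framework/mlt.h>
    #include <unistd.h>

    int main( void )
    {
        mlt_factory_init( NULL );
        mlt_profile profile = mlt_profile_init( NULL );

        /* Hypothetical output file; the avformat consumer encodes into it. */
        mlt_consumer consumer = mlt_factory_consumer( profile, "avformat", "out.ogv" );
        mlt_properties props = MLT_CONSUMER_PROPERTIES( consumer );

        /* With this patch, requesting vorbis sets astrict=experimental automatically. */
        mlt_properties_set( props, "acodec", "vorbis" );

        /* New with this patch: choose the image format requested from the render threads. */
        mlt_properties_set( props, "mlt_image_format", "yuv420p" );

        /* Stop encoding when the (hypothetical) input clip runs out. */
        mlt_properties_set_int( props, "terminate_on_pause", 1 );
        mlt_producer producer = mlt_factory_producer( profile, "avformat", "clip.dv" );

        mlt_consumer_connect( consumer, mlt_producer_service( producer ) );
        mlt_consumer_start( consumer );
        while ( !mlt_consumer_is_stopped( consumer ) )
            sleep( 1 );

        mlt_consumer_close( consumer );
        mlt_producer_close( producer );
        mlt_profile_close( profile );
        mlt_factory_close();
        return 0;
    }
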
diff --git a/src/modules/avformat/consumer_avformat.c b/src/modules/avformat/consumer_avformat.c
index 3831e09f860701e5b6439e719c13d110b304d6a8..f09bebd94d4e09cb341576aafc9689b22c29041f 100644
--- a/src/modules/avformat/consumer_avformat.c
+++ b/src/modules/avformat/consumer_avformat.c
@@ -441,6 +441,21 @@ static void apply_properties( void *obj, mlt_properties properties, int flags )
        }
 }
 
+static enum PixelFormat pick_pix_fmt( mlt_image_format img_fmt )
+{
+       switch ( img_fmt )
+       {
+       case mlt_image_rgb24:
+               return PIX_FMT_RGB24;
+       case mlt_image_rgb24a:
+               return PIX_FMT_RGBA;
+       case mlt_image_yuv420p:
+               return PIX_FMT_YUV420P;
+       default:
+               return PIX_FMT_YUYV422;
+       }
+}
+
 static int get_mlt_audio_format( int av_sample_fmt )
 {
        switch ( av_sample_fmt )
@@ -1126,14 +1141,33 @@ static void *consumer_thread( void *arg )
        mlt_deque queue = mlt_properties_get_data( properties, "frame_queue", NULL );
        sample_fifo fifo = mlt_properties_get_data( properties, "sample_fifo", NULL );
 
-       // Need two av pictures for converting
-       AVFrame *converted_avframe = NULL;
-       AVFrame *audio_avframe = NULL;
-       AVFrame *video_avframe = alloc_picture( PIX_FMT_YUYV422, width, height );
-
        // For receiving images from an mlt_frame
        uint8_t *image;
        mlt_image_format img_fmt = mlt_image_yuv422;
+       // Get the image format to use for rendering threads
+       const char* img_fmt_name = mlt_properties_get( properties, "mlt_image_format" );
+       if ( img_fmt_name )
+       {
+               if ( !strcmp( img_fmt_name, "rgb24" ) )
+                       img_fmt = mlt_image_rgb24;
+               else if ( !strcmp( img_fmt_name, "rgb24a" ) )
+                       img_fmt = mlt_image_rgb24a;
+               else if ( !strcmp( img_fmt_name, "yuv420p" ) )
+                       img_fmt = mlt_image_yuv420p;
+       }
+       else if ( mlt_properties_get( properties, "pix_fmt" ) )
+       {
+               img_fmt_name = mlt_properties_get( properties, "pix_fmt" );
+               if ( !strcmp( img_fmt_name, "rgba" ) ||
+                    !strcmp( img_fmt_name, "argb" ) ||
+                    !strcmp( img_fmt_name, "bgra" ) )
+                       img_fmt = mlt_image_rgb24a;
+       }
+
+       // Need two av pictures for converting
+       AVFrame *converted_avframe = NULL;
+       AVFrame *audio_avframe = NULL;
+       AVFrame *video_avframe = alloc_picture( pick_pix_fmt( img_fmt ), width, height );
 
        // For receiving audio samples back from the fifo
        uint8_t *audio_buf_1 = av_malloc( AUDIO_ENCODE_BUFFER_SIZE );
@@ -1171,6 +1205,7 @@ static void *consumer_thread( void *arg )
        // Misc
        char key[27];
        mlt_properties frame_meta_properties = mlt_properties_new();
+       int error_count = 0;
 
        // Initialize audio_st
        int i = MAX_AUDIO_STREAMS;
@@ -1212,7 +1247,7 @@ static void *consumer_thread( void *arg )
                                acodec = mlt_properties_get( properties, "_acodec" );
                                audio_codec = avcodec_find_encoder_by_name( acodec );
                        }
-                       else if ( !strcmp( acodec, "aac" ) )
+                       else if ( !strcmp( acodec, "aac" ) || !strcmp( acodec, "vorbis" ) )
                        {
                                mlt_properties_set( properties, "astrict", "experimental" );
                        }
@@ -1662,6 +1697,7 @@ static void *consumer_thread( void *arg )
                                                                }
                                                        }
 #if LIBAVCODEC_VERSION_MAJOR >= 55
+                                                       audio_avframe->nb_samples = FFMAX( samples, audio_input_nb_samples );
                                                        avcodec_fill_audio_frame( audio_avframe, codec->channels, codec->sample_fmt,
                                                                (const uint8_t*) audio_buf_2, AUDIO_ENCODE_BUFFER_SIZE, 0 );
                                                        int got_packet = 0;
@@ -1671,6 +1707,7 @@ static void *consumer_thread( void *arg )
                                                        else if ( !got_packet )
                                                                pkt.size = 0;
 #else
+                                                       codec->frame_size = FFMAX( samples, audio_input_nb_samples );
                                                        pkt.size = avcodec_encode_audio( codec, audio_outbuf, audio_outbuf_size, (short*) audio_buf_2 );
                                                        pkt.pts = codec->coded_frame? codec->coded_frame->pts : AV_NOPTS_VALUE;
                                                        pkt.flags |= PKT_FLAG_KEY;
@@ -1695,10 +1732,13 @@ static void *consumer_thread( void *arg )
                                                                mlt_events_fire( properties, "consumer-fatal-error", NULL );
                                                                goto on_fatal_error;
                                                        }
+                                                       error_count = 0;
                                                }
                                                else if ( pkt.size < 0 )
                                                {
                                                        mlt_log_warning( MLT_CONSUMER_SERVICE( consumer ), "error with audio encode %d\n", frame_count );
+                                                       if ( ++error_count > 2 )
+                                                               goto on_fatal_error;
                                                }
 
                                                mlt_log_debug( MLT_CONSUMER_SERVICE( consumer ), " frame_size %d\n", codec->frame_size );
@@ -1719,29 +1759,35 @@ static void *consumer_thread( void *arg )
                                if ( mlt_deque_count( queue ) )
                                {
                                        int ret = 0;
-                                       AVCodecContext *c;
+                                       AVCodecContext *c = video_st->codec;
 
                                        frame = mlt_deque_pop_front( queue );
                                        frame_properties = MLT_FRAME_PROPERTIES( frame );
 
-                                       c = video_st->codec;
-                                       
                                        if ( mlt_properties_get_int( frame_properties, "rendered" ) )
                                        {
                                                int i = 0;
                                                uint8_t *p;
                                                uint8_t *q;
+                                               int stride = mlt_image_format_size( img_fmt, width, 0, NULL );
 
                                                mlt_frame_get_image( frame, &image, &img_fmt, &img_width, &img_height, 0 );
-
                                                q = image;
 
                                                // Convert the mlt frame to an AVPicture
-                                               for ( i = 0; i < height; i ++ )
+                                               if ( img_fmt == mlt_image_yuv420p )
+                                               {
+                                                       memcpy( video_avframe->data[0], q, video_avframe->linesize[0] * height );
+                                                       q += video_avframe->linesize[0] * height;
+                                                       memcpy( video_avframe->data[1], q, video_avframe->linesize[1] * height / 2 );
+                                                       q += video_avframe->linesize[1] * height / 2;
+                                                       memcpy( video_avframe->data[2], q, video_avframe->linesize[2] * height / 2 );
+                                               }
+                                               else for ( i = 0; i < height; i ++ )
                                                {
-                                                       p = video_avframe->data[ 0 ] + i * video_avframe->linesize[ 0 ];
-                                                       memcpy( p, q, width * 2 );
-                                                       q += width * 2;
+                                                       p = video_avframe->data[0] + i * video_avframe->linesize[0];
+                                                       memcpy( p, q, stride );
+                                                       q += stride;
                                                }
 
                                                // Do the colour space conversion
@@ -1752,8 +1798,8 @@ static void *consumer_thread( void *arg )
 #ifdef USE_SSE
                                                flags |= SWS_CPU_CAPS_MMX2;
 #endif
-                                               struct SwsContext *context = sws_getContext( width, height, PIX_FMT_YUYV422,
-                                                       width, height, video_st->codec->pix_fmt, flags, NULL, NULL, NULL);
+                                               struct SwsContext *context = sws_getContext( width, height, pick_pix_fmt( img_fmt ),
+                                                       width, height, c->pix_fmt, flags, NULL, NULL, NULL);
                                                sws_scale( context, (const uint8_t* const*) video_avframe->data, video_avframe->linesize, 0, height,
                                                        converted_avframe->data, converted_avframe->linesize);
                                                sws_freeContext( context );
@@ -1761,7 +1807,11 @@ static void *consumer_thread( void *arg )
                                                mlt_events_fire( properties, "consumer-frame-show", frame, NULL );
 
                                                // Apply the alpha if applicable
-                                               if ( video_st->codec->pix_fmt == PIX_FMT_RGB32 )
+                                               if ( !mlt_properties_get( properties, "mlt_image_format" ) ||
+                                                    strcmp( mlt_properties_get( properties, "mlt_image_format" ), "rgb24a" ) )
+                                               if ( c->pix_fmt == PIX_FMT_RGBA ||
+                                                    c->pix_fmt == PIX_FMT_ARGB ||
+                                                    c->pix_fmt == PIX_FMT_BGRA )
                                                {
                                                        uint8_t *alpha = mlt_frame_get_alpha_mask( frame );
                                                        register int n;
@@ -1860,10 +1910,14 @@ static void *consumer_thread( void *arg )
                                                        // Dual pass logging
                                                        if ( mlt_properties_get_data( properties, "_logfile", NULL ) && c->stats_out )
                                                                fprintf( mlt_properties_get_data( properties, "_logfile", NULL ), "%s", c->stats_out );
+
+                                                       error_count = 0;
                                                } 
                                                else if ( pkt.size < 0 )
                                                {
                                                        mlt_log_warning( MLT_CONSUMER_SERVICE( consumer ), "error with video encode %d\n", frame_count );
+                                                       if ( ++error_count > 2 )
+                                                               goto on_fatal_error;
                                                        ret = 0;
                                                }
                                        }