producer_avformat.o \
consumer_avformat.o \
filter_avcolour_space.o \
- filter_avdeinterlace.o \
filter_avresample.o
+ifdef MMX_FLAGS
+ OBJS += filter_avdeinterlace.o
+endif
+
CFLAGS+=-I../..
-LDFLAGS+=-lavformat -lavcodec $(EXTRA_LIBS)
-LDFLAGS+=-L../../framework -lmlt
+LDFLAGS+=-L../../framework
+
+ifdef LOCAL_FFMPEG
+ LDFLAGS+=-lmltavformat -lmltavcodec $(EXTRA_LIBS) -lmlt
+else
+ LDFLAGS+=-lavformat -lavcodec $(EXTRA_LIBS) -lmlt
+endif
SRCS := $(OBJS:.o=.c)
all: $(TARGET)
$(TARGET): $(OBJS)
- if [ $(LOCAL_FFMPEG) ] ; then $(MAKE) -C ffmpeg all ; fi
+ if [ $(LOCAL_FFMPEG) ] ; then \
+ $(MAKE) -C ffmpeg all ; \
+ cp -f ffmpeg/libavcodec/libavcodec.so ../../framework/libmltavcodec.so ; \
+ cp -f ffmpeg/libavformat/libavformat.so ../../framework/libmltavformat.so ; \
+ fi
$(CC) -shared -o $@ $(OBJS) $(LDFLAGS)
depend: $(SRCS)
rm -f .depend
clean:
- if [ $(LOCAL_FFMPEG) ] ; then $(MAKE) -C ffmpeg clean ; fi
+ #if [ $(LOCAL_FFMPEG) ] ; then $(MAKE) -C ffmpeg clean ; fi
rm -f $(OBJS) $(TARGET)
install: all
install -m 755 $(TARGET) "$(prefix)/share/mlt/modules"
+ if [ $(LOCAL_FFMPEG) ] ; then \
+ install -m 755 ../../framework/libmltavcodec.so "$(prefix)/share/mlt/modules" ; \
+ install -m 755 ../../framework/libmltavformat.so "$(prefix)/share/mlt/modules" ; \
+ fi
ifneq ($(wildcard .depend),)
include .depend
if [ "$cvs_ffmpeg" != "" ]
then
- [ ! -d "ffmpeg" ] && cvs -z9 -d:pserver:anonymous@mplayerhq.hu:/cvsroot/ffmpeg co -D "10/11/2004 20:30 CET" ffmpeg
- [ -d "ffmpeg" ] && ( cd ffmpeg ; ./configure )
+ [ ! -d "ffmpeg" ] && cvs -z9 -d:pserver:anonymous@mplayerhq.hu:/cvsroot/ffmpeg co ffmpeg
+ [ -d "ffmpeg" ] && ( cd ffmpeg ; ./configure --enable-shared )
#[ ! -f "ffmpeg/ffmpeg.patch" ] && ( cd ffmpeg ; cp ../ffmpeg.patch . ; patch -p0 < ffmpeg.patch )
echo "CFLAGS+=-I`pwd`/ffmpeg/libavformat -I`pwd`/ffmpeg/libavcodec" >> config.mak
- echo "LDFLAGS+=-L`pwd`/ffmpeg/libavformat -L`pwd`/ffmpeg/libavcodec" >> config.mak
echo "LOCAL_FFMPEG=1" >> config.mak
extra_libs="$extra_libs -lz"
elif [ "$static_ffmpeg" != "" ]
avformat_init( );
if ( !strcmp( id, "avcolour_space" ) )
return filter_avcolour_space_init( arg );
+#ifdef USE_MMX
if ( !strcmp( id, "avdeinterlace" ) )
return filter_avdeinterlace_init( arg );
+#endif
if ( !strcmp( id, "avresample" ) )
return filter_avresample_init( arg );
return NULL;
static mlt_frame filter_process( mlt_filter this, mlt_frame frame )
{
// Only call this if we have a means to get audio
- if ( mlt_frame_is_test_audio( frame ) != 0 )
+ if ( mlt_frame_is_test_audio( frame ) == 0 )
{
// Push the filter on to the stack
mlt_frame_push_audio( frame, this );
pixops.o \
filter_rescale.o
-ifeq ($(MMX_FLAGS),-DUSE_MMX)
-ASM_OBJS = have_mmx.o \
- scale_line_22_yuv_mmx.o
+ifdef MMX_FLAGS
+ASM_OBJS = have_mmx.o scale_line_22_yuv_mmx.o
endif
CFLAGS += `pkg-config gtk+-2.0 --cflags` `pkg-config gdk-pixbuf-2.0 --cflags` `pkg-config pangoft2 --cflags` -I../..
#include "ui.h"
-#define BUFFER_LEN 2048 * 3
+#define BUFFER_LEN 204800 * 3
static void *jackrack_thread( void *arg )
{
// Process the audio
int16_t *q = *buffer;
- float sample;
+ float sample[ 2 ][ 10000 ];
int i, j;
+ struct timespec tm = { 0, 0 };
// Convert to floats and write into output ringbuffer
if ( jack_ringbuffer_write_space( output_buffers[0] ) >= ( *samples * sizeof(float) ) )
{
for ( i = 0; i < *samples; i++ )
for ( j = 0; j < *channels; j++ )
- {
- sample = ( float )( *q ++ ) / 32768.0;
- jack_ringbuffer_write( output_buffers[j], ( char * )&sample, sizeof(float) );
- }
+ sample[ j ][ i ] = ( float )( *q ++ ) / 32768.0;
+
+ for ( j = 0; j < *channels; j++ )
+ jack_ringbuffer_write( output_buffers[j], ( char * )sample[ j ], *samples * sizeof(float) );
}
// Synchronization phase - wait for signal from Jack process
- while ( mlt_properties_get_int( filter_properties, "_sync" )
- && jack_ringbuffer_read_space( input_buffers[ *channels - 1 ] ) < ( *samples * sizeof(float) ) )
- pthread_cond_wait( output_ready, output_lock );
+ while ( jack_ringbuffer_read_space( input_buffers[ *channels - 1 ] ) < ( *samples * sizeof(float) ) ) ;
+ //pthread_cond_wait( output_ready, output_lock );
// Read from input ringbuffer and convert from floats
- //if ( jack_ringbuffer_read_space( input_buffers[0] ) >= ( *samples * sizeof(float) ) )
+ if ( jack_ringbuffer_read_space( input_buffers[0] ) >= ( *samples * sizeof(float) ) )
{
// Initialise to silence, but repeat last frame if available in case of
// buffer underrun
- sample = 0;
+ for ( j = 0; j < *channels; j++ )
+ jack_ringbuffer_read( input_buffers[j], ( char * )sample[ j ], *samples * sizeof(float) );
+
q = *buffer;
for ( i = 0; i < *samples; i++ )
for ( j = 0; j < *channels; j++ )
{
- jack_ringbuffer_read( input_buffers[j], ( char * )&sample, sizeof(float) );
-
- if ( sample > 1.0 )
- sample = 1.0;
- else if ( sample < -1.0 )
- sample = -1.0;
+ if ( sample[ j ][ i ] > 1.0 )
+ sample[ j ][ i ] = 1.0;
+ else if ( sample[ j ][ i ] < -1.0 )
+ sample[ j ][ i ] = -1.0;
- if ( sample > 0 )
- *q ++ = 32767 * sample;
+ if ( sample[ j ][ i ] > 0 )
+ *q ++ = 32767 * sample[ j ][ i ];
else
- *q ++ = 32768 * sample;
+ *q ++ = 32768 * sample[ j ][ i ];
}
}
static mlt_frame filter_process( mlt_filter this, mlt_frame frame )
{
- if ( mlt_frame_is_test_audio( frame ) != 0 )
+ if ( mlt_frame_is_test_audio( frame ) == 0 )
{
mlt_properties properties = MLT_FILTER_PROPERTIES( this );
mlt_frame_push_audio( frame, this );
static mlt_frame filter_process( mlt_filter this, mlt_frame frame )
{
- if ( mlt_frame_is_test_audio( frame ) != 0 )
+ if ( mlt_frame_is_test_audio( frame ) == 0 )
{
// Add the filter to the frame
mlt_frame_push_audio( frame, this );