/*****************************************************************************
* mp4.c: mp4 muxer
*****************************************************************************
- * Copyright (C) 2003-2010 x264 project
+ * Copyright (C) 2003-2013 x264 project
*
* Authors: Laurent Aimar <fenrir@via.ecp.fr>
* Loren Merritt <lorenm@u.washington.edu>
#include "output.h"
#include <gpac/isomedia.h>
+#ifdef _WIN32
+#include <windows.h>
+#endif
+
#if HAVE_GF_MALLOC
#undef malloc
#undef free
+#undef realloc
#define malloc gf_malloc
#define free gf_free
+#define realloc gf_realloc
#endif
typedef struct
int i_delay_frames;
int b_dts_compress;
int i_dts_compress_multiplier;
+ int i_data_size;
} mp4_hnd_t;
static void recompute_bitrate_mp4( GF_ISOFile *p_file, int i_track )
if( p_mp4->p_file )
{
- /* The mdhd duration is defined as CTS[final] - CTS[0] + duration of last frame.
- * The mdhd duration (in seconds) should be able to be longer than the tkhd duration since the track is managed by edts.
- * So, if mdhd duration is equal to the last DTS or less, we give the last composition time delta to the last sample duration.
- * And then, the mdhd duration is updated, but it time-wise doesn't give the actual duration.
- * The tkhd duration is the actual track duration. */
- uint64_t mdhd_duration = (2 * largest_pts - second_largest_pts) * p_mp4->i_time_inc;
- if( mdhd_duration != gf_isom_get_media_duration( p_mp4->p_file, p_mp4->i_track ) )
- {
- uint64_t last_dts = gf_isom_get_sample_dts( p_mp4->p_file, p_mp4->i_track, p_mp4->i_numframe );
- uint32_t last_duration = (uint32_t)( mdhd_duration > last_dts ? mdhd_duration - last_dts : (largest_pts - second_largest_pts) * p_mp4->i_time_inc );
- gf_isom_set_last_sample_duration( p_mp4->p_file, p_mp4->i_track, last_duration );
- }
-
- /* Write an Edit Box if the first CTS offset is positive.
- * A media_time is given by not the mvhd timescale but rather the mdhd timescale.
- * The reason is that an Edit Box maps the presentation time-line to the media time-line.
- * Any demuxers should follow the Edit Box if it exists. */
- GF_ISOSample *sample = gf_isom_get_sample_info( p_mp4->p_file, p_mp4->i_track, 1, NULL, NULL );
- if( sample && sample->CTS_Offset > 0 )
+ if( p_mp4->i_track )
{
- uint32_t mvhd_timescale = gf_isom_get_timescale( p_mp4->p_file );
- uint64_t tkhd_duration = (uint64_t)( mdhd_duration * ( (double)mvhd_timescale / p_mp4->i_time_res ) );
- gf_isom_append_edit_segment( p_mp4->p_file, p_mp4->i_track, tkhd_duration, sample->CTS_Offset, GF_ISOM_EDIT_NORMAL );
+ /* The mdhd duration is defined as CTS[final] - CTS[0] + duration of last frame.
+ * The mdhd duration (in seconds) should be able to be longer than the tkhd duration since the track is managed by edts.
+ * So, if the mdhd duration is less than or equal to the last DTS, we give the last composition time delta to the last sample duration.
+ * And then, the mdhd duration is updated, but it time-wise doesn't give the actual duration.
+ * The tkhd duration is the actual track duration. */
+ uint64_t mdhd_duration = (2 * largest_pts - second_largest_pts) * p_mp4->i_time_inc;
+ if( mdhd_duration != gf_isom_get_media_duration( p_mp4->p_file, p_mp4->i_track ) )
+ {
+ uint64_t last_dts = gf_isom_get_sample_dts( p_mp4->p_file, p_mp4->i_track, p_mp4->i_numframe );
+ uint32_t last_duration = (uint32_t)( mdhd_duration > last_dts ? mdhd_duration - last_dts : (largest_pts - second_largest_pts) * p_mp4->i_time_inc );
+ gf_isom_set_last_sample_duration( p_mp4->p_file, p_mp4->i_track, last_duration );
+ }
+
+ /* Write an Edit Box if the first CTS offset is positive.
+ * A media_time is expressed in the mdhd timescale, not the mvhd timescale.
+ * The reason is that an Edit Box maps the presentation time-line to the media time-line.
+ * Any demuxers should follow the Edit Box if it exists. */
+ GF_ISOSample *sample = gf_isom_get_sample_info( p_mp4->p_file, p_mp4->i_track, 1, NULL, NULL );
+ if( sample && sample->CTS_Offset > 0 )
+ {
+ uint32_t mvhd_timescale = gf_isom_get_timescale( p_mp4->p_file );
+ uint64_t tkhd_duration = (uint64_t)( mdhd_duration * ( (double)mvhd_timescale / p_mp4->i_time_res ) );
+ gf_isom_append_edit_segment( p_mp4->p_file, p_mp4->i_track, tkhd_duration, sample->CTS_Offset, GF_ISOM_EDIT_NORMAL );
+ }
+ gf_isom_sample_del( &sample );
+
+ recompute_bitrate_mp4( p_mp4->p_file, p_mp4->i_track );
}
- gf_isom_sample_del( &sample );
-
- recompute_bitrate_mp4( p_mp4->p_file, p_mp4->i_track );
gf_isom_set_pl_indication( p_mp4->p_file, GF_ISOM_PL_VISUAL, 0x15 );
gf_isom_set_storage_mode( p_mp4->p_file, GF_ISOM_STORE_FLAT );
gf_isom_close( p_mp4->p_file );
mp4_hnd_t *p_mp4;
*p_handle = NULL;
- FILE *fh = fopen( psz_filename, "w" );
+ FILE *fh = x264_fopen( psz_filename, "w" );
if( !fh )
return -1;
FAIL_IF_ERR( !x264_is_regular_file( fh ), "mp4", "MP4 output is incompatible with non-regular file `%s'\n", psz_filename )
return -1;
memset( p_mp4, 0, sizeof(mp4_hnd_t) );
+
+#ifdef _WIN32
+ /* GPAC doesn't support Unicode filenames. */
+ char ansi_filename[MAX_PATH];
+ FAIL_IF_ERR( !x264_ansi_filename( psz_filename, ansi_filename, MAX_PATH, 1 ), "mp4", "invalid ansi filename\n" )
+ p_mp4->p_file = gf_isom_open( ansi_filename, GF_ISOM_OPEN_WRITE, NULL );
+#else
p_mp4->p_file = gf_isom_open( psz_filename, GF_ISOM_OPEN_WRITE, NULL );
+#endif
p_mp4->b_dts_compress = opt->use_dts_compress;
p_mp4->i_delay_frames = p_param->i_bframe ? (p_param->i_bframe_pyramid ? 2 : 1) : 0;
p_mp4->i_dts_compress_multiplier = p_mp4->b_dts_compress * p_mp4->i_delay_frames + 1;
- p_mp4->i_time_res = p_param->i_timebase_den * p_mp4->i_dts_compress_multiplier;
- p_mp4->i_time_inc = p_param->i_timebase_num * p_mp4->i_dts_compress_multiplier;
+ p_mp4->i_time_res = (uint64_t)p_param->i_timebase_den * p_mp4->i_dts_compress_multiplier;
+ p_mp4->i_time_inc = (uint64_t)p_param->i_timebase_num * p_mp4->i_dts_compress_multiplier;
FAIL_IF_ERR( p_mp4->i_time_res > UINT32_MAX, "mp4", "MP4 media timescale %"PRIu64" exceeds maximum\n", p_mp4->i_time_res )
p_mp4->i_track = gf_isom_new_track( p_mp4->p_file, 0, GF_ISOM_MEDIA_VISUAL,
gf_isom_set_track_layout_info( p_mp4->p_file, p_mp4->i_track, dw, dh, 0, 0, 0 );
}
- p_mp4->p_sample->data = malloc( p_param->i_width * p_param->i_height * 3 / 2 );
+ p_mp4->i_data_size = p_param->i_width * p_param->i_height * 3 / 2;
+ p_mp4->p_sample->data = malloc( p_mp4->i_data_size );
if( !p_mp4->p_sample->data )
+ {
+ p_mp4->i_data_size = 0;
return -1;
+ }
return 0;
}
+/* Ensure the reusable sample buffer can hold at least needed_size bytes.
+ * Grows p_sample->data via realloc when needed; never shrinks it, so the
+ * buffer high-water-marks across frames.
+ * Returns 0 on success, -1 if reallocation fails — in that case the
+ * original buffer pointer and i_data_size are left untouched (realloc
+ * does not free the old block on failure), so the caller's state stays valid. */
+static int check_buffer( mp4_hnd_t *p_mp4, int needed_size )
+{
+    if( needed_size > p_mp4->i_data_size )
+    {
+        /* Use a temporary so the old pointer survives a failed realloc. */
+        void *ptr = realloc( p_mp4->p_sample->data, needed_size );
+        if( !ptr )
+            return -1;
+        p_mp4->p_sample->data = ptr;
+        p_mp4->i_data_size = needed_size;
+    }
+    return 0;
+}
+
static int write_headers( hnd_t handle, x264_nal_t *p_nal )
{
mp4_hnd_t *p_mp4 = handle;
// SEI
+ if( check_buffer( p_mp4, p_mp4->p_sample->dataLength + sei_size ) )
+ return -1;
memcpy( p_mp4->p_sample->data + p_mp4->p_sample->dataLength, sei, sei_size );
p_mp4->p_sample->dataLength += sei_size;
int64_t dts;
int64_t cts;
+ if( check_buffer( p_mp4, p_mp4->p_sample->dataLength + i_size ) )
+ return -1;
memcpy( p_mp4->p_sample->data + p_mp4->p_sample->dataLength, p_nalu, i_size );
p_mp4->p_sample->dataLength += i_size;