int read_all)
{
RMContext *rm = s->priv_data;
- ByteIOContext *pb = &s->pb;
+ ByteIOContext *pb = s->pb;
char buf[256];
uint32_t version;
int i;
return 0;
}
-static int
+int
ff_rm_read_mdpr_codecdata (AVFormatContext *s, AVStream *st)
{
- ByteIOContext *pb = &s->pb;
+ ByteIOContext *pb = s->pb;
unsigned int v;
int codec_data_size, size;
int64_t codec_pos;
{
RMContext *rm = s->priv_data;
AVStream *st;
- ByteIOContext *pb = &s->pb;
+ ByteIOContext *pb = s->pb;
unsigned int tag;
int tag_size, i;
unsigned int start_time, duration;
n = get_be16(pb);
(*len)-=2;
-// n &= 0x7FFF;
+ n &= 0x7FFF;
if (n >= 0x4000) {
return n - 0x4000;
} else {
static int sync(AVFormatContext *s, int64_t *timestamp, int *flags, int *stream_index, int64_t *pos){
RMContext *rm = s->priv_data;
- ByteIOContext *pb = &s->pb;
+ ByteIOContext *pb = s->pb;
int len, num, res, i;
AVStream *st;
uint32_t state=0xFFFFFFFF;
static int rm_assemble_video_frame(AVFormatContext *s, RMContext *rm, AVPacket *pkt, int len)
{
- ByteIOContext *pb = &s->pb;
+ ByteIOContext *pb = s->pb;
int hdr, seq, pic_num, len2, pos;
int type;
if((seq & 0x7F) == 1 || rm->curpic_num != pic_num){
rm->slices = ((hdr & 0x3F) << 1) + 1;
rm->videobufsize = len2 + 8*rm->slices + 1;
- if(!(rm->videobuf = av_realloc(rm->videobuf, rm->videobufsize)))
+ av_free(rm->videobuf);
+ if(!(rm->videobuf = av_malloc(rm->videobufsize)))
return AVERROR(ENOMEM);
rm->videobufpos = 8*rm->slices + 1;
rm->cur_slice = 0;
return 1;
if (get_buffer(pb, rm->videobuf + rm->videobufpos, len) != len)
return AVERROR(EIO);
- rm->videobufpos += len,
+ rm->videobufpos += len;
rm->remaining_len-= len;
if(type == 2 || (rm->videobufpos) == rm->videobufsize){
}
}
-static int
+int
ff_rm_parse_packet (AVFormatContext *s, AVStream *st, int len, AVPacket *pkt,
int *seq, int *flags, int64_t *timestamp)
{
- ByteIOContext *pb = &s->pb;
+ ByteIOContext *pb = s->pb;
RMContext *rm = s->priv_data;
if (st->codec->codec_type == CODEC_TYPE_VIDEO) {
av_get_packet(pb, pkt, rm->sub_packet_lengths[0]);
*flags = 2; // Mark first packet as keyframe
}
- } else
+ } else {
av_get_packet(pb, pkt, len);
rm_ac3_swap_bytes(st, pkt);
-
+ }
} else
av_get_packet(pb, pkt, len);
return 0;
}
-static void
+void
ff_rm_retrieve_cache (AVFormatContext *s, AVStream *st, AVPacket *pkt)
{
- ByteIOContext *pb = &s->pb;
+ ByteIOContext *pb = s->pb;
RMContext *rm = s->priv_data;
assert (rm->audio_pkt_cnt > 0);
static int rm_read_packet(AVFormatContext *s, AVPacket *pkt)
{
RMContext *rm = s->priv_data;
- ByteIOContext *pb = &s->pb;
+ ByteIOContext *pb = s->pb;
AVStream *st;
int i, len;
int64_t timestamp, pos;
 if (ff_rm_parse_packet (s, st, len, pkt, &seq, &flags, &timestamp) < 0)
goto resync;
- if(flags&2){
- if((seq&0x7F) == 1)
- av_add_index_entry(st, pos, timestamp, 0, 0, AVINDEX_KEYFRAME);
- }
+ if((flags&2) && (seq&0x7F) == 1)
+ av_add_index_entry(st, pos, timestamp, 0, 0, AVINDEX_KEYFRAME);
}
return 0;
if(rm->old_format)
return AV_NOPTS_VALUE;
- url_fseek(&s->pb, pos, SEEK_SET);
+ url_fseek(s->pb, pos, SEEK_SET);
rm->remaining_len=0;
for(;;){
int seq=1;
st = s->streams[stream_index2];
if (st->codec->codec_type == CODEC_TYPE_VIDEO) {
- h= get_byte(&s->pb); len--;
+ h= get_byte(s->pb); len--;
if(!(h & 0x40)){
- seq = get_byte(&s->pb); len--;
+ seq = get_byte(s->pb); len--;
}
}
break;
}
- url_fskip(&s->pb, len);
+ url_fskip(s->pb, len);
}
*ppos = pos;
return dts;