X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=libavformat%2Frtpdec.c;h=823e03cf529c08824524113089e9ab646d13cfb5;hb=e02de9df4b218bd6e1e927b67fd4075741545688;hp=38bd333e9ccb43ba71ae1bd74215b955d2054962;hpb=a9c847c1baff09f954b12c1dcc832b4f601f6f6e;p=ffmpeg

diff --git a/libavformat/rtpdec.c b/libavformat/rtpdec.c
index 38bd333e9cc..823e03cf529 100644
--- a/libavformat/rtpdec.c
+++ b/libavformat/rtpdec.c
@@ -50,6 +50,12 @@ static RTPDynamicProtocolHandler opus_dynamic_handler = {
     .codec_id = AV_CODEC_ID_OPUS,
 };
 
+static RTPDynamicProtocolHandler t140_dynamic_handler = { /* RFC 4103 */
+    .enc_name = "t140",
+    .codec_type = AVMEDIA_TYPE_DATA,
+    .codec_id = AV_CODEC_ID_TEXT,
+};
+
 static RTPDynamicProtocolHandler *rtp_first_dynamic_payload_handler = NULL;
 
 void ff_register_dynamic_payload_handler(RTPDynamicProtocolHandler *handler)
@@ -58,28 +64,33 @@ void ff_register_dynamic_payload_handler(RTPDynamicProtocolHandler *handler)
     rtp_first_dynamic_payload_handler = handler;
 }
 
-void av_register_rtp_dynamic_payload_handlers(void)
+void ff_register_rtp_dynamic_payload_handlers(void)
 {
+    ff_register_dynamic_payload_handler(&ff_ac3_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_amr_nb_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_amr_wb_dynamic_handler);
+    ff_register_dynamic_payload_handler(&ff_dv_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_g726_16_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_g726_24_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_g726_32_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_g726_40_dynamic_handler);
+    ff_register_dynamic_payload_handler(&ff_h261_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_h263_1998_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_h263_2000_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_h263_rfc2190_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_h264_dynamic_handler);
+    ff_register_dynamic_payload_handler(&ff_hevc_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_ilbc_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_jpeg_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_mp4a_latm_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_mp4v_es_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_mpeg_audio_dynamic_handler);
+    ff_register_dynamic_payload_handler(&ff_mpeg_audio_robust_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_mpeg_video_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_mpeg4_generic_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_mpegts_dynamic_handler);
-    ff_register_dynamic_payload_handler(&ff_ms_rtp_asf_pfv_handler);
     ff_register_dynamic_payload_handler(&ff_ms_rtp_asf_pfa_handler);
+    ff_register_dynamic_payload_handler(&ff_ms_rtp_asf_pfv_handler);
     ff_register_dynamic_payload_handler(&ff_qcelp_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_qdm2_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_qt_rtp_aud_handler);
@@ -90,9 +101,11 @@ void av_register_rtp_dynamic_payload_handlers(void)
     ff_register_dynamic_payload_handler(&ff_theora_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_vorbis_dynamic_handler);
     ff_register_dynamic_payload_handler(&ff_vp8_dynamic_handler);
+    ff_register_dynamic_payload_handler(&ff_vp9_dynamic_handler);
     ff_register_dynamic_payload_handler(&opus_dynamic_handler);
     ff_register_dynamic_payload_handler(&realmedia_mp3_dynamic_handler);
     ff_register_dynamic_payload_handler(&speex_dynamic_handler);
+    ff_register_dynamic_payload_handler(&t140_dynamic_handler);
 }
 
 RTPDynamicProtocolHandler *ff_rtp_handler_find_by_name(const char *name,
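Note on the registration hunks above and on the lookup hunk that follows: dynamic payload handlers sit in a singly linked list that ff_register_dynamic_payload_handler() pushes onto, and ff_rtp_handler_find_by_name() walks that list doing a case-insensitive match on enc_name. The next hunk adds a guard so that a handler registered without an enc_name can never feed a NULL pointer into av_strcasecmp(). Below is a minimal standalone sketch of the same pattern; the DemoHandler type and the register_handler()/find_by_name() names are invented for illustration and are not FFmpeg's API.

#include <stdio.h>
#include <strings.h>   /* strcasecmp(); av_strcasecmp() plays this role in FFmpeg */

/* Simplified stand-in for RTPDynamicProtocolHandler: only the fields the
 * name lookup needs. */
typedef struct DemoHandler {
    const char *enc_name;          /* SDP rtpmap encoding name, or NULL */
    int codec_type;                /* 0 = audio, 1 = video, 2 = data    */
    struct DemoHandler *next;
} DemoHandler;

static DemoHandler *first_handler;

static void register_handler(DemoHandler *h)
{
    h->next       = first_handler; /* push onto the singly linked list */
    first_handler = h;
}

/* Case-insensitive name lookup. The NULL check mirrors the guard the diff
 * adds to ff_rtp_handler_find_by_name(): a handler whose enc_name is unset
 * is simply skipped instead of being dereferenced. */
static DemoHandler *find_by_name(const char *name, int codec_type)
{
    for (DemoHandler *h = first_handler; h; h = h->next)
        if (h->enc_name &&
            !strcasecmp(name, h->enc_name) &&
            codec_type == h->codec_type)
            return h;
    return NULL;
}

int main(void)
{
    DemoHandler t140    = { "t140", 2, NULL };  /* RFC 4103 real-time text   */
    DemoHandler unnamed = { NULL,   0, NULL };  /* matched by other means    */

    register_handler(&unnamed);
    register_handler(&t140);

    printf("t140 -> %s\n", find_by_name("T140", 2) ? "found" : "not found");
    printf("ac3  -> %s\n", find_by_name("ac3",  0) ? "found" : "not found");
    return 0;
}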
@@ -101,7 +114,8 @@ RTPDynamicProtocolHandler *ff_rtp_handler_find_by_name(const char *name,
     RTPDynamicProtocolHandler *handler;
     for (handler = rtp_first_dynamic_payload_handler;
          handler; handler = handler->next)
-        if (!av_strcasecmp(name, handler->enc_name) &&
+        if (handler->enc_name &&
+            !av_strcasecmp(name, handler->enc_name) &&
             codec_type == handler->codec_type)
             return handler;
     return NULL;
@@ -134,14 +148,14 @@ static int rtcp_parse_packet(RTPDemuxContext *s, const unsigned char *buf,
             return AVERROR_INVALIDDATA;
         }
 
-        s->last_rtcp_reception_time = av_gettime();
+        s->last_rtcp_reception_time = av_gettime_relative();
        s->last_rtcp_ntp_time = AV_RB64(buf + 8);
         s->last_rtcp_timestamp = AV_RB32(buf + 16);
         if (s->first_rtcp_ntp_time == AV_NOPTS_VALUE) {
             s->first_rtcp_ntp_time = s->last_rtcp_ntp_time;
             if (!s->base_timestamp)
                 s->base_timestamp = s->last_rtcp_timestamp;
-            s->rtcp_ts_offset = s->last_rtcp_timestamp - s->base_timestamp;
+            s->rtcp_ts_offset = (int32_t)(s->last_rtcp_timestamp - s->base_timestamp);
         }
 
         break;
@@ -314,7 +328,7 @@ int ff_rtp_check_and_send_back_rr(RTPDemuxContext *s, URLContext *fd,
         avio_wb32(pb, 0); /* delay since last SR */
     } else {
         uint32_t middle_32_bits = s->last_rtcp_ntp_time >> 16; // this is valid, right? do we need to handle 64 bit values special?
-        uint32_t delay_since_last = av_rescale(av_gettime() - s->last_rtcp_reception_time,
+        uint32_t delay_since_last = av_rescale(av_gettime_relative() - s->last_rtcp_reception_time,
                                                65536, AV_TIME_BASE);
 
         avio_wb32(pb, middle_32_bits); /* last SR timestamp */
@@ -341,9 +355,9 @@ int ff_rtp_check_and_send_back_rr(RTPDemuxContext *s, URLContext *fd,
     len = avio_close_dyn_buf(pb, &buf);
     if ((len > 0) && buf) {
         int av_unused result;
-        av_dlog(s->ic, "sending %d bytes of RR\n", len);
+        av_log(s->ic, AV_LOG_TRACE, "sending %d bytes of RR\n", len);
         result = ffurl_write(fd, buf, len);
-        av_dlog(s->ic, "result from ffurl_write: %d\n", result);
+        av_log(s->ic, AV_LOG_TRACE, "result from ffurl_write: %d\n", result);
         av_free(buf);
     }
     return 0;
@@ -424,7 +438,7 @@ int ff_rtp_send_rtcp_feedback(RTPDemuxContext *s, URLContext *fd,
     AVIOContext *pb;
     uint8_t *buf;
     int64_t now;
-    uint16_t first_missing, missing_mask;
+    uint16_t first_missing = 0, missing_mask = 0;
 
     if (!fd && !avio)
         return -1;
@@ -439,7 +453,7 @@ int ff_rtp_send_rtcp_feedback(RTPDemuxContext *s, URLContext *fd,
 
     /* Send new feedback if enough time has elapsed since the last
      * feedback packet. */
-    now = av_gettime();
+    now = av_gettime_relative();
     if (s->last_feedback_time &&
         (now - s->last_feedback_time) < MIN_FEEDBACK_INTERVAL)
         return 0;
@@ -499,21 +513,13 @@ RTPDemuxContext *ff_rtp_parse_open(AVFormatContext *s1, AVStream *st,
     s->ic = s1;
     s->st = st;
     s->queue_size = queue_size;
+
+    av_log(s->st ? s->st->codec : NULL, AV_LOG_VERBOSE,
+           "setting jitter buffer size to %d\n", s->queue_size);
+
     rtp_init_statistics(&s->statistics, 0);
     if (st) {
         switch (st->codec->codec_id) {
-        case AV_CODEC_ID_MPEG1VIDEO:
-        case AV_CODEC_ID_MPEG2VIDEO:
-        case AV_CODEC_ID_MP2:
-        case AV_CODEC_ID_MP3:
-        case AV_CODEC_ID_MPEG4:
-        case AV_CODEC_ID_H263:
-        case AV_CODEC_ID_H264:
-            st->need_parsing = AVSTREAM_PARSE_FULL;
-            break;
-        case AV_CODEC_ID_VORBIS:
-            st->need_parsing = AVSTREAM_PARSE_HEADERS;
-            break;
         case AV_CODEC_ID_ADPCM_G722:
             /* According to RFC 3551, the stream clock rate is 8000
              * even if the sample rate is 16000. */
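Note on the ff_rtp_check_and_send_back_rr() hunks above: per RFC 3550 section 6.4.1, the receiver report's "last SR" field carries the middle 32 bits of the 64-bit NTP timestamp taken from the most recent sender report, and "delay since last SR" is counted in units of 1/65536 of a second, which is why the elapsed time in microseconds is rescaled by 65536 / AV_TIME_BASE (AV_TIME_BASE being 1000000). A small standalone sketch of that arithmetic follows; the helper names are invented, and the real code uses av_gettime_relative() and av_rescale() instead.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define TIME_BASE_US 1000000   /* microseconds per second, like AV_TIME_BASE */

/* RFC 3550 6.4.1: the "last SR" (LSR) field is the middle 32 bits of the
 * 64-bit NTP timestamp, i.e. the low 16 bits of the seconds part and the
 * high 16 bits of the fraction part. */
static uint32_t last_sr_field(uint64_t ntp_time)
{
    return (uint32_t)(ntp_time >> 16);
}

/* RFC 3550 6.4.1: "delay since last SR" (DLSR) is expressed in units of
 * 1/65536 seconds, so an elapsed time in microseconds is scaled by
 * 65536 / 1000000 - exactly what av_rescale(elapsed_us, 65536, AV_TIME_BASE)
 * computes in rtpdec.c. */
static uint32_t delay_since_last_sr(int64_t elapsed_us)
{
    return (uint32_t)(elapsed_us * 65536 / TIME_BASE_US);
}

int main(void)
{
    /* arbitrary NTP timestamp; fraction 0x80000000 corresponds to 0.5 s */
    uint64_t ntp        = ((uint64_t)0x83AA7E80 << 32) | 0x80000000;
    int64_t  elapsed_us = 2500000;                       /* 2.5 seconds */

    printf("LSR  = 0x%08" PRIx32 "\n", last_sr_field(ntp));
    printf("DLSR = %" PRIu32 " (2.5 * 65536 = 163840)\n",
           delay_since_last_sr(elapsed_us));
    return 0;
}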
@@ -587,11 +593,12 @@ static int rtp_parse_packet_internal(RTPDemuxContext *s, AVPacket *pkt,
 {
     unsigned int ssrc;
     int payload_type, seq, flags = 0;
-    int ext;
+    int ext, csrc;
     AVStream *st;
     uint32_t timestamp;
     int rv = 0;
 
+    csrc = buf[0] & 0x0f;
     ext = buf[0] & 0x10;
     payload_type = buf[1] & 0x7f;
     if (buf[1] & 0x80)
@@ -625,6 +632,11 @@ static int rtp_parse_packet_internal(RTPDemuxContext *s, AVPacket *pkt,
     len -= 12;
     buf += 12;
 
+    len -= 4 * csrc;
+    buf += 4 * csrc;
+    if (len < 0)
+        return AVERROR_INVALIDDATA;
+
     /* RFC 3550 Section 5.3.1 RTP Header Extension handling */
     if (ext) {
         if (len < 4)
@@ -645,9 +657,8 @@ static int rtp_parse_packet_internal(RTPDemuxContext *s, AVPacket *pkt,
                                       s->st, pkt, &timestamp, buf, len, seq,
                                       flags);
     } else if (st) {
-        /* At this point, the RTP header has been stripped;
-         * This is ASSUMING that there is only 1 CSRC, which isn't wise. */
-        av_new_packet(pkt, len);
+        if ((rv = av_new_packet(pkt, len)) < 0)
+            return rv;
         memcpy(pkt->data, buf, len);
         pkt->stream_index = st->index;
     } else {
@@ -673,7 +684,7 @@ void ff_rtp_reset_packet_queue(RTPDemuxContext *s)
     s->prev_ret = 0;
 }
 
-static void enqueue_packet(RTPDemuxContext *s, uint8_t *buf, int len)
+static int enqueue_packet(RTPDemuxContext *s, uint8_t *buf, int len)
 {
     uint16_t seq = AV_RB16(buf + 2);
     RTPPacket **cur = &s->queue, *packet;
@@ -688,14 +699,16 @@ static void enqueue_packet(RTPDemuxContext *s, uint8_t *buf, int len)
 
     packet = av_mallocz(sizeof(*packet));
     if (!packet)
-        return;
-    packet->recvtime = av_gettime();
+        return AVERROR(ENOMEM);
+    packet->recvtime = av_gettime_relative();
     packet->seq = seq;
     packet->len = len;
     packet->buf = buf;
     packet->next = *cur;
     *cur = packet;
     s->queue_len++;
+
+    return 0;
 }
 
 static int has_next_packet(RTPDemuxContext *s)
@@ -767,7 +780,7 @@ static int rtp_parse_one_packet(RTPDemuxContext *s, AVPacket *pkt,
     }
 
     if (s->st) {
-        int64_t received = av_gettime();
+        int64_t received = av_gettime_relative();
         uint32_t arrival_ts = av_rescale_q(received, AV_TIME_BASE_Q,
                                            s->st->time_base);
         timestamp = AV_RB32(buf + 4);
@@ -793,12 +806,17 @@
             return rv;
         } else {
             /* Still missing some packet, enqueue this one. */
-            enqueue_packet(s, buf, len);
+            rv = enqueue_packet(s, buf, len);
+            if (rv < 0)
+                return rv;
             *bufptr = NULL;
             /* Return the first enqueued packet if the queue is full,
              * even if we're missing something */
-            if (s->queue_len >= s->queue_size)
+            if (s->queue_len >= s->queue_size) {
+                av_log(s->st ? s->st->codec : NULL, AV_LOG_WARNING,
+                       "jitter buffer full\n");
                 return rtp_parse_queued_packet(s, pkt);
+            }
             return -1;
         }
     }
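Note on the rtp_parse_packet_internal() hunks above: previously only the 12-byte fixed header was stripped and the CSRC list was silently ignored (the removed comment admitted as much); the new code reads the CC field from the first byte, skips 4 bytes per contributing source and rejects packets that end up with a negative length, before the existing RFC 3550 Section 5.3.1 extension handling runs. A standalone sketch of that header walk, using invented names and no FFmpeg types:

#include <stdint.h>
#include <stdio.h>

/* Minimal model of the RFC 3550 header handling: skip the 12-byte fixed
 * header, then 4 bytes per CSRC entry, then an optional header extension.
 * Returns the payload length, or -1 on a truncated packet. */
int rtp_skip_header(const uint8_t *buf, int len, const uint8_t **payload)
{
    int csrc, ext, ext_len;

    if (len < 12 || (buf[0] & 0xc0) != 0x80)  /* need fixed header, V=2 */
        return -1;

    csrc = buf[0] & 0x0f;   /* CC: number of 32-bit CSRC entries */
    ext  = buf[0] & 0x10;   /* X: header extension present       */

    buf += 12;              /* fixed header */
    len -= 12;

    buf += 4 * csrc;        /* contributing-source list */
    len -= 4 * csrc;
    if (len < 0)
        return -1;

    if (ext) {              /* 4-byte extension header; its length field */
        if (len < 4)        /* counts further 32-bit words               */
            return -1;
        ext_len = 4 + 4 * ((buf[2] << 8) | buf[3]);
        if (len < ext_len)
            return -1;
        buf += ext_len;
        len -= ext_len;
    }

    *payload = buf;
    return len;
}

int main(void)
{
    /* V=2, no extension, CC=2 -> 0x82; PT=96; two CSRC words; 4-byte payload */
    const uint8_t pkt[] = {
        0x82, 0x60, 0x00, 0x01,  0, 0, 0, 0,  0, 0, 0, 0,
        1, 1, 1, 1,  2, 2, 2, 2,
        0xde, 0xad, 0xbe, 0xef
    };
    const uint8_t *payload;

    printf("payload length: %d\n",          /* expected: 4 */
           rtp_skip_header(pkt, (int)sizeof(pkt), &payload));
    return 0;
}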
@@ -833,10 +851,12 @@ void ff_rtp_parse_close(RTPDemuxContext *s)
     av_free(s);
 }
 
-int ff_parse_fmtp(AVStream *stream, PayloadContext *data, const char *p,
-                  int (*parse_fmtp)(AVStream *stream,
+int ff_parse_fmtp(AVFormatContext *s,
+                  AVStream *stream, PayloadContext *data, const char *p,
+                  int (*parse_fmtp)(AVFormatContext *s,
+                                    AVStream *stream,
                                     PayloadContext *data,
-                                    char *attr, char *value))
+                                    const char *attr, const char *value))
 {
     char attr[256];
     char *value;
@@ -859,7 +879,7 @@ int ff_parse_fmtp(AVStream *stream, PayloadContext *data, const char *p,
     while (ff_rtsp_next_attr_and_value(&p,
                                        attr, sizeof(attr),
                                        value, value_size)) {
-        res = parse_fmtp(stream, data, attr, value);
+        res = parse_fmtp(s, stream, data, attr, value);
         if (res < 0 && res != AVERROR_PATCHWELCOME) {
             av_free(value);
             return res;
@@ -871,11 +891,15 @@ int ff_parse_fmtp(AVStream *stream, PayloadContext *data, const char *p,
 
 int ff_rtp_finalize_packet(AVPacket *pkt, AVIOContext **dyn_buf, int stream_idx)
 {
+    int ret;
     av_init_packet(pkt);
     pkt->size = avio_close_dyn_buf(*dyn_buf, &pkt->data);
     pkt->stream_index = stream_idx;
-    pkt->destruct = av_destruct_packet;
-    *dyn_buf = NULL;
+    *dyn_buf = NULL;
+    if ((ret = av_packet_from_data(pkt, pkt->data, pkt->size)) < 0) {
+        av_freep(&pkt->data);
+        return ret;
+    }
 
     return pkt->size;
 }
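Note on the final hunk: the deprecated AVPacket.destruct mechanism is dropped; ownership of the av_malloc()ed buffer returned by avio_close_dyn_buf() is instead transferred with av_packet_from_data(), and the buffer is freed explicitly if that call fails so it cannot leak. A compressed sketch of the same handoff is below; it assumes the FFmpeg 2.x-era libavcodec/libavformat API used elsewhere in this file, and it is an illustration, not the actual ff_rtp_finalize_packet().

#include <libavcodec/avcodec.h>
#include <libavformat/avio.h>

int finalize_packet_sketch(AVPacket *pkt, AVIOContext **dyn_buf,
                           int stream_idx)
{
    uint8_t *data;
    int size, ret;

    av_init_packet(pkt);
    size     = avio_close_dyn_buf(*dyn_buf, &data); /* we own 'data' now */
    *dyn_buf = NULL;

    /* Attach the buffer to the packet; av_packet_from_data() requires an
     * av_malloc()ed buffer, which the dynamic buffer provides.  On failure
     * free it here, since no destructor is attached to the packet. */
    if ((ret = av_packet_from_data(pkt, data, size)) < 0) {
        av_freep(&data);
        return ret;
    }
    pkt->stream_index = stream_idx;
    return size;
}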