diff --git a/ffserver.c b/ffserver.c
index 372e6abd84bfde4cee46e4032a477143efab5249..2d4664652394f61ca30d05a1741806253379305a 100644
--- a/ffserver.c
+++ b/ffserver.c
 #include <dlfcn.h>
 #endif
 
-#include "version.h"
-#include "ffserver.h"
 #include "cmdutils.h"
 
 #undef exit
 
 const char program_name[] = "FFserver";
-static const int program_birth_year = 2000;
+const int program_birth_year = 2000;
 
 static const OptionDef options[];
 
@@ -2033,8 +2031,10 @@ static int http_prepare_data(HTTPContext *c)
         c->fmt_ctx.pb->is_streamed = 1;
 
         av_set_parameters(&c->fmt_ctx, NULL);
-        if (av_write_header(&c->fmt_ctx) < 0)
+        if (av_write_header(&c->fmt_ctx) < 0) {
+            http_log("Error writing output header\n");
             return -1;
+        }
 
         len = url_close_dyn_buf(c->fmt_ctx.pb, &c->pb_buffer);
         c->buffer_ptr = c->pb_buffer;
@@ -2045,153 +2045,149 @@ static int http_prepare_data(HTTPContext *c)
         break;
     case HTTPSTATE_SEND_DATA:
         /* find a new packet */
-        {
+        /* read a packet from the input stream */
+        if (c->stream->feed)
+            ffm_set_write_index(c->fmt_in,
+                                c->stream->feed->feed_write_index,
+                                c->stream->feed->feed_size);
+
+        if (c->stream->max_time &&
+            c->stream->max_time + c->start_time - cur_time < 0)
+            /* We have timed out */
+            c->state = HTTPSTATE_SEND_DATA_TRAILER;
+        else {
             AVPacket pkt;
-
-            /* read a packet from the input stream */
-            if (c->stream->feed)
-                ffm_set_write_index(c->fmt_in,
-                                    c->stream->feed->feed_write_index,
-                                    c->stream->feed->feed_size);
-
-            if (c->stream->max_time &&
-                c->stream->max_time + c->start_time - cur_time < 0)
-                /* We have timed out */
-                c->state = HTTPSTATE_SEND_DATA_TRAILER;
-            else {
-            redo:
-                if (av_read_frame(c->fmt_in, &pkt) < 0) {
-                    if (c->stream->feed && c->stream->feed->feed_opened) {
-                        /* if coming from feed, it means we reached the end of the
-                           ffm file, so must wait for more data */
-                        c->state = HTTPSTATE_WAIT_FEED;
-                        return 1; /* state changed */
-                    } else {
-                        if (c->stream->loop) {
-                            av_close_input_file(c->fmt_in);
-                            c->fmt_in = NULL;
-                            if (open_input_stream(c, "") < 0)
-                                goto no_loop;
-                            goto redo;
-                        } else {
-                        no_loop:
-                            /* must send trailer now because eof or error */
-                            c->state = HTTPSTATE_SEND_DATA_TRAILER;
-                        }
-                    }
+        redo:
+            if (av_read_frame(c->fmt_in, &pkt) < 0) {
+                if (c->stream->feed && c->stream->feed->feed_opened) {
+                    /* if coming from feed, it means we reached the end of the
+                       ffm file, so must wait for more data */
+                    c->state = HTTPSTATE_WAIT_FEED;
+                    return 1; /* state changed */
                 } else {
-                    /* update first pts if needed */
-                    if (c->first_pts == AV_NOPTS_VALUE) {
-                        c->first_pts = av_rescale_q(pkt.dts, c->fmt_in->streams[pkt.stream_index]->time_base, AV_TIME_BASE_Q);
-                        c->start_time = cur_time;
+                    if (c->stream->loop) {
+                        av_close_input_file(c->fmt_in);
+                        c->fmt_in = NULL;
+                        if (open_input_stream(c, "") < 0)
+                            goto no_loop;
+                        goto redo;
+                    } else {
+                    no_loop:
+                        /* must send trailer now because eof or error */
+                        c->state = HTTPSTATE_SEND_DATA_TRAILER;
                     }
-                    /* send it to the appropriate stream */
-                    if (c->stream->feed) {
-                        /* if coming from a feed, select the right stream */
-                        if (c->switch_pending) {
-                            c->switch_pending = 0;
-                            for(i=0;i<c->stream->nb_streams;i++) {
-                                if (c->switch_feed_streams[i] == pkt.stream_index)
-                                    if (pkt.flags & PKT_FLAG_KEY)
-                                        do_switch_stream(c, i);
-                                if (c->switch_feed_streams[i] >= 0)
-                                    c->switch_pending = 1;
-                            }
-                        }
+                }
+            } else {
+                int source_index = pkt.stream_index;
+                /* update first pts if needed */
+                if (c->first_pts == AV_NOPTS_VALUE) {
+                    c->first_pts = av_rescale_q(pkt.dts, c->fmt_in->streams[pkt.stream_index]->time_base, AV_TIME_BASE_Q);
+                    c->start_time = cur_time;
+                }
+                /* send it to the appropriate stream */
+                if (c->stream->feed) {
+                    /* if coming from a feed, select the right stream */
+                    if (c->switch_pending) {
+                        c->switch_pending = 0;
                         for(i=0;i<c->stream->nb_streams;i++) {
-                            if (c->feed_streams[i] == pkt.stream_index) {
-                                pkt.stream_index = i;
+                            if (c->switch_feed_streams[i] == pkt.stream_index)
                                 if (pkt.flags & PKT_FLAG_KEY)
-                                    c->got_key_frame |= 1 << i;
-                                /* See if we have all the key frames, then
-                                 * we start to send. This logic is not quite
-                                 * right, but it works for the case of a
-                                 * single video stream with one or more
-                                 * audio streams (for which every frame is
-                                 * typically a key frame).
-                                 */
-                                if (!c->stream->send_on_key ||
-                                    ((c->got_key_frame + 1) >> c->stream->nb_streams))
-                                    goto send_it;
-                            }
+                                    do_switch_stream(c, i);
+                            if (c->switch_feed_streams[i] >= 0)
+                                c->switch_pending = 1;
                         }
-                    } else {
-                        AVCodecContext *codec;
-
-                    send_it:
-                        /* specific handling for RTP: we use several
-                           output stream (one for each RTP
-                           connection). XXX: need more abstract handling */
-                        if (c->is_packetized) {
-                            AVStream *st;
-                            /* compute send time and duration */
-                            st = c->fmt_in->streams[pkt.stream_index];
-                            c->cur_pts = av_rescale_q(pkt.dts, st->time_base, AV_TIME_BASE_Q);
-                            if (st->start_time != AV_NOPTS_VALUE)
-                                c->cur_pts -= av_rescale_q(st->start_time, st->time_base, AV_TIME_BASE_Q);
-                            c->cur_frame_duration = av_rescale_q(pkt.duration, st->time_base, AV_TIME_BASE_Q);
+                    }
+                    for(i=0;i<c->stream->nb_streams;i++) {
+                        if (c->feed_streams[i] == pkt.stream_index) {
+                            AVStream *st = c->fmt_in->streams[source_index];
+                            pkt.stream_index = i;
+                            if (pkt.flags & PKT_FLAG_KEY &&
+                                (st->codec->codec_type == CODEC_TYPE_VIDEO ||
+                                 c->stream->nb_streams == 1))
+                                c->got_key_frame = 1;
+                            if (!c->stream->send_on_key || c->got_key_frame)
+                                goto send_it;
+                        }
+                    }
+                } else {
+                    AVCodecContext *codec;
+
+                send_it:
+                    /* specific handling for RTP: we use several
+                       output stream (one for each RTP
+                       connection). XXX: need more abstract handling */
+                    if (c->is_packetized) {
+                        AVStream *st;
+                        /* compute send time and duration */
+                        st = c->fmt_in->streams[pkt.stream_index];
+                        c->cur_pts = av_rescale_q(pkt.dts, st->time_base, AV_TIME_BASE_Q);
+                        if (st->start_time != AV_NOPTS_VALUE)
+                            c->cur_pts -= av_rescale_q(st->start_time, st->time_base, AV_TIME_BASE_Q);
+                        c->cur_frame_duration = av_rescale_q(pkt.duration, st->time_base, AV_TIME_BASE_Q);
 #if 0
-                            printf("index=%d pts=%0.3f duration=%0.6f\n",
-                                   pkt.stream_index,
-                                   (double)c->cur_pts /
-                                   AV_TIME_BASE,
-                                   (double)c->cur_frame_duration /
-                                   AV_TIME_BASE);
+                        printf("index=%d pts=%0.3f duration=%0.6f\n",
+                               pkt.stream_index,
+                               (double)c->cur_pts /
+                               AV_TIME_BASE,
+                               (double)c->cur_frame_duration /
+                               AV_TIME_BASE);
 #endif
-                            /* find RTP context */
-                            c->packet_stream_index = pkt.stream_index;
-                            ctx = c->rtp_ctx[c->packet_stream_index];
-                            if(!ctx) {
-                              av_free_packet(&pkt);
-                              break;
-                            }
-                            codec = ctx->streams[0]->codec;
-                            /* only one stream per RTP connection */
-                            pkt.stream_index = 0;
-                        } else {
-                            ctx = &c->fmt_ctx;
-                            /* Fudge here */
-                            codec = ctx->streams[pkt.stream_index]->codec;
-                        }
-
-                        if (c->is_packetized) {
-                            int max_packet_size;
-                            if (c->rtp_protocol == RTSP_PROTOCOL_RTP_TCP)
-                                max_packet_size = RTSP_TCP_MAX_PACKET_SIZE;
-                            else
-                                max_packet_size = url_get_max_packet_size(c->rtp_handles[c->packet_stream_index]);
-                            ret = url_open_dyn_packet_buf(&ctx->pb, max_packet_size);
-                        } else {
-                            ret = url_open_dyn_buf(&ctx->pb);
-                        }
-                        if (ret < 0) {
-                            /* XXX: potential leak */
-                            return -1;
-                        }
-                        if (pkt.dts != AV_NOPTS_VALUE)
-                            pkt.dts = av_rescale_q(pkt.dts,
-                                c->fmt_in->streams[pkt.stream_index]->time_base,
-                                ctx->streams[pkt.stream_index]->time_base);
-                        if (pkt.pts != AV_NOPTS_VALUE)
-                            pkt.pts = av_rescale_q(pkt.pts,
-                                c->fmt_in->streams[pkt.stream_index]->time_base,
-                                ctx->streams[pkt.stream_index]->time_base);
-                        if (av_write_frame(ctx, &pkt))
-                            c->state = HTTPSTATE_SEND_DATA_TRAILER;
-
-                        len = url_close_dyn_buf(ctx->pb, &c->pb_buffer);
-                        c->cur_frame_bytes = len;
-                        c->buffer_ptr = c->pb_buffer;
-                        c->buffer_end = c->pb_buffer + len;
-
-                        codec->frame_number++;
-                        if (len == 0) {
+                        /* find RTP context */
+                        c->packet_stream_index = pkt.stream_index;
+                        ctx = c->rtp_ctx[c->packet_stream_index];
+                        if(!ctx) {
                             av_free_packet(&pkt);
-                            goto redo;
+                            break;
                         }
+                        codec = ctx->streams[0]->codec;
+                        /* only one stream per RTP connection */
+                        pkt.stream_index = 0;
+                    } else {
+                        ctx = &c->fmt_ctx;
+                        /* Fudge here */
+                        codec = ctx->streams[pkt.stream_index]->codec;
+                    }
+
+                    if (c->is_packetized) {
+                        int max_packet_size;
+                        if (c->rtp_protocol == RTSP_PROTOCOL_RTP_TCP)
+                            max_packet_size = RTSP_TCP_MAX_PACKET_SIZE;
+                        else
+                            max_packet_size = url_get_max_packet_size(c->rtp_handles[c->packet_stream_index]);
+                        ret = url_open_dyn_packet_buf(&ctx->pb, max_packet_size);
+                    } else {
+                        ret = url_open_dyn_buf(&ctx->pb);
+                    }
+                    if (ret < 0) {
+                        /* XXX: potential leak */
+                        return -1;
+                    }
+                    c->fmt_ctx.pb->is_streamed = 1;
+                    if (pkt.dts != AV_NOPTS_VALUE)
+                        pkt.dts = av_rescale_q(pkt.dts,
+                                               c->fmt_in->streams[source_index]->time_base,
+                                               ctx->streams[pkt.stream_index]->time_base);
+                    if (pkt.pts != AV_NOPTS_VALUE)
+                        pkt.pts = av_rescale_q(pkt.pts,
+                                               c->fmt_in->streams[source_index]->time_base,
+                                               ctx->streams[pkt.stream_index]->time_base);
+                    if (av_write_frame(ctx, &pkt) < 0) {
+                        http_log("Error writing frame to output\n");
+                        c->state = HTTPSTATE_SEND_DATA_TRAILER;
+                    }
+
+                    len = url_close_dyn_buf(ctx->pb, &c->pb_buffer);
+                    c->cur_frame_bytes = len;
+                    c->buffer_ptr = c->pb_buffer;
+                    c->buffer_end = c->pb_buffer + len;
+
+                    codec->frame_number++;
+                    if (len == 0) {
+                        av_free_packet(&pkt);
+                        goto redo;
                     }
-                    av_free_packet(&pkt);
                 }
+                av_free_packet(&pkt);
             }
         }
         break;
@@ -2206,6 +2202,7 @@ static int http_prepare_data(HTTPContext *c)
             /* XXX: potential leak */
             return -1;
         }
+        c->fmt_ctx.pb->is_streamed = 1;
         av_write_trailer(ctx);
         len = url_close_dyn_buf(ctx->pb, &c->pb_buffer);
         c->buffer_ptr = c->pb_buffer;
@@ -2352,8 +2349,10 @@ static int http_start_receive_data(HTTPContext *c)
 
     /* open feed */
     fd = open(c->stream->feed_filename, O_RDWR);
-    if (fd < 0)
+    if (fd < 0) {
+        http_log("Error opening feeder file: %s\n", strerror(errno));
         return -1;
+    }
     c->feed_fd = fd;
 
     c->stream->feed_write_index = ffm_read_write_index(fd);
@@ -2407,7 +2406,10 @@ static int http_receive_data(HTTPContext *c)
             //            printf("writing pos=0x%"PRIx64" size=0x%"PRIx64"\n", feed->feed_write_index, feed->feed_size);
             /* XXX: use llseek or url_seek */
             lseek(c->feed_fd, feed->feed_write_index, SEEK_SET);
-            write(c->feed_fd, c->buffer, FFM_PACKET_SIZE);
+            if (write(c->feed_fd, c->buffer, FFM_PACKET_SIZE) < 0) {
+                http_log("Error writing to feed file: %s\n", strerror(errno));
+                goto fail;
+            }
 
             feed->feed_write_index += FFM_PACKET_SIZE;
             /* update file size */
@@ -2472,6 +2474,12 @@ static int http_receive_data(HTTPContext *c)
  fail:
     c->stream->feed_opened = 0;
     close(c->feed_fd);
+    /* wake up any waiting connections to stop waiting for feed */
+    for(c1 = first_http_ctx; c1 != NULL; c1 = c1->next) {
+        if (c1->state == HTTPSTATE_WAIT_FEED &&
+            c1->stream->feed == c->stream->feed)
+            c1->state = HTTPSTATE_SEND_DATA_TRAILER;
+    }
     return -1;
 }
 
@@ -3322,7 +3330,7 @@ static void build_file_streams(void)
                 /* find all the AVStreams inside and reference them in
                    'stream' */
                 if (av_find_stream_info(infile) < 0) {
-                    http_log("Could not find codec parameters from '%s'",
+                    http_log("Could not find codec parameters from '%s'\n",
                              stream->feed_filename);
                     av_close_input_file(infile);
                     goto fail;
@@ -4056,8 +4064,14 @@ static int parse_ffconfig(const char *filename)
         } else if (!strcasecmp(cmd, "VideoFrameRate")) {
             get_arg(arg, sizeof(arg), &p);
             if (stream) {
-                video_enc.time_base.num= DEFAULT_FRAME_RATE_BASE;
-                video_enc.time_base.den = (int)(strtod(arg, NULL) * video_enc.time_base.num);
+                AVRational frame_rate;
+                if (av_parse_video_frame_rate(&frame_rate, arg) < 0) {
+                    fprintf(stderr, "Incorrect frame rate\n");
+                    errors++;
+                } else {
+                    video_enc.time_base.num = frame_rate.den;
+                    video_enc.time_base.den = frame_rate.num;
+                }
             }
         } else if (!strcasecmp(cmd, "VideoGopSize")) {
             get_arg(arg, sizeof(arg), &p);
@@ -4233,20 +4247,21 @@ static int parse_ffconfig(const char *filename)
                 fprintf(stderr, "%s:%d: No corresponding <Stream> for </Stream>\n",
                         filename, line_num);
                 errors++;
-            }
-            if (stream->feed && stream->fmt && strcmp(stream->fmt->name, "ffm") != 0) {
-                if (audio_id != CODEC_ID_NONE) {
-                    audio_enc.codec_type = CODEC_TYPE_AUDIO;
-                    audio_enc.codec_id = audio_id;
-                    add_codec(stream, &audio_enc);
-                }
-                if (video_id != CODEC_ID_NONE) {
-                    video_enc.codec_type = CODEC_TYPE_VIDEO;
-                    video_enc.codec_id = video_id;
-                    add_codec(stream, &video_enc);
+            } else {
+                if (stream->feed && stream->fmt && strcmp(stream->fmt->name, "ffm") != 0) {
+                    if (audio_id != CODEC_ID_NONE) {
+                        audio_enc.codec_type = CODEC_TYPE_AUDIO;
+                        audio_enc.codec_id = audio_id;
+                        add_codec(stream, &audio_enc);
+                    }
+                    if (video_id != CODEC_ID_NONE) {
+                        video_enc.codec_type = CODEC_TYPE_VIDEO;
+                        video_enc.codec_id = video_id;
+                        add_codec(stream, &video_enc);
+                    }
                 }
+                stream = NULL;
             }
-            stream = NULL;
         } else if (!strcasecmp(cmd, "<Redirect")) {
             /*********************************************/
             char *q;
@@ -4273,13 +4288,14 @@ static int parse_ffconfig(const char *filename)
                 fprintf(stderr, "%s:%d: No corresponding <Redirect> for </Redirect>\n",
                         filename, line_num);
                 errors++;
+            } else {
+                if (!redirect->feed_filename[0]) {
+                    fprintf(stderr, "%s:%d: No URL found for <Redirect>\n",
+                            filename, line_num);
+                    errors++;
+                }
+                redirect = NULL;
             }
-            if (!redirect->feed_filename[0]) {
-                fprintf(stderr, "%s:%d: No URL found for <Redirect>\n",
-                        filename, line_num);
-                errors++;
-            }
-            redirect = NULL;
         } else if (!strcasecmp(cmd, "LoadModule")) {
             get_arg(arg, sizeof(arg), &p);
 #ifdef HAVE_DLOPEN
@@ -4359,7 +4375,7 @@ int main(int argc, char **argv)
 
     av_register_all();
 
-    show_banner(program_name, program_birth_year);
+    show_banner();
 
     config_filename = "/etc/ffserver.conf";