X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=ffplay.c;h=9ce13d71fbdec8effd5749d73175c166d4e9b6f6;hb=a0124b89e393812ff273ffc2f7e0f044c66a287c;hp=adeae505f3c47ac2385fcfe0b532ea6aaafee682;hpb=f1db984288f21a1ecdff0b5702630456c089ce36;p=ffmpeg

diff --git a/ffplay.c b/ffplay.c
index adeae505f3c..9ce13d71fbd 100644
--- a/ffplay.c
+++ b/ffplay.c
@@ -32,6 +32,7 @@
 
 #include "libavutil/avstring.h"
 #include "libavutil/colorspace.h"
+#include "libavutil/eval.h"
 #include "libavutil/mathematics.h"
 #include "libavutil/pixdesc.h"
 #include "libavutil/imgutils.h"
@@ -1531,7 +1532,7 @@ static void step_to_next_frame(VideoState *is)
 
 static double compute_target_delay(double delay, VideoState *is)
 {
-    double sync_threshold, diff;
+    double sync_threshold, diff = 0;
 
     /* update delay to follow master synchronisation source */
     if (get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER) {
@@ -1995,20 +1996,20 @@ static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const c
 
 /* Note: this macro adds a filter before the lastly added filter, so the
  * processing order of the filters is in reverse */
-#define INSERT_FILT(name, arg) do { \
-    AVFilterContext *filt_ctx; \
-    \
-    ret = avfilter_graph_create_filter(&filt_ctx, \
-                                       avfilter_get_by_name(name), \
-                                       "ffplay_" name, arg, NULL, graph); \
-    if (ret < 0) \
-        goto fail; \
-    \
-    ret = avfilter_link(filt_ctx, 0, last_filter, 0); \
-    if (ret < 0) \
-        goto fail; \
-    \
-    last_filter = filt_ctx; \
+#define INSERT_FILT(name, arg) do {                                          \
+    AVFilterContext *filt_ctx;                                               \
+                                                                             \
+    ret = avfilter_graph_create_filter(&filt_ctx,                            \
+                                       avfilter_get_by_name(name),           \
+                                       "ffplay_" name, arg, NULL, graph);    \
+    if (ret < 0)                                                             \
+        goto fail;                                                           \
+                                                                             \
+    ret = avfilter_link(filt_ctx, 0, last_filter, 0);                        \
+    if (ret < 0)                                                             \
+        goto fail;                                                           \
+                                                                             \
+    last_filter = filt_ctx;                                                  \
 } while (0)
 
     /* SDL YUV code is not handling odd width/height for some driver
@@ -2016,20 +2017,19 @@ static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const c
     INSERT_FILT("crop", "floor(in_w/2)*2:floor(in_h/2)*2");
 
     if (autorotate) {
-        AVDictionaryEntry *rotate_tag = av_dict_get(is->video_st->metadata, "rotate", NULL, 0);
-        if (rotate_tag && *rotate_tag->value && strcmp(rotate_tag->value, "0")) {
-            if (!strcmp(rotate_tag->value, "90")) {
-                INSERT_FILT("transpose", "clock");
-            } else if (!strcmp(rotate_tag->value, "180")) {
-                INSERT_FILT("hflip", NULL);
-                INSERT_FILT("vflip", NULL);
-            } else if (!strcmp(rotate_tag->value, "270")) {
-                INSERT_FILT("transpose", "cclock");
-            } else {
-                char rotate_buf[64];
-                snprintf(rotate_buf, sizeof(rotate_buf), "%s*PI/180", rotate_tag->value);
-                INSERT_FILT("rotate", rotate_buf);
-            }
+        double theta = get_rotation(is->video_st);
+
+        if (fabs(theta - 90) < 1.0) {
+            INSERT_FILT("transpose", "clock");
+        } else if (fabs(theta - 180) < 1.0) {
+            INSERT_FILT("hflip", NULL);
+            INSERT_FILT("vflip", NULL);
+        } else if (fabs(theta - 270) < 1.0) {
+            INSERT_FILT("transpose", "cclock");
+        } else if (fabs(theta) > 1.0) {
+            char rotate_buf[64];
+            snprintf(rotate_buf, sizeof(rotate_buf), "%f*PI/180", theta);
+            INSERT_FILT("rotate", rotate_buf);
         }
     }
 
@@ -2231,13 +2231,19 @@ static int video_thread(void *arg)
     enum AVPixelFormat last_format = -2;
     int last_serial = -1;
    int last_vfilter_idx = 0;
-    if (!graph)
+    if (!graph) {
+        av_frame_free(&frame);
         return AVERROR(ENOMEM);
+    }
 #endif
 
-    if (!frame)
+    if (!frame) {
+#if CONFIG_AVFILTER
+        avfilter_graph_free(&graph);
+#endif
         return AVERROR(ENOMEM);
+    }
 
     for (;;) {
         ret = get_video_frame(is, frame);
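
Note (not part of the patch): the new autorotate branch above relies on get_rotation(), which is defined in cmdutils.c rather than in ffplay.c, so its body does not appear in this diff. The sketch below is only a rough illustration of what such a helper does, assuming the usual libavformat/libavutil calls (av_dict_get, av_stream_get_side_data, av_display_rotation_get); it is not claimed to match the upstream implementation line for line. The function name get_rotation_sketch is hypothetical.

#include <math.h>
#include <stdlib.h>
#include <string.h>

#include "libavformat/avformat.h"
#include "libavutil/display.h"

/* Illustrative sketch of a get_rotation()-style helper (not the upstream code). */
static double get_rotation_sketch(AVStream *st)
{
    AVDictionaryEntry *rotate_tag    = av_dict_get(st->metadata, "rotate", NULL, 0);
    uint8_t           *displaymatrix = av_stream_get_side_data(st, AV_PKT_DATA_DISPLAYMATRIX, NULL);
    double theta = 0;

    /* Prefer the container-level "rotate" tag (an angle in degrees, stored as a string). */
    if (rotate_tag && *rotate_tag->value && strcmp(rotate_tag->value, "0"))
        theta = atof(rotate_tag->value);

    /* Fall back to the display matrix side data if no usable tag was found. */
    if (displaymatrix && !theta)
        theta = -av_display_rotation_get((int32_t *)displaymatrix);

    /* Fold the angle into the 0..360 range so comparisons against 90/180/270 work. */
    theta -= 360 * floor(theta / 360 + 0.9 / 360);

    return theta;
}

With a helper of that shape, a stream marked as rotated by 90 degrees ends up in the transpose=clock branch of the hunk above, while angles that are not close to a multiple of 90 fall through to the generic rotate filter.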