about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorMichael Niedermayer <michaelni@gmx.at>2011-10-25 01:03:10 +0200
committerMichael Niedermayer <michaelni@gmx.at>2011-10-25 01:03:20 +0200
commit61b1805f73f8ab2214ad2ae6ba6bb8c7d84aca91 (patch)
tree7292a6db287dccc21c0b2808d5ee45b21cccaf0a
parentf0a7b67a356def037d26f9460b22c86d82be2b89 (diff)
parentd54af906a4e6fcc5e5cac456f5d5912bdb292a67 (diff)
downloadffmpeg-61b1805f73f8ab2214ad2ae6ba6bb8c7d84aca91.tar.gz
Merge remote-tracking branch 'cus/stable'
* cus/stable:
  ffplay: add frame drop statistics
  ffplay: consider estimated time of filter in early frame drop
  ffplay: reimplement early frame drop

Merged-by: Michael Niedermayer <michaelni@gmx.at>
-rw-r--r--ffplay.c70
1 file changed, 60 insertions, 10 deletions
diff --git a/ffplay.c b/ffplay.c
index c998b5d446..5a32d4e5dc 100644
--- a/ffplay.c
+++ b/ffplay.c
@@ -172,6 +172,8 @@ typedef struct VideoState {
struct SwrContext *swr_ctx;
double audio_current_pts;
double audio_current_pts_drift;
+ int frame_drops_early;
+ int frame_drops_late;
enum ShowMode {
SHOW_MODE_NONE = -1, SHOW_MODE_VIDEO = 0, SHOW_MODE_WAVES, SHOW_MODE_RDFT, SHOW_MODE_NB
@@ -197,6 +199,10 @@ typedef struct VideoState {
double frame_timer;
double frame_last_pts;
double frame_last_duration;
+ double frame_last_dropped_pts;
+ double frame_last_returned_time;
+ double frame_last_filter_delay;
+ int64_t frame_last_dropped_pos;
double video_clock; ///<pts of last decoded frame / predicted pts of next decoded frame
int video_stream;
AVStream *video_st;
@@ -1102,20 +1108,35 @@ static void pictq_next_picture(VideoState *is) {
SDL_UnlockMutex(is->pictq_mutex);
}
+static void update_video_pts(VideoState *is, double pts, int64_t pos) {
+ double time = av_gettime() / 1000000.0;
+ /* update current video pts */
+ is->video_current_pts = pts;
+ is->video_current_pts_drift = is->video_current_pts - time;
+ is->video_current_pos = pos;
+ is->frame_last_pts = pts;
+}
+
/* called to display each frame */
static void video_refresh(void *opaque)
{
VideoState *is = opaque;
VideoPicture *vp;
+ double time;
SubPicture *sp, *sp2;
if (is->video_st) {
retry:
if (is->pictq_size == 0) {
+ SDL_LockMutex(is->pictq_mutex);
+ if (is->frame_last_dropped_pts != AV_NOPTS_VALUE && is->frame_last_dropped_pts > is->frame_last_pts) {
+ update_video_pts(is, is->frame_last_dropped_pts, is->frame_last_dropped_pos);
+ is->frame_last_dropped_pts = AV_NOPTS_VALUE;
+ }
+ SDL_UnlockMutex(is->pictq_mutex);
//nothing to do, no picture to display in the que
} else {
- double time= av_gettime()/1000000.0;
double last_duration, duration, delay;
/* dequeue the picture */
vp = &is->pictq[is->pictq_rindex];
@@ -1133,17 +1154,16 @@ retry:
}
delay = compute_target_delay(is->frame_last_duration, is);
+ time= av_gettime()/1000000.0;
if(time < is->frame_timer + delay)
return;
- is->frame_last_pts = vp->pts;
if (delay > 0)
is->frame_timer += delay * FFMAX(1, floor((time-is->frame_timer) / delay));
- /* update current video pts */
- is->video_current_pts = vp->pts;
- is->video_current_pts_drift = is->video_current_pts - time;
- is->video_current_pos = vp->pos;
+ SDL_LockMutex(is->pictq_mutex);
+ update_video_pts(is, vp->pts, vp->pos);
+ SDL_UnlockMutex(is->pictq_mutex);
if(is->pictq_size > 1) {
VideoPicture *nextvp= &is->pictq[(is->pictq_rindex+1)%VIDEO_PICTURE_QUEUE_SIZE];
@@ -1154,6 +1174,7 @@ retry:
if((framedrop>0 || (framedrop && is->audio_st)) && time > is->frame_timer + duration){
if(is->pictq_size > 1){
+ is->frame_drops_late++;
pictq_next_picture(is);
goto retry;
}
@@ -1239,9 +1260,10 @@ retry:
av_diff = 0;
if (is->audio_st && is->video_st)
av_diff = get_audio_clock(is) - get_video_clock(is);
- printf("%7.2f A-V:%7.3f aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r",
+ printf("%7.2f A-V:%7.3f fd=%4d aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r",
get_master_clock(is),
av_diff,
+ is->frame_drops_early + is->frame_drops_late,
aqsize / 1024,
vqsize / 1024,
sqsize,
@@ -1448,17 +1470,20 @@ static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacke
SDL_CondWait(is->pictq_cond, is->pictq_mutex);
}
is->video_current_pos = -1;
- SDL_UnlockMutex(is->pictq_mutex);
-
is->frame_last_pts = AV_NOPTS_VALUE;
is->frame_last_duration = 0;
is->frame_timer = (double)av_gettime() / 1000000.0;
+ is->frame_last_dropped_pts = AV_NOPTS_VALUE;
+ SDL_UnlockMutex(is->pictq_mutex);
+
return 0;
}
avcodec_decode_video2(is->video_st->codec, frame, &got_picture, pkt);
if (got_picture) {
+ int ret = 1;
+
if (decoder_reorder_pts == -1) {
*pts = frame->best_effort_timestamp;
} else if (decoder_reorder_pts) {
@@ -1471,8 +1496,29 @@ static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacke
*pts = 0;
}
- return 1;
+ if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) || is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK) &&
+ (framedrop>0 || (framedrop && is->audio_st))) {
+ SDL_LockMutex(is->pictq_mutex);
+ if (is->frame_last_pts != AV_NOPTS_VALUE && *pts) {
+ double clockdiff = get_video_clock(is) - get_master_clock(is);
+ double dpts = av_q2d(is->video_st->time_base) * *pts;
+ double ptsdiff = dpts - is->frame_last_pts;
+ if (fabs(clockdiff) < AV_NOSYNC_THRESHOLD &&
+ ptsdiff > 0 && ptsdiff < AV_NOSYNC_THRESHOLD &&
+ clockdiff + ptsdiff - is->frame_last_filter_delay < 0) {
+ is->frame_last_dropped_pos = pkt->pos;
+ is->frame_last_dropped_pts = dpts;
+ is->frame_drops_early++;
+ ret = 0;
+ }
+ }
+ SDL_UnlockMutex(is->pictq_mutex);
+ }
+ if (ret)
+ is->frame_last_returned_time = av_gettime() / 1000000.0;
+
+ return ret;
}
return 0;
}
@@ -1796,6 +1842,10 @@ static int video_thread(void *arg)
if (ret < 0) goto the_end;
+ is->frame_last_filter_delay = av_gettime() / 1000000.0 - is->frame_last_returned_time;
+ if (fabs(is->frame_last_filter_delay) > AV_NOSYNC_THRESHOLD / 10.0)
+ is->frame_last_filter_delay = 0;
+
#if CONFIG_AVFILTER
if (!picref)
continue;