static int video_disable;
static int wanted_audio_stream= 0;
static int wanted_video_stream= 0;
+static int wanted_subtitle_stream= -1;
static int seek_by_bytes;
static int display_disable;
static int show_status;
q->last_pkt->next = pkt1;
q->last_pkt = pkt1;
q->nb_packets++;
- q->size += pkt1->pkt.size;
+ q->size += pkt1->pkt.size + sizeof(*pkt1);
/* XXX: should duplicate packet data in DV case */
SDL_CondSignal(q->cond);
if (!q->first_pkt)
q->last_pkt = NULL;
q->nb_packets--;
- q->size -= pkt1->pkt.size;
+ q->size -= pkt1->pkt.size + sizeof(*pkt1);
*pkt = pkt1->pkt;
av_free(pkt1);
ret = 1;
cb = dst->data[1] + (dsty >> 1) * dst->linesize[1];
cr = dst->data[2] + (dsty >> 1) * dst->linesize[2];
- width2 = (dstw + 1) >> 1;
+ width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
skip2 = dstx >> 1;
wrap = dst->linesize[0];
wrap3 = rect->pict.linesize[0];
lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
+ p++;
+ lum++;
}
- p += wrap3 + (wrap3 - dstw * BPP);
- lum += wrap + (wrap - dstw - dstx);
+ p += wrap3 - dstw * BPP;
+ lum += wrap - dstw - dstx;
cb += dst->linesize[1] - width2 - skip2;
cr += dst->linesize[2] - width2 - skip2;
}
a1 = a;
lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
- YUVA_IN(y, u, v, a, p, pal);
+ YUVA_IN(y, u, v, a, p + BPP, pal);
u1 += u;
v1 += v;
a1 += a;
a1 += a;
lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
- YUVA_IN(y, u, v, a, p, pal);
+ YUVA_IN(y, u, v, a, p + BPP, pal);
u1 += u;
v1 += v;
a1 += a;
}
}
+/* Compute the real delay, in seconds, to wait before displaying the
+   frame whose timestamp is frame_current_pts.  As a side effect this
+   updates the frame timing state in *is (frame_last_pts,
+   frame_last_delay, frame_timer).  When video is slaved to the audio
+   or external master clock, the nominal delay is dropped to 0 or
+   doubled to resynchronise.  The result is clamped to >= 10 ms. */
+static double compute_frame_delay(double frame_current_pts, VideoState *is)
+{
+ double actual_delay, delay, sync_threshold, ref_clock, diff;
+
+ /* compute nominal delay from the pts step since the last frame */
+ delay = frame_current_pts - is->frame_last_pts;
+ if (delay <= 0 || delay >= 10.0) {
+ /* if incorrect delay, use previous one */
+ delay = is->frame_last_delay;
+ } else {
+ is->frame_last_delay = delay;
+ }
+ is->frame_last_pts = frame_current_pts;
+
+ /* update delay to follow master synchronisation source */
+ if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
+ is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
+ /* if video is slave, we try to correct big delays by
+ duplicating or deleting a frame */
+ ref_clock = get_master_clock(is);
+ diff = frame_current_pts - ref_clock;
+
+ /* skip or repeat frame. We take into account the
+ delay to compute the threshold. I still don't know
+ if it is the best guess */
+ sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
+ if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
+ if (diff <= -sync_threshold)
+ delay = 0;
+ else if (diff >= sync_threshold)
+ delay = 2 * delay;
+ }
+ }
+
+ is->frame_timer += delay;
+ /* compute the REAL delay (we need to do that to avoid
+ long term errors) */
+ actual_delay = is->frame_timer - (av_gettime() / 1000000.0);
+ if (actual_delay < 0.010) {
+ /* XXX: should skip picture */
+ actual_delay = 0.010;
+ }
+
+#if defined(DEBUG_SYNC)
+ printf("video: delay=%0.3f actual_delay=%0.3f pts=%0.3f A-V=%f\n",
+ delay, actual_delay, frame_current_pts, -diff);
+#endif
+
+ return actual_delay;
+}
+
/* called to display each frame */
static void video_refresh_timer(void *opaque)
{
VideoState *is = opaque;
VideoPicture *vp;
- double actual_delay, delay, sync_threshold, ref_clock, diff;
SubPicture *sp, *sp2;
is->video_current_pts = vp->pts;
is->video_current_pts_time = av_gettime();
- /* compute nominal delay */
- delay = vp->pts - is->frame_last_pts;
- if (delay <= 0 || delay >= 10.0) {
- /* if incorrect delay, use previous one */
- delay = is->frame_last_delay;
- }
- is->frame_last_delay = delay;
- is->frame_last_pts = vp->pts;
-
- /* update delay to follow master synchronisation source */
- if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
- is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
- /* if video is slave, we try to correct big delays by
- duplicating or deleting a frame */
- ref_clock = get_master_clock(is);
- diff = vp->pts - ref_clock;
-
- /* skip or repeat frame. We take into account the
- delay to compute the threshold. I still don't know
- if it is the best guess */
- sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
- if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
- if (diff <= -sync_threshold)
- delay = 0;
- else if (diff >= sync_threshold)
- delay = 2 * delay;
- }
- }
-
- is->frame_timer += delay;
- /* compute the REAL delay (we need to do that to avoid
- long term errors */
- actual_delay = is->frame_timer - (av_gettime() / 1000000.0);
- if (actual_delay < 0.010) {
- /* XXX: should skip picture */
- actual_delay = 0.010;
- }
/* launch timer for next picture */
- schedule_refresh(is, (int)(actual_delay * 1000 + 0.5));
-
-#if defined(DEBUG_SYNC)
- printf("video: delay=%0.3f actual_delay=%0.3f pts=%0.3f A-V=%f\n",
- delay, actual_delay, vp->pts, -diff);
-#endif
+ schedule_refresh(is, (int)(compute_frame_delay(vp->pts, is) * 1000 + 0.5));
if(is->subtitle_st) {
if (is->subtitle_stream_changed) {
{
VideoState *is = arg;
AVFormatContext *ic;
- int err, i, ret, video_index, audio_index;
+ int err, i, ret, video_index, audio_index, subtitle_index;
AVPacket pkt1, *pkt = &pkt1;
AVFormatParameters params, *ap = ¶ms;
video_index = -1;
audio_index = -1;
+ subtitle_index = -1;
is->video_stream = -1;
is->audio_stream = -1;
is->subtitle_stream = -1;
if ((video_index < 0 || wanted_video_stream-- > 0) && !video_disable)
video_index = i;
break;
+ case CODEC_TYPE_SUBTITLE:
+ if (wanted_subtitle_stream >= 0 && !video_disable &&
+ (subtitle_index < 0 || wanted_subtitle_stream-- > 0))
+ subtitle_index = i;
+ break;
default:
break;
}
is->show_audio = 1;
}
+ if (subtitle_index >= 0) {
+ stream_component_open(is, subtitle_index);
+ }
+
if (is->video_stream < 0 && is->audio_stream < 0) {
fprintf(stderr, "%s: could not open codecs\n", is->filename);
ret = -1;
else
av_read_play(ic);
}
-#if defined(CONFIG_RTSP_DEMUXER) || defined(CONFIG_MMSH_PROTOCOL)
- if (is->paused &&
- (!strcmp(ic->iformat->name, "rtsp") ||
- (ic->pb && !strcmp(url_fileno(ic->pb)->prot->name, "mmsh")))) {
+#if CONFIG_RTSP_DEMUXER
+ if (is->paused && !strcmp(ic->iformat->name, "rtsp")) {
/* wait 10 ms to avoid trying to get another packet */
/* XXX: horrible */
SDL_Delay(10);
}
ret = av_read_frame(ic, pkt);
if (ret < 0) {
- if (url_ferror(ic->pb) == 0) {
+ if (ret != AVERROR_EOF && url_ferror(ic->pb) == 0) {
SDL_Delay(100); /* wait for user event */
continue;
} else
static int opt_thread_count(const char *opt, const char *arg)
{
thread_count= parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
-#if !defined(HAVE_THREADS)
+#if !HAVE_THREADS
fprintf(stderr, "Warning: not compiled with thread support, using thread emulation\n");
#endif
return 0;
{ "vn", OPT_BOOL, {(void*)&video_disable}, "disable video" },
{ "ast", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_audio_stream}, "", "" },
{ "vst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_video_stream}, "", "" },
+ { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_subtitle_stream}, "", "" },
{ "ss", HAS_ARG | OPT_FUNC2, {(void*)&opt_seek}, "seek to a given position in seconds", "pos" },
{ "bytes", OPT_BOOL, {(void*)&seek_by_bytes}, "seek by bytes" },
{ "nodisp", OPT_BOOL, {(void*)&display_disable}, "disable graphical display" },
for(i=0; i<CODEC_TYPE_NB; i++){
avctx_opts[i]= avcodec_alloc_context2(i);
}
- avformat_opts = av_alloc_format_context();
+ avformat_opts = avformat_alloc_context();
sws_opts = sws_getContext(16,16,0, 16,16,0, sws_flags, NULL,NULL,NULL);
show_banner();
}
if (!display_disable) {
-#ifdef HAVE_SDL_VIDEO_SIZE
+#if HAVE_SDL_VIDEO_SIZE
const SDL_VideoInfo *vi = SDL_GetVideoInfo();
fs_screen_width = vi->current_w;
fs_screen_height = vi->current_h;