/*
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * simple media player based on the FFmpeg libraries
 */

#include "config.h"
#include <inttypes.h>
#include <math.h>
#include <limits.h>
#include <signal.h>
#include "libavutil/avstring.h"
#include "libavutil/colorspace.h"
#include "libavutil/mathematics.h"
#include "libavutil/pixdesc.h"
#include "libavutil/imgutils.h"
#include "libavutil/dict.h"
#include "libavutil/parseutils.h"
#include "libavutil/samplefmt.h"
#include "libavutil/avassert.h"
#include "libavutil/time.h"
#include "libavformat/avformat.h"
#include "libavdevice/avdevice.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
#include "libavcodec/avfft.h"
#include "libswresample/swresample.h"

#if CONFIG_AVFILTER
# include "libavfilter/avcodec.h"
# include "libavfilter/avfilter.h"
# include "libavfilter/buffersink.h"
# include "libavfilter/buffersrc.h"
#endif

#include <SDL.h>
#include <SDL_thread.h>

#include "cmdutils.h"

#include <assert.h>

const char program_name[] = "ffplay";
const int program_birth_year = 2003;

#define MAX_QUEUE_SIZE (15 * 1024 * 1024)
#define MIN_FRAMES 5

/* SDL audio buffer size, in samples. Should be small to have precise
   A/V sync as SDL does not have hardware buffer fullness info.
*/ yading@10: #define SDL_AUDIO_BUFFER_SIZE 1024 yading@10: yading@10: /* no AV sync correction is done if below the AV sync threshold */ yading@10: #define AV_SYNC_THRESHOLD 0.01 yading@10: /* no AV correction is done if too big error */ yading@10: #define AV_NOSYNC_THRESHOLD 10.0 yading@10: yading@10: /* maximum audio speed change to get correct sync */ yading@10: #define SAMPLE_CORRECTION_PERCENT_MAX 10 yading@10: yading@10: /* external clock speed adjustment constants for realtime sources based on buffer fullness */ yading@10: #define EXTERNAL_CLOCK_SPEED_MIN 0.900 yading@10: #define EXTERNAL_CLOCK_SPEED_MAX 1.010 yading@10: #define EXTERNAL_CLOCK_SPEED_STEP 0.001 yading@10: yading@10: /* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */ yading@10: #define AUDIO_DIFF_AVG_NB 20 yading@10: yading@10: /* polls for possible required screen refresh at least this often, should be less than 1/fps */ yading@10: #define REFRESH_RATE 0.01 yading@10: yading@10: /* NOTE: the size must be big enough to compensate the hardware audio buffersize size */ yading@10: /* TODO: We assume that a decoded and resampled frame fits into this buffer */ yading@10: #define SAMPLE_ARRAY_SIZE (8 * 65536) yading@10: yading@10: #define CURSOR_HIDE_DELAY 1000000 yading@10: yading@10: static int64_t sws_flags = SWS_BICUBIC; yading@10: yading@10: typedef struct MyAVPacketList { yading@10: AVPacket pkt; yading@10: struct MyAVPacketList *next; yading@10: int serial; yading@10: } MyAVPacketList; yading@10: yading@10: typedef struct PacketQueue { yading@10: MyAVPacketList *first_pkt, *last_pkt; yading@10: int nb_packets; yading@10: int size; yading@10: int abort_request; yading@10: int serial; yading@10: SDL_mutex *mutex; yading@10: SDL_cond *cond; yading@10: } PacketQueue; yading@10: yading@10: #define VIDEO_PICTURE_QUEUE_SIZE 4 yading@10: #define SUBPICTURE_QUEUE_SIZE 4 yading@10: yading@10: typedef struct VideoPicture { yading@10: double pts; // presentation timestamp for this picture yading@10: int64_t pos; // byte position in file yading@10: SDL_Overlay *bmp; yading@10: int width, height; /* source height & width */ yading@10: int allocated; yading@10: int reallocate; yading@10: int serial; yading@10: yading@10: AVRational sar; yading@10: } VideoPicture; yading@10: yading@10: typedef struct SubPicture { yading@10: double pts; /* presentation time stamp for this picture */ yading@10: AVSubtitle sub; yading@10: } SubPicture; yading@10: yading@10: typedef struct AudioParams { yading@10: int freq; yading@10: int channels; yading@10: int64_t channel_layout; yading@10: enum AVSampleFormat fmt; yading@10: } AudioParams; yading@10: yading@10: enum { yading@10: AV_SYNC_AUDIO_MASTER, /* default choice */ yading@10: AV_SYNC_VIDEO_MASTER, yading@10: AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */ yading@10: }; yading@10: yading@10: typedef struct VideoState { yading@10: SDL_Thread *read_tid; yading@10: SDL_Thread *video_tid; yading@10: AVInputFormat *iformat; yading@10: int no_background; yading@10: int abort_request; yading@10: int force_refresh; yading@10: int paused; yading@10: int last_paused; yading@10: int queue_attachments_req; yading@10: int seek_req; yading@10: int seek_flags; yading@10: int64_t seek_pos; yading@10: int64_t seek_rel; yading@10: int read_pause_return; yading@10: AVFormatContext *ic; yading@10: int realtime; yading@10: yading@10: int audio_stream; yading@10: yading@10: int av_sync_type; yading@10: double external_clock; ///< external clock base yading@10: double 
external_clock_drift; ///< external clock base - time (av_gettime) at which we updated external_clock yading@10: int64_t external_clock_time; ///< last reference time yading@10: double external_clock_speed; ///< speed of the external clock yading@10: yading@10: double audio_clock; yading@10: int audio_clock_serial; yading@10: double audio_diff_cum; /* used for AV difference average computation */ yading@10: double audio_diff_avg_coef; yading@10: double audio_diff_threshold; yading@10: int audio_diff_avg_count; yading@10: AVStream *audio_st; yading@10: PacketQueue audioq; yading@10: int audio_hw_buf_size; yading@10: uint8_t silence_buf[SDL_AUDIO_BUFFER_SIZE]; yading@10: uint8_t *audio_buf; yading@10: uint8_t *audio_buf1; yading@10: unsigned int audio_buf_size; /* in bytes */ yading@10: unsigned int audio_buf1_size; yading@10: int audio_buf_index; /* in bytes */ yading@10: int audio_write_buf_size; yading@10: int audio_buf_frames_pending; yading@10: AVPacket audio_pkt_temp; yading@10: AVPacket audio_pkt; yading@10: int audio_pkt_temp_serial; yading@10: int audio_last_serial; yading@10: struct AudioParams audio_src; yading@10: #if CONFIG_AVFILTER yading@10: struct AudioParams audio_filter_src; yading@10: #endif yading@10: struct AudioParams audio_tgt; yading@10: struct SwrContext *swr_ctx; yading@10: double audio_current_pts; yading@10: double audio_current_pts_drift; yading@10: int frame_drops_early; yading@10: int frame_drops_late; yading@10: AVFrame *frame; yading@10: yading@10: enum ShowMode { yading@10: SHOW_MODE_NONE = -1, SHOW_MODE_VIDEO = 0, SHOW_MODE_WAVES, SHOW_MODE_RDFT, SHOW_MODE_NB yading@10: } show_mode; yading@10: int16_t sample_array[SAMPLE_ARRAY_SIZE]; yading@10: int sample_array_index; yading@10: int last_i_start; yading@10: RDFTContext *rdft; yading@10: int rdft_bits; yading@10: FFTSample *rdft_data; yading@10: int xpos; yading@10: double last_vis_time; yading@10: yading@10: SDL_Thread *subtitle_tid; yading@10: int subtitle_stream; yading@10: int subtitle_stream_changed; yading@10: AVStream *subtitle_st; yading@10: PacketQueue subtitleq; yading@10: SubPicture subpq[SUBPICTURE_QUEUE_SIZE]; yading@10: int subpq_size, subpq_rindex, subpq_windex; yading@10: SDL_mutex *subpq_mutex; yading@10: SDL_cond *subpq_cond; yading@10: yading@10: double frame_timer; yading@10: double frame_last_pts; yading@10: double frame_last_duration; yading@10: double frame_last_dropped_pts; yading@10: double frame_last_returned_time; yading@10: double frame_last_filter_delay; yading@10: int64_t frame_last_dropped_pos; yading@10: int frame_last_dropped_serial; yading@10: int video_stream; yading@10: AVStream *video_st; yading@10: PacketQueue videoq; yading@10: double video_current_pts; // current displayed pts yading@10: double video_current_pts_drift; // video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts yading@10: int64_t video_current_pos; // current displayed file pos yading@10: double max_frame_duration; // maximum duration of a frame - above this, we consider the jump a timestamp discontinuity yading@10: int video_clock_serial; yading@10: VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE]; yading@10: int pictq_size, pictq_rindex, pictq_windex; yading@10: SDL_mutex *pictq_mutex; yading@10: SDL_cond *pictq_cond; yading@10: #if !CONFIG_AVFILTER yading@10: struct SwsContext *img_convert_ctx; yading@10: #endif yading@10: SDL_Rect last_display_rect; yading@10: yading@10: char filename[1024]; yading@10: int width, height, xleft, ytop; yading@10: int 
step; yading@10: yading@10: #if CONFIG_AVFILTER yading@10: AVFilterContext *in_video_filter; // the first filter in the video chain yading@10: AVFilterContext *out_video_filter; // the last filter in the video chain yading@10: AVFilterContext *in_audio_filter; // the first filter in the audio chain yading@10: AVFilterContext *out_audio_filter; // the last filter in the audio chain yading@10: AVFilterGraph *agraph; // audio filter graph yading@10: #endif yading@10: yading@10: int last_video_stream, last_audio_stream, last_subtitle_stream; yading@10: yading@10: SDL_cond *continue_read_thread; yading@10: } VideoState; yading@10: yading@10: /* options specified by the user */ yading@10: static AVInputFormat *file_iformat; yading@10: static const char *input_filename; yading@10: static const char *window_title; yading@10: static int fs_screen_width; yading@10: static int fs_screen_height; yading@10: static int default_width = 640; yading@10: static int default_height = 480; yading@10: static int screen_width = 0; yading@10: static int screen_height = 0; yading@10: static int audio_disable; yading@10: static int video_disable; yading@10: static int subtitle_disable; yading@10: static int wanted_stream[AVMEDIA_TYPE_NB] = { yading@10: [AVMEDIA_TYPE_AUDIO] = -1, yading@10: [AVMEDIA_TYPE_VIDEO] = -1, yading@10: [AVMEDIA_TYPE_SUBTITLE] = -1, yading@10: }; yading@10: static int seek_by_bytes = -1; yading@10: static int display_disable; yading@10: static int show_status = 1; yading@10: static int av_sync_type = AV_SYNC_AUDIO_MASTER; yading@10: static int64_t start_time = AV_NOPTS_VALUE; yading@10: static int64_t duration = AV_NOPTS_VALUE; yading@10: static int workaround_bugs = 1; yading@10: static int fast = 0; yading@10: static int genpts = 0; yading@10: static int lowres = 0; yading@10: static int idct = FF_IDCT_AUTO; yading@10: static int error_concealment = 3; yading@10: static int decoder_reorder_pts = -1; yading@10: static int autoexit; yading@10: static int exit_on_keydown; yading@10: static int exit_on_mousedown; yading@10: static int loop = 1; yading@10: static int framedrop = -1; yading@10: static int infinite_buffer = -1; yading@10: static enum ShowMode show_mode = SHOW_MODE_NONE; yading@10: static const char *audio_codec_name; yading@10: static const char *subtitle_codec_name; yading@10: static const char *video_codec_name; yading@10: double rdftspeed = 0.02; yading@10: static int64_t cursor_last_shown; yading@10: static int cursor_hidden = 0; yading@10: #if CONFIG_AVFILTER yading@10: static char *vfilters = NULL; yading@10: static char *afilters = NULL; yading@10: #endif yading@10: yading@10: /* current context */ yading@10: static int is_full_screen; yading@10: static int64_t audio_callback_time; yading@10: yading@10: static AVPacket flush_pkt; yading@10: yading@10: #define FF_ALLOC_EVENT (SDL_USEREVENT) yading@10: #define FF_QUIT_EVENT (SDL_USEREVENT + 2) yading@10: yading@10: static SDL_Surface *screen; yading@10: yading@10: static inline yading@10: int cmp_audio_fmts(enum AVSampleFormat fmt1, int64_t channel_count1, yading@10: enum AVSampleFormat fmt2, int64_t channel_count2) yading@10: { yading@10: /* If channel count == 1, planar and non-planar formats are the same */ yading@10: if (channel_count1 == 1 && channel_count2 == 1) yading@10: return av_get_packed_sample_fmt(fmt1) != av_get_packed_sample_fmt(fmt2); yading@10: else yading@10: return channel_count1 != channel_count2 || fmt1 != fmt2; yading@10: } yading@10: yading@10: static inline yading@10: int64_t 
get_valid_channel_layout(int64_t channel_layout, int channels) yading@10: { yading@10: if (channel_layout && av_get_channel_layout_nb_channels(channel_layout) == channels) yading@10: return channel_layout; yading@10: else yading@10: return 0; yading@10: } yading@10: yading@10: static int packet_queue_put(PacketQueue *q, AVPacket *pkt); yading@10: yading@10: static int packet_queue_put_private(PacketQueue *q, AVPacket *pkt) yading@10: { yading@10: MyAVPacketList *pkt1; yading@10: yading@10: if (q->abort_request) yading@10: return -1; yading@10: yading@10: pkt1 = av_malloc(sizeof(MyAVPacketList)); yading@10: if (!pkt1) yading@10: return -1; yading@10: pkt1->pkt = *pkt; yading@10: pkt1->next = NULL; yading@10: if (pkt == &flush_pkt) yading@10: q->serial++; yading@10: pkt1->serial = q->serial; yading@10: yading@10: if (!q->last_pkt) yading@10: q->first_pkt = pkt1; yading@10: else yading@10: q->last_pkt->next = pkt1; yading@10: q->last_pkt = pkt1; yading@10: q->nb_packets++; yading@10: q->size += pkt1->pkt.size + sizeof(*pkt1); yading@10: /* XXX: should duplicate packet data in DV case */ yading@10: SDL_CondSignal(q->cond); yading@10: return 0; yading@10: } yading@10: yading@10: static int packet_queue_put(PacketQueue *q, AVPacket *pkt) yading@10: { yading@10: int ret; yading@10: yading@10: /* duplicate the packet */ yading@10: if (pkt != &flush_pkt && av_dup_packet(pkt) < 0) yading@10: return -1; yading@10: yading@10: SDL_LockMutex(q->mutex); yading@10: ret = packet_queue_put_private(q, pkt); yading@10: SDL_UnlockMutex(q->mutex); yading@10: yading@10: if (pkt != &flush_pkt && ret < 0) yading@10: av_free_packet(pkt); yading@10: yading@10: return ret; yading@10: } yading@10: yading@10: /* packet queue handling */ yading@10: static void packet_queue_init(PacketQueue *q) yading@10: { yading@10: memset(q, 0, sizeof(PacketQueue)); yading@10: q->mutex = SDL_CreateMutex(); yading@10: q->cond = SDL_CreateCond(); yading@10: q->abort_request = 1; yading@10: } yading@10: yading@10: static void packet_queue_flush(PacketQueue *q) yading@10: { yading@10: MyAVPacketList *pkt, *pkt1; yading@10: yading@10: SDL_LockMutex(q->mutex); yading@10: for (pkt = q->first_pkt; pkt != NULL; pkt = pkt1) { yading@10: pkt1 = pkt->next; yading@10: av_free_packet(&pkt->pkt); yading@10: av_freep(&pkt); yading@10: } yading@10: q->last_pkt = NULL; yading@10: q->first_pkt = NULL; yading@10: q->nb_packets = 0; yading@10: q->size = 0; yading@10: SDL_UnlockMutex(q->mutex); yading@10: } yading@10: yading@10: static void packet_queue_destroy(PacketQueue *q) yading@10: { yading@10: packet_queue_flush(q); yading@10: SDL_DestroyMutex(q->mutex); yading@10: SDL_DestroyCond(q->cond); yading@10: } yading@10: yading@10: static void packet_queue_abort(PacketQueue *q) yading@10: { yading@10: SDL_LockMutex(q->mutex); yading@10: yading@10: q->abort_request = 1; yading@10: yading@10: SDL_CondSignal(q->cond); yading@10: yading@10: SDL_UnlockMutex(q->mutex); yading@10: } yading@10: yading@10: static void packet_queue_start(PacketQueue *q) yading@10: { yading@10: SDL_LockMutex(q->mutex); yading@10: q->abort_request = 0; yading@10: packet_queue_put_private(q, &flush_pkt); yading@10: SDL_UnlockMutex(q->mutex); yading@10: } yading@10: yading@10: /* return < 0 if aborted, 0 if no packet and > 0 if packet. 
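   The queue must first be started with packet_queue_start() (which queues
   flush_pkt and bumps the serial); the packet is returned together with the
   serial it was queued under, so the consumer can recognize data that
   predates the last flush. Typical consumer loop (a minimal usage sketch,
   error handling omitted):

       AVPacket pkt;
       int serial;
       if (packet_queue_get(&q, &pkt, 1, &serial) > 0) {
           // decode pkt here; compare serial with q.serial to spot stale data
           av_free_packet(&pkt);
       }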
*/ yading@10: static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block, int *serial) yading@10: { yading@10: MyAVPacketList *pkt1; yading@10: int ret; yading@10: yading@10: SDL_LockMutex(q->mutex); yading@10: yading@10: for (;;) { yading@10: if (q->abort_request) { yading@10: ret = -1; yading@10: break; yading@10: } yading@10: yading@10: pkt1 = q->first_pkt; yading@10: if (pkt1) { yading@10: q->first_pkt = pkt1->next; yading@10: if (!q->first_pkt) yading@10: q->last_pkt = NULL; yading@10: q->nb_packets--; yading@10: q->size -= pkt1->pkt.size + sizeof(*pkt1); yading@10: *pkt = pkt1->pkt; yading@10: if (serial) yading@10: *serial = pkt1->serial; yading@10: av_free(pkt1); yading@10: ret = 1; yading@10: break; yading@10: } else if (!block) { yading@10: ret = 0; yading@10: break; yading@10: } else { yading@10: SDL_CondWait(q->cond, q->mutex); yading@10: } yading@10: } yading@10: SDL_UnlockMutex(q->mutex); yading@10: return ret; yading@10: } yading@10: yading@10: static inline void fill_rectangle(SDL_Surface *screen, yading@10: int x, int y, int w, int h, int color, int update) yading@10: { yading@10: SDL_Rect rect; yading@10: rect.x = x; yading@10: rect.y = y; yading@10: rect.w = w; yading@10: rect.h = h; yading@10: SDL_FillRect(screen, &rect, color); yading@10: if (update && w > 0 && h > 0) yading@10: SDL_UpdateRect(screen, x, y, w, h); yading@10: } yading@10: yading@10: /* draw only the border of a rectangle */ yading@10: static void fill_border(int xleft, int ytop, int width, int height, int x, int y, int w, int h, int color, int update) yading@10: { yading@10: int w1, w2, h1, h2; yading@10: yading@10: /* fill the background */ yading@10: w1 = x; yading@10: if (w1 < 0) yading@10: w1 = 0; yading@10: w2 = width - (x + w); yading@10: if (w2 < 0) yading@10: w2 = 0; yading@10: h1 = y; yading@10: if (h1 < 0) yading@10: h1 = 0; yading@10: h2 = height - (y + h); yading@10: if (h2 < 0) yading@10: h2 = 0; yading@10: fill_rectangle(screen, yading@10: xleft, ytop, yading@10: w1, height, yading@10: color, update); yading@10: fill_rectangle(screen, yading@10: xleft + width - w2, ytop, yading@10: w2, height, yading@10: color, update); yading@10: fill_rectangle(screen, yading@10: xleft + w1, ytop, yading@10: width - w1 - w2, h1, yading@10: color, update); yading@10: fill_rectangle(screen, yading@10: xleft + w1, ytop + height - h2, yading@10: width - w1 - w2, h2, yading@10: color, update); yading@10: } yading@10: yading@10: #define ALPHA_BLEND(a, oldp, newp, s)\ yading@10: ((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s)) yading@10: yading@10: #define RGBA_IN(r, g, b, a, s)\ yading@10: {\ yading@10: unsigned int v = ((const uint32_t *)(s))[0];\ yading@10: a = (v >> 24) & 0xff;\ yading@10: r = (v >> 16) & 0xff;\ yading@10: g = (v >> 8) & 0xff;\ yading@10: b = v & 0xff;\ yading@10: } yading@10: yading@10: #define YUVA_IN(y, u, v, a, s, pal)\ yading@10: {\ yading@10: unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\ yading@10: a = (val >> 24) & 0xff;\ yading@10: y = (val >> 16) & 0xff;\ yading@10: u = (val >> 8) & 0xff;\ yading@10: v = val & 0xff;\ yading@10: } yading@10: yading@10: #define YUVA_OUT(d, y, u, v, a)\ yading@10: {\ yading@10: ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\ yading@10: } yading@10: yading@10: yading@10: #define BPP 1 yading@10: yading@10: static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh) yading@10: { yading@10: int wrap, wrap3, width2, skip2; yading@10: int y, u, v, a, u1, v1, a1, w, h; 
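    /* The destination is a YUV 4:2:0 picture: luma is blended per pixel with
     * ALPHA_BLEND(a, old, new, 0), while each chroma sample covers a 2x2 luma
     * block, so the U/V/alpha contributions of neighbouring subtitle pixels
     * are accumulated in u1/v1/a1 and blended once per chroma sample; the
     * last ALPHA_BLEND argument adds fractional bits to compensate for that
     * accumulation. The palette in rect->pict.data[1] already holds YUVA
     * entries, unpacked by YUVA_IN. */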
yading@10: uint8_t *lum, *cb, *cr; yading@10: const uint8_t *p; yading@10: const uint32_t *pal; yading@10: int dstx, dsty, dstw, dsth; yading@10: yading@10: dstw = av_clip(rect->w, 0, imgw); yading@10: dsth = av_clip(rect->h, 0, imgh); yading@10: dstx = av_clip(rect->x, 0, imgw - dstw); yading@10: dsty = av_clip(rect->y, 0, imgh - dsth); yading@10: lum = dst->data[0] + dsty * dst->linesize[0]; yading@10: cb = dst->data[1] + (dsty >> 1) * dst->linesize[1]; yading@10: cr = dst->data[2] + (dsty >> 1) * dst->linesize[2]; yading@10: yading@10: width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1); yading@10: skip2 = dstx >> 1; yading@10: wrap = dst->linesize[0]; yading@10: wrap3 = rect->pict.linesize[0]; yading@10: p = rect->pict.data[0]; yading@10: pal = (const uint32_t *)rect->pict.data[1]; /* Now in YCrCb! */ yading@10: yading@10: if (dsty & 1) { yading@10: lum += dstx; yading@10: cb += skip2; yading@10: cr += skip2; yading@10: yading@10: if (dstx & 1) { yading@10: YUVA_IN(y, u, v, a, p, pal); yading@10: lum[0] = ALPHA_BLEND(a, lum[0], y, 0); yading@10: cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0); yading@10: cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0); yading@10: cb++; yading@10: cr++; yading@10: lum++; yading@10: p += BPP; yading@10: } yading@10: for (w = dstw - (dstx & 1); w >= 2; w -= 2) { yading@10: YUVA_IN(y, u, v, a, p, pal); yading@10: u1 = u; yading@10: v1 = v; yading@10: a1 = a; yading@10: lum[0] = ALPHA_BLEND(a, lum[0], y, 0); yading@10: yading@10: YUVA_IN(y, u, v, a, p + BPP, pal); yading@10: u1 += u; yading@10: v1 += v; yading@10: a1 += a; yading@10: lum[1] = ALPHA_BLEND(a, lum[1], y, 0); yading@10: cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1); yading@10: cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1); yading@10: cb++; yading@10: cr++; yading@10: p += 2 * BPP; yading@10: lum += 2; yading@10: } yading@10: if (w) { yading@10: YUVA_IN(y, u, v, a, p, pal); yading@10: lum[0] = ALPHA_BLEND(a, lum[0], y, 0); yading@10: cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0); yading@10: cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0); yading@10: p++; yading@10: lum++; yading@10: } yading@10: p += wrap3 - dstw * BPP; yading@10: lum += wrap - dstw - dstx; yading@10: cb += dst->linesize[1] - width2 - skip2; yading@10: cr += dst->linesize[2] - width2 - skip2; yading@10: } yading@10: for (h = dsth - (dsty & 1); h >= 2; h -= 2) { yading@10: lum += dstx; yading@10: cb += skip2; yading@10: cr += skip2; yading@10: yading@10: if (dstx & 1) { yading@10: YUVA_IN(y, u, v, a, p, pal); yading@10: u1 = u; yading@10: v1 = v; yading@10: a1 = a; yading@10: lum[0] = ALPHA_BLEND(a, lum[0], y, 0); yading@10: p += wrap3; yading@10: lum += wrap; yading@10: YUVA_IN(y, u, v, a, p, pal); yading@10: u1 += u; yading@10: v1 += v; yading@10: a1 += a; yading@10: lum[0] = ALPHA_BLEND(a, lum[0], y, 0); yading@10: cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1); yading@10: cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1); yading@10: cb++; yading@10: cr++; yading@10: p += -wrap3 + BPP; yading@10: lum += -wrap + 1; yading@10: } yading@10: for (w = dstw - (dstx & 1); w >= 2; w -= 2) { yading@10: YUVA_IN(y, u, v, a, p, pal); yading@10: u1 = u; yading@10: v1 = v; yading@10: a1 = a; yading@10: lum[0] = ALPHA_BLEND(a, lum[0], y, 0); yading@10: yading@10: YUVA_IN(y, u, v, a, p + BPP, pal); yading@10: u1 += u; yading@10: v1 += v; yading@10: a1 += a; yading@10: lum[1] = ALPHA_BLEND(a, lum[1], y, 0); yading@10: p += wrap3; yading@10: lum += wrap; yading@10: yading@10: YUVA_IN(y, u, v, a, p, pal); yading@10: u1 += u; yading@10: v1 += v; yading@10: a1 += a; yading@10: lum[0] = 
ALPHA_BLEND(a, lum[0], y, 0); yading@10: yading@10: YUVA_IN(y, u, v, a, p + BPP, pal); yading@10: u1 += u; yading@10: v1 += v; yading@10: a1 += a; yading@10: lum[1] = ALPHA_BLEND(a, lum[1], y, 0); yading@10: yading@10: cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2); yading@10: cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2); yading@10: yading@10: cb++; yading@10: cr++; yading@10: p += -wrap3 + 2 * BPP; yading@10: lum += -wrap + 2; yading@10: } yading@10: if (w) { yading@10: YUVA_IN(y, u, v, a, p, pal); yading@10: u1 = u; yading@10: v1 = v; yading@10: a1 = a; yading@10: lum[0] = ALPHA_BLEND(a, lum[0], y, 0); yading@10: p += wrap3; yading@10: lum += wrap; yading@10: YUVA_IN(y, u, v, a, p, pal); yading@10: u1 += u; yading@10: v1 += v; yading@10: a1 += a; yading@10: lum[0] = ALPHA_BLEND(a, lum[0], y, 0); yading@10: cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1); yading@10: cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1); yading@10: cb++; yading@10: cr++; yading@10: p += -wrap3 + BPP; yading@10: lum += -wrap + 1; yading@10: } yading@10: p += wrap3 + (wrap3 - dstw * BPP); yading@10: lum += wrap + (wrap - dstw - dstx); yading@10: cb += dst->linesize[1] - width2 - skip2; yading@10: cr += dst->linesize[2] - width2 - skip2; yading@10: } yading@10: /* handle odd height */ yading@10: if (h) { yading@10: lum += dstx; yading@10: cb += skip2; yading@10: cr += skip2; yading@10: yading@10: if (dstx & 1) { yading@10: YUVA_IN(y, u, v, a, p, pal); yading@10: lum[0] = ALPHA_BLEND(a, lum[0], y, 0); yading@10: cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0); yading@10: cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0); yading@10: cb++; yading@10: cr++; yading@10: lum++; yading@10: p += BPP; yading@10: } yading@10: for (w = dstw - (dstx & 1); w >= 2; w -= 2) { yading@10: YUVA_IN(y, u, v, a, p, pal); yading@10: u1 = u; yading@10: v1 = v; yading@10: a1 = a; yading@10: lum[0] = ALPHA_BLEND(a, lum[0], y, 0); yading@10: yading@10: YUVA_IN(y, u, v, a, p + BPP, pal); yading@10: u1 += u; yading@10: v1 += v; yading@10: a1 += a; yading@10: lum[1] = ALPHA_BLEND(a, lum[1], y, 0); yading@10: cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1); yading@10: cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1); yading@10: cb++; yading@10: cr++; yading@10: p += 2 * BPP; yading@10: lum += 2; yading@10: } yading@10: if (w) { yading@10: YUVA_IN(y, u, v, a, p, pal); yading@10: lum[0] = ALPHA_BLEND(a, lum[0], y, 0); yading@10: cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0); yading@10: cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0); yading@10: } yading@10: } yading@10: } yading@10: yading@10: static void free_subpicture(SubPicture *sp) yading@10: { yading@10: avsubtitle_free(&sp->sub); yading@10: } yading@10: yading@10: static void calculate_display_rect(SDL_Rect *rect, int scr_xleft, int scr_ytop, int scr_width, int scr_height, VideoPicture *vp) yading@10: { yading@10: float aspect_ratio; yading@10: int width, height, x, y; yading@10: yading@10: if (vp->sar.num == 0) yading@10: aspect_ratio = 0; yading@10: else yading@10: aspect_ratio = av_q2d(vp->sar); yading@10: yading@10: if (aspect_ratio <= 0.0) yading@10: aspect_ratio = 1.0; yading@10: aspect_ratio *= (float)vp->width / (float)vp->height; yading@10: yading@10: /* XXX: we suppose the screen has a 1.0 pixel ratio */ yading@10: height = scr_height; yading@10: width = ((int)rint(height * aspect_ratio)) & ~1; yading@10: if (width > scr_width) { yading@10: width = scr_width; yading@10: height = ((int)rint(width / aspect_ratio)) & ~1; yading@10: } yading@10: x = (scr_width - width) / 2; yading@10: y = (scr_height - height) / 2; yading@10: rect->x = 
scr_xleft + x; yading@10: rect->y = scr_ytop + y; yading@10: rect->w = FFMAX(width, 1); yading@10: rect->h = FFMAX(height, 1); yading@10: } yading@10: yading@10: static void video_image_display(VideoState *is) yading@10: { yading@10: VideoPicture *vp; yading@10: SubPicture *sp; yading@10: AVPicture pict; yading@10: SDL_Rect rect; yading@10: int i; yading@10: yading@10: vp = &is->pictq[is->pictq_rindex]; yading@10: if (vp->bmp) { yading@10: if (is->subtitle_st) { yading@10: if (is->subpq_size > 0) { yading@10: sp = &is->subpq[is->subpq_rindex]; yading@10: yading@10: if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000)) { yading@10: SDL_LockYUVOverlay (vp->bmp); yading@10: yading@10: pict.data[0] = vp->bmp->pixels[0]; yading@10: pict.data[1] = vp->bmp->pixels[2]; yading@10: pict.data[2] = vp->bmp->pixels[1]; yading@10: yading@10: pict.linesize[0] = vp->bmp->pitches[0]; yading@10: pict.linesize[1] = vp->bmp->pitches[2]; yading@10: pict.linesize[2] = vp->bmp->pitches[1]; yading@10: yading@10: for (i = 0; i < sp->sub.num_rects; i++) yading@10: blend_subrect(&pict, sp->sub.rects[i], yading@10: vp->bmp->w, vp->bmp->h); yading@10: yading@10: SDL_UnlockYUVOverlay (vp->bmp); yading@10: } yading@10: } yading@10: } yading@10: yading@10: calculate_display_rect(&rect, is->xleft, is->ytop, is->width, is->height, vp); yading@10: yading@10: SDL_DisplayYUVOverlay(vp->bmp, &rect); yading@10: yading@10: if (rect.x != is->last_display_rect.x || rect.y != is->last_display_rect.y || rect.w != is->last_display_rect.w || rect.h != is->last_display_rect.h || is->force_refresh) { yading@10: int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00); yading@10: fill_border(is->xleft, is->ytop, is->width, is->height, rect.x, rect.y, rect.w, rect.h, bgcolor, 1); yading@10: is->last_display_rect = rect; yading@10: } yading@10: } yading@10: } yading@10: yading@10: static inline int compute_mod(int a, int b) yading@10: { yading@10: return a < 0 ? a%b + b : a%b; yading@10: } yading@10: yading@10: static void video_audio_display(VideoState *s) yading@10: { yading@10: int i, i_start, x, y1, y, ys, delay, n, nb_display_channels; yading@10: int ch, channels, h, h2, bgcolor, fgcolor; yading@10: int64_t time_diff; yading@10: int rdft_bits, nb_freq; yading@10: yading@10: for (rdft_bits = 1; (1 << rdft_bits) < 2 * s->height; rdft_bits++) yading@10: ; yading@10: nb_freq = 1 << (rdft_bits - 1); yading@10: yading@10: /* compute display index : center on currently output samples */ yading@10: channels = s->audio_tgt.channels; yading@10: nb_display_channels = channels; yading@10: if (!s->paused) { yading@10: int data_used= s->show_mode == SHOW_MODE_WAVES ? 
s->width : (2*nb_freq); yading@10: n = 2 * channels; yading@10: delay = s->audio_write_buf_size; yading@10: delay /= n; yading@10: yading@10: /* to be more precise, we take into account the time spent since yading@10: the last buffer computation */ yading@10: if (audio_callback_time) { yading@10: time_diff = av_gettime() - audio_callback_time; yading@10: delay -= (time_diff * s->audio_tgt.freq) / 1000000; yading@10: } yading@10: yading@10: delay += 2 * data_used; yading@10: if (delay < data_used) yading@10: delay = data_used; yading@10: yading@10: i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE); yading@10: if (s->show_mode == SHOW_MODE_WAVES) { yading@10: h = INT_MIN; yading@10: for (i = 0; i < 1000; i += channels) { yading@10: int idx = (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE; yading@10: int a = s->sample_array[idx]; yading@10: int b = s->sample_array[(idx + 4 * channels) % SAMPLE_ARRAY_SIZE]; yading@10: int c = s->sample_array[(idx + 5 * channels) % SAMPLE_ARRAY_SIZE]; yading@10: int d = s->sample_array[(idx + 9 * channels) % SAMPLE_ARRAY_SIZE]; yading@10: int score = a - d; yading@10: if (h < score && (b ^ c) < 0) { yading@10: h = score; yading@10: i_start = idx; yading@10: } yading@10: } yading@10: } yading@10: yading@10: s->last_i_start = i_start; yading@10: } else { yading@10: i_start = s->last_i_start; yading@10: } yading@10: yading@10: bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00); yading@10: if (s->show_mode == SHOW_MODE_WAVES) { yading@10: fill_rectangle(screen, yading@10: s->xleft, s->ytop, s->width, s->height, yading@10: bgcolor, 0); yading@10: yading@10: fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff); yading@10: yading@10: /* total height for one channel */ yading@10: h = s->height / nb_display_channels; yading@10: /* graph height / 2 */ yading@10: h2 = (h * 9) / 20; yading@10: for (ch = 0; ch < nb_display_channels; ch++) { yading@10: i = i_start + ch; yading@10: y1 = s->ytop + ch * h + (h / 2); /* position of center line */ yading@10: for (x = 0; x < s->width; x++) { yading@10: y = (s->sample_array[i] * h2) >> 15; yading@10: if (y < 0) { yading@10: y = -y; yading@10: ys = y1 - y; yading@10: } else { yading@10: ys = y1; yading@10: } yading@10: fill_rectangle(screen, yading@10: s->xleft + x, ys, 1, y, yading@10: fgcolor, 0); yading@10: i += channels; yading@10: if (i >= SAMPLE_ARRAY_SIZE) yading@10: i -= SAMPLE_ARRAY_SIZE; yading@10: } yading@10: } yading@10: yading@10: fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff); yading@10: yading@10: for (ch = 1; ch < nb_display_channels; ch++) { yading@10: y = s->ytop + ch * h; yading@10: fill_rectangle(screen, yading@10: s->xleft, y, s->width, 1, yading@10: fgcolor, 0); yading@10: } yading@10: SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height); yading@10: } else { yading@10: nb_display_channels= FFMIN(nb_display_channels, 2); yading@10: if (rdft_bits != s->rdft_bits) { yading@10: av_rdft_end(s->rdft); yading@10: av_free(s->rdft_data); yading@10: s->rdft = av_rdft_init(rdft_bits, DFT_R2C); yading@10: s->rdft_bits = rdft_bits; yading@10: s->rdft_data = av_malloc(4 * nb_freq * sizeof(*s->rdft_data)); yading@10: } yading@10: { yading@10: FFTSample *data[2]; yading@10: for (ch = 0; ch < nb_display_channels; ch++) { yading@10: data[ch] = s->rdft_data + 2 * nb_freq * ch; yading@10: i = i_start + ch; yading@10: for (x = 0; x < 2 * nb_freq; x++) { yading@10: double w = (x-nb_freq) * (1.0 / nb_freq); yading@10: data[ch][x] = s->sample_array[i] * (1.0 - w * w); 
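                /* (1 - w*w) with w in [-1, 1) is a parabolic (Welch) window
                 * applied before the real FFT below; the bin magnitudes are
                 * then drawn as one colour column at x = s->xpos, which
                 * advances after each drawn column (when not paused), so the
                 * spectrum display scrolls horizontally. */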
yading@10: i += channels; yading@10: if (i >= SAMPLE_ARRAY_SIZE) yading@10: i -= SAMPLE_ARRAY_SIZE; yading@10: } yading@10: av_rdft_calc(s->rdft, data[ch]); yading@10: } yading@10: // least efficient way to do this, we should of course directly access it but its more than fast enough yading@10: for (y = 0; y < s->height; y++) { yading@10: double w = 1 / sqrt(nb_freq); yading@10: int a = sqrt(w * sqrt(data[0][2 * y + 0] * data[0][2 * y + 0] + data[0][2 * y + 1] * data[0][2 * y + 1])); yading@10: int b = (nb_display_channels == 2 ) ? sqrt(w * sqrt(data[1][2 * y + 0] * data[1][2 * y + 0] yading@10: + data[1][2 * y + 1] * data[1][2 * y + 1])) : a; yading@10: a = FFMIN(a, 255); yading@10: b = FFMIN(b, 255); yading@10: fgcolor = SDL_MapRGB(screen->format, a, b, (a + b) / 2); yading@10: yading@10: fill_rectangle(screen, yading@10: s->xpos, s->height-y, 1, 1, yading@10: fgcolor, 0); yading@10: } yading@10: } yading@10: SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height); yading@10: if (!s->paused) yading@10: s->xpos++; yading@10: if (s->xpos >= s->width) yading@10: s->xpos= s->xleft; yading@10: } yading@10: } yading@10: yading@10: static void stream_close(VideoState *is) yading@10: { yading@10: VideoPicture *vp; yading@10: int i; yading@10: /* XXX: use a special url_shutdown call to abort parse cleanly */ yading@10: is->abort_request = 1; yading@10: SDL_WaitThread(is->read_tid, NULL); yading@10: packet_queue_destroy(&is->videoq); yading@10: packet_queue_destroy(&is->audioq); yading@10: packet_queue_destroy(&is->subtitleq); yading@10: yading@10: /* free all pictures */ yading@10: for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++) { yading@10: vp = &is->pictq[i]; yading@10: if (vp->bmp) { yading@10: SDL_FreeYUVOverlay(vp->bmp); yading@10: vp->bmp = NULL; yading@10: } yading@10: } yading@10: SDL_DestroyMutex(is->pictq_mutex); yading@10: SDL_DestroyCond(is->pictq_cond); yading@10: SDL_DestroyMutex(is->subpq_mutex); yading@10: SDL_DestroyCond(is->subpq_cond); yading@10: SDL_DestroyCond(is->continue_read_thread); yading@10: #if !CONFIG_AVFILTER yading@10: sws_freeContext(is->img_convert_ctx); yading@10: #endif yading@10: av_free(is); yading@10: } yading@10: yading@10: static void do_exit(VideoState *is) yading@10: { yading@10: if (is) { yading@10: stream_close(is); yading@10: } yading@10: av_lockmgr_register(NULL); yading@10: uninit_opts(); yading@10: #if CONFIG_AVFILTER yading@10: av_freep(&vfilters); yading@10: #endif yading@10: avformat_network_deinit(); yading@10: if (show_status) yading@10: printf("\n"); yading@10: SDL_Quit(); yading@10: av_log(NULL, AV_LOG_QUIET, "%s", ""); yading@10: exit(0); yading@10: } yading@10: yading@10: static void sigterm_handler(int sig) yading@10: { yading@10: exit(123); yading@10: } yading@10: yading@10: static int video_open(VideoState *is, int force_set_video_mode, VideoPicture *vp) yading@10: { yading@10: int flags = SDL_HWSURFACE | SDL_ASYNCBLIT | SDL_HWACCEL; yading@10: int w,h; yading@10: SDL_Rect rect; yading@10: yading@10: if (is_full_screen) flags |= SDL_FULLSCREEN; yading@10: else flags |= SDL_RESIZABLE; yading@10: yading@10: if (vp && vp->width) { yading@10: calculate_display_rect(&rect, 0, 0, INT_MAX, vp->height, vp); yading@10: default_width = rect.w; yading@10: default_height = rect.h; yading@10: } yading@10: yading@10: if (is_full_screen && fs_screen_width) { yading@10: w = fs_screen_width; yading@10: h = fs_screen_height; yading@10: } else if (!is_full_screen && screen_width) { yading@10: w = screen_width; yading@10: h = screen_height; yading@10: } else 
{ yading@10: w = default_width; yading@10: h = default_height; yading@10: } yading@10: if (screen && is->width == screen->w && screen->w == w yading@10: && is->height== screen->h && screen->h == h && !force_set_video_mode) yading@10: return 0; yading@10: screen = SDL_SetVideoMode(w, h, 0, flags); yading@10: if (!screen) { yading@10: fprintf(stderr, "SDL: could not set video mode - exiting\n"); yading@10: do_exit(is); yading@10: } yading@10: if (!window_title) yading@10: window_title = input_filename; yading@10: SDL_WM_SetCaption(window_title, window_title); yading@10: yading@10: is->width = screen->w; yading@10: is->height = screen->h; yading@10: yading@10: return 0; yading@10: } yading@10: yading@10: /* display the current picture, if any */ yading@10: static void video_display(VideoState *is) yading@10: { yading@10: if (!screen) yading@10: video_open(is, 0, NULL); yading@10: if (is->audio_st && is->show_mode != SHOW_MODE_VIDEO) yading@10: video_audio_display(is); yading@10: else if (is->video_st) yading@10: video_image_display(is); yading@10: } yading@10: yading@10: /* get the current audio clock value */ yading@10: static double get_audio_clock(VideoState *is) yading@10: { yading@10: if (is->audio_clock_serial != is->audioq.serial) yading@10: return NAN; yading@10: if (is->paused) { yading@10: return is->audio_current_pts; yading@10: } else { yading@10: return is->audio_current_pts_drift + av_gettime() / 1000000.0; yading@10: } yading@10: } yading@10: yading@10: /* get the current video clock value */ yading@10: static double get_video_clock(VideoState *is) yading@10: { yading@10: if (is->video_clock_serial != is->videoq.serial) yading@10: return NAN; yading@10: if (is->paused) { yading@10: return is->video_current_pts; yading@10: } else { yading@10: return is->video_current_pts_drift + av_gettime() / 1000000.0; yading@10: } yading@10: } yading@10: yading@10: /* get the current external clock value */ yading@10: static double get_external_clock(VideoState *is) yading@10: { yading@10: if (is->paused) { yading@10: return is->external_clock; yading@10: } else { yading@10: double time = av_gettime() / 1000000.0; yading@10: return is->external_clock_drift + time - (time - is->external_clock_time / 1000000.0) * (1.0 - is->external_clock_speed); yading@10: } yading@10: } yading@10: yading@10: static int get_master_sync_type(VideoState *is) { yading@10: if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) { yading@10: if (is->video_st) yading@10: return AV_SYNC_VIDEO_MASTER; yading@10: else yading@10: return AV_SYNC_AUDIO_MASTER; yading@10: } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) { yading@10: if (is->audio_st) yading@10: return AV_SYNC_AUDIO_MASTER; yading@10: else yading@10: return AV_SYNC_EXTERNAL_CLOCK; yading@10: } else { yading@10: return AV_SYNC_EXTERNAL_CLOCK; yading@10: } yading@10: } yading@10: yading@10: /* get the current master clock value */ yading@10: static double get_master_clock(VideoState *is) yading@10: { yading@10: double val; yading@10: yading@10: switch (get_master_sync_type(is)) { yading@10: case AV_SYNC_VIDEO_MASTER: yading@10: val = get_video_clock(is); yading@10: break; yading@10: case AV_SYNC_AUDIO_MASTER: yading@10: val = get_audio_clock(is); yading@10: break; yading@10: default: yading@10: val = get_external_clock(is); yading@10: break; yading@10: } yading@10: return val; yading@10: } yading@10: yading@10: static void update_external_clock_pts(VideoState *is, double pts) yading@10: { yading@10: is->external_clock_time = av_gettime(); yading@10: 
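    /* Each clock is stored as a pts plus a drift (pts minus the system time
     * in seconds), so its current value can later be read back as
     * drift + av_gettime() / 1000000.0 without keeping a start time.
     * get_external_clock() additionally scales the elapsed time by
     * external_clock_speed, which in effect yields pts + elapsed * speed. */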
is->external_clock = pts; yading@10: is->external_clock_drift = pts - is->external_clock_time / 1000000.0; yading@10: } yading@10: yading@10: static void check_external_clock_sync(VideoState *is, double pts) { yading@10: double ext_clock = get_external_clock(is); yading@10: if (isnan(ext_clock) || fabs(ext_clock - pts) > AV_NOSYNC_THRESHOLD) { yading@10: update_external_clock_pts(is, pts); yading@10: } yading@10: } yading@10: yading@10: static void update_external_clock_speed(VideoState *is, double speed) { yading@10: update_external_clock_pts(is, get_external_clock(is)); yading@10: is->external_clock_speed = speed; yading@10: } yading@10: yading@10: static void check_external_clock_speed(VideoState *is) { yading@10: if (is->video_stream >= 0 && is->videoq.nb_packets <= MIN_FRAMES / 2 || yading@10: is->audio_stream >= 0 && is->audioq.nb_packets <= MIN_FRAMES / 2) { yading@10: update_external_clock_speed(is, FFMAX(EXTERNAL_CLOCK_SPEED_MIN, is->external_clock_speed - EXTERNAL_CLOCK_SPEED_STEP)); yading@10: } else if ((is->video_stream < 0 || is->videoq.nb_packets > MIN_FRAMES * 2) && yading@10: (is->audio_stream < 0 || is->audioq.nb_packets > MIN_FRAMES * 2)) { yading@10: update_external_clock_speed(is, FFMIN(EXTERNAL_CLOCK_SPEED_MAX, is->external_clock_speed + EXTERNAL_CLOCK_SPEED_STEP)); yading@10: } else { yading@10: double speed = is->external_clock_speed; yading@10: if (speed != 1.0) yading@10: update_external_clock_speed(is, speed + EXTERNAL_CLOCK_SPEED_STEP * (1.0 - speed) / fabs(1.0 - speed)); yading@10: } yading@10: } yading@10: yading@10: /* seek in the stream */ yading@10: static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes) yading@10: { yading@10: if (!is->seek_req) { yading@10: is->seek_pos = pos; yading@10: is->seek_rel = rel; yading@10: is->seek_flags &= ~AVSEEK_FLAG_BYTE; yading@10: if (seek_by_bytes) yading@10: is->seek_flags |= AVSEEK_FLAG_BYTE; yading@10: is->seek_req = 1; yading@10: SDL_CondSignal(is->continue_read_thread); yading@10: } yading@10: } yading@10: yading@10: /* pause or resume the video */ yading@10: static void stream_toggle_pause(VideoState *is) yading@10: { yading@10: if (is->paused) { yading@10: is->frame_timer += av_gettime() / 1000000.0 + is->video_current_pts_drift - is->video_current_pts; yading@10: if (is->read_pause_return != AVERROR(ENOSYS)) { yading@10: is->video_current_pts = is->video_current_pts_drift + av_gettime() / 1000000.0; yading@10: } yading@10: is->video_current_pts_drift = is->video_current_pts - av_gettime() / 1000000.0; yading@10: } yading@10: update_external_clock_pts(is, get_external_clock(is)); yading@10: is->paused = !is->paused; yading@10: } yading@10: yading@10: static void toggle_pause(VideoState *is) yading@10: { yading@10: stream_toggle_pause(is); yading@10: is->step = 0; yading@10: } yading@10: yading@10: static void step_to_next_frame(VideoState *is) yading@10: { yading@10: /* if the stream is paused unpause it, then step */ yading@10: if (is->paused) yading@10: stream_toggle_pause(is); yading@10: is->step = 1; yading@10: } yading@10: yading@10: static double compute_target_delay(double delay, VideoState *is) yading@10: { yading@10: double sync_threshold, diff; yading@10: yading@10: /* update delay to follow master synchronisation source */ yading@10: if (get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER) { yading@10: /* if video is slave, we try to correct big delays by yading@10: duplicating or deleting a frame */ yading@10: diff = get_video_clock(is) - get_master_clock(is); yading@10: 
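        /* e.g. with a nominal frame delay of 40 ms (25 fps) the threshold
         * below becomes FFMAX(0.01, 0.04) = 40 ms: if the video clock is more
         * than 40 ms behind the master clock the delay collapses to 0 so we
         * catch up, and if it is more than 40 ms ahead the delay is doubled
         * so the current frame is held longer. */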
yading@10: /* skip or repeat frame. We take into account the yading@10: delay to compute the threshold. I still don't know yading@10: if it is the best guess */ yading@10: sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay); yading@10: if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD) { yading@10: if (diff <= -sync_threshold) yading@10: delay = 0; yading@10: else if (diff >= sync_threshold) yading@10: delay = 2 * delay; yading@10: } yading@10: } yading@10: yading@10: av_dlog(NULL, "video: delay=%0.3f A-V=%f\n", yading@10: delay, -diff); yading@10: yading@10: return delay; yading@10: } yading@10: yading@10: static void pictq_next_picture(VideoState *is) { yading@10: /* update queue size and signal for next picture */ yading@10: if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE) yading@10: is->pictq_rindex = 0; yading@10: yading@10: SDL_LockMutex(is->pictq_mutex); yading@10: is->pictq_size--; yading@10: SDL_CondSignal(is->pictq_cond); yading@10: SDL_UnlockMutex(is->pictq_mutex); yading@10: } yading@10: yading@10: static int pictq_prev_picture(VideoState *is) { yading@10: VideoPicture *prevvp; yading@10: int ret = 0; yading@10: /* update queue size and signal for the previous picture */ yading@10: prevvp = &is->pictq[(is->pictq_rindex + VIDEO_PICTURE_QUEUE_SIZE - 1) % VIDEO_PICTURE_QUEUE_SIZE]; yading@10: if (prevvp->allocated && prevvp->serial == is->videoq.serial) { yading@10: SDL_LockMutex(is->pictq_mutex); yading@10: if (is->pictq_size < VIDEO_PICTURE_QUEUE_SIZE - 1) { yading@10: if (--is->pictq_rindex == -1) yading@10: is->pictq_rindex = VIDEO_PICTURE_QUEUE_SIZE - 1; yading@10: is->pictq_size++; yading@10: ret = 1; yading@10: } yading@10: SDL_CondSignal(is->pictq_cond); yading@10: SDL_UnlockMutex(is->pictq_mutex); yading@10: } yading@10: return ret; yading@10: } yading@10: yading@10: static void update_video_pts(VideoState *is, double pts, int64_t pos, int serial) { yading@10: double time = av_gettime() / 1000000.0; yading@10: /* update current video pts */ yading@10: is->video_current_pts = pts; yading@10: is->video_current_pts_drift = is->video_current_pts - time; yading@10: is->video_current_pos = pos; yading@10: is->frame_last_pts = pts; yading@10: is->video_clock_serial = serial; yading@10: if (is->videoq.serial == serial) yading@10: check_external_clock_sync(is, is->video_current_pts); yading@10: } yading@10: yading@10: /* called to display each frame */ yading@10: static void video_refresh(void *opaque, double *remaining_time) yading@10: { yading@10: VideoState *is = opaque; yading@10: VideoPicture *vp; yading@10: double time; yading@10: yading@10: SubPicture *sp, *sp2; yading@10: yading@10: if (!is->paused && get_master_sync_type(is) == AV_SYNC_EXTERNAL_CLOCK && is->realtime) yading@10: check_external_clock_speed(is); yading@10: yading@10: if (!display_disable && is->show_mode != SHOW_MODE_VIDEO && is->audio_st) { yading@10: time = av_gettime() / 1000000.0; yading@10: if (is->force_refresh || is->last_vis_time + rdftspeed < time) { yading@10: video_display(is); yading@10: is->last_vis_time = time; yading@10: } yading@10: *remaining_time = FFMIN(*remaining_time, is->last_vis_time + rdftspeed - time); yading@10: } yading@10: yading@10: if (is->video_st) { yading@10: int redisplay = 0; yading@10: if (is->force_refresh) yading@10: redisplay = pictq_prev_picture(is); yading@10: retry: yading@10: if (is->pictq_size == 0) { yading@10: SDL_LockMutex(is->pictq_mutex); yading@10: if (is->frame_last_dropped_pts != AV_NOPTS_VALUE && is->frame_last_dropped_pts > is->frame_last_pts) { 
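                /* a frame dropped before being queued (frame_drops_early)
                 * still carries the most recent timing information, so its
                 * pts is propagated to the video clock here to keep the clock
                 * advancing while the picture queue is empty. */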
yading@10: update_video_pts(is, is->frame_last_dropped_pts, is->frame_last_dropped_pos, is->frame_last_dropped_serial); yading@10: is->frame_last_dropped_pts = AV_NOPTS_VALUE; yading@10: } yading@10: SDL_UnlockMutex(is->pictq_mutex); yading@10: // nothing to do, no picture to display in the queue yading@10: } else { yading@10: double last_duration, duration, delay; yading@10: /* dequeue the picture */ yading@10: vp = &is->pictq[is->pictq_rindex]; yading@10: yading@10: if (vp->serial != is->videoq.serial) { yading@10: pictq_next_picture(is); yading@10: redisplay = 0; yading@10: goto retry; yading@10: } yading@10: yading@10: if (is->paused) yading@10: goto display; yading@10: yading@10: /* compute nominal last_duration */ yading@10: last_duration = vp->pts - is->frame_last_pts; yading@10: if (!isnan(last_duration) && last_duration > 0 && last_duration < is->max_frame_duration) { yading@10: /* if duration of the last frame was sane, update last_duration in video state */ yading@10: is->frame_last_duration = last_duration; yading@10: } yading@10: delay = compute_target_delay(is->frame_last_duration, is); yading@10: yading@10: time= av_gettime()/1000000.0; yading@10: if (time < is->frame_timer + delay) { yading@10: *remaining_time = FFMIN(is->frame_timer + delay - time, *remaining_time); yading@10: return; yading@10: } yading@10: yading@10: if (delay > 0) yading@10: is->frame_timer += delay * FFMAX(1, floor((time-is->frame_timer) / delay)); yading@10: yading@10: SDL_LockMutex(is->pictq_mutex); yading@10: if (!isnan(vp->pts)) yading@10: update_video_pts(is, vp->pts, vp->pos, vp->serial); yading@10: SDL_UnlockMutex(is->pictq_mutex); yading@10: yading@10: if (is->pictq_size > 1) { yading@10: VideoPicture *nextvp = &is->pictq[(is->pictq_rindex + 1) % VIDEO_PICTURE_QUEUE_SIZE]; yading@10: duration = nextvp->pts - vp->pts; yading@10: if(!is->step && (redisplay || framedrop>0 || (framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) && time > is->frame_timer + duration){ yading@10: if (!redisplay) yading@10: is->frame_drops_late++; yading@10: pictq_next_picture(is); yading@10: redisplay = 0; yading@10: goto retry; yading@10: } yading@10: } yading@10: yading@10: if (is->subtitle_st) { yading@10: if (is->subtitle_stream_changed) { yading@10: SDL_LockMutex(is->subpq_mutex); yading@10: yading@10: while (is->subpq_size) { yading@10: free_subpicture(&is->subpq[is->subpq_rindex]); yading@10: yading@10: /* update queue size and signal for next picture */ yading@10: if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE) yading@10: is->subpq_rindex = 0; yading@10: yading@10: is->subpq_size--; yading@10: } yading@10: is->subtitle_stream_changed = 0; yading@10: yading@10: SDL_CondSignal(is->subpq_cond); yading@10: SDL_UnlockMutex(is->subpq_mutex); yading@10: } else { yading@10: if (is->subpq_size > 0) { yading@10: sp = &is->subpq[is->subpq_rindex]; yading@10: yading@10: if (is->subpq_size > 1) yading@10: sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE]; yading@10: else yading@10: sp2 = NULL; yading@10: yading@10: if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000))) yading@10: || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000)))) yading@10: { yading@10: free_subpicture(sp); yading@10: yading@10: /* update queue size and signal for next picture */ yading@10: if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE) yading@10: is->subpq_rindex = 0; yading@10: yading@10: SDL_LockMutex(is->subpq_mutex); yading@10: is->subpq_size--; 
yading@10: SDL_CondSignal(is->subpq_cond); yading@10: SDL_UnlockMutex(is->subpq_mutex); yading@10: } yading@10: } yading@10: } yading@10: } yading@10: yading@10: display: yading@10: /* display picture */ yading@10: if (!display_disable && is->show_mode == SHOW_MODE_VIDEO) yading@10: video_display(is); yading@10: yading@10: pictq_next_picture(is); yading@10: yading@10: if (is->step && !is->paused) yading@10: stream_toggle_pause(is); yading@10: } yading@10: } yading@10: is->force_refresh = 0; yading@10: if (show_status) { yading@10: static int64_t last_time; yading@10: int64_t cur_time; yading@10: int aqsize, vqsize, sqsize; yading@10: double av_diff; yading@10: yading@10: cur_time = av_gettime(); yading@10: if (!last_time || (cur_time - last_time) >= 30000) { yading@10: aqsize = 0; yading@10: vqsize = 0; yading@10: sqsize = 0; yading@10: if (is->audio_st) yading@10: aqsize = is->audioq.size; yading@10: if (is->video_st) yading@10: vqsize = is->videoq.size; yading@10: if (is->subtitle_st) yading@10: sqsize = is->subtitleq.size; yading@10: av_diff = 0; yading@10: if (is->audio_st && is->video_st) yading@10: av_diff = get_audio_clock(is) - get_video_clock(is); yading@10: printf("%7.2f A-V:%7.3f fd=%4d aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r", yading@10: get_master_clock(is), yading@10: av_diff, yading@10: is->frame_drops_early + is->frame_drops_late, yading@10: aqsize / 1024, yading@10: vqsize / 1024, yading@10: sqsize, yading@10: is->video_st ? is->video_st->codec->pts_correction_num_faulty_dts : 0, yading@10: is->video_st ? is->video_st->codec->pts_correction_num_faulty_pts : 0); yading@10: fflush(stdout); yading@10: last_time = cur_time; yading@10: } yading@10: } yading@10: } yading@10: yading@10: /* allocate a picture (needs to do that in main thread to avoid yading@10: potential locking problems */ yading@10: static void alloc_picture(VideoState *is) yading@10: { yading@10: VideoPicture *vp; yading@10: yading@10: vp = &is->pictq[is->pictq_windex]; yading@10: yading@10: if (vp->bmp) yading@10: SDL_FreeYUVOverlay(vp->bmp); yading@10: yading@10: video_open(is, 0, vp); yading@10: yading@10: vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height, yading@10: SDL_YV12_OVERLAY, yading@10: screen); yading@10: if (!vp->bmp || vp->bmp->pitches[0] < vp->width) { yading@10: /* SDL allocates a buffer smaller than requested if the video yading@10: * overlay hardware is unable to support the requested size. */ yading@10: fprintf(stderr, "Error: the video system does not support an image\n" yading@10: "size of %dx%d pixels. 
Try using -lowres or -vf \"scale=w:h\"\n" yading@10: "to reduce the image size.\n", vp->width, vp->height ); yading@10: do_exit(is); yading@10: } yading@10: yading@10: SDL_LockMutex(is->pictq_mutex); yading@10: vp->allocated = 1; yading@10: SDL_CondSignal(is->pictq_cond); yading@10: SDL_UnlockMutex(is->pictq_mutex); yading@10: } yading@10: yading@10: static void duplicate_right_border_pixels(SDL_Overlay *bmp) { yading@10: int i, width, height; yading@10: Uint8 *p, *maxp; yading@10: for (i = 0; i < 3; i++) { yading@10: width = bmp->w; yading@10: height = bmp->h; yading@10: if (i > 0) { yading@10: width >>= 1; yading@10: height >>= 1; yading@10: } yading@10: if (bmp->pitches[i] > width) { yading@10: maxp = bmp->pixels[i] + bmp->pitches[i] * height - 1; yading@10: for (p = bmp->pixels[i] + width - 1; p < maxp; p += bmp->pitches[i]) yading@10: *(p+1) = *p; yading@10: } yading@10: } yading@10: } yading@10: yading@10: static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, int64_t pos, int serial) yading@10: { yading@10: VideoPicture *vp; yading@10: yading@10: #if defined(DEBUG_SYNC) && 0 yading@10: printf("frame_type=%c pts=%0.3f\n", yading@10: av_get_picture_type_char(src_frame->pict_type), pts); yading@10: #endif yading@10: yading@10: /* wait until we have space to put a new picture */ yading@10: SDL_LockMutex(is->pictq_mutex); yading@10: yading@10: /* keep the last already displayed picture in the queue */ yading@10: while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE - 2 && yading@10: !is->videoq.abort_request) { yading@10: SDL_CondWait(is->pictq_cond, is->pictq_mutex); yading@10: } yading@10: SDL_UnlockMutex(is->pictq_mutex); yading@10: yading@10: if (is->videoq.abort_request) yading@10: return -1; yading@10: yading@10: vp = &is->pictq[is->pictq_windex]; yading@10: yading@10: vp->sar = src_frame->sample_aspect_ratio; yading@10: yading@10: /* alloc or resize hardware picture buffer */ yading@10: if (!vp->bmp || vp->reallocate || !vp->allocated || yading@10: vp->width != src_frame->width || yading@10: vp->height != src_frame->height) { yading@10: SDL_Event event; yading@10: yading@10: vp->allocated = 0; yading@10: vp->reallocate = 0; yading@10: vp->width = src_frame->width; yading@10: vp->height = src_frame->height; yading@10: yading@10: /* the allocation must be done in the main thread to avoid yading@10: locking problems. 
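       The FF_ALLOC_EVENT pushed below carries the VideoState pointer; the
       SDL event loop (outside this excerpt) is expected to react by calling
       alloc_picture(), which creates the YUV overlay, sets vp->allocated and
       signals pictq_cond, on which this thread then waits.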
*/ yading@10: event.type = FF_ALLOC_EVENT; yading@10: event.user.data1 = is; yading@10: SDL_PushEvent(&event); yading@10: yading@10: /* wait until the picture is allocated */ yading@10: SDL_LockMutex(is->pictq_mutex); yading@10: while (!vp->allocated && !is->videoq.abort_request) { yading@10: SDL_CondWait(is->pictq_cond, is->pictq_mutex); yading@10: } yading@10: /* if the queue is aborted, we have to pop the pending ALLOC event or wait for the allocation to complete */ yading@10: if (is->videoq.abort_request && SDL_PeepEvents(&event, 1, SDL_GETEVENT, SDL_EVENTMASK(FF_ALLOC_EVENT)) != 1) { yading@10: while (!vp->allocated) { yading@10: SDL_CondWait(is->pictq_cond, is->pictq_mutex); yading@10: } yading@10: } yading@10: SDL_UnlockMutex(is->pictq_mutex); yading@10: yading@10: if (is->videoq.abort_request) yading@10: return -1; yading@10: } yading@10: yading@10: /* if the frame is not skipped, then display it */ yading@10: if (vp->bmp) { yading@10: AVPicture pict = { { 0 } }; yading@10: yading@10: /* get a pointer on the bitmap */ yading@10: SDL_LockYUVOverlay (vp->bmp); yading@10: yading@10: pict.data[0] = vp->bmp->pixels[0]; yading@10: pict.data[1] = vp->bmp->pixels[2]; yading@10: pict.data[2] = vp->bmp->pixels[1]; yading@10: yading@10: pict.linesize[0] = vp->bmp->pitches[0]; yading@10: pict.linesize[1] = vp->bmp->pitches[2]; yading@10: pict.linesize[2] = vp->bmp->pitches[1]; yading@10: yading@10: #if CONFIG_AVFILTER yading@10: // FIXME use direct rendering yading@10: av_picture_copy(&pict, (AVPicture *)src_frame, yading@10: src_frame->format, vp->width, vp->height); yading@10: #else yading@10: av_opt_get_int(sws_opts, "sws_flags", 0, &sws_flags); yading@10: is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx, yading@10: vp->width, vp->height, src_frame->format, vp->width, vp->height, yading@10: AV_PIX_FMT_YUV420P, sws_flags, NULL, NULL, NULL); yading@10: if (is->img_convert_ctx == NULL) { yading@10: fprintf(stderr, "Cannot initialize the conversion context\n"); yading@10: exit(1); yading@10: } yading@10: sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize, yading@10: 0, vp->height, pict.data, pict.linesize); yading@10: #endif yading@10: /* workaround SDL PITCH_WORKAROUND */ yading@10: duplicate_right_border_pixels(vp->bmp); yading@10: /* update the bitmap content */ yading@10: SDL_UnlockYUVOverlay(vp->bmp); yading@10: yading@10: vp->pts = pts; yading@10: vp->pos = pos; yading@10: vp->serial = serial; yading@10: yading@10: /* now we can update the picture count */ yading@10: if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE) yading@10: is->pictq_windex = 0; yading@10: SDL_LockMutex(is->pictq_mutex); yading@10: is->pictq_size++; yading@10: SDL_UnlockMutex(is->pictq_mutex); yading@10: } yading@10: return 0; yading@10: } yading@10: yading@10: static int get_video_frame(VideoState *is, AVFrame *frame, AVPacket *pkt, int *serial) yading@10: { yading@10: int got_picture; yading@10: yading@10: if (packet_queue_get(&is->videoq, pkt, 1, serial) < 0) yading@10: return -1; yading@10: yading@10: if (pkt->data == flush_pkt.data) { yading@10: avcodec_flush_buffers(is->video_st->codec); yading@10: yading@10: SDL_LockMutex(is->pictq_mutex); yading@10: // Make sure there are no long delay timers (ideally we should just flush the queue but that's harder) yading@10: while (is->pictq_size && !is->videoq.abort_request) { yading@10: SDL_CondWait(is->pictq_cond, is->pictq_mutex); yading@10: } yading@10: is->video_current_pos = -1; yading@10: is->frame_last_pts = AV_NOPTS_VALUE; 
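        /* flush_pkt marks a discontinuity: packet_queue_put_private() bumps
         * the queue serial whenever flush_pkt is queued (on stream start and,
         * outside this excerpt, on seeks), so pictures and clocks tagged with
         * an older serial are discarded or ignored elsewhere; here we also
         * flush the codec and reset the timing state before decoding resumes. */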
yading@10: is->frame_last_duration = 0; yading@10: is->frame_timer = (double)av_gettime() / 1000000.0; yading@10: is->frame_last_dropped_pts = AV_NOPTS_VALUE; yading@10: SDL_UnlockMutex(is->pictq_mutex); yading@10: return 0; yading@10: } yading@10: yading@10: if(avcodec_decode_video2(is->video_st->codec, frame, &got_picture, pkt) < 0) yading@10: return 0; yading@10: yading@10: if (got_picture) { yading@10: int ret = 1; yading@10: double dpts = NAN; yading@10: yading@10: if (decoder_reorder_pts == -1) { yading@10: frame->pts = av_frame_get_best_effort_timestamp(frame); yading@10: } else if (decoder_reorder_pts) { yading@10: frame->pts = frame->pkt_pts; yading@10: } else { yading@10: frame->pts = frame->pkt_dts; yading@10: } yading@10: yading@10: if (frame->pts != AV_NOPTS_VALUE) yading@10: dpts = av_q2d(is->video_st->time_base) * frame->pts; yading@10: yading@10: frame->sample_aspect_ratio = av_guess_sample_aspect_ratio(is->ic, is->video_st, frame); yading@10: yading@10: if (framedrop>0 || (framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) { yading@10: SDL_LockMutex(is->pictq_mutex); yading@10: if (is->frame_last_pts != AV_NOPTS_VALUE && frame->pts != AV_NOPTS_VALUE) { yading@10: double clockdiff = get_video_clock(is) - get_master_clock(is); yading@10: double ptsdiff = dpts - is->frame_last_pts; yading@10: if (!isnan(clockdiff) && fabs(clockdiff) < AV_NOSYNC_THRESHOLD && yading@10: !isnan(ptsdiff) && ptsdiff > 0 && ptsdiff < AV_NOSYNC_THRESHOLD && yading@10: clockdiff + ptsdiff - is->frame_last_filter_delay < 0 && yading@10: is->videoq.nb_packets) { yading@10: is->frame_last_dropped_pos = pkt->pos; yading@10: is->frame_last_dropped_pts = dpts; yading@10: is->frame_last_dropped_serial = *serial; yading@10: is->frame_drops_early++; yading@10: av_frame_unref(frame); yading@10: ret = 0; yading@10: } yading@10: } yading@10: SDL_UnlockMutex(is->pictq_mutex); yading@10: } yading@10: yading@10: return ret; yading@10: } yading@10: return 0; yading@10: } yading@10: yading@10: #if CONFIG_AVFILTER yading@10: static int configure_filtergraph(AVFilterGraph *graph, const char *filtergraph, yading@10: AVFilterContext *source_ctx, AVFilterContext *sink_ctx) yading@10: { yading@10: int ret; yading@10: AVFilterInOut *outputs = NULL, *inputs = NULL; yading@10: yading@10: if (filtergraph) { yading@10: outputs = avfilter_inout_alloc(); yading@10: inputs = avfilter_inout_alloc(); yading@10: if (!outputs || !inputs) { yading@10: ret = AVERROR(ENOMEM); yading@10: goto fail; yading@10: } yading@10: yading@10: outputs->name = av_strdup("in"); yading@10: outputs->filter_ctx = source_ctx; yading@10: outputs->pad_idx = 0; yading@10: outputs->next = NULL; yading@10: yading@10: inputs->name = av_strdup("out"); yading@10: inputs->filter_ctx = sink_ctx; yading@10: inputs->pad_idx = 0; yading@10: inputs->next = NULL; yading@10: yading@10: if ((ret = avfilter_graph_parse(graph, filtergraph, &inputs, &outputs, NULL)) < 0) yading@10: goto fail; yading@10: } else { yading@10: if ((ret = avfilter_link(source_ctx, 0, sink_ctx, 0)) < 0) yading@10: goto fail; yading@10: } yading@10: yading@10: ret = avfilter_graph_config(graph, NULL); yading@10: fail: yading@10: avfilter_inout_free(&outputs); yading@10: avfilter_inout_free(&inputs); yading@10: return ret; yading@10: } yading@10: yading@10: static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const char *vfilters, AVFrame *frame) yading@10: { yading@10: static const enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE }; yading@10: 
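    /* The graph built below is:
     *   buffer ("ffplay_buffer") -> [optional user filters from -vf] ->
     *   crop to even width/height ("ffplay_crop") -> buffersink ("ffplay_buffersink")
     * The sink is restricted to the pix_fmts list above (yuv420p only),
     * because the frames end up in an SDL YUV overlay in queue_picture(). */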
char sws_flags_str[128]; yading@10: char buffersrc_args[256]; yading@10: int ret; yading@10: AVFilterContext *filt_src = NULL, *filt_out = NULL, *filt_crop; yading@10: AVCodecContext *codec = is->video_st->codec; yading@10: AVRational fr = av_guess_frame_rate(is->ic, is->video_st, NULL); yading@10: yading@10: av_opt_get_int(sws_opts, "sws_flags", 0, &sws_flags); yading@10: snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%"PRId64, sws_flags); yading@10: graph->scale_sws_opts = av_strdup(sws_flags_str); yading@10: yading@10: snprintf(buffersrc_args, sizeof(buffersrc_args), yading@10: "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d", yading@10: frame->width, frame->height, frame->format, yading@10: is->video_st->time_base.num, is->video_st->time_base.den, yading@10: codec->sample_aspect_ratio.num, FFMAX(codec->sample_aspect_ratio.den, 1)); yading@10: if (fr.num && fr.den) yading@10: av_strlcatf(buffersrc_args, sizeof(buffersrc_args), ":frame_rate=%d/%d", fr.num, fr.den); yading@10: yading@10: if ((ret = avfilter_graph_create_filter(&filt_src, yading@10: avfilter_get_by_name("buffer"), yading@10: "ffplay_buffer", buffersrc_args, NULL, yading@10: graph)) < 0) yading@10: goto fail; yading@10: yading@10: ret = avfilter_graph_create_filter(&filt_out, yading@10: avfilter_get_by_name("buffersink"), yading@10: "ffplay_buffersink", NULL, NULL, graph); yading@10: if (ret < 0) yading@10: goto fail; yading@10: yading@10: if ((ret = av_opt_set_int_list(filt_out, "pix_fmts", pix_fmts, AV_PIX_FMT_NONE, AV_OPT_SEARCH_CHILDREN)) < 0) yading@10: goto fail; yading@10: yading@10: /* SDL YUV code is not handling odd width/height for some driver yading@10: * combinations, therefore we crop the picture to an even width/height. */ yading@10: if ((ret = avfilter_graph_create_filter(&filt_crop, yading@10: avfilter_get_by_name("crop"), yading@10: "ffplay_crop", "floor(in_w/2)*2:floor(in_h/2)*2", NULL, graph)) < 0) yading@10: goto fail; yading@10: if ((ret = avfilter_link(filt_crop, 0, filt_out, 0)) < 0) yading@10: goto fail; yading@10: yading@10: if ((ret = configure_filtergraph(graph, vfilters, filt_src, filt_crop)) < 0) yading@10: goto fail; yading@10: yading@10: is->in_video_filter = filt_src; yading@10: is->out_video_filter = filt_out; yading@10: yading@10: fail: yading@10: return ret; yading@10: } yading@10: yading@10: static int configure_audio_filters(VideoState *is, const char *afilters, int force_output_format) yading@10: { yading@10: static const enum AVSampleFormat sample_fmts[] = { AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_NONE }; yading@10: int sample_rates[2] = { 0, -1 }; yading@10: int64_t channel_layouts[2] = { 0, -1 }; yading@10: int channels[2] = { 0, -1 }; yading@10: AVFilterContext *filt_asrc = NULL, *filt_asink = NULL; yading@10: char asrc_args[256]; yading@10: int ret; yading@10: yading@10: avfilter_graph_free(&is->agraph); yading@10: if (!(is->agraph = avfilter_graph_alloc())) yading@10: return AVERROR(ENOMEM); yading@10: yading@10: ret = snprintf(asrc_args, sizeof(asrc_args), yading@10: "sample_rate=%d:sample_fmt=%s:channels=%d:time_base=%d/%d", yading@10: is->audio_filter_src.freq, av_get_sample_fmt_name(is->audio_filter_src.fmt), yading@10: is->audio_filter_src.channels, yading@10: 1, is->audio_filter_src.freq); yading@10: if (is->audio_filter_src.channel_layout) yading@10: snprintf(asrc_args + ret, sizeof(asrc_args) - ret, yading@10: ":channel_layout=0x%"PRIx64, is->audio_filter_src.channel_layout); yading@10: yading@10: ret = avfilter_graph_create_filter(&filt_asrc, yading@10: 
avfilter_get_by_name("abuffer"), "ffplay_abuffer", yading@10: asrc_args, NULL, is->agraph); yading@10: if (ret < 0) yading@10: goto end; yading@10: yading@10: yading@10: ret = avfilter_graph_create_filter(&filt_asink, yading@10: avfilter_get_by_name("abuffersink"), "ffplay_abuffersink", yading@10: NULL, NULL, is->agraph); yading@10: if (ret < 0) yading@10: goto end; yading@10: yading@10: if ((ret = av_opt_set_int_list(filt_asink, "sample_fmts", sample_fmts, AV_SAMPLE_FMT_NONE, AV_OPT_SEARCH_CHILDREN)) < 0) yading@10: goto end; yading@10: if ((ret = av_opt_set_int(filt_asink, "all_channel_counts", 1, AV_OPT_SEARCH_CHILDREN)) < 0) yading@10: goto end; yading@10: yading@10: if (force_output_format) { yading@10: channel_layouts[0] = is->audio_tgt.channel_layout; yading@10: channels [0] = is->audio_tgt.channels; yading@10: sample_rates [0] = is->audio_tgt.freq; yading@10: if ((ret = av_opt_set_int(filt_asink, "all_channel_counts", 0, AV_OPT_SEARCH_CHILDREN)) < 0) yading@10: goto end; yading@10: if ((ret = av_opt_set_int_list(filt_asink, "channel_layouts", channel_layouts, -1, AV_OPT_SEARCH_CHILDREN)) < 0) yading@10: goto end; yading@10: if ((ret = av_opt_set_int_list(filt_asink, "channel_counts" , channels , -1, AV_OPT_SEARCH_CHILDREN)) < 0) yading@10: goto end; yading@10: if ((ret = av_opt_set_int_list(filt_asink, "sample_rates" , sample_rates , -1, AV_OPT_SEARCH_CHILDREN)) < 0) yading@10: goto end; yading@10: } yading@10: yading@10: yading@10: if ((ret = configure_filtergraph(is->agraph, afilters, filt_asrc, filt_asink)) < 0) yading@10: goto end; yading@10: yading@10: is->in_audio_filter = filt_asrc; yading@10: is->out_audio_filter = filt_asink; yading@10: yading@10: end: yading@10: if (ret < 0) yading@10: avfilter_graph_free(&is->agraph); yading@10: return ret; yading@10: } yading@10: #endif /* CONFIG_AVFILTER */ yading@10: yading@10: static int video_thread(void *arg) yading@10: { yading@10: AVPacket pkt = { 0 }; yading@10: VideoState *is = arg; yading@10: AVFrame *frame = av_frame_alloc(); yading@10: double pts; yading@10: int ret; yading@10: int serial = 0; yading@10: yading@10: #if CONFIG_AVFILTER yading@10: AVFilterGraph *graph = avfilter_graph_alloc(); yading@10: AVFilterContext *filt_out = NULL, *filt_in = NULL; yading@10: int last_w = 0; yading@10: int last_h = 0; yading@10: enum AVPixelFormat last_format = -2; yading@10: int last_serial = -1; yading@10: #endif yading@10: yading@10: for (;;) { yading@10: while (is->paused && !is->videoq.abort_request) yading@10: SDL_Delay(10); yading@10: yading@10: avcodec_get_frame_defaults(frame); yading@10: av_free_packet(&pkt); yading@10: yading@10: ret = get_video_frame(is, frame, &pkt, &serial); yading@10: if (ret < 0) yading@10: goto the_end; yading@10: if (!ret) yading@10: continue; yading@10: yading@10: #if CONFIG_AVFILTER yading@10: if ( last_w != frame->width yading@10: || last_h != frame->height yading@10: || last_format != frame->format yading@10: || last_serial != serial) { yading@10: av_log(NULL, AV_LOG_DEBUG, yading@10: "Video frame changed from size:%dx%d format:%s serial:%d to size:%dx%d format:%s serial:%d\n", yading@10: last_w, last_h, yading@10: (const char *)av_x_if_null(av_get_pix_fmt_name(last_format), "none"), last_serial, yading@10: frame->width, frame->height, yading@10: (const char *)av_x_if_null(av_get_pix_fmt_name(frame->format), "none"), serial); yading@10: avfilter_graph_free(&graph); yading@10: graph = avfilter_graph_alloc(); yading@10: if ((ret = configure_video_filters(graph, is, vfilters, frame)) < 0) { yading@10: 
SDL_Event event; yading@10: event.type = FF_QUIT_EVENT; yading@10: event.user.data1 = is; yading@10: SDL_PushEvent(&event); yading@10: av_free_packet(&pkt); yading@10: goto the_end; yading@10: } yading@10: filt_in = is->in_video_filter; yading@10: filt_out = is->out_video_filter; yading@10: last_w = frame->width; yading@10: last_h = frame->height; yading@10: last_format = frame->format; yading@10: last_serial = serial; yading@10: } yading@10: yading@10: ret = av_buffersrc_add_frame(filt_in, frame); yading@10: if (ret < 0) yading@10: goto the_end; yading@10: av_frame_unref(frame); yading@10: avcodec_get_frame_defaults(frame); yading@10: av_free_packet(&pkt); yading@10: yading@10: while (ret >= 0) { yading@10: is->frame_last_returned_time = av_gettime() / 1000000.0; yading@10: yading@10: ret = av_buffersink_get_frame_flags(filt_out, frame, 0); yading@10: if (ret < 0) { yading@10: ret = 0; yading@10: break; yading@10: } yading@10: yading@10: is->frame_last_filter_delay = av_gettime() / 1000000.0 - is->frame_last_returned_time; yading@10: if (fabs(is->frame_last_filter_delay) > AV_NOSYNC_THRESHOLD / 10.0) yading@10: is->frame_last_filter_delay = 0; yading@10: yading@10: pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(filt_out->inputs[0]->time_base); yading@10: ret = queue_picture(is, frame, pts, av_frame_get_pkt_pos(frame), serial); yading@10: av_frame_unref(frame); yading@10: } yading@10: #else yading@10: pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(is->video_st->time_base); yading@10: ret = queue_picture(is, frame, pts, pkt.pos, serial); yading@10: av_frame_unref(frame); yading@10: #endif yading@10: yading@10: if (ret < 0) yading@10: goto the_end; yading@10: } yading@10: the_end: yading@10: avcodec_flush_buffers(is->video_st->codec); yading@10: #if CONFIG_AVFILTER yading@10: avfilter_graph_free(&graph); yading@10: #endif yading@10: av_free_packet(&pkt); yading@10: av_frame_free(&frame); yading@10: return 0; yading@10: } yading@10: yading@10: static int subtitle_thread(void *arg) yading@10: { yading@10: VideoState *is = arg; yading@10: SubPicture *sp; yading@10: AVPacket pkt1, *pkt = &pkt1; yading@10: int got_subtitle; yading@10: double pts; yading@10: int i, j; yading@10: int r, g, b, y, u, v, a; yading@10: yading@10: for (;;) { yading@10: while (is->paused && !is->subtitleq.abort_request) { yading@10: SDL_Delay(10); yading@10: } yading@10: if (packet_queue_get(&is->subtitleq, pkt, 1, NULL) < 0) yading@10: break; yading@10: yading@10: if (pkt->data == flush_pkt.data) { yading@10: avcodec_flush_buffers(is->subtitle_st->codec); yading@10: continue; yading@10: } yading@10: SDL_LockMutex(is->subpq_mutex); yading@10: while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE && yading@10: !is->subtitleq.abort_request) { yading@10: SDL_CondWait(is->subpq_cond, is->subpq_mutex); yading@10: } yading@10: SDL_UnlockMutex(is->subpq_mutex); yading@10: yading@10: if (is->subtitleq.abort_request) yading@10: return 0; yading@10: yading@10: sp = &is->subpq[is->subpq_windex]; yading@10: yading@10: /* NOTE: ipts is the PTS of the _first_ picture beginning in yading@10: this packet, if any */ yading@10: pts = 0; yading@10: if (pkt->pts != AV_NOPTS_VALUE) yading@10: pts = av_q2d(is->subtitle_st->time_base) * pkt->pts; yading@10: yading@10: avcodec_decode_subtitle2(is->subtitle_st->codec, &sp->sub, yading@10: &got_subtitle, pkt); yading@10: if (got_subtitle && sp->sub.format == 0) { yading@10: if (sp->sub.pts != AV_NOPTS_VALUE) yading@10: pts = sp->sub.pts / (double)AV_TIME_BASE; 
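            /* Bitmap subtitles (format == 0) carry an RGBA palette in
             * rects[i]->pict.data[1]; the loop below rewrites each palette
             * entry in place as CCIR-range YUVA so the rectangles can later
             * be blended onto the YUV video overlay. For example an opaque
             * white entry (r=g=b=a=255) becomes about y=235, u=v=128, with
             * the alpha channel left untouched. */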
yading@10: sp->pts = pts; yading@10: yading@10: for (i = 0; i < sp->sub.num_rects; i++) yading@10: { yading@10: for (j = 0; j < sp->sub.rects[i]->nb_colors; j++) yading@10: { yading@10: RGBA_IN(r, g, b, a, (uint32_t*)sp->sub.rects[i]->pict.data[1] + j); yading@10: y = RGB_TO_Y_CCIR(r, g, b); yading@10: u = RGB_TO_U_CCIR(r, g, b, 0); yading@10: v = RGB_TO_V_CCIR(r, g, b, 0); yading@10: YUVA_OUT((uint32_t*)sp->sub.rects[i]->pict.data[1] + j, y, u, v, a); yading@10: } yading@10: } yading@10: yading@10: /* now we can update the picture count */ yading@10: if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE) yading@10: is->subpq_windex = 0; yading@10: SDL_LockMutex(is->subpq_mutex); yading@10: is->subpq_size++; yading@10: SDL_UnlockMutex(is->subpq_mutex); yading@10: } yading@10: av_free_packet(pkt); yading@10: } yading@10: return 0; yading@10: } yading@10: yading@10: /* copy samples for viewing in editor window */ yading@10: static void update_sample_display(VideoState *is, short *samples, int samples_size) yading@10: { yading@10: int size, len; yading@10: yading@10: size = samples_size / sizeof(short); yading@10: while (size > 0) { yading@10: len = SAMPLE_ARRAY_SIZE - is->sample_array_index; yading@10: if (len > size) yading@10: len = size; yading@10: memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short)); yading@10: samples += len; yading@10: is->sample_array_index += len; yading@10: if (is->sample_array_index >= SAMPLE_ARRAY_SIZE) yading@10: is->sample_array_index = 0; yading@10: size -= len; yading@10: } yading@10: } yading@10: yading@10: /* return the wanted number of samples to get better sync if sync_type is video yading@10: * or external master clock */ yading@10: static int synchronize_audio(VideoState *is, int nb_samples) yading@10: { yading@10: int wanted_nb_samples = nb_samples; yading@10: yading@10: /* if not master, then we try to remove or add samples to correct the clock */ yading@10: if (get_master_sync_type(is) != AV_SYNC_AUDIO_MASTER) { yading@10: double diff, avg_diff; yading@10: int min_nb_samples, max_nb_samples; yading@10: yading@10: diff = get_audio_clock(is) - get_master_clock(is); yading@10: yading@10: if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD) { yading@10: is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum; yading@10: if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) { yading@10: /* not enough measures to have a correct estimate */ yading@10: is->audio_diff_avg_count++; yading@10: } else { yading@10: /* estimate the A-V difference */ yading@10: avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef); yading@10: yading@10: if (fabs(avg_diff) >= is->audio_diff_threshold) { yading@10: wanted_nb_samples = nb_samples + (int)(diff * is->audio_src.freq); yading@10: min_nb_samples = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX) / 100)); yading@10: max_nb_samples = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX) / 100)); yading@10: wanted_nb_samples = FFMIN(FFMAX(wanted_nb_samples, min_nb_samples), max_nb_samples); yading@10: } yading@10: av_dlog(NULL, "diff=%f adiff=%f sample_diff=%d apts=%0.3f %f\n", yading@10: diff, avg_diff, wanted_nb_samples - nb_samples, yading@10: is->audio_clock, is->audio_diff_threshold); yading@10: } yading@10: } else { yading@10: /* too big difference : may be initial PTS errors, so yading@10: reset A-V filter */ yading@10: is->audio_diff_avg_count = 0; yading@10: is->audio_diff_cum = 0; yading@10: } yading@10: } yading@10: yading@10: return wanted_nb_samples; yading@10: } 
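
/* Illustrative sketch (not part of ffplay; the function name is ours): the
 * sample-count clamping performed by synchronize_audio() above, isolated.
 * With nb_samples = 1024, diff = 0.05 s and freq = 44100 Hz the raw request
 * would be 1024 + 2205 = 3229 samples, which the +/-10% bound
 * (SAMPLE_CORRECTION_PERCENT_MAX) reduces to 1126. */
static av_unused int sync_clamp_example(int nb_samples, double diff, int freq)
{
    /* raw number of samples needed to absorb the clock difference */
    int wanted = nb_samples + (int)(diff * freq);
    /* never stretch/shrink a buffer by more than SAMPLE_CORRECTION_PERCENT_MAX percent */
    int min_nb = nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX) / 100;
    int max_nb = nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX) / 100;
    return FFMIN(FFMAX(wanted, min_nb), max_nb);
}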
yading@10: yading@10: /** yading@10: * Decode one audio frame and return its uncompressed size. yading@10: * yading@10: * The processed audio frame is decoded, converted if required, and yading@10: * stored in is->audio_buf, with size in bytes given by the return yading@10: * value. yading@10: */ yading@10: static int audio_decode_frame(VideoState *is) yading@10: { yading@10: AVPacket *pkt_temp = &is->audio_pkt_temp; yading@10: AVPacket *pkt = &is->audio_pkt; yading@10: AVCodecContext *dec = is->audio_st->codec; yading@10: int len1, data_size, resampled_data_size; yading@10: int64_t dec_channel_layout; yading@10: int got_frame; yading@10: av_unused double audio_clock0; yading@10: int new_packet = 0; yading@10: int flush_complete = 0; yading@10: int wanted_nb_samples; yading@10: AVRational tb; yading@10: int ret; yading@10: int reconfigure; yading@10: yading@10: for (;;) { yading@10: /* NOTE: the audio packet can contain several frames */ yading@10: while (pkt_temp->size > 0 || (!pkt_temp->data && new_packet) || is->audio_buf_frames_pending) { yading@10: if (!is->frame) { yading@10: if (!(is->frame = avcodec_alloc_frame())) yading@10: return AVERROR(ENOMEM); yading@10: } else { yading@10: av_frame_unref(is->frame); yading@10: avcodec_get_frame_defaults(is->frame); yading@10: } yading@10: yading@10: if (is->audioq.serial != is->audio_pkt_temp_serial) yading@10: break; yading@10: yading@10: if (is->paused) yading@10: return -1; yading@10: yading@10: if (!is->audio_buf_frames_pending) { yading@10: if (flush_complete) yading@10: break; yading@10: new_packet = 0; yading@10: len1 = avcodec_decode_audio4(dec, is->frame, &got_frame, pkt_temp); yading@10: if (len1 < 0) { yading@10: /* if error, we skip the frame */ yading@10: pkt_temp->size = 0; yading@10: break; yading@10: } yading@10: yading@10: pkt_temp->data += len1; yading@10: pkt_temp->size -= len1; yading@10: yading@10: if (!got_frame) { yading@10: /* stop sending empty packets if the decoder is finished */ yading@10: if (!pkt_temp->data && dec->codec->capabilities & CODEC_CAP_DELAY) yading@10: flush_complete = 1; yading@10: continue; yading@10: } yading@10: yading@10: tb = (AVRational){1, is->frame->sample_rate}; yading@10: if (is->frame->pts != AV_NOPTS_VALUE) yading@10: is->frame->pts = av_rescale_q(is->frame->pts, dec->time_base, tb); yading@10: if (is->frame->pts == AV_NOPTS_VALUE && pkt_temp->pts != AV_NOPTS_VALUE) yading@10: is->frame->pts = av_rescale_q(pkt_temp->pts, is->audio_st->time_base, tb); yading@10: if (pkt_temp->pts != AV_NOPTS_VALUE) yading@10: pkt_temp->pts += (double) is->frame->nb_samples / is->frame->sample_rate / av_q2d(is->audio_st->time_base); yading@10: yading@10: #if CONFIG_AVFILTER yading@10: dec_channel_layout = get_valid_channel_layout(is->frame->channel_layout, av_frame_get_channels(is->frame)); yading@10: yading@10: reconfigure = yading@10: cmp_audio_fmts(is->audio_filter_src.fmt, is->audio_filter_src.channels, yading@10: is->frame->format, av_frame_get_channels(is->frame)) || yading@10: is->audio_filter_src.channel_layout != dec_channel_layout || yading@10: is->audio_filter_src.freq != is->frame->sample_rate || yading@10: is->audio_pkt_temp_serial != is->audio_last_serial; yading@10: yading@10: if (reconfigure) { yading@10: char buf1[1024], buf2[1024]; yading@10: av_get_channel_layout_string(buf1, sizeof(buf1), -1, is->audio_filter_src.channel_layout); yading@10: av_get_channel_layout_string(buf2, sizeof(buf2), -1, dec_channel_layout); yading@10: av_log(NULL, AV_LOG_DEBUG, yading@10: "Audio frame changed from 
rate:%d ch:%d fmt:%s layout:%s serial:%d to rate:%d ch:%d fmt:%s layout:%s serial:%d\n", yading@10: is->audio_filter_src.freq, is->audio_filter_src.channels, av_get_sample_fmt_name(is->audio_filter_src.fmt), buf1, is->audio_last_serial, yading@10: is->frame->sample_rate, av_frame_get_channels(is->frame), av_get_sample_fmt_name(is->frame->format), buf2, is->audio_pkt_temp_serial); yading@10: yading@10: is->audio_filter_src.fmt = is->frame->format; yading@10: is->audio_filter_src.channels = av_frame_get_channels(is->frame); yading@10: is->audio_filter_src.channel_layout = dec_channel_layout; yading@10: is->audio_filter_src.freq = is->frame->sample_rate; yading@10: is->audio_last_serial = is->audio_pkt_temp_serial; yading@10: yading@10: if ((ret = configure_audio_filters(is, afilters, 1)) < 0) yading@10: return ret; yading@10: } yading@10: yading@10: if ((ret = av_buffersrc_add_frame(is->in_audio_filter, is->frame)) < 0) yading@10: return ret; yading@10: av_frame_unref(is->frame); yading@10: #endif yading@10: } yading@10: #if CONFIG_AVFILTER yading@10: if ((ret = av_buffersink_get_frame_flags(is->out_audio_filter, is->frame, 0)) < 0) { yading@10: if (ret == AVERROR(EAGAIN)) { yading@10: is->audio_buf_frames_pending = 0; yading@10: continue; yading@10: } yading@10: return ret; yading@10: } yading@10: is->audio_buf_frames_pending = 1; yading@10: tb = is->out_audio_filter->inputs[0]->time_base; yading@10: #endif yading@10: yading@10: data_size = av_samples_get_buffer_size(NULL, av_frame_get_channels(is->frame), yading@10: is->frame->nb_samples, yading@10: is->frame->format, 1); yading@10: yading@10: dec_channel_layout = yading@10: (is->frame->channel_layout && av_frame_get_channels(is->frame) == av_get_channel_layout_nb_channels(is->frame->channel_layout)) ? 
yading@10: is->frame->channel_layout : av_get_default_channel_layout(av_frame_get_channels(is->frame)); yading@10: wanted_nb_samples = synchronize_audio(is, is->frame->nb_samples); yading@10: yading@10: if (is->frame->format != is->audio_src.fmt || yading@10: dec_channel_layout != is->audio_src.channel_layout || yading@10: is->frame->sample_rate != is->audio_src.freq || yading@10: (wanted_nb_samples != is->frame->nb_samples && !is->swr_ctx)) { yading@10: swr_free(&is->swr_ctx); yading@10: is->swr_ctx = swr_alloc_set_opts(NULL, yading@10: is->audio_tgt.channel_layout, is->audio_tgt.fmt, is->audio_tgt.freq, yading@10: dec_channel_layout, is->frame->format, is->frame->sample_rate, yading@10: 0, NULL); yading@10: if (!is->swr_ctx || swr_init(is->swr_ctx) < 0) { yading@10: fprintf(stderr, "Cannot create sample rate converter for conversion of %d Hz %s %d channels to %d Hz %s %d channels!\n", yading@10: is->frame->sample_rate, av_get_sample_fmt_name(is->frame->format), av_frame_get_channels(is->frame), yading@10: is->audio_tgt.freq, av_get_sample_fmt_name(is->audio_tgt.fmt), is->audio_tgt.channels); yading@10: break; yading@10: } yading@10: is->audio_src.channel_layout = dec_channel_layout; yading@10: is->audio_src.channels = av_frame_get_channels(is->frame); yading@10: is->audio_src.freq = is->frame->sample_rate; yading@10: is->audio_src.fmt = is->frame->format; yading@10: } yading@10: yading@10: if (is->swr_ctx) { yading@10: const uint8_t **in = (const uint8_t **)is->frame->extended_data; yading@10: uint8_t **out = &is->audio_buf1; yading@10: int out_count = (int64_t)wanted_nb_samples * is->audio_tgt.freq / is->frame->sample_rate + 256; yading@10: int out_size = av_samples_get_buffer_size(NULL, is->audio_tgt.channels, out_count, is->audio_tgt.fmt, 0); yading@10: int len2; yading@10: if (wanted_nb_samples != is->frame->nb_samples) { yading@10: if (swr_set_compensation(is->swr_ctx, (wanted_nb_samples - is->frame->nb_samples) * is->audio_tgt.freq / is->frame->sample_rate, yading@10: wanted_nb_samples * is->audio_tgt.freq / is->frame->sample_rate) < 0) { yading@10: fprintf(stderr, "swr_set_compensation() failed\n"); yading@10: break; yading@10: } yading@10: } yading@10: av_fast_malloc(&is->audio_buf1, &is->audio_buf1_size, out_size); yading@10: if (!is->audio_buf1) yading@10: return AVERROR(ENOMEM); yading@10: len2 = swr_convert(is->swr_ctx, out, out_count, in, is->frame->nb_samples); yading@10: if (len2 < 0) { yading@10: fprintf(stderr, "swr_convert() failed\n"); yading@10: break; yading@10: } yading@10: if (len2 == out_count) { yading@10: fprintf(stderr, "warning: audio buffer is probably too small\n"); yading@10: swr_init(is->swr_ctx); yading@10: } yading@10: is->audio_buf = is->audio_buf1; yading@10: resampled_data_size = len2 * is->audio_tgt.channels * av_get_bytes_per_sample(is->audio_tgt.fmt); yading@10: } else { yading@10: is->audio_buf = is->frame->data[0]; yading@10: resampled_data_size = data_size; yading@10: } yading@10: yading@10: audio_clock0 = is->audio_clock; yading@10: /* update the audio clock with the pts */ yading@10: if (is->frame->pts != AV_NOPTS_VALUE) { yading@10: is->audio_clock = is->frame->pts * av_q2d(tb) + (double) is->frame->nb_samples / is->frame->sample_rate; yading@10: is->audio_clock_serial = is->audio_pkt_temp_serial; yading@10: } yading@10: #ifdef DEBUG yading@10: { yading@10: static double last_clock; yading@10: printf("audio: delay=%0.3f clock=%0.3f clock0=%0.3f\n", yading@10: is->audio_clock - last_clock, yading@10: is->audio_clock, audio_clock0); yading@10: 
last_clock = is->audio_clock; yading@10: } yading@10: #endif yading@10: return resampled_data_size; yading@10: } yading@10: yading@10: /* free the current packet */ yading@10: if (pkt->data) yading@10: av_free_packet(pkt); yading@10: memset(pkt_temp, 0, sizeof(*pkt_temp)); yading@10: yading@10: if (is->audioq.abort_request) { yading@10: return -1; yading@10: } yading@10: yading@10: if (is->audioq.nb_packets == 0) yading@10: SDL_CondSignal(is->continue_read_thread); yading@10: yading@10: /* read next packet */ yading@10: if ((new_packet = packet_queue_get(&is->audioq, pkt, 1, &is->audio_pkt_temp_serial)) < 0) yading@10: return -1; yading@10: yading@10: if (pkt->data == flush_pkt.data) { yading@10: avcodec_flush_buffers(dec); yading@10: flush_complete = 0; yading@10: is->audio_buf_frames_pending = 0; yading@10: } yading@10: yading@10: *pkt_temp = *pkt; yading@10: } yading@10: } yading@10: yading@10: /* prepare a new audio buffer */ yading@10: static void sdl_audio_callback(void *opaque, Uint8 *stream, int len) yading@10: { yading@10: VideoState *is = opaque; yading@10: int audio_size, len1; yading@10: int bytes_per_sec; yading@10: int frame_size = av_samples_get_buffer_size(NULL, is->audio_tgt.channels, 1, is->audio_tgt.fmt, 1); yading@10: yading@10: audio_callback_time = av_gettime(); yading@10: yading@10: while (len > 0) { yading@10: if (is->audio_buf_index >= is->audio_buf_size) { yading@10: audio_size = audio_decode_frame(is); yading@10: if (audio_size < 0) { yading@10: /* if error, just output silence */ yading@10: is->audio_buf = is->silence_buf; yading@10: is->audio_buf_size = sizeof(is->silence_buf) / frame_size * frame_size; yading@10: } else { yading@10: if (is->show_mode != SHOW_MODE_VIDEO) yading@10: update_sample_display(is, (int16_t *)is->audio_buf, audio_size); yading@10: is->audio_buf_size = audio_size; yading@10: } yading@10: is->audio_buf_index = 0; yading@10: } yading@10: len1 = is->audio_buf_size - is->audio_buf_index; yading@10: if (len1 > len) yading@10: len1 = len; yading@10: memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1); yading@10: len -= len1; yading@10: stream += len1; yading@10: is->audio_buf_index += len1; yading@10: } yading@10: bytes_per_sec = is->audio_tgt.freq * is->audio_tgt.channels * av_get_bytes_per_sample(is->audio_tgt.fmt); yading@10: is->audio_write_buf_size = is->audio_buf_size - is->audio_buf_index; yading@10: /* Let's assume the audio driver that is used by SDL has two periods. 
*/ yading@10: is->audio_current_pts = is->audio_clock - (double)(2 * is->audio_hw_buf_size + is->audio_write_buf_size) / bytes_per_sec; yading@10: is->audio_current_pts_drift = is->audio_current_pts - audio_callback_time / 1000000.0; yading@10: if (is->audioq.serial == is->audio_clock_serial) yading@10: check_external_clock_sync(is, is->audio_current_pts); yading@10: } yading@10: yading@10: static int audio_open(void *opaque, int64_t wanted_channel_layout, int wanted_nb_channels, int wanted_sample_rate, struct AudioParams *audio_hw_params) yading@10: { yading@10: SDL_AudioSpec wanted_spec, spec; yading@10: const char *env; yading@10: const int next_nb_channels[] = {0, 0, 1, 6, 2, 6, 4, 6}; yading@10: yading@10: env = SDL_getenv("SDL_AUDIO_CHANNELS"); yading@10: if (env) { yading@10: wanted_nb_channels = atoi(env); yading@10: wanted_channel_layout = av_get_default_channel_layout(wanted_nb_channels); yading@10: } yading@10: if (!wanted_channel_layout || wanted_nb_channels != av_get_channel_layout_nb_channels(wanted_channel_layout)) { yading@10: wanted_channel_layout = av_get_default_channel_layout(wanted_nb_channels); yading@10: wanted_channel_layout &= ~AV_CH_LAYOUT_STEREO_DOWNMIX; yading@10: } yading@10: wanted_spec.channels = av_get_channel_layout_nb_channels(wanted_channel_layout); yading@10: wanted_spec.freq = wanted_sample_rate; yading@10: if (wanted_spec.freq <= 0 || wanted_spec.channels <= 0) { yading@10: fprintf(stderr, "Invalid sample rate or channel count!\n"); yading@10: return -1; yading@10: } yading@10: wanted_spec.format = AUDIO_S16SYS; yading@10: wanted_spec.silence = 0; yading@10: wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE; yading@10: wanted_spec.callback = sdl_audio_callback; yading@10: wanted_spec.userdata = opaque; yading@10: while (SDL_OpenAudio(&wanted_spec, &spec) < 0) { yading@10: fprintf(stderr, "SDL_OpenAudio (%d channels): %s\n", wanted_spec.channels, SDL_GetError()); yading@10: wanted_spec.channels = next_nb_channels[FFMIN(7, wanted_spec.channels)]; yading@10: if (!wanted_spec.channels) { yading@10: fprintf(stderr, "No more channel combinations to try, audio open failed\n"); yading@10: return -1; yading@10: } yading@10: wanted_channel_layout = av_get_default_channel_layout(wanted_spec.channels); yading@10: } yading@10: if (spec.format != AUDIO_S16SYS) { yading@10: fprintf(stderr, "SDL advised audio format %d is not supported!\n", spec.format); yading@10: return -1; yading@10: } yading@10: if (spec.channels != wanted_spec.channels) { yading@10: wanted_channel_layout = av_get_default_channel_layout(spec.channels); yading@10: if (!wanted_channel_layout) { yading@10: fprintf(stderr, "SDL advised channel count %d is not supported!\n", spec.channels); yading@10: return -1; yading@10: } yading@10: } yading@10: yading@10: audio_hw_params->fmt = AV_SAMPLE_FMT_S16; yading@10: audio_hw_params->freq = spec.freq; yading@10: audio_hw_params->channel_layout = wanted_channel_layout; yading@10: audio_hw_params->channels = spec.channels; yading@10: return spec.size; yading@10: } yading@10: yading@10: /* open a given stream. 
Return 0 if OK */ yading@10: static int stream_component_open(VideoState *is, int stream_index) yading@10: { yading@10: AVFormatContext *ic = is->ic; yading@10: AVCodecContext *avctx; yading@10: AVCodec *codec; yading@10: const char *forced_codec_name = NULL; yading@10: AVDictionary *opts; yading@10: AVDictionaryEntry *t = NULL; yading@10: int sample_rate, nb_channels; yading@10: int64_t channel_layout; yading@10: int ret; yading@10: yading@10: if (stream_index < 0 || stream_index >= ic->nb_streams) yading@10: return -1; yading@10: avctx = ic->streams[stream_index]->codec; yading@10: yading@10: codec = avcodec_find_decoder(avctx->codec_id); yading@10: yading@10: switch(avctx->codec_type){ yading@10: case AVMEDIA_TYPE_AUDIO : is->last_audio_stream = stream_index; forced_codec_name = audio_codec_name; break; yading@10: case AVMEDIA_TYPE_SUBTITLE: is->last_subtitle_stream = stream_index; forced_codec_name = subtitle_codec_name; break; yading@10: case AVMEDIA_TYPE_VIDEO : is->last_video_stream = stream_index; forced_codec_name = video_codec_name; break; yading@10: } yading@10: if (forced_codec_name) yading@10: codec = avcodec_find_decoder_by_name(forced_codec_name); yading@10: if (!codec) { yading@10: if (forced_codec_name) fprintf(stderr, "No codec could be found with name '%s'\n", forced_codec_name); yading@10: else fprintf(stderr, "No codec could be found with id %d\n", avctx->codec_id); yading@10: return -1; yading@10: } yading@10: yading@10: avctx->codec_id = codec->id; yading@10: avctx->workaround_bugs = workaround_bugs; yading@10: avctx->lowres = lowres; yading@10: if(avctx->lowres > codec->max_lowres){ yading@10: av_log(avctx, AV_LOG_WARNING, "The maximum value for lowres supported by the decoder is %d\n", yading@10: codec->max_lowres); yading@10: avctx->lowres= codec->max_lowres; yading@10: } yading@10: avctx->idct_algo = idct; yading@10: avctx->error_concealment = error_concealment; yading@10: yading@10: if(avctx->lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE; yading@10: if (fast) avctx->flags2 |= CODEC_FLAG2_FAST; yading@10: if(codec->capabilities & CODEC_CAP_DR1) yading@10: avctx->flags |= CODEC_FLAG_EMU_EDGE; yading@10: yading@10: opts = filter_codec_opts(codec_opts, avctx->codec_id, ic, ic->streams[stream_index], codec); yading@10: if (!av_dict_get(opts, "threads", NULL, 0)) yading@10: av_dict_set(&opts, "threads", "auto", 0); yading@10: if (avctx->codec_type == AVMEDIA_TYPE_VIDEO || avctx->codec_type == AVMEDIA_TYPE_AUDIO) yading@10: av_dict_set(&opts, "refcounted_frames", "1", 0); yading@10: if (avcodec_open2(avctx, codec, &opts) < 0) yading@10: return -1; yading@10: if ((t = av_dict_get(opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) { yading@10: av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key); yading@10: return AVERROR_OPTION_NOT_FOUND; yading@10: } yading@10: yading@10: ic->streams[stream_index]->discard = AVDISCARD_DEFAULT; yading@10: switch (avctx->codec_type) { yading@10: case AVMEDIA_TYPE_AUDIO: yading@10: #if CONFIG_AVFILTER yading@10: { yading@10: AVFilterLink *link; yading@10: yading@10: is->audio_filter_src.freq = avctx->sample_rate; yading@10: is->audio_filter_src.channels = avctx->channels; yading@10: is->audio_filter_src.channel_layout = get_valid_channel_layout(avctx->channel_layout, avctx->channels); yading@10: is->audio_filter_src.fmt = avctx->sample_fmt; yading@10: if ((ret = configure_audio_filters(is, afilters, 0)) < 0) yading@10: return ret; yading@10: link = is->out_audio_filter->inputs[0]; yading@10: sample_rate = link->sample_rate; yading@10: 
nb_channels = link->channels; yading@10: channel_layout = link->channel_layout; yading@10: } yading@10: #else yading@10: sample_rate = avctx->sample_rate; yading@10: nb_channels = avctx->channels; yading@10: channel_layout = avctx->channel_layout; yading@10: #endif yading@10: yading@10: /* prepare audio output */ yading@10: if ((ret = audio_open(is, channel_layout, nb_channels, sample_rate, &is->audio_tgt)) < 0) yading@10: return ret; yading@10: is->audio_hw_buf_size = ret; yading@10: is->audio_src = is->audio_tgt; yading@10: is->audio_buf_size = 0; yading@10: is->audio_buf_index = 0; yading@10: yading@10: /* init averaging filter */ yading@10: is->audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB); yading@10: is->audio_diff_avg_count = 0; yading@10: /* since we do not have a precise anough audio fifo fullness, yading@10: we correct audio sync only if larger than this threshold */ yading@10: is->audio_diff_threshold = 2.0 * is->audio_hw_buf_size / av_samples_get_buffer_size(NULL, is->audio_tgt.channels, is->audio_tgt.freq, is->audio_tgt.fmt, 1); yading@10: yading@10: memset(&is->audio_pkt, 0, sizeof(is->audio_pkt)); yading@10: memset(&is->audio_pkt_temp, 0, sizeof(is->audio_pkt_temp)); yading@10: yading@10: is->audio_stream = stream_index; yading@10: is->audio_st = ic->streams[stream_index]; yading@10: yading@10: packet_queue_start(&is->audioq); yading@10: SDL_PauseAudio(0); yading@10: break; yading@10: case AVMEDIA_TYPE_VIDEO: yading@10: is->video_stream = stream_index; yading@10: is->video_st = ic->streams[stream_index]; yading@10: yading@10: packet_queue_start(&is->videoq); yading@10: is->video_tid = SDL_CreateThread(video_thread, is); yading@10: is->queue_attachments_req = 1; yading@10: break; yading@10: case AVMEDIA_TYPE_SUBTITLE: yading@10: is->subtitle_stream = stream_index; yading@10: is->subtitle_st = ic->streams[stream_index]; yading@10: packet_queue_start(&is->subtitleq); yading@10: yading@10: is->subtitle_tid = SDL_CreateThread(subtitle_thread, is); yading@10: break; yading@10: default: yading@10: break; yading@10: } yading@10: return 0; yading@10: } yading@10: yading@10: static void stream_component_close(VideoState *is, int stream_index) yading@10: { yading@10: AVFormatContext *ic = is->ic; yading@10: AVCodecContext *avctx; yading@10: yading@10: if (stream_index < 0 || stream_index >= ic->nb_streams) yading@10: return; yading@10: avctx = ic->streams[stream_index]->codec; yading@10: yading@10: switch (avctx->codec_type) { yading@10: case AVMEDIA_TYPE_AUDIO: yading@10: packet_queue_abort(&is->audioq); yading@10: yading@10: SDL_CloseAudio(); yading@10: yading@10: packet_queue_flush(&is->audioq); yading@10: av_free_packet(&is->audio_pkt); yading@10: swr_free(&is->swr_ctx); yading@10: av_freep(&is->audio_buf1); yading@10: is->audio_buf1_size = 0; yading@10: is->audio_buf = NULL; yading@10: av_frame_free(&is->frame); yading@10: yading@10: if (is->rdft) { yading@10: av_rdft_end(is->rdft); yading@10: av_freep(&is->rdft_data); yading@10: is->rdft = NULL; yading@10: is->rdft_bits = 0; yading@10: } yading@10: #if CONFIG_AVFILTER yading@10: avfilter_graph_free(&is->agraph); yading@10: #endif yading@10: break; yading@10: case AVMEDIA_TYPE_VIDEO: yading@10: packet_queue_abort(&is->videoq); yading@10: yading@10: /* note: we also signal this mutex to make sure we deblock the yading@10: video thread in all cases */ yading@10: SDL_LockMutex(is->pictq_mutex); yading@10: SDL_CondSignal(is->pictq_cond); yading@10: SDL_UnlockMutex(is->pictq_mutex); yading@10: yading@10: 
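        /* queue_picture() may be blocked in SDL_CondWait() waiting for a free
         * picture slot; the signal above wakes it so the video thread can see
         * the abort request and return, otherwise the SDL_WaitThread() below
         * could block forever. */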
SDL_WaitThread(is->video_tid, NULL); yading@10: yading@10: packet_queue_flush(&is->videoq); yading@10: break; yading@10: case AVMEDIA_TYPE_SUBTITLE: yading@10: packet_queue_abort(&is->subtitleq); yading@10: yading@10: /* note: we also signal this mutex to make sure we deblock the yading@10: video thread in all cases */ yading@10: SDL_LockMutex(is->subpq_mutex); yading@10: is->subtitle_stream_changed = 1; yading@10: yading@10: SDL_CondSignal(is->subpq_cond); yading@10: SDL_UnlockMutex(is->subpq_mutex); yading@10: yading@10: SDL_WaitThread(is->subtitle_tid, NULL); yading@10: yading@10: packet_queue_flush(&is->subtitleq); yading@10: break; yading@10: default: yading@10: break; yading@10: } yading@10: yading@10: ic->streams[stream_index]->discard = AVDISCARD_ALL; yading@10: avcodec_close(avctx); yading@10: switch (avctx->codec_type) { yading@10: case AVMEDIA_TYPE_AUDIO: yading@10: is->audio_st = NULL; yading@10: is->audio_stream = -1; yading@10: break; yading@10: case AVMEDIA_TYPE_VIDEO: yading@10: is->video_st = NULL; yading@10: is->video_stream = -1; yading@10: break; yading@10: case AVMEDIA_TYPE_SUBTITLE: yading@10: is->subtitle_st = NULL; yading@10: is->subtitle_stream = -1; yading@10: break; yading@10: default: yading@10: break; yading@10: } yading@10: } yading@10: yading@10: static int decode_interrupt_cb(void *ctx) yading@10: { yading@10: VideoState *is = ctx; yading@10: return is->abort_request; yading@10: } yading@10: yading@10: static int is_realtime(AVFormatContext *s) yading@10: { yading@10: if( !strcmp(s->iformat->name, "rtp") yading@10: || !strcmp(s->iformat->name, "rtsp") yading@10: || !strcmp(s->iformat->name, "sdp") yading@10: ) yading@10: return 1; yading@10: yading@10: if(s->pb && ( !strncmp(s->filename, "rtp:", 4) yading@10: || !strncmp(s->filename, "udp:", 4) yading@10: ) yading@10: ) yading@10: return 1; yading@10: return 0; yading@10: } yading@10: yading@10: /* this thread gets the stream from the disk or the network */ yading@10: static int read_thread(void *arg) yading@10: { yading@10: VideoState *is = arg; yading@10: AVFormatContext *ic = NULL; yading@10: int err, i, ret; yading@10: int st_index[AVMEDIA_TYPE_NB]; yading@10: AVPacket pkt1, *pkt = &pkt1; yading@10: int eof = 0; yading@10: int pkt_in_play_range = 0; yading@10: AVDictionaryEntry *t; yading@10: AVDictionary **opts; yading@10: int orig_nb_streams; yading@10: SDL_mutex *wait_mutex = SDL_CreateMutex(); yading@10: yading@10: memset(st_index, -1, sizeof(st_index)); yading@10: is->last_video_stream = is->video_stream = -1; yading@10: is->last_audio_stream = is->audio_stream = -1; yading@10: is->last_subtitle_stream = is->subtitle_stream = -1; yading@10: yading@10: ic = avformat_alloc_context(); yading@10: ic->interrupt_callback.callback = decode_interrupt_cb; yading@10: ic->interrupt_callback.opaque = is; yading@10: err = avformat_open_input(&ic, is->filename, is->iformat, &format_opts); yading@10: if (err < 0) { yading@10: print_error(is->filename, err); yading@10: ret = -1; yading@10: goto fail; yading@10: } yading@10: if ((t = av_dict_get(format_opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) { yading@10: av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key); yading@10: ret = AVERROR_OPTION_NOT_FOUND; yading@10: goto fail; yading@10: } yading@10: is->ic = ic; yading@10: yading@10: if (genpts) yading@10: ic->flags |= AVFMT_FLAG_GENPTS; yading@10: yading@10: opts = setup_find_stream_info_opts(ic, codec_opts); yading@10: orig_nb_streams = ic->nb_streams; yading@10: yading@10: err = avformat_find_stream_info(ic, 
opts); yading@10: if (err < 0) { yading@10: fprintf(stderr, "%s: could not find codec parameters\n", is->filename); yading@10: ret = -1; yading@10: goto fail; yading@10: } yading@10: for (i = 0; i < orig_nb_streams; i++) yading@10: av_dict_free(&opts[i]); yading@10: av_freep(&opts); yading@10: yading@10: if (ic->pb) yading@10: ic->pb->eof_reached = 0; // FIXME hack, ffplay maybe should not use url_feof() to test for the end yading@10: yading@10: if (seek_by_bytes < 0) yading@10: seek_by_bytes = !!(ic->iformat->flags & AVFMT_TS_DISCONT) && strcmp("ogg", ic->iformat->name); yading@10: yading@10: is->max_frame_duration = (ic->iformat->flags & AVFMT_TS_DISCONT) ? 10.0 : 3600.0; yading@10: yading@10: if (!window_title && (t = av_dict_get(ic->metadata, "title", NULL, 0))) yading@10: window_title = av_asprintf("%s - %s", t->value, input_filename); yading@10: yading@10: /* if seeking requested, we execute it */ yading@10: if (start_time != AV_NOPTS_VALUE) { yading@10: int64_t timestamp; yading@10: yading@10: timestamp = start_time; yading@10: /* add the stream start time */ yading@10: if (ic->start_time != AV_NOPTS_VALUE) yading@10: timestamp += ic->start_time; yading@10: ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0); yading@10: if (ret < 0) { yading@10: fprintf(stderr, "%s: could not seek to position %0.3f\n", yading@10: is->filename, (double)timestamp / AV_TIME_BASE); yading@10: } yading@10: } yading@10: yading@10: is->realtime = is_realtime(ic); yading@10: yading@10: for (i = 0; i < ic->nb_streams; i++) yading@10: ic->streams[i]->discard = AVDISCARD_ALL; yading@10: if (!video_disable) yading@10: st_index[AVMEDIA_TYPE_VIDEO] = yading@10: av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO, yading@10: wanted_stream[AVMEDIA_TYPE_VIDEO], -1, NULL, 0); yading@10: if (!audio_disable) yading@10: st_index[AVMEDIA_TYPE_AUDIO] = yading@10: av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO, yading@10: wanted_stream[AVMEDIA_TYPE_AUDIO], yading@10: st_index[AVMEDIA_TYPE_VIDEO], yading@10: NULL, 0); yading@10: if (!video_disable && !subtitle_disable) yading@10: st_index[AVMEDIA_TYPE_SUBTITLE] = yading@10: av_find_best_stream(ic, AVMEDIA_TYPE_SUBTITLE, yading@10: wanted_stream[AVMEDIA_TYPE_SUBTITLE], yading@10: (st_index[AVMEDIA_TYPE_AUDIO] >= 0 ? yading@10: st_index[AVMEDIA_TYPE_AUDIO] : yading@10: st_index[AVMEDIA_TYPE_VIDEO]), yading@10: NULL, 0); yading@10: if (show_status) { yading@10: av_dump_format(ic, 0, is->filename, 0); yading@10: } yading@10: yading@10: is->show_mode = show_mode; yading@10: yading@10: /* open the streams */ yading@10: if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) { yading@10: stream_component_open(is, st_index[AVMEDIA_TYPE_AUDIO]); yading@10: } yading@10: yading@10: ret = -1; yading@10: if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) { yading@10: ret = stream_component_open(is, st_index[AVMEDIA_TYPE_VIDEO]); yading@10: } yading@10: if (is->show_mode == SHOW_MODE_NONE) yading@10: is->show_mode = ret >= 0 ? 
SHOW_MODE_VIDEO : SHOW_MODE_RDFT;
yading@10:
yading@10:     if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
yading@10:         stream_component_open(is, st_index[AVMEDIA_TYPE_SUBTITLE]);
yading@10:     }
yading@10:
yading@10:     if (is->video_stream < 0 && is->audio_stream < 0) {
yading@10:         fprintf(stderr, "%s: could not open codecs\n", is->filename);
yading@10:         ret = -1;
yading@10:         goto fail;
yading@10:     }
yading@10:
yading@10:     if (infinite_buffer < 0 && is->realtime)
yading@10:         infinite_buffer = 1;
yading@10:
yading@10:     for (;;) {
yading@10:         if (is->abort_request)
yading@10:             break;
yading@10:         if (is->paused != is->last_paused) {
yading@10:             is->last_paused = is->paused;
yading@10:             if (is->paused)
yading@10:                 is->read_pause_return = av_read_pause(ic);
yading@10:             else
yading@10:                 av_read_play(ic);
yading@10:         }
yading@10: #if CONFIG_RTSP_DEMUXER || CONFIG_MMSH_PROTOCOL
yading@10:         if (is->paused &&
yading@10:                 (!strcmp(ic->iformat->name, "rtsp") ||
yading@10:                  (ic->pb && !strncmp(input_filename, "mmsh:", 5)))) {
yading@10:             /* wait 10 ms to avoid trying to get another packet */
yading@10:             /* XXX: horrible */
yading@10:             SDL_Delay(10);
yading@10:             continue;
yading@10:         }
yading@10: #endif
yading@10:         if (is->seek_req) {
yading@10:             int64_t seek_target = is->seek_pos;
yading@10:             int64_t seek_min = is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN;
yading@10:             int64_t seek_max = is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX;
yading@10:             // FIXME the +-2 is due to rounding being not done in the correct direction in generation
yading@10:             // of the seek_pos/seek_rel variables
yading@10:
yading@10:             ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
yading@10:             if (ret < 0) {
yading@10:                 fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
yading@10:             } else {
yading@10:                 if (is->audio_stream >= 0) {
yading@10:                     packet_queue_flush(&is->audioq);
yading@10:                     packet_queue_put(&is->audioq, &flush_pkt);
yading@10:                 }
yading@10:                 if (is->subtitle_stream >= 0) {
yading@10:                     packet_queue_flush(&is->subtitleq);
yading@10:                     packet_queue_put(&is->subtitleq, &flush_pkt);
yading@10:                 }
yading@10:                 if (is->video_stream >= 0) {
yading@10:                     packet_queue_flush(&is->videoq);
yading@10:                     packet_queue_put(&is->videoq, &flush_pkt);
yading@10:                 }
yading@10:                 if (is->seek_flags & AVSEEK_FLAG_BYTE) {
yading@10:                     update_external_clock_pts(is, NAN);
yading@10:                 } else {
yading@10:                     update_external_clock_pts(is, seek_target / (double)AV_TIME_BASE);
yading@10:                 }
yading@10:             }
yading@10:             is->seek_req = 0;
yading@10:             is->queue_attachments_req = 1;
yading@10:             eof = 0;
yading@10:             if (is->paused)
yading@10:                 step_to_next_frame(is);
yading@10:         }
yading@10:         if (is->queue_attachments_req) {
yading@10:             if (is->video_st && is->video_st->disposition & AV_DISPOSITION_ATTACHED_PIC) {
yading@10:                 AVPacket copy;
yading@10:                 if ((ret = av_copy_packet(&copy, &is->video_st->attached_pic)) < 0)
yading@10:                     goto fail;
yading@10:                 packet_queue_put(&is->videoq, &copy);
yading@10:             }
yading@10:             is->queue_attachments_req = 0;
yading@10:         }
yading@10:
yading@10:         /* if the queues are full, no need to read more */
yading@10:         if (infinite_buffer<1 &&
yading@10:               (is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
yading@10:             || ( (is->audioq .nb_packets > MIN_FRAMES || is->audio_stream < 0 || is->audioq.abort_request)
yading@10:                 && (is->videoq .nb_packets > MIN_FRAMES || is->video_stream < 0 || is->videoq.abort_request
yading@10:                     || (is->video_st->disposition & AV_DISPOSITION_ATTACHED_PIC))
yading@10:                 && (is->subtitleq.nb_packets >
MIN_FRAMES || is->subtitle_stream < 0 || is->subtitleq.abort_request)))) { yading@10: /* wait 10 ms */ yading@10: SDL_LockMutex(wait_mutex); yading@10: SDL_CondWaitTimeout(is->continue_read_thread, wait_mutex, 10); yading@10: SDL_UnlockMutex(wait_mutex); yading@10: continue; yading@10: } yading@10: if (eof) { yading@10: if (is->video_stream >= 0) { yading@10: av_init_packet(pkt); yading@10: pkt->data = NULL; yading@10: pkt->size = 0; yading@10: pkt->stream_index = is->video_stream; yading@10: packet_queue_put(&is->videoq, pkt); yading@10: } yading@10: if (is->audio_stream >= 0 && yading@10: is->audio_st->codec->codec->capabilities & CODEC_CAP_DELAY) { yading@10: av_init_packet(pkt); yading@10: pkt->data = NULL; yading@10: pkt->size = 0; yading@10: pkt->stream_index = is->audio_stream; yading@10: packet_queue_put(&is->audioq, pkt); yading@10: } yading@10: SDL_Delay(10); yading@10: if (is->audioq.size + is->videoq.size + is->subtitleq.size == 0) { yading@10: if (loop != 1 && (!loop || --loop)) { yading@10: stream_seek(is, start_time != AV_NOPTS_VALUE ? start_time : 0, 0, 0); yading@10: } else if (autoexit) { yading@10: ret = AVERROR_EOF; yading@10: goto fail; yading@10: } yading@10: } yading@10: eof=0; yading@10: continue; yading@10: } yading@10: ret = av_read_frame(ic, pkt); yading@10: if (ret < 0) { yading@10: if (ret == AVERROR_EOF || url_feof(ic->pb)) yading@10: eof = 1; yading@10: if (ic->pb && ic->pb->error) yading@10: break; yading@10: SDL_LockMutex(wait_mutex); yading@10: SDL_CondWaitTimeout(is->continue_read_thread, wait_mutex, 10); yading@10: SDL_UnlockMutex(wait_mutex); yading@10: continue; yading@10: } yading@10: /* check if packet is in play range specified by user, then queue, otherwise discard */ yading@10: pkt_in_play_range = duration == AV_NOPTS_VALUE || yading@10: (pkt->pts - ic->streams[pkt->stream_index]->start_time) * yading@10: av_q2d(ic->streams[pkt->stream_index]->time_base) - yading@10: (double)(start_time != AV_NOPTS_VALUE ? 
start_time : 0) / 1000000 yading@10: <= ((double)duration / 1000000); yading@10: if (pkt->stream_index == is->audio_stream && pkt_in_play_range) { yading@10: packet_queue_put(&is->audioq, pkt); yading@10: } else if (pkt->stream_index == is->video_stream && pkt_in_play_range yading@10: && !(is->video_st->disposition & AV_DISPOSITION_ATTACHED_PIC)) { yading@10: packet_queue_put(&is->videoq, pkt); yading@10: } else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) { yading@10: packet_queue_put(&is->subtitleq, pkt); yading@10: } else { yading@10: av_free_packet(pkt); yading@10: } yading@10: } yading@10: /* wait until the end */ yading@10: while (!is->abort_request) { yading@10: SDL_Delay(100); yading@10: } yading@10: yading@10: ret = 0; yading@10: fail: yading@10: /* close each stream */ yading@10: if (is->audio_stream >= 0) yading@10: stream_component_close(is, is->audio_stream); yading@10: if (is->video_stream >= 0) yading@10: stream_component_close(is, is->video_stream); yading@10: if (is->subtitle_stream >= 0) yading@10: stream_component_close(is, is->subtitle_stream); yading@10: if (is->ic) { yading@10: avformat_close_input(&is->ic); yading@10: } yading@10: yading@10: if (ret != 0) { yading@10: SDL_Event event; yading@10: yading@10: event.type = FF_QUIT_EVENT; yading@10: event.user.data1 = is; yading@10: SDL_PushEvent(&event); yading@10: } yading@10: SDL_DestroyMutex(wait_mutex); yading@10: return 0; yading@10: } yading@10: yading@10: static VideoState *stream_open(const char *filename, AVInputFormat *iformat) yading@10: { yading@10: VideoState *is; yading@10: yading@10: is = av_mallocz(sizeof(VideoState)); yading@10: if (!is) yading@10: return NULL; yading@10: av_strlcpy(is->filename, filename, sizeof(is->filename)); yading@10: is->iformat = iformat; yading@10: is->ytop = 0; yading@10: is->xleft = 0; yading@10: yading@10: /* start video display */ yading@10: is->pictq_mutex = SDL_CreateMutex(); yading@10: is->pictq_cond = SDL_CreateCond(); yading@10: yading@10: is->subpq_mutex = SDL_CreateMutex(); yading@10: is->subpq_cond = SDL_CreateCond(); yading@10: yading@10: packet_queue_init(&is->videoq); yading@10: packet_queue_init(&is->audioq); yading@10: packet_queue_init(&is->subtitleq); yading@10: yading@10: is->continue_read_thread = SDL_CreateCond(); yading@10: yading@10: update_external_clock_pts(is, NAN); yading@10: update_external_clock_speed(is, 1.0); yading@10: is->audio_current_pts_drift = -av_gettime() / 1000000.0; yading@10: is->video_current_pts_drift = is->audio_current_pts_drift; yading@10: is->audio_clock_serial = -1; yading@10: is->video_clock_serial = -1; yading@10: is->audio_last_serial = -1; yading@10: is->av_sync_type = av_sync_type; yading@10: is->read_tid = SDL_CreateThread(read_thread, is); yading@10: if (!is->read_tid) { yading@10: av_free(is); yading@10: return NULL; yading@10: } yading@10: return is; yading@10: } yading@10: yading@10: static void stream_cycle_channel(VideoState *is, int codec_type) yading@10: { yading@10: AVFormatContext *ic = is->ic; yading@10: int start_index, stream_index; yading@10: int old_index; yading@10: AVStream *st; yading@10: yading@10: if (codec_type == AVMEDIA_TYPE_VIDEO) { yading@10: start_index = is->last_video_stream; yading@10: old_index = is->video_stream; yading@10: } else if (codec_type == AVMEDIA_TYPE_AUDIO) { yading@10: start_index = is->last_audio_stream; yading@10: old_index = is->audio_stream; yading@10: } else { yading@10: start_index = is->last_subtitle_stream; yading@10: old_index = is->subtitle_stream; 
yading@10: } yading@10: stream_index = start_index; yading@10: for (;;) { yading@10: if (++stream_index >= is->ic->nb_streams) yading@10: { yading@10: if (codec_type == AVMEDIA_TYPE_SUBTITLE) yading@10: { yading@10: stream_index = -1; yading@10: is->last_subtitle_stream = -1; yading@10: goto the_end; yading@10: } yading@10: if (start_index == -1) yading@10: return; yading@10: stream_index = 0; yading@10: } yading@10: if (stream_index == start_index) yading@10: return; yading@10: st = ic->streams[stream_index]; yading@10: if (st->codec->codec_type == codec_type) { yading@10: /* check that parameters are OK */ yading@10: switch (codec_type) { yading@10: case AVMEDIA_TYPE_AUDIO: yading@10: if (st->codec->sample_rate != 0 && yading@10: st->codec->channels != 0) yading@10: goto the_end; yading@10: break; yading@10: case AVMEDIA_TYPE_VIDEO: yading@10: case AVMEDIA_TYPE_SUBTITLE: yading@10: goto the_end; yading@10: default: yading@10: break; yading@10: } yading@10: } yading@10: } yading@10: the_end: yading@10: stream_component_close(is, old_index); yading@10: stream_component_open(is, stream_index); yading@10: } yading@10: yading@10: yading@10: static void toggle_full_screen(VideoState *is) yading@10: { yading@10: #if defined(__APPLE__) && SDL_VERSION_ATLEAST(1, 2, 14) yading@10: /* OS X needs to reallocate the SDL overlays */ yading@10: int i; yading@10: for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++) yading@10: is->pictq[i].reallocate = 1; yading@10: #endif yading@10: is_full_screen = !is_full_screen; yading@10: video_open(is, 1, NULL); yading@10: } yading@10: yading@10: static void toggle_audio_display(VideoState *is) yading@10: { yading@10: int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00); yading@10: int next = is->show_mode; yading@10: do { yading@10: next = (next + 1) % SHOW_MODE_NB; yading@10: } while (next != is->show_mode && (next == SHOW_MODE_VIDEO && !is->video_st || next != SHOW_MODE_VIDEO && !is->audio_st)); yading@10: if (is->show_mode != next) { yading@10: fill_rectangle(screen, yading@10: is->xleft, is->ytop, is->width, is->height, yading@10: bgcolor, 1); yading@10: is->force_refresh = 1; yading@10: is->show_mode = next; yading@10: } yading@10: } yading@10: yading@10: static void refresh_loop_wait_event(VideoState *is, SDL_Event *event) { yading@10: double remaining_time = 0.0; yading@10: SDL_PumpEvents(); yading@10: while (!SDL_PeepEvents(event, 1, SDL_GETEVENT, SDL_ALLEVENTS)) { yading@10: if (!cursor_hidden && av_gettime() - cursor_last_shown > CURSOR_HIDE_DELAY) { yading@10: SDL_ShowCursor(0); yading@10: cursor_hidden = 1; yading@10: } yading@10: if (remaining_time > 0.0) yading@10: av_usleep((int64_t)(remaining_time * 1000000.0)); yading@10: remaining_time = REFRESH_RATE; yading@10: if (is->show_mode != SHOW_MODE_NONE && (!is->paused || is->force_refresh)) yading@10: video_refresh(is, &remaining_time); yading@10: SDL_PumpEvents(); yading@10: } yading@10: } yading@10: yading@10: /* handle an event sent by the GUI */ yading@10: static void event_loop(VideoState *cur_stream) yading@10: { yading@10: SDL_Event event; yading@10: double incr, pos, frac; yading@10: yading@10: for (;;) { yading@10: double x; yading@10: refresh_loop_wait_event(cur_stream, &event); yading@10: switch (event.type) { yading@10: case SDL_KEYDOWN: yading@10: if (exit_on_keydown) { yading@10: do_exit(cur_stream); yading@10: break; yading@10: } yading@10: switch (event.key.keysym.sym) { yading@10: case SDLK_ESCAPE: yading@10: case SDLK_q: yading@10: do_exit(cur_stream); yading@10: break; yading@10: 
static void refresh_loop_wait_event(VideoState *is, SDL_Event *event) {
    double remaining_time = 0.0;
    SDL_PumpEvents();
    while (!SDL_PeepEvents(event, 1, SDL_GETEVENT, SDL_ALLEVENTS)) {
        if (!cursor_hidden && av_gettime() - cursor_last_shown > CURSOR_HIDE_DELAY) {
            SDL_ShowCursor(0);
            cursor_hidden = 1;
        }
        if (remaining_time > 0.0)
            av_usleep((int64_t)(remaining_time * 1000000.0));
        remaining_time = REFRESH_RATE;
        if (is->show_mode != SHOW_MODE_NONE && (!is->paused || is->force_refresh))
            video_refresh(is, &remaining_time);
        SDL_PumpEvents();
    }
}

/* handle an event sent by the GUI */
static void event_loop(VideoState *cur_stream)
{
    SDL_Event event;
    double incr, pos, frac;

    for (;;) {
        double x;
        refresh_loop_wait_event(cur_stream, &event);
        switch (event.type) {
        case SDL_KEYDOWN:
            if (exit_on_keydown) {
                do_exit(cur_stream);
                break;
            }
            switch (event.key.keysym.sym) {
            case SDLK_ESCAPE:
            case SDLK_q:
                do_exit(cur_stream);
                break;
            case SDLK_f:
                toggle_full_screen(cur_stream);
                cur_stream->force_refresh = 1;
                break;
            case SDLK_p:
            case SDLK_SPACE:
                toggle_pause(cur_stream);
                break;
            case SDLK_s: // S: Step to next frame
                step_to_next_frame(cur_stream);
                break;
            case SDLK_a:
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
                break;
            case SDLK_v:
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
                break;
            case SDLK_t:
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
                break;
            case SDLK_w:
                toggle_audio_display(cur_stream);
                break;
            case SDLK_PAGEUP:
                incr = 600.0;
                goto do_seek;
            case SDLK_PAGEDOWN:
                incr = -600.0;
                goto do_seek;
            case SDLK_LEFT:
                incr = -10.0;
                goto do_seek;
            case SDLK_RIGHT:
                incr = 10.0;
                goto do_seek;
            case SDLK_UP:
                incr = 60.0;
                goto do_seek;
            case SDLK_DOWN:
                incr = -60.0;
            do_seek:
                if (seek_by_bytes) {
                    if (cur_stream->video_stream >= 0 && cur_stream->video_current_pos >= 0) {
                        pos = cur_stream->video_current_pos;
                    } else if (cur_stream->audio_stream >= 0 && cur_stream->audio_pkt.pos >= 0) {
                        pos = cur_stream->audio_pkt.pos;
                    } else
                        pos = avio_tell(cur_stream->ic->pb);
                    if (cur_stream->ic->bit_rate)
                        incr *= cur_stream->ic->bit_rate / 8.0;
                    else
                        incr *= 180000.0; /* no bit rate known: assume about 180 kB per second */
                    pos += incr;
                    stream_seek(cur_stream, pos, incr, 1);
                } else {
                    pos = get_master_clock(cur_stream);
                    if (isnan(pos))
                        pos = (double)cur_stream->seek_pos / AV_TIME_BASE;
                    pos += incr;
                    if (cur_stream->ic->start_time != AV_NOPTS_VALUE && pos < cur_stream->ic->start_time / (double)AV_TIME_BASE)
                        pos = cur_stream->ic->start_time / (double)AV_TIME_BASE;
                    stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
                }
                break;
            default:
                break;
            }
            break;
        case SDL_VIDEOEXPOSE:
            cur_stream->force_refresh = 1;
            break;
        case SDL_MOUSEBUTTONDOWN:
            if (exit_on_mousedown) {
                do_exit(cur_stream);
                break;
            }
            /* fall through: mouse clicks share the motion handler's seek code */
        case SDL_MOUSEMOTION:
            if (cursor_hidden) {
                SDL_ShowCursor(1);
                cursor_hidden = 0;
            }
            cursor_last_shown = av_gettime();
            if (event.type == SDL_MOUSEBUTTONDOWN) {
                x = event.button.x;
            } else {
                if (event.motion.state != SDL_PRESSED)
                    break;
                x = event.motion.x;
            }
            if (seek_by_bytes || cur_stream->ic->duration <= 0) {
                uint64_t size = avio_size(cur_stream->ic->pb);
                stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
            } else {
                int64_t ts;
                int ns, hh, mm, ss;
                int tns, thh, tmm, tss;
                tns  = cur_stream->ic->duration / 1000000LL;
                thh  = tns / 3600;
                tmm  = (tns % 3600) / 60;
                tss  = (tns % 60);
                frac = x / cur_stream->width;
                ns   = frac * tns;
                hh   = ns / 3600;
                mm   = (ns % 3600) / 60;
                ss   = (ns % 60);
                fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d) \n", frac*100,
                        hh, mm, ss, thh, tmm, tss);
                ts = frac * cur_stream->ic->duration;
                if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
                    ts += cur_stream->ic->start_time;
                stream_seek(cur_stream, ts, 0, 0);
            }
            break;
        case SDL_VIDEORESIZE:
            screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
                                      SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
            screen_width  = cur_stream->width  = event.resize.w;
            screen_height = cur_stream->height = event.resize.h;
            cur_stream->force_refresh = 1;
            break;
        case SDL_QUIT:
        case FF_QUIT_EVENT:
            do_exit(cur_stream);
            break;
        case FF_ALLOC_EVENT:
            alloc_picture(event.user.data1);
            break;
        default:
            break;
        }
    }
}
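
/* Handlers for ffplay's command-line options. They are referenced from the
 * options[] table below and run when parse_options() (from cmdutils) walks
 * the command line in main(); most of them simply store the parsed value in
 * the corresponding global variable. */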
static int opt_frame_size(void *optctx, const char *opt, const char *arg)
{
    av_log(NULL, AV_LOG_WARNING, "Option -s is deprecated, use -video_size.\n");
    return opt_default(NULL, "video_size", arg);
}

static int opt_width(void *optctx, const char *opt, const char *arg)
{
    screen_width = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
    return 0;
}

static int opt_height(void *optctx, const char *opt, const char *arg)
{
    screen_height = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
    return 0;
}

static int opt_format(void *optctx, const char *opt, const char *arg)
{
    file_iformat = av_find_input_format(arg);
    if (!file_iformat) {
        fprintf(stderr, "Unknown input format: %s\n", arg);
        return AVERROR(EINVAL);
    }
    return 0;
}

static int opt_frame_pix_fmt(void *optctx, const char *opt, const char *arg)
{
    av_log(NULL, AV_LOG_WARNING, "Option -pix_fmt is deprecated, use -pixel_format.\n");
    return opt_default(NULL, "pixel_format", arg);
}

static int opt_sync(void *optctx, const char *opt, const char *arg)
{
    if (!strcmp(arg, "audio"))
        av_sync_type = AV_SYNC_AUDIO_MASTER;
    else if (!strcmp(arg, "video"))
        av_sync_type = AV_SYNC_VIDEO_MASTER;
    else if (!strcmp(arg, "ext"))
        av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
    else {
        fprintf(stderr, "Unknown value for %s: %s\n", opt, arg);
        exit(1);
    }
    return 0;
}

static int opt_seek(void *optctx, const char *opt, const char *arg)
{
    start_time = parse_time_or_die(opt, arg, 1);
    return 0;
}

static int opt_duration(void *optctx, const char *opt, const char *arg)
{
    duration = parse_time_or_die(opt, arg, 1);
    return 0;
}

static int opt_show_mode(void *optctx, const char *opt, const char *arg)
{
    show_mode = !strcmp(arg, "video") ? SHOW_MODE_VIDEO :
                !strcmp(arg, "waves") ? SHOW_MODE_WAVES :
                !strcmp(arg, "rdft" ) ? SHOW_MODE_RDFT  :
                parse_number_or_die(opt, arg, OPT_INT, 0, SHOW_MODE_NB-1);
    return 0;
}

static void opt_input_file(void *optctx, const char *filename)
{
    if (input_filename) {
        fprintf(stderr, "Argument '%s' provided as input filename, but '%s' was already specified.\n",
                filename, input_filename);
        exit(1);
    }
    if (!strcmp(filename, "-"))
        filename = "pipe:";
    input_filename = filename;
}
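
/* Forcing a specific decoder is spelled -codec:a, -codec:s or -codec:v on the
 * command line; the media type is taken from the single character that
 * follows the ':' in the option name. */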
static int opt_codec(void *optctx, const char *opt, const char *arg)
{
    const char *spec = strchr(opt, ':');
    if (!spec) {
        fprintf(stderr, "No media specifier was specified in '%s' in option '%s'\n",
                arg, opt);
        return AVERROR(EINVAL);
    }
    spec++;
    switch (spec[0]) {
    case 'a' :    audio_codec_name = arg; break;
    case 's' : subtitle_codec_name = arg; break;
    case 'v' :    video_codec_name = arg; break;
    default:
        fprintf(stderr, "Invalid media specifier '%s' in option '%s'\n", spec, opt);
        return AVERROR(EINVAL);
    }
    return 0;
}

static int dummy;
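
/* ffplay's command-line options in the cmdutils OptionDef format. The generic
 * options shared with the other FFmpeg tools come from cmdutils_common_opts.h;
 * show_help_default() below prints the table, split into main and expert
 * (OPT_EXPERT) sections. */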
static const OptionDef options[] = {
#include "cmdutils_common_opts.h"
    { "x", HAS_ARG, { .func_arg = opt_width }, "force displayed width", "width" },
    { "y", HAS_ARG, { .func_arg = opt_height }, "force displayed height", "height" },
    { "s", HAS_ARG | OPT_VIDEO, { .func_arg = opt_frame_size }, "set frame size (WxH or abbreviation)", "size" },
    { "fs", OPT_BOOL, { &is_full_screen }, "force full screen" },
    { "an", OPT_BOOL, { &audio_disable }, "disable audio" },
    { "vn", OPT_BOOL, { &video_disable }, "disable video" },
    { "sn", OPT_BOOL, { &subtitle_disable }, "disable subtitling" },
    { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, { &wanted_stream[AVMEDIA_TYPE_AUDIO] }, "select desired audio stream", "stream_number" },
    { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, { &wanted_stream[AVMEDIA_TYPE_VIDEO] }, "select desired video stream", "stream_number" },
    { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, { &wanted_stream[AVMEDIA_TYPE_SUBTITLE] }, "select desired subtitle stream", "stream_number" },
    { "ss", HAS_ARG, { .func_arg = opt_seek }, "seek to a given position in seconds", "pos" },
    { "t", HAS_ARG, { .func_arg = opt_duration }, "play \"duration\" seconds of audio/video", "duration" },
    { "bytes", OPT_INT | HAS_ARG, { &seek_by_bytes }, "seek by bytes 0=off 1=on -1=auto", "val" },
    { "nodisp", OPT_BOOL, { &display_disable }, "disable graphical display" },
    { "f", HAS_ARG, { .func_arg = opt_format }, "force format", "fmt" },
    { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, { .func_arg = opt_frame_pix_fmt }, "set pixel format", "format" },
    { "stats", OPT_BOOL | OPT_EXPERT, { &show_status }, "show status", "" },
    { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, { &workaround_bugs }, "workaround bugs", "" },
    { "fast", OPT_BOOL | OPT_EXPERT, { &fast }, "non spec compliant optimizations", "" },
    { "genpts", OPT_BOOL | OPT_EXPERT, { &genpts }, "generate pts", "" },
    { "drp", OPT_INT | HAS_ARG | OPT_EXPERT, { &decoder_reorder_pts }, "let decoder reorder pts 0=off 1=on -1=auto", ""},
    { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, { &lowres }, "", "" },
    { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, { &idct }, "set idct algo", "algo" },
    { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, { &error_concealment }, "set error concealment options", "bit_mask" },
    { "sync", HAS_ARG | OPT_EXPERT, { .func_arg = opt_sync }, "set audio-video sync. type (type=audio/video/ext)", "type" },
    { "autoexit", OPT_BOOL | OPT_EXPERT, { &autoexit }, "exit at the end", "" },
    { "exitonkeydown", OPT_BOOL | OPT_EXPERT, { &exit_on_keydown }, "exit on key down", "" },
    { "exitonmousedown", OPT_BOOL | OPT_EXPERT, { &exit_on_mousedown }, "exit on mouse down", "" },
    { "loop", OPT_INT | HAS_ARG | OPT_EXPERT, { &loop }, "set number of times the playback shall be looped", "loop count" },
    { "framedrop", OPT_BOOL | OPT_EXPERT, { &framedrop }, "drop frames when cpu is too slow", "" },
    { "infbuf", OPT_BOOL | OPT_EXPERT, { &infinite_buffer }, "don't limit the input buffer size (useful with realtime streams)", "" },
    { "window_title", OPT_STRING | HAS_ARG, { &window_title }, "set window title", "window title" },
#if CONFIG_AVFILTER
    { "vf", OPT_STRING | HAS_ARG, { &vfilters }, "set video filters", "filter_graph" },
    { "af", OPT_STRING | HAS_ARG, { &afilters }, "set audio filters", "filter_graph" },
#endif
    { "rdftspeed", OPT_INT | HAS_ARG| OPT_AUDIO | OPT_EXPERT, { &rdftspeed }, "rdft speed", "msecs" },
    { "showmode", HAS_ARG, { .func_arg = opt_show_mode}, "select show mode (0 = video, 1 = waves, 2 = RDFT)", "mode" },
    { "default", HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, { .func_arg = opt_default }, "generic catch all option", "" },
    { "i", OPT_BOOL, { &dummy}, "read specified file", "input_file"},
    { "codec", HAS_ARG, { .func_arg = opt_codec}, "force decoder", "decoder_name" },
    { "acodec", HAS_ARG | OPT_STRING | OPT_EXPERT, { &audio_codec_name }, "force audio decoder", "decoder_name" },
    { "scodec", HAS_ARG | OPT_STRING | OPT_EXPERT, { &subtitle_codec_name }, "force subtitle decoder", "decoder_name" },
    { "vcodec", HAS_ARG | OPT_STRING | OPT_EXPERT, { &video_codec_name }, "force video decoder", "decoder_name" },
    { NULL, },
};

static void show_usage(void)
{
    av_log(NULL, AV_LOG_INFO, "Simple media player\n");
    av_log(NULL, AV_LOG_INFO, "usage: %s [options] input_file\n", program_name);
    av_log(NULL, AV_LOG_INFO, "\n");
}

void show_help_default(const char *opt, const char *arg)
{
    av_log_set_callback(log_callback_help);
    show_usage();
    show_help_options(options, "Main options:", 0, OPT_EXPERT, 0);
    show_help_options(options, "Advanced options:", OPT_EXPERT, 0, 0);
    printf("\n");
    show_help_children(avcodec_get_class(), AV_OPT_FLAG_DECODING_PARAM);
    show_help_children(avformat_get_class(), AV_OPT_FLAG_DECODING_PARAM);
#if !CONFIG_AVFILTER
    show_help_children(sws_get_class(), AV_OPT_FLAG_ENCODING_PARAM);
#else
    show_help_children(avfilter_get_class(), AV_OPT_FLAG_FILTERING_PARAM);
#endif
    printf("\nWhile playing:\n"
           "q, ESC              quit\n"
           "f                   toggle full screen\n"
           "p, SPC              pause\n"
           "a                   cycle audio channel\n"
           "v                   cycle video channel\n"
           "t                   cycle subtitle channel\n"
           "w                   show audio waves\n"
           "s                   activate frame-step mode\n"
           "left/right          seek backward/forward 10 seconds\n"
           "down/up             seek backward/forward 1 minute\n"
           "page down/page up   seek backward/forward 10 minutes\n"
           "mouse click         seek to percentage in file corresponding to fraction of width\n"
           );
}
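
/* Lock manager callback registered with av_lockmgr_register() in main(): it
 * maps the AV_LOCK_CREATE/OBTAIN/RELEASE/DESTROY operations onto SDL mutexes
 * so that libavcodec/libavformat can lock safely across ffplay's threads. */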
static int lockmgr(void **mtx, enum AVLockOp op)
{
    switch(op) {
    case AV_LOCK_CREATE:
        *mtx = SDL_CreateMutex();
        if(!*mtx)
            return 1;
        return 0;
    case AV_LOCK_OBTAIN:
        return !!SDL_LockMutex(*mtx);
    case AV_LOCK_RELEASE:
        return !!SDL_UnlockMutex(*mtx);
    case AV_LOCK_DESTROY:
        SDL_DestroyMutex(*mtx);
        return 0;
    }
    return 1;
}
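
/*
 * Illustrative command lines (file names are placeholders; every option used
 * here is defined in the options[] table above):
 *
 *   ffplay input.mkv                       play with the default audio-master sync
 *   ffplay -ss 30 -t 10 -autoexit in.mp4   start 30 seconds in, play 10 seconds, then exit
 *   ffplay -nodisp in.mp3                  audio only, no video window
 *   ffplay -fs in.avi                      start in full screen
 */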
/* Called from the main */
int main(int argc, char **argv)
{
    int flags;
    VideoState *is;
    char dummy_videodriver[] = "SDL_VIDEODRIVER=dummy";

    av_log_set_flags(AV_LOG_SKIP_REPEATED);
    parse_loglevel(argc, argv, options);

    /* register all codecs, demux and protocols */
    avcodec_register_all();
#if CONFIG_AVDEVICE
    avdevice_register_all();
#endif
#if CONFIG_AVFILTER
    avfilter_register_all();
#endif
    av_register_all();
    avformat_network_init();

    init_opts();

    signal(SIGINT , sigterm_handler); /* Interrupt (ANSI).    */
    signal(SIGTERM, sigterm_handler); /* Termination (ANSI).  */

    show_banner(argc, argv, options);

    parse_options(NULL, argc, argv, options, opt_input_file);

    if (!input_filename) {
        show_usage();
        fprintf(stderr, "An input file must be specified\n");
        fprintf(stderr, "Use -h to get full help or, even better, run 'man %s'\n", program_name);
        exit(1);
    }

    if (display_disable) {
        video_disable = 1;
    }
    flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
    if (audio_disable)
        flags &= ~SDL_INIT_AUDIO;
    if (display_disable)
        SDL_putenv(dummy_videodriver); /* For the event queue, we always need a video driver. */
#if !defined(__MINGW32__) && !defined(__APPLE__)
    flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */
#endif
    if (SDL_Init (flags)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        fprintf(stderr, "(Did you set the DISPLAY variable?)\n");
        exit(1);
    }

    if (!display_disable) {
        const SDL_VideoInfo *vi = SDL_GetVideoInfo();
        fs_screen_width = vi->current_w;
        fs_screen_height = vi->current_h;
    }

    SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
    SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
    SDL_EventState(SDL_USEREVENT, SDL_IGNORE);

    if (av_lockmgr_register(lockmgr)) {
        fprintf(stderr, "Could not initialize lock manager!\n");
        do_exit(NULL);
    }

    av_init_packet(&flush_pkt);
    flush_pkt.data = (char *)(intptr_t)"FLUSH";

    is = stream_open(input_filename, file_iformat);
    if (!is) {
        fprintf(stderr, "Failed to initialize VideoState!\n");
        do_exit(NULL);
    }

    event_loop(is);

    /* never returns */

    return 0;
}