/*
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * simple media player based on the FFmpeg libraries
 */

#include "config.h"
#include <inttypes.h>
#include <math.h>
#include <limits.h>
#include <signal.h>
#include "libavutil/avstring.h"
#include "libavutil/colorspace.h"
#include "libavutil/mathematics.h"
#include "libavutil/pixdesc.h"
#include "libavutil/imgutils.h"
#include "libavutil/dict.h"
#include "libavutil/parseutils.h"
#include "libavutil/samplefmt.h"
#include "libavutil/avassert.h"
#include "libavutil/time.h"
#include "libavformat/avformat.h"
#include "libavdevice/avdevice.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
#include "libavcodec/avfft.h"
#include "libswresample/swresample.h"

#if CONFIG_AVFILTER
# include "libavfilter/avcodec.h"
# include "libavfilter/avfilter.h"
# include "libavfilter/buffersink.h"
# include "libavfilter/buffersrc.h"
#endif

#include <SDL.h>
#include <SDL_thread.h>

#include "cmdutils.h"

#include <assert.h>

const char program_name[] = "ffplay";
const int program_birth_year = 2003;

#define MAX_QUEUE_SIZE (15 * 1024 * 1024)
#define MIN_FRAMES 5

/* SDL audio buffer size, in samples. Should be small to have precise
   A/V sync as SDL does not have hardware buffer fullness info. */
#define SDL_AUDIO_BUFFER_SIZE 1024

/* no AV sync correction is done if below the AV sync threshold */
#define AV_SYNC_THRESHOLD 0.01
/* no AV correction is done if too big error */
#define AV_NOSYNC_THRESHOLD 10.0

/* maximum audio speed change to get correct sync */
#define SAMPLE_CORRECTION_PERCENT_MAX 10

/* external clock speed adjustment constants for realtime sources based on buffer fullness */
#define EXTERNAL_CLOCK_SPEED_MIN  0.900
#define EXTERNAL_CLOCK_SPEED_MAX  1.010
#define EXTERNAL_CLOCK_SPEED_STEP 0.001

/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
#define AUDIO_DIFF_AVG_NB   20

/* polls for possible required screen refresh at least this often, should be less than 1/fps */
#define REFRESH_RATE 0.01

/* NOTE: the size must be big enough to compensate the hardware audio buffersize size */
/* TODO: We assume that a decoded and resampled frame fits into this buffer */
#define SAMPLE_ARRAY_SIZE (8 * 65536)

#define CURSOR_HIDE_DELAY 1000000

static int64_t sws_flags = SWS_BICUBIC;

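/* Every queued packet carries a serial number that is bumped whenever the
   queue is flushed (typically after a seek), so consumers can tell packets
   belonging to the current playback position from stale ones. */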
typedef struct MyAVPacketList {
    AVPacket pkt;
    struct MyAVPacketList *next;
    int serial;
} MyAVPacketList;

typedef struct PacketQueue {
    MyAVPacketList *first_pkt, *last_pkt;
    int nb_packets;
    int size;
    int abort_request;
    int serial;
    SDL_mutex *mutex;
    SDL_cond *cond;
} PacketQueue;

#define VIDEO_PICTURE_QUEUE_SIZE 4
#define SUBPICTURE_QUEUE_SIZE 4

typedef struct VideoPicture {
    double pts;             // presentation timestamp for this picture
    int64_t pos;            // byte position in file
    SDL_Overlay *bmp;
    int width, height;      /* source height & width */
    int allocated;
    int reallocate;
    int serial;

    AVRational sar;
} VideoPicture;

typedef struct SubPicture {
    double pts; /* presentation time stamp for this picture */
    AVSubtitle sub;
} SubPicture;

typedef struct AudioParams {
    int freq;
    int channels;
    int64_t channel_layout;
    enum AVSampleFormat fmt;
} AudioParams;

enum {
    AV_SYNC_AUDIO_MASTER, /* default choice */
    AV_SYNC_VIDEO_MASTER,
    AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
};

typedef struct VideoState {
    SDL_Thread *read_tid;
    SDL_Thread *video_tid;
    AVInputFormat *iformat;
    int no_background;
    int abort_request;
    int force_refresh;
    int paused;
    int last_paused;
    int queue_attachments_req;
    int seek_req;
    int seek_flags;
    int64_t seek_pos;
    int64_t seek_rel;
    int read_pause_return;
    AVFormatContext *ic;
    int realtime;

    int audio_stream;

    int av_sync_type;
    double external_clock;         ///< external clock base
    double external_clock_drift;   ///< external clock base - time (av_gettime) at which we updated external_clock
    int64_t external_clock_time;   ///< last reference time
    double external_clock_speed;   ///< speed of the external clock

    double audio_clock;
    int audio_clock_serial;
    double audio_diff_cum; /* used for AV difference average computation */
    double audio_diff_avg_coef;
    double audio_diff_threshold;
    int audio_diff_avg_count;
    AVStream *audio_st;
    PacketQueue audioq;
    int audio_hw_buf_size;
    uint8_t silence_buf[SDL_AUDIO_BUFFER_SIZE];
    uint8_t *audio_buf;
    uint8_t *audio_buf1;
    unsigned int audio_buf_size; /* in bytes */
    unsigned int audio_buf1_size;
    int audio_buf_index; /* in bytes */
    int audio_write_buf_size;
    int audio_buf_frames_pending;
    AVPacket audio_pkt_temp;
    AVPacket audio_pkt;
    int audio_pkt_temp_serial;
    int audio_last_serial;
    struct AudioParams audio_src;
#if CONFIG_AVFILTER
    struct AudioParams audio_filter_src;
#endif
    struct AudioParams audio_tgt;
    struct SwrContext *swr_ctx;
    double audio_current_pts;
    double audio_current_pts_drift;
    int frame_drops_early;
    int frame_drops_late;
    AVFrame *frame;

    enum ShowMode {
        SHOW_MODE_NONE = -1, SHOW_MODE_VIDEO = 0, SHOW_MODE_WAVES, SHOW_MODE_RDFT, SHOW_MODE_NB
    } show_mode;
    int16_t sample_array[SAMPLE_ARRAY_SIZE];
    int sample_array_index;
    int last_i_start;
    RDFTContext *rdft;
    int rdft_bits;
    FFTSample *rdft_data;
    int xpos;
    double last_vis_time;

    SDL_Thread *subtitle_tid;
    int subtitle_stream;
    int subtitle_stream_changed;
    AVStream *subtitle_st;
    PacketQueue subtitleq;
    SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
    int subpq_size, subpq_rindex, subpq_windex;
    SDL_mutex *subpq_mutex;
    SDL_cond *subpq_cond;

    double frame_timer;
    double frame_last_pts;
    double frame_last_duration;
    double frame_last_dropped_pts;
    double frame_last_returned_time;
    double frame_last_filter_delay;
    int64_t frame_last_dropped_pos;
    int frame_last_dropped_serial;
    int video_stream;
    AVStream *video_st;
    PacketQueue videoq;
    double video_current_pts;       // current displayed pts
    double video_current_pts_drift; // video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts
    int64_t video_current_pos;      // current displayed file pos
    double max_frame_duration;      // maximum duration of a frame - above this, we consider the jump a timestamp discontinuity
    int video_clock_serial;
    VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
    int pictq_size, pictq_rindex, pictq_windex;
    SDL_mutex *pictq_mutex;
    SDL_cond *pictq_cond;
#if !CONFIG_AVFILTER
    struct SwsContext *img_convert_ctx;
#endif
    SDL_Rect last_display_rect;

    char filename[1024];
    int width, height, xleft, ytop;
    int step;

#if CONFIG_AVFILTER
    AVFilterContext *in_video_filter;   // the first filter in the video chain
    AVFilterContext *out_video_filter;  // the last filter in the video chain
    AVFilterContext *in_audio_filter;   // the first filter in the audio chain
    AVFilterContext *out_audio_filter;  // the last filter in the audio chain
    AVFilterGraph *agraph;              // audio filter graph
#endif

    int last_video_stream, last_audio_stream, last_subtitle_stream;

    SDL_cond *continue_read_thread;
} VideoState;

/* options specified by the user */
static AVInputFormat *file_iformat;
static const char *input_filename;
static const char *window_title;
static int fs_screen_width;
static int fs_screen_height;
static int default_width  = 640;
static int default_height = 480;
static int screen_width  = 0;
static int screen_height = 0;
static int audio_disable;
static int video_disable;
static int subtitle_disable;
static int wanted_stream[AVMEDIA_TYPE_NB] = {
    [AVMEDIA_TYPE_AUDIO]    = -1,
    [AVMEDIA_TYPE_VIDEO]    = -1,
    [AVMEDIA_TYPE_SUBTITLE] = -1,
};
static int seek_by_bytes = -1;
static int display_disable;
static int show_status = 1;
static int av_sync_type = AV_SYNC_AUDIO_MASTER;
static int64_t start_time = AV_NOPTS_VALUE;
static int64_t duration = AV_NOPTS_VALUE;
static int workaround_bugs = 1;
static int fast = 0;
static int genpts = 0;
static int lowres = 0;
static int idct = FF_IDCT_AUTO;
static int error_concealment = 3;
static int decoder_reorder_pts = -1;
static int autoexit;
static int exit_on_keydown;
static int exit_on_mousedown;
static int loop = 1;
static int framedrop = -1;
static int infinite_buffer = -1;
static enum ShowMode show_mode = SHOW_MODE_NONE;
static const char *audio_codec_name;
static const char *subtitle_codec_name;
static const char *video_codec_name;
double rdftspeed = 0.02;
static int64_t cursor_last_shown;
static int cursor_hidden = 0;
#if CONFIG_AVFILTER
static char *vfilters = NULL;
static char *afilters = NULL;
#endif

/* current context */
static int is_full_screen;
static int64_t audio_callback_time;

static AVPacket flush_pkt;

#define FF_ALLOC_EVENT   (SDL_USEREVENT)
#define FF_QUIT_EVENT    (SDL_USEREVENT + 2)

static SDL_Surface *screen;

static inline
int cmp_audio_fmts(enum AVSampleFormat fmt1, int64_t channel_count1,
                   enum AVSampleFormat fmt2, int64_t channel_count2)
{
    /* If channel count == 1, planar and non-planar formats are the same */
    if (channel_count1 == 1 && channel_count2 == 1)
        return av_get_packed_sample_fmt(fmt1) != av_get_packed_sample_fmt(fmt2);
    else
        return channel_count1 != channel_count2 || fmt1 != fmt2;
}

static inline
int64_t get_valid_channel_layout(int64_t channel_layout, int channels)
{
    if (channel_layout && av_get_channel_layout_nb_channels(channel_layout) == channels)
        return channel_layout;
    else
        return 0;
}

static int packet_queue_put(PacketQueue *q, AVPacket *pkt);

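/* Append a packet to the queue. The caller must hold q->mutex; queuing the
   global flush_pkt starts a new serial so stale packets can be detected. */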
static int packet_queue_put_private(PacketQueue *q, AVPacket *pkt)
{
    MyAVPacketList *pkt1;

    if (q->abort_request)
        return -1;

    pkt1 = av_malloc(sizeof(MyAVPacketList));
    if (!pkt1)
        return -1;
    pkt1->pkt = *pkt;
    pkt1->next = NULL;
    if (pkt == &flush_pkt)
        q->serial++;
    pkt1->serial = q->serial;

    if (!q->last_pkt)
        q->first_pkt = pkt1;
    else
        q->last_pkt->next = pkt1;
    q->last_pkt = pkt1;
    q->nb_packets++;
    q->size += pkt1->pkt.size + sizeof(*pkt1);
    /* XXX: should duplicate packet data in DV case */
    SDL_CondSignal(q->cond);
    return 0;
}

static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
{
    int ret;

    /* duplicate the packet */
    if (pkt != &flush_pkt && av_dup_packet(pkt) < 0)
        return -1;

    SDL_LockMutex(q->mutex);
    ret = packet_queue_put_private(q, pkt);
    SDL_UnlockMutex(q->mutex);

    if (pkt != &flush_pkt && ret < 0)
        av_free_packet(pkt);

    return ret;
}

/* packet queue handling */
static void packet_queue_init(PacketQueue *q)
{
    memset(q, 0, sizeof(PacketQueue));
    q->mutex = SDL_CreateMutex();
    q->cond = SDL_CreateCond();
    q->abort_request = 1;
}

static void packet_queue_flush(PacketQueue *q)
{
    MyAVPacketList *pkt, *pkt1;

    SDL_LockMutex(q->mutex);
    for (pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
        pkt1 = pkt->next;
        av_free_packet(&pkt->pkt);
        av_freep(&pkt);
    }
    q->last_pkt = NULL;
    q->first_pkt = NULL;
    q->nb_packets = 0;
    q->size = 0;
    SDL_UnlockMutex(q->mutex);
}

static void packet_queue_destroy(PacketQueue *q)
{
    packet_queue_flush(q);
    SDL_DestroyMutex(q->mutex);
    SDL_DestroyCond(q->cond);
}

static void packet_queue_abort(PacketQueue *q)
{
    SDL_LockMutex(q->mutex);

    q->abort_request = 1;

    SDL_CondSignal(q->cond);

    SDL_UnlockMutex(q->mutex);
}

static void packet_queue_start(PacketQueue *q)
{
    SDL_LockMutex(q->mutex);
    q->abort_request = 0;
    packet_queue_put_private(q, &flush_pkt);
    SDL_UnlockMutex(q->mutex);
}

/* return < 0 if aborted, 0 if no packet and > 0 if packet. */
static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block, int *serial)
{
    MyAVPacketList *pkt1;
    int ret;

    SDL_LockMutex(q->mutex);

    for (;;) {
        if (q->abort_request) {
            ret = -1;
            break;
        }

        pkt1 = q->first_pkt;
        if (pkt1) {
            q->first_pkt = pkt1->next;
            if (!q->first_pkt)
                q->last_pkt = NULL;
            q->nb_packets--;
            q->size -= pkt1->pkt.size + sizeof(*pkt1);
            *pkt = pkt1->pkt;
            if (serial)
                *serial = pkt1->serial;
            av_free(pkt1);
            ret = 1;
            break;
        } else if (!block) {
            ret = 0;
            break;
        } else {
            SDL_CondWait(q->cond, q->mutex);
        }
    }
    SDL_UnlockMutex(q->mutex);
    return ret;
}

static inline void fill_rectangle(SDL_Surface *screen,
                                  int x, int y, int w, int h, int color, int update)
{
    SDL_Rect rect;
    rect.x = x;
    rect.y = y;
    rect.w = w;
    rect.h = h;
    SDL_FillRect(screen, &rect, color);
    if (update && w > 0 && h > 0)
        SDL_UpdateRect(screen, x, y, w, h);
}

/* draw only the border of a rectangle */
static void fill_border(int xleft, int ytop, int width, int height, int x, int y, int w, int h, int color, int update)
{
    int w1, w2, h1, h2;

    /* fill the background */
    w1 = x;
    if (w1 < 0)
        w1 = 0;
    w2 = width - (x + w);
    if (w2 < 0)
        w2 = 0;
    h1 = y;
    if (h1 < 0)
        h1 = 0;
    h2 = height - (y + h);
    if (h2 < 0)
        h2 = 0;
    fill_rectangle(screen,
                   xleft, ytop,
                   w1, height,
                   color, update);
    fill_rectangle(screen,
                   xleft + width - w2, ytop,
                   w2, height,
                   color, update);
    fill_rectangle(screen,
                   xleft + w1, ytop,
                   width - w1 - w2, h1,
                   color, update);
    fill_rectangle(screen,
                   xleft + w1, ytop + height - h2,
                   width - w1 - w2, h2,
                   color, update);
}

#define ALPHA_BLEND(a, oldp, newp, s)\
((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))

#define RGBA_IN(r, g, b, a, s)\
{\
    unsigned int v = ((const uint32_t *)(s))[0];\
    a = (v >> 24) & 0xff;\
    r = (v >> 16) & 0xff;\
    g = (v >> 8) & 0xff;\
    b = v & 0xff;\
}

#define YUVA_IN(y, u, v, a, s, pal)\
{\
    unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\
    a = (val >> 24) & 0xff;\
    y = (val >> 16) & 0xff;\
    u = (val >> 8) & 0xff;\
    v = val & 0xff;\
}

#define YUVA_OUT(d, y, u, v, a)\
{\
    ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
}


#define BPP 1

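/* Blend one palettized subtitle rectangle onto the YUV420 picture. Luma is
   blended per pixel; chroma is blended per 2x2 block, which is why the
   accumulated alpha and chroma sums are scaled back down by the extra shift
   argument of ALPHA_BLEND. */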
static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
{
    int wrap, wrap3, width2, skip2;
    int y, u, v, a, u1, v1, a1, w, h;
    uint8_t *lum, *cb, *cr;
    const uint8_t *p;
    const uint32_t *pal;
    int dstx, dsty, dstw, dsth;

    dstw = av_clip(rect->w, 0, imgw);
    dsth = av_clip(rect->h, 0, imgh);
    dstx = av_clip(rect->x, 0, imgw - dstw);
    dsty = av_clip(rect->y, 0, imgh - dsth);
    lum = dst->data[0] + dsty * dst->linesize[0];
    cb  = dst->data[1] + (dsty >> 1) * dst->linesize[1];
    cr  = dst->data[2] + (dsty >> 1) * dst->linesize[2];

    width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
    skip2 = dstx >> 1;
    wrap = dst->linesize[0];
    wrap3 = rect->pict.linesize[0];
    p = rect->pict.data[0];
    pal = (const uint32_t *)rect->pict.data[1];  /* Now in YCrCb! */

    if (dsty & 1) {
        lum += dstx;
        cb += skip2;
        cr += skip2;

        if (dstx & 1) {
            YUVA_IN(y, u, v, a, p, pal);
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
            cb++;
            cr++;
            lum++;
            p += BPP;
        }
        for (w = dstw - (dstx & 1); w >= 2; w -= 2) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);

            YUVA_IN(y, u, v, a, p + BPP, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
            cb++;
            cr++;
            p += 2 * BPP;
            lum += 2;
        }
        if (w) {
            YUVA_IN(y, u, v, a, p, pal);
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
            p++;
            lum++;
        }
        p += wrap3 - dstw * BPP;
        lum += wrap - dstw - dstx;
        cb += dst->linesize[1] - width2 - skip2;
        cr += dst->linesize[2] - width2 - skip2;
    }
    for (h = dsth - (dsty & 1); h >= 2; h -= 2) {
        lum += dstx;
        cb += skip2;
        cr += skip2;

        if (dstx & 1) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            p += wrap3;
            lum += wrap;
            YUVA_IN(y, u, v, a, p, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
            cb++;
            cr++;
            p += -wrap3 + BPP;
            lum += -wrap + 1;
        }
        for (w = dstw - (dstx & 1); w >= 2; w -= 2) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);

            YUVA_IN(y, u, v, a, p + BPP, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
            p += wrap3;
            lum += wrap;

            YUVA_IN(y, u, v, a, p, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);

            YUVA_IN(y, u, v, a, p + BPP, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);

            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);

            cb++;
            cr++;
            p += -wrap3 + 2 * BPP;
            lum += -wrap + 2;
        }
        if (w) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            p += wrap3;
            lum += wrap;
            YUVA_IN(y, u, v, a, p, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
            cb++;
            cr++;
            p += -wrap3 + BPP;
            lum += -wrap + 1;
        }
        p += wrap3 + (wrap3 - dstw * BPP);
        lum += wrap + (wrap - dstw - dstx);
        cb += dst->linesize[1] - width2 - skip2;
        cr += dst->linesize[2] - width2 - skip2;
    }
    /* handle odd height */
    if (h) {
        lum += dstx;
        cb += skip2;
        cr += skip2;

        if (dstx & 1) {
            YUVA_IN(y, u, v, a, p, pal);
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
            cb++;
            cr++;
            lum++;
            p += BPP;
        }
        for (w = dstw - (dstx & 1); w >= 2; w -= 2) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);

            YUVA_IN(y, u, v, a, p + BPP, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
            cb++;
            cr++;
            p += 2 * BPP;
            lum += 2;
        }
        if (w) {
            YUVA_IN(y, u, v, a, p, pal);
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
        }
    }
}

static void free_subpicture(SubPicture *sp)
{
    avsubtitle_free(&sp->sub);
}

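/* Compute the largest display rectangle that fits the given screen area while
   preserving the picture's aspect ratio (sample aspect ratio times
   width/height), centering the result and keeping even dimensions. */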
static void calculate_display_rect(SDL_Rect *rect, int scr_xleft, int scr_ytop, int scr_width, int scr_height, VideoPicture *vp)
{
    float aspect_ratio;
    int width, height, x, y;

    if (vp->sar.num == 0)
        aspect_ratio = 0;
    else
        aspect_ratio = av_q2d(vp->sar);

    if (aspect_ratio <= 0.0)
        aspect_ratio = 1.0;
    aspect_ratio *= (float)vp->width / (float)vp->height;

    /* XXX: we suppose the screen has a 1.0 pixel ratio */
    height = scr_height;
    width = ((int)rint(height * aspect_ratio)) & ~1;
    if (width > scr_width) {
        width = scr_width;
        height = ((int)rint(width / aspect_ratio)) & ~1;
    }
    x = (scr_width - width) / 2;
    y = (scr_height - height) / 2;
    rect->x = scr_xleft + x;
    rect->y = scr_ytop  + y;
    rect->w = FFMAX(width,  1);
    rect->h = FFMAX(height, 1);
}

static void video_image_display(VideoState *is)
{
    VideoPicture *vp;
    SubPicture *sp;
    AVPicture pict;
    SDL_Rect rect;
    int i;

    vp = &is->pictq[is->pictq_rindex];
    if (vp->bmp) {
        if (is->subtitle_st) {
            if (is->subpq_size > 0) {
                sp = &is->subpq[is->subpq_rindex];

                if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000)) {
                    SDL_LockYUVOverlay (vp->bmp);

                    pict.data[0] = vp->bmp->pixels[0];
                    pict.data[1] = vp->bmp->pixels[2];
                    pict.data[2] = vp->bmp->pixels[1];

                    pict.linesize[0] = vp->bmp->pitches[0];
                    pict.linesize[1] = vp->bmp->pitches[2];
                    pict.linesize[2] = vp->bmp->pitches[1];

                    for (i = 0; i < sp->sub.num_rects; i++)
                        blend_subrect(&pict, sp->sub.rects[i],
                                      vp->bmp->w, vp->bmp->h);

                    SDL_UnlockYUVOverlay (vp->bmp);
                }
            }
        }

        calculate_display_rect(&rect, is->xleft, is->ytop, is->width, is->height, vp);

        SDL_DisplayYUVOverlay(vp->bmp, &rect);

        if (rect.x != is->last_display_rect.x || rect.y != is->last_display_rect.y || rect.w != is->last_display_rect.w || rect.h != is->last_display_rect.h || is->force_refresh) {
            int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
            fill_border(is->xleft, is->ytop, is->width, is->height, rect.x, rect.y, rect.w, rect.h, bgcolor, 1);
            is->last_display_rect = rect;
        }
    }
}

static inline int compute_mod(int a, int b)
{
    return a < 0 ? a%b + b : a%b;
}

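/* Audio-only visualization: either draw the raw waveform per channel or an
   RDFT (real FFT) spectrum column by column, picking a start index in the
   sample ring buffer that roughly matches the samples currently playing. */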
static void video_audio_display(VideoState *s)
{
    int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
    int ch, channels, h, h2, bgcolor, fgcolor;
    int64_t time_diff;
    int rdft_bits, nb_freq;

    for (rdft_bits = 1; (1 << rdft_bits) < 2 * s->height; rdft_bits++)
        ;
    nb_freq = 1 << (rdft_bits - 1);

    /* compute display index : center on currently output samples */
    channels = s->audio_tgt.channels;
    nb_display_channels = channels;
    if (!s->paused) {
        int data_used= s->show_mode == SHOW_MODE_WAVES ? s->width : (2*nb_freq);
        n = 2 * channels;
        delay = s->audio_write_buf_size;
        delay /= n;

        /* to be more precise, we take into account the time spent since
           the last buffer computation */
        if (audio_callback_time) {
            time_diff = av_gettime() - audio_callback_time;
            delay -= (time_diff * s->audio_tgt.freq) / 1000000;
        }

        delay += 2 * data_used;
        if (delay < data_used)
            delay = data_used;

        i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
        if (s->show_mode == SHOW_MODE_WAVES) {
            h = INT_MIN;
            for (i = 0; i < 1000; i += channels) {
                int idx = (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
                int a = s->sample_array[idx];
                int b = s->sample_array[(idx + 4 * channels) % SAMPLE_ARRAY_SIZE];
                int c = s->sample_array[(idx + 5 * channels) % SAMPLE_ARRAY_SIZE];
                int d = s->sample_array[(idx + 9 * channels) % SAMPLE_ARRAY_SIZE];
                int score = a - d;
                if (h < score && (b ^ c) < 0) {
                    h = score;
                    i_start = idx;
                }
            }
        }

        s->last_i_start = i_start;
    } else {
        i_start = s->last_i_start;
    }

    bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
    if (s->show_mode == SHOW_MODE_WAVES) {
        fill_rectangle(screen,
                       s->xleft, s->ytop, s->width, s->height,
                       bgcolor, 0);

        fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);

        /* total height for one channel */
        h = s->height / nb_display_channels;
        /* graph height / 2 */
        h2 = (h * 9) / 20;
        for (ch = 0; ch < nb_display_channels; ch++) {
            i = i_start + ch;
            y1 = s->ytop + ch * h + (h / 2); /* position of center line */
            for (x = 0; x < s->width; x++) {
                y = (s->sample_array[i] * h2) >> 15;
                if (y < 0) {
                    y = -y;
                    ys = y1 - y;
                } else {
                    ys = y1;
                }
                fill_rectangle(screen,
                               s->xleft + x, ys, 1, y,
                               fgcolor, 0);
                i += channels;
                if (i >= SAMPLE_ARRAY_SIZE)
                    i -= SAMPLE_ARRAY_SIZE;
            }
        }

        fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);

        for (ch = 1; ch < nb_display_channels; ch++) {
            y = s->ytop + ch * h;
            fill_rectangle(screen,
                           s->xleft, y, s->width, 1,
                           fgcolor, 0);
        }
        SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
    } else {
        nb_display_channels= FFMIN(nb_display_channels, 2);
        if (rdft_bits != s->rdft_bits) {
            av_rdft_end(s->rdft);
            av_free(s->rdft_data);
            s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
            s->rdft_bits = rdft_bits;
            s->rdft_data = av_malloc(4 * nb_freq * sizeof(*s->rdft_data));
        }
        {
            FFTSample *data[2];
            for (ch = 0; ch < nb_display_channels; ch++) {
                data[ch] = s->rdft_data + 2 * nb_freq * ch;
                i = i_start + ch;
                for (x = 0; x < 2 * nb_freq; x++) {
                    double w = (x-nb_freq) * (1.0 / nb_freq);
                    data[ch][x] = s->sample_array[i] * (1.0 - w * w);
                    i += channels;
                    if (i >= SAMPLE_ARRAY_SIZE)
                        i -= SAMPLE_ARRAY_SIZE;
                }
                av_rdft_calc(s->rdft, data[ch]);
            }
            // least efficient way to do this, we should of course directly access it but its more than fast enough
            for (y = 0; y < s->height; y++) {
                double w = 1 / sqrt(nb_freq);
                int a = sqrt(w * sqrt(data[0][2 * y + 0] * data[0][2 * y + 0] + data[0][2 * y + 1] * data[0][2 * y + 1]));
                int b = (nb_display_channels == 2 ) ? sqrt(w * sqrt(data[1][2 * y + 0] * data[1][2 * y + 0]
                       + data[1][2 * y + 1] * data[1][2 * y + 1])) : a;
                a = FFMIN(a, 255);
                b = FFMIN(b, 255);
                fgcolor = SDL_MapRGB(screen->format, a, b, (a + b) / 2);

                fill_rectangle(screen,
                               s->xpos, s->height-y, 1, 1,
                               fgcolor, 0);
            }
        }
        SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height);
        if (!s->paused)
            s->xpos++;
        if (s->xpos >= s->width)
            s->xpos= s->xleft;
    }
}

static void stream_close(VideoState *is)
{
    VideoPicture *vp;
    int i;
    /* XXX: use a special url_shutdown call to abort parse cleanly */
    is->abort_request = 1;
    SDL_WaitThread(is->read_tid, NULL);
    packet_queue_destroy(&is->videoq);
    packet_queue_destroy(&is->audioq);
    packet_queue_destroy(&is->subtitleq);

    /* free all pictures */
    for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++) {
        vp = &is->pictq[i];
        if (vp->bmp) {
            SDL_FreeYUVOverlay(vp->bmp);
            vp->bmp = NULL;
        }
    }
    SDL_DestroyMutex(is->pictq_mutex);
    SDL_DestroyCond(is->pictq_cond);
    SDL_DestroyMutex(is->subpq_mutex);
    SDL_DestroyCond(is->subpq_cond);
    SDL_DestroyCond(is->continue_read_thread);
#if !CONFIG_AVFILTER
    sws_freeContext(is->img_convert_ctx);
#endif
    av_free(is);
}

static void do_exit(VideoState *is)
{
    if (is) {
        stream_close(is);
    }
    av_lockmgr_register(NULL);
    uninit_opts();
#if CONFIG_AVFILTER
    av_freep(&vfilters);
#endif
    avformat_network_deinit();
    if (show_status)
        printf("\n");
    SDL_Quit();
    av_log(NULL, AV_LOG_QUIET, "%s", "");
    exit(0);
}

static void sigterm_handler(int sig)
{
    exit(123);
}

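/* (Re)open the SDL video surface. The window size comes from the full-screen
   dimensions, an explicitly requested screen size, or the picture's own
   display rectangle; nothing is done if the current surface already matches. */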
static int video_open(VideoState *is, int force_set_video_mode, VideoPicture *vp)
{
    int flags = SDL_HWSURFACE | SDL_ASYNCBLIT | SDL_HWACCEL;
    int w,h;
    SDL_Rect rect;

    if (is_full_screen) flags |= SDL_FULLSCREEN;
    else                flags |= SDL_RESIZABLE;

    if (vp && vp->width) {
        calculate_display_rect(&rect, 0, 0, INT_MAX, vp->height, vp);
        default_width  = rect.w;
        default_height = rect.h;
    }

    if (is_full_screen && fs_screen_width) {
        w = fs_screen_width;
        h = fs_screen_height;
    } else if (!is_full_screen && screen_width) {
        w = screen_width;
        h = screen_height;
    } else {
        w = default_width;
        h = default_height;
    }
    if (screen && is->width == screen->w && screen->w == w
       && is->height== screen->h && screen->h == h && !force_set_video_mode)
        return 0;
    screen = SDL_SetVideoMode(w, h, 0, flags);
    if (!screen) {
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
        do_exit(is);
    }
    if (!window_title)
        window_title = input_filename;
    SDL_WM_SetCaption(window_title, window_title);

    is->width  = screen->w;
    is->height = screen->h;

    return 0;
}

/* display the current picture, if any */
static void video_display(VideoState *is)
{
    if (!screen)
        video_open(is, 0, NULL);
    if (is->audio_st && is->show_mode != SHOW_MODE_VIDEO)
        video_audio_display(is);
    else if (is->video_st)
        video_image_display(is);
}

/* get the current audio clock value */
static double get_audio_clock(VideoState *is)
{
    if (is->audio_clock_serial != is->audioq.serial)
        return NAN;
    if (is->paused) {
        return is->audio_current_pts;
    } else {
        return is->audio_current_pts_drift + av_gettime() / 1000000.0;
    }
}

/* get the current video clock value */
static double get_video_clock(VideoState *is)
{
    if (is->video_clock_serial != is->videoq.serial)
        return NAN;
    if (is->paused) {
        return is->video_current_pts;
    } else {
        return is->video_current_pts_drift + av_gettime() / 1000000.0;
    }
}

/* get the current external clock value */
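/* When running, the clock extrapolates from the last update point: the stored
   pts advanced by the elapsed wall-clock time scaled by the clock speed. */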
static double get_external_clock(VideoState *is)
{
    if (is->paused) {
        return is->external_clock;
    } else {
        double time = av_gettime() / 1000000.0;
        return is->external_clock_drift + time - (time - is->external_clock_time / 1000000.0) * (1.0 - is->external_clock_speed);
    }
}

static int get_master_sync_type(VideoState *is) {
    if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
        if (is->video_st)
            return AV_SYNC_VIDEO_MASTER;
        else
            return AV_SYNC_AUDIO_MASTER;
    } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
        if (is->audio_st)
            return AV_SYNC_AUDIO_MASTER;
        else
            return AV_SYNC_EXTERNAL_CLOCK;
    } else {
        return AV_SYNC_EXTERNAL_CLOCK;
    }
}

/* get the current master clock value */
static double get_master_clock(VideoState *is)
{
    double val;

    switch (get_master_sync_type(is)) {
    case AV_SYNC_VIDEO_MASTER:
        val = get_video_clock(is);
        break;
    case AV_SYNC_AUDIO_MASTER:
        val = get_audio_clock(is);
        break;
    default:
        val = get_external_clock(is);
        break;
    }
    return val;
}

static void update_external_clock_pts(VideoState *is, double pts)
{
    is->external_clock_time = av_gettime();
    is->external_clock = pts;
    is->external_clock_drift = pts - is->external_clock_time / 1000000.0;
}

static void check_external_clock_sync(VideoState *is, double pts) {
    double ext_clock = get_external_clock(is);
    if (isnan(ext_clock) || fabs(ext_clock - pts) > AV_NOSYNC_THRESHOLD) {
        update_external_clock_pts(is, pts);
    }
}

static void update_external_clock_speed(VideoState *is, double speed) {
    update_external_clock_pts(is, get_external_clock(is));
    is->external_clock_speed = speed;
}

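/* For realtime sources slaved to the external clock, nudge the clock speed
   down when either packet queue runs low, up when both are comfortably
   filled, and back toward 1.0 otherwise. */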
static void check_external_clock_speed(VideoState *is) {
    if (is->video_stream >= 0 && is->videoq.nb_packets <= MIN_FRAMES / 2 ||
        is->audio_stream >= 0 && is->audioq.nb_packets <= MIN_FRAMES / 2) {
        update_external_clock_speed(is, FFMAX(EXTERNAL_CLOCK_SPEED_MIN, is->external_clock_speed - EXTERNAL_CLOCK_SPEED_STEP));
    } else if ((is->video_stream < 0 || is->videoq.nb_packets > MIN_FRAMES * 2) &&
               (is->audio_stream < 0 || is->audioq.nb_packets > MIN_FRAMES * 2)) {
        update_external_clock_speed(is, FFMIN(EXTERNAL_CLOCK_SPEED_MAX, is->external_clock_speed + EXTERNAL_CLOCK_SPEED_STEP));
    } else {
        double speed = is->external_clock_speed;
        if (speed != 1.0)
            update_external_clock_speed(is, speed + EXTERNAL_CLOCK_SPEED_STEP * (1.0 - speed) / fabs(1.0 - speed));
    }
}

/* seek in the stream */
static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes)
{
    if (!is->seek_req) {
        is->seek_pos = pos;
        is->seek_rel = rel;
        is->seek_flags &= ~AVSEEK_FLAG_BYTE;
        if (seek_by_bytes)
            is->seek_flags |= AVSEEK_FLAG_BYTE;
        is->seek_req = 1;
        SDL_CondSignal(is->continue_read_thread);
    }
}

/* pause or resume the video */
static void stream_toggle_pause(VideoState *is)
{
    if (is->paused) {
        is->frame_timer += av_gettime() / 1000000.0 + is->video_current_pts_drift - is->video_current_pts;
        if (is->read_pause_return != AVERROR(ENOSYS)) {
            is->video_current_pts = is->video_current_pts_drift + av_gettime() / 1000000.0;
        }
        is->video_current_pts_drift = is->video_current_pts - av_gettime() / 1000000.0;
    }
    update_external_clock_pts(is, get_external_clock(is));
    is->paused = !is->paused;
}

static void toggle_pause(VideoState *is)
{
    stream_toggle_pause(is);
    is->step = 0;
}

static void step_to_next_frame(VideoState *is)
{
    /* if the stream is paused unpause it, then step */
    if (is->paused)
        stream_toggle_pause(is);
    is->step = 1;
}

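/* When video is not the master clock, stretch or shrink the nominal frame
   delay so the video clock tracks the master: a late frame gets zero delay,
   an early one waits roughly twice as long. Differences beyond
   AV_NOSYNC_THRESHOLD are left alone as likely timestamp discontinuities. */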
static double compute_target_delay(double delay, VideoState *is)
{
    double sync_threshold, diff;

    /* update delay to follow master synchronisation source */
    if (get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER) {
        /* if video is slave, we try to correct big delays by
           duplicating or deleting a frame */
        diff = get_video_clock(is) - get_master_clock(is);

        /* skip or repeat frame. We take into account the
           delay to compute the threshold. I still don't know
           if it is the best guess */
        sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
        if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD) {
            if (diff <= -sync_threshold)
                delay = 0;
            else if (diff >= sync_threshold)
                delay = 2 * delay;
        }
    }

    av_dlog(NULL, "video: delay=%0.3f A-V=%f\n",
            delay, -diff);

    return delay;
}

static void pictq_next_picture(VideoState *is) {
    /* update queue size and signal for next picture */
    if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
        is->pictq_rindex = 0;

    SDL_LockMutex(is->pictq_mutex);
    is->pictq_size--;
    SDL_CondSignal(is->pictq_cond);
    SDL_UnlockMutex(is->pictq_mutex);
}

static int pictq_prev_picture(VideoState *is) {
    VideoPicture *prevvp;
    int ret = 0;
    /* update queue size and signal for the previous picture */
    prevvp = &is->pictq[(is->pictq_rindex + VIDEO_PICTURE_QUEUE_SIZE - 1) % VIDEO_PICTURE_QUEUE_SIZE];
    if (prevvp->allocated && prevvp->serial == is->videoq.serial) {
        SDL_LockMutex(is->pictq_mutex);
        if (is->pictq_size < VIDEO_PICTURE_QUEUE_SIZE - 1) {
            if (--is->pictq_rindex == -1)
                is->pictq_rindex = VIDEO_PICTURE_QUEUE_SIZE - 1;
            is->pictq_size++;
            ret = 1;
        }
        SDL_CondSignal(is->pictq_cond);
        SDL_UnlockMutex(is->pictq_mutex);
    }
    return ret;
}

static void update_video_pts(VideoState *is, double pts, int64_t pos, int serial) {
    double time = av_gettime() / 1000000.0;
    /* update current video pts */
    is->video_current_pts = pts;
    is->video_current_pts_drift = is->video_current_pts - time;
    is->video_current_pos = pos;
    is->frame_last_pts = pts;
    is->video_clock_serial = serial;
    if (is->videoq.serial == serial)
        check_external_clock_sync(is, is->video_current_pts);
}

/* called to display each frame */
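/* Draws the audio visualization when due, then services the picture queue:
   frames from an old serial are skipped, the target delay decides whether to
   show the current frame or report the remaining wait, and late frames may
   be dropped when frame dropping is enabled. */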
static void video_refresh(void *opaque, double *remaining_time)
{
    VideoState *is = opaque;
    VideoPicture *vp;
    double time;

    SubPicture *sp, *sp2;

    if (!is->paused && get_master_sync_type(is) == AV_SYNC_EXTERNAL_CLOCK && is->realtime)
        check_external_clock_speed(is);

    if (!display_disable && is->show_mode != SHOW_MODE_VIDEO && is->audio_st) {
        time = av_gettime() / 1000000.0;
        if (is->force_refresh || is->last_vis_time + rdftspeed < time) {
            video_display(is);
            is->last_vis_time = time;
        }
        *remaining_time = FFMIN(*remaining_time, is->last_vis_time + rdftspeed - time);
    }

    if (is->video_st) {
        int redisplay = 0;
        if (is->force_refresh)
            redisplay = pictq_prev_picture(is);
retry:
        if (is->pictq_size == 0) {
            SDL_LockMutex(is->pictq_mutex);
            if (is->frame_last_dropped_pts != AV_NOPTS_VALUE && is->frame_last_dropped_pts > is->frame_last_pts) {
                update_video_pts(is, is->frame_last_dropped_pts, is->frame_last_dropped_pos, is->frame_last_dropped_serial);
                is->frame_last_dropped_pts = AV_NOPTS_VALUE;
            }
            SDL_UnlockMutex(is->pictq_mutex);
            // nothing to do, no picture to display in the queue
        } else {
            double last_duration, duration, delay;
            /* dequeue the picture */
            vp = &is->pictq[is->pictq_rindex];

            if (vp->serial != is->videoq.serial) {
                pictq_next_picture(is);
                redisplay = 0;
                goto retry;
            }

            if (is->paused)
                goto display;

            /* compute nominal last_duration */
            last_duration = vp->pts - is->frame_last_pts;
            if (!isnan(last_duration) && last_duration > 0 && last_duration < is->max_frame_duration) {
                /* if duration of the last frame was sane, update last_duration in video state */
                is->frame_last_duration = last_duration;
            }
            delay = compute_target_delay(is->frame_last_duration, is);

            time= av_gettime()/1000000.0;
            if (time < is->frame_timer + delay) {
                *remaining_time = FFMIN(is->frame_timer + delay - time, *remaining_time);
                return;
            }

            if (delay > 0)
                is->frame_timer += delay * FFMAX(1, floor((time-is->frame_timer) / delay));

            SDL_LockMutex(is->pictq_mutex);
            if (!isnan(vp->pts))
                update_video_pts(is, vp->pts, vp->pos, vp->serial);
            SDL_UnlockMutex(is->pictq_mutex);

            if (is->pictq_size > 1) {
                VideoPicture *nextvp = &is->pictq[(is->pictq_rindex + 1) % VIDEO_PICTURE_QUEUE_SIZE];
                duration = nextvp->pts - vp->pts;
                if(!is->step && (redisplay || framedrop>0 || (framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) && time > is->frame_timer + duration){
                    if (!redisplay)
                        is->frame_drops_late++;
                    pictq_next_picture(is);
                    redisplay = 0;
                    goto retry;
                }
            }

            if (is->subtitle_st) {
                if (is->subtitle_stream_changed) {
                    SDL_LockMutex(is->subpq_mutex);

                    while (is->subpq_size) {
                        free_subpicture(&is->subpq[is->subpq_rindex]);

                        /* update queue size and signal for next picture */
                        if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
                            is->subpq_rindex = 0;
|
1406
|
yading@10
|
1407 is->subpq_size--;
|
yading@10
|
1408 }
|
yading@10
|
1409 is->subtitle_stream_changed = 0;
|
yading@10
|
1410
|
yading@10
|
1411 SDL_CondSignal(is->subpq_cond);
|
yading@10
|
1412 SDL_UnlockMutex(is->subpq_mutex);
|
yading@10
|
1413 } else {
|
yading@10
|
1414 if (is->subpq_size > 0) {
|
yading@10
|
1415 sp = &is->subpq[is->subpq_rindex];
|
yading@10
|
1416
|
yading@10
|
1417 if (is->subpq_size > 1)
|
yading@10
|
1418 sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
|
yading@10
|
1419 else
|
yading@10
|
1420 sp2 = NULL;
|
yading@10
|
1421
|
yading@10
|
1422 if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
|
yading@10
|
1423 || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
|
yading@10
|
1424 {
|
yading@10
|
1425 free_subpicture(sp);
|
yading@10
|
1426
|
yading@10
|
1427 /* update queue size and signal for next picture */
|
yading@10
|
1428 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
|
yading@10
|
1429 is->subpq_rindex = 0;
|
yading@10
|
1430
|
yading@10
|
1431 SDL_LockMutex(is->subpq_mutex);
|
yading@10
|
1432 is->subpq_size--;
|
yading@10
|
1433 SDL_CondSignal(is->subpq_cond);
|
yading@10
|
1434 SDL_UnlockMutex(is->subpq_mutex);
|
yading@10
|
1435 }
|
yading@10
|
1436 }
|
yading@10
|
1437 }
|
yading@10
|
1438 }
|
yading@10
|
1439
|
yading@10
|
1440 display:
|
yading@10
|
1441 /* display picture */
|
yading@10
|
1442 if (!display_disable && is->show_mode == SHOW_MODE_VIDEO)
|
yading@10
|
1443 video_display(is);
|
yading@10
|
1444
|
yading@10
|
1445 pictq_next_picture(is);
|
yading@10
|
1446
|
yading@10
|
1447 if (is->step && !is->paused)
|
yading@10
|
1448 stream_toggle_pause(is);
|
yading@10
|
1449 }
|
yading@10
|
1450 }
|
yading@10
|
1451 is->force_refresh = 0;
|
yading@10
|
1452 if (show_status) {
|
yading@10
|
1453 static int64_t last_time;
|
yading@10
|
1454 int64_t cur_time;
|
yading@10
|
1455 int aqsize, vqsize, sqsize;
|
yading@10
|
1456 double av_diff;
|
yading@10
|
1457
|
yading@10
|
1458 cur_time = av_gettime();
|
yading@10
|
1459 if (!last_time || (cur_time - last_time) >= 30000) {
|
yading@10
|
1460 aqsize = 0;
|
yading@10
|
1461 vqsize = 0;
|
yading@10
|
1462 sqsize = 0;
|
yading@10
|
1463 if (is->audio_st)
|
yading@10
|
1464 aqsize = is->audioq.size;
|
yading@10
|
1465 if (is->video_st)
|
yading@10
|
1466 vqsize = is->videoq.size;
|
yading@10
|
1467 if (is->subtitle_st)
|
yading@10
|
1468 sqsize = is->subtitleq.size;
|
yading@10
|
1469 av_diff = 0;
|
yading@10
|
1470 if (is->audio_st && is->video_st)
|
yading@10
|
1471 av_diff = get_audio_clock(is) - get_video_clock(is);
|
yading@10
|
1472 printf("%7.2f A-V:%7.3f fd=%4d aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r",
|
yading@10
|
1473 get_master_clock(is),
|
yading@10
|
1474 av_diff,
|
yading@10
|
1475 is->frame_drops_early + is->frame_drops_late,
|
yading@10
|
1476 aqsize / 1024,
|
yading@10
|
1477 vqsize / 1024,
|
yading@10
|
1478 sqsize,
|
yading@10
|
1479 is->video_st ? is->video_st->codec->pts_correction_num_faulty_dts : 0,
|
yading@10
|
1480 is->video_st ? is->video_st->codec->pts_correction_num_faulty_pts : 0);
|
yading@10
|
1481 fflush(stdout);
|
yading@10
|
1482 last_time = cur_time;
|
yading@10
|
1483 }
|
yading@10
|
1484 }
|
yading@10
|
1485 }
|
yading@10
|
1486
/* allocate a picture (this must be done in the main thread to avoid
   potential locking problems) */
static void alloc_picture(VideoState *is)
{
    VideoPicture *vp;

    vp = &is->pictq[is->pictq_windex];

    if (vp->bmp)
        SDL_FreeYUVOverlay(vp->bmp);

    video_open(is, 0, vp);

    vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height,
                                   SDL_YV12_OVERLAY,
                                   screen);
    if (!vp->bmp || vp->bmp->pitches[0] < vp->width) {
        /* SDL allocates a buffer smaller than requested if the video
         * overlay hardware is unable to support the requested size. */
        fprintf(stderr, "Error: the video system does not support an image\n"
                        "size of %dx%d pixels. Try using -lowres or -vf \"scale=w:h\"\n"
                        "to reduce the image size.\n", vp->width, vp->height);
        do_exit(is);
    }

    SDL_LockMutex(is->pictq_mutex);
    vp->allocated = 1;
    SDL_CondSignal(is->pictq_cond);
    SDL_UnlockMutex(is->pictq_mutex);
}

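/* When the SDL overlay pitch is wider than the visible width, the helper
 * below copies the last visible pixel of every row one column to the right,
 * for the luma plane at full size and the two chroma planes at half
 * width/height, presumably so that scaling at the right edge does not read
 * uninitialized bytes (see the PITCH_WORKAROUND note in queue_picture()). */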
static void duplicate_right_border_pixels(SDL_Overlay *bmp) {
    int i, width, height;
    Uint8 *p, *maxp;
    for (i = 0; i < 3; i++) {
        width  = bmp->w;
        height = bmp->h;
        if (i > 0) {
            width  >>= 1;
            height >>= 1;
        }
        if (bmp->pitches[i] > width) {
            maxp = bmp->pixels[i] + bmp->pitches[i] * height - 1;
            for (p = bmp->pixels[i] + width - 1; p < maxp; p += bmp->pitches[i])
                *(p + 1) = *p;
        }
    }
}

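/* queue_picture() hands a decoded frame to the display side: it waits for a
 * free slot (keeping the last displayed picture around for redisplay), asks
 * the main thread to (re)allocate the SDL overlay via FF_ALLOC_EVENT when the
 * frame geometry changes, converts the frame into the YV12 overlay, and then
 * bumps the write index and queue size under the pictq mutex. */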
static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, int64_t pos, int serial)
{
    VideoPicture *vp;

#if defined(DEBUG_SYNC) && 0
    printf("frame_type=%c pts=%0.3f\n",
           av_get_picture_type_char(src_frame->pict_type), pts);
#endif

    /* wait until we have space to put a new picture */
    SDL_LockMutex(is->pictq_mutex);

    /* keep the last already displayed picture in the queue */
    while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE - 2 &&
           !is->videoq.abort_request) {
        SDL_CondWait(is->pictq_cond, is->pictq_mutex);
    }
    SDL_UnlockMutex(is->pictq_mutex);

    if (is->videoq.abort_request)
        return -1;

    vp = &is->pictq[is->pictq_windex];

    vp->sar = src_frame->sample_aspect_ratio;

    /* alloc or resize hardware picture buffer */
    if (!vp->bmp || vp->reallocate || !vp->allocated ||
        vp->width  != src_frame->width ||
        vp->height != src_frame->height) {
        SDL_Event event;

        vp->allocated  = 0;
        vp->reallocate = 0;
        vp->width  = src_frame->width;
        vp->height = src_frame->height;

        /* the allocation must be done in the main thread to avoid
           locking problems. */
        event.type = FF_ALLOC_EVENT;
        event.user.data1 = is;
        SDL_PushEvent(&event);

        /* wait until the picture is allocated */
        SDL_LockMutex(is->pictq_mutex);
        while (!vp->allocated && !is->videoq.abort_request) {
            SDL_CondWait(is->pictq_cond, is->pictq_mutex);
        }
        /* if the queue is aborted, we have to pop the pending ALLOC event or wait for the allocation to complete */
        if (is->videoq.abort_request && SDL_PeepEvents(&event, 1, SDL_GETEVENT, SDL_EVENTMASK(FF_ALLOC_EVENT)) != 1) {
            while (!vp->allocated) {
                SDL_CondWait(is->pictq_cond, is->pictq_mutex);
            }
        }
        SDL_UnlockMutex(is->pictq_mutex);

        if (is->videoq.abort_request)
            return -1;
    }

    /* if the frame is not skipped, then display it */
    if (vp->bmp) {
        AVPicture pict = { { 0 } };

        /* get a pointer on the bitmap */
        SDL_LockYUVOverlay(vp->bmp);

        pict.data[0] = vp->bmp->pixels[0];
        pict.data[1] = vp->bmp->pixels[2];
        pict.data[2] = vp->bmp->pixels[1];

        pict.linesize[0] = vp->bmp->pitches[0];
        pict.linesize[1] = vp->bmp->pitches[2];
        pict.linesize[2] = vp->bmp->pitches[1];

#if CONFIG_AVFILTER
        // FIXME use direct rendering
        av_picture_copy(&pict, (AVPicture *)src_frame,
                        src_frame->format, vp->width, vp->height);
#else
        av_opt_get_int(sws_opts, "sws_flags", 0, &sws_flags);
        is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
            vp->width, vp->height, src_frame->format, vp->width, vp->height,
            AV_PIX_FMT_YUV420P, sws_flags, NULL, NULL, NULL);
        if (is->img_convert_ctx == NULL) {
            fprintf(stderr, "Cannot initialize the conversion context\n");
            exit(1);
        }
        sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize,
                  0, vp->height, pict.data, pict.linesize);
#endif
        /* workaround SDL PITCH_WORKAROUND */
        duplicate_right_border_pixels(vp->bmp);
        /* update the bitmap content */
        SDL_UnlockYUVOverlay(vp->bmp);

        vp->pts = pts;
        vp->pos = pos;
        vp->serial = serial;

        /* now we can update the picture count */
        if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
            is->pictq_windex = 0;
        SDL_LockMutex(is->pictq_mutex);
        is->pictq_size++;
        SDL_UnlockMutex(is->pictq_mutex);
    }
    return 0;
}

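/* get_video_frame() returns -1 on abort, 0 when no displayable frame was
 * produced (flush packet, decode error, or an early drop) and 1 when a frame
 * is ready. The early-drop branch discards frames that would already be late
 * relative to the master clock while packets are still queued, trading
 * decoded-but-never-shown work for a quicker catch-up after a stall. */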
static int get_video_frame(VideoState *is, AVFrame *frame, AVPacket *pkt, int *serial)
{
    int got_picture;

    if (packet_queue_get(&is->videoq, pkt, 1, serial) < 0)
        return -1;

    if (pkt->data == flush_pkt.data) {
        avcodec_flush_buffers(is->video_st->codec);

        SDL_LockMutex(is->pictq_mutex);
        // Make sure there are no long delay timers (ideally we should just flush the queue but that's harder)
        while (is->pictq_size && !is->videoq.abort_request) {
            SDL_CondWait(is->pictq_cond, is->pictq_mutex);
        }
        is->video_current_pos = -1;
        is->frame_last_pts = AV_NOPTS_VALUE;
        is->frame_last_duration = 0;
        is->frame_timer = (double)av_gettime() / 1000000.0;
        is->frame_last_dropped_pts = AV_NOPTS_VALUE;
        SDL_UnlockMutex(is->pictq_mutex);
        return 0;
    }

    if (avcodec_decode_video2(is->video_st->codec, frame, &got_picture, pkt) < 0)
        return 0;

    if (got_picture) {
        int ret = 1;
        double dpts = NAN;

        if (decoder_reorder_pts == -1) {
            frame->pts = av_frame_get_best_effort_timestamp(frame);
        } else if (decoder_reorder_pts) {
            frame->pts = frame->pkt_pts;
        } else {
            frame->pts = frame->pkt_dts;
        }

        if (frame->pts != AV_NOPTS_VALUE)
            dpts = av_q2d(is->video_st->time_base) * frame->pts;

        frame->sample_aspect_ratio = av_guess_sample_aspect_ratio(is->ic, is->video_st, frame);

        if (framedrop > 0 || (framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) {
            SDL_LockMutex(is->pictq_mutex);
            if (is->frame_last_pts != AV_NOPTS_VALUE && frame->pts != AV_NOPTS_VALUE) {
                double clockdiff = get_video_clock(is) - get_master_clock(is);
                double ptsdiff = dpts - is->frame_last_pts;
                if (!isnan(clockdiff) && fabs(clockdiff) < AV_NOSYNC_THRESHOLD &&
                    !isnan(ptsdiff) && ptsdiff > 0 && ptsdiff < AV_NOSYNC_THRESHOLD &&
                    clockdiff + ptsdiff - is->frame_last_filter_delay < 0 &&
                    is->videoq.nb_packets) {
                    is->frame_last_dropped_pos = pkt->pos;
                    is->frame_last_dropped_pts = dpts;
                    is->frame_last_dropped_serial = *serial;
                    is->frame_drops_early++;
                    av_frame_unref(frame);
                    ret = 0;
                }
            }
            SDL_UnlockMutex(is->pictq_mutex);
        }

        return ret;
    }
    return 0;
}

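/* configure_filtergraph() wires source_ctx -> [user filtergraph] -> sink_ctx.
 * When a filtergraph string is given, "in"/"out" labels are attached to the
 * source and sink pads before parsing; otherwise the two contexts are linked
 * directly. An illustrative (not built-in) graph string would be something
 * like "yadif,scale=640:-1", as it could be passed with the -vf option. */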
#if CONFIG_AVFILTER
static int configure_filtergraph(AVFilterGraph *graph, const char *filtergraph,
                                 AVFilterContext *source_ctx, AVFilterContext *sink_ctx)
{
    int ret;
    AVFilterInOut *outputs = NULL, *inputs = NULL;

    if (filtergraph) {
        outputs = avfilter_inout_alloc();
        inputs  = avfilter_inout_alloc();
        if (!outputs || !inputs) {
            ret = AVERROR(ENOMEM);
            goto fail;
        }

        outputs->name       = av_strdup("in");
        outputs->filter_ctx = source_ctx;
        outputs->pad_idx    = 0;
        outputs->next       = NULL;

        inputs->name        = av_strdup("out");
        inputs->filter_ctx  = sink_ctx;
        inputs->pad_idx     = 0;
        inputs->next        = NULL;

        if ((ret = avfilter_graph_parse(graph, filtergraph, &inputs, &outputs, NULL)) < 0)
            goto fail;
    } else {
        if ((ret = avfilter_link(source_ctx, 0, sink_ctx, 0)) < 0)
            goto fail;
    }

    ret = avfilter_graph_config(graph, NULL);
fail:
    avfilter_inout_free(&outputs);
    avfilter_inout_free(&inputs);
    return ret;
}

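/* The video filter chain built below is:
 *   buffer (decoded frames in) -> user filters (-vf) -> crop to even
 *   width/height -> buffersink (YUV420P only).
 * The crop stage exists because some SDL YUV driver combinations mishandle
 * odd dimensions, as the inline comment notes. */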
static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const char *vfilters, AVFrame *frame)
{
    static const enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE };
    char sws_flags_str[128];
    char buffersrc_args[256];
    int ret;
    AVFilterContext *filt_src = NULL, *filt_out = NULL, *filt_crop;
    AVCodecContext *codec = is->video_st->codec;
    AVRational fr = av_guess_frame_rate(is->ic, is->video_st, NULL);

    av_opt_get_int(sws_opts, "sws_flags", 0, &sws_flags);
    snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%"PRId64, sws_flags);
    graph->scale_sws_opts = av_strdup(sws_flags_str);

    snprintf(buffersrc_args, sizeof(buffersrc_args),
             "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
             frame->width, frame->height, frame->format,
             is->video_st->time_base.num, is->video_st->time_base.den,
             codec->sample_aspect_ratio.num, FFMAX(codec->sample_aspect_ratio.den, 1));
    if (fr.num && fr.den)
        av_strlcatf(buffersrc_args, sizeof(buffersrc_args), ":frame_rate=%d/%d", fr.num, fr.den);

    if ((ret = avfilter_graph_create_filter(&filt_src,
                                            avfilter_get_by_name("buffer"),
                                            "ffplay_buffer", buffersrc_args, NULL,
                                            graph)) < 0)
        goto fail;

    ret = avfilter_graph_create_filter(&filt_out,
                                       avfilter_get_by_name("buffersink"),
                                       "ffplay_buffersink", NULL, NULL, graph);
    if (ret < 0)
        goto fail;

    if ((ret = av_opt_set_int_list(filt_out, "pix_fmts", pix_fmts, AV_PIX_FMT_NONE, AV_OPT_SEARCH_CHILDREN)) < 0)
        goto fail;

    /* SDL YUV code is not handling odd width/height for some driver
     * combinations, therefore we crop the picture to an even width/height. */
    if ((ret = avfilter_graph_create_filter(&filt_crop,
                                            avfilter_get_by_name("crop"),
                                            "ffplay_crop", "floor(in_w/2)*2:floor(in_h/2)*2", NULL, graph)) < 0)
        goto fail;
    if ((ret = avfilter_link(filt_crop, 0, filt_out, 0)) < 0)
        goto fail;

    if ((ret = configure_filtergraph(graph, vfilters, filt_src, filt_crop)) < 0)
        goto fail;

    is->in_video_filter  = filt_src;
    is->out_video_filter = filt_out;

fail:
    return ret;
}

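/* configure_audio_filters() builds abuffer -> user filters (-af) ->
 * abuffersink, always forcing signed 16-bit output. When force_output_format
 * is set (i.e. after the SDL audio device has been opened), the sink is
 * additionally constrained to the device's rate, channel count and layout so
 * no further conversion is needed downstream. */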
static int configure_audio_filters(VideoState *is, const char *afilters, int force_output_format)
{
    static const enum AVSampleFormat sample_fmts[] = { AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_NONE };
    int sample_rates[2] = { 0, -1 };
    int64_t channel_layouts[2] = { 0, -1 };
    int channels[2] = { 0, -1 };
    AVFilterContext *filt_asrc = NULL, *filt_asink = NULL;
    char asrc_args[256];
    int ret;

    avfilter_graph_free(&is->agraph);
    if (!(is->agraph = avfilter_graph_alloc()))
        return AVERROR(ENOMEM);

    ret = snprintf(asrc_args, sizeof(asrc_args),
                   "sample_rate=%d:sample_fmt=%s:channels=%d:time_base=%d/%d",
                   is->audio_filter_src.freq, av_get_sample_fmt_name(is->audio_filter_src.fmt),
                   is->audio_filter_src.channels,
                   1, is->audio_filter_src.freq);
    if (is->audio_filter_src.channel_layout)
        snprintf(asrc_args + ret, sizeof(asrc_args) - ret,
                 ":channel_layout=0x%"PRIx64, is->audio_filter_src.channel_layout);

    ret = avfilter_graph_create_filter(&filt_asrc,
                                       avfilter_get_by_name("abuffer"), "ffplay_abuffer",
                                       asrc_args, NULL, is->agraph);
    if (ret < 0)
        goto end;

    ret = avfilter_graph_create_filter(&filt_asink,
                                       avfilter_get_by_name("abuffersink"), "ffplay_abuffersink",
                                       NULL, NULL, is->agraph);
    if (ret < 0)
        goto end;

    if ((ret = av_opt_set_int_list(filt_asink, "sample_fmts", sample_fmts, AV_SAMPLE_FMT_NONE, AV_OPT_SEARCH_CHILDREN)) < 0)
        goto end;
    if ((ret = av_opt_set_int(filt_asink, "all_channel_counts", 1, AV_OPT_SEARCH_CHILDREN)) < 0)
        goto end;

    if (force_output_format) {
        channel_layouts[0] = is->audio_tgt.channel_layout;
        channels       [0] = is->audio_tgt.channels;
        sample_rates   [0] = is->audio_tgt.freq;
        if ((ret = av_opt_set_int(filt_asink, "all_channel_counts", 0, AV_OPT_SEARCH_CHILDREN)) < 0)
            goto end;
        if ((ret = av_opt_set_int_list(filt_asink, "channel_layouts", channel_layouts, -1, AV_OPT_SEARCH_CHILDREN)) < 0)
            goto end;
        if ((ret = av_opt_set_int_list(filt_asink, "channel_counts" , channels       , -1, AV_OPT_SEARCH_CHILDREN)) < 0)
            goto end;
        if ((ret = av_opt_set_int_list(filt_asink, "sample_rates"   , sample_rates   , -1, AV_OPT_SEARCH_CHILDREN)) < 0)
            goto end;
    }

    if ((ret = configure_filtergraph(is->agraph, afilters, filt_asrc, filt_asink)) < 0)
        goto end;

    is->in_audio_filter  = filt_asrc;
    is->out_audio_filter = filt_asink;

end:
    if (ret < 0)
        avfilter_graph_free(&is->agraph);
    return ret;
}
#endif  /* CONFIG_AVFILTER */

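/* video_thread() pulls packets from the video queue, decodes them, and (when
 * compiled with libavfilter) pushes each frame through the filter graph,
 * rebuilding that graph whenever the frame size, pixel format or queue serial
 * changes; filtered frames are handed to queue_picture() with their PTS
 * rescaled to seconds. */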
static int video_thread(void *arg)
{
    AVPacket pkt = { 0 };
    VideoState *is = arg;
    AVFrame *frame = av_frame_alloc();
    double pts;
    int ret;
    int serial = 0;

#if CONFIG_AVFILTER
    AVFilterGraph *graph = avfilter_graph_alloc();
    AVFilterContext *filt_out = NULL, *filt_in = NULL;
    int last_w = 0;
    int last_h = 0;
    enum AVPixelFormat last_format = -2;
    int last_serial = -1;
#endif

    for (;;) {
        while (is->paused && !is->videoq.abort_request)
            SDL_Delay(10);

        avcodec_get_frame_defaults(frame);
        av_free_packet(&pkt);

        ret = get_video_frame(is, frame, &pkt, &serial);
        if (ret < 0)
            goto the_end;
        if (!ret)
            continue;

#if CONFIG_AVFILTER
        if (   last_w != frame->width
            || last_h != frame->height
            || last_format != frame->format
            || last_serial != serial) {
            av_log(NULL, AV_LOG_DEBUG,
                   "Video frame changed from size:%dx%d format:%s serial:%d to size:%dx%d format:%s serial:%d\n",
                   last_w, last_h,
                   (const char *)av_x_if_null(av_get_pix_fmt_name(last_format), "none"), last_serial,
                   frame->width, frame->height,
                   (const char *)av_x_if_null(av_get_pix_fmt_name(frame->format), "none"), serial);
            avfilter_graph_free(&graph);
            graph = avfilter_graph_alloc();
            if ((ret = configure_video_filters(graph, is, vfilters, frame)) < 0) {
                SDL_Event event;
                event.type = FF_QUIT_EVENT;
                event.user.data1 = is;
                SDL_PushEvent(&event);
                av_free_packet(&pkt);
                goto the_end;
            }
            filt_in  = is->in_video_filter;
            filt_out = is->out_video_filter;
            last_w = frame->width;
            last_h = frame->height;
            last_format = frame->format;
            last_serial = serial;
        }

        ret = av_buffersrc_add_frame(filt_in, frame);
        if (ret < 0)
            goto the_end;
        av_frame_unref(frame);
        avcodec_get_frame_defaults(frame);
        av_free_packet(&pkt);

        while (ret >= 0) {
            is->frame_last_returned_time = av_gettime() / 1000000.0;

            ret = av_buffersink_get_frame_flags(filt_out, frame, 0);
            if (ret < 0) {
                ret = 0;
                break;
            }

            is->frame_last_filter_delay = av_gettime() / 1000000.0 - is->frame_last_returned_time;
            if (fabs(is->frame_last_filter_delay) > AV_NOSYNC_THRESHOLD / 10.0)
                is->frame_last_filter_delay = 0;

            pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(filt_out->inputs[0]->time_base);
            ret = queue_picture(is, frame, pts, av_frame_get_pkt_pos(frame), serial);
            av_frame_unref(frame);
        }
#else
        pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(is->video_st->time_base);
        ret = queue_picture(is, frame, pts, pkt.pos, serial);
        av_frame_unref(frame);
#endif

        if (ret < 0)
            goto the_end;
    }
the_end:
    avcodec_flush_buffers(is->video_st->codec);
#if CONFIG_AVFILTER
    avfilter_graph_free(&graph);
#endif
    av_free_packet(&pkt);
    av_frame_free(&frame);
    return 0;
}

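/* subtitle_thread() decodes subtitle packets into the subpicture queue. For
 * bitmap subtitles (sub.format == 0) the palette of every rectangle is
 * converted in place from RGBA to CCIR YUVA so that blending onto the YV12
 * overlay can be done directly at display time. */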
static int subtitle_thread(void *arg)
{
    VideoState *is = arg;
    SubPicture *sp;
    AVPacket pkt1, *pkt = &pkt1;
    int got_subtitle;
    double pts;
    int i, j;
    int r, g, b, y, u, v, a;

    for (;;) {
        while (is->paused && !is->subtitleq.abort_request) {
            SDL_Delay(10);
        }
        if (packet_queue_get(&is->subtitleq, pkt, 1, NULL) < 0)
            break;

        if (pkt->data == flush_pkt.data) {
            avcodec_flush_buffers(is->subtitle_st->codec);
            continue;
        }
        SDL_LockMutex(is->subpq_mutex);
        while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
               !is->subtitleq.abort_request) {
            SDL_CondWait(is->subpq_cond, is->subpq_mutex);
        }
        SDL_UnlockMutex(is->subpq_mutex);

        if (is->subtitleq.abort_request)
            return 0;

        sp = &is->subpq[is->subpq_windex];

        /* NOTE: pts below is the PTS of the _first_ picture beginning in
           this packet, if any */
2017 pts = 0;
|
yading@10
|
2018 if (pkt->pts != AV_NOPTS_VALUE)
|
yading@10
|
2019 pts = av_q2d(is->subtitle_st->time_base) * pkt->pts;
|
yading@10
|
2020
|
yading@10
|
2021 avcodec_decode_subtitle2(is->subtitle_st->codec, &sp->sub,
|
yading@10
|
2022 &got_subtitle, pkt);
|
yading@10
|
2023 if (got_subtitle && sp->sub.format == 0) {
|
yading@10
|
2024 if (sp->sub.pts != AV_NOPTS_VALUE)
|
yading@10
|
2025 pts = sp->sub.pts / (double)AV_TIME_BASE;
|
yading@10
|
2026 sp->pts = pts;
|
yading@10
|
2027
|
yading@10
|
2028 for (i = 0; i < sp->sub.num_rects; i++)
|
yading@10
|
2029 {
|
yading@10
|
2030 for (j = 0; j < sp->sub.rects[i]->nb_colors; j++)
|
yading@10
|
2031 {
|
yading@10
|
2032 RGBA_IN(r, g, b, a, (uint32_t*)sp->sub.rects[i]->pict.data[1] + j);
|
yading@10
|
2033 y = RGB_TO_Y_CCIR(r, g, b);
|
yading@10
|
2034 u = RGB_TO_U_CCIR(r, g, b, 0);
|
yading@10
|
2035 v = RGB_TO_V_CCIR(r, g, b, 0);
|
yading@10
|
2036 YUVA_OUT((uint32_t*)sp->sub.rects[i]->pict.data[1] + j, y, u, v, a);
|
yading@10
|
2037 }
|
yading@10
|
2038 }
|
yading@10
|
2039
|
yading@10
|
2040 /* now we can update the picture count */
|
yading@10
|
2041 if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
|
yading@10
|
2042 is->subpq_windex = 0;
|
yading@10
|
2043 SDL_LockMutex(is->subpq_mutex);
|
yading@10
|
2044 is->subpq_size++;
|
yading@10
|
2045 SDL_UnlockMutex(is->subpq_mutex);
|
yading@10
|
2046 }
|
yading@10
|
2047 av_free_packet(pkt);
|
yading@10
|
2048 }
|
yading@10
|
2049 return 0;
|
yading@10
|
2050 }
|
yading@10
|
2051
|
yading@10
|
2052 /* copy samples for viewing in editor window */
|
yading@10
|
2053 static void update_sample_display(VideoState *is, short *samples, int samples_size)
|
yading@10
|
2054 {
|
yading@10
|
2055 int size, len;
|
yading@10
|
2056
|
yading@10
|
2057 size = samples_size / sizeof(short);
|
yading@10
|
2058 while (size > 0) {
|
yading@10
|
2059 len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
|
yading@10
|
2060 if (len > size)
|
yading@10
|
2061 len = size;
|
yading@10
|
2062 memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
|
yading@10
|
2063 samples += len;
|
yading@10
|
2064 is->sample_array_index += len;
|
yading@10
|
2065 if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
|
yading@10
|
2066 is->sample_array_index = 0;
|
yading@10
|
2067 size -= len;
|
yading@10
|
2068 }
|
yading@10
|
2069 }
|
yading@10
|
2070
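/* synchronize_audio() only adjusts the sample count when audio is not the
 * master clock. It keeps an exponentially weighted average of the A-V
 * difference and, once that average exceeds audio_diff_threshold, requests
 *   wanted_nb_samples = nb_samples + diff * freq
 * clamped to +/- SAMPLE_CORRECTION_PERCENT_MAX percent of nb_samples. As a
 * rough illustrative example (not measured): at 48 kHz, a steady +10 ms drift
 * asks for about 480 extra samples, subject to that clamp. */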
/* return the wanted number of samples to get better sync if sync_type is video
 * or external master clock */
static int synchronize_audio(VideoState *is, int nb_samples)
{
    int wanted_nb_samples = nb_samples;

    /* if not master, then we try to remove or add samples to correct the clock */
    if (get_master_sync_type(is) != AV_SYNC_AUDIO_MASTER) {
        double diff, avg_diff;
        int min_nb_samples, max_nb_samples;

        diff = get_audio_clock(is) - get_master_clock(is);

        if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD) {
            is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
            if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
                /* not enough measures to have a correct estimate */
                is->audio_diff_avg_count++;
            } else {
                /* estimate the A-V difference */
                avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);

                if (fabs(avg_diff) >= is->audio_diff_threshold) {
                    wanted_nb_samples = nb_samples + (int)(diff * is->audio_src.freq);
                    min_nb_samples = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX) / 100));
                    max_nb_samples = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX) / 100));
                    wanted_nb_samples = FFMIN(FFMAX(wanted_nb_samples, min_nb_samples), max_nb_samples);
                }
                av_dlog(NULL, "diff=%f adiff=%f sample_diff=%d apts=%0.3f %f\n",
                        diff, avg_diff, wanted_nb_samples - nb_samples,
                        is->audio_clock, is->audio_diff_threshold);
            }
        } else {
            /* too big difference : may be initial PTS errors, so
               reset A-V filter */
            is->audio_diff_avg_count = 0;
            is->audio_diff_cum       = 0;
        }
    }

    return wanted_nb_samples;
}

/**
 * Decode one audio frame and return its uncompressed size.
 *
 * The processed audio frame is decoded, converted if required, and
 * stored in is->audio_buf, with size in bytes given by the return
 * value.
 */
static int audio_decode_frame(VideoState *is)
{
    AVPacket *pkt_temp = &is->audio_pkt_temp;
    AVPacket *pkt = &is->audio_pkt;
    AVCodecContext *dec = is->audio_st->codec;
    int len1, data_size, resampled_data_size;
    int64_t dec_channel_layout;
    int got_frame;
    av_unused double audio_clock0;
    int new_packet = 0;
    int flush_complete = 0;
    int wanted_nb_samples;
    AVRational tb;
    int ret;
    int reconfigure;

    for (;;) {
        /* NOTE: the audio packet can contain several frames */
        while (pkt_temp->size > 0 || (!pkt_temp->data && new_packet) || is->audio_buf_frames_pending) {
            if (!is->frame) {
                if (!(is->frame = avcodec_alloc_frame()))
                    return AVERROR(ENOMEM);
            } else {
                av_frame_unref(is->frame);
                avcodec_get_frame_defaults(is->frame);
            }

            if (is->audioq.serial != is->audio_pkt_temp_serial)
                break;

            if (is->paused)
                return -1;

            if (!is->audio_buf_frames_pending) {
                if (flush_complete)
                    break;
                new_packet = 0;
                len1 = avcodec_decode_audio4(dec, is->frame, &got_frame, pkt_temp);
                if (len1 < 0) {
                    /* if error, we skip the frame */
                    pkt_temp->size = 0;
                    break;
                }

                pkt_temp->data += len1;
                pkt_temp->size -= len1;

                if (!got_frame) {
                    /* stop sending empty packets if the decoder is finished */
                    if (!pkt_temp->data && dec->codec->capabilities & CODEC_CAP_DELAY)
                        flush_complete = 1;
                    continue;
                }

                tb = (AVRational){1, is->frame->sample_rate};
                if (is->frame->pts != AV_NOPTS_VALUE)
                    is->frame->pts = av_rescale_q(is->frame->pts, dec->time_base, tb);
                if (is->frame->pts == AV_NOPTS_VALUE && pkt_temp->pts != AV_NOPTS_VALUE)
                    is->frame->pts = av_rescale_q(pkt_temp->pts, is->audio_st->time_base, tb);
                if (pkt_temp->pts != AV_NOPTS_VALUE)
                    pkt_temp->pts += (double) is->frame->nb_samples / is->frame->sample_rate / av_q2d(is->audio_st->time_base);

#if CONFIG_AVFILTER
                dec_channel_layout = get_valid_channel_layout(is->frame->channel_layout, av_frame_get_channels(is->frame));

                reconfigure =
                    cmp_audio_fmts(is->audio_filter_src.fmt, is->audio_filter_src.channels,
                                   is->frame->format, av_frame_get_channels(is->frame)) ||
                    is->audio_filter_src.channel_layout != dec_channel_layout ||
                    is->audio_filter_src.freq           != is->frame->sample_rate ||
                    is->audio_pkt_temp_serial           != is->audio_last_serial;

                if (reconfigure) {
                    char buf1[1024], buf2[1024];
                    av_get_channel_layout_string(buf1, sizeof(buf1), -1, is->audio_filter_src.channel_layout);
                    av_get_channel_layout_string(buf2, sizeof(buf2), -1, dec_channel_layout);
                    av_log(NULL, AV_LOG_DEBUG,
                           "Audio frame changed from rate:%d ch:%d fmt:%s layout:%s serial:%d to rate:%d ch:%d fmt:%s layout:%s serial:%d\n",
                           is->audio_filter_src.freq, is->audio_filter_src.channels, av_get_sample_fmt_name(is->audio_filter_src.fmt), buf1, is->audio_last_serial,
                           is->frame->sample_rate, av_frame_get_channels(is->frame), av_get_sample_fmt_name(is->frame->format), buf2, is->audio_pkt_temp_serial);

                    is->audio_filter_src.fmt            = is->frame->format;
                    is->audio_filter_src.channels       = av_frame_get_channels(is->frame);
                    is->audio_filter_src.channel_layout = dec_channel_layout;
                    is->audio_filter_src.freq           = is->frame->sample_rate;
                    is->audio_last_serial               = is->audio_pkt_temp_serial;

                    if ((ret = configure_audio_filters(is, afilters, 1)) < 0)
                        return ret;
                }

                if ((ret = av_buffersrc_add_frame(is->in_audio_filter, is->frame)) < 0)
                    return ret;
                av_frame_unref(is->frame);
#endif
            }
#if CONFIG_AVFILTER
            if ((ret = av_buffersink_get_frame_flags(is->out_audio_filter, is->frame, 0)) < 0) {
                if (ret == AVERROR(EAGAIN)) {
                    is->audio_buf_frames_pending = 0;
                    continue;
                }
                return ret;
            }
            is->audio_buf_frames_pending = 1;
            tb = is->out_audio_filter->inputs[0]->time_base;
#endif

            data_size = av_samples_get_buffer_size(NULL, av_frame_get_channels(is->frame),
                                                   is->frame->nb_samples,
                                                   is->frame->format, 1);

            dec_channel_layout =
                (is->frame->channel_layout && av_frame_get_channels(is->frame) == av_get_channel_layout_nb_channels(is->frame->channel_layout)) ?
                is->frame->channel_layout : av_get_default_channel_layout(av_frame_get_channels(is->frame));
            wanted_nb_samples = synchronize_audio(is, is->frame->nb_samples);

            if (is->frame->format      != is->audio_src.fmt            ||
                dec_channel_layout     != is->audio_src.channel_layout ||
                is->frame->sample_rate != is->audio_src.freq           ||
                (wanted_nb_samples     != is->frame->nb_samples && !is->swr_ctx)) {
                swr_free(&is->swr_ctx);
                is->swr_ctx = swr_alloc_set_opts(NULL,
                                                 is->audio_tgt.channel_layout, is->audio_tgt.fmt, is->audio_tgt.freq,
                                                 dec_channel_layout,           is->frame->format, is->frame->sample_rate,
                                                 0, NULL);
                if (!is->swr_ctx || swr_init(is->swr_ctx) < 0) {
                    fprintf(stderr, "Cannot create sample rate converter for conversion of %d Hz %s %d channels to %d Hz %s %d channels!\n",
                            is->frame->sample_rate, av_get_sample_fmt_name(is->frame->format), av_frame_get_channels(is->frame),
                            is->audio_tgt.freq, av_get_sample_fmt_name(is->audio_tgt.fmt), is->audio_tgt.channels);
                    break;
                }
                is->audio_src.channel_layout = dec_channel_layout;
                is->audio_src.channels       = av_frame_get_channels(is->frame);
                is->audio_src.freq = is->frame->sample_rate;
                is->audio_src.fmt  = is->frame->format;
            }

            if (is->swr_ctx) {
                const uint8_t **in = (const uint8_t **)is->frame->extended_data;
                uint8_t **out = &is->audio_buf1;
                int out_count = (int64_t)wanted_nb_samples * is->audio_tgt.freq / is->frame->sample_rate + 256;
                int out_size  = av_samples_get_buffer_size(NULL, is->audio_tgt.channels, out_count, is->audio_tgt.fmt, 0);
                int len2;
                if (wanted_nb_samples != is->frame->nb_samples) {
                    if (swr_set_compensation(is->swr_ctx, (wanted_nb_samples - is->frame->nb_samples) * is->audio_tgt.freq / is->frame->sample_rate,
                                             wanted_nb_samples * is->audio_tgt.freq / is->frame->sample_rate) < 0) {
                        fprintf(stderr, "swr_set_compensation() failed\n");
                        break;
                    }
                }
                av_fast_malloc(&is->audio_buf1, &is->audio_buf1_size, out_size);
                if (!is->audio_buf1)
                    return AVERROR(ENOMEM);
                len2 = swr_convert(is->swr_ctx, out, out_count, in, is->frame->nb_samples);
                if (len2 < 0) {
                    fprintf(stderr, "swr_convert() failed\n");
                    break;
                }
                if (len2 == out_count) {
                    fprintf(stderr, "warning: audio buffer is probably too small\n");
                    swr_init(is->swr_ctx);
                }
                is->audio_buf = is->audio_buf1;
                resampled_data_size = len2 * is->audio_tgt.channels * av_get_bytes_per_sample(is->audio_tgt.fmt);
            } else {
                is->audio_buf = is->frame->data[0];
                resampled_data_size = data_size;
            }

            audio_clock0 = is->audio_clock;
            /* update the audio clock with the pts */
            if (is->frame->pts != AV_NOPTS_VALUE) {
                is->audio_clock = is->frame->pts * av_q2d(tb) + (double) is->frame->nb_samples / is->frame->sample_rate;
                is->audio_clock_serial = is->audio_pkt_temp_serial;
            }
#ifdef DEBUG
            {
                static double last_clock;
                printf("audio: delay=%0.3f clock=%0.3f clock0=%0.3f\n",
                       is->audio_clock - last_clock,
                       is->audio_clock, audio_clock0);
                last_clock = is->audio_clock;
            }
#endif
            return resampled_data_size;
        }

        /* free the current packet */
        if (pkt->data)
            av_free_packet(pkt);
        memset(pkt_temp, 0, sizeof(*pkt_temp));

        if (is->audioq.abort_request) {
            return -1;
        }

        if (is->audioq.nb_packets == 0)
            SDL_CondSignal(is->continue_read_thread);

        /* read next packet */
        if ((new_packet = packet_queue_get(&is->audioq, pkt, 1, &is->audio_pkt_temp_serial)) < 0)
            return -1;

        if (pkt->data == flush_pkt.data) {
            avcodec_flush_buffers(dec);
            flush_complete = 0;
            is->audio_buf_frames_pending = 0;
        }

        *pkt_temp = *pkt;
    }
}

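/* sdl_audio_callback() refills SDL's buffer from audio_decode_frame() (or
 * with silence on error) and then back-dates the audio clock by the data
 * still sitting in the pipeline:
 *   (2 * audio_hw_buf_size + audio_write_buf_size) / bytes_per_sec.
 * The factor of 2 assumes a double-buffered (two-period) SDL audio driver,
 * as the comment inside the function states. */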
/* prepare a new audio buffer */
static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
{
    VideoState *is = opaque;
    int audio_size, len1;
    int bytes_per_sec;
    int frame_size = av_samples_get_buffer_size(NULL, is->audio_tgt.channels, 1, is->audio_tgt.fmt, 1);

    audio_callback_time = av_gettime();

    while (len > 0) {
        if (is->audio_buf_index >= is->audio_buf_size) {
            audio_size = audio_decode_frame(is);
            if (audio_size < 0) {
                /* if error, just output silence */
                is->audio_buf      = is->silence_buf;
                is->audio_buf_size = sizeof(is->silence_buf) / frame_size * frame_size;
            } else {
                if (is->show_mode != SHOW_MODE_VIDEO)
                    update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
                is->audio_buf_size = audio_size;
            }
            is->audio_buf_index = 0;
        }
        len1 = is->audio_buf_size - is->audio_buf_index;
        if (len1 > len)
            len1 = len;
        memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
        len -= len1;
        stream += len1;
        is->audio_buf_index += len1;
    }
    bytes_per_sec = is->audio_tgt.freq * is->audio_tgt.channels * av_get_bytes_per_sample(is->audio_tgt.fmt);
    is->audio_write_buf_size = is->audio_buf_size - is->audio_buf_index;
    /* Let's assume the audio driver that is used by SDL has two periods. */
    is->audio_current_pts = is->audio_clock - (double)(2 * is->audio_hw_buf_size + is->audio_write_buf_size) / bytes_per_sec;
    is->audio_current_pts_drift = is->audio_current_pts - audio_callback_time / 1000000.0;
    if (is->audioq.serial == is->audio_clock_serial)
        check_external_clock_sync(is, is->audio_current_pts);
}

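/* audio_open() negotiates a signed 16-bit output with SDL: it honours the
 * SDL_AUDIO_CHANNELS environment override, retries SDL_OpenAudio with
 * alternative channel counts from next_nb_channels[] until one is accepted
 * or the table runs out, and records the negotiated parameters in
 * audio_hw_params. On success it returns the SDL hardware buffer size in
 * bytes, otherwise -1. */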
static int audio_open(void *opaque, int64_t wanted_channel_layout, int wanted_nb_channels, int wanted_sample_rate, struct AudioParams *audio_hw_params)
{
    SDL_AudioSpec wanted_spec, spec;
    const char *env;
    const int next_nb_channels[] = {0, 0, 1, 6, 2, 6, 4, 6};

    env = SDL_getenv("SDL_AUDIO_CHANNELS");
    if (env) {
        wanted_nb_channels = atoi(env);
        wanted_channel_layout = av_get_default_channel_layout(wanted_nb_channels);
    }
    if (!wanted_channel_layout || wanted_nb_channels != av_get_channel_layout_nb_channels(wanted_channel_layout)) {
        wanted_channel_layout = av_get_default_channel_layout(wanted_nb_channels);
        wanted_channel_layout &= ~AV_CH_LAYOUT_STEREO_DOWNMIX;
    }
    wanted_spec.channels = av_get_channel_layout_nb_channels(wanted_channel_layout);
    wanted_spec.freq = wanted_sample_rate;
    if (wanted_spec.freq <= 0 || wanted_spec.channels <= 0) {
        fprintf(stderr, "Invalid sample rate or channel count!\n");
        return -1;
    }
    wanted_spec.format = AUDIO_S16SYS;
    wanted_spec.silence = 0;
    wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
    wanted_spec.callback = sdl_audio_callback;
    wanted_spec.userdata = opaque;
    while (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
        fprintf(stderr, "SDL_OpenAudio (%d channels): %s\n", wanted_spec.channels, SDL_GetError());
        wanted_spec.channels = next_nb_channels[FFMIN(7, wanted_spec.channels)];
        if (!wanted_spec.channels) {
            fprintf(stderr, "No more channel combinations to try, audio open failed\n");
            return -1;
        }
        wanted_channel_layout = av_get_default_channel_layout(wanted_spec.channels);
    }
    if (spec.format != AUDIO_S16SYS) {
        fprintf(stderr, "SDL advised audio format %d is not supported!\n", spec.format);
        return -1;
    }
    if (spec.channels != wanted_spec.channels) {
        wanted_channel_layout = av_get_default_channel_layout(spec.channels);
        if (!wanted_channel_layout) {
            fprintf(stderr, "SDL advised channel count %d is not supported!\n", spec.channels);
            return -1;
        }
    }

    audio_hw_params->fmt = AV_SAMPLE_FMT_S16;
    audio_hw_params->freq = spec.freq;
    audio_hw_params->channel_layout = wanted_channel_layout;
    audio_hw_params->channels = spec.channels;
    return spec.size;
}

/* open a given stream. Return 0 if OK */
static int stream_component_open(VideoState *is, int stream_index)
{
    AVFormatContext *ic = is->ic;
    AVCodecContext *avctx;
    AVCodec *codec;
    const char *forced_codec_name = NULL;
    AVDictionary *opts;
    AVDictionaryEntry *t = NULL;
    int sample_rate, nb_channels;
    int64_t channel_layout;
    int ret;

    if (stream_index < 0 || stream_index >= ic->nb_streams)
        return -1;
    avctx = ic->streams[stream_index]->codec;

    codec = avcodec_find_decoder(avctx->codec_id);

    switch (avctx->codec_type) {
        case AVMEDIA_TYPE_AUDIO   : is->last_audio_stream    = stream_index; forced_codec_name =    audio_codec_name; break;
        case AVMEDIA_TYPE_SUBTITLE: is->last_subtitle_stream = stream_index; forced_codec_name = subtitle_codec_name; break;
        case AVMEDIA_TYPE_VIDEO   : is->last_video_stream    = stream_index; forced_codec_name =    video_codec_name; break;
    }
    if (forced_codec_name)
        codec = avcodec_find_decoder_by_name(forced_codec_name);
    if (!codec) {
        if (forced_codec_name) fprintf(stderr, "No codec could be found with name '%s'\n", forced_codec_name);
        else                   fprintf(stderr, "No codec could be found with id %d\n", avctx->codec_id);
        return -1;
    }

    avctx->codec_id = codec->id;
    avctx->workaround_bugs   = workaround_bugs;
    avctx->lowres            = lowres;
    if (avctx->lowres > codec->max_lowres) {
        av_log(avctx, AV_LOG_WARNING, "The maximum value for lowres supported by the decoder is %d\n",
               codec->max_lowres);
        avctx->lowres = codec->max_lowres;
    }
    avctx->idct_algo         = idct;
    avctx->error_concealment = error_concealment;

    if (avctx->lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
    if (fast)          avctx->flags2 |= CODEC_FLAG2_FAST;
    if (codec->capabilities & CODEC_CAP_DR1)
        avctx->flags |= CODEC_FLAG_EMU_EDGE;

    opts = filter_codec_opts(codec_opts, avctx->codec_id, ic, ic->streams[stream_index], codec);
    if (!av_dict_get(opts, "threads", NULL, 0))
        av_dict_set(&opts, "threads", "auto", 0);
    if (avctx->codec_type == AVMEDIA_TYPE_VIDEO || avctx->codec_type == AVMEDIA_TYPE_AUDIO)
        av_dict_set(&opts, "refcounted_frames", "1", 0);
    if (avcodec_open2(avctx, codec, &opts) < 0)
        return -1;
    if ((t = av_dict_get(opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) {
        av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key);
        return AVERROR_OPTION_NOT_FOUND;
    }

    ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
    switch (avctx->codec_type) {
    case AVMEDIA_TYPE_AUDIO:
#if CONFIG_AVFILTER
        {
            AVFilterLink *link;

            is->audio_filter_src.freq           = avctx->sample_rate;
            is->audio_filter_src.channels       = avctx->channels;
            is->audio_filter_src.channel_layout = get_valid_channel_layout(avctx->channel_layout, avctx->channels);
            is->audio_filter_src.fmt            = avctx->sample_fmt;
            if ((ret = configure_audio_filters(is, afilters, 0)) < 0)
                return ret;
            link = is->out_audio_filter->inputs[0];
            sample_rate    = link->sample_rate;
            nb_channels    = link->channels;
            channel_layout = link->channel_layout;
        }
#else
        sample_rate    = avctx->sample_rate;
        nb_channels    = avctx->channels;
        channel_layout = avctx->channel_layout;
#endif

        /* prepare audio output */
        if ((ret = audio_open(is, channel_layout, nb_channels, sample_rate, &is->audio_tgt)) < 0)
            return ret;
        is->audio_hw_buf_size = ret;
        is->audio_src = is->audio_tgt;
        is->audio_buf_size  = 0;
        is->audio_buf_index = 0;

        /* init averaging filter */
        is->audio_diff_avg_coef  = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
        is->audio_diff_avg_count = 0;
|
yading@10
|
2525 /* since we do not have a precise anough audio fifo fullness,
|
yading@10
|
2526 we correct audio sync only if larger than this threshold */
|
yading@10
|
2527 is->audio_diff_threshold = 2.0 * is->audio_hw_buf_size / av_samples_get_buffer_size(NULL, is->audio_tgt.channels, is->audio_tgt.freq, is->audio_tgt.fmt, 1);
|
yading@10
|
2528
|
yading@10
|
2529 memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
|
yading@10
|
2530 memset(&is->audio_pkt_temp, 0, sizeof(is->audio_pkt_temp));
|
yading@10
|
2531
|
yading@10
|
2532 is->audio_stream = stream_index;
|
yading@10
|
2533 is->audio_st = ic->streams[stream_index];
|
yading@10
|
2534
|
yading@10
|
2535 packet_queue_start(&is->audioq);
|
yading@10
|
2536 SDL_PauseAudio(0);
|
yading@10
|
2537 break;
|
yading@10
|
2538 case AVMEDIA_TYPE_VIDEO:
|
yading@10
|
2539 is->video_stream = stream_index;
|
yading@10
|
2540 is->video_st = ic->streams[stream_index];
|
yading@10
|
2541
|
yading@10
|
2542 packet_queue_start(&is->videoq);
|
yading@10
|
2543 is->video_tid = SDL_CreateThread(video_thread, is);
|
yading@10
|
2544 is->queue_attachments_req = 1;
|
yading@10
|
2545 break;
|
yading@10
|
2546 case AVMEDIA_TYPE_SUBTITLE:
|
yading@10
|
2547 is->subtitle_stream = stream_index;
|
yading@10
|
2548 is->subtitle_st = ic->streams[stream_index];
|
yading@10
|
2549 packet_queue_start(&is->subtitleq);
|
yading@10
|
2550
|
yading@10
|
2551 is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
|
yading@10
|
2552 break;
|
yading@10
|
2553 default:
|
yading@10
|
2554 break;
|
yading@10
|
2555 }
|
yading@10
|
2556 return 0;
|
yading@10
|
2557 }
|
yading@10
|
2558
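/* Close a stream component previously opened with stream_component_open():
 * abort and flush its packet queue, stop the associated output or decoder
 * thread, and release the per-stream resources before detaching the stream
 * from the VideoState. */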
static void stream_component_close(VideoState *is, int stream_index)
{
    AVFormatContext *ic = is->ic;
    AVCodecContext *avctx;

    if (stream_index < 0 || stream_index >= ic->nb_streams)
        return;
    avctx = ic->streams[stream_index]->codec;

    switch (avctx->codec_type) {
    case AVMEDIA_TYPE_AUDIO:
        packet_queue_abort(&is->audioq);

        SDL_CloseAudio();

        packet_queue_flush(&is->audioq);
        av_free_packet(&is->audio_pkt);
        swr_free(&is->swr_ctx);
        av_freep(&is->audio_buf1);
        is->audio_buf1_size = 0;
        is->audio_buf = NULL;
        av_frame_free(&is->frame);

        if (is->rdft) {
            av_rdft_end(is->rdft);
            av_freep(&is->rdft_data);
            is->rdft = NULL;
            is->rdft_bits = 0;
        }
#if CONFIG_AVFILTER
        avfilter_graph_free(&is->agraph);
#endif
        break;
    case AVMEDIA_TYPE_VIDEO:
        packet_queue_abort(&is->videoq);

        /* note: we also signal this mutex to make sure we unblock the
           video thread in all cases */
        SDL_LockMutex(is->pictq_mutex);
        SDL_CondSignal(is->pictq_cond);
        SDL_UnlockMutex(is->pictq_mutex);

        SDL_WaitThread(is->video_tid, NULL);

        packet_queue_flush(&is->videoq);
        break;
    case AVMEDIA_TYPE_SUBTITLE:
        packet_queue_abort(&is->subtitleq);

        /* note: we also signal this mutex to make sure we unblock the
           subtitle thread in all cases */
        SDL_LockMutex(is->subpq_mutex);
        is->subtitle_stream_changed = 1;

        SDL_CondSignal(is->subpq_cond);
        SDL_UnlockMutex(is->subpq_mutex);

        SDL_WaitThread(is->subtitle_tid, NULL);

        packet_queue_flush(&is->subtitleq);
        break;
    default:
        break;
    }

    ic->streams[stream_index]->discard = AVDISCARD_ALL;
    avcodec_close(avctx);
    switch (avctx->codec_type) {
    case AVMEDIA_TYPE_AUDIO:
        is->audio_st = NULL;
        is->audio_stream = -1;
        break;
    case AVMEDIA_TYPE_VIDEO:
        is->video_st = NULL;
        is->video_stream = -1;
        break;
    case AVMEDIA_TYPE_SUBTITLE:
        is->subtitle_st = NULL;
        is->subtitle_stream = -1;
        break;
    default:
        break;
    }
}

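/* AVIOInterruptCB callback installed on the demuxer context in read_thread():
 * returning non-zero makes blocking libavformat I/O calls return early once
 * an abort has been requested. */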
static int decode_interrupt_cb(void *ctx)
{
    VideoState *is = ctx;
    return is->abort_request;
}

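/* Heuristic used by read_thread(): RTP/RTSP/SDP inputs and rtp:/udp: URLs
 * are treated as real-time sources, which enables the infinite input buffer
 * unless the user set -infbuf explicitly. */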
static int is_realtime(AVFormatContext *s)
{
    if(   !strcmp(s->iformat->name, "rtp")
       || !strcmp(s->iformat->name, "rtsp")
       || !strcmp(s->iformat->name, "sdp")
    )
        return 1;

    if(s->pb && (   !strncmp(s->filename, "rtp:", 4)
                 || !strncmp(s->filename, "udp:", 4)
                )
    )
        return 1;
    return 0;
}

/* this thread gets the stream from the disk or the network */
static int read_thread(void *arg)
{
    VideoState *is = arg;
    AVFormatContext *ic = NULL;
    int err, i, ret;
    int st_index[AVMEDIA_TYPE_NB];
    AVPacket pkt1, *pkt = &pkt1;
    int eof = 0;
    int pkt_in_play_range = 0;
    AVDictionaryEntry *t;
    AVDictionary **opts;
    int orig_nb_streams;
    SDL_mutex *wait_mutex = SDL_CreateMutex();

    memset(st_index, -1, sizeof(st_index));
    is->last_video_stream = is->video_stream = -1;
    is->last_audio_stream = is->audio_stream = -1;
    is->last_subtitle_stream = is->subtitle_stream = -1;

    ic = avformat_alloc_context();
    ic->interrupt_callback.callback = decode_interrupt_cb;
    ic->interrupt_callback.opaque = is;
    err = avformat_open_input(&ic, is->filename, is->iformat, &format_opts);
    if (err < 0) {
        print_error(is->filename, err);
        ret = -1;
        goto fail;
    }
    if ((t = av_dict_get(format_opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) {
        av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key);
        ret = AVERROR_OPTION_NOT_FOUND;
        goto fail;
    }
    is->ic = ic;

    if (genpts)
        ic->flags |= AVFMT_FLAG_GENPTS;

    opts = setup_find_stream_info_opts(ic, codec_opts);
    orig_nb_streams = ic->nb_streams;

    err = avformat_find_stream_info(ic, opts);
    if (err < 0) {
        fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
        ret = -1;
        goto fail;
    }
    for (i = 0; i < orig_nb_streams; i++)
        av_dict_free(&opts[i]);
    av_freep(&opts);

    if (ic->pb)
        ic->pb->eof_reached = 0; // FIXME hack, ffplay maybe should not use url_feof() to test for the end

    if (seek_by_bytes < 0)
        seek_by_bytes = !!(ic->iformat->flags & AVFMT_TS_DISCONT) && strcmp("ogg", ic->iformat->name);

    is->max_frame_duration = (ic->iformat->flags & AVFMT_TS_DISCONT) ? 10.0 : 3600.0;

    if (!window_title && (t = av_dict_get(ic->metadata, "title", NULL, 0)))
        window_title = av_asprintf("%s - %s", t->value, input_filename);

    /* if seeking requested, we execute it */
    if (start_time != AV_NOPTS_VALUE) {
        int64_t timestamp;

        timestamp = start_time;
        /* add the stream start time */
        if (ic->start_time != AV_NOPTS_VALUE)
            timestamp += ic->start_time;
        ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
        if (ret < 0) {
            fprintf(stderr, "%s: could not seek to position %0.3f\n",
                    is->filename, (double)timestamp / AV_TIME_BASE);
        }
    }

    is->realtime = is_realtime(ic);

    for (i = 0; i < ic->nb_streams; i++)
        ic->streams[i]->discard = AVDISCARD_ALL;
    if (!video_disable)
        st_index[AVMEDIA_TYPE_VIDEO] =
            av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO,
                                wanted_stream[AVMEDIA_TYPE_VIDEO], -1, NULL, 0);
    if (!audio_disable)
        st_index[AVMEDIA_TYPE_AUDIO] =
            av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO,
                                wanted_stream[AVMEDIA_TYPE_AUDIO],
                                st_index[AVMEDIA_TYPE_VIDEO],
                                NULL, 0);
    if (!video_disable && !subtitle_disable)
        st_index[AVMEDIA_TYPE_SUBTITLE] =
            av_find_best_stream(ic, AVMEDIA_TYPE_SUBTITLE,
                                wanted_stream[AVMEDIA_TYPE_SUBTITLE],
                                (st_index[AVMEDIA_TYPE_AUDIO] >= 0 ?
                                 st_index[AVMEDIA_TYPE_AUDIO] :
                                 st_index[AVMEDIA_TYPE_VIDEO]),
                                NULL, 0);
    if (show_status) {
        av_dump_format(ic, 0, is->filename, 0);
    }

    is->show_mode = show_mode;

    /* open the streams */
    if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
        stream_component_open(is, st_index[AVMEDIA_TYPE_AUDIO]);
    }

    ret = -1;
    if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
        ret = stream_component_open(is, st_index[AVMEDIA_TYPE_VIDEO]);
    }
    if (is->show_mode == SHOW_MODE_NONE)
        is->show_mode = ret >= 0 ? SHOW_MODE_VIDEO : SHOW_MODE_RDFT;

    if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
        stream_component_open(is, st_index[AVMEDIA_TYPE_SUBTITLE]);
    }

    if (is->video_stream < 0 && is->audio_stream < 0) {
        fprintf(stderr, "%s: could not open codecs\n", is->filename);
        ret = -1;
        goto fail;
    }

    if (infinite_buffer < 0 && is->realtime)
        infinite_buffer = 1;

    for (;;) {
        if (is->abort_request)
            break;
        if (is->paused != is->last_paused) {
            is->last_paused = is->paused;
            if (is->paused)
                is->read_pause_return = av_read_pause(ic);
            else
                av_read_play(ic);
        }
#if CONFIG_RTSP_DEMUXER || CONFIG_MMSH_PROTOCOL
        if (is->paused &&
                (!strcmp(ic->iformat->name, "rtsp") ||
                 (ic->pb && !strncmp(input_filename, "mmsh:", 5)))) {
            /* wait 10 ms to avoid trying to get another packet */
            /* XXX: horrible */
            SDL_Delay(10);
            continue;
        }
#endif
        if (is->seek_req) {
            int64_t seek_target = is->seek_pos;
            int64_t seek_min    = is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN;
            int64_t seek_max    = is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX;
            // FIXME the +-2 is due to rounding being not done in the correct direction in generation
            //      of the seek_pos/seek_rel variables

            ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
            if (ret < 0) {
                fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
            } else {
                if (is->audio_stream >= 0) {
                    packet_queue_flush(&is->audioq);
                    packet_queue_put(&is->audioq, &flush_pkt);
                }
                if (is->subtitle_stream >= 0) {
                    packet_queue_flush(&is->subtitleq);
                    packet_queue_put(&is->subtitleq, &flush_pkt);
                }
                if (is->video_stream >= 0) {
                    packet_queue_flush(&is->videoq);
                    packet_queue_put(&is->videoq, &flush_pkt);
                }
                if (is->seek_flags & AVSEEK_FLAG_BYTE) {
                    update_external_clock_pts(is, NAN);
                } else {
                    update_external_clock_pts(is, seek_target / (double)AV_TIME_BASE);
                }
            }
            is->seek_req = 0;
            is->queue_attachments_req = 1;
            eof = 0;
            if (is->paused)
                step_to_next_frame(is);
        }
        if (is->queue_attachments_req) {
            if (is->video_st && is->video_st->disposition & AV_DISPOSITION_ATTACHED_PIC) {
                AVPacket copy;
                if ((ret = av_copy_packet(&copy, &is->video_st->attached_pic)) < 0)
                    goto fail;
                packet_queue_put(&is->videoq, &copy);
            }
            is->queue_attachments_req = 0;
        }

        /* if the queues are full, no need to read more */
        if (infinite_buffer<1 &&
              (is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
            || (   (is->audioq   .nb_packets > MIN_FRAMES || is->audio_stream < 0 || is->audioq.abort_request)
                && (is->videoq   .nb_packets > MIN_FRAMES || is->video_stream < 0 || is->videoq.abort_request
                    || (is->video_st->disposition & AV_DISPOSITION_ATTACHED_PIC))
                && (is->subtitleq.nb_packets > MIN_FRAMES || is->subtitle_stream < 0 || is->subtitleq.abort_request)))) {
            /* wait 10 ms */
            SDL_LockMutex(wait_mutex);
            SDL_CondWaitTimeout(is->continue_read_thread, wait_mutex, 10);
            SDL_UnlockMutex(wait_mutex);
            continue;
        }
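        /* At end of file, queue empty packets so decoders that declare
           CODEC_CAP_DELAY can drain their buffered frames, then either loop
           back to the start (-loop) or quit (-autoexit). */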
        if (eof) {
            if (is->video_stream >= 0) {
                av_init_packet(pkt);
                pkt->data = NULL;
                pkt->size = 0;
                pkt->stream_index = is->video_stream;
                packet_queue_put(&is->videoq, pkt);
            }
            if (is->audio_stream >= 0 &&
                is->audio_st->codec->codec->capabilities & CODEC_CAP_DELAY) {
                av_init_packet(pkt);
                pkt->data = NULL;
                pkt->size = 0;
                pkt->stream_index = is->audio_stream;
                packet_queue_put(&is->audioq, pkt);
            }
            SDL_Delay(10);
            if (is->audioq.size + is->videoq.size + is->subtitleq.size == 0) {
                if (loop != 1 && (!loop || --loop)) {
                    stream_seek(is, start_time != AV_NOPTS_VALUE ? start_time : 0, 0, 0);
                } else if (autoexit) {
                    ret = AVERROR_EOF;
                    goto fail;
                }
            }
            eof = 0;
            continue;
        }
        ret = av_read_frame(ic, pkt);
        if (ret < 0) {
            if (ret == AVERROR_EOF || url_feof(ic->pb))
                eof = 1;
            if (ic->pb && ic->pb->error)
                break;
            SDL_LockMutex(wait_mutex);
            SDL_CondWaitTimeout(is->continue_read_thread, wait_mutex, 10);
            SDL_UnlockMutex(wait_mutex);
            continue;
        }
        /* check if packet is in play range specified by user, then queue, otherwise discard */
        pkt_in_play_range = duration == AV_NOPTS_VALUE ||
                (pkt->pts - ic->streams[pkt->stream_index]->start_time) *
                av_q2d(ic->streams[pkt->stream_index]->time_base) -
                (double)(start_time != AV_NOPTS_VALUE ? start_time : 0) / 1000000
                <= ((double)duration / 1000000);
        if (pkt->stream_index == is->audio_stream && pkt_in_play_range) {
            packet_queue_put(&is->audioq, pkt);
        } else if (pkt->stream_index == is->video_stream && pkt_in_play_range
                   && !(is->video_st->disposition & AV_DISPOSITION_ATTACHED_PIC)) {
            packet_queue_put(&is->videoq, pkt);
        } else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) {
            packet_queue_put(&is->subtitleq, pkt);
        } else {
            av_free_packet(pkt);
        }
    }
    /* wait until the end */
    while (!is->abort_request) {
        SDL_Delay(100);
    }

    ret = 0;
 fail:
    /* close each stream */
    if (is->audio_stream >= 0)
        stream_component_close(is, is->audio_stream);
    if (is->video_stream >= 0)
        stream_component_close(is, is->video_stream);
    if (is->subtitle_stream >= 0)
        stream_component_close(is, is->subtitle_stream);
    if (is->ic) {
        avformat_close_input(&is->ic);
    }

    if (ret != 0) {
        SDL_Event event;

        event.type = FF_QUIT_EVENT;
        event.user.data1 = is;
        SDL_PushEvent(&event);
    }
    SDL_DestroyMutex(wait_mutex);
    return 0;
}

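/* Allocate and initialize a VideoState for the given input: set up the
 * packet queues, clocks and synchronization primitives, then spawn
 * read_thread() to do the actual demuxing. Returns NULL on failure. */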
static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
{
    VideoState *is;

    is = av_mallocz(sizeof(VideoState));
    if (!is)
        return NULL;
    av_strlcpy(is->filename, filename, sizeof(is->filename));
    is->iformat = iformat;
    is->ytop    = 0;
    is->xleft   = 0;

    /* start video display */
    is->pictq_mutex = SDL_CreateMutex();
    is->pictq_cond  = SDL_CreateCond();

    is->subpq_mutex = SDL_CreateMutex();
    is->subpq_cond  = SDL_CreateCond();

    packet_queue_init(&is->videoq);
    packet_queue_init(&is->audioq);
    packet_queue_init(&is->subtitleq);

    is->continue_read_thread = SDL_CreateCond();

    update_external_clock_pts(is, NAN);
    update_external_clock_speed(is, 1.0);
    is->audio_current_pts_drift = -av_gettime() / 1000000.0;
    is->video_current_pts_drift = is->audio_current_pts_drift;
    is->audio_clock_serial = -1;
    is->video_clock_serial = -1;
    is->audio_last_serial = -1;
    is->av_sync_type = av_sync_type;
    is->read_tid     = SDL_CreateThread(read_thread, is);
    if (!is->read_tid) {
        av_free(is);
        return NULL;
    }
    return is;
}

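/* Cycle to the next usable stream of the given type, starting after the last
 * selected index and wrapping around; for subtitles the cycle also includes
 * "no stream". The old component is closed before the new one is opened. */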
static void stream_cycle_channel(VideoState *is, int codec_type)
{
    AVFormatContext *ic = is->ic;
    int start_index, stream_index;
    int old_index;
    AVStream *st;

    if (codec_type == AVMEDIA_TYPE_VIDEO) {
        start_index = is->last_video_stream;
        old_index = is->video_stream;
    } else if (codec_type == AVMEDIA_TYPE_AUDIO) {
        start_index = is->last_audio_stream;
        old_index = is->audio_stream;
    } else {
        start_index = is->last_subtitle_stream;
        old_index = is->subtitle_stream;
    }
    stream_index = start_index;
    for (;;) {
        if (++stream_index >= is->ic->nb_streams)
        {
            if (codec_type == AVMEDIA_TYPE_SUBTITLE)
            {
                stream_index = -1;
                is->last_subtitle_stream = -1;
                goto the_end;
            }
            if (start_index == -1)
                return;
            stream_index = 0;
        }
        if (stream_index == start_index)
            return;
        st = ic->streams[stream_index];
        if (st->codec->codec_type == codec_type) {
            /* check that parameters are OK */
            switch (codec_type) {
            case AVMEDIA_TYPE_AUDIO:
                if (st->codec->sample_rate != 0 &&
                    st->codec->channels != 0)
                    goto the_end;
                break;
            case AVMEDIA_TYPE_VIDEO:
            case AVMEDIA_TYPE_SUBTITLE:
                goto the_end;
            default:
                break;
            }
        }
    }
 the_end:
    stream_component_close(is, old_index);
    stream_component_open(is, stream_index);
}


static void toggle_full_screen(VideoState *is)
{
#if defined(__APPLE__) && SDL_VERSION_ATLEAST(1, 2, 14)
    /* OS X needs to reallocate the SDL overlays */
    int i;
    for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++)
        is->pictq[i].reallocate = 1;
#endif
    is_full_screen = !is_full_screen;
    video_open(is, 1, NULL);
}

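/* Step to the next display mode (video, audio waves, RDFT spectrum),
 * skipping modes whose required stream is not available, and force a
 * refresh so the change becomes visible immediately. */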
static void toggle_audio_display(VideoState *is)
{
    int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
    int next = is->show_mode;
    do {
        next = (next + 1) % SHOW_MODE_NB;
    } while (next != is->show_mode && (next == SHOW_MODE_VIDEO && !is->video_st || next != SHOW_MODE_VIDEO && !is->audio_st));
    if (is->show_mode != next) {
        fill_rectangle(screen,
                       is->xleft, is->ytop, is->width, is->height,
                       bgcolor, 1);
        is->force_refresh = 1;
        is->show_mode = next;
    }
}

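/* Pump SDL events until one is available, hiding the mouse cursor after
 * CURSOR_HIDE_DELAY and calling video_refresh() at the pace it requests
 * (via remaining_time) whenever playback is not paused. */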
static void refresh_loop_wait_event(VideoState *is, SDL_Event *event) {
    double remaining_time = 0.0;
    SDL_PumpEvents();
    while (!SDL_PeepEvents(event, 1, SDL_GETEVENT, SDL_ALLEVENTS)) {
        if (!cursor_hidden && av_gettime() - cursor_last_shown > CURSOR_HIDE_DELAY) {
            SDL_ShowCursor(0);
            cursor_hidden = 1;
        }
        if (remaining_time > 0.0)
            av_usleep((int64_t)(remaining_time * 1000000.0));
        remaining_time = REFRESH_RATE;
        if (is->show_mode != SHOW_MODE_NONE && (!is->paused || is->force_refresh))
            video_refresh(is, &remaining_time);
        SDL_PumpEvents();
    }
}

/* handle an event sent by the GUI */
static void event_loop(VideoState *cur_stream)
{
    SDL_Event event;
    double incr, pos, frac;

    for (;;) {
        double x;
        refresh_loop_wait_event(cur_stream, &event);
        switch (event.type) {
        case SDL_KEYDOWN:
            if (exit_on_keydown) {
                do_exit(cur_stream);
                break;
            }
            switch (event.key.keysym.sym) {
            case SDLK_ESCAPE:
            case SDLK_q:
                do_exit(cur_stream);
                break;
            case SDLK_f:
                toggle_full_screen(cur_stream);
                cur_stream->force_refresh = 1;
                break;
            case SDLK_p:
            case SDLK_SPACE:
                toggle_pause(cur_stream);
                break;
            case SDLK_s: // S: Step to next frame
                step_to_next_frame(cur_stream);
                break;
            case SDLK_a:
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
                break;
            case SDLK_v:
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
                break;
            case SDLK_t:
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
                break;
            case SDLK_w:
                toggle_audio_display(cur_stream);
                break;
            case SDLK_PAGEUP:
                incr = 600.0;
                goto do_seek;
            case SDLK_PAGEDOWN:
                incr = -600.0;
                goto do_seek;
            case SDLK_LEFT:
                incr = -10.0;
                goto do_seek;
            case SDLK_RIGHT:
                incr = 10.0;
                goto do_seek;
            case SDLK_UP:
                incr = 60.0;
                goto do_seek;
            case SDLK_DOWN:
                incr = -60.0;
            do_seek:
                    if (seek_by_bytes) {
                        if (cur_stream->video_stream >= 0 && cur_stream->video_current_pos >= 0) {
                            pos = cur_stream->video_current_pos;
                        } else if (cur_stream->audio_stream >= 0 && cur_stream->audio_pkt.pos >= 0) {
                            pos = cur_stream->audio_pkt.pos;
                        } else
                            pos = avio_tell(cur_stream->ic->pb);
                        if (cur_stream->ic->bit_rate)
                            incr *= cur_stream->ic->bit_rate / 8.0;
                        else
                            incr *= 180000.0;
                        pos += incr;
                        stream_seek(cur_stream, pos, incr, 1);
                    } else {
                        pos = get_master_clock(cur_stream);
                        if (isnan(pos))
                            pos = (double)cur_stream->seek_pos / AV_TIME_BASE;
                        pos += incr;
                        if (cur_stream->ic->start_time != AV_NOPTS_VALUE && pos < cur_stream->ic->start_time / (double)AV_TIME_BASE)
                            pos = cur_stream->ic->start_time / (double)AV_TIME_BASE;
                        stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
                    }
                break;
            default:
                break;
            }
            break;
        case SDL_VIDEOEXPOSE:
            cur_stream->force_refresh = 1;
            break;
        case SDL_MOUSEBUTTONDOWN:
            if (exit_on_mousedown) {
                do_exit(cur_stream);
                break;
            }
        case SDL_MOUSEMOTION:
            if (cursor_hidden) {
                SDL_ShowCursor(1);
                cursor_hidden = 0;
            }
            cursor_last_shown = av_gettime();
            if (event.type == SDL_MOUSEBUTTONDOWN) {
                x = event.button.x;
            } else {
                if (event.motion.state != SDL_PRESSED)
                    break;
                x = event.motion.x;
            }
            if (seek_by_bytes || cur_stream->ic->duration <= 0) {
                uint64_t size = avio_size(cur_stream->ic->pb);
                stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
            } else {
                int64_t ts;
                int ns, hh, mm, ss;
                int tns, thh, tmm, tss;
                tns  = cur_stream->ic->duration / 1000000LL;
                thh  = tns / 3600;
                tmm  = (tns % 3600) / 60;
                tss  = (tns % 60);
                frac = x / cur_stream->width;
                ns   = frac * tns;
                hh   = ns / 3600;
                mm   = (ns % 3600) / 60;
                ss   = (ns % 60);
                fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d) \n", frac*100,
                        hh, mm, ss, thh, tmm, tss);
                ts = frac * cur_stream->ic->duration;
                if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
                    ts += cur_stream->ic->start_time;
                stream_seek(cur_stream, ts, 0, 0);
            }
            break;
        case SDL_VIDEORESIZE:
            screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
                                      SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
            screen_width  = cur_stream->width  = event.resize.w;
            screen_height = cur_stream->height = event.resize.h;
            cur_stream->force_refresh = 1;
            break;
        case SDL_QUIT:
        case FF_QUIT_EVENT:
            do_exit(cur_stream);
            break;
        case FF_ALLOC_EVENT:
            alloc_picture(event.user.data1);
            break;
        default:
            break;
        }
    }
}

static int opt_frame_size(void *optctx, const char *opt, const char *arg)
{
    av_log(NULL, AV_LOG_WARNING, "Option -s is deprecated, use -video_size.\n");
    return opt_default(NULL, "video_size", arg);
}

static int opt_width(void *optctx, const char *opt, const char *arg)
{
    screen_width = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
    return 0;
}

static int opt_height(void *optctx, const char *opt, const char *arg)
{
    screen_height = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
    return 0;
}

static int opt_format(void *optctx, const char *opt, const char *arg)
{
    file_iformat = av_find_input_format(arg);
    if (!file_iformat) {
        fprintf(stderr, "Unknown input format: %s\n", arg);
        return AVERROR(EINVAL);
    }
    return 0;
}

static int opt_frame_pix_fmt(void *optctx, const char *opt, const char *arg)
{
    av_log(NULL, AV_LOG_WARNING, "Option -pix_fmt is deprecated, use -pixel_format.\n");
    return opt_default(NULL, "pixel_format", arg);
}

static int opt_sync(void *optctx, const char *opt, const char *arg)
{
    if (!strcmp(arg, "audio"))
        av_sync_type = AV_SYNC_AUDIO_MASTER;
    else if (!strcmp(arg, "video"))
        av_sync_type = AV_SYNC_VIDEO_MASTER;
    else if (!strcmp(arg, "ext"))
        av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
    else {
        fprintf(stderr, "Unknown value for %s: %s\n", opt, arg);
        exit(1);
    }
    return 0;
}

static int opt_seek(void *optctx, const char *opt, const char *arg)
{
    start_time = parse_time_or_die(opt, arg, 1);
    return 0;
}

static int opt_duration(void *optctx, const char *opt, const char *arg)
{
    duration = parse_time_or_die(opt, arg, 1);
    return 0;
}

static int opt_show_mode(void *optctx, const char *opt, const char *arg)
{
    show_mode = !strcmp(arg, "video") ? SHOW_MODE_VIDEO :
                !strcmp(arg, "waves") ? SHOW_MODE_WAVES :
                !strcmp(arg, "rdft" ) ? SHOW_MODE_RDFT  :
                parse_number_or_die(opt, arg, OPT_INT, 0, SHOW_MODE_NB-1);
    return 0;
}

static void opt_input_file(void *optctx, const char *filename)
{
    if (input_filename) {
        fprintf(stderr, "Argument '%s' provided as input filename, but '%s' was already specified.\n",
                filename, input_filename);
        exit(1);
    }
    if (!strcmp(filename, "-"))
        filename = "pipe:";
    input_filename = filename;
}

static int opt_codec(void *optctx, const char *opt, const char *arg)
{
    const char *spec = strchr(opt, ':');
    if (!spec) {
        fprintf(stderr, "No media specifier was specified in '%s' in option '%s'\n",
                arg, opt);
        return AVERROR(EINVAL);
    }
    spec++;
    switch (spec[0]) {
    case 'a' :    audio_codec_name = arg; break;
    case 's' : subtitle_codec_name = arg; break;
    case 'v' :    video_codec_name = arg; break;
    default:
        fprintf(stderr, "Invalid media specifier '%s' in option '%s'\n", spec, opt);
        return AVERROR(EINVAL);
    }
    return 0;
}

static int dummy;

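/* Command line option table; the generic options shared by the FFmpeg tools
 * are pulled in from cmdutils_common_opts.h. */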
static const OptionDef options[] = {
#include "cmdutils_common_opts.h"
    { "x", HAS_ARG, { .func_arg = opt_width }, "force displayed width", "width" },
    { "y", HAS_ARG, { .func_arg = opt_height }, "force displayed height", "height" },
    { "s", HAS_ARG | OPT_VIDEO, { .func_arg = opt_frame_size }, "set frame size (WxH or abbreviation)", "size" },
    { "fs", OPT_BOOL, { &is_full_screen }, "force full screen" },
    { "an", OPT_BOOL, { &audio_disable }, "disable audio" },
    { "vn", OPT_BOOL, { &video_disable }, "disable video" },
    { "sn", OPT_BOOL, { &subtitle_disable }, "disable subtitling" },
    { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, { &wanted_stream[AVMEDIA_TYPE_AUDIO] }, "select desired audio stream", "stream_number" },
    { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, { &wanted_stream[AVMEDIA_TYPE_VIDEO] }, "select desired video stream", "stream_number" },
    { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, { &wanted_stream[AVMEDIA_TYPE_SUBTITLE] }, "select desired subtitle stream", "stream_number" },
    { "ss", HAS_ARG, { .func_arg = opt_seek }, "seek to a given position in seconds", "pos" },
    { "t", HAS_ARG, { .func_arg = opt_duration }, "play \"duration\" seconds of audio/video", "duration" },
    { "bytes", OPT_INT | HAS_ARG, { &seek_by_bytes }, "seek by bytes 0=off 1=on -1=auto", "val" },
    { "nodisp", OPT_BOOL, { &display_disable }, "disable graphical display" },
    { "f", HAS_ARG, { .func_arg = opt_format }, "force format", "fmt" },
    { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, { .func_arg = opt_frame_pix_fmt }, "set pixel format", "format" },
    { "stats", OPT_BOOL | OPT_EXPERT, { &show_status }, "show status", "" },
    { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, { &workaround_bugs }, "workaround bugs", "" },
    { "fast", OPT_BOOL | OPT_EXPERT, { &fast }, "non spec compliant optimizations", "" },
    { "genpts", OPT_BOOL | OPT_EXPERT, { &genpts }, "generate pts", "" },
    { "drp", OPT_INT | HAS_ARG | OPT_EXPERT, { &decoder_reorder_pts }, "let decoder reorder pts 0=off 1=on -1=auto", ""},
    { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, { &lowres }, "", "" },
    { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, { &idct }, "set idct algo", "algo" },
    { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, { &error_concealment }, "set error concealment options", "bit_mask" },
    { "sync", HAS_ARG | OPT_EXPERT, { .func_arg = opt_sync }, "set audio-video sync. type (type=audio/video/ext)", "type" },
    { "autoexit", OPT_BOOL | OPT_EXPERT, { &autoexit }, "exit at the end", "" },
    { "exitonkeydown", OPT_BOOL | OPT_EXPERT, { &exit_on_keydown }, "exit on key down", "" },
    { "exitonmousedown", OPT_BOOL | OPT_EXPERT, { &exit_on_mousedown }, "exit on mouse down", "" },
    { "loop", OPT_INT | HAS_ARG | OPT_EXPERT, { &loop }, "set number of times the playback shall be looped", "loop count" },
    { "framedrop", OPT_BOOL | OPT_EXPERT, { &framedrop }, "drop frames when cpu is too slow", "" },
    { "infbuf", OPT_BOOL | OPT_EXPERT, { &infinite_buffer }, "don't limit the input buffer size (useful with realtime streams)", "" },
    { "window_title", OPT_STRING | HAS_ARG, { &window_title }, "set window title", "window title" },
#if CONFIG_AVFILTER
    { "vf", OPT_STRING | HAS_ARG, { &vfilters }, "set video filters", "filter_graph" },
    { "af", OPT_STRING | HAS_ARG, { &afilters }, "set audio filters", "filter_graph" },
#endif
    { "rdftspeed", OPT_INT | HAS_ARG| OPT_AUDIO | OPT_EXPERT, { &rdftspeed }, "rdft speed", "msecs" },
    { "showmode", HAS_ARG, { .func_arg = opt_show_mode}, "select show mode (0 = video, 1 = waves, 2 = RDFT)", "mode" },
    { "default", HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, { .func_arg = opt_default }, "generic catch all option", "" },
    { "i", OPT_BOOL, { &dummy}, "read specified file", "input_file"},
    { "codec", HAS_ARG, { .func_arg = opt_codec}, "force decoder", "decoder_name" },
    { "acodec", HAS_ARG | OPT_STRING | OPT_EXPERT, { &audio_codec_name }, "force audio decoder", "decoder_name" },
    { "scodec", HAS_ARG | OPT_STRING | OPT_EXPERT, { &subtitle_codec_name }, "force subtitle decoder", "decoder_name" },
    { "vcodec", HAS_ARG | OPT_STRING | OPT_EXPERT, { &video_codec_name }, "force video decoder", "decoder_name" },
    { NULL, },
};

static void show_usage(void)
{
    av_log(NULL, AV_LOG_INFO, "Simple media player\n");
    av_log(NULL, AV_LOG_INFO, "usage: %s [options] input_file\n", program_name);
    av_log(NULL, AV_LOG_INFO, "\n");
}

void show_help_default(const char *opt, const char *arg)
{
    av_log_set_callback(log_callback_help);
    show_usage();
    show_help_options(options, "Main options:", 0, OPT_EXPERT, 0);
    show_help_options(options, "Advanced options:", OPT_EXPERT, 0, 0);
    printf("\n");
    show_help_children(avcodec_get_class(), AV_OPT_FLAG_DECODING_PARAM);
    show_help_children(avformat_get_class(), AV_OPT_FLAG_DECODING_PARAM);
#if !CONFIG_AVFILTER
    show_help_children(sws_get_class(), AV_OPT_FLAG_ENCODING_PARAM);
#else
    show_help_children(avfilter_get_class(), AV_OPT_FLAG_FILTERING_PARAM);
#endif
    printf("\nWhile playing:\n"
           "q, ESC              quit\n"
           "f                   toggle full screen\n"
           "p, SPC              pause\n"
           "a                   cycle audio channel\n"
           "v                   cycle video channel\n"
           "t                   cycle subtitle channel\n"
           "w                   show audio waves\n"
           "s                   activate frame-step mode\n"
           "left/right          seek backward/forward 10 seconds\n"
           "down/up             seek backward/forward 1 minute\n"
           "page down/page up   seek backward/forward 10 minutes\n"
           "mouse click         seek to percentage in file corresponding to fraction of width\n"
           );
}

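/* Lock manager callback registered with av_lockmgr_register() in main():
 * implements the create/obtain/release/destroy operations on top of SDL
 * mutexes, returning 0 on success and non-zero on failure. */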
static int lockmgr(void **mtx, enum AVLockOp op)
{
    switch (op) {
    case AV_LOCK_CREATE:
        *mtx = SDL_CreateMutex();
        if (!*mtx)
            return 1;
        return 0;
    case AV_LOCK_OBTAIN:
        return !!SDL_LockMutex(*mtx);
    case AV_LOCK_RELEASE:
        return !!SDL_UnlockMutex(*mtx);
    case AV_LOCK_DESTROY:
        SDL_DestroyMutex(*mtx);
        return 0;
    }
    return 1;
}

/* Called from the main */
int main(int argc, char **argv)
{
    int flags;
    VideoState *is;
    char dummy_videodriver[] = "SDL_VIDEODRIVER=dummy";

    av_log_set_flags(AV_LOG_SKIP_REPEATED);
    parse_loglevel(argc, argv, options);

    /* register all codecs, demux and protocols */
    avcodec_register_all();
#if CONFIG_AVDEVICE
    avdevice_register_all();
#endif
#if CONFIG_AVFILTER
    avfilter_register_all();
#endif
    av_register_all();
    avformat_network_init();

    init_opts();

    signal(SIGINT , sigterm_handler); /* Interrupt (ANSI).    */
    signal(SIGTERM, sigterm_handler); /* Termination (ANSI).  */

    show_banner(argc, argv, options);

    parse_options(NULL, argc, argv, options, opt_input_file);

    if (!input_filename) {
        show_usage();
        fprintf(stderr, "An input file must be specified\n");
        fprintf(stderr, "Use -h to get full help or, even better, run 'man %s'\n", program_name);
        exit(1);
    }

    if (display_disable) {
        video_disable = 1;
    }
    flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
    if (audio_disable)
        flags &= ~SDL_INIT_AUDIO;
    if (display_disable)
        SDL_putenv(dummy_videodriver); /* For the event queue, we always need a video driver. */
#if !defined(__MINGW32__) && !defined(__APPLE__)
    flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */
#endif
    if (SDL_Init (flags)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        fprintf(stderr, "(Did you set the DISPLAY variable?)\n");
        exit(1);
    }

    if (!display_disable) {
        const SDL_VideoInfo *vi = SDL_GetVideoInfo();
        fs_screen_width = vi->current_w;
        fs_screen_height = vi->current_h;
    }

    SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
    SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
    SDL_EventState(SDL_USEREVENT, SDL_IGNORE);

    if (av_lockmgr_register(lockmgr)) {
        fprintf(stderr, "Could not initialize lock manager!\n");
        do_exit(NULL);
    }

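    /* flush_pkt is a sentinel packet pushed into the packet queues right
       after a seek (see read_thread) to mark the discontinuity; only its
       data pointer needs to be distinctive. */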
    av_init_packet(&flush_pkt);
    flush_pkt.data = (char *)(intptr_t)"FLUSH";

    is = stream_open(input_filename, file_iformat);
    if (!is) {
        fprintf(stderr, "Failed to initialize VideoState!\n");
        do_exit(NULL);
    }

    event_loop(is);

    /* never returns */

    return 0;
}