/*
 * Copyright (c) 2000-2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * multimedia converter based on the FFmpeg libraries
 */

#include "config.h"
#include <ctype.h>
#include <string.h>
#include <math.h>
#include <stdlib.h>
#include <errno.h>
#include <limits.h>
#if HAVE_ISATTY
#if HAVE_IO_H
#include <io.h>
#endif
#if HAVE_UNISTD_H
#include <unistd.h>
#endif
#endif
#include "libavformat/avformat.h"
#include "libavdevice/avdevice.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
#include "libavutil/opt.h"
#include "libavutil/channel_layout.h"
#include "libavutil/parseutils.h"
#include "libavutil/samplefmt.h"
#include "libavutil/colorspace.h"
#include "libavutil/fifo.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/dict.h"
#include "libavutil/mathematics.h"
#include "libavutil/pixdesc.h"
#include "libavutil/avstring.h"
#include "libavutil/libm.h"
#include "libavutil/imgutils.h"
#include "libavutil/timestamp.h"
#include "libavutil/bprint.h"
#include "libavutil/time.h"
#include "libavformat/os_support.h"

#include "libavformat/ffm.h" // not public API

# include "libavfilter/avcodec.h"
# include "libavfilter/avfilter.h"
# include "libavfilter/buffersrc.h"
# include "libavfilter/buffersink.h"

#if HAVE_SYS_RESOURCE_H
#include <sys/time.h>
#include <sys/types.h>
#include <sys/resource.h>
#elif HAVE_GETPROCESSTIMES
#include <windows.h>
#endif
#if HAVE_GETPROCESSMEMORYINFO
#include <windows.h>
#include <psapi.h>
#endif

#if HAVE_SYS_SELECT_H
#include <sys/select.h>
#endif

#if HAVE_TERMIOS_H
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/time.h>
#include <termios.h>
#elif HAVE_KBHIT
#include <conio.h>
#endif

#if HAVE_PTHREADS
#include <pthread.h>
#endif

#include <time.h>

#include "ffmpeg.h"
#include "cmdutils.h"

#include "libavutil/avassert.h"

const char program_name[] = "ffmpeg";
const int program_birth_year = 2000;

static FILE *vstats_file;

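/* Names of the constants available to -force_key_frames expressions,
 * e.g. -force_key_frames expr:gte(t,n_forced*5). The order here is
 * expected to match the FKF_* indices (see ffmpeg.h) used when the
 * expression is evaluated in do_video_out(). */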
const char *const forced_keyframes_const_names[] = {
    "n",
    "n_forced",
    "prev_forced_n",
    "prev_forced_t",
    "t",
    NULL
};

static void do_video_stats(OutputStream *ost, int frame_size);
static int64_t getutime(void);
static int64_t getmaxrss(void);

static int run_as_daemon  = 0;
static int64_t video_size = 0;
static int64_t audio_size = 0;
static int64_t subtitle_size = 0;
static int64_t extra_size = 0;
static int nb_frames_dup = 0;
static int nb_frames_drop = 0;
static int64_t decode_error_stat[2];

static int current_time;
AVIOContext *progress_avio = NULL;

static uint8_t *subtitle_out;

#if HAVE_PTHREADS
/* signal to input threads that they should exit; set by the main thread */
static int transcoding_finished;
#endif

#define DEFAULT_PASS_LOGFILENAME_PREFIX "ffmpeg2pass"

InputStream **input_streams = NULL;
int        nb_input_streams = 0;
InputFile   **input_files   = NULL;
int        nb_input_files   = 0;

OutputStream **output_streams = NULL;
int         nb_output_streams = 0;
OutputFile   **output_files   = NULL;
int         nb_output_files   = 0;

FilterGraph **filtergraphs;
int        nb_filtergraphs;

#if HAVE_TERMIOS_H

/* init terminal so that we can grab keys */
static struct termios oldtty;
static int restore_tty;
#endif


/* sub2video hack:
   Convert subtitles to video with alpha to insert them in filter graphs.
   This is a temporary solution until libavfilter gets real subtitles support.
 */

static int sub2video_get_blank_frame(InputStream *ist)
{
    int ret;
    AVFrame *frame = ist->sub2video.frame;

    av_frame_unref(frame);
    ist->sub2video.frame->width  = ist->sub2video.w;
    ist->sub2video.frame->height = ist->sub2video.h;
    ist->sub2video.frame->format = AV_PIX_FMT_RGB32;
    if ((ret = av_frame_get_buffer(frame, 32)) < 0)
        return ret;
    memset(frame->data[0], 0, frame->height * frame->linesize[0]);
    return 0;
}

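/* Blit one palettized bitmap subtitle rectangle onto the RGB32 canvas,
 * expanding each palette index through the rectangle's palette. */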
static void sub2video_copy_rect(uint8_t *dst, int dst_linesize, int w, int h,
                                AVSubtitleRect *r)
{
    uint32_t *pal, *dst2;
    uint8_t *src, *src2;
    int x, y;

    if (r->type != SUBTITLE_BITMAP) {
        av_log(NULL, AV_LOG_WARNING, "sub2video: non-bitmap subtitle\n");
        return;
    }
    if (r->x < 0 || r->x + r->w > w || r->y < 0 || r->y + r->h > h) {
        av_log(NULL, AV_LOG_WARNING, "sub2video: rectangle overflowing\n");
        return;
    }

    dst += r->y * dst_linesize + r->x * 4;
    src = r->pict.data[0];
    pal = (uint32_t *)r->pict.data[1];
    for (y = 0; y < r->h; y++) {
        dst2 = (uint32_t *)dst;
        src2 = src;
        for (x = 0; x < r->w; x++)
            *(dst2++) = pal[*(src2++)];
        dst += dst_linesize;
        src += r->pict.linesize[0];
    }
}

static void sub2video_push_ref(InputStream *ist, int64_t pts)
{
    AVFrame *frame = ist->sub2video.frame;
    int i;

    av_assert1(frame->data[0]);
    ist->sub2video.last_pts = frame->pts = pts;
    for (i = 0; i < ist->nb_filters; i++)
        av_buffersrc_add_frame_flags(ist->filters[i]->filter, frame,
                                     AV_BUFFERSRC_FLAG_KEEP_REF |
                                     AV_BUFFERSRC_FLAG_PUSH);
}

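/* Render all bitmap rectangles of a subtitle onto a blank canvas and push the
 * result to the attached buffer sources; with a NULL sub, push an empty
 * (cleared) canvas so downstream filters stop displaying the previous one. */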
static void sub2video_update(InputStream *ist, AVSubtitle *sub)
{
    int w = ist->sub2video.w, h = ist->sub2video.h;
    AVFrame *frame = ist->sub2video.frame;
    uint8_t *dst;
    int     dst_linesize;
    int num_rects, i;
    int64_t pts, end_pts;

    if (!frame)
        return;
    if (sub) {
        pts       = av_rescale_q(sub->pts + sub->start_display_time * 1000,
                                 AV_TIME_BASE_Q, ist->st->time_base);
        end_pts   = av_rescale_q(sub->pts + sub->end_display_time * 1000,
                                 AV_TIME_BASE_Q, ist->st->time_base);
        num_rects = sub->num_rects;
    } else {
        pts       = ist->sub2video.end_pts;
        end_pts   = INT64_MAX;
        num_rects = 0;
    }
    if (sub2video_get_blank_frame(ist) < 0) {
        av_log(ist->st->codec, AV_LOG_ERROR,
               "Impossible to get a blank canvas.\n");
        return;
    }
    dst          = frame->data    [0];
    dst_linesize = frame->linesize[0];
    for (i = 0; i < num_rects; i++)
        sub2video_copy_rect(dst, dst_linesize, w, h, sub->rects[i]);
    sub2video_push_ref(ist, pts);
    ist->sub2video.end_pts = end_pts;
}

static void sub2video_heartbeat(InputStream *ist, int64_t pts)
{
    InputFile *infile = input_files[ist->file_index];
    int i, j, nb_reqs;
    int64_t pts2;

    /* When a frame is read from a file, examine all sub2video streams in
       the same file and send the sub2video frame again. Otherwise, decoded
       video frames could be accumulating in the filter graph while a filter
       (possibly overlay) is desperately waiting for a subtitle frame. */
    for (i = 0; i < infile->nb_streams; i++) {
        InputStream *ist2 = input_streams[infile->ist_index + i];
        if (!ist2->sub2video.frame)
            continue;
        /* subtitles seem to be usually muxed ahead of other streams;
           if not, subtracting a larger time here is necessary */
        pts2 = av_rescale_q(pts, ist->st->time_base, ist2->st->time_base) - 1;
        /* do not send the heartbeat frame if the subtitle is already ahead */
        if (pts2 <= ist2->sub2video.last_pts)
            continue;
        if (pts2 >= ist2->sub2video.end_pts || !ist2->sub2video.frame->data[0])
            sub2video_update(ist2, NULL);
        for (j = 0, nb_reqs = 0; j < ist2->nb_filters; j++)
            nb_reqs += av_buffersrc_get_nb_failed_requests(ist2->filters[j]->filter);
        if (nb_reqs)
            sub2video_push_ref(ist2, pts2);
    }
}

static void sub2video_flush(InputStream *ist)
{
    int i;

    for (i = 0; i < ist->nb_filters; i++)
        av_buffersrc_add_ref(ist->filters[i]->filter, NULL, 0);
}

/* end of sub2video hack */

void term_exit(void)
{
    av_log(NULL, AV_LOG_QUIET, "%s", "");
#if HAVE_TERMIOS_H
    if(restore_tty)
        tcsetattr (0, TCSANOW, &oldtty);
#endif
}

static volatile int received_sigterm = 0;
static volatile int received_nb_signals = 0;

static void
sigterm_handler(int sig)
{
    received_sigterm = sig;
    received_nb_signals++;
    term_exit();
    if(received_nb_signals > 3)
        exit(123);
}

void term_init(void)
{
#if HAVE_TERMIOS_H
    if(!run_as_daemon){
        struct termios tty;
        int istty = 1;
#if HAVE_ISATTY
        istty = isatty(0) && isatty(2);
#endif
        if (istty && tcgetattr (0, &tty) == 0) {
            oldtty = tty;
            restore_tty = 1;
            atexit(term_exit);

            tty.c_iflag &= ~(IGNBRK|BRKINT|PARMRK|ISTRIP
                             |INLCR|IGNCR|ICRNL|IXON);
            tty.c_oflag |= OPOST;
            tty.c_lflag &= ~(ECHO|ECHONL|ICANON|IEXTEN);
            tty.c_cflag &= ~(CSIZE|PARENB);
            tty.c_cflag |= CS8;
            tty.c_cc[VMIN] = 1;
            tty.c_cc[VTIME] = 0;

            tcsetattr (0, TCSANOW, &tty);
        }
        signal(SIGQUIT, sigterm_handler); /* Quit (POSIX). */
    }
#endif
    avformat_network_deinit();

    signal(SIGINT , sigterm_handler); /* Interrupt (ANSI). */
    signal(SIGTERM, sigterm_handler); /* Termination (ANSI). */
#ifdef SIGXCPU
    signal(SIGXCPU, sigterm_handler);
#endif
}

/* read a key without blocking */
static int read_key(void)
{
    unsigned char ch;
#if HAVE_TERMIOS_H
    int n = 1;
    struct timeval tv;
    fd_set rfds;

    FD_ZERO(&rfds);
    FD_SET(0, &rfds);
    tv.tv_sec = 0;
    tv.tv_usec = 0;
    n = select(1, &rfds, NULL, NULL, &tv);
    if (n > 0) {
        n = read(0, &ch, 1);
        if (n == 1)
            return ch;

        return n;
    }
#elif HAVE_KBHIT
#    if HAVE_PEEKNAMEDPIPE
    static int is_pipe;
    static HANDLE input_handle;
    DWORD dw, nchars;
    if(!input_handle){
        input_handle = GetStdHandle(STD_INPUT_HANDLE);
        is_pipe = !GetConsoleMode(input_handle, &dw);
    }

    if (stdin->_cnt > 0) {
        read(0, &ch, 1);
        return ch;
    }
    if (is_pipe) {
        /* When running under a GUI, you will end here. */
        if (!PeekNamedPipe(input_handle, NULL, 0, NULL, &nchars, NULL)) {
            // input pipe may have been closed by the program that ran ffmpeg
            return -1;
        }
        // Read it
        if(nchars != 0) {
            read(0, &ch, 1);
            return ch;
        }else{
            return -1;
        }
    }
#    endif
    if(kbhit())
        return(getch());
#endif
    return -1;
}

static int decode_interrupt_cb(void *ctx)
{
    return received_nb_signals > 1;
}

const AVIOInterruptCB int_cb = { decode_interrupt_cb, NULL };

static void exit_program(void)
{
    int i, j;

    if (do_benchmark) {
        int maxrss = getmaxrss() / 1024;
        printf("bench: maxrss=%ikB\n", maxrss);
    }

    for (i = 0; i < nb_filtergraphs; i++) {
        avfilter_graph_free(&filtergraphs[i]->graph);
        for (j = 0; j < filtergraphs[i]->nb_inputs; j++) {
            av_freep(&filtergraphs[i]->inputs[j]->name);
            av_freep(&filtergraphs[i]->inputs[j]);
        }
        av_freep(&filtergraphs[i]->inputs);
        for (j = 0; j < filtergraphs[i]->nb_outputs; j++) {
            av_freep(&filtergraphs[i]->outputs[j]->name);
            av_freep(&filtergraphs[i]->outputs[j]);
        }
        av_freep(&filtergraphs[i]->outputs);
        av_freep(&filtergraphs[i]->graph_desc);
        av_freep(&filtergraphs[i]);
    }
    av_freep(&filtergraphs);

    av_freep(&subtitle_out);

    /* close files */
    for (i = 0; i < nb_output_files; i++) {
        AVFormatContext *s = output_files[i]->ctx;
        if (!(s->oformat->flags & AVFMT_NOFILE) && s->pb)
            avio_close(s->pb);
        avformat_free_context(s);
        av_dict_free(&output_files[i]->opts);
        av_freep(&output_files[i]);
    }
    for (i = 0; i < nb_output_streams; i++) {
        AVBitStreamFilterContext *bsfc = output_streams[i]->bitstream_filters;
        while (bsfc) {
            AVBitStreamFilterContext *next = bsfc->next;
            av_bitstream_filter_close(bsfc);
            bsfc = next;
        }
        output_streams[i]->bitstream_filters = NULL;
        avcodec_free_frame(&output_streams[i]->filtered_frame);

        av_freep(&output_streams[i]->forced_keyframes);
        av_expr_free(output_streams[i]->forced_keyframes_pexpr);
        av_freep(&output_streams[i]->avfilter);
        av_freep(&output_streams[i]->logfile_prefix);
        av_freep(&output_streams[i]);
    }
    for (i = 0; i < nb_input_files; i++) {
        avformat_close_input(&input_files[i]->ctx);
        av_freep(&input_files[i]);
    }
    for (i = 0; i < nb_input_streams; i++) {
        av_frame_free(&input_streams[i]->decoded_frame);
        av_frame_free(&input_streams[i]->filter_frame);
        av_dict_free(&input_streams[i]->opts);
        avsubtitle_free(&input_streams[i]->prev_sub.subtitle);
        av_frame_free(&input_streams[i]->sub2video.frame);
        av_freep(&input_streams[i]->filters);
        av_freep(&input_streams[i]);
    }

    if (vstats_file)
        fclose(vstats_file);
    av_free(vstats_filename);

    av_freep(&input_streams);
    av_freep(&input_files);
    av_freep(&output_streams);
    av_freep(&output_files);

    uninit_opts();

    avformat_network_deinit();

    if (received_sigterm) {
        av_log(NULL, AV_LOG_INFO, "Received signal %d: terminating.\n",
               (int) received_sigterm);
    }
}

void assert_avoptions(AVDictionary *m)
{
    AVDictionaryEntry *t;
    if ((t = av_dict_get(m, "", NULL, AV_DICT_IGNORE_SUFFIX))) {
        av_log(NULL, AV_LOG_FATAL, "Option %s not found.\n", t->key);
        exit(1);
    }
}

static void abort_codec_experimental(AVCodec *c, int encoder)
{
    exit(1);
}

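/* With -benchmark_all, print the time elapsed since the previous call (as
 * measured by getutime()), labelled by the printf-style fmt arguments;
 * calling with fmt == NULL only resets the reference timestamp. */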
static void update_benchmark(const char *fmt, ...)
{
    if (do_benchmark_all) {
        int64_t t = getutime();
        va_list va;
        char buf[1024];

        if (fmt) {
            va_start(va, fmt);
            vsnprintf(buf, sizeof(buf), fmt, va);
            va_end(va);
            printf("bench: %8"PRIu64" %s \n", t - current_time, buf);
        }
        current_time = t;
    }
}

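/* Final per-packet stage before muxing: run the stream's bitstream filters,
 * clip non-monotonic DTS values, enforce the -frames limit here for all
 * streams except encoded video (which is counted in do_video_out()), and
 * pass the packet on to av_interleaved_write_frame(). */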
static void write_frame(AVFormatContext *s, AVPacket *pkt, OutputStream *ost)
{
    AVBitStreamFilterContext *bsfc = ost->bitstream_filters;
    AVCodecContext          *avctx = ost->st->codec;
    int ret;

    if ((avctx->codec_type == AVMEDIA_TYPE_VIDEO && video_sync_method == VSYNC_DROP) ||
        (avctx->codec_type == AVMEDIA_TYPE_AUDIO && audio_sync_method < 0))
        pkt->pts = pkt->dts = AV_NOPTS_VALUE;

    if ((avctx->codec_type == AVMEDIA_TYPE_AUDIO || avctx->codec_type == AVMEDIA_TYPE_VIDEO) && pkt->dts != AV_NOPTS_VALUE) {
        int64_t max = ost->st->cur_dts + !(s->oformat->flags & AVFMT_TS_NONSTRICT);
        if (ost->st->cur_dts && ost->st->cur_dts != AV_NOPTS_VALUE && max > pkt->dts) {
            av_log(s, max - pkt->dts > 2 || avctx->codec_type == AVMEDIA_TYPE_VIDEO ? AV_LOG_WARNING : AV_LOG_DEBUG,
                   "st:%d PTS: %"PRId64" DTS: %"PRId64" < %"PRId64" invalid, clipping\n", pkt->stream_index, pkt->pts, pkt->dts, max);
            if(pkt->pts >= pkt->dts)
                pkt->pts = FFMAX(pkt->pts, max);
            pkt->dts = max;
        }
    }

    /*
     * Audio encoders may split the packets -- #frames in != #packets out.
     * But there is no reordering, so we can limit the number of output packets
     * by simply dropping them here.
     * Counting encoded video frames needs to be done separately because of
     * reordering, see do_video_out()
     */
    if (!(avctx->codec_type == AVMEDIA_TYPE_VIDEO && avctx->codec)) {
        if (ost->frame_number >= ost->max_frames) {
            av_free_packet(pkt);
            return;
        }
        ost->frame_number++;
    }

    while (bsfc) {
        AVPacket new_pkt = *pkt;
        int a = av_bitstream_filter_filter(bsfc, avctx, NULL,
                                           &new_pkt.data, &new_pkt.size,
                                           pkt->data, pkt->size,
                                           pkt->flags & AV_PKT_FLAG_KEY);
        if(a == 0 && new_pkt.data != pkt->data && new_pkt.destruct) {
            uint8_t *t = av_malloc(new_pkt.size + FF_INPUT_BUFFER_PADDING_SIZE); //the new should be a subset of the old so cannot overflow
            if(t) {
                memcpy(t, new_pkt.data, new_pkt.size);
                memset(t + new_pkt.size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
                new_pkt.data = t;
                new_pkt.buf = NULL;
                a = 1;
            } else
                a = AVERROR(ENOMEM);
        }
        if (a > 0) {
            av_free_packet(pkt);
            new_pkt.buf = av_buffer_create(new_pkt.data, new_pkt.size,
                                           av_buffer_default_free, NULL, 0);
            if (!new_pkt.buf)
                exit(1);
        } else if (a < 0) {
            av_log(NULL, AV_LOG_ERROR, "Failed to open bitstream filter %s for stream %d with codec %s",
                   bsfc->filter->name, pkt->stream_index,
                   avctx->codec ? avctx->codec->name : "copy");
            print_error("", a);
            if (exit_on_error)
                exit(1);
        }
        *pkt = new_pkt;

        bsfc = bsfc->next;
    }

    pkt->stream_index = ost->index;

    if (debug_ts) {
        av_log(NULL, AV_LOG_INFO, "muxer <- type:%s "
                "pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s size:%d\n",
                av_get_media_type_string(ost->st->codec->codec_type),
                av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, &ost->st->time_base),
                av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, &ost->st->time_base),
                pkt->size
              );
    }

    ret = av_interleaved_write_frame(s, pkt);
    if (ret < 0) {
        print_error("av_interleaved_write_frame()", ret);
        exit(1);
    }
}

static void close_output_stream(OutputStream *ost)
{
    OutputFile *of = output_files[ost->file_index];

    ost->finished = 1;
    if (of->shortest) {
        int64_t end = av_rescale_q(ost->sync_opts - ost->first_pts, ost->st->codec->time_base, AV_TIME_BASE_Q);
        of->recording_time = FFMIN(of->recording_time, end);
    }
}

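/* Return 0 (and mark the stream as finished) once the output file's
 * -t/recording_time limit has been reached for this stream, 1 otherwise. */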
static int check_recording_time(OutputStream *ost)
{
    OutputFile *of = output_files[ost->file_index];

    if (of->recording_time != INT64_MAX &&
        av_compare_ts(ost->sync_opts - ost->first_pts, ost->st->codec->time_base, of->recording_time,
                      AV_TIME_BASE_Q) >= 0) {
        close_output_stream(ost);
        return 0;
    }
    return 1;
}

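/* Encode one filtered audio frame and mux it: sync_opts counts in samples,
 * the resulting packet timestamps are rescaled from the encoder time base
 * to the stream time base, and the packet is handed to write_frame(). */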
static void do_audio_out(AVFormatContext *s, OutputStream *ost,
                         AVFrame *frame)
{
    AVCodecContext *enc = ost->st->codec;
    AVPacket pkt;
    int got_packet = 0;

    av_init_packet(&pkt);
    pkt.data = NULL;
    pkt.size = 0;

    if (!check_recording_time(ost))
        return;

    if (frame->pts == AV_NOPTS_VALUE || audio_sync_method < 0)
        frame->pts = ost->sync_opts;
    ost->sync_opts = frame->pts + frame->nb_samples;

    av_assert0(pkt.size || !pkt.data);
    update_benchmark(NULL);
    if (avcodec_encode_audio2(enc, &pkt, frame, &got_packet) < 0) {
        av_log(NULL, AV_LOG_FATAL, "Audio encoding failed (avcodec_encode_audio2)\n");
        exit(1);
    }
    update_benchmark("encode_audio %d.%d", ost->file_index, ost->index);

    if (got_packet) {
        if (pkt.pts != AV_NOPTS_VALUE)
            pkt.pts      = av_rescale_q(pkt.pts,      enc->time_base, ost->st->time_base);
        if (pkt.dts != AV_NOPTS_VALUE)
            pkt.dts      = av_rescale_q(pkt.dts,      enc->time_base, ost->st->time_base);
        if (pkt.duration > 0)
            pkt.duration = av_rescale_q(pkt.duration, enc->time_base, ost->st->time_base);

        if (debug_ts) {
            av_log(NULL, AV_LOG_INFO, "encoder -> type:audio "
                   "pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s\n",
                   av_ts2str(pkt.pts), av_ts2timestr(pkt.pts, &ost->st->time_base),
                   av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &ost->st->time_base));
        }

        audio_size += pkt.size;
        write_frame(s, &pkt, ost);

        av_free_packet(&pkt);
    }
}

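/* Encode one decoded subtitle. DVB subtitles are encoded twice, once to draw
 * the rectangles and once with num_rects == 0 to clear them again; the
 * timestamps are shifted by the output file's start time so that -ss and -t
 * keep working via check_recording_time(). */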
static void do_subtitle_out(AVFormatContext *s,
                            OutputStream *ost,
                            InputStream *ist,
                            AVSubtitle *sub)
{
    int subtitle_out_max_size = 1024 * 1024;
    int subtitle_out_size, nb, i;
    AVCodecContext *enc;
    AVPacket pkt;
    int64_t pts;

    if (sub->pts == AV_NOPTS_VALUE) {
        av_log(NULL, AV_LOG_ERROR, "Subtitle packets must have a pts\n");
        if (exit_on_error)
            exit(1);
        return;
    }

    enc = ost->st->codec;

    if (!subtitle_out) {
        subtitle_out = av_malloc(subtitle_out_max_size);
    }

    /* Note: DVB subtitles need one packet to draw them and another
       packet to clear them */
    /* XXX: signal it in the codec context ? */
    if (enc->codec_id == AV_CODEC_ID_DVB_SUBTITLE)
        nb = 2;
    else
        nb = 1;

    /* shift timestamp to honor -ss and make check_recording_time() work with -t */
    pts = sub->pts - output_files[ost->file_index]->start_time;
    for (i = 0; i < nb; i++) {
        ost->sync_opts = av_rescale_q(pts, AV_TIME_BASE_Q, enc->time_base);
        if (!check_recording_time(ost))
            return;

        sub->pts = pts;
        // start_display_time is required to be 0
        sub->pts               += av_rescale_q(sub->start_display_time, (AVRational){ 1, 1000 }, AV_TIME_BASE_Q);
        sub->end_display_time  -= sub->start_display_time;
        sub->start_display_time = 0;
        if (i == 1)
            sub->num_rects = 0;
        subtitle_out_size = avcodec_encode_subtitle(enc, subtitle_out,
                                                    subtitle_out_max_size, sub);
        if (subtitle_out_size < 0) {
            av_log(NULL, AV_LOG_FATAL, "Subtitle encoding failed\n");
            exit(1);
        }

        av_init_packet(&pkt);
        pkt.data = subtitle_out;
        pkt.size = subtitle_out_size;
        pkt.pts  = av_rescale_q(sub->pts, AV_TIME_BASE_Q, ost->st->time_base);
        pkt.duration = av_rescale_q(sub->end_display_time, (AVRational){ 1, 1000 }, ost->st->time_base);
        if (enc->codec_id == AV_CODEC_ID_DVB_SUBTITLE) {
            /* XXX: the pts correction is handled here. Maybe handling
               it in the codec would be better */
            if (i == 0)
                pkt.pts += 90 * sub->start_display_time;
            else
                pkt.pts += 90 * sub->end_display_time;
        }
        subtitle_size += pkt.size;
        write_frame(s, &pkt, ost);
    }
}

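/* Encode one filtered video frame. The -vsync policy decides how many copies
 * of the frame are emitted (0 to drop it, more than one to duplicate it) so
 * the output frame rate stays consistent; forced keyframes
 * (-force_key_frames) and two-pass log output are handled here as well. */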
static void do_video_out(AVFormatContext *s,
                         OutputStream *ost,
                         AVFrame *in_picture)
{
    int ret, format_video_sync;
    AVPacket pkt;
    AVCodecContext *enc = ost->st->codec;
    int nb_frames, i;
    double sync_ipts, delta;
    double duration = 0;
    int frame_size = 0;
    InputStream *ist = NULL;

    if (ost->source_index >= 0)
        ist = input_streams[ost->source_index];

    if(ist && ist->st->start_time != AV_NOPTS_VALUE && ist->st->first_dts != AV_NOPTS_VALUE && ost->frame_rate.num)
        duration = 1/(av_q2d(ost->frame_rate) * av_q2d(enc->time_base));

    sync_ipts = in_picture->pts;
    delta = sync_ipts - ost->sync_opts + duration;

    /* by default, we output a single frame */
    nb_frames = 1;

    format_video_sync = video_sync_method;
    if (format_video_sync == VSYNC_AUTO)
        format_video_sync = (s->oformat->flags & AVFMT_VARIABLE_FPS) ? ((s->oformat->flags & AVFMT_NOTIMESTAMPS) ? VSYNC_PASSTHROUGH : VSYNC_VFR) : VSYNC_CFR;

    switch (format_video_sync) {
    case VSYNC_CFR:
        // FIXME set to 0.5 after we fix some dts/pts bugs like in avidec.c
        if (delta < -1.1)
            nb_frames = 0;
        else if (delta > 1.1)
            nb_frames = lrintf(delta);
        break;
    case VSYNC_VFR:
        if (delta <= -0.6)
            nb_frames = 0;
        else if (delta > 0.6)
            ost->sync_opts = lrint(sync_ipts);
        break;
    case VSYNC_DROP:
    case VSYNC_PASSTHROUGH:
        ost->sync_opts = lrint(sync_ipts);
        break;
    default:
        av_assert0(0);
    }

    nb_frames = FFMIN(nb_frames, ost->max_frames - ost->frame_number);
    if (nb_frames == 0) {
        nb_frames_drop++;
        av_log(NULL, AV_LOG_VERBOSE, "*** drop!\n");
        return;
    } else if (nb_frames > 1) {
        if (nb_frames > dts_error_threshold * 30) {
            av_log(NULL, AV_LOG_ERROR, "%d frame duplication too large, skipping\n", nb_frames - 1);
            nb_frames_drop++;
            return;
        }
        nb_frames_dup += nb_frames - 1;
        av_log(NULL, AV_LOG_VERBOSE, "*** %d dup!\n", nb_frames - 1);
    }

    /* duplicates frame if needed */
    for (i = 0; i < nb_frames; i++) {
        av_init_packet(&pkt);
        pkt.data = NULL;
        pkt.size = 0;

        in_picture->pts = ost->sync_opts;

        if (!check_recording_time(ost))
            return;

        if (s->oformat->flags & AVFMT_RAWPICTURE &&
            enc->codec->id == AV_CODEC_ID_RAWVIDEO) {
            /* raw pictures are written as an AVPicture structure to
               avoid any copies. We temporarily support the older
               method. */
            enc->coded_frame->interlaced_frame = in_picture->interlaced_frame;
            enc->coded_frame->top_field_first  = in_picture->top_field_first;
            if (enc->coded_frame->interlaced_frame)
                enc->field_order = enc->coded_frame->top_field_first ? AV_FIELD_TB:AV_FIELD_BT;
            else
                enc->field_order = AV_FIELD_PROGRESSIVE;
            pkt.data   = (uint8_t *)in_picture;
            pkt.size   = sizeof(AVPicture);
            pkt.pts    = av_rescale_q(in_picture->pts, enc->time_base, ost->st->time_base);
            pkt.flags |= AV_PKT_FLAG_KEY;

            video_size += pkt.size;
            write_frame(s, &pkt, ost);
        } else {
            int got_packet, forced_keyframe = 0;
            double pts_time;

            if (ost->st->codec->flags & (CODEC_FLAG_INTERLACED_DCT|CODEC_FLAG_INTERLACED_ME) &&
                ost->top_field_first >= 0)
                in_picture->top_field_first = !!ost->top_field_first;

            if (in_picture->interlaced_frame) {
                if (enc->codec->id == AV_CODEC_ID_MJPEG)
                    enc->field_order = in_picture->top_field_first ? AV_FIELD_TT:AV_FIELD_BB;
                else
                    enc->field_order = in_picture->top_field_first ? AV_FIELD_TB:AV_FIELD_BT;
            } else
                enc->field_order = AV_FIELD_PROGRESSIVE;

            in_picture->quality = ost->st->codec->global_quality;
            if (!enc->me_threshold)
                in_picture->pict_type = 0;

            pts_time = in_picture->pts != AV_NOPTS_VALUE ?
                in_picture->pts * av_q2d(enc->time_base) : NAN;
            if (ost->forced_kf_index < ost->forced_kf_count &&
                in_picture->pts >= ost->forced_kf_pts[ost->forced_kf_index]) {
                ost->forced_kf_index++;
                forced_keyframe = 1;
            } else if (ost->forced_keyframes_pexpr) {
                double res;
                ost->forced_keyframes_expr_const_values[FKF_T] = pts_time;
                res = av_expr_eval(ost->forced_keyframes_pexpr,
                                   ost->forced_keyframes_expr_const_values, NULL);
                av_dlog(NULL, "force_key_frame: n:%f n_forced:%f prev_forced_n:%f t:%f prev_forced_t:%f -> res:%f\n",
                        ost->forced_keyframes_expr_const_values[FKF_N],
                        ost->forced_keyframes_expr_const_values[FKF_N_FORCED],
                        ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_N],
                        ost->forced_keyframes_expr_const_values[FKF_T],
                        ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_T],
                        res);
                if (res) {
                    forced_keyframe = 1;
                    ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_N] =
                        ost->forced_keyframes_expr_const_values[FKF_N];
                    ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_T] =
                        ost->forced_keyframes_expr_const_values[FKF_T];
                    ost->forced_keyframes_expr_const_values[FKF_N_FORCED] += 1;
                }

                ost->forced_keyframes_expr_const_values[FKF_N] += 1;
            }
            if (forced_keyframe) {
                in_picture->pict_type = AV_PICTURE_TYPE_I;
                av_log(NULL, AV_LOG_DEBUG, "Forced keyframe at time %f\n", pts_time);
            }

            update_benchmark(NULL);
            ret = avcodec_encode_video2(enc, &pkt, in_picture, &got_packet);
            update_benchmark("encode_video %d.%d", ost->file_index, ost->index);
            if (ret < 0) {
                av_log(NULL, AV_LOG_FATAL, "Video encoding failed\n");
                exit(1);
            }

            if (got_packet) {
                if (pkt.pts == AV_NOPTS_VALUE && !(enc->codec->capabilities & CODEC_CAP_DELAY))
                    pkt.pts = ost->sync_opts;

                if (pkt.pts != AV_NOPTS_VALUE)
                    pkt.pts = av_rescale_q(pkt.pts, enc->time_base, ost->st->time_base);
                if (pkt.dts != AV_NOPTS_VALUE)
                    pkt.dts = av_rescale_q(pkt.dts, enc->time_base, ost->st->time_base);

                if (debug_ts) {
                    av_log(NULL, AV_LOG_INFO, "encoder -> type:video "
                           "pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s\n",
                           av_ts2str(pkt.pts), av_ts2timestr(pkt.pts, &ost->st->time_base),
                           av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &ost->st->time_base));
                }

                frame_size = pkt.size;
                video_size += pkt.size;
                write_frame(s, &pkt, ost);
                av_free_packet(&pkt);

                /* if two pass, output log */
                if (ost->logfile && enc->stats_out) {
                    fprintf(ost->logfile, "%s", enc->stats_out);
                }
            }
        }
        ost->sync_opts++;
        /*
         * For video, number of frames in == number of packets out.
         * But there may be reordering, so we can't throw away frames on encoder
         * flush, we need to limit them here, before they go into encoder.
         */
        ost->frame_number++;

        if (vstats_filename && frame_size)
            do_video_stats(ost, frame_size);
    }
}

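/* Convert a mean squared error already normalized to the 0..1 range
 * (i.e. MSE / 255^2, as passed in by the callers below) into PSNR in dB:
 * PSNR = -10 * log10(d). */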
static double psnr(double d)
{
    return -10.0 * log(d) / log(10.0);
}

static void do_video_stats(OutputStream *ost, int frame_size)
{
    AVCodecContext *enc;
    int frame_number;
    double ti1, bitrate, avg_bitrate;

    /* this is executed just the first time do_video_stats is called */
    if (!vstats_file) {
        vstats_file = fopen(vstats_filename, "w");
        if (!vstats_file) {
            perror("fopen");
            exit(1);
        }
    }

    enc = ost->st->codec;
    if (enc->codec_type == AVMEDIA_TYPE_VIDEO) {
        frame_number = ost->st->nb_frames;
        fprintf(vstats_file, "frame= %5d q= %2.1f ", frame_number, enc->coded_frame->quality / (float)FF_QP2LAMBDA);
        if (enc->flags&CODEC_FLAG_PSNR)
            fprintf(vstats_file, "PSNR= %6.2f ", psnr(enc->coded_frame->error[0] / (enc->width * enc->height * 255.0 * 255.0)));

        fprintf(vstats_file,"f_size= %6d ", frame_size);
        /* compute pts value */
        ti1 = ost->st->pts.val * av_q2d(enc->time_base);
        if (ti1 < 0.01)
            ti1 = 0.01;

        bitrate     = (frame_size * 8) / av_q2d(enc->time_base) / 1000.0;
        avg_bitrate = (double)(video_size * 8) / ti1 / 1000.0;
        fprintf(vstats_file, "s_size= %8.0fkB time= %0.3f br= %7.1fkbits/s avg_br= %7.1fkbits/s ",
                (double)video_size / 1024, ti1, bitrate, avg_bitrate);
        fprintf(vstats_file, "type= %c\n", av_get_picture_type_char(enc->coded_frame->pict_type));
    }
}

/**
 * Get and encode new output from any of the filtergraphs, without causing
 * activity.
 *
 * @return  0 for success, <0 for severe errors
 */
static int reap_filters(void)
{
    AVFrame *filtered_frame = NULL;
    int i;
    int64_t frame_pts;

    /* Reap all buffers present in the buffer sinks */
    for (i = 0; i < nb_output_streams; i++) {
        OutputStream *ost = output_streams[i];
        OutputFile    *of = output_files[ost->file_index];
        int ret = 0;

        if (!ost->filter)
            continue;

        if (!ost->filtered_frame && !(ost->filtered_frame = avcodec_alloc_frame())) {
            return AVERROR(ENOMEM);
        } else
            avcodec_get_frame_defaults(ost->filtered_frame);
        filtered_frame = ost->filtered_frame;

        while (1) {
            ret = av_buffersink_get_frame_flags(ost->filter->filter, filtered_frame,
                                                AV_BUFFERSINK_FLAG_NO_REQUEST);
            if (ret < 0) {
                if (ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
                    av_log(NULL, AV_LOG_WARNING,
                           "Error in av_buffersink_get_frame_flags(): %s\n", av_err2str(ret));
                }
                break;
            }
            frame_pts = AV_NOPTS_VALUE;
            if (filtered_frame->pts != AV_NOPTS_VALUE) {
                filtered_frame->pts = frame_pts = av_rescale_q(filtered_frame->pts,
                                                               ost->filter->filter->inputs[0]->time_base,
                                                               ost->st->codec->time_base) -
                                                  av_rescale_q(of->start_time,
                                                               AV_TIME_BASE_Q,
                                                               ost->st->codec->time_base);

                if (of->start_time && filtered_frame->pts < 0) {
                    av_frame_unref(filtered_frame);
                    continue;
                }
            }
            //if (ost->source_index >= 0)
            //    *filtered_frame= *input_streams[ost->source_index]->decoded_frame; //for me_threshold


            switch (ost->filter->filter->inputs[0]->type) {
            case AVMEDIA_TYPE_VIDEO:
                filtered_frame->pts = frame_pts;
                if (!ost->frame_aspect_ratio.num)
                    ost->st->codec->sample_aspect_ratio = filtered_frame->sample_aspect_ratio;

                do_video_out(of->ctx, ost, filtered_frame);
                break;
            case AVMEDIA_TYPE_AUDIO:
                filtered_frame->pts = frame_pts;
                if (!(ost->st->codec->codec->capabilities & CODEC_CAP_PARAM_CHANGE) &&
                    ost->st->codec->channels != av_frame_get_channels(filtered_frame)) {
                    av_log(NULL, AV_LOG_ERROR,
                           "Audio filter graph output is not normalized and encoder does not support parameter changes\n");
                    break;
                }
                do_audio_out(of->ctx, ost, filtered_frame);
                break;
            default:
                // TODO support subtitle filters
                av_assert0(0);
            }

            av_frame_unref(filtered_frame);
        }
    }

    return 0;
}

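/* Print the periodic status line (frame/fps/q/size/time/bitrate, plus PSNR
 * and a qp histogram when enabled) at most every 500 ms, and write the
 * machine-readable key=value report to -progress when it is in use. */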
static void print_report(int is_last_report, int64_t timer_start, int64_t cur_time)
{
    char buf[1024];
    AVBPrint buf_script;
    OutputStream *ost;
    AVFormatContext *oc;
    int64_t total_size;
    AVCodecContext *enc;
    int frame_number, vid, i;
    double bitrate;
    int64_t pts = INT64_MIN;
    static int64_t last_time = -1;
    static int qp_histogram[52];
    int hours, mins, secs, us;

    if (!print_stats && !is_last_report && !progress_avio)
        return;

    if (!is_last_report) {
        if (last_time == -1) {
            last_time = cur_time;
            return;
        }
        if ((cur_time - last_time) < 500000)
            return;
        last_time = cur_time;
    }


    oc = output_files[0]->ctx;

    total_size = avio_size(oc->pb);
    if (total_size <= 0) // FIXME improve avio_size() so it works with non seekable output too
        total_size = avio_tell(oc->pb);

    buf[0] = '\0';
    vid = 0;
    av_bprint_init(&buf_script, 0, 1);
    for (i = 0; i < nb_output_streams; i++) {
        float q = -1;
        ost = output_streams[i];
        enc = ost->st->codec;
        if (!ost->stream_copy && enc->coded_frame)
            q = enc->coded_frame->quality / (float)FF_QP2LAMBDA;
        if (vid && enc->codec_type == AVMEDIA_TYPE_VIDEO) {
            snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf), "q=%2.1f ", q);
            av_bprintf(&buf_script, "stream_%d_%d_q=%.1f\n",
                       ost->file_index, ost->index, q);
        }
        if (!vid && enc->codec_type == AVMEDIA_TYPE_VIDEO) {
            float fps, t = (cur_time-timer_start) / 1000000.0;

            frame_number = ost->frame_number;
            fps = t > 1 ? frame_number / t : 0;
            snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf), "frame=%5d fps=%3.*f q=%3.1f ",
                     frame_number, fps < 9.95, fps, q);
            av_bprintf(&buf_script, "frame=%d\n", frame_number);
            av_bprintf(&buf_script, "fps=%.1f\n", fps);
            av_bprintf(&buf_script, "stream_%d_%d_q=%.1f\n",
                       ost->file_index, ost->index, q);
            if (is_last_report)
                snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf), "L");
            if (qp_hist) {
                int j;
                int qp = lrintf(q);
                if (qp >= 0 && qp < FF_ARRAY_ELEMS(qp_histogram))
                    qp_histogram[qp]++;
                for (j = 0; j < 32; j++)
                    snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf), "%X", (int)lrintf(log2(qp_histogram[j] + 1)));
            }
            if ((enc->flags&CODEC_FLAG_PSNR) && (enc->coded_frame || is_last_report)) {
                int j;
                double error, error_sum = 0;
                double scale, scale_sum = 0;
                double p;
                char type[3] = { 'Y','U','V' };
                snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf), "PSNR=");
                for (j = 0; j < 3; j++) {
                    if (is_last_report) {
                        error = enc->error[j];
                        scale = enc->width * enc->height * 255.0 * 255.0 * frame_number;
                    } else {
                        error = enc->coded_frame->error[j];
                        scale = enc->width * enc->height * 255.0 * 255.0;
                    }
                    if (j)
                        scale /= 4;
                    error_sum += error;
                    scale_sum += scale;
                    p = psnr(error / scale);
                    snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf), "%c:%2.2f ", type[j], p);
                    av_bprintf(&buf_script, "stream_%d_%d_psnr_%c=%2.2f\n",
                               ost->file_index, ost->index, type[j] | 32, p);
                }
                p = psnr(error_sum / scale_sum);
                snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf), "*:%2.2f ", psnr(error_sum / scale_sum));
                av_bprintf(&buf_script, "stream_%d_%d_psnr_all=%2.2f\n",
                           ost->file_index, ost->index, p);
            }
            vid = 1;
        }
        /* compute min output value */
        if ((is_last_report || !ost->finished) && ost->st->pts.val != AV_NOPTS_VALUE)
            pts = FFMAX(pts, av_rescale_q(ost->st->pts.val,
                                          ost->st->time_base, AV_TIME_BASE_Q));
    }

    secs = pts / AV_TIME_BASE;
    us = pts % AV_TIME_BASE;
    mins = secs / 60;
    secs %= 60;
    hours = mins / 60;
    mins %= 60;

    bitrate = pts && total_size >= 0 ? total_size * 8 / (pts / 1000.0) : -1;

    if (total_size < 0) snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf),
                                 "size=N/A time=");
    else                snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf),
                                 "size=%8.0fkB time=", total_size / 1024.0);
    snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf),
             "%02d:%02d:%02d.%02d ", hours, mins, secs,
             (100 * us) / AV_TIME_BASE);
    if (bitrate < 0) snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf),
                              "bitrate=N/A");
    else             snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf),
                              "bitrate=%6.1fkbits/s", bitrate);
    if (total_size < 0) av_bprintf(&buf_script, "total_size=N/A\n");
    else                av_bprintf(&buf_script, "total_size=%"PRId64"\n", total_size);
    av_bprintf(&buf_script, "out_time_ms=%"PRId64"\n", pts);
    av_bprintf(&buf_script, "out_time=%02d:%02d:%02d.%06d\n",
               hours, mins, secs, us);

    if (nb_frames_dup || nb_frames_drop)
        snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf), " dup=%d drop=%d",
                 nb_frames_dup, nb_frames_drop);
    av_bprintf(&buf_script, "dup_frames=%d\n", nb_frames_dup);
    av_bprintf(&buf_script, "drop_frames=%d\n", nb_frames_drop);

    if (print_stats || is_last_report) {
        if (print_stats==1 && AV_LOG_INFO > av_log_get_level()) {
            fprintf(stderr, "%s \r", buf);
        } else
            av_log(NULL, AV_LOG_INFO, "%s \r", buf);

        fflush(stderr);
    }

    if (progress_avio) {
        av_bprintf(&buf_script, "progress=%s\n",
                   is_last_report ? "end" : "continue");
        avio_write(progress_avio, buf_script.str,
                   FFMIN(buf_script.len, buf_script.size - 1));
        avio_flush(progress_avio);
        av_bprint_finalize(&buf_script, NULL);
        if (is_last_report) {
            avio_close(progress_avio);
            progress_avio = NULL;
        }
    }

    if (is_last_report) {
        int64_t raw= audio_size + video_size + subtitle_size + extra_size;
        av_log(NULL, AV_LOG_INFO, "\n");
        av_log(NULL, AV_LOG_INFO, "video:%1.0fkB audio:%1.0fkB subtitle:%1.0fkB global headers:%1.0fkB muxing overhead %f%%\n",
               video_size / 1024.0,
               audio_size / 1024.0,
               subtitle_size / 1024.0,
               extra_size / 1024.0,
               100.0 * (total_size - raw) / raw
        );
        if(video_size + audio_size + subtitle_size + extra_size == 0){
            av_log(NULL, AV_LOG_WARNING, "Output file is empty, nothing was encoded (check -ss / -t / -frames parameters if used)\n");
        }
    }
}

yading@10
|
1275 static void flush_encoders(void)
|
yading@10
|
1276 {
|
yading@10
|
1277 int i, ret;
|
yading@10
|
1278
|
yading@10
|
1279 for (i = 0; i < nb_output_streams; i++) {
|
yading@10
|
1280 OutputStream *ost = output_streams[i];
|
yading@10
|
1281 AVCodecContext *enc = ost->st->codec;
|
yading@10
|
1282 AVFormatContext *os = output_files[ost->file_index]->ctx;
|
yading@10
|
1283 int stop_encoding = 0;
|
yading@10
|
1284
|
yading@10
|
1285 if (!ost->encoding_needed)
|
yading@10
|
1286 continue;
|
yading@10
|
1287
|
yading@10
|
1288 if (ost->st->codec->codec_type == AVMEDIA_TYPE_AUDIO && enc->frame_size <= 1)
|
yading@10
|
1289 continue;
|
yading@10
|
1290 if (ost->st->codec->codec_type == AVMEDIA_TYPE_VIDEO && (os->oformat->flags & AVFMT_RAWPICTURE) && enc->codec->id == AV_CODEC_ID_RAWVIDEO)
|
yading@10
|
1291 continue;
|
yading@10
|
1292
|
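/* Note: with the avcodec_encode_audio2()/avcodec_encode_video2() API used below,
 * draining works by repeatedly encoding a NULL frame; the encoder keeps returning
 * its delayed packets until it sets got_packet to 0, which ends the loop. */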
yading@10
|
1293 for (;;) {
|
yading@10
|
1294 int (*encode)(AVCodecContext*, AVPacket*, const AVFrame*, int*) = NULL;
|
yading@10
|
1295 const char *desc;
|
yading@10
|
1296 int64_t *size;
|
yading@10
|
1297
|
yading@10
|
1298 switch (ost->st->codec->codec_type) {
|
yading@10
|
1299 case AVMEDIA_TYPE_AUDIO:
|
yading@10
|
1300 encode = avcodec_encode_audio2;
|
yading@10
|
1301 desc = "Audio";
|
yading@10
|
1302 size = &audio_size;
|
yading@10
|
1303 break;
|
yading@10
|
1304 case AVMEDIA_TYPE_VIDEO:
|
yading@10
|
1305 encode = avcodec_encode_video2;
|
yading@10
|
1306 desc = "Video";
|
yading@10
|
1307 size = &video_size;
|
yading@10
|
1308 break;
|
yading@10
|
1309 default:
|
yading@10
|
1310 stop_encoding = 1;
|
yading@10
|
1311 }
|
yading@10
|
1312
|
yading@10
|
1313 if (encode) {
|
yading@10
|
1314 AVPacket pkt;
|
yading@10
|
1315 int got_packet;
|
yading@10
|
1316 av_init_packet(&pkt);
|
yading@10
|
1317 pkt.data = NULL;
|
yading@10
|
1318 pkt.size = 0;
|
yading@10
|
1319
|
yading@10
|
1320 update_benchmark(NULL);
|
yading@10
|
1321 ret = encode(enc, &pkt, NULL, &got_packet);
|
yading@10
|
1322 update_benchmark("flush %s %d.%d", desc, ost->file_index, ost->index);
|
yading@10
|
1323 if (ret < 0) {
|
yading@10
|
1324 av_log(NULL, AV_LOG_FATAL, "%s encoding failed\n", desc);
|
yading@10
|
1325 exit(1);
|
yading@10
|
1326 }
|
yading@10
|
1327 *size += pkt.size;
|
yading@10
|
1328 if (ost->logfile && enc->stats_out) {
|
yading@10
|
1329 fprintf(ost->logfile, "%s", enc->stats_out);
|
yading@10
|
1330 }
|
yading@10
|
1331 if (!got_packet) {
|
yading@10
|
1332 stop_encoding = 1;
|
yading@10
|
1333 break;
|
yading@10
|
1334 }
|
yading@10
|
1335 if (pkt.pts != AV_NOPTS_VALUE)
|
yading@10
|
1336 pkt.pts = av_rescale_q(pkt.pts, enc->time_base, ost->st->time_base);
|
yading@10
|
1337 if (pkt.dts != AV_NOPTS_VALUE)
|
yading@10
|
1338 pkt.dts = av_rescale_q(pkt.dts, enc->time_base, ost->st->time_base);
|
yading@10
|
1339 if (pkt.duration > 0)
|
yading@10
|
1340 pkt.duration = av_rescale_q(pkt.duration, enc->time_base, ost->st->time_base);
|
yading@10
|
1341 write_frame(os, &pkt, ost);
|
yading@10
|
1342 if (ost->st->codec->codec_type == AVMEDIA_TYPE_VIDEO && vstats_filename) {
|
yading@10
|
1343 do_video_stats(ost, pkt.size);
|
yading@10
|
1344 }
|
yading@10
|
1345 }
|
yading@10
|
1346
|
yading@10
|
1347 if (stop_encoding)
|
yading@10
|
1348 break;
|
yading@10
|
1349 }
|
yading@10
|
1350 }
|
yading@10
|
1351 }
|
yading@10
|
1352
|
yading@10
|
1353 /*
|
yading@10
|
1354 * Check whether a packet from ist should be written into ost at this time
|
yading@10
|
1355 */
|
yading@10
|
1356 static int check_output_constraints(InputStream *ist, OutputStream *ost)
|
yading@10
|
1357 {
|
yading@10
|
1358 OutputFile *of = output_files[ost->file_index];
|
yading@10
|
1359 int ist_index = input_files[ist->file_index]->ist_index + ist->st->index;
|
yading@10
|
1360
|
yading@10
|
1361 if (ost->source_index != ist_index)
|
yading@10
|
1362 return 0;
|
yading@10
|
1363
|
yading@10
|
1364 if (of->start_time && ist->pts < of->start_time)
|
yading@10
|
1365 return 0;
|
yading@10
|
1366
|
yading@10
|
1367 return 1;
|
yading@10
|
1368 }
|
yading@10
|
1369
|
yading@10
|
1370 static void do_streamcopy(InputStream *ist, OutputStream *ost, const AVPacket *pkt)
|
yading@10
|
1371 {
|
yading@10
|
1372 OutputFile *of = output_files[ost->file_index];
|
yading@10
|
1373 int64_t ost_tb_start_time = av_rescale_q(of->start_time, AV_TIME_BASE_Q, ost->st->time_base);
|
yading@10
|
1374 AVPicture pict;
|
yading@10
|
1375 AVPacket opkt;
|
yading@10
|
1376
|
yading@10
|
1377 av_init_packet(&opkt);
|
yading@10
|
1378
|
yading@10
|
1379 if ((!ost->frame_number && !(pkt->flags & AV_PKT_FLAG_KEY)) &&
|
yading@10
|
1380 !ost->copy_initial_nonkeyframes)
|
yading@10
|
1381 return;
|
yading@10
|
1382
|
yading@10
|
1383 if (!ost->frame_number && ist->pts < of->start_time &&
|
yading@10
|
1384 !ost->copy_prior_start)
|
yading@10
|
1385 return;
|
yading@10
|
1386
|
yading@10
|
1387 if (of->recording_time != INT64_MAX &&
|
yading@10
|
1388 ist->pts >= of->recording_time + of->start_time) {
|
yading@10
|
1389 close_output_stream(ost);
|
yading@10
|
1390 return;
|
yading@10
|
1391 }
|
yading@10
|
1392
|
yading@10
|
1393 /* force the input stream PTS */
|
yading@10
|
1394 if (ost->st->codec->codec_type == AVMEDIA_TYPE_AUDIO)
|
yading@10
|
1395 audio_size += pkt->size;
|
yading@10
|
1396 else if (ost->st->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
|
yading@10
|
1397 video_size += pkt->size;
|
yading@10
|
1398 ost->sync_opts++;
|
yading@10
|
1399 } else if (ost->st->codec->codec_type == AVMEDIA_TYPE_SUBTITLE) {
|
yading@10
|
1400 subtitle_size += pkt->size;
|
yading@10
|
1401 }
|
yading@10
|
1402
|
yading@10
|
1403 if (pkt->pts != AV_NOPTS_VALUE)
|
yading@10
|
1404 opkt.pts = av_rescale_q(pkt->pts, ist->st->time_base, ost->st->time_base) - ost_tb_start_time;
|
yading@10
|
1405 else
|
yading@10
|
1406 opkt.pts = AV_NOPTS_VALUE;
|
yading@10
|
1407
|
yading@10
|
1408 if (pkt->dts == AV_NOPTS_VALUE)
|
yading@10
|
1409 opkt.dts = av_rescale_q(ist->dts, AV_TIME_BASE_Q, ost->st->time_base);
|
yading@10
|
1410 else
|
yading@10
|
1411 opkt.dts = av_rescale_q(pkt->dts, ist->st->time_base, ost->st->time_base);
|
yading@10
|
1412 opkt.dts -= ost_tb_start_time;
|
yading@10
|
1413
|
yading@10
|
1414 if (ost->st->codec->codec_type == AVMEDIA_TYPE_AUDIO && pkt->dts != AV_NOPTS_VALUE) {
|
yading@10
|
1415 int duration = av_get_audio_frame_duration(ist->st->codec, pkt->size);
|
yading@10
|
1416 if(!duration)
|
yading@10
|
1417 duration = ist->st->codec->frame_size;
|
yading@10
|
1418 opkt.dts = opkt.pts = av_rescale_delta(ist->st->time_base, pkt->dts,
|
yading@10
|
1419 (AVRational){1, ist->st->codec->sample_rate}, duration, &ist->filter_in_rescale_delta_last,
|
yading@10
|
1420 ost->st->time_base) - ost_tb_start_time;
|
yading@10
|
1421 }
|
yading@10
|
1422
|
yading@10
|
1423 opkt.duration = av_rescale_q(pkt->duration, ist->st->time_base, ost->st->time_base);
|
yading@10
|
1424 opkt.flags = pkt->flags;
|
yading@10
|
1425
|
yading@10
|
1426 // FIXME: remove the following 2 lines; they should be replaced by the bitstream filters
|
yading@10
|
1427 if ( ost->st->codec->codec_id != AV_CODEC_ID_H264
|
yading@10
|
1428 && ost->st->codec->codec_id != AV_CODEC_ID_MPEG1VIDEO
|
yading@10
|
1429 && ost->st->codec->codec_id != AV_CODEC_ID_MPEG2VIDEO
|
yading@10
|
1430 && ost->st->codec->codec_id != AV_CODEC_ID_VC1
|
yading@10
|
1431 ) {
|
yading@10
|
1432 if (av_parser_change(ist->st->parser, ost->st->codec, &opkt.data, &opkt.size, pkt->data, pkt->size, pkt->flags & AV_PKT_FLAG_KEY)) {
|
yading@10
|
1433 opkt.buf = av_buffer_create(opkt.data, opkt.size, av_buffer_default_free, NULL, 0);
|
yading@10
|
1434 if (!opkt.buf)
|
yading@10
|
1435 exit(1);
|
yading@10
|
1436 }
|
yading@10
|
1437 } else {
|
yading@10
|
1438 opkt.data = pkt->data;
|
yading@10
|
1439 opkt.size = pkt->size;
|
yading@10
|
1440 }
|
yading@10
|
1441
|
yading@10
|
1442 if (ost->st->codec->codec_type == AVMEDIA_TYPE_VIDEO && (of->ctx->oformat->flags & AVFMT_RAWPICTURE)) {
|
yading@10
|
1443 /* store AVPicture in AVPacket, as expected by the output format */
|
yading@10
|
1444 avpicture_fill(&pict, opkt.data, ost->st->codec->pix_fmt, ost->st->codec->width, ost->st->codec->height);
|
yading@10
|
1445 opkt.data = (uint8_t *)&pict;
|
yading@10
|
1446 opkt.size = sizeof(AVPicture);
|
yading@10
|
1447 opkt.flags |= AV_PKT_FLAG_KEY;
|
yading@10
|
1448 }
|
yading@10
|
1449
|
yading@10
|
1450 write_frame(of->ctx, &opkt, ost);
|
yading@10
|
1451 ost->st->codec->frame_number++;
|
yading@10
|
1452 }
|
yading@10
|
1453
|
yading@10
|
1454 static void rate_emu_sleep(InputStream *ist)
|
yading@10
|
1455 {
|
yading@10
|
1456 if (input_files[ist->file_index]->rate_emu) {
|
yading@10
|
1457 int64_t pts = av_rescale(ist->dts, 1000000, AV_TIME_BASE);
|
yading@10
|
1458 int64_t now = av_gettime() - ist->start;
|
yading@10
|
1459 if (pts > now)
|
yading@10
|
1460 av_usleep(pts - now);
|
yading@10
|
1461 }
|
yading@10
|
1462 }
|
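/* Note on rate emulation (the -re option): input is throttled to real time by
 * sleeping until the wall clock (av_gettime() - ist->start) catches up with the
 * stream DTS, so e.g. a 25 fps input is consumed at roughly 25 packets per second. */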
yading@10
|
1463
|
yading@10
|
1464 int guess_input_channel_layout(InputStream *ist)
|
yading@10
|
1465 {
|
yading@10
|
1466 AVCodecContext *dec = ist->st->codec;
|
yading@10
|
1467
|
yading@10
|
1468 if (!dec->channel_layout) {
|
yading@10
|
1469 char layout_name[256];
|
yading@10
|
1470
|
yading@10
|
1471 if (dec->channels > ist->guess_layout_max)
|
yading@10
|
1472 return 0;
|
yading@10
|
1473 dec->channel_layout = av_get_default_channel_layout(dec->channels);
|
yading@10
|
1474 if (!dec->channel_layout)
|
yading@10
|
1475 return 0;
|
yading@10
|
1476 av_get_channel_layout_string(layout_name, sizeof(layout_name),
|
yading@10
|
1477 dec->channels, dec->channel_layout);
|
yading@10
|
1478 av_log(NULL, AV_LOG_WARNING, "Guessed Channel Layout for Input Stream "
|
yading@10
|
1479 "#%d.%d : %s\n", ist->file_index, ist->st->index, layout_name);
|
yading@10
|
1480 }
|
yading@10
|
1481 return 1;
|
yading@10
|
1482 }
|
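/* For reference, av_get_default_channel_layout() maps e.g. 1 channel to mono,
 * 2 to stereo and 6 to a 5.1 layout; streams with more channels than
 * -guess_layout_max allows (checked above) are left with an unknown layout. */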
yading@10
|
1483
|
yading@10
|
1484 static int decode_audio(InputStream *ist, AVPacket *pkt, int *got_output)
|
yading@10
|
1485 {
|
yading@10
|
1486 AVFrame *decoded_frame, *f;
|
yading@10
|
1487 AVCodecContext *avctx = ist->st->codec;
|
yading@10
|
1488 int i, ret, err = 0, resample_changed;
|
yading@10
|
1489 AVRational decoded_frame_tb;
|
yading@10
|
1490
|
yading@10
|
1491 if (!ist->decoded_frame && !(ist->decoded_frame = av_frame_alloc()))
|
yading@10
|
1492 return AVERROR(ENOMEM);
|
yading@10
|
1493 if (!ist->filter_frame && !(ist->filter_frame = av_frame_alloc()))
|
yading@10
|
1494 return AVERROR(ENOMEM);
|
yading@10
|
1495 decoded_frame = ist->decoded_frame;
|
yading@10
|
1496
|
yading@10
|
1497 update_benchmark(NULL);
|
yading@10
|
1498 ret = avcodec_decode_audio4(avctx, decoded_frame, got_output, pkt);
|
yading@10
|
1499 update_benchmark("decode_audio %d.%d", ist->file_index, ist->st->index);
|
yading@10
|
1500
|
yading@10
|
1501 if (ret >= 0 && avctx->sample_rate <= 0) {
|
yading@10
|
1502 av_log(avctx, AV_LOG_ERROR, "Sample rate %d invalid\n", avctx->sample_rate);
|
yading@10
|
1503 ret = AVERROR_INVALIDDATA;
|
yading@10
|
1504 }
|
yading@10
|
1505
|
yading@10
|
1506 if (*got_output || ret<0 || pkt->size)
|
yading@10
|
1507 decode_error_stat[ret<0] ++;
|
yading@10
|
1508
|
yading@10
|
1509 if (!*got_output || ret < 0) {
|
yading@10
|
1510 if (!pkt->size) {
|
yading@10
|
1511 for (i = 0; i < ist->nb_filters; i++)
|
yading@10
|
1512 #if 1
|
yading@10
|
1513 av_buffersrc_add_ref(ist->filters[i]->filter, NULL, 0);
|
yading@10
|
1514 #else
|
yading@10
|
1515 av_buffersrc_add_frame(ist->filters[i]->filter, NULL);
|
yading@10
|
1516 #endif
|
yading@10
|
1517 }
|
yading@10
|
1518 return ret;
|
yading@10
|
1519 }
|
yading@10
|
1520
|
yading@10
|
1521 #if 1
|
yading@10
|
1522 /* increment next_dts to use for the case where the input stream does not
|
yading@10
|
1523 have timestamps or there are multiple frames in the packet */
|
yading@10
|
1524 ist->next_pts += ((int64_t)AV_TIME_BASE * decoded_frame->nb_samples) /
|
yading@10
|
1525 avctx->sample_rate;
|
yading@10
|
1526 ist->next_dts += ((int64_t)AV_TIME_BASE * decoded_frame->nb_samples) /
|
yading@10
|
1527 avctx->sample_rate;
|
yading@10
|
1528 #endif
|
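/* E.g. a 1024-sample frame at 44100 Hz advances next_pts/next_dts by
 * 1024 * 1000000 / 44100 = 23219 microseconds (AV_TIME_BASE units). */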
yading@10
|
1529
|
yading@10
|
1530 rate_emu_sleep(ist);
|
yading@10
|
1531
|
yading@10
|
1532 resample_changed = ist->resample_sample_fmt != decoded_frame->format ||
|
yading@10
|
1533 ist->resample_channels != avctx->channels ||
|
yading@10
|
1534 ist->resample_channel_layout != decoded_frame->channel_layout ||
|
yading@10
|
1535 ist->resample_sample_rate != decoded_frame->sample_rate;
|
yading@10
|
1536 if (resample_changed) {
|
yading@10
|
1537 char layout1[64], layout2[64];
|
yading@10
|
1538
|
yading@10
|
1539 if (!guess_input_channel_layout(ist)) {
|
yading@10
|
1540 av_log(NULL, AV_LOG_FATAL, "Unable to find default channel "
|
yading@10
|
1541 "layout for Input Stream #%d.%d\n", ist->file_index,
|
yading@10
|
1542 ist->st->index);
|
yading@10
|
1543 exit(1);
|
yading@10
|
1544 }
|
yading@10
|
1545 decoded_frame->channel_layout = avctx->channel_layout;
|
yading@10
|
1546
|
yading@10
|
1547 av_get_channel_layout_string(layout1, sizeof(layout1), ist->resample_channels,
|
yading@10
|
1548 ist->resample_channel_layout);
|
yading@10
|
1549 av_get_channel_layout_string(layout2, sizeof(layout2), avctx->channels,
|
yading@10
|
1550 decoded_frame->channel_layout);
|
yading@10
|
1551
|
yading@10
|
1552 av_log(NULL, AV_LOG_INFO,
|
yading@10
|
1553 "Input stream #%d:%d frame changed from rate:%d fmt:%s ch:%d chl:%s to rate:%d fmt:%s ch:%d chl:%s\n",
|
yading@10
|
1554 ist->file_index, ist->st->index,
|
yading@10
|
1555 ist->resample_sample_rate, av_get_sample_fmt_name(ist->resample_sample_fmt),
|
yading@10
|
1556 ist->resample_channels, layout1,
|
yading@10
|
1557 decoded_frame->sample_rate, av_get_sample_fmt_name(decoded_frame->format),
|
yading@10
|
1558 avctx->channels, layout2);
|
yading@10
|
1559
|
yading@10
|
1560 ist->resample_sample_fmt = decoded_frame->format;
|
yading@10
|
1561 ist->resample_sample_rate = decoded_frame->sample_rate;
|
yading@10
|
1562 ist->resample_channel_layout = decoded_frame->channel_layout;
|
yading@10
|
1563 ist->resample_channels = avctx->channels;
|
yading@10
|
1564
|
yading@10
|
1565 for (i = 0; i < nb_filtergraphs; i++)
|
yading@10
|
1566 if (ist_in_filtergraph(filtergraphs[i], ist)) {
|
yading@10
|
1567 FilterGraph *fg = filtergraphs[i];
|
yading@10
|
1568 int j;
|
yading@10
|
1569 if (configure_filtergraph(fg) < 0) {
|
yading@10
|
1570 av_log(NULL, AV_LOG_FATAL, "Error reinitializing filters!\n");
|
yading@10
|
1571 exit(1);
|
yading@10
|
1572 }
|
yading@10
|
1573 for (j = 0; j < fg->nb_outputs; j++) {
|
yading@10
|
1574 OutputStream *ost = fg->outputs[j]->ost;
|
yading@10
|
1575 if (ost->enc->type == AVMEDIA_TYPE_AUDIO &&
|
yading@10
|
1576 !(ost->enc->capabilities & CODEC_CAP_VARIABLE_FRAME_SIZE))
|
yading@10
|
1577 av_buffersink_set_frame_size(ost->filter->filter,
|
yading@10
|
1578 ost->st->codec->frame_size);
|
yading@10
|
1579 }
|
yading@10
|
1580 }
|
yading@10
|
1581 }
|
yading@10
|
1582
|
yading@10
|
1583 /* if the decoder provides a pts, use it instead of the last packet pts.
|
yading@10
|
1584 the decoder could be delaying output by a packet or more. */
|
yading@10
|
1585 if (decoded_frame->pts != AV_NOPTS_VALUE) {
|
yading@10
|
1586 ist->dts = ist->next_dts = ist->pts = ist->next_pts = av_rescale_q(decoded_frame->pts, avctx->time_base, AV_TIME_BASE_Q);
|
yading@10
|
1587 decoded_frame_tb = avctx->time_base;
|
yading@10
|
1588 } else if (decoded_frame->pkt_pts != AV_NOPTS_VALUE) {
|
yading@10
|
1589 decoded_frame->pts = decoded_frame->pkt_pts;
|
yading@10
|
1590 pkt->pts = AV_NOPTS_VALUE;
|
yading@10
|
1591 decoded_frame_tb = ist->st->time_base;
|
yading@10
|
1592 } else if (pkt->pts != AV_NOPTS_VALUE) {
|
yading@10
|
1593 decoded_frame->pts = pkt->pts;
|
yading@10
|
1594 pkt->pts = AV_NOPTS_VALUE;
|
yading@10
|
1595 decoded_frame_tb = ist->st->time_base;
|
yading@10
|
1596 }else {
|
yading@10
|
1597 decoded_frame->pts = ist->dts;
|
yading@10
|
1598 decoded_frame_tb = AV_TIME_BASE_Q;
|
yading@10
|
1599 }
|
yading@10
|
1600 if (decoded_frame->pts != AV_NOPTS_VALUE)
|
yading@10
|
1601 decoded_frame->pts = av_rescale_delta(decoded_frame_tb, decoded_frame->pts,
|
yading@10
|
1602 (AVRational){1, ist->st->codec->sample_rate}, decoded_frame->nb_samples, &ist->filter_in_rescale_delta_last,
|
yading@10
|
1603 (AVRational){1, ist->st->codec->sample_rate});
|
yading@10
|
1604 for (i = 0; i < ist->nb_filters; i++) {
|
yading@10
|
1605 if (i < ist->nb_filters - 1) {
|
yading@10
|
1606 f = ist->filter_frame;
|
yading@10
|
1607 err = av_frame_ref(f, decoded_frame);
|
yading@10
|
1608 if (err < 0)
|
yading@10
|
1609 break;
|
yading@10
|
1610 } else
|
yading@10
|
1611 f = decoded_frame;
|
yading@10
|
1612 err = av_buffersrc_add_frame_flags(ist->filters[i]->filter, f,
|
yading@10
|
1613 AV_BUFFERSRC_FLAG_PUSH);
|
yading@10
|
1614 if (err < 0)
|
yading@10
|
1615 break;
|
yading@10
|
1616 }
|
yading@10
|
1617 decoded_frame->pts = AV_NOPTS_VALUE;
|
yading@10
|
1618
|
yading@10
|
1619 av_frame_unref(ist->filter_frame);
|
yading@10
|
1620 av_frame_unref(decoded_frame);
|
yading@10
|
1621 return err < 0 ? err : ret;
|
yading@10
|
1622 }
|
yading@10
|
1623
|
yading@10
|
1624 static int decode_video(InputStream *ist, AVPacket *pkt, int *got_output)
|
yading@10
|
1625 {
|
yading@10
|
1626 AVFrame *decoded_frame, *f;
|
yading@10
|
1627 void *buffer_to_free = NULL;
|
yading@10
|
1628 int i, ret = 0, err = 0, resample_changed;
|
yading@10
|
1629 int64_t best_effort_timestamp;
|
yading@10
|
1630 AVRational *frame_sample_aspect;
|
yading@10
|
1631
|
yading@10
|
1632 if (!ist->decoded_frame && !(ist->decoded_frame = av_frame_alloc()))
|
yading@10
|
1633 return AVERROR(ENOMEM);
|
yading@10
|
1634 if (!ist->filter_frame && !(ist->filter_frame = av_frame_alloc()))
|
yading@10
|
1635 return AVERROR(ENOMEM);
|
yading@10
|
1636 decoded_frame = ist->decoded_frame;
|
yading@10
|
1637 pkt->dts = av_rescale_q(ist->dts, AV_TIME_BASE_Q, ist->st->time_base);
|
yading@10
|
1638
|
yading@10
|
1639 update_benchmark(NULL);
|
yading@10
|
1640 ret = avcodec_decode_video2(ist->st->codec,
|
yading@10
|
1641 decoded_frame, got_output, pkt);
|
yading@10
|
1642 update_benchmark("decode_video %d.%d", ist->file_index, ist->st->index);
|
yading@10
|
1643
|
yading@10
|
1644 if (*got_output || ret<0 || pkt->size)
|
yading@10
|
1645 decode_error_stat[ret<0] ++;
|
yading@10
|
1646
|
yading@10
|
1647 if (!*got_output || ret < 0) {
|
yading@10
|
1648 if (!pkt->size) {
|
yading@10
|
1649 for (i = 0; i < ist->nb_filters; i++)
|
yading@10
|
1650 #if 1
|
yading@10
|
1651 av_buffersrc_add_ref(ist->filters[i]->filter, NULL, 0);
|
yading@10
|
1652 #else
|
yading@10
|
1653 av_buffersrc_add_frame(ist->filters[i]->filter, NULL);
|
yading@10
|
1654 #endif
|
yading@10
|
1655 }
|
yading@10
|
1656 return ret;
|
yading@10
|
1657 }
|
yading@10
|
1658
|
yading@10
|
1659 if(ist->top_field_first>=0)
|
yading@10
|
1660 decoded_frame->top_field_first = ist->top_field_first;
|
yading@10
|
1661
|
yading@10
|
1662 best_effort_timestamp= av_frame_get_best_effort_timestamp(decoded_frame);
|
yading@10
|
1663 if(best_effort_timestamp != AV_NOPTS_VALUE)
|
yading@10
|
1664 ist->next_pts = ist->pts = av_rescale_q(decoded_frame->pts = best_effort_timestamp, ist->st->time_base, AV_TIME_BASE_Q);
|
yading@10
|
1665
|
yading@10
|
1666 if (debug_ts) {
|
yading@10
|
1667 av_log(NULL, AV_LOG_INFO, "decoder -> ist_index:%d type:video "
|
yading@10
|
1668 "frame_pts:%s frame_pts_time:%s best_effort_ts:%"PRId64" best_effort_ts_time:%s keyframe:%d frame_type:%d \n",
|
yading@10
|
1669 ist->st->index, av_ts2str(decoded_frame->pts),
|
yading@10
|
1670 av_ts2timestr(decoded_frame->pts, &ist->st->time_base),
|
yading@10
|
1671 best_effort_timestamp,
|
yading@10
|
1672 av_ts2timestr(best_effort_timestamp, &ist->st->time_base),
|
yading@10
|
1673 decoded_frame->key_frame, decoded_frame->pict_type);
|
yading@10
|
1674 }
|
yading@10
|
1675
|
yading@10
|
1676 pkt->size = 0;
|
yading@10
|
1677
|
yading@10
|
1678 rate_emu_sleep(ist);
|
yading@10
|
1679
|
yading@10
|
1680 if (ist->st->sample_aspect_ratio.num)
|
yading@10
|
1681 decoded_frame->sample_aspect_ratio = ist->st->sample_aspect_ratio;
|
yading@10
|
1682
|
yading@10
|
1683 resample_changed = ist->resample_width != decoded_frame->width ||
|
yading@10
|
1684 ist->resample_height != decoded_frame->height ||
|
yading@10
|
1685 ist->resample_pix_fmt != decoded_frame->format;
|
yading@10
|
1686 if (resample_changed) {
|
yading@10
|
1687 av_log(NULL, AV_LOG_INFO,
|
yading@10
|
1688 "Input stream #%d:%d frame changed from size:%dx%d fmt:%s to size:%dx%d fmt:%s\n",
|
yading@10
|
1689 ist->file_index, ist->st->index,
|
yading@10
|
1690 ist->resample_width, ist->resample_height, av_get_pix_fmt_name(ist->resample_pix_fmt),
|
yading@10
|
1691 decoded_frame->width, decoded_frame->height, av_get_pix_fmt_name(decoded_frame->format));
|
yading@10
|
1692
|
yading@10
|
1693 ist->resample_width = decoded_frame->width;
|
yading@10
|
1694 ist->resample_height = decoded_frame->height;
|
yading@10
|
1695 ist->resample_pix_fmt = decoded_frame->format;
|
yading@10
|
1696
|
yading@10
|
1697 for (i = 0; i < nb_filtergraphs; i++) {
|
yading@10
|
1698 if (ist_in_filtergraph(filtergraphs[i], ist) && ist->reinit_filters &&
|
yading@10
|
1699 configure_filtergraph(filtergraphs[i]) < 0) {
|
yading@10
|
1700 av_log(NULL, AV_LOG_FATAL, "Error reinitializing filters!\n");
|
yading@10
|
1701 exit(1);
|
yading@10
|
1702 }
|
yading@10
|
1703 }
|
yading@10
|
1704 }
|
yading@10
|
1705
|
yading@10
|
1706 frame_sample_aspect= av_opt_ptr(avcodec_get_frame_class(), decoded_frame, "sample_aspect_ratio");
|
yading@10
|
1707 for (i = 0; i < ist->nb_filters; i++) {
|
yading@10
|
1708 if (!frame_sample_aspect->num)
|
yading@10
|
1709 *frame_sample_aspect = ist->st->sample_aspect_ratio;
|
yading@10
|
1710
|
yading@10
|
1711 if (i < ist->nb_filters - 1) {
|
yading@10
|
1712 f = ist->filter_frame;
|
yading@10
|
1713 err = av_frame_ref(f, decoded_frame);
|
yading@10
|
1714 if (err < 0)
|
yading@10
|
1715 break;
|
yading@10
|
1716 } else
|
yading@10
|
1717 f = decoded_frame;
|
yading@10
|
1718 ret = av_buffersrc_add_frame_flags(ist->filters[i]->filter, f, AV_BUFFERSRC_FLAG_PUSH);
|
yading@10
|
1719 if (ret < 0) {
|
yading@10
|
1720 av_log(NULL, AV_LOG_FATAL,
|
yading@10
|
1721 "Failed to inject frame into filter network: %s\n", av_err2str(ret));
|
yading@10
|
1722 exit(1);
|
yading@10
|
1723 }
|
yading@10
|
1724 }
|
yading@10
|
1725
|
yading@10
|
1726 av_frame_unref(ist->filter_frame);
|
yading@10
|
1727 av_frame_unref(decoded_frame);
|
yading@10
|
1728 av_free(buffer_to_free);
|
yading@10
|
1729 return err < 0 ? err : ret;
|
yading@10
|
1730 }
|
yading@10
|
1731
|
yading@10
|
1732 static int transcode_subtitles(InputStream *ist, AVPacket *pkt, int *got_output)
|
yading@10
|
1733 {
|
yading@10
|
1734 AVSubtitle subtitle;
|
yading@10
|
1735 int i, ret = avcodec_decode_subtitle2(ist->st->codec,
|
yading@10
|
1736 &subtitle, got_output, pkt);
|
yading@10
|
1737
|
yading@10
|
1738 if (*got_output || ret<0 || pkt->size)
|
yading@10
|
1739 decode_error_stat[ret<0] ++;
|
yading@10
|
1740
|
yading@10
|
1741 if (ret < 0 || !*got_output) {
|
yading@10
|
1742 if (!pkt->size)
|
yading@10
|
1743 sub2video_flush(ist);
|
yading@10
|
1744 return ret;
|
yading@10
|
1745 }
|
yading@10
|
1746
|
yading@10
|
1747 if (ist->fix_sub_duration) {
|
yading@10
|
1748 if (ist->prev_sub.got_output) {
|
yading@10
|
1749 int end = av_rescale(subtitle.pts - ist->prev_sub.subtitle.pts,
|
yading@10
|
1750 1000, AV_TIME_BASE);
|
yading@10
|
1751 if (end < ist->prev_sub.subtitle.end_display_time) {
|
yading@10
|
1752 av_log(ist->st->codec, AV_LOG_DEBUG,
|
yading@10
|
1753 "Subtitle duration reduced from %d to %d\n",
|
yading@10
|
1754 ist->prev_sub.subtitle.end_display_time, end);
|
yading@10
|
1755 ist->prev_sub.subtitle.end_display_time = end;
|
yading@10
|
1756 }
|
yading@10
|
1757 }
|
yading@10
|
1758 FFSWAP(int, *got_output, ist->prev_sub.got_output);
|
yading@10
|
1759 FFSWAP(int, ret, ist->prev_sub.ret);
|
yading@10
|
1760 FFSWAP(AVSubtitle, subtitle, ist->prev_sub.subtitle);
|
yading@10
|
1761 }
|
yading@10
|
1762
|
yading@10
|
1763 sub2video_update(ist, &subtitle);
|
yading@10
|
1764
|
yading@10
|
1765 if (!*got_output || !subtitle.num_rects)
|
yading@10
|
1766 return ret;
|
yading@10
|
1767
|
yading@10
|
1768 rate_emu_sleep(ist);
|
yading@10
|
1769
|
yading@10
|
1770 for (i = 0; i < nb_output_streams; i++) {
|
yading@10
|
1771 OutputStream *ost = output_streams[i];
|
yading@10
|
1772
|
yading@10
|
1773 if (!check_output_constraints(ist, ost) || !ost->encoding_needed)
|
yading@10
|
1774 continue;
|
yading@10
|
1775
|
yading@10
|
1776 do_subtitle_out(output_files[ost->file_index]->ctx, ost, ist, &subtitle);
|
yading@10
|
1777 }
|
yading@10
|
1778
|
yading@10
|
1779 avsubtitle_free(&subtitle);
|
yading@10
|
1780 return ret;
|
yading@10
|
1781 }
|
yading@10
|
1782
|
yading@10
|
1783 /* pkt = NULL means EOF (needed to flush decoder buffers) */
|
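/* E.g. the transcode loop flushes a decoding stream at end of input by calling
 * output_packet(ist, NULL); that takes the handle_eof path below until the
 * decoder stops producing output. */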
yading@10
|
1784 static int output_packet(InputStream *ist, const AVPacket *pkt)
|
yading@10
|
1785 {
|
yading@10
|
1786 int ret = 0, i;
|
yading@10
|
1787 int got_output = 0;
|
yading@10
|
1788
|
yading@10
|
1789 AVPacket avpkt;
|
yading@10
|
1790 if (!ist->saw_first_ts) {
|
yading@10
|
1791 ist->dts = ist->st->avg_frame_rate.num ? - ist->st->codec->has_b_frames * AV_TIME_BASE / av_q2d(ist->st->avg_frame_rate) : 0;
|
yading@10
|
1792 ist->pts = 0;
|
yading@10
|
1793 if (pkt != NULL && pkt->pts != AV_NOPTS_VALUE && !ist->decoding_needed) {
|
yading@10
|
1794 ist->dts += av_rescale_q(pkt->pts, ist->st->time_base, AV_TIME_BASE_Q);
|
yading@10
|
1795 ist->pts = ist->dts; // unused, but better to set it to a value that's not totally wrong
|
yading@10
|
1796 }
|
yading@10
|
1797 ist->saw_first_ts = 1;
|
yading@10
|
1798 }
|
yading@10
|
1799
|
yading@10
|
1800 if (ist->next_dts == AV_NOPTS_VALUE)
|
yading@10
|
1801 ist->next_dts = ist->dts;
|
yading@10
|
1802 if (ist->next_pts == AV_NOPTS_VALUE)
|
yading@10
|
1803 ist->next_pts = ist->pts;
|
yading@10
|
1804
|
yading@10
|
1805 if (pkt == NULL) {
|
yading@10
|
1806 /* EOF handling */
|
yading@10
|
1807 av_init_packet(&avpkt);
|
yading@10
|
1808 avpkt.data = NULL;
|
yading@10
|
1809 avpkt.size = 0;
|
yading@10
|
1810 goto handle_eof;
|
yading@10
|
1811 } else {
|
yading@10
|
1812 avpkt = *pkt;
|
yading@10
|
1813 }
|
yading@10
|
1814
|
yading@10
|
1815 if (pkt->dts != AV_NOPTS_VALUE) {
|
yading@10
|
1816 ist->next_dts = ist->dts = av_rescale_q(pkt->dts, ist->st->time_base, AV_TIME_BASE_Q);
|
yading@10
|
1817 if (ist->st->codec->codec_type != AVMEDIA_TYPE_VIDEO || !ist->decoding_needed)
|
yading@10
|
1818 ist->next_pts = ist->pts = ist->dts;
|
yading@10
|
1819 }
|
yading@10
|
1820
|
yading@10
|
1821 // while we have more to decode or while the decoder did output something on EOF
|
yading@10
|
1822 while (ist->decoding_needed && (avpkt.size > 0 || (!pkt && got_output))) {
|
yading@10
|
1823 int duration;
|
yading@10
|
1824 handle_eof:
|
yading@10
|
1825
|
yading@10
|
1826 ist->pts = ist->next_pts;
|
yading@10
|
1827 ist->dts = ist->next_dts;
|
yading@10
|
1828
|
yading@10
|
1829 if (avpkt.size && avpkt.size != pkt->size) {
|
yading@10
|
1830 av_log(NULL, ist->showed_multi_packet_warning ? AV_LOG_VERBOSE : AV_LOG_WARNING,
|
yading@10
|
1831 "Multiple frames in a packet from stream %d\n", pkt->stream_index);
|
yading@10
|
1832 ist->showed_multi_packet_warning = 1;
|
yading@10
|
1833 }
|
yading@10
|
1834
|
yading@10
|
1835 switch (ist->st->codec->codec_type) {
|
yading@10
|
1836 case AVMEDIA_TYPE_AUDIO:
|
yading@10
|
1837 ret = decode_audio (ist, &avpkt, &got_output);
|
yading@10
|
1838 break;
|
yading@10
|
1839 case AVMEDIA_TYPE_VIDEO:
|
yading@10
|
1840 ret = decode_video (ist, &avpkt, &got_output);
|
yading@10
|
1841 if (avpkt.duration) {
|
yading@10
|
1842 duration = av_rescale_q(avpkt.duration, ist->st->time_base, AV_TIME_BASE_Q);
|
yading@10
|
1843 } else if(ist->st->codec->time_base.num != 0 && ist->st->codec->time_base.den != 0) {
|
yading@10
|
1844 int ticks= ist->st->parser ? ist->st->parser->repeat_pict+1 : ist->st->codec->ticks_per_frame;
|
yading@10
|
1845 duration = ((int64_t)AV_TIME_BASE *
|
yading@10
|
1846 ist->st->codec->time_base.num * ticks) /
|
yading@10
|
1847 ist->st->codec->time_base.den;
|
yading@10
|
1848 } else
|
yading@10
|
1849 duration = 0;
|
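/* In the time_base branch above, e.g. a stream with time_base 1/50 and
 * ticks_per_frame 2 (typical for 25 fps H.264/MPEG-2) gives
 * duration = 1000000 * 1 * 2 / 50 = 40000 microseconds per frame. */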
yading@10
|
1850
|
yading@10
|
1851 if(ist->dts != AV_NOPTS_VALUE && duration) {
|
yading@10
|
1852 ist->next_dts += duration;
|
yading@10
|
1853 }else
|
yading@10
|
1854 ist->next_dts = AV_NOPTS_VALUE;
|
yading@10
|
1855
|
yading@10
|
1856 if (got_output)
|
yading@10
|
1857 ist->next_pts += duration; //FIXME the duration is not correct in some cases
|
yading@10
|
1858 break;
|
yading@10
|
1859 case AVMEDIA_TYPE_SUBTITLE:
|
yading@10
|
1860 ret = transcode_subtitles(ist, &avpkt, &got_output);
|
yading@10
|
1861 break;
|
yading@10
|
1862 default:
|
yading@10
|
1863 return -1;
|
yading@10
|
1864 }
|
yading@10
|
1865
|
yading@10
|
1866 if (ret < 0)
|
yading@10
|
1867 return ret;
|
yading@10
|
1868
|
yading@10
|
1869 avpkt.dts=
|
yading@10
|
1870 avpkt.pts= AV_NOPTS_VALUE;
|
yading@10
|
1871
|
yading@10
|
1872 // touch data and size only if not EOF
|
yading@10
|
1873 if (pkt) {
|
yading@10
|
1874 if(ist->st->codec->codec_type != AVMEDIA_TYPE_AUDIO)
|
yading@10
|
1875 ret = avpkt.size;
|
yading@10
|
1876 avpkt.data += ret;
|
yading@10
|
1877 avpkt.size -= ret;
|
yading@10
|
1878 }
|
yading@10
|
1879 if (!got_output) {
|
yading@10
|
1880 continue;
|
yading@10
|
1881 }
|
yading@10
|
1882 }
|
yading@10
|
1883
|
yading@10
|
1884 /* handle stream copy */
|
yading@10
|
1885 if (!ist->decoding_needed) {
|
yading@10
|
1886 rate_emu_sleep(ist);
|
yading@10
|
1887 ist->dts = ist->next_dts;
|
yading@10
|
1888 switch (ist->st->codec->codec_type) {
|
yading@10
|
1889 case AVMEDIA_TYPE_AUDIO:
|
yading@10
|
1890 ist->next_dts += ((int64_t)AV_TIME_BASE * ist->st->codec->frame_size) /
|
yading@10
|
1891 ist->st->codec->sample_rate;
|
yading@10
|
1892 break;
|
yading@10
|
1893 case AVMEDIA_TYPE_VIDEO:
|
yading@10
|
1894 if (ist->framerate.num) {
|
yading@10
|
1895 // TODO: Remove work-around for c99-to-c89 issue 7
|
yading@10
|
1896 AVRational time_base_q = AV_TIME_BASE_Q;
|
yading@10
|
1897 int64_t next_dts = av_rescale_q(ist->next_dts, time_base_q, av_inv_q(ist->framerate));
|
yading@10
|
1898 ist->next_dts = av_rescale_q(next_dts + 1, av_inv_q(ist->framerate), time_base_q);
|
yading@10
|
1899 } else if (pkt->duration) {
|
yading@10
|
1900 ist->next_dts += av_rescale_q(pkt->duration, ist->st->time_base, AV_TIME_BASE_Q);
|
yading@10
|
1901 } else if(ist->st->codec->time_base.num != 0) {
|
yading@10
|
1902 int ticks= ist->st->parser ? ist->st->parser->repeat_pict + 1 : ist->st->codec->ticks_per_frame;
|
yading@10
|
1903 ist->next_dts += ((int64_t)AV_TIME_BASE *
|
yading@10
|
1904 ist->st->codec->time_base.num * ticks) /
|
yading@10
|
1905 ist->st->codec->time_base.den;
|
yading@10
|
1906 }
|
yading@10
|
1907 break;
|
yading@10
|
1908 }
|
yading@10
|
1909 ist->pts = ist->dts;
|
yading@10
|
1910 ist->next_pts = ist->next_dts;
|
yading@10
|
1911 }
|
yading@10
|
1912 for (i = 0; pkt && i < nb_output_streams; i++) {
|
yading@10
|
1913 OutputStream *ost = output_streams[i];
|
yading@10
|
1914
|
yading@10
|
1915 if (!check_output_constraints(ist, ost) || ost->encoding_needed)
|
yading@10
|
1916 continue;
|
yading@10
|
1917
|
yading@10
|
1918 do_streamcopy(ist, ost, pkt);
|
yading@10
|
1919 }
|
yading@10
|
1920
|
yading@10
|
1921 return 0;
|
yading@10
|
1922 }
|
yading@10
|
1923
|
yading@10
|
1924 static void print_sdp(void)
|
yading@10
|
1925 {
|
yading@10
|
1926 char sdp[16384];
|
yading@10
|
1927 int i;
|
yading@10
|
1928 AVFormatContext **avc = av_malloc(sizeof(*avc) * nb_output_files);
|
yading@10
|
1929
|
yading@10
|
1930 if (!avc)
|
yading@10
|
1931 exit(1);
|
yading@10
|
1932 for (i = 0; i < nb_output_files; i++)
|
yading@10
|
1933 avc[i] = output_files[i]->ctx;
|
yading@10
|
1934
|
yading@10
|
1935 av_sdp_create(avc, nb_output_files, sdp, sizeof(sdp));
|
yading@10
|
1936 printf("SDP:\n%s\n", sdp);
|
yading@10
|
1937 fflush(stdout);
|
yading@10
|
1938 av_freep(&avc);
|
yading@10
|
1939 }
|
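/* The SDP is only printed when the outputs are RTP muxers (see the want_sdp
 * handling in transcode_init()); e.g.
 *   ffmpeg -re -i input.mp4 -an -f rtp rtp://127.0.0.1:1234
 * emits an "SDP:" block like the one produced here on stdout. */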
yading@10
|
1940
|
yading@10
|
1941 static int init_input_stream(int ist_index, char *error, int error_len)
|
yading@10
|
1942 {
|
yading@10
|
1943 int ret;
|
yading@10
|
1944 InputStream *ist = input_streams[ist_index];
|
yading@10
|
1945
|
yading@10
|
1946 if (ist->decoding_needed) {
|
yading@10
|
1947 AVCodec *codec = ist->dec;
|
yading@10
|
1948 if (!codec) {
|
yading@10
|
1949 snprintf(error, error_len, "Decoder (codec %s) not found for input stream #%d:%d",
|
yading@10
|
1950 avcodec_get_name(ist->st->codec->codec_id), ist->file_index, ist->st->index);
|
yading@10
|
1951 return AVERROR(EINVAL);
|
yading@10
|
1952 }
|
yading@10
|
1953
|
yading@10
|
1954 av_opt_set_int(ist->st->codec, "refcounted_frames", 1, 0);
|
yading@10
|
1955
|
yading@10
|
1956 if (!av_dict_get(ist->opts, "threads", NULL, 0))
|
yading@10
|
1957 av_dict_set(&ist->opts, "threads", "auto", 0);
|
yading@10
|
1958 if ((ret = avcodec_open2(ist->st->codec, codec, &ist->opts)) < 0) {
|
yading@10
|
1959 if (ret == AVERROR_EXPERIMENTAL)
|
yading@10
|
1960 abort_codec_experimental(codec, 0);
|
yading@10
|
1961 snprintf(error, error_len, "Error while opening decoder for input stream #%d:%d",
|
yading@10
|
1962 ist->file_index, ist->st->index);
|
yading@10
|
1963 return ret;
|
yading@10
|
1964 }
|
yading@10
|
1965 assert_avoptions(ist->opts);
|
yading@10
|
1966 }
|
yading@10
|
1967
|
yading@10
|
1968 ist->next_pts = AV_NOPTS_VALUE;
|
yading@10
|
1969 ist->next_dts = AV_NOPTS_VALUE;
|
yading@10
|
1970 ist->is_start = 1;
|
yading@10
|
1971
|
yading@10
|
1972 return 0;
|
yading@10
|
1973 }
|
yading@10
|
1974
|
yading@10
|
1975 static InputStream *get_input_stream(OutputStream *ost)
|
yading@10
|
1976 {
|
yading@10
|
1977 if (ost->source_index >= 0)
|
yading@10
|
1978 return input_streams[ost->source_index];
|
yading@10
|
1979 return NULL;
|
yading@10
|
1980 }
|
yading@10
|
1981
|
yading@10
|
1982 static int compare_int64(const void *a, const void *b)
|
yading@10
|
1983 {
|
yading@10
|
1984 int64_t va = *(int64_t *)a, vb = *(int64_t *)b;
|
yading@10
|
1985 return va < vb ? -1 : va > vb ? +1 : 0;
|
yading@10
|
1986 }
|
yading@10
|
1987
|
yading@10
|
1988 static void parse_forced_key_frames(char *kf, OutputStream *ost,
|
yading@10
|
1989 AVCodecContext *avctx)
|
yading@10
|
1990 {
|
yading@10
|
1991 char *p;
|
yading@10
|
1992 int n = 1, i, size, index = 0;
|
yading@10
|
1993 int64_t t, *pts;
|
yading@10
|
1994
|
yading@10
|
1995 for (p = kf; *p; p++)
|
yading@10
|
1996 if (*p == ',')
|
yading@10
|
1997 n++;
|
yading@10
|
1998 size = n;
|
yading@10
|
1999 pts = av_malloc(sizeof(*pts) * size);
|
yading@10
|
2000 if (!pts) {
|
yading@10
|
2001 av_log(NULL, AV_LOG_FATAL, "Could not allocate forced key frames array.\n");
|
yading@10
|
2002 exit(1);
|
yading@10
|
2003 }
|
yading@10
|
2004
|
yading@10
|
2005 p = kf;
|
yading@10
|
2006 for (i = 0; i < n; i++) {
|
yading@10
|
2007 char *next = strchr(p, ',');
|
yading@10
|
2008
|
yading@10
|
2009 if (next)
|
yading@10
|
2010 *next++ = 0;
|
yading@10
|
2011
|
yading@10
|
2012 if (!memcmp(p, "chapters", 8)) {
|
yading@10
|
2013
|
yading@10
|
2014 AVFormatContext *avf = output_files[ost->file_index]->ctx;
|
yading@10
|
2015 int j;
|
yading@10
|
2016
|
yading@10
|
2017 if (avf->nb_chapters > INT_MAX - size ||
|
yading@10
|
2018 !(pts = av_realloc_f(pts, size += avf->nb_chapters - 1,
|
yading@10
|
2019 sizeof(*pts)))) {
|
yading@10
|
2020 av_log(NULL, AV_LOG_FATAL,
|
yading@10
|
2021 "Could not allocate forced key frames array.\n");
|
yading@10
|
2022 exit(1);
|
yading@10
|
2023 }
|
yading@10
|
2024 t = p[8] ? parse_time_or_die("force_key_frames", p + 8, 1) : 0;
|
yading@10
|
2025 t = av_rescale_q(t, AV_TIME_BASE_Q, avctx->time_base);
|
yading@10
|
2026
|
yading@10
|
2027 for (j = 0; j < avf->nb_chapters; j++) {
|
yading@10
|
2028 AVChapter *c = avf->chapters[j];
|
yading@10
|
2029 av_assert1(index < size);
|
yading@10
|
2030 pts[index++] = av_rescale_q(c->start, c->time_base,
|
yading@10
|
2031 avctx->time_base) + t;
|
yading@10
|
2032 }
|
yading@10
|
2033
|
yading@10
|
2034 } else {
|
yading@10
|
2035
|
yading@10
|
2036 t = parse_time_or_die("force_key_frames", p, 1);
|
yading@10
|
2037 av_assert1(index < size);
|
yading@10
|
2038 pts[index++] = av_rescale_q(t, AV_TIME_BASE_Q, avctx->time_base);
|
yading@10
|
2039
|
yading@10
|
2040 }
|
yading@10
|
2041
|
yading@10
|
2042 p = next;
|
yading@10
|
2043 }
|
yading@10
|
2044
|
yading@10
|
2045 av_assert0(index == size);
|
yading@10
|
2046 qsort(pts, size, sizeof(*pts), compare_int64);
|
yading@10
|
2047 ost->forced_kf_count = size;
|
yading@10
|
2048 ost->forced_kf_pts = pts;
|
yading@10
|
2049 }
|
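/* Examples of -force_key_frames values handled above:
 *   -force_key_frames 0,10,20        keyframes at 0 s, 10 s and 20 s
 *   -force_key_frames chapters-0.1   a keyframe 0.1 s before each chapter start
 * Times go through parse_time_or_die(), so "[-][HH:]MM:SS[.m]"-style values work too. */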
yading@10
|
2050
|
yading@10
|
2051 static void report_new_stream(int input_index, AVPacket *pkt)
|
yading@10
|
2052 {
|
yading@10
|
2053 InputFile *file = input_files[input_index];
|
yading@10
|
2054 AVStream *st = file->ctx->streams[pkt->stream_index];
|
yading@10
|
2055
|
yading@10
|
2056 if (pkt->stream_index < file->nb_streams_warn)
|
yading@10
|
2057 return;
|
yading@10
|
2058 av_log(file->ctx, AV_LOG_WARNING,
|
yading@10
|
2059 "New %s stream %d:%d at pos:%"PRId64" and DTS:%ss\n",
|
yading@10
|
2060 av_get_media_type_string(st->codec->codec_type),
|
yading@10
|
2061 input_index, pkt->stream_index,
|
yading@10
|
2062 pkt->pos, av_ts2timestr(pkt->dts, &st->time_base));
|
yading@10
|
2063 file->nb_streams_warn = pkt->stream_index + 1;
|
yading@10
|
2064 }
|
yading@10
|
2065
|
yading@10
|
2066 static int transcode_init(void)
|
yading@10
|
2067 {
|
yading@10
|
2068 int ret = 0, i, j, k;
|
yading@10
|
2069 AVFormatContext *oc;
|
yading@10
|
2070 AVCodecContext *codec;
|
yading@10
|
2071 OutputStream *ost;
|
yading@10
|
2072 InputStream *ist;
|
yading@10
|
2073 char error[1024];
|
yading@10
|
2074 int want_sdp = 1;
|
yading@10
|
2075
|
yading@10
|
2076 /* init framerate emulation */
|
yading@10
|
2077 for (i = 0; i < nb_input_files; i++) {
|
yading@10
|
2078 InputFile *ifile = input_files[i];
|
yading@10
|
2079 if (ifile->rate_emu)
|
yading@10
|
2080 for (j = 0; j < ifile->nb_streams; j++)
|
yading@10
|
2081 input_streams[j + ifile->ist_index]->start = av_gettime();
|
yading@10
|
2082 }
|
yading@10
|
2083
|
yading@10
|
2084 /* output stream init */
|
yading@10
|
2085 for (i = 0; i < nb_output_files; i++) {
|
yading@10
|
2086 oc = output_files[i]->ctx;
|
yading@10
|
2087 if (!oc->nb_streams && !(oc->oformat->flags & AVFMT_NOSTREAMS)) {
|
yading@10
|
2088 av_dump_format(oc, i, oc->filename, 1);
|
yading@10
|
2089 av_log(NULL, AV_LOG_ERROR, "Output file #%d does not contain any stream\n", i);
|
yading@10
|
2090 return AVERROR(EINVAL);
|
yading@10
|
2091 }
|
yading@10
|
2092 }
|
yading@10
|
2093
|
yading@10
|
2094 /* init complex filtergraphs */
|
yading@10
|
2095 for (i = 0; i < nb_filtergraphs; i++)
|
yading@10
|
2096 if ((ret = avfilter_graph_config(filtergraphs[i]->graph, NULL)) < 0)
|
yading@10
|
2097 return ret;
|
yading@10
|
2098
|
yading@10
|
2099 /* for each output stream, we compute the right encoding parameters */
|
yading@10
|
2100 for (i = 0; i < nb_output_streams; i++) {
|
yading@10
|
2101 AVCodecContext *icodec = NULL;
|
yading@10
|
2102 ost = output_streams[i];
|
yading@10
|
2103 oc = output_files[ost->file_index]->ctx;
|
yading@10
|
2104 ist = get_input_stream(ost);
|
yading@10
|
2105
|
yading@10
|
2106 if (ost->attachment_filename)
|
yading@10
|
2107 continue;
|
yading@10
|
2108
|
yading@10
|
2109 codec = ost->st->codec;
|
yading@10
|
2110
|
yading@10
|
2111 if (ist) {
|
yading@10
|
2112 icodec = ist->st->codec;
|
yading@10
|
2113
|
yading@10
|
2114 ost->st->disposition = ist->st->disposition;
|
yading@10
|
2115 codec->bits_per_raw_sample = icodec->bits_per_raw_sample;
|
yading@10
|
2116 codec->chroma_sample_location = icodec->chroma_sample_location;
|
yading@10
|
2117 }
|
yading@10
|
2118
|
yading@10
|
2119 if (ost->stream_copy) {
|
yading@10
|
2120 uint64_t extra_size;
|
yading@10
|
2121
|
yading@10
|
2122 av_assert0(ist && !ost->filter);
|
yading@10
|
2123
|
yading@10
|
2124 extra_size = (uint64_t)icodec->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE;
|
yading@10
|
2125
|
yading@10
|
2126 if (extra_size > INT_MAX) {
|
yading@10
|
2127 return AVERROR(EINVAL);
|
yading@10
|
2128 }
|
yading@10
|
2129
|
yading@10
|
2130 /* if stream_copy is selected, no need to decode or encode */
|
yading@10
|
2131 codec->codec_id = icodec->codec_id;
|
yading@10
|
2132 codec->codec_type = icodec->codec_type;
|
yading@10
|
2133
|
yading@10
|
2134 if (!codec->codec_tag) {
|
yading@10
|
2135 unsigned int codec_tag;
|
yading@10
|
2136 if (!oc->oformat->codec_tag ||
|
yading@10
|
2137 av_codec_get_id (oc->oformat->codec_tag, icodec->codec_tag) == codec->codec_id ||
|
yading@10
|
2138 !av_codec_get_tag2(oc->oformat->codec_tag, icodec->codec_id, &codec_tag))
|
yading@10
|
2139 codec->codec_tag = icodec->codec_tag;
|
yading@10
|
2140 }
|
yading@10
|
2141
|
yading@10
|
2142 codec->bit_rate = icodec->bit_rate;
|
yading@10
|
2143 codec->rc_max_rate = icodec->rc_max_rate;
|
yading@10
|
2144 codec->rc_buffer_size = icodec->rc_buffer_size;
|
yading@10
|
2145 codec->field_order = icodec->field_order;
|
yading@10
|
2146 codec->extradata = av_mallocz(extra_size);
|
yading@10
|
2147 if (!codec->extradata) {
|
yading@10
|
2148 return AVERROR(ENOMEM);
|
yading@10
|
2149 }
|
yading@10
|
2150 memcpy(codec->extradata, icodec->extradata, icodec->extradata_size);
|
yading@10
|
2151 codec->extradata_size= icodec->extradata_size;
|
yading@10
|
2152 codec->bits_per_coded_sample = icodec->bits_per_coded_sample;
|
yading@10
|
2153
|
yading@10
|
2154 codec->time_base = ist->st->time_base;
|
yading@10
|
2155 /*
|
yading@10
|
2156 * AVI is a special case here because it supports variable fps but
|
yading@10
|
2157 * having the fps and timebase differ significantly adds quite some
|
yading@10
|
2158 * overhead
|
yading@10
|
2159 */
|
yading@10
|
2160 if(!strcmp(oc->oformat->name, "avi")) {
|
yading@10
|
2161 if ( copy_tb<0 && av_q2d(ist->st->r_frame_rate) >= av_q2d(ist->st->avg_frame_rate)
|
yading@10
|
2162 && 0.5/av_q2d(ist->st->r_frame_rate) > av_q2d(ist->st->time_base)
|
yading@10
|
2163 && 0.5/av_q2d(ist->st->r_frame_rate) > av_q2d(icodec->time_base)
|
yading@10
|
2164 && av_q2d(ist->st->time_base) < 1.0/500 && av_q2d(icodec->time_base) < 1.0/500
|
yading@10
|
2165 || copy_tb==2){
|
yading@10
|
2166 codec->time_base.num = ist->st->r_frame_rate.den;
|
yading@10
|
2167 codec->time_base.den = 2*ist->st->r_frame_rate.num;
|
yading@10
|
2168 codec->ticks_per_frame = 2;
|
yading@10
|
2169 } else if ( copy_tb<0 && av_q2d(icodec->time_base)*icodec->ticks_per_frame > 2*av_q2d(ist->st->time_base)
|
yading@10
|
2170 && av_q2d(ist->st->time_base) < 1.0/500
|
yading@10
|
2171 || copy_tb==0){
|
yading@10
|
2172 codec->time_base = icodec->time_base;
|
yading@10
|
2173 codec->time_base.num *= icodec->ticks_per_frame;
|
yading@10
|
2174 codec->time_base.den *= 2;
|
yading@10
|
2175 codec->ticks_per_frame = 2;
|
yading@10
|
2176 }
|
yading@10
|
2177 } else if(!(oc->oformat->flags & AVFMT_VARIABLE_FPS)
|
yading@10
|
2178 && strcmp(oc->oformat->name, "mov") && strcmp(oc->oformat->name, "mp4") && strcmp(oc->oformat->name, "3gp")
|
yading@10
|
2179 && strcmp(oc->oformat->name, "3g2") && strcmp(oc->oformat->name, "psp") && strcmp(oc->oformat->name, "ipod")
|
yading@10
|
2180 && strcmp(oc->oformat->name, "f4v")
|
yading@10
|
2181 ) {
|
yading@10
|
2182 if( copy_tb<0 && icodec->time_base.den
|
yading@10
|
2183 && av_q2d(icodec->time_base)*icodec->ticks_per_frame > av_q2d(ist->st->time_base)
|
yading@10
|
2184 && av_q2d(ist->st->time_base) < 1.0/500
|
yading@10
|
2185 || copy_tb==0){
|
yading@10
|
2186 codec->time_base = icodec->time_base;
|
yading@10
|
2187 codec->time_base.num *= icodec->ticks_per_frame;
|
yading@10
|
2188 }
|
yading@10
|
2189 }
|
yading@10
|
2190 if ( codec->codec_tag == AV_RL32("tmcd")
|
yading@10
|
2191 && icodec->time_base.num < icodec->time_base.den
|
yading@10
|
2192 && icodec->time_base.num > 0
|
yading@10
|
2193 && 121LL*icodec->time_base.num > icodec->time_base.den) {
|
yading@10
|
2194 codec->time_base = icodec->time_base;
|
yading@10
|
2195 }
|
yading@10
|
2196
|
yading@10
|
2197 if (ist && !ost->frame_rate.num)
|
yading@10
|
2198 ost->frame_rate = ist->framerate;
|
yading@10
|
2199 if(ost->frame_rate.num)
|
yading@10
|
2200 codec->time_base = av_inv_q(ost->frame_rate);
|
yading@10
|
2201
|
yading@10
|
2202 av_reduce(&codec->time_base.num, &codec->time_base.den,
|
yading@10
|
2203 codec->time_base.num, codec->time_base.den, INT_MAX);
|
yading@10
|
2204
|
yading@10
|
2205 switch (codec->codec_type) {
|
yading@10
|
2206 case AVMEDIA_TYPE_AUDIO:
|
yading@10
|
2207 if (audio_volume != 256) {
|
yading@10
|
2208 av_log(NULL, AV_LOG_FATAL, "-acodec copy and -vol are incompatible (frames are not decoded)\n");
|
yading@10
|
2209 exit(1);
|
yading@10
|
2210 }
|
yading@10
|
2211 codec->channel_layout = icodec->channel_layout;
|
yading@10
|
2212 codec->sample_rate = icodec->sample_rate;
|
yading@10
|
2213 codec->channels = icodec->channels;
|
yading@10
|
2214 codec->frame_size = icodec->frame_size;
|
yading@10
|
2215 codec->audio_service_type = icodec->audio_service_type;
|
yading@10
|
2216 codec->block_align = icodec->block_align;
|
yading@10
|
2217 if((codec->block_align == 1 || codec->block_align == 1152 || codec->block_align == 576) && codec->codec_id == AV_CODEC_ID_MP3)
|
yading@10
|
2218 codec->block_align= 0;
|
yading@10
|
2219 if(codec->codec_id == AV_CODEC_ID_AC3)
|
yading@10
|
2220 codec->block_align= 0;
|
yading@10
|
2221 break;
|
yading@10
|
2222 case AVMEDIA_TYPE_VIDEO:
|
yading@10
|
2223 codec->pix_fmt = icodec->pix_fmt;
|
yading@10
|
2224 codec->width = icodec->width;
|
yading@10
|
2225 codec->height = icodec->height;
|
yading@10
|
2226 codec->has_b_frames = icodec->has_b_frames;
|
yading@10
|
2227 if (ost->frame_aspect_ratio.num) { // overridden by the -aspect cli option
|
yading@10
|
2228 codec->sample_aspect_ratio =
|
yading@10
|
2229 ost->st->sample_aspect_ratio =
|
yading@10
|
2230 av_mul_q(ost->frame_aspect_ratio,
|
yading@10
|
2231 (AVRational){ codec->height, codec->width });
|
yading@10
|
2232 av_log(NULL, AV_LOG_WARNING, "Overriding aspect ratio "
|
yading@10
|
2233 "with stream copy may produce invalid files\n");
|
yading@10
|
2234 } else if (!codec->sample_aspect_ratio.num) {
|
yading@10
|
2235 codec->sample_aspect_ratio =
|
yading@10
|
2236 ost->st->sample_aspect_ratio =
|
yading@10
|
2237 ist->st->sample_aspect_ratio.num ? ist->st->sample_aspect_ratio :
|
yading@10
|
2238 ist->st->codec->sample_aspect_ratio.num ?
|
yading@10
|
2239 ist->st->codec->sample_aspect_ratio : (AVRational){0, 1};
|
yading@10
|
2240 }
|
yading@10
|
2241 ost->st->avg_frame_rate = ist->st->avg_frame_rate;
|
yading@10
|
2242 break;
|
yading@10
|
2243 case AVMEDIA_TYPE_SUBTITLE:
|
yading@10
|
2244 codec->width = icodec->width;
|
yading@10
|
2245 codec->height = icodec->height;
|
yading@10
|
2246 break;
|
yading@10
|
2247 case AVMEDIA_TYPE_DATA:
|
yading@10
|
2248 case AVMEDIA_TYPE_ATTACHMENT:
|
yading@10
|
2249 break;
|
yading@10
|
2250 default:
|
yading@10
|
2251 abort();
|
yading@10
|
2252 }
|
yading@10
|
2253 } else {
|
yading@10
|
2254 if (!ost->enc)
|
yading@10
|
2255 ost->enc = avcodec_find_encoder(codec->codec_id);
|
yading@10
|
2256 if (!ost->enc) {
|
yading@10
|
2257 /* should only happen when a default codec is not present. */
|
yading@10
|
2258 snprintf(error, sizeof(error), "Encoder (codec %s) not found for output stream #%d:%d",
|
yading@10
|
2259 avcodec_get_name(ost->st->codec->codec_id), ost->file_index, ost->index);
|
yading@10
|
2260 ret = AVERROR(EINVAL);
|
yading@10
|
2261 goto dump_format;
|
yading@10
|
2262 }
|
yading@10
|
2263
|
yading@10
|
2264 if (ist)
|
yading@10
|
2265 ist->decoding_needed++;
|
yading@10
|
2266 ost->encoding_needed = 1;
|
yading@10
|
2267
|
yading@10
|
2268 if (!ost->filter &&
|
yading@10
|
2269 (codec->codec_type == AVMEDIA_TYPE_VIDEO ||
|
yading@10
|
2270 codec->codec_type == AVMEDIA_TYPE_AUDIO)) {
|
yading@10
|
2271 FilterGraph *fg;
|
yading@10
|
2272 fg = init_simple_filtergraph(ist, ost);
|
yading@10
|
2273 if (configure_filtergraph(fg)) {
|
yading@10
|
2274 av_log(NULL, AV_LOG_FATAL, "Error opening filters!\n");
|
yading@10
|
2275 exit(1);
|
yading@10
|
2276 }
|
yading@10
|
2277 }
|
yading@10
|
2278
|
yading@10
|
2279 if (codec->codec_type == AVMEDIA_TYPE_VIDEO) {
|
yading@10
|
2280 if (ost->filter && !ost->frame_rate.num)
|
yading@10
|
2281 ost->frame_rate = av_buffersink_get_frame_rate(ost->filter->filter);
|
yading@10
|
2282 if (ist && !ost->frame_rate.num)
|
yading@10
|
2283 ost->frame_rate = ist->framerate;
|
yading@10
|
2284 if (ist && !ost->frame_rate.num)
|
yading@10
|
2285 ost->frame_rate = ist->st->r_frame_rate.num ? ist->st->r_frame_rate : (AVRational){25, 1};
|
yading@10
|
2286 // ost->frame_rate = ist->st->avg_frame_rate.num ? ist->st->avg_frame_rate : (AVRational){25, 1};
|
yading@10
|
2287 if (ost->enc && ost->enc->supported_framerates && !ost->force_fps) {
|
yading@10
|
2288 int idx = av_find_nearest_q_idx(ost->frame_rate, ost->enc->supported_framerates);
|
yading@10
|
2289 ost->frame_rate = ost->enc->supported_framerates[idx];
|
yading@10
|
2290 }
|
yading@10
|
2291 }
|
yading@10
|
2292
|
yading@10
|
2293 switch (codec->codec_type) {
|
yading@10
|
2294 case AVMEDIA_TYPE_AUDIO:
|
yading@10
|
2295 codec->sample_fmt = ost->filter->filter->inputs[0]->format;
|
yading@10
|
2296 codec->sample_rate = ost->filter->filter->inputs[0]->sample_rate;
|
yading@10
|
2297 codec->channel_layout = ost->filter->filter->inputs[0]->channel_layout;
|
yading@10
|
2298 codec->channels = avfilter_link_get_channels(ost->filter->filter->inputs[0]);
|
yading@10
|
2299 codec->time_base = (AVRational){ 1, codec->sample_rate };
|
yading@10
|
2300 break;
|
yading@10
|
2301 case AVMEDIA_TYPE_VIDEO:
|
yading@10
|
2302 codec->time_base = av_inv_q(ost->frame_rate);
|
yading@10
|
2303 if (ost->filter && !(codec->time_base.num && codec->time_base.den))
|
yading@10
|
2304 codec->time_base = ost->filter->filter->inputs[0]->time_base;
|
yading@10
|
2305 if ( av_q2d(codec->time_base) < 0.001 && video_sync_method != VSYNC_PASSTHROUGH
|
yading@10
|
2306 && (video_sync_method == VSYNC_CFR || (video_sync_method == VSYNC_AUTO && !(oc->oformat->flags & AVFMT_VARIABLE_FPS)))){
|
yading@10
|
2307 av_log(oc, AV_LOG_WARNING, "Frame rate very high for a muxer not efficiently supporting it.\n"
|
yading@10
|
2308 "Please consider specifying a lower framerate, a different muxer or -vsync 2\n");
|
yading@10
|
2309 }
|
yading@10
|
2310 for (j = 0; j < ost->forced_kf_count; j++)
|
yading@10
|
2311 ost->forced_kf_pts[j] = av_rescale_q(ost->forced_kf_pts[j],
|
yading@10
|
2312 AV_TIME_BASE_Q,
|
yading@10
|
2313 codec->time_base);
|
yading@10
|
2314
|
yading@10
|
2315 codec->width = ost->filter->filter->inputs[0]->w;
|
yading@10
|
2316 codec->height = ost->filter->filter->inputs[0]->h;
|
yading@10
|
2317 codec->sample_aspect_ratio = ost->st->sample_aspect_ratio =
|
yading@10
|
2318 ost->frame_aspect_ratio.num ? // overridden by the -aspect cli option
|
yading@10
|
2319 av_mul_q(ost->frame_aspect_ratio, (AVRational){ codec->height, codec->width }) :
|
yading@10
|
2320 ost->filter->filter->inputs[0]->sample_aspect_ratio;
|
yading@10
|
2321 if (!strncmp(ost->enc->name, "libx264", 7) &&
|
yading@10
|
2322 codec->pix_fmt == AV_PIX_FMT_NONE &&
|
yading@10
|
2323 ost->filter->filter->inputs[0]->format != AV_PIX_FMT_YUV420P)
|
yading@10
|
2324 av_log(NULL, AV_LOG_INFO,
|
yading@10
|
2325 "No pixel format specified, %s for H.264 encoding chosen.\n"
|
yading@10
|
2326 "Use -pix_fmt yuv420p for compatibility with outdated media players.\n",
|
yading@10
|
2327 av_get_pix_fmt_name(ost->filter->filter->inputs[0]->format));
|
yading@10
|
2328 codec->pix_fmt = ost->filter->filter->inputs[0]->format;
|
yading@10
|
2329
|
yading@10
|
2330 if (!icodec ||
|
yading@10
|
2331 codec->width != icodec->width ||
|
yading@10
|
2332 codec->height != icodec->height ||
|
yading@10
|
2333 codec->pix_fmt != icodec->pix_fmt) {
|
yading@10
|
2334 codec->bits_per_raw_sample = frame_bits_per_raw_sample;
|
yading@10
|
2335 }
|
yading@10
|
2336
|
yading@10
|
2337 if (ost->forced_keyframes) {
|
yading@10
|
2338 if (!strncmp(ost->forced_keyframes, "expr:", 5)) {
|
yading@10
|
2339 ret = av_expr_parse(&ost->forced_keyframes_pexpr, ost->forced_keyframes+5,
|
yading@10
|
2340 forced_keyframes_const_names, NULL, NULL, NULL, NULL, 0, NULL);
|
yading@10
|
2341 if (ret < 0) {
|
yading@10
|
2342 av_log(NULL, AV_LOG_ERROR,
|
yading@10
|
2343 "Invalid force_key_frames expression '%s'\n", ost->forced_keyframes+5);
|
yading@10
|
2344 return ret;
|
yading@10
|
2345 }
|
yading@10
|
2346 ost->forced_keyframes_expr_const_values[FKF_N] = 0;
|
yading@10
|
2347 ost->forced_keyframes_expr_const_values[FKF_N_FORCED] = 0;
|
yading@10
|
2348 ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_N] = NAN;
|
yading@10
|
2349 ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_T] = NAN;
|
yading@10
|
2350 } else {
|
yading@10
|
2351 parse_forced_key_frames(ost->forced_keyframes, ost, ost->st->codec);
|
yading@10
|
2352 }
|
yading@10
|
2353 }
|
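/* E.g. -force_key_frames expr:gte(t,n_forced*5) requests a keyframe every
 * 5 seconds; the expression may use the constants n, n_forced, prev_forced_n,
 * prev_forced_t and t (see forced_keyframes_const_names). */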
yading@10
|
2354 break;
|
yading@10
|
2355 case AVMEDIA_TYPE_SUBTITLE:
|
yading@10
|
2356 codec->time_base = (AVRational){1, 1000};
|
yading@10
|
2357 if (!codec->width) {
|
yading@10
|
2358 codec->width = input_streams[ost->source_index]->st->codec->width;
|
yading@10
|
2359 codec->height = input_streams[ost->source_index]->st->codec->height;
|
yading@10
|
2360 }
|
yading@10
|
2361 break;
|
yading@10
|
2362 default:
|
yading@10
|
2363 abort();
|
yading@10
|
2364 break;
|
yading@10
|
2365 }
|
yading@10
|
2366 /* two pass mode */
|
yading@10
|
2367 if (codec->flags & (CODEC_FLAG_PASS1 | CODEC_FLAG_PASS2)) {
|
yading@10
|
2368 char logfilename[1024];
|
yading@10
|
2369 FILE *f;
|
yading@10
|
2370
|
yading@10
|
2371 snprintf(logfilename, sizeof(logfilename), "%s-%d.log",
|
yading@10
|
2372 ost->logfile_prefix ? ost->logfile_prefix :
|
yading@10
|
2373 DEFAULT_PASS_LOGFILENAME_PREFIX,
|
yading@10
|
2374 i);
|
yading@10
|
2375 if (!strcmp(ost->enc->name, "libx264")) {
|
yading@10
|
2376 av_dict_set(&ost->opts, "stats", logfilename, AV_DICT_DONT_OVERWRITE);
|
yading@10
|
2377 } else {
|
yading@10
|
2378 if (codec->flags & CODEC_FLAG_PASS2) {
|
yading@10
|
2379 char *logbuffer;
|
yading@10
|
2380 size_t logbuffer_size;
|
yading@10
|
2381 if (cmdutils_read_file(logfilename, &logbuffer, &logbuffer_size) < 0) {
|
yading@10
|
2382 av_log(NULL, AV_LOG_FATAL, "Error reading log file '%s' for pass-2 encoding\n",
|
yading@10
|
2383 logfilename);
|
yading@10
|
2384 exit(1);
|
yading@10
|
2385 }
|
yading@10
|
2386 codec->stats_in = logbuffer;
|
yading@10
|
2387 }
|
yading@10
|
2388 if (codec->flags & CODEC_FLAG_PASS1) {
|
yading@10
|
2389 f = fopen(logfilename, "wb");
|
yading@10
|
2390 if (!f) {
|
yading@10
|
2391 av_log(NULL, AV_LOG_FATAL, "Cannot write log file '%s' for pass-1 encoding: %s\n",
|
yading@10
|
2392 logfilename, strerror(errno));
|
yading@10
|
2393 exit(1);
|
yading@10
|
2394 }
|
yading@10
|
2395 ost->logfile = f;
|
yading@10
|
2396 }
|
yading@10
|
2397 }
|
yading@10
|
2398 }
|
yading@10
|
2399 }
|
yading@10
|
2400 }
|
yading@10
|
2401
|
yading@10
|
2402 /* open each encoder */
|
yading@10
|
2403 for (i = 0; i < nb_output_streams; i++) {
|
yading@10
|
2404 ost = output_streams[i];
|
yading@10
|
2405 if (ost->encoding_needed) {
|
yading@10
|
2406 AVCodec *codec = ost->enc;
|
yading@10
|
2407 AVCodecContext *dec = NULL;
|
yading@10
|
2408
|
yading@10
|
2409 if ((ist = get_input_stream(ost)))
|
yading@10
|
2410 dec = ist->st->codec;
|
yading@10
|
2411 if (dec && dec->subtitle_header) {
|
yading@10
|
2412 /* ASS code assumes this buffer is null terminated so add extra byte. */
|
yading@10
|
2413 ost->st->codec->subtitle_header = av_mallocz(dec->subtitle_header_size + 1);
|
yading@10
|
2414 if (!ost->st->codec->subtitle_header) {
|
yading@10
|
2415 ret = AVERROR(ENOMEM);
|
yading@10
|
2416 goto dump_format;
|
yading@10
|
2417 }
|
yading@10
|
2418 memcpy(ost->st->codec->subtitle_header, dec->subtitle_header, dec->subtitle_header_size);
|
yading@10
|
2419 ost->st->codec->subtitle_header_size = dec->subtitle_header_size;
|
yading@10
|
2420 }
|
yading@10
|
2421 if (!av_dict_get(ost->opts, "threads", NULL, 0))
|
yading@10
|
2422 av_dict_set(&ost->opts, "threads", "auto", 0);
|
yading@10
|
2423 if ((ret = avcodec_open2(ost->st->codec, codec, &ost->opts)) < 0) {
|
yading@10
|
2424 if (ret == AVERROR_EXPERIMENTAL)
|
yading@10
|
2425 abort_codec_experimental(codec, 1);
|
yading@10
|
2426 snprintf(error, sizeof(error), "Error while opening encoder for output stream #%d:%d - maybe incorrect parameters such as bit_rate, rate, width or height",
|
yading@10
|
2427 ost->file_index, ost->index);
|
yading@10
|
2428 goto dump_format;
|
yading@10
|
2429 }
|
yading@10
|
2430 if (ost->enc->type == AVMEDIA_TYPE_AUDIO &&
|
yading@10
|
2431 !(ost->enc->capabilities & CODEC_CAP_VARIABLE_FRAME_SIZE))
|
yading@10
|
2432 av_buffersink_set_frame_size(ost->filter->filter,
|
yading@10
|
2433 ost->st->codec->frame_size);
|
yading@10
|
2434 assert_avoptions(ost->opts);
|
yading@10
|
2435 if (ost->st->codec->bit_rate && ost->st->codec->bit_rate < 1000)
|
yading@10
|
2436 av_log(NULL, AV_LOG_WARNING, "The bitrate parameter is set too low."
|
yading@10
|
2437 " It takes bits/s as argument, not kbits/s\n");
|
yading@10
|
2438 extra_size += ost->st->codec->extradata_size;
|
yading@10
|
2439
|
yading@10
|
2440 if (ost->st->codec->me_threshold)
|
yading@10
|
2441 input_streams[ost->source_index]->st->codec->debug |= FF_DEBUG_MV;
|
yading@10
|
2442 } else {
|
yading@10
|
2443 av_opt_set_dict(ost->st->codec, &ost->opts);
|
yading@10
|
2444 }
|
yading@10
|
2445 }
|
yading@10
|
2446
|
yading@10
|
2447 /* init input streams */
|
yading@10
|
2448 for (i = 0; i < nb_input_streams; i++)
|
yading@10
|
2449 if ((ret = init_input_stream(i, error, sizeof(error))) < 0) {
|
yading@10
|
2450 for (i = 0; i < nb_output_streams; i++) {
|
yading@10
|
2451 ost = output_streams[i];
|
yading@10
|
2452 avcodec_close(ost->st->codec);
|
yading@10
|
2453 }
|
yading@10
|
2454 goto dump_format;
|
yading@10
|
2455 }
|
yading@10
|
2456
|
yading@10
|
2457 /* discard unused programs */
|
yading@10
|
2458 for (i = 0; i < nb_input_files; i++) {
|
yading@10
|
2459 InputFile *ifile = input_files[i];
|
yading@10
|
2460 for (j = 0; j < ifile->ctx->nb_programs; j++) {
|
yading@10
|
2461 AVProgram *p = ifile->ctx->programs[j];
|
yading@10
|
2462 int discard = AVDISCARD_ALL;
|
yading@10
|
2463
|
yading@10
|
2464 for (k = 0; k < p->nb_stream_indexes; k++)
|
yading@10
|
2465 if (!input_streams[ifile->ist_index + p->stream_index[k]]->discard) {
|
yading@10
|
2466 discard = AVDISCARD_DEFAULT;
|
yading@10
|
2467 break;
|
yading@10
|
2468 }
|
yading@10
|
2469 p->discard = discard;
|
yading@10
|
2470 }
|
yading@10
|
2471 }
|
yading@10
|
2472
|
yading@10
|
2473 /* open files and write file headers */
|
yading@10
|
2474 for (i = 0; i < nb_output_files; i++) {
|
yading@10
|
2475 oc = output_files[i]->ctx;
|
yading@10
|
2476 oc->interrupt_callback = int_cb;
|
yading@10
|
2477 if ((ret = avformat_write_header(oc, &output_files[i]->opts)) < 0) {
|
yading@10
|
2478 char errbuf[128];
|
yading@10
|
2479 const char *errbuf_ptr = errbuf;
|
yading@10
|
2480 if (av_strerror(ret, errbuf, sizeof(errbuf)) < 0)
|
yading@10
|
2481 errbuf_ptr = strerror(AVUNERROR(ret));
|
yading@10
|
2482 snprintf(error, sizeof(error), "Could not write header for output file #%d (incorrect codec parameters ?): %s", i, errbuf_ptr);
|
yading@10
|
2483 ret = AVERROR(EINVAL);
|
yading@10
|
2484 goto dump_format;
|
yading@10
|
2485 }
|
yading@10
|
2486 // assert_avoptions(output_files[i]->opts);
|
yading@10
|
2487 if (strcmp(oc->oformat->name, "rtp")) {
|
yading@10
|
2488 want_sdp = 0;
|
yading@10
|
2489 }
|
yading@10
|
2490 }
|
yading@10
|
2491
|
yading@10
|
2492 dump_format:
|
yading@10
|
2493 /* dump the file output parameters - cannot be done before in case
|
yading@10
|
2494 of stream copy */
|
yading@10
|
2495 for (i = 0; i < nb_output_files; i++) {
|
yading@10
|
2496 av_dump_format(output_files[i]->ctx, i, output_files[i]->ctx->filename, 1);
|
yading@10
|
2497 }
|
yading@10
|
2498
|
yading@10
|
2499 /* dump the stream mapping */
|
yading@10
|
2500 av_log(NULL, AV_LOG_INFO, "Stream mapping:\n");
|
yading@10
|
2501 for (i = 0; i < nb_input_streams; i++) {
|
yading@10
|
2502 ist = input_streams[i];
|
yading@10
|
2503
|
yading@10
|
2504 for (j = 0; j < ist->nb_filters; j++) {
|
yading@10
|
2505 if (ist->filters[j]->graph->graph_desc) {
|
yading@10
|
2506 av_log(NULL, AV_LOG_INFO, " Stream #%d:%d (%s) -> %s",
|
yading@10
|
2507 ist->file_index, ist->st->index, ist->dec ? ist->dec->name : "?",
|
yading@10
|
2508 ist->filters[j]->name);
|
yading@10
|
2509 if (nb_filtergraphs > 1)
|
yading@10
|
2510 av_log(NULL, AV_LOG_INFO, " (graph %d)", ist->filters[j]->graph->index);
|
yading@10
|
2511 av_log(NULL, AV_LOG_INFO, "\n");
|
yading@10
|
2512 }
|
yading@10
|
2513 }
|
yading@10
|
2514 }
|
yading@10
|
2515
|
yading@10
|
2516 for (i = 0; i < nb_output_streams; i++) {
|
yading@10
|
2517 ost = output_streams[i];
|
yading@10
|
2518
|
yading@10
|
2519 if (ost->attachment_filename) {
|
yading@10
|
2520 /* an attached file */
|
yading@10
|
2521 av_log(NULL, AV_LOG_INFO, " File %s -> Stream #%d:%d\n",
|
yading@10
|
2522 ost->attachment_filename, ost->file_index, ost->index);
|
yading@10
|
2523 continue;
|
yading@10
|
2524 }
|
yading@10
|
2525
|
yading@10
|
2526 if (ost->filter && ost->filter->graph->graph_desc) {
|
yading@10
|
2527 /* output from a complex graph */
|
yading@10
|
2528 av_log(NULL, AV_LOG_INFO, " %s", ost->filter->name);
|
yading@10
|
2529 if (nb_filtergraphs > 1)
|
yading@10
|
2530 av_log(NULL, AV_LOG_INFO, " (graph %d)", ost->filter->graph->index);
|
yading@10
|
2531
|
yading@10
|
2532 av_log(NULL, AV_LOG_INFO, " -> Stream #%d:%d (%s)\n", ost->file_index,
|
yading@10
|
2533 ost->index, ost->enc ? ost->enc->name : "?");
|
yading@10
|
2534 continue;
|
yading@10
|
2535 }
|
yading@10
|
2536
|
yading@10
|
2537 av_log(NULL, AV_LOG_INFO, " Stream #%d:%d -> #%d:%d",
|
yading@10
|
2538 input_streams[ost->source_index]->file_index,
|
yading@10
|
2539 input_streams[ost->source_index]->st->index,
|
yading@10
|
2540 ost->file_index,
|
yading@10
|
2541 ost->index);
|
yading@10
|
2542 if (ost->sync_ist != input_streams[ost->source_index])
|
yading@10
|
2543 av_log(NULL, AV_LOG_INFO, " [sync #%d:%d]",
|
yading@10
|
2544 ost->sync_ist->file_index,
|
yading@10
|
2545 ost->sync_ist->st->index);
|
yading@10
|
2546 if (ost->stream_copy)
|
yading@10
|
2547 av_log(NULL, AV_LOG_INFO, " (copy)");
|
yading@10
|
2548 else
|
yading@10
|
2549 av_log(NULL, AV_LOG_INFO, " (%s -> %s)", input_streams[ost->source_index]->dec ?
|
yading@10
|
2550 input_streams[ost->source_index]->dec->name : "?",
|
yading@10
|
2551 ost->enc ? ost->enc->name : "?");
|
yading@10
|
2552 av_log(NULL, AV_LOG_INFO, "\n");
|
yading@10
|
2553 }
|
yading@10
|
2554
|
yading@10
|
2555 if (ret) {
|
yading@10
|
2556 av_log(NULL, AV_LOG_ERROR, "%s\n", error);
|
yading@10
|
2557 return ret;
|
yading@10
|
2558 }
|
yading@10
|
2559
|
yading@10
|
2560 if (want_sdp) {
|
yading@10
|
2561 print_sdp();
|
yading@10
|
2562 }
|
yading@10
|
2563
|
yading@10
|
2564 return 0;
|
yading@10
|
2565 }
|
yading@10
|
2566
|
yading@10
|
2567 /* Return 1 if there remain streams where more output is wanted, 0 otherwise. */
|
yading@10
|
2568 static int need_output(void)
|
yading@10
|
2569 {
|
yading@10
|
2570 int i;
|
yading@10
|
2571
|
yading@10
|
2572 for (i = 0; i < nb_output_streams; i++) {
|
yading@10
|
2573 OutputStream *ost = output_streams[i];
|
yading@10
|
2574 OutputFile *of = output_files[ost->file_index];
|
yading@10
|
2575 AVFormatContext *os = output_files[ost->file_index]->ctx;
|
yading@10
|
2576
|
yading@10
|
2577 if (ost->finished ||
|
yading@10
|
2578 (os->pb && avio_tell(os->pb) >= of->limit_filesize))
|
yading@10
|
2579 continue;
|
yading@10
|
2580 if (ost->frame_number >= ost->max_frames) {
|
yading@10
|
2581 int j;
|
yading@10
|
2582 for (j = 0; j < of->ctx->nb_streams; j++)
|
yading@10
|
2583 close_output_stream(output_streams[of->ost_index + j]);
|
yading@10
|
2584 continue;
|
yading@10
|
2585 }
|
yading@10
|
2586
|
yading@10
|
2587 return 1;
|
yading@10
|
2588 }
|
yading@10
|
2589
|
yading@10
|
2590 return 0;
|
yading@10
|
2591 }
|
yading@10
|
2592
|
yading@10
|
2593 /**
|
yading@10
|
2594 * Select the output stream to process.
|
yading@10
|
2595 *
|
yading@10
|
2596 * @return selected output stream, or NULL if none available
|
yading@10
|
2597 */
|
yading@10
|
2598 static OutputStream *choose_output(void)
|
yading@10
|
2599 {
|
yading@10
|
2600 int i;
|
yading@10
|
2601 int64_t opts_min = INT64_MAX;
|
yading@10
|
2602 OutputStream *ost_min = NULL;
|
yading@10
|
2603
|
yading@10
|
2604 for (i = 0; i < nb_output_streams; i++) {
|
yading@10
|
2605 OutputStream *ost = output_streams[i];
|
yading@10
|
2606 int64_t opts = av_rescale_q(ost->st->cur_dts, ost->st->time_base,
|
yading@10
|
2607 AV_TIME_BASE_Q);
|
yading@10
|
2608 if (!ost->unavailable && !ost->finished && opts < opts_min) {
|
yading@10
|
2609 opts_min = opts;
|
yading@10
|
2610 ost_min = ost;
|
yading@10
|
2611 }
|
yading@10
|
2612 }
|
yading@10
|
2613 return ost_min;
|
yading@10
|
2614 }
|
yading@10
|
2615
|
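/**
 * Poll the console for an interactive command and act on it: 'q' requests
 * exit, '+'/'-' change the log level, 's' toggles the QP histogram, 'h'
 * cycles the packet/hex dump modes, 'c'/'C' send a command to the
 * filtergraphs, 'd'/'D' set codec debug flags and '?' prints the key help.
 *
 * @param cur_time current time in microseconds, used to rate-limit key reads
 * @return AVERROR_EXIT if transcoding should stop, 0 otherwise
 */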
static int check_keyboard_interaction(int64_t cur_time)
{
    int i, ret, key;
    static int64_t last_time;
    if (received_nb_signals)
        return AVERROR_EXIT;
    /* read_key() returns 0 on EOF */
    if(cur_time - last_time >= 100000 && !run_as_daemon){
        key = read_key();
        last_time = cur_time;
    }else
        key = -1;
    if (key == 'q')
        return AVERROR_EXIT;
    if (key == '+') av_log_set_level(av_log_get_level()+10);
    if (key == '-') av_log_set_level(av_log_get_level()-10);
    if (key == 's') qp_hist ^= 1;
    if (key == 'h'){
        if (do_hex_dump){
            do_hex_dump = do_pkt_dump = 0;
        } else if(do_pkt_dump){
            do_hex_dump = 1;
        } else
            do_pkt_dump = 1;
        av_log_set_level(AV_LOG_DEBUG);
    }
    if (key == 'c' || key == 'C'){
        char buf[4096], target[64], command[256], arg[256] = {0};
        double time;
        int k, n = 0;
        fprintf(stderr, "\nEnter command: <target> <time> <command>[ <argument>]\n");
        i = 0;
        while ((k = read_key()) != '\n' && k != '\r' && i < sizeof(buf)-1)
            if (k > 0)
                buf[i++] = k;
        buf[i] = 0;
        if (k > 0 &&
            (n = sscanf(buf, "%63[^ ] %lf %255[^ ] %255[^\n]", target, &time, command, arg)) >= 3) {
            av_log(NULL, AV_LOG_DEBUG, "Processing command target:%s time:%f command:%s arg:%s",
                   target, time, command, arg);
            for (i = 0; i < nb_filtergraphs; i++) {
                FilterGraph *fg = filtergraphs[i];
                if (fg->graph) {
                    if (time < 0) {
                        ret = avfilter_graph_send_command(fg->graph, target, command, arg, buf, sizeof(buf),
                                                          key == 'c' ? AVFILTER_CMD_FLAG_ONE : 0);
                        fprintf(stderr, "Command reply for stream %d: ret:%d res:%s\n", i, ret, buf);
                    } else {
                        ret = avfilter_graph_queue_command(fg->graph, target, command, arg, 0, time);
                    }
                }
            }
        } else {
            av_log(NULL, AV_LOG_ERROR,
                   "Parse error, at least 3 arguments were expected, "
                   "only %d given in string '%s'\n", n, buf);
        }
    }
    if (key == 'd' || key == 'D'){
        int debug=0;
        if(key == 'D') {
            debug = input_streams[0]->st->codec->debug<<1;
            if(!debug) debug = 1;
            while(debug & (FF_DEBUG_DCT_COEFF|FF_DEBUG_VIS_QP|FF_DEBUG_VIS_MB_TYPE)) //unsupported, would just crash
                debug += debug;
        }else
            if(scanf("%d", &debug)!=1)
                fprintf(stderr,"error parsing debug value\n");
        for(i=0;i<nb_input_streams;i++) {
            input_streams[i]->st->codec->debug = debug;
        }
        for(i=0;i<nb_output_streams;i++) {
            OutputStream *ost = output_streams[i];
            ost->st->codec->debug = debug;
        }
        if(debug) av_log_set_level(AV_LOG_DEBUG);
        fprintf(stderr,"debug=%d\n", debug);
    }
    if (key == '?'){
        fprintf(stderr, "key function\n"
                        "? show this help\n"
                        "+ increase verbosity\n"
                        "- decrease verbosity\n"
                        "c Send command to filtergraph\n"
                        "D cycle through available debug modes\n"
                        "h dump packets/hex press to cycle through the 3 states\n"
                        "q quit\n"
                        "s Show QP histogram\n"
        );
    }
    return 0;
}

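/*
 * When more than one input file is used, demuxing is moved to separate
 * threads: one input_thread() per file reads packets into a per-file packet
 * FIFO, and the main loop drains that FIFO through get_input_packet_mt().
 * With a single input file the plain synchronous av_read_frame() path below
 * is used instead.
 */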
#if HAVE_PTHREADS
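/**
 * Demuxer thread body: read packets from the input file and append them to
 * its FIFO until transcoding finishes or av_read_frame() fails; waits while
 * the FIFO is full.
 */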
static void *input_thread(void *arg)
{
    InputFile *f = arg;
    int ret = 0;

    while (!transcoding_finished && ret >= 0) {
        AVPacket pkt;
        ret = av_read_frame(f->ctx, &pkt);

        if (ret == AVERROR(EAGAIN)) {
            av_usleep(10000);
            ret = 0;
            continue;
        } else if (ret < 0)
            break;

        pthread_mutex_lock(&f->fifo_lock);
        while (!av_fifo_space(f->fifo))
            pthread_cond_wait(&f->fifo_cond, &f->fifo_lock);

        av_dup_packet(&pkt);
        av_fifo_generic_write(f->fifo, &pkt, sizeof(pkt), NULL);

        pthread_mutex_unlock(&f->fifo_lock);
    }

    f->finished = 1;
    return NULL;
}

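/**
 * Stop and join the demuxer threads, draining and freeing each input FIFO.
 * No-op when only one input file is used.
 */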
static void free_input_threads(void)
{
    int i;

    if (nb_input_files == 1)
        return;

    transcoding_finished = 1;

    for (i = 0; i < nb_input_files; i++) {
        InputFile *f = input_files[i];
        AVPacket pkt;

        if (!f->fifo || f->joined)
            continue;

        pthread_mutex_lock(&f->fifo_lock);
        while (av_fifo_size(f->fifo)) {
            av_fifo_generic_read(f->fifo, &pkt, sizeof(pkt), NULL);
            av_free_packet(&pkt);
        }
        pthread_cond_signal(&f->fifo_cond);
        pthread_mutex_unlock(&f->fifo_lock);

        pthread_join(f->thread, NULL);
        f->joined = 1;

        while (av_fifo_size(f->fifo)) {
            av_fifo_generic_read(f->fifo, &pkt, sizeof(pkt), NULL);
            av_free_packet(&pkt);
        }
        av_fifo_free(f->fifo);
    }
}

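/**
 * Create a packet FIFO with its lock and condition variable for every input
 * file and start one demuxer thread per file. No-op when only one input file
 * is used.
 *
 * @return 0 on success, a negative AVERROR code on failure
 */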
static int init_input_threads(void)
{
    int i, ret;

    if (nb_input_files == 1)
        return 0;

    for (i = 0; i < nb_input_files; i++) {
        InputFile *f = input_files[i];

        if (!(f->fifo = av_fifo_alloc(8*sizeof(AVPacket))))
            return AVERROR(ENOMEM);

        pthread_mutex_init(&f->fifo_lock, NULL);
        pthread_cond_init (&f->fifo_cond, NULL);

        if ((ret = pthread_create(&f->thread, NULL, input_thread, f)))
            return AVERROR(ret);
    }
    return 0;
}

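/**
 * Pop the next packet queued for this input file by its demuxer thread.
 *
 * @return 0 on success, AVERROR(EAGAIN) if the FIFO is empty,
 *         AVERROR_EOF if the demuxer thread has finished
 */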
static int get_input_packet_mt(InputFile *f, AVPacket *pkt)
{
    int ret = 0;

    pthread_mutex_lock(&f->fifo_lock);

    if (av_fifo_size(f->fifo)) {
        av_fifo_generic_read(f->fifo, pkt, sizeof(*pkt), NULL);
        pthread_cond_signal(&f->fifo_cond);
    } else {
        if (f->finished)
            ret = AVERROR_EOF;
        else
            ret = AVERROR(EAGAIN);
    }

    pthread_mutex_unlock(&f->fifo_lock);

    return ret;
}
#endif

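/**
 * Read the next packet from an input file, either from its FIFO (threaded
 * path with several inputs) or directly with av_read_frame().
 */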
static int get_input_packet(InputFile *f, AVPacket *pkt)
{
#if HAVE_PTHREADS
    if (nb_input_files > 1)
        return get_input_packet_mt(f, pkt);
#endif
    return av_read_frame(f->ctx, pkt);
}

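/**
 * Return 1 if any output stream is currently marked unavailable because its
 * input returned EAGAIN, 0 otherwise.
 */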
static int got_eagain(void)
{
    int i;
    for (i = 0; i < nb_output_streams; i++)
        if (output_streams[i]->unavailable)
            return 1;
    return 0;
}

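/**
 * Clear the EAGAIN state on all input files and output streams before the
 * next read attempt.
 */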
static void reset_eagain(void)
{
    int i;
    for (i = 0; i < nb_input_files; i++)
        input_files[i]->eagain = 0;
    for (i = 0; i < nb_output_streams; i++)
        output_streams[i]->unavailable = 0;
}

/*
 * Return
 * - 0 -- one packet was read and processed
 * - AVERROR(EAGAIN) -- no packets were available for selected file,
 *   this function should be called again
 * - AVERROR_EOF -- this function should not be called again
 */
static int process_input(int file_index)
{
    InputFile *ifile = input_files[file_index];
    AVFormatContext *is;
    InputStream *ist;
    AVPacket pkt;
    int ret, i, j;

    is = ifile->ctx;
    ret = get_input_packet(ifile, &pkt);

    if (ret == AVERROR(EAGAIN)) {
        ifile->eagain = 1;
        return ret;
    }
    if (ret < 0) {
        if (ret != AVERROR_EOF) {
            print_error(is->filename, ret);
            if (exit_on_error)
                exit(1);
        }
        ifile->eof_reached = 1;

        for (i = 0; i < ifile->nb_streams; i++) {
            ist = input_streams[ifile->ist_index + i];
            if (ist->decoding_needed)
                output_packet(ist, NULL);

            /* mark all outputs that don't go through lavfi as finished */
            for (j = 0; j < nb_output_streams; j++) {
                OutputStream *ost = output_streams[j];

                if (ost->source_index == ifile->ist_index + i &&
                    (ost->stream_copy || ost->enc->type == AVMEDIA_TYPE_SUBTITLE))
                    close_output_stream(ost);
            }
        }

        return AVERROR(EAGAIN);
    }

    reset_eagain();

    if (do_pkt_dump) {
        av_pkt_dump_log2(NULL, AV_LOG_DEBUG, &pkt, do_hex_dump,
                         is->streams[pkt.stream_index]);
    }
    /* the following test is needed in case new streams appear
       dynamically in the stream; we ignore them */
    if (pkt.stream_index >= ifile->nb_streams) {
        report_new_stream(file_index, &pkt);
        goto discard_packet;
    }

    ist = input_streams[ifile->ist_index + pkt.stream_index];
    if (ist->discard)
        goto discard_packet;

    if (debug_ts) {
        av_log(NULL, AV_LOG_INFO, "demuxer -> ist_index:%d type:%s "
               "next_dts:%s next_dts_time:%s next_pts:%s next_pts_time:%s pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s off:%s off_time:%s\n",
               ifile->ist_index + pkt.stream_index, av_get_media_type_string(ist->st->codec->codec_type),
               av_ts2str(ist->next_dts), av_ts2timestr(ist->next_dts, &AV_TIME_BASE_Q),
               av_ts2str(ist->next_pts), av_ts2timestr(ist->next_pts, &AV_TIME_BASE_Q),
               av_ts2str(pkt.pts), av_ts2timestr(pkt.pts, &ist->st->time_base),
               av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &ist->st->time_base),
               av_ts2str(input_files[ist->file_index]->ts_offset),
               av_ts2timestr(input_files[ist->file_index]->ts_offset, &AV_TIME_BASE_Q));
    }

    if(!ist->wrap_correction_done && is->start_time != AV_NOPTS_VALUE && ist->st->pts_wrap_bits < 64){
        int64_t stime, stime2;
        // Correct the start time based on the enabled streams.
        // FIXME: ideally this should be done before the first use of the start time, but we do
        // not know which streams are enabled at that point, so it is done here as part of
        // discontinuity handling.
        if (   ist->next_dts == AV_NOPTS_VALUE
            && ifile->ts_offset == -is->start_time
            && (is->iformat->flags & AVFMT_TS_DISCONT)) {
            int64_t new_start_time = INT64_MAX;
            for (i=0; i<is->nb_streams; i++) {
                AVStream *st = is->streams[i];
                if(st->discard == AVDISCARD_ALL || st->start_time == AV_NOPTS_VALUE)
                    continue;
                new_start_time = FFMIN(new_start_time, av_rescale_q(st->start_time, st->time_base, AV_TIME_BASE_Q));
            }
            if (new_start_time > is->start_time) {
                av_log(is, AV_LOG_VERBOSE, "Correcting start time by %"PRId64"\n", new_start_time - is->start_time);
                ifile->ts_offset = -new_start_time;
            }
        }

        stime = av_rescale_q(is->start_time, AV_TIME_BASE_Q, ist->st->time_base);
        stime2= stime + (1ULL<<ist->st->pts_wrap_bits);
        ist->wrap_correction_done = 1;

        if(stime2 > stime && pkt.dts != AV_NOPTS_VALUE && pkt.dts > stime + (1LL<<(ist->st->pts_wrap_bits-1))) {
            pkt.dts -= 1ULL<<ist->st->pts_wrap_bits;
            ist->wrap_correction_done = 0;
        }
        if(stime2 > stime && pkt.pts != AV_NOPTS_VALUE && pkt.pts > stime + (1LL<<(ist->st->pts_wrap_bits-1))) {
            pkt.pts -= 1ULL<<ist->st->pts_wrap_bits;
            ist->wrap_correction_done = 0;
        }
    }

    if (pkt.dts != AV_NOPTS_VALUE)
        pkt.dts += av_rescale_q(ifile->ts_offset, AV_TIME_BASE_Q, ist->st->time_base);
    if (pkt.pts != AV_NOPTS_VALUE)
        pkt.pts += av_rescale_q(ifile->ts_offset, AV_TIME_BASE_Q, ist->st->time_base);

    if (pkt.pts != AV_NOPTS_VALUE)
        pkt.pts *= ist->ts_scale;
    if (pkt.dts != AV_NOPTS_VALUE)
        pkt.dts *= ist->ts_scale;

    if (pkt.dts != AV_NOPTS_VALUE && ist->next_dts == AV_NOPTS_VALUE && !copy_ts
        && (is->iformat->flags & AVFMT_TS_DISCONT) && ifile->last_ts != AV_NOPTS_VALUE) {
        int64_t pkt_dts = av_rescale_q(pkt.dts, ist->st->time_base, AV_TIME_BASE_Q);
        int64_t delta = pkt_dts - ifile->last_ts;
        if(delta < -1LL*dts_delta_threshold*AV_TIME_BASE ||
           (delta > 1LL*dts_delta_threshold*AV_TIME_BASE &&
            ist->st->codec->codec_type != AVMEDIA_TYPE_SUBTITLE)){
            ifile->ts_offset -= delta;
            av_log(NULL, AV_LOG_DEBUG,
                   "Inter stream timestamp discontinuity %"PRId64", new offset= %"PRId64"\n",
                   delta, ifile->ts_offset);
            pkt.dts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
            if (pkt.pts != AV_NOPTS_VALUE)
                pkt.pts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
        }
    }

    if (pkt.dts != AV_NOPTS_VALUE && ist->next_dts != AV_NOPTS_VALUE &&
        !copy_ts) {
        int64_t pkt_dts = av_rescale_q(pkt.dts, ist->st->time_base, AV_TIME_BASE_Q);
        int64_t delta = pkt_dts - ist->next_dts;
        if (is->iformat->flags & AVFMT_TS_DISCONT) {
            if(delta < -1LL*dts_delta_threshold*AV_TIME_BASE ||
               (delta > 1LL*dts_delta_threshold*AV_TIME_BASE &&
                ist->st->codec->codec_type != AVMEDIA_TYPE_SUBTITLE) ||
               pkt_dts+1<ist->pts){
                ifile->ts_offset -= delta;
                av_log(NULL, AV_LOG_DEBUG,
                       "timestamp discontinuity %"PRId64", new offset= %"PRId64"\n",
                       delta, ifile->ts_offset);
                pkt.dts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
                if (pkt.pts != AV_NOPTS_VALUE)
                    pkt.pts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
            }
        } else {
            if ( delta < -1LL*dts_error_threshold*AV_TIME_BASE ||
                (delta > 1LL*dts_error_threshold*AV_TIME_BASE && ist->st->codec->codec_type != AVMEDIA_TYPE_SUBTITLE)
               ) {
                av_log(NULL, AV_LOG_WARNING, "DTS %"PRId64", next:%"PRId64" st:%d invalid dropping\n", pkt.dts, ist->next_dts, pkt.stream_index);
                pkt.dts = AV_NOPTS_VALUE;
            }
            if (pkt.pts != AV_NOPTS_VALUE){
                int64_t pkt_pts = av_rescale_q(pkt.pts, ist->st->time_base, AV_TIME_BASE_Q);
                delta = pkt_pts - ist->next_dts;
                if ( delta < -1LL*dts_error_threshold*AV_TIME_BASE ||
                    (delta > 1LL*dts_error_threshold*AV_TIME_BASE && ist->st->codec->codec_type != AVMEDIA_TYPE_SUBTITLE)
                   ) {
                    av_log(NULL, AV_LOG_WARNING, "PTS %"PRId64", next:%"PRId64" invalid dropping st:%d\n", pkt.pts, ist->next_dts, pkt.stream_index);
                    pkt.pts = AV_NOPTS_VALUE;
                }
            }
        }
    }

    if (pkt.dts != AV_NOPTS_VALUE)
        ifile->last_ts = av_rescale_q(pkt.dts, ist->st->time_base, AV_TIME_BASE_Q);

    if (debug_ts) {
        av_log(NULL, AV_LOG_INFO, "demuxer+ffmpeg -> ist_index:%d type:%s pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s off:%s off_time:%s\n",
               ifile->ist_index + pkt.stream_index, av_get_media_type_string(ist->st->codec->codec_type),
               av_ts2str(pkt.pts), av_ts2timestr(pkt.pts, &ist->st->time_base),
               av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &ist->st->time_base),
               av_ts2str(input_files[ist->file_index]->ts_offset),
               av_ts2timestr(input_files[ist->file_index]->ts_offset, &AV_TIME_BASE_Q));
    }

    sub2video_heartbeat(ist, pkt.pts);

    ret = output_packet(ist, &pkt);
    if (ret < 0) {
        char buf[128];
        av_strerror(ret, buf, sizeof(buf));
        av_log(NULL, AV_LOG_ERROR, "Error while decoding stream #%d:%d: %s\n",
               ist->file_index, ist->st->index, buf);
        if (exit_on_error)
            exit(1);
    }

discard_packet:
    av_free_packet(&pkt);

    return 0;
}

/**
 * Perform a step of transcoding for the specified filter graph.
 *
 * @param[in]  graph     filter graph to consider
 * @param[out] best_ist  input stream from which a frame would allow processing to continue
 * @return 0 for success, <0 for error
 */
static int transcode_from_filter(FilterGraph *graph, InputStream **best_ist)
{
    int i, ret;
    int nb_requests, nb_requests_max = 0;
    InputFilter *ifilter;
    InputStream *ist;

    *best_ist = NULL;
    ret = avfilter_graph_request_oldest(graph->graph);
    if (ret >= 0)
        return reap_filters();

    if (ret == AVERROR_EOF) {
        ret = reap_filters();
        for (i = 0; i < graph->nb_outputs; i++)
            close_output_stream(graph->outputs[i]->ost);
        return ret;
    }
    if (ret != AVERROR(EAGAIN))
        return ret;

    for (i = 0; i < graph->nb_inputs; i++) {
        ifilter = graph->inputs[i];
        ist = ifilter->ist;
        if (input_files[ist->file_index]->eagain ||
            input_files[ist->file_index]->eof_reached)
            continue;
        nb_requests = av_buffersrc_get_nb_failed_requests(ifilter->filter);
        if (nb_requests > nb_requests_max) {
            nb_requests_max = nb_requests;
            *best_ist = ist;
        }
    }

    if (!*best_ist)
        for (i = 0; i < graph->nb_outputs; i++)
            graph->outputs[i]->ost->unavailable = 1;

    return 0;
}

/**
 * Run a single step of transcoding.
 *
 * @return 0 for success, <0 for error
 */
static int transcode_step(void)
{
    OutputStream *ost;
    InputStream *ist;
    int ret;

    ost = choose_output();
    if (!ost) {
        if (got_eagain()) {
            reset_eagain();
            av_usleep(10000);
            return 0;
        }
        av_log(NULL, AV_LOG_VERBOSE, "No more inputs to read from, finishing.\n");
        return AVERROR_EOF;
    }

    if (ost->filter) {
        if ((ret = transcode_from_filter(ost->filter->graph, &ist)) < 0)
            return ret;
        if (!ist)
            return 0;
    } else {
        av_assert0(ost->source_index >= 0);
        ist = input_streams[ost->source_index];
    }

    ret = process_input(ist->file_index);
    if (ret == AVERROR(EAGAIN)) {
        if (input_files[ist->file_index]->eagain)
            ost->unavailable = 1;
        return 0;
    }
    if (ret < 0)
        return ret == AVERROR_EOF ? 0 : ret;

    return reap_filters();
}

/*
 * The following code is the main loop of the file converter
 */
static int transcode(void)
{
    int ret, i;
    AVFormatContext *os;
    OutputStream *ost;
    InputStream *ist;
    int64_t timer_start;

    ret = transcode_init();
    if (ret < 0)
        goto fail;

    if (stdin_interaction) {
        av_log(NULL, AV_LOG_INFO, "Press [q] to stop, [?] for help\n");
    }

    timer_start = av_gettime();

#if HAVE_PTHREADS
    if ((ret = init_input_threads()) < 0)
        goto fail;
#endif

    while (!received_sigterm) {
        int64_t cur_time= av_gettime();

        /* if 'q' was pressed, exit */
        if (stdin_interaction)
            if (check_keyboard_interaction(cur_time) < 0)
                break;

        /* check if there's any stream where output is still needed */
        if (!need_output()) {
            av_log(NULL, AV_LOG_VERBOSE, "No more output streams to write to, finishing.\n");
            break;
        }

        ret = transcode_step();
        if (ret < 0) {
            if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN))
                continue;

            av_log(NULL, AV_LOG_ERROR, "Error while filtering.\n");
            break;
        }

        /* dump report by using the first output video and audio streams */
        print_report(0, timer_start, cur_time);
    }
#if HAVE_PTHREADS
    free_input_threads();
#endif

    /* at the end of stream, we must flush the decoder buffers */
    for (i = 0; i < nb_input_streams; i++) {
        ist = input_streams[i];
        if (!input_files[ist->file_index]->eof_reached && ist->decoding_needed) {
            output_packet(ist, NULL);
        }
    }
    flush_encoders();

    term_exit();

    /* write the trailer if needed and close file */
    for (i = 0; i < nb_output_files; i++) {
        os = output_files[i]->ctx;
        av_write_trailer(os);
    }

    /* dump report by using the first video and audio streams */
    print_report(1, timer_start, av_gettime());

    /* close each encoder */
    for (i = 0; i < nb_output_streams; i++) {
        ost = output_streams[i];
        if (ost->encoding_needed) {
            av_freep(&ost->st->codec->stats_in);
            avcodec_close(ost->st->codec);
        }
    }

    /* close each decoder */
    for (i = 0; i < nb_input_streams; i++) {
        ist = input_streams[i];
        if (ist->decoding_needed) {
            avcodec_close(ist->st->codec);
        }
    }

    /* finished ! */
    ret = 0;

 fail:
#if HAVE_PTHREADS
    free_input_threads();
#endif

    if (output_streams) {
        for (i = 0; i < nb_output_streams; i++) {
            ost = output_streams[i];
            if (ost) {
                if (ost->stream_copy)
                    av_freep(&ost->st->codec->extradata);
                if (ost->logfile) {
                    fclose(ost->logfile);
                    ost->logfile = NULL;
                }
                av_freep(&ost->st->codec->subtitle_header);
                av_free(ost->forced_kf_pts);
                av_dict_free(&ost->opts);
                av_dict_free(&ost->swr_opts);
                av_dict_free(&ost->resample_opts);
            }
        }
    }
    return ret;
}


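/**
 * Return the user CPU time consumed by this process in microseconds, falling
 * back to wall-clock time when no suitable API is available.
 */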
static int64_t getutime(void)
{
#if HAVE_GETRUSAGE
    struct rusage rusage;

    getrusage(RUSAGE_SELF, &rusage);
    return (rusage.ru_utime.tv_sec * 1000000LL) + rusage.ru_utime.tv_usec;
#elif HAVE_GETPROCESSTIMES
    HANDLE proc;
    FILETIME c, e, k, u;
    proc = GetCurrentProcess();
    GetProcessTimes(proc, &c, &e, &k, &u);
    return ((int64_t) u.dwHighDateTime << 32 | u.dwLowDateTime) / 10;
#else
    return av_gettime();
#endif
}

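/**
 * Return the peak memory usage of this process in bytes, or 0 if it cannot
 * be determined on this platform.
 */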
static int64_t getmaxrss(void)
{
#if HAVE_GETRUSAGE && HAVE_STRUCT_RUSAGE_RU_MAXRSS
    struct rusage rusage;
    getrusage(RUSAGE_SELF, &rusage);
    return (int64_t)rusage.ru_maxrss * 1024;
#elif HAVE_GETPROCESSMEMORYINFO
    HANDLE proc;
    PROCESS_MEMORY_COUNTERS memcounters;
    proc = GetCurrentProcess();
    memcounters.cb = sizeof(memcounters);
    GetProcessMemoryInfo(proc, &memcounters, sizeof(memcounters));
    return memcounters.PeakPagefileUsage;
#else
    return 0;
#endif
}

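/* Log callback that discards all messages; installed when running as a daemon ("-d"). */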
static void log_callback_null(void *ptr, int level, const char *fmt, va_list vl)
{
}

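/**
 * Program entry point: register all codecs, formats, devices and filters,
 * parse the command line, run transcode() and report benchmark/decoding
 * statistics before exiting.
 */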
int main(int argc, char **argv)
{
    int ret;
    int64_t ti;

    atexit(exit_program);

    setvbuf(stderr,NULL,_IONBF,0); /* win32 runtime needs this */

    av_log_set_flags(AV_LOG_SKIP_REPEATED);
    parse_loglevel(argc, argv, options);

    if(argc>1 && !strcmp(argv[1], "-d")){
        run_as_daemon=1;
        av_log_set_callback(log_callback_null);
        argc--;
        argv++;
    }

    avcodec_register_all();
#if CONFIG_AVDEVICE
    avdevice_register_all();
#endif
    avfilter_register_all();
    av_register_all();
    avformat_network_init();

    show_banner(argc, argv, options);

    term_init();

    /* parse options and open all input/output files */
    ret = ffmpeg_parse_options(argc, argv);
    if (ret < 0)
        exit(1);

    if (nb_output_files <= 0 && nb_input_files == 0) {
        show_usage();
        av_log(NULL, AV_LOG_WARNING, "Use -h to get full help or, even better, run 'man %s'\n", program_name);
        exit(1);
    }

    /* file converter / grab */
    if (nb_output_files <= 0) {
        av_log(NULL, AV_LOG_FATAL, "At least one output file must be specified\n");
        exit(1);
    }

//     if (nb_input_files == 0) {
//         av_log(NULL, AV_LOG_FATAL, "At least one input file must be specified\n");
//         exit(1);
//     }

    current_time = ti = getutime();
    if (transcode() < 0)
        exit(1);
    ti = getutime() - ti;
    if (do_benchmark) {
        printf("bench: utime=%0.3fs\n", ti / 1000000.0);
    }
    av_log(NULL, AV_LOG_DEBUG, "%"PRIu64" frames successfully decoded, %"PRIu64" decoding errors\n",
           decode_error_stat[0], decode_error_stat[1]);
    if (2*decode_error_stat[0] < decode_error_stat[1])
        exit(254);

    exit(received_nb_signals ? 255 : 0);
    return 0;
}