/*
 * various utility functions for use within FFmpeg
 * Copyright (c) 2000, 2001, 2002 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/* #define DEBUG */

#include "avformat.h"
#include "avio_internal.h"
#include "internal.h"
#include "libavcodec/internal.h"
#include "libavcodec/raw.h"
#include "libavcodec/bytestream.h"
#include "libavutil/avassert.h"
#include "libavutil/opt.h"
#include "libavutil/dict.h"
#include "libavutil/pixdesc.h"
#include "metadata.h"
#include "id3v2.h"
#include "libavutil/avassert.h"
#include "libavutil/avstring.h"
#include "libavutil/mathematics.h"
#include "libavutil/parseutils.h"
#include "libavutil/time.h"
#include "libavutil/timestamp.h"
#include "riff.h"
#include "audiointerleave.h"
#include "url.h"
#include <stdarg.h>
#if CONFIG_NETWORK
#include "network.h"
#endif

#undef NDEBUG
#include <assert.h>

/**
 * @file
 * various utility functions for use within FFmpeg
 */

unsigned avformat_version(void)
{
    av_assert0(LIBAVFORMAT_VERSION_MICRO >= 100);
    return LIBAVFORMAT_VERSION_INT;
}

const char *avformat_configuration(void)
{
    return FFMPEG_CONFIGURATION;
}

const char *avformat_license(void)
{
#define LICENSE_PREFIX "libavformat license: "
    return LICENSE_PREFIX FFMPEG_LICENSE + sizeof(LICENSE_PREFIX) - 1;
}

#define RELATIVE_TS_BASE (INT64_MAX - (1LL<<48))

static int is_relative(int64_t ts) {
    return ts > (RELATIVE_TS_BASE - (1LL<<48));
}

/**
 * Wrap a given time stamp, if there is an indication for an overflow
 *
 * @param st stream
 * @param timestamp the time stamp to wrap
 * @return resulting time stamp
 */
static int64_t wrap_timestamp(AVStream *st, int64_t timestamp)
{
    if (st->pts_wrap_behavior != AV_PTS_WRAP_IGNORE &&
        st->pts_wrap_reference != AV_NOPTS_VALUE && timestamp != AV_NOPTS_VALUE) {
        if (st->pts_wrap_behavior == AV_PTS_WRAP_ADD_OFFSET &&
            timestamp < st->pts_wrap_reference)
            return timestamp + (1ULL << st->pts_wrap_bits);
        else if (st->pts_wrap_behavior == AV_PTS_WRAP_SUB_OFFSET &&
            timestamp >= st->pts_wrap_reference)
            return timestamp - (1ULL << st->pts_wrap_bits);
    }
    return timestamp;
}
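/*
 * Illustrative sketch (added note, not part of the original code; the values
 * below are hypothetical): with a 33-bit MPEG-TS clock, pts_wrap_bits is 33,
 * so a stream whose wrap reference sits near the end of the counter maps
 * small post-wrap timestamps back above the reference:
 *
 *     st->pts_wrap_bits      = 33;
 *     st->pts_wrap_behavior  = AV_PTS_WRAP_ADD_OFFSET;
 *     st->pts_wrap_reference = (1LL << 33) - 90000;
 *     wrap_timestamp(st, 100);   // returns 100 + (1LL << 33)
 *
 * Timestamps at or above the reference are returned unchanged.
 */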
#define MAKE_ACCESSORS(str, name, type, field) \
    type av_##name##_get_##field(const str *s) { return s->field; } \
    void av_##name##_set_##field(str *s, type v) { s->field = v; }

MAKE_ACCESSORS(AVStream, stream, AVRational, r_frame_rate)

/** head of registered input format linked list */
static AVInputFormat *first_iformat = NULL;
/** head of registered output format linked list */
static AVOutputFormat *first_oformat = NULL;

AVInputFormat  *av_iformat_next(AVInputFormat  *f)
{
    if(f) return f->next;
    else  return first_iformat;
}

AVOutputFormat *av_oformat_next(AVOutputFormat *f)
{
    if(f) return f->next;
    else  return first_oformat;
}

void av_register_input_format(AVInputFormat *format)
{
    AVInputFormat **p;
    p = &first_iformat;
    while (*p != NULL) p = &(*p)->next;
    *p = format;
    format->next = NULL;
}

void av_register_output_format(AVOutputFormat *format)
{
    AVOutputFormat **p;
    p = &first_oformat;
    while (*p != NULL) p = &(*p)->next;
    *p = format;
    format->next = NULL;
}

int av_match_ext(const char *filename, const char *extensions)
{
    const char *ext, *p;
    char ext1[32], *q;

    if(!filename)
        return 0;

    ext = strrchr(filename, '.');
    if (ext) {
        ext++;
        p = extensions;
        for(;;) {
            q = ext1;
            while (*p != '\0' && *p != ',' && q-ext1<sizeof(ext1)-1)
                *q++ = *p++;
            *q = '\0';
            if (!av_strcasecmp(ext1, ext))
                return 1;
            if (*p == '\0')
                break;
            p++;
        }
    }
    return 0;
}

static int match_format(const char *name, const char *names)
{
    const char *p;
    int len, namelen;

    if (!name || !names)
        return 0;

    namelen = strlen(name);
    while ((p = strchr(names, ','))) {
        len = FFMAX(p - names, namelen);
        if (!av_strncasecmp(name, names, len))
            return 1;
        names = p+1;
    }
    return !av_strcasecmp(name, names);
}

AVOutputFormat *av_guess_format(const char *short_name, const char *filename,
                                const char *mime_type)
{
    AVOutputFormat *fmt = NULL, *fmt_found;
    int score_max, score;

    /* specific test for image sequences */
#if CONFIG_IMAGE2_MUXER
    if (!short_name && filename &&
        av_filename_number_test(filename) &&
        ff_guess_image2_codec(filename) != AV_CODEC_ID_NONE) {
        return av_guess_format("image2", NULL, NULL);
    }
#endif
    /* Find the proper file type. */
    fmt_found = NULL;
    score_max = 0;
    while ((fmt = av_oformat_next(fmt))) {
        score = 0;
        if (fmt->name && short_name && match_format(short_name, fmt->name))
            score += 100;
        if (fmt->mime_type && mime_type && !strcmp(fmt->mime_type, mime_type))
            score += 10;
        if (filename && fmt->extensions &&
            av_match_ext(filename, fmt->extensions)) {
            score += 5;
        }
        if (score > score_max) {
            score_max = score;
            fmt_found = fmt;
        }
    }
    return fmt_found;
}

enum AVCodecID av_guess_codec(AVOutputFormat *fmt, const char *short_name,
                              const char *filename, const char *mime_type, enum AVMediaType type){
    if (!strcmp(fmt->name, "segment") || !strcmp(fmt->name, "ssegment")) {
        fmt = av_guess_format(NULL, filename, NULL);
    }

    if(type == AVMEDIA_TYPE_VIDEO){
        enum AVCodecID codec_id= AV_CODEC_ID_NONE;

#if CONFIG_IMAGE2_MUXER
        if(!strcmp(fmt->name, "image2") || !strcmp(fmt->name, "image2pipe")){
            codec_id= ff_guess_image2_codec(filename);
        }
#endif
        if(codec_id == AV_CODEC_ID_NONE)
            codec_id= fmt->video_codec;
        return codec_id;
    }else if(type == AVMEDIA_TYPE_AUDIO)
        return fmt->audio_codec;
    else if (type == AVMEDIA_TYPE_SUBTITLE)
        return fmt->subtitle_codec;
    else
        return AV_CODEC_ID_NONE;
}
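/*
 * Usage sketch (added note, illustrative only; the output file name is
 * hypothetical): a muxer and its default video codec can be picked from just
 * an output file name:
 *
 *     AVOutputFormat *ofmt = av_guess_format(NULL, "out.mkv", NULL);
 *     if (ofmt) {
 *         enum AVCodecID vcodec =
 *             av_guess_codec(ofmt, NULL, "out.mkv", NULL, AVMEDIA_TYPE_VIDEO);
 *     }
 */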
AVInputFormat *av_find_input_format(const char *short_name)
{
    AVInputFormat *fmt = NULL;
    while ((fmt = av_iformat_next(fmt))) {
        if (match_format(short_name, fmt->name))
            return fmt;
    }
    return NULL;
}

/* an arbitrarily chosen "sane" max packet size -- 50M */
#define SANE_CHUNK_SIZE (50000000)

int ffio_limit(AVIOContext *s, int size)
{
    if(s->maxsize>=0){
        int64_t remaining= s->maxsize - avio_tell(s);
        if(remaining < size){
            int64_t newsize= avio_size(s);
            if(!s->maxsize || s->maxsize<newsize)
                s->maxsize= newsize - !newsize;
            remaining= s->maxsize - avio_tell(s);
            remaining= FFMAX(remaining, 0);
        }

        if(s->maxsize>=0 && remaining+1 < size){
            av_log(NULL, remaining ? AV_LOG_ERROR : AV_LOG_DEBUG, "Truncating packet of size %d to %"PRId64"\n", size, remaining+1);
            size= remaining+1;
        }
    }
    return size;
}

/*
 * Read the data in sane-sized chunks and append to pkt.
 * Return the number of bytes read or an error.
 */
static int append_packet_chunked(AVIOContext *s, AVPacket *pkt, int size)
{
    int orig_pos  = pkt->pos; // av_grow_packet might reset pos
    int orig_size = pkt->size;
    int ret;

    do {
        int prev_size = pkt->size;
        int read_size;

        /*
         * When the caller requests a lot of data, limit it to the amount left
         * in file or SANE_CHUNK_SIZE when it is not known
         */
        read_size = size;
        if (read_size > SANE_CHUNK_SIZE/10) {
            read_size = ffio_limit(s, read_size);
            // If filesize/maxsize is unknown, limit to SANE_CHUNK_SIZE
            if (s->maxsize < 0)
                read_size = FFMIN(read_size, SANE_CHUNK_SIZE);
        }

        ret = av_grow_packet(pkt, read_size);
        if (ret < 0)
            break;

        ret = avio_read(s, pkt->data + prev_size, read_size);
        if (ret != read_size) {
            av_shrink_packet(pkt, prev_size + FFMAX(ret, 0));
            break;
        }

        size -= read_size;
    } while (size > 0);
    if (size > 0)
        pkt->flags |= AV_PKT_FLAG_CORRUPT;

    pkt->pos = orig_pos;
    if (!pkt->size)
        av_free_packet(pkt);
    return pkt->size > orig_size ?
pkt->size - orig_size : ret; yading@11: } yading@11: yading@11: int av_get_packet(AVIOContext *s, AVPacket *pkt, int size) yading@11: { yading@11: av_init_packet(pkt); yading@11: pkt->data = NULL; yading@11: pkt->size = 0; yading@11: pkt->pos = avio_tell(s); yading@11: yading@11: return append_packet_chunked(s, pkt, size); yading@11: } yading@11: yading@11: int av_append_packet(AVIOContext *s, AVPacket *pkt, int size) yading@11: { yading@11: if (!pkt->size) yading@11: return av_get_packet(s, pkt, size); yading@11: return append_packet_chunked(s, pkt, size); yading@11: } yading@11: yading@11: yading@11: int av_filename_number_test(const char *filename) yading@11: { yading@11: char buf[1024]; yading@11: return filename && (av_get_frame_filename(buf, sizeof(buf), filename, 1)>=0); yading@11: } yading@11: yading@11: AVInputFormat *av_probe_input_format3(AVProbeData *pd, int is_opened, int *score_ret) yading@11: { yading@11: AVProbeData lpd = *pd; yading@11: AVInputFormat *fmt1 = NULL, *fmt; yading@11: int score, nodat = 0, score_max=0; yading@11: const static uint8_t zerobuffer[AVPROBE_PADDING_SIZE]; yading@11: yading@11: if (!lpd.buf) yading@11: lpd.buf = zerobuffer; yading@11: yading@11: if (lpd.buf_size > 10 && ff_id3v2_match(lpd.buf, ID3v2_DEFAULT_MAGIC)) { yading@11: int id3len = ff_id3v2_tag_len(lpd.buf); yading@11: if (lpd.buf_size > id3len + 16) { yading@11: lpd.buf += id3len; yading@11: lpd.buf_size -= id3len; yading@11: }else yading@11: nodat = 1; yading@11: } yading@11: yading@11: fmt = NULL; yading@11: while ((fmt1 = av_iformat_next(fmt1))) { yading@11: if (!is_opened == !(fmt1->flags & AVFMT_NOFILE)) yading@11: continue; yading@11: score = 0; yading@11: if (fmt1->read_probe) { yading@11: score = fmt1->read_probe(&lpd); yading@11: if(fmt1->extensions && av_match_ext(lpd.filename, fmt1->extensions)) yading@11: score = FFMAX(score, nodat ? 
AVPROBE_SCORE_MAX/4-1 : 1); yading@11: } else if (fmt1->extensions) { yading@11: if (av_match_ext(lpd.filename, fmt1->extensions)) { yading@11: score = 50; yading@11: } yading@11: } yading@11: if (score > score_max) { yading@11: score_max = score; yading@11: fmt = fmt1; yading@11: }else if (score == score_max) yading@11: fmt = NULL; yading@11: } yading@11: if(nodat) yading@11: score_max = FFMIN(AVPROBE_SCORE_MAX/4-1, score_max); yading@11: *score_ret= score_max; yading@11: yading@11: return fmt; yading@11: } yading@11: yading@11: AVInputFormat *av_probe_input_format2(AVProbeData *pd, int is_opened, int *score_max) yading@11: { yading@11: int score_ret; yading@11: AVInputFormat *fmt= av_probe_input_format3(pd, is_opened, &score_ret); yading@11: if(score_ret > *score_max){ yading@11: *score_max= score_ret; yading@11: return fmt; yading@11: }else yading@11: return NULL; yading@11: } yading@11: yading@11: AVInputFormat *av_probe_input_format(AVProbeData *pd, int is_opened){ yading@11: int score=0; yading@11: return av_probe_input_format2(pd, is_opened, &score); yading@11: } yading@11: yading@11: static int set_codec_from_probe_data(AVFormatContext *s, AVStream *st, AVProbeData *pd) yading@11: { yading@11: static const struct { yading@11: const char *name; enum AVCodecID id; enum AVMediaType type; yading@11: } fmt_id_type[] = { yading@11: { "aac" , AV_CODEC_ID_AAC , AVMEDIA_TYPE_AUDIO }, yading@11: { "ac3" , AV_CODEC_ID_AC3 , AVMEDIA_TYPE_AUDIO }, yading@11: { "dts" , AV_CODEC_ID_DTS , AVMEDIA_TYPE_AUDIO }, yading@11: { "eac3" , AV_CODEC_ID_EAC3 , AVMEDIA_TYPE_AUDIO }, yading@11: { "h264" , AV_CODEC_ID_H264 , AVMEDIA_TYPE_VIDEO }, yading@11: { "loas" , AV_CODEC_ID_AAC_LATM , AVMEDIA_TYPE_AUDIO }, yading@11: { "m4v" , AV_CODEC_ID_MPEG4 , AVMEDIA_TYPE_VIDEO }, yading@11: { "mp3" , AV_CODEC_ID_MP3 , AVMEDIA_TYPE_AUDIO }, yading@11: { "mpegvideo", AV_CODEC_ID_MPEG2VIDEO, AVMEDIA_TYPE_VIDEO }, yading@11: { 0 } yading@11: }; yading@11: int score; yading@11: AVInputFormat *fmt = av_probe_input_format3(pd, 1, &score); yading@11: yading@11: if (fmt && st->request_probe <= score) { yading@11: int i; yading@11: av_log(s, AV_LOG_DEBUG, "Probe with size=%d, packets=%d detected %s with score=%d\n", yading@11: pd->buf_size, MAX_PROBE_PACKETS - st->probe_packets, fmt->name, score); yading@11: for (i = 0; fmt_id_type[i].name; i++) { yading@11: if (!strcmp(fmt->name, fmt_id_type[i].name)) { yading@11: st->codec->codec_id = fmt_id_type[i].id; yading@11: st->codec->codec_type = fmt_id_type[i].type; yading@11: break; yading@11: } yading@11: } yading@11: } yading@11: return score; yading@11: } yading@11: yading@11: /************************************************************/ yading@11: /* input media file */ yading@11: yading@11: int av_demuxer_open(AVFormatContext *ic){ yading@11: int err; yading@11: yading@11: if (ic->iformat->read_header) { yading@11: err = ic->iformat->read_header(ic); yading@11: if (err < 0) yading@11: return err; yading@11: } yading@11: yading@11: if (ic->pb && !ic->data_offset) yading@11: ic->data_offset = avio_tell(ic->pb); yading@11: yading@11: return 0; yading@11: } yading@11: yading@11: yading@11: /** size of probe buffer, for guessing file type from file contents */ yading@11: #define PROBE_BUF_MIN 2048 yading@11: #define PROBE_BUF_MAX (1<<20) yading@11: yading@11: int av_probe_input_buffer(AVIOContext *pb, AVInputFormat **fmt, yading@11: const char *filename, void *logctx, yading@11: unsigned int offset, unsigned int max_probe_size) yading@11: { yading@11: AVProbeData pd = { filename 
? filename : "", NULL, -offset }; yading@11: unsigned char *buf = NULL; yading@11: uint8_t *mime_type; yading@11: int ret = 0, probe_size, buf_offset = 0; yading@11: yading@11: if (!max_probe_size) { yading@11: max_probe_size = PROBE_BUF_MAX; yading@11: } else if (max_probe_size > PROBE_BUF_MAX) { yading@11: max_probe_size = PROBE_BUF_MAX; yading@11: } else if (max_probe_size < PROBE_BUF_MIN) { yading@11: av_log(logctx, AV_LOG_ERROR, yading@11: "Specified probe size value %u cannot be < %u\n", max_probe_size, PROBE_BUF_MIN); yading@11: return AVERROR(EINVAL); yading@11: } yading@11: yading@11: if (offset >= max_probe_size) { yading@11: return AVERROR(EINVAL); yading@11: } yading@11: yading@11: if (!*fmt && pb->av_class && av_opt_get(pb, "mime_type", AV_OPT_SEARCH_CHILDREN, &mime_type) >= 0 && mime_type) { yading@11: if (!av_strcasecmp(mime_type, "audio/aacp")) { yading@11: *fmt = av_find_input_format("aac"); yading@11: } yading@11: av_freep(&mime_type); yading@11: } yading@11: yading@11: for(probe_size= PROBE_BUF_MIN; probe_size<=max_probe_size && !*fmt; yading@11: probe_size = FFMIN(probe_size<<1, FFMAX(max_probe_size, probe_size+1))) { yading@11: int score = probe_size < max_probe_size ? AVPROBE_SCORE_RETRY : 0; yading@11: void *buftmp; yading@11: yading@11: if (probe_size < offset) { yading@11: continue; yading@11: } yading@11: yading@11: /* read probe data */ yading@11: buftmp = av_realloc(buf, probe_size + AVPROBE_PADDING_SIZE); yading@11: if(!buftmp){ yading@11: av_free(buf); yading@11: return AVERROR(ENOMEM); yading@11: } yading@11: buf=buftmp; yading@11: if ((ret = avio_read(pb, buf + buf_offset, probe_size - buf_offset)) < 0) { yading@11: /* fail if error was not end of file, otherwise, lower score */ yading@11: if (ret != AVERROR_EOF) { yading@11: av_free(buf); yading@11: return ret; yading@11: } yading@11: score = 0; yading@11: ret = 0; /* error was end of file, nothing read */ yading@11: } yading@11: pd.buf_size = buf_offset += ret; yading@11: pd.buf = &buf[offset]; yading@11: yading@11: memset(pd.buf + pd.buf_size, 0, AVPROBE_PADDING_SIZE); yading@11: yading@11: /* guess file format */ yading@11: *fmt = av_probe_input_format2(&pd, 1, &score); yading@11: if(*fmt){ yading@11: if(score <= AVPROBE_SCORE_RETRY){ //this can only be true in the last iteration yading@11: av_log(logctx, AV_LOG_WARNING, "Format %s detected only with low score of %d, misdetection possible!\n", (*fmt)->name, score); yading@11: }else yading@11: av_log(logctx, AV_LOG_DEBUG, "Format %s probed with size=%d and score=%d\n", (*fmt)->name, probe_size, score); yading@11: } yading@11: } yading@11: yading@11: if (!*fmt) { yading@11: av_free(buf); yading@11: return AVERROR_INVALIDDATA; yading@11: } yading@11: yading@11: /* rewind. 
reuse probe buffer to avoid seeking */ yading@11: ret = ffio_rewind_with_probe_data(pb, &buf, pd.buf_size); yading@11: yading@11: return ret; yading@11: } yading@11: yading@11: /* open input file and probe the format if necessary */ yading@11: static int init_input(AVFormatContext *s, const char *filename, AVDictionary **options) yading@11: { yading@11: int ret; yading@11: AVProbeData pd = {filename, NULL, 0}; yading@11: int score = AVPROBE_SCORE_RETRY; yading@11: yading@11: if (s->pb) { yading@11: s->flags |= AVFMT_FLAG_CUSTOM_IO; yading@11: if (!s->iformat) yading@11: return av_probe_input_buffer(s->pb, &s->iformat, filename, s, 0, s->probesize); yading@11: else if (s->iformat->flags & AVFMT_NOFILE) yading@11: av_log(s, AV_LOG_WARNING, "Custom AVIOContext makes no sense and " yading@11: "will be ignored with AVFMT_NOFILE format.\n"); yading@11: return 0; yading@11: } yading@11: yading@11: if ( (s->iformat && s->iformat->flags & AVFMT_NOFILE) || yading@11: (!s->iformat && (s->iformat = av_probe_input_format2(&pd, 0, &score)))) yading@11: return 0; yading@11: yading@11: if ((ret = avio_open2(&s->pb, filename, AVIO_FLAG_READ | s->avio_flags, yading@11: &s->interrupt_callback, options)) < 0) yading@11: return ret; yading@11: if (s->iformat) yading@11: return 0; yading@11: return av_probe_input_buffer(s->pb, &s->iformat, filename, s, 0, s->probesize); yading@11: } yading@11: yading@11: static AVPacket *add_to_pktbuf(AVPacketList **packet_buffer, AVPacket *pkt, yading@11: AVPacketList **plast_pktl){ yading@11: AVPacketList *pktl = av_mallocz(sizeof(AVPacketList)); yading@11: if (!pktl) yading@11: return NULL; yading@11: yading@11: if (*packet_buffer) yading@11: (*plast_pktl)->next = pktl; yading@11: else yading@11: *packet_buffer = pktl; yading@11: yading@11: /* add the packet in the buffered packet list */ yading@11: *plast_pktl = pktl; yading@11: pktl->pkt= *pkt; yading@11: return &pktl->pkt; yading@11: } yading@11: yading@11: int avformat_queue_attached_pictures(AVFormatContext *s) yading@11: { yading@11: int i; yading@11: for (i = 0; i < s->nb_streams; i++) yading@11: if (s->streams[i]->disposition & AV_DISPOSITION_ATTACHED_PIC && yading@11: s->streams[i]->discard < AVDISCARD_ALL) { yading@11: AVPacket copy = s->streams[i]->attached_pic; yading@11: copy.buf = av_buffer_ref(copy.buf); yading@11: if (!copy.buf) yading@11: return AVERROR(ENOMEM); yading@11: yading@11: add_to_pktbuf(&s->raw_packet_buffer, ©, &s->raw_packet_buffer_end); yading@11: } yading@11: return 0; yading@11: } yading@11: yading@11: int avformat_open_input(AVFormatContext **ps, const char *filename, AVInputFormat *fmt, AVDictionary **options) yading@11: { yading@11: AVFormatContext *s = *ps; yading@11: int ret = 0; yading@11: AVDictionary *tmp = NULL; yading@11: ID3v2ExtraMeta *id3v2_extra_meta = NULL; yading@11: yading@11: if (!s && !(s = avformat_alloc_context())) yading@11: return AVERROR(ENOMEM); yading@11: if (!s->av_class){ yading@11: av_log(NULL, AV_LOG_ERROR, "Input context has not been properly allocated by avformat_alloc_context() and is not NULL either\n"); yading@11: return AVERROR(EINVAL); yading@11: } yading@11: if (fmt) yading@11: s->iformat = fmt; yading@11: yading@11: if (options) yading@11: av_dict_copy(&tmp, *options, 0); yading@11: yading@11: if ((ret = av_opt_set_dict(s, &tmp)) < 0) yading@11: goto fail; yading@11: yading@11: if ((ret = init_input(s, filename, &tmp)) < 0) yading@11: goto fail; yading@11: avio_skip(s->pb, s->skip_initial_bytes); yading@11: yading@11: /* check filename in case an image 
number is expected */ yading@11: if (s->iformat->flags & AVFMT_NEEDNUMBER) { yading@11: if (!av_filename_number_test(filename)) { yading@11: ret = AVERROR(EINVAL); yading@11: goto fail; yading@11: } yading@11: } yading@11: yading@11: s->duration = s->start_time = AV_NOPTS_VALUE; yading@11: av_strlcpy(s->filename, filename ? filename : "", sizeof(s->filename)); yading@11: yading@11: /* allocate private data */ yading@11: if (s->iformat->priv_data_size > 0) { yading@11: if (!(s->priv_data = av_mallocz(s->iformat->priv_data_size))) { yading@11: ret = AVERROR(ENOMEM); yading@11: goto fail; yading@11: } yading@11: if (s->iformat->priv_class) { yading@11: *(const AVClass**)s->priv_data = s->iformat->priv_class; yading@11: av_opt_set_defaults(s->priv_data); yading@11: if ((ret = av_opt_set_dict(s->priv_data, &tmp)) < 0) yading@11: goto fail; yading@11: } yading@11: } yading@11: yading@11: /* e.g. AVFMT_NOFILE formats will not have a AVIOContext */ yading@11: if (s->pb) yading@11: ff_id3v2_read(s, ID3v2_DEFAULT_MAGIC, &id3v2_extra_meta); yading@11: yading@11: if (!(s->flags&AVFMT_FLAG_PRIV_OPT) && s->iformat->read_header) yading@11: if ((ret = s->iformat->read_header(s)) < 0) yading@11: goto fail; yading@11: yading@11: if (id3v2_extra_meta) { yading@11: if (!strcmp(s->iformat->name, "mp3") || !strcmp(s->iformat->name, "aac")) { yading@11: if((ret = ff_id3v2_parse_apic(s, &id3v2_extra_meta)) < 0) yading@11: goto fail; yading@11: } else yading@11: av_log(s, AV_LOG_DEBUG, "demuxer does not support additional id3 data, skipping\n"); yading@11: } yading@11: ff_id3v2_free_extra_meta(&id3v2_extra_meta); yading@11: yading@11: if ((ret = avformat_queue_attached_pictures(s)) < 0) yading@11: goto fail; yading@11: yading@11: if (!(s->flags&AVFMT_FLAG_PRIV_OPT) && s->pb && !s->data_offset) yading@11: s->data_offset = avio_tell(s->pb); yading@11: yading@11: s->raw_packet_buffer_remaining_size = RAW_PACKET_BUFFER_SIZE; yading@11: yading@11: if (options) { yading@11: av_dict_free(options); yading@11: *options = tmp; yading@11: } yading@11: *ps = s; yading@11: return 0; yading@11: yading@11: fail: yading@11: ff_id3v2_free_extra_meta(&id3v2_extra_meta); yading@11: av_dict_free(&tmp); yading@11: if (s->pb && !(s->flags & AVFMT_FLAG_CUSTOM_IO)) yading@11: avio_close(s->pb); yading@11: avformat_free_context(s); yading@11: *ps = NULL; yading@11: return ret; yading@11: } yading@11: yading@11: /*******************************************************/ yading@11: yading@11: static void force_codec_ids(AVFormatContext *s, AVStream *st) yading@11: { yading@11: switch(st->codec->codec_type){ yading@11: case AVMEDIA_TYPE_VIDEO: yading@11: if(s->video_codec_id) st->codec->codec_id= s->video_codec_id; yading@11: break; yading@11: case AVMEDIA_TYPE_AUDIO: yading@11: if(s->audio_codec_id) st->codec->codec_id= s->audio_codec_id; yading@11: break; yading@11: case AVMEDIA_TYPE_SUBTITLE: yading@11: if(s->subtitle_codec_id)st->codec->codec_id= s->subtitle_codec_id; yading@11: break; yading@11: } yading@11: } yading@11: yading@11: static void probe_codec(AVFormatContext *s, AVStream *st, const AVPacket *pkt) yading@11: { yading@11: if(st->request_probe>0){ yading@11: AVProbeData *pd = &st->probe_data; yading@11: int end; yading@11: av_log(s, AV_LOG_DEBUG, "probing stream %d pp:%d\n", st->index, st->probe_packets); yading@11: --st->probe_packets; yading@11: yading@11: if (pkt) { yading@11: uint8_t *new_buf = av_realloc(pd->buf, pd->buf_size+pkt->size+AVPROBE_PADDING_SIZE); yading@11: if(!new_buf) yading@11: goto no_packet; yading@11: 
pd->buf = new_buf; yading@11: memcpy(pd->buf+pd->buf_size, pkt->data, pkt->size); yading@11: pd->buf_size += pkt->size; yading@11: memset(pd->buf+pd->buf_size, 0, AVPROBE_PADDING_SIZE); yading@11: } else { yading@11: no_packet: yading@11: st->probe_packets = 0; yading@11: if (!pd->buf_size) { yading@11: av_log(s, AV_LOG_WARNING, "nothing to probe for stream %d\n", yading@11: st->index); yading@11: } yading@11: } yading@11: yading@11: end= s->raw_packet_buffer_remaining_size <= 0 yading@11: || st->probe_packets<=0; yading@11: yading@11: if(end || av_log2(pd->buf_size) != av_log2(pd->buf_size - pkt->size)){ yading@11: int score= set_codec_from_probe_data(s, st, pd); yading@11: if( (st->codec->codec_id != AV_CODEC_ID_NONE && score > AVPROBE_SCORE_RETRY) yading@11: || end){ yading@11: pd->buf_size=0; yading@11: av_freep(&pd->buf); yading@11: st->request_probe= -1; yading@11: if(st->codec->codec_id != AV_CODEC_ID_NONE){ yading@11: av_log(s, AV_LOG_DEBUG, "probed stream %d\n", st->index); yading@11: }else yading@11: av_log(s, AV_LOG_WARNING, "probed stream %d failed\n", st->index); yading@11: } yading@11: force_codec_ids(s, st); yading@11: } yading@11: } yading@11: } yading@11: yading@11: int ff_read_packet(AVFormatContext *s, AVPacket *pkt) yading@11: { yading@11: int ret, i; yading@11: AVStream *st; yading@11: yading@11: for(;;){ yading@11: AVPacketList *pktl = s->raw_packet_buffer; yading@11: yading@11: if (pktl) { yading@11: *pkt = pktl->pkt; yading@11: st = s->streams[pkt->stream_index]; yading@11: if (s->raw_packet_buffer_remaining_size <= 0) yading@11: probe_codec(s, st, NULL); yading@11: if(st->request_probe <= 0){ yading@11: s->raw_packet_buffer = pktl->next; yading@11: s->raw_packet_buffer_remaining_size += pkt->size; yading@11: av_free(pktl); yading@11: return 0; yading@11: } yading@11: } yading@11: yading@11: pkt->data = NULL; yading@11: pkt->size = 0; yading@11: av_init_packet(pkt); yading@11: ret= s->iformat->read_packet(s, pkt); yading@11: if (ret < 0) { yading@11: if (!pktl || ret == AVERROR(EAGAIN)) yading@11: return ret; yading@11: for (i = 0; i < s->nb_streams; i++) { yading@11: st = s->streams[i]; yading@11: if (st->probe_packets) { yading@11: probe_codec(s, st, NULL); yading@11: } yading@11: av_assert0(st->request_probe <= 0); yading@11: } yading@11: continue; yading@11: } yading@11: yading@11: if ((s->flags & AVFMT_FLAG_DISCARD_CORRUPT) && yading@11: (pkt->flags & AV_PKT_FLAG_CORRUPT)) { yading@11: av_log(s, AV_LOG_WARNING, yading@11: "Dropped corrupted packet (stream = %d)\n", yading@11: pkt->stream_index); yading@11: av_free_packet(pkt); yading@11: continue; yading@11: } yading@11: yading@11: if(!(s->flags & AVFMT_FLAG_KEEP_SIDE_DATA)) yading@11: av_packet_merge_side_data(pkt); yading@11: yading@11: if(pkt->stream_index >= (unsigned)s->nb_streams){ yading@11: av_log(s, AV_LOG_ERROR, "Invalid stream index %d\n", pkt->stream_index); yading@11: continue; yading@11: } yading@11: yading@11: st= s->streams[pkt->stream_index]; yading@11: pkt->dts = wrap_timestamp(st, pkt->dts); yading@11: pkt->pts = wrap_timestamp(st, pkt->pts); yading@11: yading@11: force_codec_ids(s, st); yading@11: yading@11: /* TODO: audio: time filter; video: frame reordering (pts != dts) */ yading@11: if (s->use_wallclock_as_timestamps) yading@11: pkt->dts = pkt->pts = av_rescale_q(av_gettime(), AV_TIME_BASE_Q, st->time_base); yading@11: yading@11: if(!pktl && st->request_probe <= 0) yading@11: return ret; yading@11: yading@11: add_to_pktbuf(&s->raw_packet_buffer, pkt, &s->raw_packet_buffer_end); 
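        /* Note (added comment): raw_packet_buffer_remaining_size is the probing
         * budget left, in bytes; once it drops to zero, probe_codec() above is
         * invoked with a NULL packet to force a codec decision for the stream. */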
yading@11: s->raw_packet_buffer_remaining_size -= pkt->size; yading@11: yading@11: probe_codec(s, st, pkt); yading@11: } yading@11: } yading@11: yading@11: #if FF_API_READ_PACKET yading@11: int av_read_packet(AVFormatContext *s, AVPacket *pkt) yading@11: { yading@11: return ff_read_packet(s, pkt); yading@11: } yading@11: #endif yading@11: yading@11: yading@11: /**********************************************************/ yading@11: yading@11: static int determinable_frame_size(AVCodecContext *avctx) yading@11: { yading@11: if (/*avctx->codec_id == AV_CODEC_ID_AAC ||*/ yading@11: avctx->codec_id == AV_CODEC_ID_MP1 || yading@11: avctx->codec_id == AV_CODEC_ID_MP2 || yading@11: avctx->codec_id == AV_CODEC_ID_MP3/* || yading@11: avctx->codec_id == AV_CODEC_ID_CELT*/) yading@11: return 1; yading@11: return 0; yading@11: } yading@11: yading@11: /** yading@11: * Get the number of samples of an audio frame. Return -1 on error. yading@11: */ yading@11: int ff_get_audio_frame_size(AVCodecContext *enc, int size, int mux) yading@11: { yading@11: int frame_size; yading@11: yading@11: /* give frame_size priority if demuxing */ yading@11: if (!mux && enc->frame_size > 1) yading@11: return enc->frame_size; yading@11: yading@11: if ((frame_size = av_get_audio_frame_duration(enc, size)) > 0) yading@11: return frame_size; yading@11: yading@11: /* fallback to using frame_size if muxing */ yading@11: if (enc->frame_size > 1) yading@11: return enc->frame_size; yading@11: yading@11: //For WMA we currently have no other means to calculate duration thus we yading@11: //do it here by assuming CBR, which is true for all known cases. yading@11: if(!mux && enc->bit_rate>0 && size>0 && enc->sample_rate>0 && enc->block_align>1) { yading@11: if (enc->codec_id == AV_CODEC_ID_WMAV1 || enc->codec_id == AV_CODEC_ID_WMAV2) yading@11: return ((int64_t)size * 8 * enc->sample_rate) / enc->bit_rate; yading@11: } yading@11: yading@11: return -1; yading@11: } yading@11: yading@11: yading@11: /** yading@11: * Return the frame duration in seconds. Return 0 if not available. yading@11: */ yading@11: void ff_compute_frame_duration(int *pnum, int *pden, AVStream *st, yading@11: AVCodecParserContext *pc, AVPacket *pkt) yading@11: { yading@11: int frame_size; yading@11: yading@11: *pnum = 0; yading@11: *pden = 0; yading@11: switch(st->codec->codec_type) { yading@11: case AVMEDIA_TYPE_VIDEO: yading@11: if (st->r_frame_rate.num && !pc) { yading@11: *pnum = st->r_frame_rate.den; yading@11: *pden = st->r_frame_rate.num; yading@11: } else if(st->time_base.num*1000LL > st->time_base.den) { yading@11: *pnum = st->time_base.num; yading@11: *pden = st->time_base.den; yading@11: }else if(st->codec->time_base.num*1000LL > st->codec->time_base.den){ yading@11: *pnum = st->codec->time_base.num; yading@11: *pden = st->codec->time_base.den; yading@11: if (pc && pc->repeat_pict) { yading@11: if (*pnum > INT_MAX / (1 + pc->repeat_pict)) yading@11: *pden /= 1 + pc->repeat_pict; yading@11: else yading@11: *pnum *= 1 + pc->repeat_pict; yading@11: } yading@11: //If this codec can be interlaced or progressive then we need a parser to compute duration of a packet yading@11: //Thus if we have no parser in such case leave duration undefined. 
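            // (added note) e.g. interlaced MPEG-2: the codec time base is one field,
            // ticks_per_frame is 2, and repeat_pict from the parser decides whether a
            // frame spans 2 or 3 fields, so without a parser no duration is derived here.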
yading@11: if(st->codec->ticks_per_frame>1 && !pc){ yading@11: *pnum = *pden = 0; yading@11: } yading@11: } yading@11: break; yading@11: case AVMEDIA_TYPE_AUDIO: yading@11: frame_size = ff_get_audio_frame_size(st->codec, pkt->size, 0); yading@11: if (frame_size <= 0 || st->codec->sample_rate <= 0) yading@11: break; yading@11: *pnum = frame_size; yading@11: *pden = st->codec->sample_rate; yading@11: break; yading@11: default: yading@11: break; yading@11: } yading@11: } yading@11: yading@11: static int is_intra_only(AVCodecContext *enc){ yading@11: const AVCodecDescriptor *desc; yading@11: yading@11: if(enc->codec_type != AVMEDIA_TYPE_VIDEO) yading@11: return 1; yading@11: yading@11: desc = av_codec_get_codec_descriptor(enc); yading@11: if (!desc) { yading@11: desc = avcodec_descriptor_get(enc->codec_id); yading@11: av_codec_set_codec_descriptor(enc, desc); yading@11: } yading@11: if (desc) yading@11: return !!(desc->props & AV_CODEC_PROP_INTRA_ONLY); yading@11: return 0; yading@11: } yading@11: yading@11: static int has_decode_delay_been_guessed(AVStream *st) yading@11: { yading@11: if(st->codec->codec_id != AV_CODEC_ID_H264) return 1; yading@11: if(!st->info) // if we have left find_stream_info then nb_decoded_frames wont increase anymore for stream copy yading@11: return 1; yading@11: #if CONFIG_H264_DECODER yading@11: if(st->codec->has_b_frames && yading@11: avpriv_h264_has_num_reorder_frames(st->codec) == st->codec->has_b_frames) yading@11: return 1; yading@11: #endif yading@11: if(st->codec->has_b_frames<3) yading@11: return st->nb_decoded_frames >= 7; yading@11: else if(st->codec->has_b_frames<4) yading@11: return st->nb_decoded_frames >= 18; yading@11: else yading@11: return st->nb_decoded_frames >= 20; yading@11: } yading@11: yading@11: static AVPacketList *get_next_pkt(AVFormatContext *s, AVStream *st, AVPacketList *pktl) yading@11: { yading@11: if (pktl->next) yading@11: return pktl->next; yading@11: if (pktl == s->parse_queue_end) yading@11: return s->packet_buffer; yading@11: return NULL; yading@11: } yading@11: yading@11: static int update_wrap_reference(AVFormatContext *s, AVStream *st, int stream_index) yading@11: { yading@11: if (s->correct_ts_overflow && st->pts_wrap_bits < 63 && yading@11: st->pts_wrap_reference == AV_NOPTS_VALUE && st->first_dts != AV_NOPTS_VALUE) { yading@11: int i; yading@11: yading@11: // reference time stamp should be 60 s before first time stamp yading@11: int64_t pts_wrap_reference = st->first_dts - av_rescale(60, st->time_base.den, st->time_base.num); yading@11: // if first time stamp is not more than 1/8 and 60s before the wrap point, subtract rather than add wrap offset yading@11: int pts_wrap_behavior = (st->first_dts < (1LL<pts_wrap_bits) - (1LL<pts_wrap_bits-3)) || yading@11: (st->first_dts < (1LL<pts_wrap_bits) - av_rescale(60, st->time_base.den, st->time_base.num)) ? 
yading@11: AV_PTS_WRAP_ADD_OFFSET : AV_PTS_WRAP_SUB_OFFSET; yading@11: yading@11: AVProgram *first_program = av_find_program_from_stream(s, NULL, stream_index); yading@11: yading@11: if (!first_program) { yading@11: int default_stream_index = av_find_default_stream_index(s); yading@11: if (s->streams[default_stream_index]->pts_wrap_reference == AV_NOPTS_VALUE) { yading@11: for (i=0; inb_streams; i++) { yading@11: s->streams[i]->pts_wrap_reference = pts_wrap_reference; yading@11: s->streams[i]->pts_wrap_behavior = pts_wrap_behavior; yading@11: } yading@11: } yading@11: else { yading@11: st->pts_wrap_reference = s->streams[default_stream_index]->pts_wrap_reference; yading@11: st->pts_wrap_behavior = s->streams[default_stream_index]->pts_wrap_behavior; yading@11: } yading@11: } yading@11: else { yading@11: AVProgram *program = first_program; yading@11: while (program) { yading@11: if (program->pts_wrap_reference != AV_NOPTS_VALUE) { yading@11: pts_wrap_reference = program->pts_wrap_reference; yading@11: pts_wrap_behavior = program->pts_wrap_behavior; yading@11: break; yading@11: } yading@11: program = av_find_program_from_stream(s, program, stream_index); yading@11: } yading@11: yading@11: // update every program with differing pts_wrap_reference yading@11: program = first_program; yading@11: while(program) { yading@11: if (program->pts_wrap_reference != pts_wrap_reference) { yading@11: for (i=0; inb_stream_indexes; i++) { yading@11: s->streams[program->stream_index[i]]->pts_wrap_reference = pts_wrap_reference; yading@11: s->streams[program->stream_index[i]]->pts_wrap_behavior = pts_wrap_behavior; yading@11: } yading@11: yading@11: program->pts_wrap_reference = pts_wrap_reference; yading@11: program->pts_wrap_behavior = pts_wrap_behavior; yading@11: } yading@11: program = av_find_program_from_stream(s, program, stream_index); yading@11: } yading@11: } yading@11: return 1; yading@11: } yading@11: return 0; yading@11: } yading@11: yading@11: static void update_initial_timestamps(AVFormatContext *s, int stream_index, yading@11: int64_t dts, int64_t pts, AVPacket *pkt) yading@11: { yading@11: AVStream *st= s->streams[stream_index]; yading@11: AVPacketList *pktl= s->parse_queue ? 
s->parse_queue : s->packet_buffer; yading@11: int64_t pts_buffer[MAX_REORDER_DELAY+1]; yading@11: int64_t shift; yading@11: int i, delay; yading@11: yading@11: if(st->first_dts != AV_NOPTS_VALUE || dts == AV_NOPTS_VALUE || st->cur_dts == AV_NOPTS_VALUE || is_relative(dts)) yading@11: return; yading@11: yading@11: delay = st->codec->has_b_frames; yading@11: st->first_dts= dts - (st->cur_dts - RELATIVE_TS_BASE); yading@11: st->cur_dts= dts; yading@11: shift = st->first_dts - RELATIVE_TS_BASE; yading@11: yading@11: for (i=0; ipkt.stream_index != stream_index) yading@11: continue; yading@11: if(is_relative(pktl->pkt.pts)) yading@11: pktl->pkt.pts += shift; yading@11: yading@11: if(is_relative(pktl->pkt.dts)) yading@11: pktl->pkt.dts += shift; yading@11: yading@11: if(st->start_time == AV_NOPTS_VALUE && pktl->pkt.pts != AV_NOPTS_VALUE) yading@11: st->start_time= pktl->pkt.pts; yading@11: yading@11: if(pktl->pkt.pts != AV_NOPTS_VALUE && delay <= MAX_REORDER_DELAY && has_decode_delay_been_guessed(st)){ yading@11: pts_buffer[0]= pktl->pkt.pts; yading@11: for(i=0; i pts_buffer[i+1]; i++) yading@11: FFSWAP(int64_t, pts_buffer[i], pts_buffer[i+1]); yading@11: if(pktl->pkt.dts == AV_NOPTS_VALUE) yading@11: pktl->pkt.dts= pts_buffer[0]; yading@11: } yading@11: } yading@11: yading@11: if (update_wrap_reference(s, st, stream_index) && st->pts_wrap_behavior == AV_PTS_WRAP_SUB_OFFSET) { yading@11: // correct first time stamps to negative values yading@11: st->first_dts = wrap_timestamp(st, st->first_dts); yading@11: st->cur_dts = wrap_timestamp(st, st->cur_dts); yading@11: pkt->dts = wrap_timestamp(st, pkt->dts); yading@11: pkt->pts = wrap_timestamp(st, pkt->pts); yading@11: pts = wrap_timestamp(st, pts); yading@11: } yading@11: yading@11: if (st->start_time == AV_NOPTS_VALUE) yading@11: st->start_time = pts; yading@11: } yading@11: yading@11: static void update_initial_durations(AVFormatContext *s, AVStream *st, yading@11: int stream_index, int duration) yading@11: { yading@11: AVPacketList *pktl= s->parse_queue ? s->parse_queue : s->packet_buffer; yading@11: int64_t cur_dts= RELATIVE_TS_BASE; yading@11: yading@11: if(st->first_dts != AV_NOPTS_VALUE){ yading@11: cur_dts= st->first_dts; yading@11: for(; pktl; pktl= get_next_pkt(s, st, pktl)){ yading@11: if(pktl->pkt.stream_index == stream_index){ yading@11: if(pktl->pkt.pts != pktl->pkt.dts || pktl->pkt.dts != AV_NOPTS_VALUE || pktl->pkt.duration) yading@11: break; yading@11: cur_dts -= duration; yading@11: } yading@11: } yading@11: if(pktl && pktl->pkt.dts != st->first_dts) { yading@11: av_log(s, AV_LOG_DEBUG, "first_dts %s not matching first dts %s in the queue\n", av_ts2str(st->first_dts), av_ts2str(pktl->pkt.dts)); yading@11: return; yading@11: } yading@11: if(!pktl) { yading@11: av_log(s, AV_LOG_DEBUG, "first_dts %s but no packet with dts in the queue\n", av_ts2str(st->first_dts)); yading@11: return; yading@11: } yading@11: pktl= s->parse_queue ? 
s->parse_queue : s->packet_buffer; yading@11: st->first_dts = cur_dts; yading@11: }else if(st->cur_dts != RELATIVE_TS_BASE) yading@11: return; yading@11: yading@11: for(; pktl; pktl= get_next_pkt(s, st, pktl)){ yading@11: if(pktl->pkt.stream_index != stream_index) yading@11: continue; yading@11: if(pktl->pkt.pts == pktl->pkt.dts && (pktl->pkt.dts == AV_NOPTS_VALUE || pktl->pkt.dts == st->first_dts) yading@11: && !pktl->pkt.duration){ yading@11: pktl->pkt.dts= cur_dts; yading@11: if(!st->codec->has_b_frames) yading@11: pktl->pkt.pts= cur_dts; yading@11: // if (st->codec->codec_type != AVMEDIA_TYPE_AUDIO) yading@11: pktl->pkt.duration = duration; yading@11: }else yading@11: break; yading@11: cur_dts = pktl->pkt.dts + pktl->pkt.duration; yading@11: } yading@11: if(!pktl) yading@11: st->cur_dts= cur_dts; yading@11: } yading@11: yading@11: static void compute_pkt_fields(AVFormatContext *s, AVStream *st, yading@11: AVCodecParserContext *pc, AVPacket *pkt) yading@11: { yading@11: int num, den, presentation_delayed, delay, i; yading@11: int64_t offset; yading@11: yading@11: if (s->flags & AVFMT_FLAG_NOFILLIN) yading@11: return; yading@11: yading@11: if((s->flags & AVFMT_FLAG_IGNDTS) && pkt->pts != AV_NOPTS_VALUE) yading@11: pkt->dts= AV_NOPTS_VALUE; yading@11: yading@11: if (st->codec->codec_id != AV_CODEC_ID_H264 && pc && pc->pict_type == AV_PICTURE_TYPE_B) yading@11: //FIXME Set low_delay = 0 when has_b_frames = 1 yading@11: st->codec->has_b_frames = 1; yading@11: yading@11: /* do we have a video B-frame ? */ yading@11: delay= st->codec->has_b_frames; yading@11: presentation_delayed = 0; yading@11: yading@11: /* XXX: need has_b_frame, but cannot get it if the codec is yading@11: not initialized */ yading@11: if (delay && yading@11: pc && pc->pict_type != AV_PICTURE_TYPE_B) yading@11: presentation_delayed = 1; yading@11: yading@11: if(pkt->pts != AV_NOPTS_VALUE && pkt->dts != AV_NOPTS_VALUE && st->pts_wrap_bits<63 && pkt->dts - (1LL<<(st->pts_wrap_bits-1)) > pkt->pts){ yading@11: if(is_relative(st->cur_dts) || pkt->dts - (1LL<<(st->pts_wrap_bits-1)) > st->cur_dts) { yading@11: pkt->dts -= 1LL<pts_wrap_bits; yading@11: } else yading@11: pkt->pts += 1LL<pts_wrap_bits; yading@11: } yading@11: yading@11: // some mpeg2 in mpeg-ps lack dts (issue171 / input_file.mpg) yading@11: // we take the conservative approach and discard both yading@11: // Note, if this is misbehaving for a H.264 file then possibly presentation_delayed is not set correctly. 
yading@11: if(delay==1 && pkt->dts == pkt->pts && pkt->dts != AV_NOPTS_VALUE && presentation_delayed){ yading@11: av_log(s, AV_LOG_DEBUG, "invalid dts/pts combination %"PRIi64"\n", pkt->dts); yading@11: if(strcmp(s->iformat->name, "mov,mp4,m4a,3gp,3g2,mj2")) // otherwise we discard correct timestamps for vc1-wmapro.ism yading@11: pkt->dts= AV_NOPTS_VALUE; yading@11: } yading@11: yading@11: if (pkt->duration == 0) { yading@11: ff_compute_frame_duration(&num, &den, st, pc, pkt); yading@11: if (den && num) { yading@11: pkt->duration = av_rescale_rnd(1, num * (int64_t)st->time_base.den, den * (int64_t)st->time_base.num, AV_ROUND_DOWN); yading@11: } yading@11: } yading@11: if(pkt->duration != 0 && (s->packet_buffer || s->parse_queue)) yading@11: update_initial_durations(s, st, pkt->stream_index, pkt->duration); yading@11: yading@11: /* correct timestamps with byte offset if demuxers only have timestamps yading@11: on packet boundaries */ yading@11: if(pc && st->need_parsing == AVSTREAM_PARSE_TIMESTAMPS && pkt->size){ yading@11: /* this will estimate bitrate based on this frame's duration and size */ yading@11: offset = av_rescale(pc->offset, pkt->duration, pkt->size); yading@11: if(pkt->pts != AV_NOPTS_VALUE) yading@11: pkt->pts += offset; yading@11: if(pkt->dts != AV_NOPTS_VALUE) yading@11: pkt->dts += offset; yading@11: } yading@11: yading@11: if (pc && pc->dts_sync_point >= 0) { yading@11: // we have synchronization info from the parser yading@11: int64_t den = st->codec->time_base.den * (int64_t) st->time_base.num; yading@11: if (den > 0) { yading@11: int64_t num = st->codec->time_base.num * (int64_t) st->time_base.den; yading@11: if (pkt->dts != AV_NOPTS_VALUE) { yading@11: // got DTS from the stream, update reference timestamp yading@11: st->reference_dts = pkt->dts - pc->dts_ref_dts_delta * num / den; yading@11: pkt->pts = pkt->dts + pc->pts_dts_delta * num / den; yading@11: } else if (st->reference_dts != AV_NOPTS_VALUE) { yading@11: // compute DTS based on reference timestamp yading@11: pkt->dts = st->reference_dts + pc->dts_ref_dts_delta * num / den; yading@11: pkt->pts = pkt->dts + pc->pts_dts_delta * num / den; yading@11: } yading@11: if (pc->dts_sync_point > 0) yading@11: st->reference_dts = pkt->dts; // new reference yading@11: } yading@11: } yading@11: yading@11: /* This may be redundant, but it should not hurt. */ yading@11: if(pkt->dts != AV_NOPTS_VALUE && pkt->pts != AV_NOPTS_VALUE && pkt->pts > pkt->dts) yading@11: presentation_delayed = 1; yading@11: yading@11: av_dlog(NULL, "IN delayed:%d pts:%s, dts:%s cur_dts:%s st:%d pc:%p duration:%d\n", yading@11: presentation_delayed, av_ts2str(pkt->pts), av_ts2str(pkt->dts), av_ts2str(st->cur_dts), pkt->stream_index, pc, pkt->duration); yading@11: /* interpolate PTS and DTS if they are not present */ yading@11: //We skip H264 currently because delay and has_b_frames are not reliably set yading@11: if((delay==0 || (delay==1 && pc)) && st->codec->codec_id != AV_CODEC_ID_H264){ yading@11: if (presentation_delayed) { yading@11: /* DTS = decompression timestamp */ yading@11: /* PTS = presentation timestamp */ yading@11: if (pkt->dts == AV_NOPTS_VALUE) yading@11: pkt->dts = st->last_IP_pts; yading@11: update_initial_timestamps(s, pkt->stream_index, pkt->dts, pkt->pts, pkt); yading@11: if (pkt->dts == AV_NOPTS_VALUE) yading@11: pkt->dts = st->cur_dts; yading@11: yading@11: /* this is tricky: the dts must be incremented by the duration yading@11: of the frame we are displaying, i.e. 
the last I- or P-frame */ yading@11: if (st->last_IP_duration == 0) yading@11: st->last_IP_duration = pkt->duration; yading@11: if(pkt->dts != AV_NOPTS_VALUE) yading@11: st->cur_dts = pkt->dts + st->last_IP_duration; yading@11: st->last_IP_duration = pkt->duration; yading@11: st->last_IP_pts= pkt->pts; yading@11: /* cannot compute PTS if not present (we can compute it only yading@11: by knowing the future */ yading@11: } else if (pkt->pts != AV_NOPTS_VALUE || yading@11: pkt->dts != AV_NOPTS_VALUE || yading@11: pkt->duration ) { yading@11: int duration = pkt->duration; yading@11: yading@11: /* presentation is not delayed : PTS and DTS are the same */ yading@11: if (pkt->pts == AV_NOPTS_VALUE) yading@11: pkt->pts = pkt->dts; yading@11: update_initial_timestamps(s, pkt->stream_index, pkt->pts, yading@11: pkt->pts, pkt); yading@11: if (pkt->pts == AV_NOPTS_VALUE) yading@11: pkt->pts = st->cur_dts; yading@11: pkt->dts = pkt->pts; yading@11: if (pkt->pts != AV_NOPTS_VALUE) yading@11: st->cur_dts = pkt->pts + duration; yading@11: } yading@11: } yading@11: yading@11: if(pkt->pts != AV_NOPTS_VALUE && delay <= MAX_REORDER_DELAY && has_decode_delay_been_guessed(st)){ yading@11: st->pts_buffer[0]= pkt->pts; yading@11: for(i=0; ipts_buffer[i] > st->pts_buffer[i+1]; i++) yading@11: FFSWAP(int64_t, st->pts_buffer[i], st->pts_buffer[i+1]); yading@11: if(pkt->dts == AV_NOPTS_VALUE) yading@11: pkt->dts= st->pts_buffer[0]; yading@11: } yading@11: if(st->codec->codec_id == AV_CODEC_ID_H264){ // we skipped it above so we try here yading@11: update_initial_timestamps(s, pkt->stream_index, pkt->dts, pkt->pts, pkt); // this should happen on the first packet yading@11: } yading@11: if(pkt->dts > st->cur_dts) yading@11: st->cur_dts = pkt->dts; yading@11: yading@11: av_dlog(NULL, "OUTdelayed:%d/%d pts:%s, dts:%s cur_dts:%s\n", yading@11: presentation_delayed, delay, av_ts2str(pkt->pts), av_ts2str(pkt->dts), av_ts2str(st->cur_dts)); yading@11: yading@11: /* update flags */ yading@11: if (is_intra_only(st->codec)) yading@11: pkt->flags |= AV_PKT_FLAG_KEY; yading@11: if (pc) yading@11: pkt->convergence_duration = pc->convergence_duration; yading@11: } yading@11: yading@11: static void free_packet_buffer(AVPacketList **pkt_buf, AVPacketList **pkt_buf_end) yading@11: { yading@11: while (*pkt_buf) { yading@11: AVPacketList *pktl = *pkt_buf; yading@11: *pkt_buf = pktl->next; yading@11: av_free_packet(&pktl->pkt); yading@11: av_freep(&pktl); yading@11: } yading@11: *pkt_buf_end = NULL; yading@11: } yading@11: yading@11: /** yading@11: * Parse a packet, add all split parts to parse_queue yading@11: * yading@11: * @param pkt packet to parse, NULL when flushing the parser at end of stream yading@11: */ yading@11: static int parse_packet(AVFormatContext *s, AVPacket *pkt, int stream_index) yading@11: { yading@11: AVPacket out_pkt = { 0 }, flush_pkt = { 0 }; yading@11: AVStream *st = s->streams[stream_index]; yading@11: uint8_t *data = pkt ? pkt->data : NULL; yading@11: int size = pkt ? 
pkt->size : 0; yading@11: int ret = 0, got_output = 0; yading@11: yading@11: if (!pkt) { yading@11: av_init_packet(&flush_pkt); yading@11: pkt = &flush_pkt; yading@11: got_output = 1; yading@11: } else if (!size && st->parser->flags & PARSER_FLAG_COMPLETE_FRAMES) { yading@11: // preserve 0-size sync packets yading@11: compute_pkt_fields(s, st, st->parser, pkt); yading@11: } yading@11: yading@11: while (size > 0 || (pkt == &flush_pkt && got_output)) { yading@11: int len; yading@11: yading@11: av_init_packet(&out_pkt); yading@11: len = av_parser_parse2(st->parser, st->codec, yading@11: &out_pkt.data, &out_pkt.size, data, size, yading@11: pkt->pts, pkt->dts, pkt->pos); yading@11: yading@11: pkt->pts = pkt->dts = AV_NOPTS_VALUE; yading@11: pkt->pos = -1; yading@11: /* increment read pointer */ yading@11: data += len; yading@11: size -= len; yading@11: yading@11: got_output = !!out_pkt.size; yading@11: yading@11: if (!out_pkt.size) yading@11: continue; yading@11: yading@11: /* set the duration */ yading@11: out_pkt.duration = 0; yading@11: if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) { yading@11: if (st->codec->sample_rate > 0) { yading@11: out_pkt.duration = av_rescale_q_rnd(st->parser->duration, yading@11: (AVRational){ 1, st->codec->sample_rate }, yading@11: st->time_base, yading@11: AV_ROUND_DOWN); yading@11: } yading@11: } else if (st->codec->time_base.num != 0 && yading@11: st->codec->time_base.den != 0) { yading@11: out_pkt.duration = av_rescale_q_rnd(st->parser->duration, yading@11: st->codec->time_base, yading@11: st->time_base, yading@11: AV_ROUND_DOWN); yading@11: } yading@11: yading@11: out_pkt.stream_index = st->index; yading@11: out_pkt.pts = st->parser->pts; yading@11: out_pkt.dts = st->parser->dts; yading@11: out_pkt.pos = st->parser->pos; yading@11: yading@11: if(st->need_parsing == AVSTREAM_PARSE_FULL_RAW) yading@11: out_pkt.pos = st->parser->frame_offset; yading@11: yading@11: if (st->parser->key_frame == 1 || yading@11: (st->parser->key_frame == -1 && yading@11: st->parser->pict_type == AV_PICTURE_TYPE_I)) yading@11: out_pkt.flags |= AV_PKT_FLAG_KEY; yading@11: yading@11: if(st->parser->key_frame == -1 && st->parser->pict_type==AV_PICTURE_TYPE_NONE && (pkt->flags&AV_PKT_FLAG_KEY)) yading@11: out_pkt.flags |= AV_PKT_FLAG_KEY; yading@11: yading@11: compute_pkt_fields(s, st, st->parser, &out_pkt); yading@11: yading@11: if (out_pkt.data == pkt->data && out_pkt.size == pkt->size) { yading@11: out_pkt.buf = pkt->buf; yading@11: pkt->buf = NULL; yading@11: #if FF_API_DESTRUCT_PACKET yading@11: out_pkt.destruct = pkt->destruct; yading@11: pkt->destruct = NULL; yading@11: #endif yading@11: } yading@11: if ((ret = av_dup_packet(&out_pkt)) < 0) yading@11: goto fail; yading@11: yading@11: if (!add_to_pktbuf(&s->parse_queue, &out_pkt, &s->parse_queue_end)) { yading@11: av_free_packet(&out_pkt); yading@11: ret = AVERROR(ENOMEM); yading@11: goto fail; yading@11: } yading@11: } yading@11: yading@11: yading@11: /* end of the stream => close and free the parser */ yading@11: if (pkt == &flush_pkt) { yading@11: av_parser_close(st->parser); yading@11: st->parser = NULL; yading@11: } yading@11: yading@11: fail: yading@11: av_free_packet(pkt); yading@11: return ret; yading@11: } yading@11: yading@11: static int read_from_packet_buffer(AVPacketList **pkt_buffer, yading@11: AVPacketList **pkt_buffer_end, yading@11: AVPacket *pkt) yading@11: { yading@11: AVPacketList *pktl; yading@11: av_assert0(*pkt_buffer); yading@11: pktl = *pkt_buffer; yading@11: *pkt = pktl->pkt; yading@11: *pkt_buffer = 
pktl->next; yading@11: if (!pktl->next) yading@11: *pkt_buffer_end = NULL; yading@11: av_freep(&pktl); yading@11: return 0; yading@11: } yading@11: yading@11: static int read_frame_internal(AVFormatContext *s, AVPacket *pkt) yading@11: { yading@11: int ret = 0, i, got_packet = 0; yading@11: yading@11: av_init_packet(pkt); yading@11: yading@11: while (!got_packet && !s->parse_queue) { yading@11: AVStream *st; yading@11: AVPacket cur_pkt; yading@11: yading@11: /* read next packet */ yading@11: ret = ff_read_packet(s, &cur_pkt); yading@11: if (ret < 0) { yading@11: if (ret == AVERROR(EAGAIN)) yading@11: return ret; yading@11: /* flush the parsers */ yading@11: for(i = 0; i < s->nb_streams; i++) { yading@11: st = s->streams[i]; yading@11: if (st->parser && st->need_parsing) yading@11: parse_packet(s, NULL, st->index); yading@11: } yading@11: /* all remaining packets are now in parse_queue => yading@11: * really terminate parsing */ yading@11: break; yading@11: } yading@11: ret = 0; yading@11: st = s->streams[cur_pkt.stream_index]; yading@11: yading@11: if (cur_pkt.pts != AV_NOPTS_VALUE && yading@11: cur_pkt.dts != AV_NOPTS_VALUE && yading@11: cur_pkt.pts < cur_pkt.dts) { yading@11: av_log(s, AV_LOG_WARNING, "Invalid timestamps stream=%d, pts=%s, dts=%s, size=%d\n", yading@11: cur_pkt.stream_index, yading@11: av_ts2str(cur_pkt.pts), yading@11: av_ts2str(cur_pkt.dts), yading@11: cur_pkt.size); yading@11: } yading@11: if (s->debug & FF_FDEBUG_TS) yading@11: av_log(s, AV_LOG_DEBUG, "ff_read_packet stream=%d, pts=%s, dts=%s, size=%d, duration=%d, flags=%d\n", yading@11: cur_pkt.stream_index, yading@11: av_ts2str(cur_pkt.pts), yading@11: av_ts2str(cur_pkt.dts), yading@11: cur_pkt.size, yading@11: cur_pkt.duration, yading@11: cur_pkt.flags); yading@11: yading@11: if (st->need_parsing && !st->parser && !(s->flags & AVFMT_FLAG_NOPARSE)) { yading@11: st->parser = av_parser_init(st->codec->codec_id); yading@11: if (!st->parser) { yading@11: av_log(s, AV_LOG_VERBOSE, "parser not found for codec " yading@11: "%s, packets or times may be invalid.\n", yading@11: avcodec_get_name(st->codec->codec_id)); yading@11: /* no parser available: just output the raw packets */ yading@11: st->need_parsing = AVSTREAM_PARSE_NONE; yading@11: } else if(st->need_parsing == AVSTREAM_PARSE_HEADERS) { yading@11: st->parser->flags |= PARSER_FLAG_COMPLETE_FRAMES; yading@11: } else if(st->need_parsing == AVSTREAM_PARSE_FULL_ONCE) { yading@11: st->parser->flags |= PARSER_FLAG_ONCE; yading@11: } else if(st->need_parsing == AVSTREAM_PARSE_FULL_RAW) { yading@11: st->parser->flags |= PARSER_FLAG_USE_CODEC_TS; yading@11: } yading@11: } yading@11: yading@11: if (!st->need_parsing || !st->parser) { yading@11: /* no parsing needed: we just output the packet as is */ yading@11: *pkt = cur_pkt; yading@11: compute_pkt_fields(s, st, NULL, pkt); yading@11: if ((s->iformat->flags & AVFMT_GENERIC_INDEX) && yading@11: (pkt->flags & AV_PKT_FLAG_KEY) && pkt->dts != AV_NOPTS_VALUE) { yading@11: ff_reduce_index(s, st->index); yading@11: av_add_index_entry(st, pkt->pos, pkt->dts, 0, 0, AVINDEX_KEYFRAME); yading@11: } yading@11: got_packet = 1; yading@11: } else if (st->discard < AVDISCARD_ALL) { yading@11: if ((ret = parse_packet(s, &cur_pkt, cur_pkt.stream_index)) < 0) yading@11: return ret; yading@11: } else { yading@11: /* free packet */ yading@11: av_free_packet(&cur_pkt); yading@11: } yading@11: if (pkt->flags & AV_PKT_FLAG_KEY) yading@11: st->skip_to_keyframe = 0; yading@11: if (st->skip_to_keyframe) { yading@11: av_free_packet(&cur_pkt); 
yading@11: got_packet = 0; yading@11: } yading@11: } yading@11: yading@11: if (!got_packet && s->parse_queue) yading@11: ret = read_from_packet_buffer(&s->parse_queue, &s->parse_queue_end, pkt); yading@11: yading@11: if(s->debug & FF_FDEBUG_TS) yading@11: av_log(s, AV_LOG_DEBUG, "read_frame_internal stream=%d, pts=%s, dts=%s, size=%d, duration=%d, flags=%d\n", yading@11: pkt->stream_index, yading@11: av_ts2str(pkt->pts), yading@11: av_ts2str(pkt->dts), yading@11: pkt->size, yading@11: pkt->duration, yading@11: pkt->flags); yading@11: yading@11: return ret; yading@11: } yading@11: yading@11: int av_read_frame(AVFormatContext *s, AVPacket *pkt) yading@11: { yading@11: const int genpts = s->flags & AVFMT_FLAG_GENPTS; yading@11: int eof = 0; yading@11: int ret; yading@11: AVStream *st; yading@11: yading@11: if (!genpts) { yading@11: ret = s->packet_buffer ? yading@11: read_from_packet_buffer(&s->packet_buffer, &s->packet_buffer_end, pkt) : yading@11: read_frame_internal(s, pkt); yading@11: if (ret < 0) yading@11: return ret; yading@11: goto return_packet; yading@11: } yading@11: yading@11: for (;;) { yading@11: AVPacketList *pktl = s->packet_buffer; yading@11: yading@11: if (pktl) { yading@11: AVPacket *next_pkt = &pktl->pkt; yading@11: yading@11: if (next_pkt->dts != AV_NOPTS_VALUE) { yading@11: int wrap_bits = s->streams[next_pkt->stream_index]->pts_wrap_bits; yading@11: // last dts seen for this stream. if any of packets following yading@11: // current one had no dts, we will set this to AV_NOPTS_VALUE. yading@11: int64_t last_dts = next_pkt->dts; yading@11: while (pktl && next_pkt->pts == AV_NOPTS_VALUE) { yading@11: if (pktl->pkt.stream_index == next_pkt->stream_index && yading@11: (av_compare_mod(next_pkt->dts, pktl->pkt.dts, 2LL << (wrap_bits - 1)) < 0)) { yading@11: if (av_compare_mod(pktl->pkt.pts, pktl->pkt.dts, 2LL << (wrap_bits - 1))) { //not b frame yading@11: next_pkt->pts = pktl->pkt.dts; yading@11: } yading@11: if (last_dts != AV_NOPTS_VALUE) { yading@11: // Once last dts was set to AV_NOPTS_VALUE, we don't change it. yading@11: last_dts = pktl->pkt.dts; yading@11: } yading@11: } yading@11: pktl = pktl->next; yading@11: } yading@11: if (eof && next_pkt->pts == AV_NOPTS_VALUE && last_dts != AV_NOPTS_VALUE) { yading@11: // Fixing the last reference frame had none pts issue (For MXF etc). yading@11: // We only do this when yading@11: // 1. eof. yading@11: // 2. we are not able to resolve a pts value for current packet. yading@11: // 3. the packets for this stream at the end of the files had valid dts. 
yading@11: next_pkt->pts = last_dts + next_pkt->duration; yading@11: } yading@11: pktl = s->packet_buffer; yading@11: } yading@11: yading@11: /* read packet from packet buffer, if there is data */ yading@11: if (!(next_pkt->pts == AV_NOPTS_VALUE && yading@11: next_pkt->dts != AV_NOPTS_VALUE && !eof)) { yading@11: ret = read_from_packet_buffer(&s->packet_buffer, yading@11: &s->packet_buffer_end, pkt); yading@11: goto return_packet; yading@11: } yading@11: } yading@11: yading@11: ret = read_frame_internal(s, pkt); yading@11: if (ret < 0) { yading@11: if (pktl && ret != AVERROR(EAGAIN)) { yading@11: eof = 1; yading@11: continue; yading@11: } else yading@11: return ret; yading@11: } yading@11: yading@11: if (av_dup_packet(add_to_pktbuf(&s->packet_buffer, pkt, yading@11: &s->packet_buffer_end)) < 0) yading@11: return AVERROR(ENOMEM); yading@11: } yading@11: yading@11: return_packet: yading@11: yading@11: st = s->streams[pkt->stream_index]; yading@11: if (st->skip_samples) { yading@11: uint8_t *p = av_packet_new_side_data(pkt, AV_PKT_DATA_SKIP_SAMPLES, 10); yading@11: AV_WL32(p, st->skip_samples); yading@11: av_log(s, AV_LOG_DEBUG, "demuxer injecting skip %d\n", st->skip_samples); yading@11: st->skip_samples = 0; yading@11: } yading@11: yading@11: if ((s->iformat->flags & AVFMT_GENERIC_INDEX) && pkt->flags & AV_PKT_FLAG_KEY) { yading@11: ff_reduce_index(s, st->index); yading@11: av_add_index_entry(st, pkt->pos, pkt->dts, 0, 0, AVINDEX_KEYFRAME); yading@11: } yading@11: yading@11: if (is_relative(pkt->dts)) yading@11: pkt->dts -= RELATIVE_TS_BASE; yading@11: if (is_relative(pkt->pts)) yading@11: pkt->pts -= RELATIVE_TS_BASE; yading@11: yading@11: return ret; yading@11: } yading@11: yading@11: /* XXX: suppress the packet queue */ yading@11: static void flush_packet_queue(AVFormatContext *s) yading@11: { yading@11: free_packet_buffer(&s->parse_queue, &s->parse_queue_end); yading@11: free_packet_buffer(&s->packet_buffer, &s->packet_buffer_end); yading@11: free_packet_buffer(&s->raw_packet_buffer, &s->raw_packet_buffer_end); yading@11: yading@11: s->raw_packet_buffer_remaining_size = RAW_PACKET_BUFFER_SIZE; yading@11: } yading@11: yading@11: /*******************************************************/ yading@11: /* seek support */ yading@11: yading@11: int av_find_default_stream_index(AVFormatContext *s) yading@11: { yading@11: int first_audio_index = -1; yading@11: int i; yading@11: AVStream *st; yading@11: yading@11: if (s->nb_streams <= 0) yading@11: return -1; yading@11: for(i = 0; i < s->nb_streams; i++) { yading@11: st = s->streams[i]; yading@11: if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO && yading@11: !(st->disposition & AV_DISPOSITION_ATTACHED_PIC)) { yading@11: return i; yading@11: } yading@11: if (first_audio_index < 0 && st->codec->codec_type == AVMEDIA_TYPE_AUDIO) yading@11: first_audio_index = i; yading@11: } yading@11: return first_audio_index >= 0 ? first_audio_index : 0; yading@11: } yading@11: yading@11: /** yading@11: * Flush the frame reader. 
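 *
 * This closes any per-stream parser, drops the packet_buffer, parse_queue and
 * raw_packet_buffer, and resets the per-stream DTS bookkeeping so that demuxing
 * can restart cleanly (typically after a seek).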
 */
void ff_read_frame_flush(AVFormatContext *s)
{
    AVStream *st;
    int i, j;

    flush_packet_queue(s);

    /* for each stream, reset read state */
    for(i = 0; i < s->nb_streams; i++) {
        st = s->streams[i];

        if (st->parser) {
            av_parser_close(st->parser);
            st->parser = NULL;
        }
        st->last_IP_pts = AV_NOPTS_VALUE;
        if(st->first_dts == AV_NOPTS_VALUE) st->cur_dts = RELATIVE_TS_BASE;
        else st->cur_dts = AV_NOPTS_VALUE; /* we set the current DTS to an unspecified origin */
        st->reference_dts = AV_NOPTS_VALUE;

        st->probe_packets = MAX_PROBE_PACKETS;

        for(j=0; j<MAX_REORDER_DELAY+1; j++)
            st->pts_buffer[j]= AV_NOPTS_VALUE;
    }
}

void ff_update_cur_dts(AVFormatContext *s, AVStream *ref_st, int64_t timestamp)
{
    int i;

    for(i = 0; i < s->nb_streams; i++) {
        AVStream *st = s->streams[i];

        st->cur_dts = av_rescale(timestamp,
                                 st->time_base.den * (int64_t)ref_st->time_base.num,
                                 st->time_base.num * (int64_t)ref_st->time_base.den);
    }
}

void ff_reduce_index(AVFormatContext *s, int stream_index)
{
    AVStream *st= s->streams[stream_index];
    unsigned int max_entries= s->max_index_size / sizeof(AVIndexEntry);

    if((unsigned)st->nb_index_entries >= max_entries){
        int i;
        for(i=0; 2*i<st->nb_index_entries; i++)
            st->index_entries[i]= st->index_entries[2*i];
        st->nb_index_entries= i;
    }
}

int ff_add_index_entry(AVIndexEntry **index_entries,
                       int *nb_index_entries,
                       unsigned int *index_entries_allocated_size,
                       int64_t pos, int64_t timestamp, int size, int distance, int flags)
{
    AVIndexEntry *entries, *ie;
    int index;

    if((unsigned)*nb_index_entries + 1 >= UINT_MAX / sizeof(AVIndexEntry))
        return -1;

    if(timestamp == AV_NOPTS_VALUE)
        return AVERROR(EINVAL);

    if (is_relative(timestamp)) //FIXME this maintains previous behavior but we should shift by the correct offset once known
        timestamp -= RELATIVE_TS_BASE;

    entries = av_fast_realloc(*index_entries,
                              index_entries_allocated_size,
                              (*nb_index_entries + 1) *
                              sizeof(AVIndexEntry));
    if(!entries)
        return -1;

    *index_entries= entries;

    index= ff_index_search_timestamp(*index_entries, *nb_index_entries, timestamp, AVSEEK_FLAG_ANY);

    if(index<0){
        index= (*nb_index_entries)++;
        ie= &entries[index];
        av_assert0(index==0 || ie[-1].timestamp < timestamp);
    }else{
        ie= &entries[index];
        if(ie->timestamp != timestamp){
            if(ie->timestamp <= timestamp)
                return -1;
            memmove(entries + index + 1, entries + index, sizeof(AVIndexEntry)*(*nb_index_entries - index));
            (*nb_index_entries)++;
        }else if(ie->pos == pos && distance < ie->min_distance) //do not reduce the distance
            distance= ie->min_distance;
    }

    ie->pos = pos;
    ie->timestamp = timestamp;
    ie->min_distance= distance;
    ie->size= size;
    ie->flags = flags;

    return index;
}

int av_add_index_entry(AVStream *st,
                       int64_t pos, int64_t timestamp, int size, int distance, int flags)
{
    timestamp = wrap_timestamp(st, timestamp);
    return ff_add_index_entry(&st->index_entries, &st->nb_index_entries,
                              &st->index_entries_allocated_size, pos,
                              timestamp, size, distance, flags);
}

int ff_index_search_timestamp(const AVIndexEntry *entries, int nb_entries,
                              int64_t wanted_timestamp, int flags)
{
    int a, b, m;
    int64_t timestamp;

    a = - 1;
    b = nb_entries;

    //optimize appending index entries at the end
    if(b && entries[b-1].timestamp < wanted_timestamp)
        a= b-1;

    while (b - a > 1) {
        m = (a + b) >> 1;
        timestamp = entries[m].timestamp;
        if(timestamp >= wanted_timestamp)
            b = m;
        if(timestamp <= wanted_timestamp)
            a = m;
    }
    m= (flags & AVSEEK_FLAG_BACKWARD) ? a : b;

    if(!(flags & AVSEEK_FLAG_ANY)){
        while(m>=0 && m<nb_entries && !(entries[m].flags & AVINDEX_KEYFRAME)){
            m += (flags & AVSEEK_FLAG_BACKWARD) ? -1 : 1;
        }
    }

    if(m == nb_entries)
        return -1;
    return m;
}

int av_index_search_timestamp(AVStream *st, int64_t wanted_timestamp,
                              int flags)
{
    return ff_index_search_timestamp(st->index_entries, st->nb_index_entries,
                                     wanted_timestamp, flags);
}

static int64_t ff_read_timestamp(AVFormatContext *s, int stream_index, int64_t *ppos, int64_t pos_limit,
                                 int64_t (*read_timestamp)(struct AVFormatContext *, int , int64_t *, int64_t ))
{
    int64_t ts = read_timestamp(s, stream_index, ppos, pos_limit);
    if (stream_index >= 0)
        ts = wrap_timestamp(s->streams[stream_index], ts);
    return ts;
}

int ff_seek_frame_binary(AVFormatContext *s, int stream_index, int64_t target_ts, int flags)
{
    AVInputFormat *avif= s->iformat;
    int64_t av_uninit(pos_min), av_uninit(pos_max), pos, pos_limit;
    int64_t ts_min, ts_max, ts;
    int index;
    int64_t ret;
    AVStream *st;

    if (stream_index < 0)
        return -1;

    av_dlog(s, "read_seek: %d %s\n", stream_index, av_ts2str(target_ts));

    ts_max=
    ts_min= AV_NOPTS_VALUE;
    pos_limit= -1; //gcc falsely says it may be uninitialized

    st= s->streams[stream_index];
    if(st->index_entries){
        AVIndexEntry *e;

        index= av_index_search_timestamp(st, target_ts, flags | AVSEEK_FLAG_BACKWARD); //FIXME whole func must be checked for non-keyframe entries in index case, especially read_timestamp()
        index= FFMAX(index, 0);
        e= &st->index_entries[index];

        if(e->timestamp <= target_ts || e->pos == e->min_distance){
            pos_min= e->pos;
            ts_min= e->timestamp;
            av_dlog(s, "using cached pos_min=0x%"PRIx64" dts_min=%s\n",
                    pos_min, av_ts2str(ts_min));
        }else{
            av_assert1(index==0);
        }

        index= av_index_search_timestamp(st, target_ts, flags & ~AVSEEK_FLAG_BACKWARD);
        av_assert0(index < st->nb_index_entries);
        if(index >= 0){
            e= &st->index_entries[index];
            av_assert1(e->timestamp >= target_ts);
            pos_max=
e->pos; yading@11: ts_max= e->timestamp; yading@11: pos_limit= pos_max - e->min_distance; yading@11: av_dlog(s, "using cached pos_max=0x%"PRIx64" pos_limit=0x%"PRIx64" dts_max=%s\n", yading@11: pos_max, pos_limit, av_ts2str(ts_max)); yading@11: } yading@11: } yading@11: yading@11: pos= ff_gen_search(s, stream_index, target_ts, pos_min, pos_max, pos_limit, ts_min, ts_max, flags, &ts, avif->read_timestamp); yading@11: if(pos<0) yading@11: return -1; yading@11: yading@11: /* do the seek */ yading@11: if ((ret = avio_seek(s->pb, pos, SEEK_SET)) < 0) yading@11: return ret; yading@11: yading@11: ff_read_frame_flush(s); yading@11: ff_update_cur_dts(s, st, ts); yading@11: yading@11: return 0; yading@11: } yading@11: yading@11: int64_t ff_gen_search(AVFormatContext *s, int stream_index, int64_t target_ts, yading@11: int64_t pos_min, int64_t pos_max, int64_t pos_limit, yading@11: int64_t ts_min, int64_t ts_max, int flags, int64_t *ts_ret, yading@11: int64_t (*read_timestamp)(struct AVFormatContext *, int , int64_t *, int64_t )) yading@11: { yading@11: int64_t pos, ts; yading@11: int64_t start_pos, filesize; yading@11: int no_change; yading@11: yading@11: av_dlog(s, "gen_seek: %d %s\n", stream_index, av_ts2str(target_ts)); yading@11: yading@11: if(ts_min == AV_NOPTS_VALUE){ yading@11: pos_min = s->data_offset; yading@11: ts_min = ff_read_timestamp(s, stream_index, &pos_min, INT64_MAX, read_timestamp); yading@11: if (ts_min == AV_NOPTS_VALUE) yading@11: return -1; yading@11: } yading@11: yading@11: if(ts_min >= target_ts){ yading@11: *ts_ret= ts_min; yading@11: return pos_min; yading@11: } yading@11: yading@11: if(ts_max == AV_NOPTS_VALUE){ yading@11: int step= 1024; yading@11: filesize = avio_size(s->pb); yading@11: pos_max = filesize - 1; yading@11: do{ yading@11: pos_max = FFMAX(0, pos_max - step); yading@11: ts_max = ff_read_timestamp(s, stream_index, &pos_max, pos_max + step, read_timestamp); yading@11: step += step; yading@11: }while(ts_max == AV_NOPTS_VALUE && pos_max > 0); yading@11: if (ts_max == AV_NOPTS_VALUE) yading@11: return -1; yading@11: yading@11: for(;;){ yading@11: int64_t tmp_pos= pos_max + 1; yading@11: int64_t tmp_ts= ff_read_timestamp(s, stream_index, &tmp_pos, INT64_MAX, read_timestamp); yading@11: if(tmp_ts == AV_NOPTS_VALUE) yading@11: break; yading@11: ts_max= tmp_ts; yading@11: pos_max= tmp_pos; yading@11: if(tmp_pos >= filesize) yading@11: break; yading@11: } yading@11: pos_limit= pos_max; yading@11: } yading@11: yading@11: if(ts_max <= target_ts){ yading@11: *ts_ret= ts_max; yading@11: return pos_max; yading@11: } yading@11: yading@11: if(ts_min > ts_max){ yading@11: return -1; yading@11: }else if(ts_min == ts_max){ yading@11: pos_limit= pos_min; yading@11: } yading@11: yading@11: no_change=0; yading@11: while (pos_min < pos_limit) { yading@11: av_dlog(s, "pos_min=0x%"PRIx64" pos_max=0x%"PRIx64" dts_min=%s dts_max=%s\n", yading@11: pos_min, pos_max, av_ts2str(ts_min), av_ts2str(ts_max)); yading@11: assert(pos_limit <= pos_max); yading@11: yading@11: if(no_change==0){ yading@11: int64_t approximate_keyframe_distance= pos_max - pos_limit; yading@11: // interpolate position (better than dichotomy) yading@11: pos = av_rescale(target_ts - ts_min, pos_max - pos_min, ts_max - ts_min) yading@11: + pos_min - approximate_keyframe_distance; yading@11: }else if(no_change==1){ yading@11: // bisection, if interpolation failed to change min or max pos last time yading@11: pos = (pos_min + pos_limit)>>1; yading@11: }else{ yading@11: /* linear search if bisection failed, can only happen 
if there yading@11: are very few or no keyframes between min/max */ yading@11: pos=pos_min; yading@11: } yading@11: if(pos <= pos_min) yading@11: pos= pos_min + 1; yading@11: else if(pos > pos_limit) yading@11: pos= pos_limit; yading@11: start_pos= pos; yading@11: yading@11: ts = ff_read_timestamp(s, stream_index, &pos, INT64_MAX, read_timestamp); //may pass pos_limit instead of -1 yading@11: if(pos == pos_max) yading@11: no_change++; yading@11: else yading@11: no_change=0; yading@11: av_dlog(s, "%"PRId64" %"PRId64" %"PRId64" / %s %s %s target:%s limit:%"PRId64" start:%"PRId64" noc:%d\n", yading@11: pos_min, pos, pos_max, yading@11: av_ts2str(ts_min), av_ts2str(ts), av_ts2str(ts_max), av_ts2str(target_ts), yading@11: pos_limit, start_pos, no_change); yading@11: if(ts == AV_NOPTS_VALUE){ yading@11: av_log(s, AV_LOG_ERROR, "read_timestamp() failed in the middle\n"); yading@11: return -1; yading@11: } yading@11: assert(ts != AV_NOPTS_VALUE); yading@11: if (target_ts <= ts) { yading@11: pos_limit = start_pos - 1; yading@11: pos_max = pos; yading@11: ts_max = ts; yading@11: } yading@11: if (target_ts >= ts) { yading@11: pos_min = pos; yading@11: ts_min = ts; yading@11: } yading@11: } yading@11: yading@11: pos = (flags & AVSEEK_FLAG_BACKWARD) ? pos_min : pos_max; yading@11: ts = (flags & AVSEEK_FLAG_BACKWARD) ? ts_min : ts_max; yading@11: #if 0 yading@11: pos_min = pos; yading@11: ts_min = ff_read_timestamp(s, stream_index, &pos_min, INT64_MAX, read_timestamp); yading@11: pos_min++; yading@11: ts_max = ff_read_timestamp(s, stream_index, &pos_min, INT64_MAX, read_timestamp); yading@11: av_dlog(s, "pos=0x%"PRIx64" %s<=%s<=%s\n", yading@11: pos, av_ts2str(ts_min), av_ts2str(target_ts), av_ts2str(ts_max)); yading@11: #endif yading@11: *ts_ret= ts; yading@11: return pos; yading@11: } yading@11: yading@11: static int seek_frame_byte(AVFormatContext *s, int stream_index, int64_t pos, int flags){ yading@11: int64_t pos_min, pos_max; yading@11: yading@11: pos_min = s->data_offset; yading@11: pos_max = avio_size(s->pb) - 1; yading@11: yading@11: if (pos < pos_min) pos= pos_min; yading@11: else if(pos > pos_max) pos= pos_max; yading@11: yading@11: avio_seek(s->pb, pos, SEEK_SET); yading@11: yading@11: s->io_repositioned = 1; yading@11: yading@11: return 0; yading@11: } yading@11: yading@11: static int seek_frame_generic(AVFormatContext *s, yading@11: int stream_index, int64_t timestamp, int flags) yading@11: { yading@11: int index; yading@11: int64_t ret; yading@11: AVStream *st; yading@11: AVIndexEntry *ie; yading@11: yading@11: st = s->streams[stream_index]; yading@11: yading@11: index = av_index_search_timestamp(st, timestamp, flags); yading@11: yading@11: if(index < 0 && st->nb_index_entries && timestamp < st->index_entries[0].timestamp) yading@11: return -1; yading@11: yading@11: if(index < 0 || index==st->nb_index_entries-1){ yading@11: AVPacket pkt; yading@11: int nonkey=0; yading@11: yading@11: if(st->nb_index_entries){ yading@11: av_assert0(st->index_entries); yading@11: ie= &st->index_entries[st->nb_index_entries-1]; yading@11: if ((ret = avio_seek(s->pb, ie->pos, SEEK_SET)) < 0) yading@11: return ret; yading@11: ff_update_cur_dts(s, st, ie->timestamp); yading@11: }else{ yading@11: if ((ret = avio_seek(s->pb, s->data_offset, SEEK_SET)) < 0) yading@11: return ret; yading@11: } yading@11: for (;;) { yading@11: int read_status; yading@11: do{ yading@11: read_status = av_read_frame(s, &pkt); yading@11: } while (read_status == AVERROR(EAGAIN)); yading@11: if (read_status < 0) yading@11: break; 
yading@11: av_free_packet(&pkt); yading@11: if(stream_index == pkt.stream_index && pkt.dts > timestamp){ yading@11: if(pkt.flags & AV_PKT_FLAG_KEY) yading@11: break; yading@11: if(nonkey++ > 1000 && st->codec->codec_id != AV_CODEC_ID_CDGRAPHICS){ yading@11: av_log(s, AV_LOG_ERROR,"seek_frame_generic failed as this stream seems to contain no keyframes after the target timestamp, %d non keyframes found\n", nonkey); yading@11: break; yading@11: } yading@11: } yading@11: } yading@11: index = av_index_search_timestamp(st, timestamp, flags); yading@11: } yading@11: if (index < 0) yading@11: return -1; yading@11: yading@11: ff_read_frame_flush(s); yading@11: if (s->iformat->read_seek){ yading@11: if(s->iformat->read_seek(s, stream_index, timestamp, flags) >= 0) yading@11: return 0; yading@11: } yading@11: ie = &st->index_entries[index]; yading@11: if ((ret = avio_seek(s->pb, ie->pos, SEEK_SET)) < 0) yading@11: return ret; yading@11: ff_update_cur_dts(s, st, ie->timestamp); yading@11: yading@11: return 0; yading@11: } yading@11: yading@11: static int seek_frame_internal(AVFormatContext *s, int stream_index, yading@11: int64_t timestamp, int flags) yading@11: { yading@11: int ret; yading@11: AVStream *st; yading@11: yading@11: if (flags & AVSEEK_FLAG_BYTE) { yading@11: if (s->iformat->flags & AVFMT_NO_BYTE_SEEK) yading@11: return -1; yading@11: ff_read_frame_flush(s); yading@11: return seek_frame_byte(s, stream_index, timestamp, flags); yading@11: } yading@11: yading@11: if(stream_index < 0){ yading@11: stream_index= av_find_default_stream_index(s); yading@11: if(stream_index < 0) yading@11: return -1; yading@11: yading@11: st= s->streams[stream_index]; yading@11: /* timestamp for default must be expressed in AV_TIME_BASE units */ yading@11: timestamp = av_rescale(timestamp, st->time_base.den, AV_TIME_BASE * (int64_t)st->time_base.num); yading@11: } yading@11: yading@11: /* first, we try the format specific seek */ yading@11: if (s->iformat->read_seek) { yading@11: ff_read_frame_flush(s); yading@11: ret = s->iformat->read_seek(s, stream_index, timestamp, flags); yading@11: } else yading@11: ret = -1; yading@11: if (ret >= 0) { yading@11: return 0; yading@11: } yading@11: yading@11: if (s->iformat->read_timestamp && !(s->iformat->flags & AVFMT_NOBINSEARCH)) { yading@11: ff_read_frame_flush(s); yading@11: return ff_seek_frame_binary(s, stream_index, timestamp, flags); yading@11: } else if (!(s->iformat->flags & AVFMT_NOGENSEARCH)) { yading@11: ff_read_frame_flush(s); yading@11: return seek_frame_generic(s, stream_index, timestamp, flags); yading@11: } yading@11: else yading@11: return -1; yading@11: } yading@11: yading@11: int av_seek_frame(AVFormatContext *s, int stream_index, int64_t timestamp, int flags) yading@11: { yading@11: int ret = seek_frame_internal(s, stream_index, timestamp, flags); yading@11: yading@11: if (ret >= 0) yading@11: ret = avformat_queue_attached_pictures(s); yading@11: yading@11: return ret; yading@11: } yading@11: yading@11: int avformat_seek_file(AVFormatContext *s, int stream_index, int64_t min_ts, int64_t ts, int64_t max_ts, int flags) yading@11: { yading@11: if(min_ts > ts || max_ts < ts) yading@11: return -1; yading@11: if (stream_index < -1 || stream_index >= (int)s->nb_streams) yading@11: return AVERROR(EINVAL); yading@11: yading@11: if(s->seek2any>0) yading@11: flags |= AVSEEK_FLAG_ANY; yading@11: yading@11: if (s->iformat->read_seek2) { yading@11: int ret; yading@11: ff_read_frame_flush(s); yading@11: yading@11: if (stream_index == -1 && s->nb_streams == 1) { 
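            /* read_seek2() gets the request in the single stream's own time base;
             * min_ts is rounded up and max_ts down so the interval can only shrink,
             * and AV_ROUND_PASS_MINMAX leaves INT64_MIN/INT64_MAX limits untouched. */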
yading@11: AVRational time_base = s->streams[0]->time_base; yading@11: ts = av_rescale_q(ts, AV_TIME_BASE_Q, time_base); yading@11: min_ts = av_rescale_rnd(min_ts, time_base.den, yading@11: time_base.num * (int64_t)AV_TIME_BASE, yading@11: AV_ROUND_UP | AV_ROUND_PASS_MINMAX); yading@11: max_ts = av_rescale_rnd(max_ts, time_base.den, yading@11: time_base.num * (int64_t)AV_TIME_BASE, yading@11: AV_ROUND_DOWN | AV_ROUND_PASS_MINMAX); yading@11: } yading@11: yading@11: ret = s->iformat->read_seek2(s, stream_index, min_ts, ts, max_ts, flags); yading@11: yading@11: if (ret >= 0) yading@11: ret = avformat_queue_attached_pictures(s); yading@11: return ret; yading@11: } yading@11: yading@11: if(s->iformat->read_timestamp){ yading@11: //try to seek via read_timestamp() yading@11: } yading@11: yading@11: //Fallback to old API if new is not implemented but old is yading@11: //Note the old has somewhat different semantics yading@11: if (s->iformat->read_seek || 1) { yading@11: int dir = (ts - (uint64_t)min_ts > (uint64_t)max_ts - ts ? AVSEEK_FLAG_BACKWARD : 0); yading@11: int ret = av_seek_frame(s, stream_index, ts, flags | dir); yading@11: if (ret<0 && ts != min_ts && max_ts != ts) { yading@11: ret = av_seek_frame(s, stream_index, dir ? max_ts : min_ts, flags | dir); yading@11: if (ret >= 0) yading@11: ret = av_seek_frame(s, stream_index, ts, flags | (dir^AVSEEK_FLAG_BACKWARD)); yading@11: } yading@11: return ret; yading@11: } yading@11: yading@11: // try some generic seek like seek_frame_generic() but with new ts semantics yading@11: return -1; //unreachable yading@11: } yading@11: yading@11: /*******************************************************/ yading@11: yading@11: /** yading@11: * Return TRUE if the stream has accurate duration in any stream. yading@11: * yading@11: * @return TRUE if the stream has accurate duration for at least one component. yading@11: */ yading@11: static int has_duration(AVFormatContext *ic) yading@11: { yading@11: int i; yading@11: AVStream *st; yading@11: yading@11: for(i = 0;i < ic->nb_streams; i++) { yading@11: st = ic->streams[i]; yading@11: if (st->duration != AV_NOPTS_VALUE) yading@11: return 1; yading@11: } yading@11: if (ic->duration != AV_NOPTS_VALUE) yading@11: return 1; yading@11: return 0; yading@11: } yading@11: yading@11: /** yading@11: * Estimate the stream timings from the one of each components. yading@11: * yading@11: * Also computes the global bitrate if possible. 
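 *
 * The bitrate is derived as filesize * 8 * AV_TIME_BASE / duration when the
 * file size is known; for example, a 1 MiB file lasting 8 seconds comes out
 * at roughly 1 Mbit/s.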
yading@11: */ yading@11: static void update_stream_timings(AVFormatContext *ic) yading@11: { yading@11: int64_t start_time, start_time1, start_time_text, end_time, end_time1; yading@11: int64_t duration, duration1, filesize; yading@11: int i; yading@11: AVStream *st; yading@11: AVProgram *p; yading@11: yading@11: start_time = INT64_MAX; yading@11: start_time_text = INT64_MAX; yading@11: end_time = INT64_MIN; yading@11: duration = INT64_MIN; yading@11: for(i = 0;i < ic->nb_streams; i++) { yading@11: st = ic->streams[i]; yading@11: if (st->start_time != AV_NOPTS_VALUE && st->time_base.den) { yading@11: start_time1= av_rescale_q(st->start_time, st->time_base, AV_TIME_BASE_Q); yading@11: if (st->codec->codec_type == AVMEDIA_TYPE_SUBTITLE || st->codec->codec_type == AVMEDIA_TYPE_DATA) { yading@11: if (start_time1 < start_time_text) yading@11: start_time_text = start_time1; yading@11: } else yading@11: start_time = FFMIN(start_time, start_time1); yading@11: end_time1 = AV_NOPTS_VALUE; yading@11: if (st->duration != AV_NOPTS_VALUE) { yading@11: end_time1 = start_time1 yading@11: + av_rescale_q(st->duration, st->time_base, AV_TIME_BASE_Q); yading@11: end_time = FFMAX(end_time, end_time1); yading@11: } yading@11: for(p = NULL; (p = av_find_program_from_stream(ic, p, i)); ){ yading@11: if(p->start_time == AV_NOPTS_VALUE || p->start_time > start_time1) yading@11: p->start_time = start_time1; yading@11: if(p->end_time < end_time1) yading@11: p->end_time = end_time1; yading@11: } yading@11: } yading@11: if (st->duration != AV_NOPTS_VALUE) { yading@11: duration1 = av_rescale_q(st->duration, st->time_base, AV_TIME_BASE_Q); yading@11: duration = FFMAX(duration, duration1); yading@11: } yading@11: } yading@11: if (start_time == INT64_MAX || (start_time > start_time_text && start_time - start_time_text < AV_TIME_BASE)) yading@11: start_time = start_time_text; yading@11: else if(start_time > start_time_text) yading@11: av_log(ic, AV_LOG_VERBOSE, "Ignoring outlier non primary stream starttime %f\n", start_time_text / (float)AV_TIME_BASE); yading@11: yading@11: if (start_time != INT64_MAX) { yading@11: ic->start_time = start_time; yading@11: if (end_time != INT64_MIN) { yading@11: if (ic->nb_programs) { yading@11: for (i=0; inb_programs; i++) { yading@11: p = ic->programs[i]; yading@11: if(p->start_time != AV_NOPTS_VALUE && p->end_time > p->start_time) yading@11: duration = FFMAX(duration, p->end_time - p->start_time); yading@11: } yading@11: } else yading@11: duration = FFMAX(duration, end_time - start_time); yading@11: } yading@11: } yading@11: if (duration != INT64_MIN && duration > 0 && ic->duration == AV_NOPTS_VALUE) { yading@11: ic->duration = duration; yading@11: } yading@11: if (ic->pb && (filesize = avio_size(ic->pb)) > 0 && ic->duration != AV_NOPTS_VALUE) { yading@11: /* compute the bitrate */ yading@11: double bitrate = (double)filesize * 8.0 * AV_TIME_BASE / yading@11: (double)ic->duration; yading@11: if (bitrate >= 0 && bitrate <= INT_MAX) yading@11: ic->bit_rate = bitrate; yading@11: } yading@11: } yading@11: yading@11: static void fill_all_stream_timings(AVFormatContext *ic) yading@11: { yading@11: int i; yading@11: AVStream *st; yading@11: yading@11: update_stream_timings(ic); yading@11: for(i = 0;i < ic->nb_streams; i++) { yading@11: st = ic->streams[i]; yading@11: if (st->start_time == AV_NOPTS_VALUE) { yading@11: if(ic->start_time != AV_NOPTS_VALUE) yading@11: st->start_time = av_rescale_q(ic->start_time, AV_TIME_BASE_Q, st->time_base); yading@11: if(ic->duration != AV_NOPTS_VALUE) 
yading@11: st->duration = av_rescale_q(ic->duration, AV_TIME_BASE_Q, st->time_base); yading@11: } yading@11: } yading@11: } yading@11: yading@11: static void estimate_timings_from_bit_rate(AVFormatContext *ic) yading@11: { yading@11: int64_t filesize, duration; yading@11: int bit_rate, i, show_warning = 0; yading@11: AVStream *st; yading@11: yading@11: /* if bit_rate is already set, we believe it */ yading@11: if (ic->bit_rate <= 0) { yading@11: bit_rate = 0; yading@11: for(i=0;inb_streams;i++) { yading@11: st = ic->streams[i]; yading@11: if (st->codec->bit_rate > 0) yading@11: bit_rate += st->codec->bit_rate; yading@11: } yading@11: ic->bit_rate = bit_rate; yading@11: } yading@11: yading@11: /* if duration is already set, we believe it */ yading@11: if (ic->duration == AV_NOPTS_VALUE && yading@11: ic->bit_rate != 0) { yading@11: filesize = ic->pb ? avio_size(ic->pb) : 0; yading@11: if (filesize > 0) { yading@11: for(i = 0; i < ic->nb_streams; i++) { yading@11: st = ic->streams[i]; yading@11: if ( st->time_base.num <= INT64_MAX / ic->bit_rate yading@11: && st->duration == AV_NOPTS_VALUE) { yading@11: duration= av_rescale(8*filesize, st->time_base.den, ic->bit_rate*(int64_t)st->time_base.num); yading@11: st->duration = duration; yading@11: show_warning = 1; yading@11: } yading@11: } yading@11: } yading@11: } yading@11: if (show_warning) yading@11: av_log(ic, AV_LOG_WARNING, "Estimating duration from bitrate, this may be inaccurate\n"); yading@11: } yading@11: yading@11: #define DURATION_MAX_READ_SIZE 250000LL yading@11: #define DURATION_MAX_RETRY 4 yading@11: yading@11: /* only usable for MPEG-PS streams */ yading@11: static void estimate_timings_from_pts(AVFormatContext *ic, int64_t old_offset) yading@11: { yading@11: AVPacket pkt1, *pkt = &pkt1; yading@11: AVStream *st; yading@11: int read_size, i, ret; yading@11: int64_t end_time; yading@11: int64_t filesize, offset, duration; yading@11: int retry=0; yading@11: yading@11: /* flush packet queue */ yading@11: flush_packet_queue(ic); yading@11: yading@11: for (i=0; inb_streams; i++) { yading@11: st = ic->streams[i]; yading@11: if (st->start_time == AV_NOPTS_VALUE && st->first_dts == AV_NOPTS_VALUE) yading@11: av_log(st->codec, AV_LOG_WARNING, "start time is not set in estimate_timings_from_pts\n"); yading@11: yading@11: if (st->parser) { yading@11: av_parser_close(st->parser); yading@11: st->parser= NULL; yading@11: } yading@11: } yading@11: yading@11: /* estimate the end time (duration) */ yading@11: /* XXX: may need to support wrapping */ yading@11: filesize = ic->pb ? 
 avio_size(ic->pb) : 0;
    end_time = AV_NOPTS_VALUE;
    do{
        offset = filesize - (DURATION_MAX_READ_SIZE<<retry);
        if (offset < 0)
            offset = 0;

        avio_seek(ic->pb, offset, SEEK_SET);
        read_size = 0;
        for(;;) {
            if (read_size >= DURATION_MAX_READ_SIZE<<(FFMAX(retry-1,0)))
                break;

            do {
                ret = ff_read_packet(ic, pkt);
            } while(ret == AVERROR(EAGAIN));
            if (ret != 0)
                break;
            read_size += pkt->size;
            st = ic->streams[pkt->stream_index];
            if (pkt->pts != AV_NOPTS_VALUE &&
                (st->start_time != AV_NOPTS_VALUE ||
                 st->first_dts != AV_NOPTS_VALUE)) {
                duration = end_time = pkt->pts;
                if (st->start_time != AV_NOPTS_VALUE)
                    duration -= st->start_time;
                else
                    duration -= st->first_dts;
                if (duration > 0) {
                    if (st->duration == AV_NOPTS_VALUE || st->info->last_duration<=0 ||
                        (st->duration < duration && FFABS(duration - st->info->last_duration) < 60LL*st->time_base.den / st->time_base.num))
                        st->duration = duration;
                    st->info->last_duration = duration;
                }
            }
            av_free_packet(pkt);
        }
    }while(   end_time==AV_NOPTS_VALUE
           && filesize > (DURATION_MAX_READ_SIZE<<retry)
           && ++retry <= DURATION_MAX_RETRY);

    fill_all_stream_timings(ic);

    avio_seek(ic->pb, old_offset, SEEK_SET);
    for (i=0; i<ic->nb_streams; i++) {
        st= ic->streams[i];
        st->cur_dts= st->first_dts;
        st->last_IP_pts = AV_NOPTS_VALUE;
        st->reference_dts = AV_NOPTS_VALUE;
    }
}

static void estimate_timings(AVFormatContext *ic, int64_t old_offset)
{
    int64_t file_size;

    /* get the file size, if possible */
    if (ic->iformat->flags & AVFMT_NOFILE) {
        file_size = 0;
    } else {
        file_size = avio_size(ic->pb);
        file_size = FFMAX(0, file_size);
    }

    if ((!strcmp(ic->iformat->name, "mpeg") ||
         !strcmp(ic->iformat->name, "mpegts")) &&
        file_size && ic->pb->seekable) {
        /* get accurate estimate from the PTSes */
        estimate_timings_from_pts(ic, old_offset);
        ic->duration_estimation_method = AVFMT_DURATION_FROM_PTS;
    } else if (has_duration(ic)) {
        /* at least one component has timings - we use them for all
           the components */
        fill_all_stream_timings(ic);
        ic->duration_estimation_method = AVFMT_DURATION_FROM_STREAM;
    } else {
        /* less precise: use bitrate info */
        estimate_timings_from_bit_rate(ic);
        ic->duration_estimation_method = AVFMT_DURATION_FROM_BITRATE;
    }
    update_stream_timings(ic);

    {
        int i;
        AVStream av_unused *st;
        for(i = 0;i < ic->nb_streams; i++) {
            st = ic->streams[i];
            av_dlog(ic, "%d: start_time: %0.3f duration: %0.3f\n", i,
                    (double) st->start_time / AV_TIME_BASE,
                    (double) st->duration / AV_TIME_BASE);
        }
        av_dlog(ic, "stream: start_time: %0.3f duration: %0.3f bitrate=%d kb/s\n",
                (double) ic->start_time / AV_TIME_BASE,
                (double) ic->duration / AV_TIME_BASE,
                ic->bit_rate / 1000);
    }
}

static int has_codec_parameters(AVStream *st, const char **errmsg_ptr)
{
    AVCodecContext *avctx = st->codec;
yading@11: yading@11: #define FAIL(errmsg) do { \ yading@11: if (errmsg_ptr) \ yading@11: *errmsg_ptr = errmsg; \ yading@11: return 0; \ yading@11: } while (0) yading@11: yading@11: switch (avctx->codec_type) { yading@11: case AVMEDIA_TYPE_AUDIO: yading@11: if (!avctx->frame_size && determinable_frame_size(avctx)) yading@11: FAIL("unspecified frame size"); yading@11: if (st->info->found_decoder >= 0 && avctx->sample_fmt == AV_SAMPLE_FMT_NONE) yading@11: FAIL("unspecified sample format"); yading@11: if (!avctx->sample_rate) yading@11: FAIL("unspecified sample rate"); yading@11: if (!avctx->channels) yading@11: FAIL("unspecified number of channels"); yading@11: if (st->info->found_decoder >= 0 && !st->nb_decoded_frames && avctx->codec_id == AV_CODEC_ID_DTS) yading@11: FAIL("no decodable DTS frames"); yading@11: break; yading@11: case AVMEDIA_TYPE_VIDEO: yading@11: if (!avctx->width) yading@11: FAIL("unspecified size"); yading@11: if (st->info->found_decoder >= 0 && avctx->pix_fmt == AV_PIX_FMT_NONE) yading@11: FAIL("unspecified pixel format"); yading@11: if (st->codec->codec_id == AV_CODEC_ID_RV30 || st->codec->codec_id == AV_CODEC_ID_RV40) yading@11: if (!st->sample_aspect_ratio.num && !st->codec->sample_aspect_ratio.num && !st->codec_info_nb_frames) yading@11: FAIL("no frame in rv30/40 and no sar"); yading@11: break; yading@11: case AVMEDIA_TYPE_SUBTITLE: yading@11: if (avctx->codec_id == AV_CODEC_ID_HDMV_PGS_SUBTITLE && !avctx->width) yading@11: FAIL("unspecified size"); yading@11: break; yading@11: case AVMEDIA_TYPE_DATA: yading@11: if(avctx->codec_id == AV_CODEC_ID_NONE) return 1; yading@11: } yading@11: yading@11: if (avctx->codec_id == AV_CODEC_ID_NONE) yading@11: FAIL("unknown codec"); yading@11: return 1; yading@11: } yading@11: yading@11: /* returns 1 or 0 if or if not decoded data was returned, or a negative error */ yading@11: static int try_decode_frame(AVStream *st, AVPacket *avpkt, AVDictionary **options) yading@11: { yading@11: const AVCodec *codec; yading@11: int got_picture = 1, ret = 0; yading@11: AVFrame *frame = avcodec_alloc_frame(); yading@11: AVSubtitle subtitle; yading@11: AVPacket pkt = *avpkt; yading@11: yading@11: if (!frame) yading@11: return AVERROR(ENOMEM); yading@11: yading@11: if (!avcodec_is_open(st->codec) && !st->info->found_decoder) { yading@11: AVDictionary *thread_opt = NULL; yading@11: yading@11: codec = st->codec->codec ? st->codec->codec : yading@11: avcodec_find_decoder(st->codec->codec_id); yading@11: yading@11: if (!codec) { yading@11: st->info->found_decoder = -1; yading@11: ret = -1; yading@11: goto fail; yading@11: } yading@11: yading@11: /* force thread count to 1 since the h264 decoder will not extract SPS yading@11: * and PPS to extradata during multi-threaded decoding */ yading@11: av_dict_set(options ? options : &thread_opt, "threads", "1", 0); yading@11: ret = avcodec_open2(st->codec, codec, options ? 
options : &thread_opt); yading@11: if (!options) yading@11: av_dict_free(&thread_opt); yading@11: if (ret < 0) { yading@11: st->info->found_decoder = -1; yading@11: goto fail; yading@11: } yading@11: st->info->found_decoder = 1; yading@11: } else if (!st->info->found_decoder) yading@11: st->info->found_decoder = 1; yading@11: yading@11: if (st->info->found_decoder < 0) { yading@11: ret = -1; yading@11: goto fail; yading@11: } yading@11: yading@11: while ((pkt.size > 0 || (!pkt.data && got_picture)) && yading@11: ret >= 0 && yading@11: (!has_codec_parameters(st, NULL) || yading@11: !has_decode_delay_been_guessed(st) || yading@11: (!st->codec_info_nb_frames && st->codec->codec->capabilities & CODEC_CAP_CHANNEL_CONF))) { yading@11: got_picture = 0; yading@11: avcodec_get_frame_defaults(frame); yading@11: switch(st->codec->codec_type) { yading@11: case AVMEDIA_TYPE_VIDEO: yading@11: ret = avcodec_decode_video2(st->codec, frame, yading@11: &got_picture, &pkt); yading@11: break; yading@11: case AVMEDIA_TYPE_AUDIO: yading@11: ret = avcodec_decode_audio4(st->codec, frame, &got_picture, &pkt); yading@11: break; yading@11: case AVMEDIA_TYPE_SUBTITLE: yading@11: ret = avcodec_decode_subtitle2(st->codec, &subtitle, yading@11: &got_picture, &pkt); yading@11: ret = pkt.size; yading@11: break; yading@11: default: yading@11: break; yading@11: } yading@11: if (ret >= 0) { yading@11: if (got_picture) yading@11: st->nb_decoded_frames++; yading@11: pkt.data += ret; yading@11: pkt.size -= ret; yading@11: ret = got_picture; yading@11: } yading@11: } yading@11: yading@11: if(!pkt.data && !got_picture) yading@11: ret = -1; yading@11: yading@11: fail: yading@11: avcodec_free_frame(&frame); yading@11: return ret; yading@11: } yading@11: yading@11: unsigned int ff_codec_get_tag(const AVCodecTag *tags, enum AVCodecID id) yading@11: { yading@11: while (tags->id != AV_CODEC_ID_NONE) { yading@11: if (tags->id == id) yading@11: return tags->tag; yading@11: tags++; yading@11: } yading@11: return 0; yading@11: } yading@11: yading@11: enum AVCodecID ff_codec_get_id(const AVCodecTag *tags, unsigned int tag) yading@11: { yading@11: int i; yading@11: for(i=0; tags[i].id != AV_CODEC_ID_NONE;i++) { yading@11: if(tag == tags[i].tag) yading@11: return tags[i].id; yading@11: } yading@11: for(i=0; tags[i].id != AV_CODEC_ID_NONE; i++) { yading@11: if (avpriv_toupper4(tag) == avpriv_toupper4(tags[i].tag)) yading@11: return tags[i].id; yading@11: } yading@11: return AV_CODEC_ID_NONE; yading@11: } yading@11: yading@11: enum AVCodecID ff_get_pcm_codec_id(int bps, int flt, int be, int sflags) yading@11: { yading@11: if (flt) { yading@11: switch (bps) { yading@11: case 32: return be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE; yading@11: case 64: return be ? AV_CODEC_ID_PCM_F64BE : AV_CODEC_ID_PCM_F64LE; yading@11: default: return AV_CODEC_ID_NONE; yading@11: } yading@11: } else { yading@11: bps += 7; yading@11: bps >>= 3; yading@11: if (sflags & (1 << (bps - 1))) { yading@11: switch (bps) { yading@11: case 1: return AV_CODEC_ID_PCM_S8; yading@11: case 2: return be ? AV_CODEC_ID_PCM_S16BE : AV_CODEC_ID_PCM_S16LE; yading@11: case 3: return be ? AV_CODEC_ID_PCM_S24BE : AV_CODEC_ID_PCM_S24LE; yading@11: case 4: return be ? AV_CODEC_ID_PCM_S32BE : AV_CODEC_ID_PCM_S32LE; yading@11: default: return AV_CODEC_ID_NONE; yading@11: } yading@11: } else { yading@11: switch (bps) { yading@11: case 1: return AV_CODEC_ID_PCM_U8; yading@11: case 2: return be ? AV_CODEC_ID_PCM_U16BE : AV_CODEC_ID_PCM_U16LE; yading@11: case 3: return be ? 
AV_CODEC_ID_PCM_U24BE : AV_CODEC_ID_PCM_U24LE; yading@11: case 4: return be ? AV_CODEC_ID_PCM_U32BE : AV_CODEC_ID_PCM_U32LE; yading@11: default: return AV_CODEC_ID_NONE; yading@11: } yading@11: } yading@11: } yading@11: } yading@11: yading@11: unsigned int av_codec_get_tag(const AVCodecTag * const *tags, enum AVCodecID id) yading@11: { yading@11: unsigned int tag; yading@11: if (!av_codec_get_tag2(tags, id, &tag)) yading@11: return 0; yading@11: return tag; yading@11: } yading@11: yading@11: int av_codec_get_tag2(const AVCodecTag * const *tags, enum AVCodecID id, yading@11: unsigned int *tag) yading@11: { yading@11: int i; yading@11: for(i=0; tags && tags[i]; i++){ yading@11: const AVCodecTag *codec_tags = tags[i]; yading@11: while (codec_tags->id != AV_CODEC_ID_NONE) { yading@11: if (codec_tags->id == id) { yading@11: *tag = codec_tags->tag; yading@11: return 1; yading@11: } yading@11: codec_tags++; yading@11: } yading@11: } yading@11: return 0; yading@11: } yading@11: yading@11: enum AVCodecID av_codec_get_id(const AVCodecTag * const *tags, unsigned int tag) yading@11: { yading@11: int i; yading@11: for(i=0; tags && tags[i]; i++){ yading@11: enum AVCodecID id= ff_codec_get_id(tags[i], tag); yading@11: if(id!=AV_CODEC_ID_NONE) return id; yading@11: } yading@11: return AV_CODEC_ID_NONE; yading@11: } yading@11: yading@11: static void compute_chapters_end(AVFormatContext *s) yading@11: { yading@11: unsigned int i, j; yading@11: int64_t max_time = s->duration + ((s->start_time == AV_NOPTS_VALUE) ? 0 : s->start_time); yading@11: yading@11: for (i = 0; i < s->nb_chapters; i++) yading@11: if (s->chapters[i]->end == AV_NOPTS_VALUE) { yading@11: AVChapter *ch = s->chapters[i]; yading@11: int64_t end = max_time ? av_rescale_q(max_time, AV_TIME_BASE_Q, ch->time_base) yading@11: : INT64_MAX; yading@11: yading@11: for (j = 0; j < s->nb_chapters; j++) { yading@11: AVChapter *ch1 = s->chapters[j]; yading@11: int64_t next_start = av_rescale_q(ch1->start, ch1->time_base, ch->time_base); yading@11: if (j != i && next_start > ch->start && next_start < end) yading@11: end = next_start; yading@11: } yading@11: ch->end = (end == INT64_MAX) ? ch->start : end; yading@11: } yading@11: } yading@11: yading@11: static int get_std_framerate(int i){ yading@11: if(i<60*12) return (i+1)*1001; yading@11: else return ((const int[]){24,30,60,12,15,48})[i-60*12]*1000*12; yading@11: } yading@11: yading@11: /* yading@11: * Is the time base unreliable. yading@11: * This is a heuristic to balance between quick acceptance of the values in yading@11: * the headers vs. some extra checks. yading@11: * Old DivX and Xvid often have nonsense timebases like 1fps or 2fps. yading@11: * MPEG-2 commonly misuses field repeat flags to store different framerates. yading@11: * And there are "variable" fps files this needs to detect as well. 
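 * Concretely, a time base is treated as suspect when it implies more than
 * ~100 fps or fewer than 5 fps, or when the codec (mp4v tag, MPEG-2, H.264)
 * is known to store tick/field rates rather than frame rates.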
yading@11: */ yading@11: static int tb_unreliable(AVCodecContext *c){ yading@11: if( c->time_base.den >= 101L*c->time_base.num yading@11: || c->time_base.den < 5L*c->time_base.num yading@11: /* || c->codec_tag == AV_RL32("DIVX") yading@11: || c->codec_tag == AV_RL32("XVID")*/ yading@11: || c->codec_tag == AV_RL32("mp4v") yading@11: || c->codec_id == AV_CODEC_ID_MPEG2VIDEO yading@11: || c->codec_id == AV_CODEC_ID_H264 yading@11: ) yading@11: return 1; yading@11: return 0; yading@11: } yading@11: yading@11: #if FF_API_FORMAT_PARAMETERS yading@11: int av_find_stream_info(AVFormatContext *ic) yading@11: { yading@11: return avformat_find_stream_info(ic, NULL); yading@11: } yading@11: #endif yading@11: yading@11: int avformat_find_stream_info(AVFormatContext *ic, AVDictionary **options) yading@11: { yading@11: int i, count, ret, j; yading@11: int64_t read_size; yading@11: AVStream *st; yading@11: AVPacket pkt1, *pkt; yading@11: int64_t old_offset = avio_tell(ic->pb); yading@11: int orig_nb_streams = ic->nb_streams; // new streams might appear, no options for those yading@11: int flush_codecs = ic->probesize > 0; yading@11: yading@11: if(ic->pb) yading@11: av_log(ic, AV_LOG_DEBUG, "File position before avformat_find_stream_info() is %"PRId64"\n", avio_tell(ic->pb)); yading@11: yading@11: for(i=0;inb_streams;i++) { yading@11: const AVCodec *codec; yading@11: AVDictionary *thread_opt = NULL; yading@11: st = ic->streams[i]; yading@11: yading@11: if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO || yading@11: st->codec->codec_type == AVMEDIA_TYPE_SUBTITLE) { yading@11: /* if(!st->time_base.num) yading@11: st->time_base= */ yading@11: if(!st->codec->time_base.num) yading@11: st->codec->time_base= st->time_base; yading@11: } yading@11: //only for the split stuff yading@11: if (!st->parser && !(ic->flags & AVFMT_FLAG_NOPARSE)) { yading@11: st->parser = av_parser_init(st->codec->codec_id); yading@11: if(st->parser){ yading@11: if(st->need_parsing == AVSTREAM_PARSE_HEADERS){ yading@11: st->parser->flags |= PARSER_FLAG_COMPLETE_FRAMES; yading@11: } else if(st->need_parsing == AVSTREAM_PARSE_FULL_RAW) { yading@11: st->parser->flags |= PARSER_FLAG_USE_CODEC_TS; yading@11: } yading@11: } else if (st->need_parsing) { yading@11: av_log(ic, AV_LOG_VERBOSE, "parser not found for codec " yading@11: "%s, packets or times may be invalid.\n", yading@11: avcodec_get_name(st->codec->codec_id)); yading@11: } yading@11: } yading@11: codec = st->codec->codec ? st->codec->codec : yading@11: avcodec_find_decoder(st->codec->codec_id); yading@11: yading@11: /* force thread count to 1 since the h264 decoder will not extract SPS yading@11: * and PPS to extradata during multi-threaded decoding */ yading@11: av_dict_set(options ? &options[i] : &thread_opt, "threads", "1", 0); yading@11: yading@11: /* Ensure that subtitle_header is properly set. */ yading@11: if (st->codec->codec_type == AVMEDIA_TYPE_SUBTITLE yading@11: && codec && !st->codec->codec) yading@11: avcodec_open2(st->codec, codec, options ? &options[i] yading@11: : &thread_opt); yading@11: yading@11: //try to just open decoders, in case this is enough to get parameters yading@11: if (!has_codec_parameters(st, NULL) && st->request_probe <= 0) { yading@11: if (codec && !st->codec->codec) yading@11: avcodec_open2(st->codec, codec, options ? 
&options[i] yading@11: : &thread_opt); yading@11: } yading@11: if (!options) yading@11: av_dict_free(&thread_opt); yading@11: } yading@11: yading@11: for (i=0; inb_streams; i++) { yading@11: #if FF_API_R_FRAME_RATE yading@11: ic->streams[i]->info->last_dts = AV_NOPTS_VALUE; yading@11: #endif yading@11: ic->streams[i]->info->fps_first_dts = AV_NOPTS_VALUE; yading@11: ic->streams[i]->info->fps_last_dts = AV_NOPTS_VALUE; yading@11: } yading@11: yading@11: count = 0; yading@11: read_size = 0; yading@11: for(;;) { yading@11: if (ff_check_interrupt(&ic->interrupt_callback)){ yading@11: ret= AVERROR_EXIT; yading@11: av_log(ic, AV_LOG_DEBUG, "interrupted\n"); yading@11: break; yading@11: } yading@11: yading@11: /* check if one codec still needs to be handled */ yading@11: for(i=0;inb_streams;i++) { yading@11: int fps_analyze_framecount = 20; yading@11: yading@11: st = ic->streams[i]; yading@11: if (!has_codec_parameters(st, NULL)) yading@11: break; yading@11: /* if the timebase is coarse (like the usual millisecond precision yading@11: of mkv), we need to analyze more frames to reliably arrive at yading@11: the correct fps */ yading@11: if (av_q2d(st->time_base) > 0.0005) yading@11: fps_analyze_framecount *= 2; yading@11: if (ic->fps_probe_size >= 0) yading@11: fps_analyze_framecount = ic->fps_probe_size; yading@11: if (st->disposition & AV_DISPOSITION_ATTACHED_PIC) yading@11: fps_analyze_framecount = 0; yading@11: /* variable fps and no guess at the real fps */ yading@11: if( tb_unreliable(st->codec) && !(st->r_frame_rate.num && st->avg_frame_rate.num) yading@11: && st->info->duration_count < fps_analyze_framecount yading@11: && st->codec->codec_type == AVMEDIA_TYPE_VIDEO) yading@11: break; yading@11: if(st->parser && st->parser->parser->split && !st->codec->extradata) yading@11: break; yading@11: if (st->first_dts == AV_NOPTS_VALUE && yading@11: (st->codec->codec_type == AVMEDIA_TYPE_VIDEO || yading@11: st->codec->codec_type == AVMEDIA_TYPE_AUDIO)) yading@11: break; yading@11: } yading@11: if (i == ic->nb_streams) { yading@11: /* NOTE: if the format has no header, then we need to read yading@11: some packets to get most of the streams, so we cannot yading@11: stop here */ yading@11: if (!(ic->ctx_flags & AVFMTCTX_NOHEADER)) { yading@11: /* if we found the info for all the codecs, we can stop */ yading@11: ret = count; yading@11: av_log(ic, AV_LOG_DEBUG, "All info found\n"); yading@11: flush_codecs = 0; yading@11: break; yading@11: } yading@11: } yading@11: /* we did not get all the codec info, but we read too much data */ yading@11: if (read_size >= ic->probesize) { yading@11: ret = count; yading@11: av_log(ic, AV_LOG_DEBUG, "Probe buffer size limit of %d bytes reached\n", ic->probesize); yading@11: for (i = 0; i < ic->nb_streams; i++) yading@11: if (!ic->streams[i]->r_frame_rate.num && yading@11: ic->streams[i]->info->duration_count <= 1) yading@11: av_log(ic, AV_LOG_WARNING, yading@11: "Stream #%d: not enough frames to estimate rate; " yading@11: "consider increasing probesize\n", i); yading@11: break; yading@11: } yading@11: yading@11: /* NOTE: a new stream can be added there if no header in file yading@11: (AVFMTCTX_NOHEADER) */ yading@11: ret = read_frame_internal(ic, &pkt1); yading@11: if (ret == AVERROR(EAGAIN)) yading@11: continue; yading@11: yading@11: if (ret < 0) { yading@11: /* EOF or error*/ yading@11: break; yading@11: } yading@11: yading@11: if (ic->flags & AVFMT_FLAG_NOBUFFER) { yading@11: pkt = &pkt1; yading@11: } else { yading@11: pkt = add_to_pktbuf(&ic->packet_buffer, &pkt1, 
yading@11: &ic->packet_buffer_end); yading@11: if ((ret = av_dup_packet(pkt)) < 0) yading@11: goto find_stream_info_err; yading@11: } yading@11: yading@11: read_size += pkt->size; yading@11: yading@11: st = ic->streams[pkt->stream_index]; yading@11: if (pkt->dts != AV_NOPTS_VALUE && st->codec_info_nb_frames > 1) { yading@11: /* check for non-increasing dts */ yading@11: if (st->info->fps_last_dts != AV_NOPTS_VALUE && yading@11: st->info->fps_last_dts >= pkt->dts) { yading@11: av_log(ic, AV_LOG_DEBUG, "Non-increasing DTS in stream %d: " yading@11: "packet %d with DTS %"PRId64", packet %d with DTS " yading@11: "%"PRId64"\n", st->index, st->info->fps_last_dts_idx, yading@11: st->info->fps_last_dts, st->codec_info_nb_frames, pkt->dts); yading@11: st->info->fps_first_dts = st->info->fps_last_dts = AV_NOPTS_VALUE; yading@11: } yading@11: /* check for a discontinuity in dts - if the difference in dts yading@11: * is more than 1000 times the average packet duration in the sequence, yading@11: * we treat it as a discontinuity */ yading@11: if (st->info->fps_last_dts != AV_NOPTS_VALUE && yading@11: st->info->fps_last_dts_idx > st->info->fps_first_dts_idx && yading@11: (pkt->dts - st->info->fps_last_dts) / 1000 > yading@11: (st->info->fps_last_dts - st->info->fps_first_dts) / (st->info->fps_last_dts_idx - st->info->fps_first_dts_idx)) { yading@11: av_log(ic, AV_LOG_WARNING, "DTS discontinuity in stream %d: " yading@11: "packet %d with DTS %"PRId64", packet %d with DTS " yading@11: "%"PRId64"\n", st->index, st->info->fps_last_dts_idx, yading@11: st->info->fps_last_dts, st->codec_info_nb_frames, pkt->dts); yading@11: st->info->fps_first_dts = st->info->fps_last_dts = AV_NOPTS_VALUE; yading@11: } yading@11: yading@11: /* update stored dts values */ yading@11: if (st->info->fps_first_dts == AV_NOPTS_VALUE) { yading@11: st->info->fps_first_dts = pkt->dts; yading@11: st->info->fps_first_dts_idx = st->codec_info_nb_frames; yading@11: } yading@11: st->info->fps_last_dts = pkt->dts; yading@11: st->info->fps_last_dts_idx = st->codec_info_nb_frames; yading@11: } yading@11: if (st->codec_info_nb_frames>1) { yading@11: int64_t t=0; yading@11: if (st->time_base.den > 0) yading@11: t = av_rescale_q(st->info->codec_info_duration, st->time_base, AV_TIME_BASE_Q); yading@11: if (st->avg_frame_rate.num > 0) yading@11: t = FFMAX(t, av_rescale_q(st->codec_info_nb_frames, av_inv_q(st->avg_frame_rate), AV_TIME_BASE_Q)); yading@11: yading@11: if (t >= ic->max_analyze_duration) { yading@11: av_log(ic, AV_LOG_WARNING, "max_analyze_duration %d reached at %"PRId64" microseconds\n", ic->max_analyze_duration, t); yading@11: break; yading@11: } yading@11: if (pkt->duration) { yading@11: st->info->codec_info_duration += pkt->duration; yading@11: st->info->codec_info_duration_fields += st->parser && st->codec->ticks_per_frame==2 ? st->parser->repeat_pict + 1 : 2; yading@11: } yading@11: } yading@11: #if FF_API_R_FRAME_RATE yading@11: { yading@11: int64_t last = st->info->last_dts; yading@11: yading@11: if( pkt->dts != AV_NOPTS_VALUE && last != AV_NOPTS_VALUE && pkt->dts > last yading@11: && pkt->dts - (uint64_t)last < INT64_MAX){ yading@11: double dts= (is_relative(pkt->dts) ? 
                                            pkt->dts - RELATIVE_TS_BASE : pkt->dts) * av_q2d(st->time_base);
                int64_t duration= pkt->dts - last;

                if (!st->info->duration_error)
                    st->info->duration_error = av_mallocz(sizeof(st->info->duration_error[0])*2);

//                 if(st->codec->codec_type == AVMEDIA_TYPE_VIDEO)
//                     av_log(NULL, AV_LOG_ERROR, "%f\n", dts);
                for (i=0; i<MAX_STD_TIMEBASES; i++) {
                    int framerate= get_std_framerate(i);
                    double sdts= dts*framerate/(1001*12);
                    for(j=0; j<2; j++){
                        int64_t ticks= llrint(sdts+j*0.5);
                        double error= sdts - ticks + j*0.5;
                        st->info->duration_error[j][0][i] += error;
                        st->info->duration_error[j][1][i] += error*error;
                    }
                }
                st->info->duration_count++;
                // ignore the first 4 values, they might have some random jitter
                if (st->info->duration_count > 3 && is_relative(pkt->dts) == is_relative(last))
                    st->info->duration_gcd = av_gcd(st->info->duration_gcd, duration);
            }
            if (pkt->dts != AV_NOPTS_VALUE)
                st->info->last_dts = pkt->dts;
        }
#endif
        if(st->parser && st->parser->parser->split && !st->codec->extradata){
            int i= st->parser->parser->split(st->codec, pkt->data, pkt->size);
            if (i > 0 && i < FF_MAX_EXTRADATA_SIZE) {
                st->codec->extradata_size= i;
                st->codec->extradata= av_malloc(st->codec->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
                if (!st->codec->extradata)
                    return AVERROR(ENOMEM);
                memcpy(st->codec->extradata, pkt->data, st->codec->extradata_size);
                memset(st->codec->extradata + i, 0, FF_INPUT_BUFFER_PADDING_SIZE);
            }
        }

        /* if still no information, we try to open the codec and to
           decompress the frame. We try to avoid that in most cases as
           it takes longer and uses more memory. For MPEG-4, we need to
           decompress for QuickTime.

           If CODEC_CAP_CHANNEL_CONF is set this will force decoding of at
           least one frame of codec data, this makes sure the codec initializes
           the channel configuration and does not only trust the values from the container.
        */
        try_decode_frame(st, pkt, (options && i < orig_nb_streams ) ? &options[i] : NULL);

        st->codec_info_nb_frames++;
        count++;
    }

    if (flush_codecs) {
        AVPacket empty_pkt = { 0 };
        int err = 0;
        av_init_packet(&empty_pkt);

        ret = -1; /* we could not have all the codec parameters before EOF */
        for(i=0;i<ic->nb_streams;i++) {
            const char *errmsg;

            st = ic->streams[i];

            /* flush the decoders */
            if (st->info->found_decoder == 1) {
                do {
                    err = try_decode_frame(st, &empty_pkt,
                                            (options && i < orig_nb_streams) ?
yading@11: &options[i] : NULL); yading@11: } while (err > 0 && !has_codec_parameters(st, NULL)); yading@11: yading@11: if (err < 0) { yading@11: av_log(ic, AV_LOG_INFO, yading@11: "decoding for stream %d failed\n", st->index); yading@11: } yading@11: } yading@11: yading@11: if (!has_codec_parameters(st, &errmsg)) { yading@11: char buf[256]; yading@11: avcodec_string(buf, sizeof(buf), st->codec, 0); yading@11: av_log(ic, AV_LOG_WARNING, yading@11: "Could not find codec parameters for stream %d (%s): %s\n" yading@11: "Consider increasing the value for the 'analyzeduration' and 'probesize' options\n", yading@11: i, buf, errmsg); yading@11: } else { yading@11: ret = 0; yading@11: } yading@11: } yading@11: } yading@11: yading@11: // close codecs which were opened in try_decode_frame() yading@11: for(i=0;inb_streams;i++) { yading@11: st = ic->streams[i]; yading@11: avcodec_close(st->codec); yading@11: } yading@11: for(i=0;inb_streams;i++) { yading@11: st = ic->streams[i]; yading@11: if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) { yading@11: if(st->codec->codec_id == AV_CODEC_ID_RAWVIDEO && !st->codec->codec_tag && !st->codec->bits_per_coded_sample){ yading@11: uint32_t tag= avcodec_pix_fmt_to_codec_tag(st->codec->pix_fmt); yading@11: if (avpriv_find_pix_fmt(ff_raw_pix_fmt_tags, tag) == st->codec->pix_fmt) yading@11: st->codec->codec_tag= tag; yading@11: } yading@11: yading@11: /* estimate average framerate if not set by demuxer */ yading@11: if (st->info->codec_info_duration_fields && !st->avg_frame_rate.num && st->info->codec_info_duration) { yading@11: int best_fps = 0; yading@11: double best_error = 0.01; yading@11: yading@11: av_reduce(&st->avg_frame_rate.num, &st->avg_frame_rate.den, yading@11: st->info->codec_info_duration_fields*(int64_t)st->time_base.den, yading@11: st->info->codec_info_duration*2*(int64_t)st->time_base.num, 60000); yading@11: yading@11: /* round guessed framerate to a "standard" framerate if it's yading@11: * within 1% of the original estimate*/ yading@11: for (j = 1; j < MAX_STD_TIMEBASES; j++) { yading@11: AVRational std_fps = { get_std_framerate(j), 12*1001 }; yading@11: double error = fabs(av_q2d(st->avg_frame_rate) / av_q2d(std_fps) - 1); yading@11: yading@11: if (error < best_error) { yading@11: best_error = error; yading@11: best_fps = std_fps.num; yading@11: } yading@11: } yading@11: if (best_fps) { yading@11: av_reduce(&st->avg_frame_rate.num, &st->avg_frame_rate.den, yading@11: best_fps, 12*1001, INT_MAX); yading@11: } yading@11: } yading@11: // the check for tb_unreliable() is not completely correct, since this is not about handling yading@11: // a unreliable/inexact time base, but a time base that is finer than necessary, as e.g. yading@11: // ipmovie.c produces. 
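            // Worked example of the gcd heuristic below (illustrative numbers):
            // with a 1/90000 time base and packets spaced 3003 ticks apart,
            // duration_gcd ends up as 3003 and 90000/3003 reduces to 30000/1001,
            // i.e. 29.97 fps.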
yading@11:             if (tb_unreliable(st->codec) && st->info->duration_count > 15 && st->info->duration_gcd > FFMAX(1, st->time_base.den/(500LL*st->time_base.num)) && !st->r_frame_rate.num)
yading@11:                 av_reduce(&st->r_frame_rate.num, &st->r_frame_rate.den, st->time_base.den, st->time_base.num * st->info->duration_gcd, INT_MAX);
yading@11:             if (st->info->duration_count>1 && !st->r_frame_rate.num
yading@11:                 && tb_unreliable(st->codec)) {
yading@11:                 int num = 0;
yading@11:                 double best_error= 0.01;
yading@11:
yading@11:                 for (j=0; j<MAX_STD_TIMEBASES; j++) {
yading@11:                     if (st->info->codec_info_duration && st->info->codec_info_duration*av_q2d(st->time_base) < (1001*12.0)/get_std_framerate(j))
yading@11:                         continue;
yading@11:                     if(!st->info->codec_info_duration && 1.0 < (1001*12.0)/get_std_framerate(j))
yading@11:                         continue;
yading@11:                     for(k=0; k<2; k++){
yading@11:                         int n= st->info->duration_count;
yading@11:                         double a= st->info->duration_error[k][0][j] / n;
yading@11:                         double error= st->info->duration_error[k][1][j]/n - a*a;
yading@11:
yading@11:                         if(error < best_error && best_error> 0.000000001){
yading@11:                             best_error= error;
yading@11:                             num = get_std_framerate(j);
yading@11:                         }
yading@11:                         if(error < 0.02)
yading@11:                             av_log(NULL, AV_LOG_DEBUG, "rfps: %f %f\n", get_std_framerate(j) / 12.0/1001, error);
yading@11:                     }
yading@11:                 }
yading@11:                 // do not increase frame rate by more than 1 % in order to match a standard rate.
yading@11:                 if (num && (!st->r_frame_rate.num || (double)num/(12*1001) < 1.01 * av_q2d(st->r_frame_rate)))
yading@11:                     av_reduce(&st->r_frame_rate.num, &st->r_frame_rate.den, num, 12*1001, INT_MAX);
yading@11:             }
yading@11:
yading@11:             if (!st->r_frame_rate.num){
yading@11:                 if( st->codec->time_base.den * (int64_t)st->time_base.num
yading@11:                     <= st->codec->time_base.num * st->codec->ticks_per_frame * (int64_t)st->time_base.den){
yading@11:                     st->r_frame_rate.num = st->codec->time_base.den;
yading@11:                     st->r_frame_rate.den = st->codec->time_base.num * st->codec->ticks_per_frame;
yading@11:                 }else{
yading@11:                     st->r_frame_rate.num = st->time_base.den;
yading@11:                     st->r_frame_rate.den = st->time_base.num;
yading@11:                 }
yading@11:             }
yading@11:         }else if(st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
yading@11:             if(!st->codec->bits_per_coded_sample)
yading@11:                 st->codec->bits_per_coded_sample= av_get_bits_per_sample(st->codec->codec_id);
yading@11:             // set stream disposition based on audio service type
yading@11:             switch (st->codec->audio_service_type) {
yading@11:             case AV_AUDIO_SERVICE_TYPE_EFFECTS:
yading@11:                 st->disposition = AV_DISPOSITION_CLEAN_EFFECTS; break;
yading@11:             case AV_AUDIO_SERVICE_TYPE_VISUALLY_IMPAIRED:
yading@11:                 st->disposition = AV_DISPOSITION_VISUAL_IMPAIRED; break;
yading@11:             case AV_AUDIO_SERVICE_TYPE_HEARING_IMPAIRED:
yading@11:                 st->disposition = AV_DISPOSITION_HEARING_IMPAIRED; break;
yading@11:             case AV_AUDIO_SERVICE_TYPE_COMMENTARY:
yading@11:                 st->disposition = AV_DISPOSITION_COMMENT; break;
yading@11:             case AV_AUDIO_SERVICE_TYPE_KARAOKE:
yading@11:                 st->disposition = AV_DISPOSITION_KARAOKE; break;
yading@11:             }
yading@11:         }
yading@11:     }
yading@11:
yading@11:     if(ic->probesize)
yading@11:         estimate_timings(ic, old_offset);
yading@11:
yading@11:     compute_chapters_end(ic);
yading@11:
yading@11:  find_stream_info_err:
yading@11:     for (i=0; i < ic->nb_streams; i++) {
yading@11:         st = ic->streams[i];
yading@11:         if (ic->streams[i]->codec)
yading@11:             ic->streams[i]->codec->thread_count = 0;
yading@11:         if (st->info)
yading@11:             av_freep(&st->info->duration_error);
yading@11:         av_freep(&ic->streams[i]->info);
yading@11:     }
yading@11:     if(ic->pb)
av_log(ic, AV_LOG_DEBUG, "File position after avformat_find_stream_info() is %"PRId64"\n", avio_tell(ic->pb)); yading@11: return ret; yading@11: } yading@11: yading@11: AVProgram *av_find_program_from_stream(AVFormatContext *ic, AVProgram *last, int s) yading@11: { yading@11: int i, j; yading@11: yading@11: for (i = 0; i < ic->nb_programs; i++) { yading@11: if (ic->programs[i] == last) { yading@11: last = NULL; yading@11: } else { yading@11: if (!last) yading@11: for (j = 0; j < ic->programs[i]->nb_stream_indexes; j++) yading@11: if (ic->programs[i]->stream_index[j] == s) yading@11: return ic->programs[i]; yading@11: } yading@11: } yading@11: return NULL; yading@11: } yading@11: yading@11: int av_find_best_stream(AVFormatContext *ic, yading@11: enum AVMediaType type, yading@11: int wanted_stream_nb, yading@11: int related_stream, yading@11: AVCodec **decoder_ret, yading@11: int flags) yading@11: { yading@11: int i, nb_streams = ic->nb_streams; yading@11: int ret = AVERROR_STREAM_NOT_FOUND, best_count = -1, best_bitrate = -1, best_multiframe = -1, count, bitrate, multiframe; yading@11: unsigned *program = NULL; yading@11: AVCodec *decoder = NULL, *best_decoder = NULL; yading@11: yading@11: if (related_stream >= 0 && wanted_stream_nb < 0) { yading@11: AVProgram *p = av_find_program_from_stream(ic, NULL, related_stream); yading@11: if (p) { yading@11: program = p->stream_index; yading@11: nb_streams = p->nb_stream_indexes; yading@11: } yading@11: } yading@11: for (i = 0; i < nb_streams; i++) { yading@11: int real_stream_index = program ? program[i] : i; yading@11: AVStream *st = ic->streams[real_stream_index]; yading@11: AVCodecContext *avctx = st->codec; yading@11: if (avctx->codec_type != type) yading@11: continue; yading@11: if (wanted_stream_nb >= 0 && real_stream_index != wanted_stream_nb) yading@11: continue; yading@11: if (st->disposition & (AV_DISPOSITION_HEARING_IMPAIRED|AV_DISPOSITION_VISUAL_IMPAIRED)) yading@11: continue; yading@11: if (decoder_ret) { yading@11: decoder = avcodec_find_decoder(st->codec->codec_id); yading@11: if (!decoder) { yading@11: if (ret < 0) yading@11: ret = AVERROR_DECODER_NOT_FOUND; yading@11: continue; yading@11: } yading@11: } yading@11: count = st->codec_info_nb_frames; yading@11: bitrate = avctx->bit_rate; yading@11: multiframe = FFMIN(5, count); yading@11: if ((best_multiframe > multiframe) || yading@11: (best_multiframe == multiframe && best_bitrate > bitrate) || yading@11: (best_multiframe == multiframe && best_bitrate == bitrate && best_count >= count)) yading@11: continue; yading@11: best_count = count; yading@11: best_bitrate = bitrate; yading@11: best_multiframe = multiframe; yading@11: ret = real_stream_index; yading@11: best_decoder = decoder; yading@11: if (program && i == nb_streams - 1 && ret < 0) { yading@11: program = NULL; yading@11: nb_streams = ic->nb_streams; yading@11: i = 0; /* no related stream found, try again with everything */ yading@11: } yading@11: } yading@11: if (decoder_ret) yading@11: *decoder_ret = best_decoder; yading@11: return ret; yading@11: } yading@11: yading@11: /*******************************************************/ yading@11: yading@11: int av_read_play(AVFormatContext *s) yading@11: { yading@11: if (s->iformat->read_play) yading@11: return s->iformat->read_play(s); yading@11: if (s->pb) yading@11: return avio_pause(s->pb, 0); yading@11: return AVERROR(ENOSYS); yading@11: } yading@11: yading@11: int av_read_pause(AVFormatContext *s) yading@11: { yading@11: if (s->iformat->read_pause) yading@11: return 
s->iformat->read_pause(s); yading@11: if (s->pb) yading@11: return avio_pause(s->pb, 1); yading@11: return AVERROR(ENOSYS); yading@11: } yading@11: yading@11: void ff_free_stream(AVFormatContext *s, AVStream *st){ yading@11: av_assert0(s->nb_streams>0); yading@11: av_assert0(s->streams[ s->nb_streams-1 ] == st); yading@11: yading@11: if (st->parser) { yading@11: av_parser_close(st->parser); yading@11: } yading@11: if (st->attached_pic.data) yading@11: av_free_packet(&st->attached_pic); yading@11: av_dict_free(&st->metadata); yading@11: av_freep(&st->probe_data.buf); yading@11: av_freep(&st->index_entries); yading@11: av_freep(&st->codec->extradata); yading@11: av_freep(&st->codec->subtitle_header); yading@11: av_freep(&st->codec); yading@11: av_freep(&st->priv_data); yading@11: if (st->info) yading@11: av_freep(&st->info->duration_error); yading@11: av_freep(&st->info); yading@11: av_freep(&s->streams[ --s->nb_streams ]); yading@11: } yading@11: yading@11: void avformat_free_context(AVFormatContext *s) yading@11: { yading@11: int i; yading@11: yading@11: if (!s) yading@11: return; yading@11: yading@11: av_opt_free(s); yading@11: if (s->iformat && s->iformat->priv_class && s->priv_data) yading@11: av_opt_free(s->priv_data); yading@11: yading@11: for(i=s->nb_streams-1; i>=0; i--) { yading@11: ff_free_stream(s, s->streams[i]); yading@11: } yading@11: for(i=s->nb_programs-1; i>=0; i--) { yading@11: av_dict_free(&s->programs[i]->metadata); yading@11: av_freep(&s->programs[i]->stream_index); yading@11: av_freep(&s->programs[i]); yading@11: } yading@11: av_freep(&s->programs); yading@11: av_freep(&s->priv_data); yading@11: while(s->nb_chapters--) { yading@11: av_dict_free(&s->chapters[s->nb_chapters]->metadata); yading@11: av_freep(&s->chapters[s->nb_chapters]); yading@11: } yading@11: av_freep(&s->chapters); yading@11: av_dict_free(&s->metadata); yading@11: av_freep(&s->streams); yading@11: av_free(s); yading@11: } yading@11: yading@11: #if FF_API_CLOSE_INPUT_FILE yading@11: void av_close_input_file(AVFormatContext *s) yading@11: { yading@11: avformat_close_input(&s); yading@11: } yading@11: #endif yading@11: yading@11: void avformat_close_input(AVFormatContext **ps) yading@11: { yading@11: AVFormatContext *s = *ps; yading@11: AVIOContext *pb = s->pb; yading@11: yading@11: if ((s->iformat && s->iformat->flags & AVFMT_NOFILE) || yading@11: (s->flags & AVFMT_FLAG_CUSTOM_IO)) yading@11: pb = NULL; yading@11: yading@11: flush_packet_queue(s); yading@11: yading@11: if (s->iformat) { yading@11: if (s->iformat->read_close) yading@11: s->iformat->read_close(s); yading@11: } yading@11: yading@11: avformat_free_context(s); yading@11: yading@11: *ps = NULL; yading@11: yading@11: avio_close(pb); yading@11: } yading@11: yading@11: #if FF_API_NEW_STREAM yading@11: AVStream *av_new_stream(AVFormatContext *s, int id) yading@11: { yading@11: AVStream *st = avformat_new_stream(s, NULL); yading@11: if (st) yading@11: st->id = id; yading@11: return st; yading@11: } yading@11: #endif yading@11: yading@11: AVStream *avformat_new_stream(AVFormatContext *s, const AVCodec *c) yading@11: { yading@11: AVStream *st; yading@11: int i; yading@11: AVStream **streams; yading@11: yading@11: if (s->nb_streams >= INT_MAX/sizeof(*streams)) yading@11: return NULL; yading@11: streams = av_realloc(s->streams, (s->nb_streams + 1) * sizeof(*streams)); yading@11: if (!streams) yading@11: return NULL; yading@11: s->streams = streams; yading@11: yading@11: st = av_mallocz(sizeof(AVStream)); yading@11: if (!st) yading@11: return NULL; 
yading@11:     if (!(st->info = av_mallocz(sizeof(*st->info)))) {
yading@11:         av_free(st);
yading@11:         return NULL;
yading@11:     }
yading@11:     st->info->last_dts = AV_NOPTS_VALUE;
yading@11:
yading@11:     st->codec = avcodec_alloc_context3(c);
yading@11:     if (s->iformat) {
yading@11:         /* no default bitrate if decoding */
yading@11:         st->codec->bit_rate = 0;
yading@11:     }
yading@11:     st->index = s->nb_streams;
yading@11:     st->start_time = AV_NOPTS_VALUE;
yading@11:     st->duration = AV_NOPTS_VALUE;
yading@11:     /* we set the current DTS to 0 so that formats without any timestamps
yading@11:        but durations get some timestamps, formats with some unknown
yading@11:        timestamps have their first few packets buffered and the
yading@11:        timestamps corrected before they are returned to the user */
yading@11:     st->cur_dts = s->iformat ? RELATIVE_TS_BASE : 0;
yading@11:     st->first_dts = AV_NOPTS_VALUE;
yading@11:     st->probe_packets = MAX_PROBE_PACKETS;
yading@11:     st->pts_wrap_reference = AV_NOPTS_VALUE;
yading@11:     st->pts_wrap_behavior = AV_PTS_WRAP_IGNORE;
yading@11:
yading@11:     /* default pts setting is MPEG-like */
yading@11:     avpriv_set_pts_info(st, 33, 1, 90000);
yading@11:     st->last_IP_pts = AV_NOPTS_VALUE;
yading@11:     for(i=0; i<MAX_REORDER_DELAY+1; i++)
yading@11:         st->pts_buffer[i]= AV_NOPTS_VALUE;
yading@11:     st->reference_dts = AV_NOPTS_VALUE;
yading@11:
yading@11:     st->sample_aspect_ratio = (AVRational){0,1};
yading@11:
yading@11: #if FF_API_R_FRAME_RATE
yading@11:     st->info->last_dts = AV_NOPTS_VALUE;
yading@11: #endif
yading@11:     st->info->fps_first_dts = AV_NOPTS_VALUE;
yading@11:     st->info->fps_last_dts = AV_NOPTS_VALUE;
yading@11:
yading@11:     s->streams[s->nb_streams++] = st;
yading@11:     return st;
yading@11: }
yading@11:
yading@11: AVProgram *av_new_program(AVFormatContext *ac, int id)
yading@11: {
yading@11:     AVProgram *program=NULL;
yading@11:     int i;
yading@11:
yading@11:     av_dlog(ac, "new_program: id=0x%04x\n", id);
yading@11:
yading@11:     for(i=0; i<ac->nb_programs; i++)
yading@11:         if(ac->programs[i]->id == id)
yading@11:             program = ac->programs[i];
yading@11:
yading@11:     if(!program){
yading@11:         program = av_mallocz(sizeof(AVProgram));
yading@11:         if (!program)
yading@11:             return NULL;
yading@11:         dynarray_add(&ac->programs, &ac->nb_programs, program);
yading@11:         program->discard = AVDISCARD_NONE;
yading@11:     }
yading@11:     program->id = id;
yading@11:     program->pts_wrap_reference = AV_NOPTS_VALUE;
yading@11:     program->pts_wrap_behavior = AV_PTS_WRAP_IGNORE;
yading@11:
yading@11:     program->start_time =
yading@11:     program->end_time = AV_NOPTS_VALUE;
yading@11:
yading@11:     return program;
yading@11: }
yading@11:
yading@11: AVChapter *avpriv_new_chapter(AVFormatContext *s, int id, AVRational time_base, int64_t start, int64_t end, const char *title)
yading@11: {
yading@11:     AVChapter *chapter = NULL;
yading@11:     int i;
yading@11:
yading@11:     for(i=0; i<s->nb_chapters; i++)
yading@11:         if(s->chapters[i]->id == id)
yading@11:             chapter = s->chapters[i];
yading@11:
yading@11:     if(!chapter){
yading@11:         chapter= av_mallocz(sizeof(AVChapter));
yading@11:         if(!chapter)
yading@11:             return NULL;
yading@11:         dynarray_add(&s->chapters, &s->nb_chapters, chapter);
yading@11:     }
yading@11:     av_dict_set(&chapter->metadata, "title", title, 0);
yading@11:     chapter->id = id;
yading@11:     chapter->time_base= time_base;
yading@11:     chapter->start = start;
yading@11:     chapter->end = end;
yading@11:
yading@11:     return chapter;
yading@11: }
yading@11:
yading@11: void ff_program_add_stream_index(AVFormatContext *ac, int progid, unsigned int idx)
yading@11: {
yading@11:     int i, j;
yading@11:     AVProgram *program=NULL;
yading@11:     void *tmp;
yading@11:
yading@11:     if (idx >= ac->nb_streams) {
yading@11:         av_log(ac, AV_LOG_ERROR, "stream index %d is not valid\n", idx);
yading@11:         return;
yading@11:     }
yading@11:
yading@11:     for(i=0; i<ac->nb_programs; i++){
yading@11:         if(ac->programs[i]->id != progid)
yading@11:             continue;
yading@11:         program = ac->programs[i];
yading@11:         for(j=0; j<program->nb_stream_indexes; j++)
yading@11:             if(program->stream_index[j] == idx)
yading@11:                 return;
yading@11:
yading@11:         tmp = av_realloc(program->stream_index, sizeof(unsigned int)*(program->nb_stream_indexes+1));
yading@11:         if(!tmp)
yading@11:             return;
yading@11:         program->stream_index = tmp;
yading@11:         program->stream_index[program->nb_stream_indexes++] = idx;
yading@11:         return;
yading@11:     }
yading@11: }
yading@11:
yading@11: static void print_fps(double d, const char *postfix){
yading@11:     uint64_t v= lrintf(d*100);
yading@11:     if (v% 100 ) av_log(NULL, AV_LOG_INFO, ", %3.2f %s", d, postfix);
yading@11:     else if(v%(100*1000)) av_log(NULL, AV_LOG_INFO, ", %1.0f %s", d, postfix);
yading@11:     else av_log(NULL, AV_LOG_INFO, ", %1.0fk %s", d/1000, postfix);
yading@11: }
yading@11:
yading@11: static void dump_metadata(void *ctx, AVDictionary *m, const char *indent)
yading@11: {
yading@11:     if(m && !(av_dict_count(m) == 1 && av_dict_get(m, "language", NULL, 0))){
yading@11:         AVDictionaryEntry *tag=NULL;
yading@11:
yading@11:         av_log(ctx, AV_LOG_INFO, "%sMetadata:\n", indent);
yading@11:         while((tag=av_dict_get(m, "", tag, AV_DICT_IGNORE_SUFFIX))) {
yading@11:             if(strcmp("language", tag->key)){
yading@11:                 const char *p = tag->value;
yading@11:                 av_log(ctx, AV_LOG_INFO, "%s  %-16s: ", indent, tag->key);
yading@11:                 while(*p) {
yading@11:                     char tmp[256];
yading@11:                     size_t len = strcspn(p, "\x8\xa\xb\xc\xd");
yading@11:                     av_strlcpy(tmp, p, FFMIN(sizeof(tmp), len+1));
yading@11:                     av_log(ctx, AV_LOG_INFO, "%s", tmp);
yading@11:                     p += len;
yading@11:                     if (*p == 0xd) av_log(ctx, AV_LOG_INFO, " ");
yading@11:                     if (*p == 0xa) av_log(ctx, AV_LOG_INFO, "\n%s  %-16s: ", indent, "");
yading@11:                     if (*p) p++;
yading@11:                 }
yading@11:                 av_log(ctx, AV_LOG_INFO, "\n");
yading@11:             }
yading@11:         }
yading@11:     }
yading@11: }
yading@11:
yading@11: /* "user interface" functions */
yading@11: static void dump_stream_format(AVFormatContext *ic, int i, int index, int is_output)
yading@11: {
yading@11:     char buf[256];
yading@11:     int flags = (is_output ?
ic->oformat->flags : ic->iformat->flags); yading@11: AVStream *st = ic->streams[i]; yading@11: int g = av_gcd(st->time_base.num, st->time_base.den); yading@11: AVDictionaryEntry *lang = av_dict_get(st->metadata, "language", NULL, 0); yading@11: avcodec_string(buf, sizeof(buf), st->codec, is_output); yading@11: av_log(NULL, AV_LOG_INFO, " Stream #%d:%d", index, i); yading@11: /* the pid is an important information, so we display it */ yading@11: /* XXX: add a generic system */ yading@11: if (flags & AVFMT_SHOW_IDS) yading@11: av_log(NULL, AV_LOG_INFO, "[0x%x]", st->id); yading@11: if (lang) yading@11: av_log(NULL, AV_LOG_INFO, "(%s)", lang->value); yading@11: av_log(NULL, AV_LOG_DEBUG, ", %d, %d/%d", st->codec_info_nb_frames, st->time_base.num/g, st->time_base.den/g); yading@11: av_log(NULL, AV_LOG_INFO, ": %s", buf); yading@11: if (st->sample_aspect_ratio.num && // default yading@11: av_cmp_q(st->sample_aspect_ratio, st->codec->sample_aspect_ratio)) { yading@11: AVRational display_aspect_ratio; yading@11: av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den, yading@11: st->codec->width*st->sample_aspect_ratio.num, yading@11: st->codec->height*st->sample_aspect_ratio.den, yading@11: 1024*1024); yading@11: av_log(NULL, AV_LOG_INFO, ", SAR %d:%d DAR %d:%d", yading@11: st->sample_aspect_ratio.num, st->sample_aspect_ratio.den, yading@11: display_aspect_ratio.num, display_aspect_ratio.den); yading@11: } yading@11: if(st->codec->codec_type == AVMEDIA_TYPE_VIDEO){ yading@11: if(st->avg_frame_rate.den && st->avg_frame_rate.num) yading@11: print_fps(av_q2d(st->avg_frame_rate), "fps"); yading@11: #if FF_API_R_FRAME_RATE yading@11: if(st->r_frame_rate.den && st->r_frame_rate.num) yading@11: print_fps(av_q2d(st->r_frame_rate), "tbr"); yading@11: #endif yading@11: if(st->time_base.den && st->time_base.num) yading@11: print_fps(1/av_q2d(st->time_base), "tbn"); yading@11: if(st->codec->time_base.den && st->codec->time_base.num) yading@11: print_fps(1/av_q2d(st->codec->time_base), "tbc"); yading@11: } yading@11: if (st->disposition & AV_DISPOSITION_DEFAULT) yading@11: av_log(NULL, AV_LOG_INFO, " (default)"); yading@11: if (st->disposition & AV_DISPOSITION_DUB) yading@11: av_log(NULL, AV_LOG_INFO, " (dub)"); yading@11: if (st->disposition & AV_DISPOSITION_ORIGINAL) yading@11: av_log(NULL, AV_LOG_INFO, " (original)"); yading@11: if (st->disposition & AV_DISPOSITION_COMMENT) yading@11: av_log(NULL, AV_LOG_INFO, " (comment)"); yading@11: if (st->disposition & AV_DISPOSITION_LYRICS) yading@11: av_log(NULL, AV_LOG_INFO, " (lyrics)"); yading@11: if (st->disposition & AV_DISPOSITION_KARAOKE) yading@11: av_log(NULL, AV_LOG_INFO, " (karaoke)"); yading@11: if (st->disposition & AV_DISPOSITION_FORCED) yading@11: av_log(NULL, AV_LOG_INFO, " (forced)"); yading@11: if (st->disposition & AV_DISPOSITION_HEARING_IMPAIRED) yading@11: av_log(NULL, AV_LOG_INFO, " (hearing impaired)"); yading@11: if (st->disposition & AV_DISPOSITION_VISUAL_IMPAIRED) yading@11: av_log(NULL, AV_LOG_INFO, " (visual impaired)"); yading@11: if (st->disposition & AV_DISPOSITION_CLEAN_EFFECTS) yading@11: av_log(NULL, AV_LOG_INFO, " (clean effects)"); yading@11: av_log(NULL, AV_LOG_INFO, "\n"); yading@11: dump_metadata(NULL, st->metadata, " "); yading@11: } yading@11: yading@11: void av_dump_format(AVFormatContext *ic, yading@11: int index, yading@11: const char *url, yading@11: int is_output) yading@11: { yading@11: int i; yading@11: uint8_t *printed = ic->nb_streams ? 
av_mallocz(ic->nb_streams) : NULL;
yading@11:     if (ic->nb_streams && !printed)
yading@11:         return;
yading@11:
yading@11:     av_log(NULL, AV_LOG_INFO, "%s #%d, %s, %s '%s':\n",
yading@11:            is_output ? "Output" : "Input",
yading@11:            index,
yading@11:            is_output ? ic->oformat->name : ic->iformat->name,
yading@11:            is_output ? "to" : "from", url);
yading@11:     dump_metadata(NULL, ic->metadata, "  ");
yading@11:     if (!is_output) {
yading@11:         av_log(NULL, AV_LOG_INFO, "  Duration: ");
yading@11:         if (ic->duration != AV_NOPTS_VALUE) {
yading@11:             int hours, mins, secs, us;
yading@11:             int64_t duration = ic->duration + 5000;
yading@11:             secs = duration / AV_TIME_BASE;
yading@11:             us = duration % AV_TIME_BASE;
yading@11:             mins = secs / 60;
yading@11:             secs %= 60;
yading@11:             hours = mins / 60;
yading@11:             mins %= 60;
yading@11:             av_log(NULL, AV_LOG_INFO, "%02d:%02d:%02d.%02d", hours, mins, secs,
yading@11:                    (100 * us) / AV_TIME_BASE);
yading@11:         } else {
yading@11:             av_log(NULL, AV_LOG_INFO, "N/A");
yading@11:         }
yading@11:         if (ic->start_time != AV_NOPTS_VALUE) {
yading@11:             int secs, us;
yading@11:             av_log(NULL, AV_LOG_INFO, ", start: ");
yading@11:             secs = ic->start_time / AV_TIME_BASE;
yading@11:             us = abs(ic->start_time % AV_TIME_BASE);
yading@11:             av_log(NULL, AV_LOG_INFO, "%d.%06d",
yading@11:                    secs, (int)av_rescale(us, 1000000, AV_TIME_BASE));
yading@11:         }
yading@11:         av_log(NULL, AV_LOG_INFO, ", bitrate: ");
yading@11:         if (ic->bit_rate) {
yading@11:             av_log(NULL, AV_LOG_INFO,"%d kb/s", ic->bit_rate / 1000);
yading@11:         } else {
yading@11:             av_log(NULL, AV_LOG_INFO, "N/A");
yading@11:         }
yading@11:         av_log(NULL, AV_LOG_INFO, "\n");
yading@11:     }
yading@11:     for (i = 0; i < ic->nb_chapters; i++) {
yading@11:         AVChapter *ch = ic->chapters[i];
yading@11:         av_log(NULL, AV_LOG_INFO, "    Chapter #%d.%d: ", index, i);
yading@11:         av_log(NULL, AV_LOG_INFO, "start %f, ", ch->start * av_q2d(ch->time_base));
yading@11:         av_log(NULL, AV_LOG_INFO, "end %f\n", ch->end * av_q2d(ch->time_base));
yading@11:
yading@11:         dump_metadata(NULL, ch->metadata, "    ");
yading@11:     }
yading@11:     if(ic->nb_programs) {
yading@11:         int j, k, total = 0;
yading@11:         for(j=0; j<ic->nb_programs; j++) {
yading@11:             AVDictionaryEntry *name = av_dict_get(ic->programs[j]->metadata,
yading@11:                                                   "name", NULL, 0);
yading@11:             av_log(NULL, AV_LOG_INFO, "  Program %d %s\n", ic->programs[j]->id,
yading@11:                    name ?
yading@11:                    name->value : "");
yading@11:             dump_metadata(NULL, ic->programs[j]->metadata, "    ");
yading@11:             for(k=0; k<ic->programs[j]->nb_stream_indexes; k++) {
yading@11:                 dump_stream_format(ic, ic->programs[j]->stream_index[k], index, is_output);
yading@11:                 printed[ic->programs[j]->stream_index[k]] = 1;
yading@11:             }
yading@11:             total += ic->programs[j]->nb_stream_indexes;
yading@11:         }
yading@11:         if (total < ic->nb_streams)
yading@11:             av_log(NULL, AV_LOG_INFO, "  No Program\n");
yading@11:     }
yading@11:     for(i=0;i<ic->nb_streams;i++)
yading@11:         if (!printed[i])
yading@11:             dump_stream_format(ic, i, index, is_output);
yading@11:
yading@11:     av_free(printed);
yading@11: }
yading@11:
yading@11: uint64_t ff_ntp_time(void)
yading@11: {
yading@11:     return (av_gettime() / 1000) * 1000 + NTP_OFFSET_US;
yading@11: }
yading@11:
yading@11: int av_get_frame_filename(char *buf, int buf_size,
yading@11:                           const char *path, int number)
yading@11: {
yading@11:     const char *p;
yading@11:     char *q, buf1[20], c;
yading@11:     int nd, len, percentd_found;
yading@11:
yading@11:     q = buf;
yading@11:     p = path;
yading@11:     percentd_found = 0;
yading@11:     for(;;) {
yading@11:         c = *p++;
yading@11:         if (c == '\0')
yading@11:             break;
yading@11:         if (c == '%') {
yading@11:             do {
yading@11:                 nd = 0;
yading@11:                 while (av_isdigit(*p)) {
yading@11:                     nd = nd * 10 + *p++ - '0';
yading@11:                 }
yading@11:                 c = *p++;
yading@11:             } while (av_isdigit(c));
yading@11:
yading@11:             switch(c) {
yading@11:             case '%':
yading@11:                 goto addchar;
yading@11:             case 'd':
yading@11:                 if (percentd_found)
yading@11:                     goto fail;
yading@11:                 percentd_found = 1;
yading@11:                 snprintf(buf1, sizeof(buf1), "%0*d", nd, number);
yading@11:                 len = strlen(buf1);
yading@11:                 if ((q - buf + len) > buf_size - 1)
yading@11:                     goto fail;
yading@11:                 memcpy(q, buf1, len);
yading@11:                 q += len;
yading@11:                 break;
yading@11:             default:
yading@11:                 goto fail;
yading@11:             }
yading@11:         } else {
yading@11:         addchar:
yading@11:             if ((q - buf) < buf_size - 1)
yading@11:                 *q++ = c;
yading@11:         }
yading@11:     }
yading@11:     if (!percentd_found)
yading@11:         goto fail;
yading@11:     *q = '\0';
yading@11:     return 0;
yading@11:  fail:
yading@11:     *q = '\0';
yading@11:     return -1;
yading@11: }
yading@11:
yading@11: static void hex_dump_internal(void *avcl, FILE *f, int level,
yading@11:                               const uint8_t *buf, int size)
yading@11: {
yading@11:     int len, i, j, c;
yading@11: #define PRINT(...) do { if (!f) av_log(avcl, level, __VA_ARGS__); else fprintf(f, __VA_ARGS__); } while(0)
yading@11:
yading@11:     for(i=0;i<size;i+=16) {
yading@11:         len = size - i;
yading@11:         if (len > 16)
yading@11:             len = 16;
yading@11:         PRINT("%08x ", i);
yading@11:         for(j=0;j<16;j++) {
yading@11:             if (j < len)
yading@11:                 PRINT(" %02x", buf[i+j]);
yading@11:             else
yading@11:                 PRINT("   ");
yading@11:         }
yading@11:         PRINT(" ");
yading@11:         for(j=0;j<len;j++) {
yading@11:             c = buf[i+j];
yading@11:             if (c < ' ' || c > '~')
yading@11:                 c = '.';
yading@11:             PRINT("%c", c);
yading@11:         }
yading@11:         PRINT("\n");
yading@11:     }
yading@11: #undef PRINT
yading@11: }
yading@11:
yading@11: void av_hex_dump(FILE *f, const uint8_t *buf, int size)
yading@11: {
yading@11:     hex_dump_internal(NULL, f, 0, buf, size);
yading@11: }
yading@11:
yading@11: void av_hex_dump_log(void *avcl, int level, const uint8_t *buf, int size)
yading@11: {
yading@11:     hex_dump_internal(avcl, NULL, level, buf, size);
yading@11: }
yading@11:
yading@11: static void pkt_dump_internal(void *avcl, FILE *f, int level, AVPacket *pkt, int dump_payload, AVRational time_base)
yading@11: {
yading@11: #define PRINT(...)
do { if (!f) av_log(avcl, level, __VA_ARGS__); else fprintf(f, __VA_ARGS__); } while(0) yading@11: PRINT("stream #%d:\n", pkt->stream_index); yading@11: PRINT(" keyframe=%d\n", ((pkt->flags & AV_PKT_FLAG_KEY) != 0)); yading@11: PRINT(" duration=%0.3f\n", pkt->duration * av_q2d(time_base)); yading@11: /* DTS is _always_ valid after av_read_frame() */ yading@11: PRINT(" dts="); yading@11: if (pkt->dts == AV_NOPTS_VALUE) yading@11: PRINT("N/A"); yading@11: else yading@11: PRINT("%0.3f", pkt->dts * av_q2d(time_base)); yading@11: /* PTS may not be known if B-frames are present. */ yading@11: PRINT(" pts="); yading@11: if (pkt->pts == AV_NOPTS_VALUE) yading@11: PRINT("N/A"); yading@11: else yading@11: PRINT("%0.3f", pkt->pts * av_q2d(time_base)); yading@11: PRINT("\n"); yading@11: PRINT(" size=%d\n", pkt->size); yading@11: #undef PRINT yading@11: if (dump_payload) yading@11: av_hex_dump(f, pkt->data, pkt->size); yading@11: } yading@11: yading@11: #if FF_API_PKT_DUMP yading@11: void av_pkt_dump(FILE *f, AVPacket *pkt, int dump_payload) yading@11: { yading@11: AVRational tb = { 1, AV_TIME_BASE }; yading@11: pkt_dump_internal(NULL, f, 0, pkt, dump_payload, tb); yading@11: } yading@11: #endif yading@11: yading@11: void av_pkt_dump2(FILE *f, AVPacket *pkt, int dump_payload, AVStream *st) yading@11: { yading@11: pkt_dump_internal(NULL, f, 0, pkt, dump_payload, st->time_base); yading@11: } yading@11: yading@11: #if FF_API_PKT_DUMP yading@11: void av_pkt_dump_log(void *avcl, int level, AVPacket *pkt, int dump_payload) yading@11: { yading@11: AVRational tb = { 1, AV_TIME_BASE }; yading@11: pkt_dump_internal(avcl, NULL, level, pkt, dump_payload, tb); yading@11: } yading@11: #endif yading@11: yading@11: void av_pkt_dump_log2(void *avcl, int level, AVPacket *pkt, int dump_payload, yading@11: AVStream *st) yading@11: { yading@11: pkt_dump_internal(avcl, NULL, level, pkt, dump_payload, st->time_base); yading@11: } yading@11: yading@11: void av_url_split(char *proto, int proto_size, yading@11: char *authorization, int authorization_size, yading@11: char *hostname, int hostname_size, yading@11: int *port_ptr, yading@11: char *path, int path_size, yading@11: const char *url) yading@11: { yading@11: const char *p, *ls, *ls2, *at, *at2, *col, *brk; yading@11: yading@11: if (port_ptr) *port_ptr = -1; yading@11: if (proto_size > 0) proto[0] = 0; yading@11: if (authorization_size > 0) authorization[0] = 0; yading@11: if (hostname_size > 0) hostname[0] = 0; yading@11: if (path_size > 0) path[0] = 0; yading@11: yading@11: /* parse protocol */ yading@11: if ((p = strchr(url, ':'))) { yading@11: av_strlcpy(proto, url, FFMIN(proto_size, p + 1 - url)); yading@11: p++; /* skip ':' */ yading@11: if (*p == '/') p++; yading@11: if (*p == '/') p++; yading@11: } else { yading@11: /* no protocol means plain filename */ yading@11: av_strlcpy(path, url, path_size); yading@11: return; yading@11: } yading@11: yading@11: /* separate path from hostname */ yading@11: ls = strchr(p, '/'); yading@11: ls2 = strchr(p, '?'); yading@11: if(!ls) yading@11: ls = ls2; yading@11: else if (ls && ls2) yading@11: ls = FFMIN(ls, ls2); yading@11: if(ls) yading@11: av_strlcpy(path, ls, path_size); yading@11: else yading@11: ls = &p[strlen(p)]; // XXX yading@11: yading@11: /* the rest is hostname, use that to parse auth/port */ yading@11: if (ls != p) { yading@11: /* authorization (user[:pass]@hostname) */ yading@11: at2 = p; yading@11: while ((at = strchr(p, '@')) && at < ls) { yading@11: av_strlcpy(authorization, at2, yading@11: 
FFMIN(authorization_size, at + 1 - at2)); yading@11: p = at + 1; /* skip '@' */ yading@11: } yading@11: yading@11: if (*p == '[' && (brk = strchr(p, ']')) && brk < ls) { yading@11: /* [host]:port */ yading@11: av_strlcpy(hostname, p + 1, yading@11: FFMIN(hostname_size, brk - p)); yading@11: if (brk[1] == ':' && port_ptr) yading@11: *port_ptr = atoi(brk + 2); yading@11: } else if ((col = strchr(p, ':')) && col < ls) { yading@11: av_strlcpy(hostname, p, yading@11: FFMIN(col + 1 - p, hostname_size)); yading@11: if (port_ptr) *port_ptr = atoi(col + 1); yading@11: } else yading@11: av_strlcpy(hostname, p, yading@11: FFMIN(ls + 1 - p, hostname_size)); yading@11: } yading@11: } yading@11: yading@11: char *ff_data_to_hex(char *buff, const uint8_t *src, int s, int lowercase) yading@11: { yading@11: int i; yading@11: static const char hex_table_uc[16] = { '0', '1', '2', '3', yading@11: '4', '5', '6', '7', yading@11: '8', '9', 'A', 'B', yading@11: 'C', 'D', 'E', 'F' }; yading@11: static const char hex_table_lc[16] = { '0', '1', '2', '3', yading@11: '4', '5', '6', '7', yading@11: '8', '9', 'a', 'b', yading@11: 'c', 'd', 'e', 'f' }; yading@11: const char *hex_table = lowercase ? hex_table_lc : hex_table_uc; yading@11: yading@11: for(i = 0; i < s; i++) { yading@11: buff[i * 2] = hex_table[src[i] >> 4]; yading@11: buff[i * 2 + 1] = hex_table[src[i] & 0xF]; yading@11: } yading@11: yading@11: return buff; yading@11: } yading@11: yading@11: int ff_hex_to_data(uint8_t *data, const char *p) yading@11: { yading@11: int c, len, v; yading@11: yading@11: len = 0; yading@11: v = 1; yading@11: for (;;) { yading@11: p += strspn(p, SPACE_CHARS); yading@11: if (*p == '\0') yading@11: break; yading@11: c = av_toupper((unsigned char) *p++); yading@11: if (c >= '0' && c <= '9') yading@11: c = c - '0'; yading@11: else if (c >= 'A' && c <= 'F') yading@11: c = c - 'A' + 10; yading@11: else yading@11: break; yading@11: v = (v << 4) | c; yading@11: if (v & 0x100) { yading@11: if (data) yading@11: data[len] = v; yading@11: len++; yading@11: v = 1; yading@11: } yading@11: } yading@11: return len; yading@11: } yading@11: yading@11: #if FF_API_SET_PTS_INFO yading@11: void av_set_pts_info(AVStream *s, int pts_wrap_bits, yading@11: unsigned int pts_num, unsigned int pts_den) yading@11: { yading@11: avpriv_set_pts_info(s, pts_wrap_bits, pts_num, pts_den); yading@11: } yading@11: #endif yading@11: yading@11: void avpriv_set_pts_info(AVStream *s, int pts_wrap_bits, yading@11: unsigned int pts_num, unsigned int pts_den) yading@11: { yading@11: AVRational new_tb; yading@11: if(av_reduce(&new_tb.num, &new_tb.den, pts_num, pts_den, INT_MAX)){ yading@11: if(new_tb.num != pts_num) yading@11: av_log(NULL, AV_LOG_DEBUG, "st:%d removing common factor %d from timebase\n", s->index, pts_num/new_tb.num); yading@11: }else yading@11: av_log(NULL, AV_LOG_WARNING, "st:%d has too large timebase, reducing\n", s->index); yading@11: yading@11: if(new_tb.num <= 0 || new_tb.den <= 0) { yading@11: av_log(NULL, AV_LOG_ERROR, "Ignoring attempt to set invalid timebase %d/%d for st:%d\n", new_tb.num, new_tb.den, s->index); yading@11: return; yading@11: } yading@11: s->time_base = new_tb; yading@11: av_codec_set_pkt_timebase(s->codec, new_tb); yading@11: s->pts_wrap_bits = pts_wrap_bits; yading@11: } yading@11: yading@11: int ff_url_join(char *str, int size, const char *proto, yading@11: const char *authorization, const char *hostname, yading@11: int port, const char *fmt, ...) 
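/* Illustrative usage sketch for av_url_split() and ff_hex_to_data() /
 * ff_data_to_hex() defined above (added for exposition, not part of the
 * original source; the URL and buffer sizes are arbitrary example values):
 *
 *     char proto[16], auth[128], host[128], path[1024];
 *     int port;
 *     av_url_split(proto, sizeof(proto), auth, sizeof(auth),
 *                  host, sizeof(host), &port, path, sizeof(path),
 *                  "rtsp://user:pass@example.com:554/stream?x=1");
 *     // proto="rtsp", auth="user:pass", host="example.com",
 *     // port=554, path="/stream?x=1"
 *
 *     uint8_t bin[4];
 *     char hex[9];
 *     int n = ff_hex_to_data(bin, "de ad be ef"); // n == 4, bin = {0xde,0xad,0xbe,0xef}
 *     ff_data_to_hex(hex, bin, n, 1);             // writes "deadbeef", no 0-terminator
 *     hex[2 * n] = '\0';
 */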
yading@11: { yading@11: #if CONFIG_NETWORK yading@11: struct addrinfo hints = { 0 }, *ai; yading@11: #endif yading@11: yading@11: str[0] = '\0'; yading@11: if (proto) yading@11: av_strlcatf(str, size, "%s://", proto); yading@11: if (authorization && authorization[0]) yading@11: av_strlcatf(str, size, "%s@", authorization); yading@11: #if CONFIG_NETWORK && defined(AF_INET6) yading@11: /* Determine if hostname is a numerical IPv6 address, yading@11: * properly escape it within [] in that case. */ yading@11: hints.ai_flags = AI_NUMERICHOST; yading@11: if (!getaddrinfo(hostname, NULL, &hints, &ai)) { yading@11: if (ai->ai_family == AF_INET6) { yading@11: av_strlcat(str, "[", size); yading@11: av_strlcat(str, hostname, size); yading@11: av_strlcat(str, "]", size); yading@11: } else { yading@11: av_strlcat(str, hostname, size); yading@11: } yading@11: freeaddrinfo(ai); yading@11: } else yading@11: #endif yading@11: /* Not an IPv6 address, just output the plain string. */ yading@11: av_strlcat(str, hostname, size); yading@11: yading@11: if (port >= 0) yading@11: av_strlcatf(str, size, ":%d", port); yading@11: if (fmt) { yading@11: va_list vl; yading@11: int len = strlen(str); yading@11: yading@11: va_start(vl, fmt); yading@11: vsnprintf(str + len, size > len ? size - len : 0, fmt, vl); yading@11: va_end(vl); yading@11: } yading@11: return strlen(str); yading@11: } yading@11: yading@11: int ff_write_chained(AVFormatContext *dst, int dst_stream, AVPacket *pkt, yading@11: AVFormatContext *src) yading@11: { yading@11: AVPacket local_pkt; yading@11: yading@11: local_pkt = *pkt; yading@11: local_pkt.stream_index = dst_stream; yading@11: if (pkt->pts != AV_NOPTS_VALUE) yading@11: local_pkt.pts = av_rescale_q(pkt->pts, yading@11: src->streams[pkt->stream_index]->time_base, yading@11: dst->streams[dst_stream]->time_base); yading@11: if (pkt->dts != AV_NOPTS_VALUE) yading@11: local_pkt.dts = av_rescale_q(pkt->dts, yading@11: src->streams[pkt->stream_index]->time_base, yading@11: dst->streams[dst_stream]->time_base); yading@11: if (pkt->duration) yading@11: local_pkt.duration = av_rescale_q(pkt->duration, yading@11: src->streams[pkt->stream_index]->time_base, yading@11: dst->streams[dst_stream]->time_base); yading@11: return av_write_frame(dst, &local_pkt); yading@11: } yading@11: yading@11: void ff_parse_key_value(const char *str, ff_parse_key_val_cb callback_get_buf, yading@11: void *context) yading@11: { yading@11: const char *ptr = str; yading@11: yading@11: /* Parse key=value pairs. */ yading@11: for (;;) { yading@11: const char *key; yading@11: char *dest = NULL, *dest_end; yading@11: int key_len, dest_len = 0; yading@11: yading@11: /* Skip whitespace and potential commas. 
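           As an illustrative example (added annotation, not part of the
           original source), an input string such as
               foo=bar, baz="a \"quoted\" value"
           produces two callback invocations: one with key "foo=" (key_len 4)
           and one with key "baz=" (key_len 4); the unescaped values "bar" and
           a "quoted" value are copied into the buffer each callback supplies,
           truncated to that buffer's size and 0-terminated when a buffer is
           provided.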
*/ yading@11: while (*ptr && (av_isspace(*ptr) || *ptr == ',')) yading@11: ptr++; yading@11: if (!*ptr) yading@11: break; yading@11: yading@11: key = ptr; yading@11: yading@11: if (!(ptr = strchr(key, '='))) yading@11: break; yading@11: ptr++; yading@11: key_len = ptr - key; yading@11: yading@11: callback_get_buf(context, key, key_len, &dest, &dest_len); yading@11: dest_end = dest + dest_len - 1; yading@11: yading@11: if (*ptr == '\"') { yading@11: ptr++; yading@11: while (*ptr && *ptr != '\"') { yading@11: if (*ptr == '\\') { yading@11: if (!ptr[1]) yading@11: break; yading@11: if (dest && dest < dest_end) yading@11: *dest++ = ptr[1]; yading@11: ptr += 2; yading@11: } else { yading@11: if (dest && dest < dest_end) yading@11: *dest++ = *ptr; yading@11: ptr++; yading@11: } yading@11: } yading@11: if (*ptr == '\"') yading@11: ptr++; yading@11: } else { yading@11: for (; *ptr && !(av_isspace(*ptr) || *ptr == ','); ptr++) yading@11: if (dest && dest < dest_end) yading@11: *dest++ = *ptr; yading@11: } yading@11: if (dest) yading@11: *dest = 0; yading@11: } yading@11: } yading@11: yading@11: int ff_find_stream_index(AVFormatContext *s, int id) yading@11: { yading@11: int i; yading@11: for (i = 0; i < s->nb_streams; i++) { yading@11: if (s->streams[i]->id == id) yading@11: return i; yading@11: } yading@11: return -1; yading@11: } yading@11: yading@11: void ff_make_absolute_url(char *buf, int size, const char *base, yading@11: const char *rel) yading@11: { yading@11: char *sep, *path_query; yading@11: /* Absolute path, relative to the current server */ yading@11: if (base && strstr(base, "://") && rel[0] == '/') { yading@11: if (base != buf) yading@11: av_strlcpy(buf, base, size); yading@11: sep = strstr(buf, "://"); yading@11: if (sep) { yading@11: /* Take scheme from base url */ yading@11: if (rel[1] == '/') { yading@11: sep[1] = '\0'; yading@11: } else { yading@11: /* Take scheme and host from base url */ yading@11: sep += 3; yading@11: sep = strchr(sep, '/'); yading@11: if (sep) yading@11: *sep = '\0'; yading@11: } yading@11: } yading@11: av_strlcat(buf, rel, size); yading@11: return; yading@11: } yading@11: /* If rel actually is an absolute url, just copy it */ yading@11: if (!base || strstr(rel, "://") || rel[0] == '/') { yading@11: av_strlcpy(buf, rel, size); yading@11: return; yading@11: } yading@11: if (base != buf) yading@11: av_strlcpy(buf, base, size); yading@11: yading@11: /* Strip off any query string from base */ yading@11: path_query = strchr(buf, '?'); yading@11: if (path_query != NULL) yading@11: *path_query = '\0'; yading@11: yading@11: /* Is relative path just a new query part? */ yading@11: if (rel[0] == '?') { yading@11: av_strlcat(buf, rel, size); yading@11: return; yading@11: } yading@11: yading@11: /* Remove the file name from the base url */ yading@11: sep = strrchr(buf, '/'); yading@11: if (sep) yading@11: sep[1] = '\0'; yading@11: else yading@11: buf[0] = '\0'; yading@11: while (av_strstart(rel, "../", NULL) && sep) { yading@11: /* Remove the path delimiter at the end */ yading@11: sep[0] = '\0'; yading@11: sep = strrchr(buf, '/'); yading@11: /* If the next directory name to pop off is "..", break here */ yading@11: if (!strcmp(sep ? 
&sep[1] : buf, "..")) { yading@11: /* Readd the slash we just removed */ yading@11: av_strlcat(buf, "/", size); yading@11: break; yading@11: } yading@11: /* Cut off the directory name */ yading@11: if (sep) yading@11: sep[1] = '\0'; yading@11: else yading@11: buf[0] = '\0'; yading@11: rel += 3; yading@11: } yading@11: av_strlcat(buf, rel, size); yading@11: } yading@11: yading@11: int64_t ff_iso8601_to_unix_time(const char *datestr) yading@11: { yading@11: struct tm time1 = {0}, time2 = {0}; yading@11: char *ret1, *ret2; yading@11: ret1 = av_small_strptime(datestr, "%Y - %m - %d %H:%M:%S", &time1); yading@11: ret2 = av_small_strptime(datestr, "%Y - %m - %dT%H:%M:%S", &time2); yading@11: if (ret2 && !ret1) yading@11: return av_timegm(&time2); yading@11: else yading@11: return av_timegm(&time1); yading@11: } yading@11: yading@11: int avformat_query_codec(AVOutputFormat *ofmt, enum AVCodecID codec_id, int std_compliance) yading@11: { yading@11: if (ofmt) { yading@11: if (ofmt->query_codec) yading@11: return ofmt->query_codec(codec_id, std_compliance); yading@11: else if (ofmt->codec_tag) yading@11: return !!av_codec_get_tag(ofmt->codec_tag, codec_id); yading@11: else if (codec_id == ofmt->video_codec || codec_id == ofmt->audio_codec || yading@11: codec_id == ofmt->subtitle_codec) yading@11: return 1; yading@11: } yading@11: return AVERROR_PATCHWELCOME; yading@11: } yading@11: yading@11: int avformat_network_init(void) yading@11: { yading@11: #if CONFIG_NETWORK yading@11: int ret; yading@11: ff_network_inited_globally = 1; yading@11: if ((ret = ff_network_init()) < 0) yading@11: return ret; yading@11: ff_tls_init(); yading@11: #endif yading@11: return 0; yading@11: } yading@11: yading@11: int avformat_network_deinit(void) yading@11: { yading@11: #if CONFIG_NETWORK yading@11: ff_network_close(); yading@11: ff_tls_deinit(); yading@11: #endif yading@11: return 0; yading@11: } yading@11: yading@11: int ff_add_param_change(AVPacket *pkt, int32_t channels, yading@11: uint64_t channel_layout, int32_t sample_rate, yading@11: int32_t width, int32_t height) yading@11: { yading@11: uint32_t flags = 0; yading@11: int size = 4; yading@11: uint8_t *data; yading@11: if (!pkt) yading@11: return AVERROR(EINVAL); yading@11: if (channels) { yading@11: size += 4; yading@11: flags |= AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_COUNT; yading@11: } yading@11: if (channel_layout) { yading@11: size += 8; yading@11: flags |= AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_LAYOUT; yading@11: } yading@11: if (sample_rate) { yading@11: size += 4; yading@11: flags |= AV_SIDE_DATA_PARAM_CHANGE_SAMPLE_RATE; yading@11: } yading@11: if (width || height) { yading@11: size += 8; yading@11: flags |= AV_SIDE_DATA_PARAM_CHANGE_DIMENSIONS; yading@11: } yading@11: data = av_packet_new_side_data(pkt, AV_PKT_DATA_PARAM_CHANGE, size); yading@11: if (!data) yading@11: return AVERROR(ENOMEM); yading@11: bytestream_put_le32(&data, flags); yading@11: if (channels) yading@11: bytestream_put_le32(&data, channels); yading@11: if (channel_layout) yading@11: bytestream_put_le64(&data, channel_layout); yading@11: if (sample_rate) yading@11: bytestream_put_le32(&data, sample_rate); yading@11: if (width || height) { yading@11: bytestream_put_le32(&data, width); yading@11: bytestream_put_le32(&data, height); yading@11: } yading@11: return 0; yading@11: } yading@11: yading@11: const struct AVCodecTag *avformat_get_riff_video_tags(void) yading@11: { yading@11: return ff_codec_bmp_tags; yading@11: } yading@11: const struct AVCodecTag *avformat_get_riff_audio_tags(void) 
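/* Illustrative note on ff_add_param_change() above (added for exposition, not
 * part of the original source): the AV_PKT_DATA_PARAM_CHANGE side data it
 * builds is a little-endian byte stream laid out as
 *     u32 flags, [u32 channels], [u64 channel_layout], [u32 sample_rate],
 *     [u32 width, u32 height]
 * where each optional field is present only when the corresponding
 * AV_SIDE_DATA_PARAM_CHANGE_* bit is set in flags.  For example,
 * ff_add_param_change(pkt, 2, 0, 44100, 0, 0) emits 12 bytes:
 * flags = CHANNEL_COUNT | SAMPLE_RATE, channels = 2, sample_rate = 44100. */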
yading@11: { yading@11: return ff_codec_wav_tags; yading@11: } yading@11: yading@11: AVRational av_guess_sample_aspect_ratio(AVFormatContext *format, AVStream *stream, AVFrame *frame) yading@11: { yading@11: AVRational undef = {0, 1}; yading@11: AVRational stream_sample_aspect_ratio = stream ? stream->sample_aspect_ratio : undef; yading@11: AVRational codec_sample_aspect_ratio = stream && stream->codec ? stream->codec->sample_aspect_ratio : undef; yading@11: AVRational frame_sample_aspect_ratio = frame ? frame->sample_aspect_ratio : codec_sample_aspect_ratio; yading@11: yading@11: av_reduce(&stream_sample_aspect_ratio.num, &stream_sample_aspect_ratio.den, yading@11: stream_sample_aspect_ratio.num, stream_sample_aspect_ratio.den, INT_MAX); yading@11: if (stream_sample_aspect_ratio.num <= 0 || stream_sample_aspect_ratio.den <= 0) yading@11: stream_sample_aspect_ratio = undef; yading@11: yading@11: av_reduce(&frame_sample_aspect_ratio.num, &frame_sample_aspect_ratio.den, yading@11: frame_sample_aspect_ratio.num, frame_sample_aspect_ratio.den, INT_MAX); yading@11: if (frame_sample_aspect_ratio.num <= 0 || frame_sample_aspect_ratio.den <= 0) yading@11: frame_sample_aspect_ratio = undef; yading@11: yading@11: if (stream_sample_aspect_ratio.num) yading@11: return stream_sample_aspect_ratio; yading@11: else yading@11: return frame_sample_aspect_ratio; yading@11: } yading@11: yading@11: AVRational av_guess_frame_rate(AVFormatContext *format, AVStream *st, AVFrame *frame) yading@11: { yading@11: AVRational fr = st->r_frame_rate; yading@11: yading@11: if (st->codec->ticks_per_frame > 1) { yading@11: AVRational codec_fr = av_inv_q(st->codec->time_base); yading@11: AVRational avg_fr = st->avg_frame_rate; yading@11: codec_fr.den *= st->codec->ticks_per_frame; yading@11: if ( codec_fr.num > 0 && codec_fr.den > 0 && av_q2d(codec_fr) < av_q2d(fr)*0.7 yading@11: && fabs(1.0 - av_q2d(av_div_q(avg_fr, fr))) > 0.1) yading@11: fr = codec_fr; yading@11: } yading@11: yading@11: return fr; yading@11: } yading@11: yading@11: int avformat_match_stream_specifier(AVFormatContext *s, AVStream *st, yading@11: const char *spec) yading@11: { yading@11: if (*spec <= '9' && *spec >= '0') /* opt:index */ yading@11: return strtol(spec, NULL, 0) == st->index; yading@11: else if (*spec == 'v' || *spec == 'a' || *spec == 's' || *spec == 'd' || yading@11: *spec == 't') { /* opt:[vasdt] */ yading@11: enum AVMediaType type; yading@11: yading@11: switch (*spec++) { yading@11: case 'v': type = AVMEDIA_TYPE_VIDEO; break; yading@11: case 'a': type = AVMEDIA_TYPE_AUDIO; break; yading@11: case 's': type = AVMEDIA_TYPE_SUBTITLE; break; yading@11: case 'd': type = AVMEDIA_TYPE_DATA; break; yading@11: case 't': type = AVMEDIA_TYPE_ATTACHMENT; break; yading@11: default: av_assert0(0); yading@11: } yading@11: if (type != st->codec->codec_type) yading@11: return 0; yading@11: if (*spec++ == ':') { /* possibly followed by :index */ yading@11: int i, index = strtol(spec, NULL, 0); yading@11: for (i = 0; i < s->nb_streams; i++) yading@11: if (s->streams[i]->codec->codec_type == type && index-- == 0) yading@11: return i == st->index; yading@11: return 0; yading@11: } yading@11: return 1; yading@11: } else if (*spec == 'p' && *(spec + 1) == ':') { yading@11: int prog_id, i, j; yading@11: char *endptr; yading@11: spec += 2; yading@11: prog_id = strtol(spec, &endptr, 0); yading@11: for (i = 0; i < s->nb_programs; i++) { yading@11: if (s->programs[i]->id != prog_id) yading@11: continue; yading@11: yading@11: if (*endptr++ == ':') { yading@11: int 
stream_idx = strtol(endptr, NULL, 0); yading@11: return stream_idx >= 0 && yading@11: stream_idx < s->programs[i]->nb_stream_indexes && yading@11: st->index == s->programs[i]->stream_index[stream_idx]; yading@11: } yading@11: yading@11: for (j = 0; j < s->programs[i]->nb_stream_indexes; j++) yading@11: if (st->index == s->programs[i]->stream_index[j]) yading@11: return 1; yading@11: } yading@11: return 0; yading@11: } else if (*spec == '#') { yading@11: int sid; yading@11: char *endptr; yading@11: sid = strtol(spec + 1, &endptr, 0); yading@11: if (!*endptr) yading@11: return st->id == sid; yading@11: } else if (!*spec) /* empty specifier, matches everything */ yading@11: return 1; yading@11: yading@11: av_log(s, AV_LOG_ERROR, "Invalid stream specifier: %s.\n", spec); yading@11: return AVERROR(EINVAL); yading@11: } yading@11: yading@11: void ff_generate_avci_extradata(AVStream *st) yading@11: { yading@11: static const uint8_t avci100_1080p_extradata[] = { yading@11: // SPS yading@11: 0x00, 0x00, 0x00, 0x01, 0x67, 0x7a, 0x10, 0x29, yading@11: 0xb6, 0xd4, 0x20, 0x22, 0x33, 0x19, 0xc6, 0x63, yading@11: 0x23, 0x21, 0x01, 0x11, 0x98, 0xce, 0x33, 0x19, yading@11: 0x18, 0x21, 0x02, 0x56, 0xb9, 0x3d, 0x7d, 0x7e, yading@11: 0x4f, 0xe3, 0x3f, 0x11, 0xf1, 0x9e, 0x08, 0xb8, yading@11: 0x8c, 0x54, 0x43, 0xc0, 0x78, 0x02, 0x27, 0xe2, yading@11: 0x70, 0x1e, 0x30, 0x10, 0x10, 0x14, 0x00, 0x00, yading@11: 0x03, 0x00, 0x04, 0x00, 0x00, 0x03, 0x00, 0xca, yading@11: 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, yading@11: // PPS yading@11: 0x00, 0x00, 0x00, 0x01, 0x68, 0xce, 0x33, 0x48, yading@11: 0xd0 yading@11: }; yading@11: static const uint8_t avci100_1080i_extradata[] = { yading@11: // SPS yading@11: 0x00, 0x00, 0x00, 0x01, 0x67, 0x7a, 0x10, 0x29, yading@11: 0xb6, 0xd4, 0x20, 0x22, 0x33, 0x19, 0xc6, 0x63, yading@11: 0x23, 0x21, 0x01, 0x11, 0x98, 0xce, 0x33, 0x19, yading@11: 0x18, 0x21, 0x03, 0x3a, 0x46, 0x65, 0x6a, 0x65, yading@11: 0x24, 0xad, 0xe9, 0x12, 0x32, 0x14, 0x1a, 0x26, yading@11: 0x34, 0xad, 0xa4, 0x41, 0x82, 0x23, 0x01, 0x50, yading@11: 0x2b, 0x1a, 0x24, 0x69, 0x48, 0x30, 0x40, 0x2e, yading@11: 0x11, 0x12, 0x08, 0xc6, 0x8c, 0x04, 0x41, 0x28, yading@11: 0x4c, 0x34, 0xf0, 0x1e, 0x01, 0x13, 0xf2, 0xe0, yading@11: 0x3c, 0x60, 0x20, 0x20, 0x28, 0x00, 0x00, 0x03, yading@11: 0x00, 0x08, 0x00, 0x00, 0x03, 0x01, 0x94, 0x00, yading@11: // PPS yading@11: 0x00, 0x00, 0x00, 0x01, 0x68, 0xce, 0x33, 0x48, yading@11: 0xd0 yading@11: }; yading@11: static const uint8_t avci50_1080i_extradata[] = { yading@11: // SPS yading@11: 0x00, 0x00, 0x00, 0x01, 0x67, 0x6e, 0x10, 0x28, yading@11: 0xa6, 0xd4, 0x20, 0x32, 0x33, 0x0c, 0x71, 0x18, yading@11: 0x88, 0x62, 0x10, 0x19, 0x19, 0x86, 0x38, 0x8c, yading@11: 0x44, 0x30, 0x21, 0x02, 0x56, 0x4e, 0x6e, 0x61, yading@11: 0x87, 0x3e, 0x73, 0x4d, 0x98, 0x0c, 0x03, 0x06, yading@11: 0x9c, 0x0b, 0x73, 0xe6, 0xc0, 0xb5, 0x18, 0x63, yading@11: 0x0d, 0x39, 0xe0, 0x5b, 0x02, 0xd4, 0xc6, 0x19, yading@11: 0x1a, 0x79, 0x8c, 0x32, 0x34, 0x24, 0xf0, 0x16, yading@11: 0x81, 0x13, 0xf7, 0xff, 0x80, 0x02, 0x00, 0x01, yading@11: 0xf1, 0x80, 0x80, 0x80, 0xa0, 0x00, 0x00, 0x03, yading@11: 0x00, 0x20, 0x00, 0x00, 0x06, 0x50, 0x80, 0x00, yading@11: // PPS yading@11: 0x00, 0x00, 0x00, 0x01, 0x68, 0xee, 0x31, 0x12, yading@11: 0x11 yading@11: }; yading@11: static const uint8_t avci100_720p_extradata[] = { yading@11: // SPS yading@11: 0x00, 0x00, 0x00, 0x01, 0x67, 0x7a, 0x10, 0x29, yading@11: 0xb6, 0xd4, 0x20, 0x2a, 0x33, 0x1d, 0xc7, 0x62, yading@11: 0xa1, 0x08, 0x40, 0x54, 0x66, 0x3b, 0x8e, 0xc5, 
yading@11: 0x42, 0x02, 0x10, 0x25, 0x64, 0x2c, 0x89, 0xe8, yading@11: 0x85, 0xe4, 0x21, 0x4b, 0x90, 0x83, 0x06, 0x95, yading@11: 0xd1, 0x06, 0x46, 0x97, 0x20, 0xc8, 0xd7, 0x43, yading@11: 0x08, 0x11, 0xc2, 0x1e, 0x4c, 0x91, 0x0f, 0x01, yading@11: 0x40, 0x16, 0xec, 0x07, 0x8c, 0x04, 0x04, 0x05, yading@11: 0x00, 0x00, 0x03, 0x00, 0x01, 0x00, 0x00, 0x03, yading@11: 0x00, 0x64, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00, yading@11: // PPS yading@11: 0x00, 0x00, 0x00, 0x01, 0x68, 0xce, 0x31, 0x12, yading@11: 0x11 yading@11: }; yading@11: int size = 0; yading@11: const uint8_t *data = 0; yading@11: if (st->codec->width == 1920) { yading@11: if (st->codec->field_order == AV_FIELD_PROGRESSIVE) { yading@11: data = avci100_1080p_extradata; yading@11: size = sizeof(avci100_1080p_extradata); yading@11: } else { yading@11: data = avci100_1080i_extradata; yading@11: size = sizeof(avci100_1080i_extradata); yading@11: } yading@11: } else if (st->codec->width == 1440) { yading@11: data = avci50_1080i_extradata; yading@11: size = sizeof(avci50_1080i_extradata); yading@11: } else if (st->codec->width == 1280) { yading@11: data = avci100_720p_extradata; yading@11: size = sizeof(avci100_720p_extradata); yading@11: } yading@11: if (!size) yading@11: return; yading@11: av_freep(&st->codec->extradata); yading@11: st->codec->extradata_size = 0; yading@11: st->codec->extradata = av_mallocz(size + FF_INPUT_BUFFER_PADDING_SIZE); yading@11: if (!st->codec->extradata) yading@11: return; yading@11: memcpy(st->codec->extradata, data, size); yading@11: st->codec->extradata_size = size; yading@11: } yading@11: yading@11: static int match_host_pattern(const char *pattern, const char *hostname) yading@11: { yading@11: int len_p, len_h; yading@11: if (!strcmp(pattern, "*")) yading@11: return 1; yading@11: // Skip a possible *. at the start of the pattern yading@11: if (pattern[0] == '*') yading@11: pattern++; yading@11: if (pattern[0] == '.') yading@11: pattern++; yading@11: len_p = strlen(pattern); yading@11: len_h = strlen(hostname); yading@11: if (len_p > len_h) yading@11: return 0; yading@11: // Simply check if the end of hostname is equal to 'pattern' yading@11: if (!strcmp(pattern, &hostname[len_h - len_p])) { yading@11: if (len_h == len_p) yading@11: return 1; // Exact match yading@11: if (hostname[len_h - len_p - 1] == '.') yading@11: return 1; // The matched substring is a domain and not just a substring of a domain yading@11: } yading@11: return 0; yading@11: } yading@11: yading@11: int ff_http_match_no_proxy(const char *no_proxy, const char *hostname) yading@11: { yading@11: char *buf, *start; yading@11: int ret = 0; yading@11: if (!no_proxy) yading@11: return 0; yading@11: if (!hostname) yading@11: return 0; yading@11: buf = av_strdup(no_proxy); yading@11: if (!buf) yading@11: return 0; yading@11: start = buf; yading@11: while (start) { yading@11: char *sep, *next = NULL; yading@11: start += strspn(start, " ,"); yading@11: sep = start + strcspn(start, " ,"); yading@11: if (*sep) { yading@11: next = sep + 1; yading@11: *sep = '\0'; yading@11: } yading@11: if (match_host_pattern(start, hostname)) { yading@11: ret = 1; yading@11: break; yading@11: } yading@11: start = next; yading@11: } yading@11: av_free(buf); yading@11: return ret; yading@11: }
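/* Illustrative sketch (added for exposition, not part of the original source):
 * how ff_http_match_no_proxy() above interprets a typical no_proxy list.  The
 * helper below is hypothetical and exists only to show the expected results;
 * it is not called anywhere in libavformat. */
static void example_no_proxy_matching(void)
{
    const char *no_proxy = "localhost, .example.com, 10.0.0.1";

    av_assert0(ff_http_match_no_proxy(no_proxy, "localhost")       == 1); /* exact match */
    av_assert0(ff_http_match_no_proxy(no_proxy, "www.example.com") == 1); /* ".example.com" matches subdomains */
    av_assert0(ff_http_match_no_proxy(no_proxy, "example.com")     == 1); /* and the bare domain itself */
    av_assert0(ff_http_match_no_proxy(no_proxy, "badexample.com")  == 0); /* suffix without a '.' boundary */
    av_assert0(ff_http_match_no_proxy(no_proxy, "10.0.0.1")        == 1);
}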