annotate sv/videoio/SDL_ffmpeg.cpp @ 282:d9319859a4cf tip

author benoitrigolleau
date Fri, 31 Oct 2008 11:00:24 +0000
parents 6eeb195adbb4
children
rev   line source
ivand_qmul@125 1 /*******************************************************************************
ivand_qmul@125 2 * *
ivand_qmul@125 3 * SDL_ffmpeg is a library for basic multimedia functionality. *
ivand_qmul@125 4 * SDL_ffmpeg is based on ffmpeg. *
ivand_qmul@125 5 * *
ivand_qmul@125 6 * Copyright (C) 2007 Arjan Houben *
ivand_qmul@125 7 * *
ivand_qmul@125 8 * SDL_ffmpeg is free software: you can redistribute it and/or modify *
ivand_qmul@125 9 * it under the terms of the GNU Lesser General Public License as published *
ivand_qmul@125 10 * by the Free Software Foundation, either version 3 of the License, or any *
ivand_qmul@125 11 * later version. *
ivand_qmul@125 12 * *
ivand_qmul@125 13 * This program is distributed in the hope that it will be useful, *
ivand_qmul@125 14 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
ivand_qmul@125 15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
ivand_qmul@125 16 * GNU Lesser General Public License for more details. *
ivand_qmul@125 17 * *
ivand_qmul@125 18 * You should have received a copy of the GNU Lesser General Public License *
ivand_qmul@125 19 * along with this program. If not, see <http://www.gnu.org/licenses/>. *
ivand_qmul@125 20 * *
ivand_qmul@125 21 *******************************************************************************/
ivand_qmul@125 22
ivand_qmul@125 23 #include <stdio.h>
ivand_qmul@125 24 #include <stdlib.h>
ivand_qmul@125 25 #ifdef __cplusplus
ivand_qmul@125 26 extern "C" {
ivand_qmul@125 27 #endif
ivand_qmul@125 28 #ifdef WIN32
ivand_qmul@125 29 #include "SDL_ffmpeg.h"
ivand_qmul@125 30 #include <SDL.h>
ivand_qmul@125 31 #include <SDL_thread.h>
ivand_qmul@129 32 #include <stdio.h>
ivand_qmul@129 33 #include <Windows.h>
ivand_qmul@125 34 #endif
ivand_qmul@125 35
ivand_qmul@125 36 #ifdef __unix__
ivand_qmul@125 37 #include <SDL/SDL.h>
ivand_qmul@125 38 #include <SDL/SDL_thread.h>
ivand_qmul@125 39 #endif
benoitrigolleau@130 40 #undef main
ivand_qmul@125 41 #ifdef __cplusplus
ivand_qmul@125 42 }
ivand_qmul@125 43 #endif
ivand_qmul@125 44 #include "../../sv/main/MainWindow.h"
ivand_qmul@129 45 #include <time.h>
ivand_qmul@125 46
ivand_qmul@125 47 //const int SDL_FFMPEG_MAX_BUFFERED_FRAMES = 25;
ivand_qmul@125 48 //const int SDL_FFMPEG_MAX_BUFFERED_SAMPLES = 512 * 512;
benoitrigolleau@256 49 extern int zoomWivan;
benoitrigolleau@256 50 extern int zoomHivan;
ivand_qmul@125 51 int FFMPEG_init_was_called = 0;
benoitrigolleau@256 52 //FILE *pFile, *tFile;
ivand_qmul@129 53 int64_t Time,Time1;
ivand_qmul@129 54 int64_t realt=0;
ivand_qmul@129 55
ivand_qmul@125 56 SDL_ffmpegFile* SDL_ffmpegCreateFile() {
ivand_qmul@125 57
ivand_qmul@125 58 // create SDL_ffmpegFile pointer
ivand_qmul@125 59 SDL_ffmpegFile *file = (SDL_ffmpegFile*)malloc( sizeof(SDL_ffmpegFile) );
ivand_qmul@125 60 if(!file) return 0;
ivand_qmul@125 61 file->_ffmpeg = av_alloc_format_context(); //(AVFormatContext*)malloc(sizeof(AVFormatContext));
ivand_qmul@125 62 // create a semaphore for every file
ivand_qmul@125 63 file->decode = SDL_CreateSemaphore(1);
ivand_qmul@125 64
ivand_qmul@129 65 Time=0;
ivand_qmul@129 66 Time1=0;
benoitrigolleau@256 67 //fopen_s (&pFile,"myfile.txt","w");
benoitrigolleau@256 68 // fopen_s (&tFile,"Timestampfile.txt","w");
ivand_qmul@125 69 // allocate room for VStreams
ivand_qmul@125 70 file->vs = (SDL_ffmpegStream**)malloc( sizeof(SDL_ffmpegStream*) * MAX_STREAMS );
ivand_qmul@125 71 if(!file->vs) {
ivand_qmul@125 72 free( file );
ivand_qmul@125 73 return 0;
ivand_qmul@125 74 }
ivand_qmul@125 75
ivand_qmul@125 76 // allocate room for AStreams
ivand_qmul@125 77 file->as = (SDL_ffmpegStream**)malloc( sizeof(SDL_ffmpegStream*) * MAX_STREAMS );
ivand_qmul@125 78 if(!file->as) {
ivand_qmul@125 79 free( file->vs ); free( file );
ivand_qmul@125 80 return 0;
ivand_qmul@125 81 }
ivand_qmul@125 82
ivand_qmul@125 83 // initialize variables with standard values
ivand_qmul@125 84 file->audioStream = -1;
ivand_qmul@125 85 file->videoStream = -1;
ivand_qmul@125 86
ivand_qmul@125 87 file->offset = 0;
ivand_qmul@125 88 file->videoOffset = 0;
ivand_qmul@125 89 file->startTime = 0;
ivand_qmul@125 90
ivand_qmul@125 91 file->threadID = 0;
ivand_qmul@125 92
ivand_qmul@125 93 return file;
ivand_qmul@125 94 }
ivand_qmul@125 95
ivand_qmul@125 96 void SDL_ffmpegFree(SDL_ffmpegFile* file) {
ivand_qmul@125 97
ivand_qmul@125 98 SDL_ffmpegStopDecoding(file);
ivand_qmul@125 99
ivand_qmul@125 100 SDL_ffmpegFlush(file);
ivand_qmul@125 101
ivand_qmul@125 102 free(file);
ivand_qmul@125 103 }
ivand_qmul@125 104
ivand_qmul@125 105 SDL_ffmpegFile* SDL_ffmpegOpen(const char* filename) {
ivand_qmul@125 106
ivand_qmul@125 107
ivand_qmul@125 108 // register all codecs
ivand_qmul@125 109 if(!FFMPEG_init_was_called) {
ivand_qmul@125 110 FFMPEG_init_was_called = 1;
ivand_qmul@125 111 av_register_all();
ivand_qmul@125 112 }
ivand_qmul@125 113
ivand_qmul@125 114 // open new ffmpegFile
ivand_qmul@125 115 SDL_ffmpegFile *file = SDL_ffmpegCreateFile();
ivand_qmul@125 116 if(!file) return 0;
ivand_qmul@125 117
ivand_qmul@125 118 // information about format is stored in file->_ffmpeg
ivand_qmul@125 119
ivand_qmul@125 120 // open the file
ivand_qmul@125 121 if(av_open_input_file( (AVFormatContext**)&file->_ffmpeg, filename, 0, 0, 0) != 0) {
ivand_qmul@125 122 fprintf(stderr, "could not open \"%s\"\n", filename);
ivand_qmul@125 123 free(file);
ivand_qmul@125 124 return 0;
ivand_qmul@125 125 }
ivand_qmul@125 126
ivand_qmul@125 127 // retrieve format information
ivand_qmul@125 128 if(av_find_stream_info((AVFormatContext *)(file->_ffmpeg)) < 0) {
ivand_qmul@125 129 fprintf(stderr, "could not retrieve stream info\n");
ivand_qmul@125 130 free(file);
ivand_qmul@125 131 return 0;
ivand_qmul@125 132 }
ivand_qmul@125 133
ivand_qmul@125 134 // dump info to logfile
ivand_qmul@125 135 // dump_format(file->_ffmpeg, 0, filename, 0);
ivand_qmul@125 136
ivand_qmul@125 137 // find the streams in the file
ivand_qmul@125 138 file->VStreams = 0;
ivand_qmul@125 139 file->AStreams = 0;
ivand_qmul@125 140 file->threadActive = 0;
ivand_qmul@125 141
ivand_qmul@125 142 // iterate through all the streams and store audio/video streams
ivand_qmul@125 143 size_t i;
ivand_qmul@125 144 for(i=0; i<((AVFormatContext*)file->_ffmpeg)->nb_streams; i++) {
ivand_qmul@125 145
ivand_qmul@125 146 if(((AVFormatContext*)file->_ffmpeg)->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO) {
ivand_qmul@125 147
ivand_qmul@125 148 // if this is a stream of the correct type we create a new stream
ivand_qmul@125 149 SDL_ffmpegStream* stream = (SDL_ffmpegStream*)malloc( sizeof(SDL_ffmpegStream) );
ivand_qmul@125 150
ivand_qmul@125 151 if(stream) {
ivand_qmul@125 152 // we set our stream to zero
ivand_qmul@125 153 memset(stream, 0, sizeof(SDL_ffmpegStream));
ivand_qmul@125 154
ivand_qmul@125 155 // save unique streamid
ivand_qmul@125 156 stream->id = i;
ivand_qmul@125 157
ivand_qmul@125 158 // the timeBase is what we use to calculate from/to pts
ivand_qmul@125 159 stream->timeBase = av_q2d(((AVFormatContext*)file->_ffmpeg)->streams[i]->time_base) * 1000;
ivand_qmul@125 160
ivand_qmul@125 161 // save width, height and pixFmt of our outputframes
ivand_qmul@125 162 stream->width = ((AVFormatContext*)file->_ffmpeg)->streams[i]->codec->width;
ivand_qmul@125 163 stream->height = ((AVFormatContext*)file->_ffmpeg)->streams[i]->codec->height;
ivand_qmul@125 164 stream->pixFmt = PIX_FMT_RGB24;
ivand_qmul@125 165
ivand_qmul@125 166 // _ffmpeg holds data about streamcodec
ivand_qmul@125 167 stream->_ffmpeg = ((AVFormatContext*)file->_ffmpeg)->streams[i]->codec;
ivand_qmul@125 168
ivand_qmul@125 169 // get the correct decoder for this stream
ivand_qmul@125 170 AVCodec *codec = avcodec_find_decoder(((AVCodecContext*)stream->_ffmpeg)->codec_id);
ivand_qmul@125 171
ivand_qmul@125 172 if(!codec) {
ivand_qmul@125 173 free(stream);
ivand_qmul@125 174 fprintf(stderr, "could not find codec\n");
ivand_qmul@125 175 } else if(avcodec_open(((AVFormatContext*)file->_ffmpeg)->streams[i]->codec, codec) < 0) {
ivand_qmul@125 176 free(stream);
ivand_qmul@125 177 fprintf(stderr, "could not open decoder\n");
ivand_qmul@125 178 } else {
ivand_qmul@125 179
ivand_qmul@125 180 // copy metadata from AVStream into our stream
ivand_qmul@125 181 stream->frameRate[0] = ((AVFormatContext*)file->_ffmpeg)->streams[i]->time_base.num;
ivand_qmul@125 182 stream->frameRate[1] = ((AVFormatContext*)file->_ffmpeg)->streams[i]->time_base.den;
ivand_qmul@125 183 memcpy(stream->language, ((AVFormatContext*)file->_ffmpeg)->streams[i]->language, 4);
ivand_qmul@125 184 stream->sampleRate = ((AVFormatContext*)file->_ffmpeg)->streams[i]->codec->sample_rate;
ivand_qmul@125 185 stream->channels = ((AVFormatContext*)file->_ffmpeg)->streams[i]->codec->channels;
ivand_qmul@125 186 memcpy(stream->codecName, ((AVFormatContext*)file->_ffmpeg)->streams[i]->codec->codec_name, 32);
ivand_qmul@125 187
ivand_qmul@125 188 stream->audio = 0;
ivand_qmul@125 189 stream->size = 0;
ivand_qmul@125 190 stream->imageBuffer = (bufferImage**)calloc( SDL_FFMPEG_MAX_BUFFERED_FRAMES, sizeof(bufferImage*) );
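ivand_qmul@129 // writeImage and readImage below index imageBuffer as a ring buffer,
ivand_qmul@129 // wrapping modulo SDL_FFMPEG_MAX_BUFFERED_FRAMES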
ivand_qmul@129 191 stream->writeImage = 0;
ivand_qmul@129 192 stream->readImage = 0;
ivand_qmul@125 193 file->vs[file->VStreams] = stream;
ivand_qmul@125 194 file->VStreams++;
ivand_qmul@125 195
ivand_qmul@125 196 // create semaphore for thread-safe use
ivand_qmul@125 197 stream->sem = SDL_CreateSemaphore(1);
ivand_qmul@125 198 }
ivand_qmul@125 199 }
ivand_qmul@125 200 } else if(((AVFormatContext*)file->_ffmpeg)->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO) {
ivand_qmul@125 201
ivand_qmul@125 202 // if this is a stream of the correct type we create a new stream
ivand_qmul@125 203 SDL_ffmpegStream* stream = (SDL_ffmpegStream*)malloc( sizeof(SDL_ffmpegStream) );
ivand_qmul@125 204
ivand_qmul@125 205 if(stream) {
ivand_qmul@125 206 // we set our stream to zero
ivand_qmul@125 207 memset(stream, 0, sizeof(SDL_ffmpegStream));
ivand_qmul@125 208
ivand_qmul@125 209 // save unique streamid
ivand_qmul@125 210 stream->id = i;
ivand_qmul@125 211
ivand_qmul@125 212 // the timeBase is what we use to calculate from/to pts
ivand_qmul@125 213 stream->timeBase = av_q2d(((AVFormatContext*)file->_ffmpeg)->streams[i]->time_base) * 1000;
ivand_qmul@125 214
ivand_qmul@125 215 // _ffmpeg holds data about streamcodec
ivand_qmul@125 216 stream->_ffmpeg = ((AVFormatContext*)file->_ffmpeg)->streams[i]->codec;
ivand_qmul@125 217
ivand_qmul@125 218 stream->width = 0;
ivand_qmul@125 219 stream->height = 0;
ivand_qmul@125 220 stream->pixFmt = PIX_FMT_RGB24;
ivand_qmul@125 221
ivand_qmul@125 222 // get the correct decoder for this stream
ivand_qmul@125 223 AVCodec *codec = avcodec_find_decoder(((AVFormatContext*)file->_ffmpeg)->streams[i]->codec->codec_id);
ivand_qmul@125 224
ivand_qmul@125 225 if(!codec) {
ivand_qmul@125 226 free( stream );
ivand_qmul@125 227 fprintf(stderr, "could not find codec\n");
ivand_qmul@125 228 } else if(avcodec_open(((AVFormatContext*)file->_ffmpeg)->streams[i]->codec, codec) < 0) {
ivand_qmul@125 229 free( stream );
ivand_qmul@125 230 fprintf(stderr, "could not open decoder\n");
ivand_qmul@125 231 } else {
ivand_qmul@125 232
ivand_qmul@125 233 // copy metadata from AVStream into our stream
ivand_qmul@125 234 stream->frameRate[0] = ((AVFormatContext*)file->_ffmpeg)->streams[i]->time_base.num;
ivand_qmul@125 235 stream->frameRate[1] = ((AVFormatContext*)file->_ffmpeg)->streams[i]->time_base.den;
ivand_qmul@125 236 memcpy(stream->language, ((AVFormatContext*)file->_ffmpeg)->streams[i]->language, 4);
ivand_qmul@125 237 stream->sampleRate = ((AVFormatContext*)file->_ffmpeg)->streams[i]->codec->sample_rate;
ivand_qmul@125 238 stream->channels = ((AVFormatContext*)file->_ffmpeg)->streams[i]->codec->channels;
ivand_qmul@125 239 memcpy(stream->codecName, ((AVFormatContext*)file->_ffmpeg)->streams[i]->codec->codec_name, 32);
ivand_qmul@125 240
ivand_qmul@125 241 stream->audio = (int8_t*)malloc( sizeof(int8_t) * SDL_FFMPEG_MAX_BUFFERED_SAMPLES );
ivand_qmul@125 242 stream->size = 0;
ivand_qmul@125 243 stream->imageBuffer = 0;
ivand_qmul@125 244
ivand_qmul@125 245 file->as[file->AStreams] = stream;
ivand_qmul@125 246 file->AStreams++;
ivand_qmul@125 247
ivand_qmul@125 248 // create semaphore for thread-safe use
ivand_qmul@125 249 stream->sem = SDL_CreateSemaphore(1);
ivand_qmul@125 250 }
ivand_qmul@125 251 }
ivand_qmul@125 252 }
ivand_qmul@125 253 }
ivand_qmul@125 254
ivand_qmul@125 255 return file;
ivand_qmul@125 256 }
ivand_qmul@125 257
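ivand_qmul@125 // SDL_ffmpegGetVideo returns the newest buffered frame whose timestamp is not
ivand_qmul@125 // later than the current audio position; older candidates are freed along the
ivand_qmul@125 // way, and 0 is returned when no such frame is buffered yet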
ivand_qmul@125 258 SDL_Surface* SDL_ffmpegGetVideo(SDL_ffmpegFile* file) {
ivand_qmul@125 259
ivand_qmul@125 260 MainWindow * MWinsA=MainWindow::instance();
ivand_qmul@125 261 if( !SDL_ffmpegValidVideo(file) || file->pause || file->skipVideo) return 0;
ivand_qmul@125 262
ivand_qmul@125 263 SDL_SemWait(file->vs[file->videoStream]->sem);
ivand_qmul@125 264
ivand_qmul@125 265 bufferImage *option = 0;
ivand_qmul@129 266 //int i;
ivand_qmul@129 267 float ratio;
ivand_qmul@129 268 int64_t pos,pos1, pos2, timestamp;
ivand_qmul@129 269 //for(i=0; i<SDL_FFMPEG_MAX_BUFFERED_FRAMES; i++) {
ivand_qmul@129 270 pos=MWinsA->Get_CurAudioTime();
ivand_qmul@125 271
benoitrigolleau@256 272 /* if (pFile)
lbajardsilogic@178 273 {
lbajardsilogic@178 274 fprintf (pFile, "p: \t %u\t", pos);
benoitrigolleau@256 275 }*/
ivand_qmul@129 276 //if (MWinsA->Get_HardwareBufferTime()==0)
ivand_qmul@129 277 // pos1=0;
ivand_qmul@129 278 //else {
ivand_qmul@129 279 // pos1=MWinsA->Get_HardwareBufferTime();
ivand_qmul@129 280 // //fprintf (tFile, "%u\t", pos1);
ivand_qmul@129 281 // int64_t timeTemp;
ivand_qmul@129 282 // QueryPerformanceCounter((LARGE_INTEGER *)(&timeTemp));
ivand_qmul@129 283 //
ivand_qmul@129 284 // pos1=(timeTemp-pos1)/(file->countFreq*hopfactor);
ivand_qmul@129 285 // fprintf (pFile, "%u\t", pos1);
ivand_qmul@129 286 //}
ivand_qmul@129 287 //pos2=pos+pos1;
benoitrigolleau@256 288 /*if (pFile)
lbajardsilogic@178 289 {
lbajardsilogic@178 290 fprintf (pFile, "%u\n", pos);
benoitrigolleau@256 291 }*/
lbajardsilogic@178 292
ivand_qmul@129 293 // consume every buffered frame whose timestamp is not later than the current audio position
ivand_qmul@129 294 while(((file->vs[file->videoStream]->writeImage - file->vs[file->videoStream]->readImage)>0)&&(file->vs[file->videoStream]->imageBuffer[file->vs[file->videoStream]->readImage%SDL_FFMPEG_MAX_BUFFERED_FRAMES]->timestamp <= pos + ((AVFormatContext*)file->_ffmpeg)->start_time/1000))//&& (file->vs[file->videoStream]->imageBuffer[file->vs[file->videoStream]->readImage%SDL_FFMPEG_MAX_BUFFERED_FRAMES]->timestamp >= pos - file->timebase+ ((AVFormatContext*)file->_ffmpeg)->start_time/1000))
ivand_qmul@129 295 {
ivand_qmul@129 296 //pos=MWinsA->Get_CurAudioTime();
ivand_qmul@129 297 //timestamp=file->vs[file->videoStream]->imageBuffer[file->vs[file->videoStream]->readImage%SDL_FFMPEG_MAX_BUFFERED_FRAMES]->timestamp;
ivand_qmul@129 298 //fprintf (tFile, "try: %d %d\n", (pos+ ((AVFormatContext*)file->_ffmpeg)->start_time/1000), timestamp);
ivand_qmul@129 299 // do we have an image that should have been shown?
ivand_qmul@129 300 //if(file->vs[file->videoStream]->imageBuffer[mod(file->vs[file->videoStream]->readImage,SDL_FFMPEG_MAX_BUFFERED_FRAMES)]->timestamp <= pos + (file->vs[file->videoStream]->timeBase)/4+((AVFormatContext*)file->_ffmpeg)->start_time/1000) {
ivand_qmul@129 301
ivand_qmul@129 302 // if this is the first option we find, we simply save it
ivand_qmul@129 303 if(!option) {
ivand_qmul@125 304
ivand_qmul@129 305 option = file->vs[file->videoStream]->imageBuffer[file->vs[file->videoStream]->readImage%SDL_FFMPEG_MAX_BUFFERED_FRAMES];
ivand_qmul@125 306
ivand_qmul@129 307 // set to 0 so we know this position in the buffer is available again
ivand_qmul@129 308 file->vs[file->videoStream]->imageBuffer[file->vs[file->videoStream]->readImage%SDL_FFMPEG_MAX_BUFFERED_FRAMES] = 0;
ivand_qmul@129 309 file->vs[file->videoStream]->readImage++;
ivand_qmul@125 310
ivand_qmul@129 311 } else {
ivand_qmul@125 312
ivand_qmul@129 313 // we found a newer possible timestamp, we delete the older one
ivand_qmul@129 314 if( option->timestamp < file->vs[file->videoStream]->imageBuffer[file->vs[file->videoStream]->readImage%SDL_FFMPEG_MAX_BUFFERED_FRAMES]->timestamp) {
ivand_qmul@125 315
ivand_qmul@129 316 // this image is too old, we discard it
ivand_qmul@129 317 SDL_FreeSurface( option->img );
ivand_qmul@125 318
ivand_qmul@129 319 // free old option
ivand_qmul@129 320 free( option );
ivand_qmul@125 321
ivand_qmul@129 322 // new pointer to position in container
ivand_qmul@129 323 option = file->vs[file->videoStream]->imageBuffer[file->vs[file->videoStream]->readImage%SDL_FFMPEG_MAX_BUFFERED_FRAMES];
ivand_qmul@125 324
ivand_qmul@129 325 // set to 0 so we know this position in the buffer is available again
ivand_qmul@129 326 file->vs[file->videoStream]->imageBuffer[file->vs[file->videoStream]->readImage%SDL_FFMPEG_MAX_BUFFERED_FRAMES] = 0;
ivand_qmul@129 327 file->vs[file->videoStream]->readImage++;
ivand_qmul@129 328 }
ivand_qmul@129 329 else {
ivand_qmul@129 330 file->vs[file->videoStream]->imageBuffer[file->vs[file->videoStream]->readImage%SDL_FFMPEG_MAX_BUFFERED_FRAMES]=0;
ivand_qmul@129 331 file->vs[file->videoStream]->readImage++;
ivand_qmul@129 332 }
ivand_qmul@129 333 }
ivand_qmul@125 334
ivand_qmul@129 335
ivand_qmul@129 336 pos=MWinsA->Get_CurAudioTime();
benoitrigolleau@256 337 /* if (pFile)
lbajardsilogic@178 338 {
lbajardsilogic@178 339 fprintf (pFile, "e:\t%u\t", pos);
benoitrigolleau@256 340 }*/
ivand_qmul@129 341 //if (MWinsA->Get_HardwareBufferTime()==0)
ivand_qmul@129 342 // pos1=0;
ivand_qmul@129 343 //else {
ivand_qmul@129 344 // pos1=MWinsA->Get_HardwareBufferTime();
ivand_qmul@129 345 // //fprintf (tFile, "%u\t", pos1);
ivand_qmul@129 346 // int64_t timeTemp;
ivand_qmul@129 347 // QueryPerformanceCounter((LARGE_INTEGER *)(&timeTemp));
ivand_qmul@129 348
ivand_qmul@129 349 // pos1=(timeTemp-pos1)/(file->countFreq*hopfactor);
ivand_qmul@129 350 // fprintf (pFile, "%u\t", pos1);
ivand_qmul@129 351 //}
ivand_qmul@129 352 //fprintf (pFile, "%u\n", pos2);
ivand_qmul@129 353 //pos2=pos+pos1;
ivand_qmul@129 354 //if (pos<pos2) pos=pos2;
ivand_qmul@129 355 }
ivand_qmul@129 356 //}
ivand_qmul@129 357 //}
ivand_qmul@129 358 int x=file->vs[file->videoStream]->writeImage - file->vs[file->videoStream]->readImage;
ivand_qmul@125 359 // if we did not find an option, we exit
ivand_qmul@125 360 if(!option) {
ivand_qmul@125 361 // release the lock
ivand_qmul@129 362 /*timestamp=0;
ivand_qmul@129 363 int64_t tt=av_gettime()/1000-file->timer;
ivand_qmul@129 364 file->timer=av_gettime()/1000;
ivand_qmul@129 365 realt+=tt;
ivand_qmul@129 366 fprintf (tFile, "%u\t", realt);
ivand_qmul@129 367 fprintf (tFile, "%u\t", tt);
ivand_qmul@129 368 fprintf (tFile, "%u\t", pos);
ivand_qmul@129 369 fprintf (tFile, "%u\n", timestamp);*/
ivand_qmul@125 370 SDL_SemPost(file->vs[file->videoStream]->sem);
ivand_qmul@125 371 return 0;
ivand_qmul@125 372 }
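ivand_qmul@129 // measure the wall-clock interval since the previous frame was handed out
ivand_qmul@129 // (Windows performance counter scaled by countFreq); the result only feeds the
ivand_qmul@129 // commented-out timing log below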
ivand_qmul@129 373 int64_t tt;
ivand_qmul@129 374 QueryPerformanceCounter((LARGE_INTEGER *)(&tt));
ivand_qmul@129 375 tt=tt/(file->countFreq)-file->timer;
ivand_qmul@129 376
ivand_qmul@129 377 QueryPerformanceCounter((LARGE_INTEGER *)(&file->timer));
ivand_qmul@129 378 file->timer=file->timer/(file->countFreq);
ivand_qmul@129 379 realt+=tt;
benoitrigolleau@256 380 /*fprintf (tFile, "%u\t", x);
ivand_qmul@129 381 fprintf (tFile, "%u\t", realt);
benoitrigolleau@256 382 fprintf (tFile, "%u\t", tt);*/
ivand_qmul@138 383 timestamp=(pos-option->timestamp+((AVFormatContext*)file->_ffmpeg)->start_time/1000)/MWinsA->getPlaySpeedVal();
benoitrigolleau@256 384 /*fprintf (tFile, "%u\t", option->timestamp);//+ (file->vs[file->videoStream]->timeBase)/4+((AVFormatContext*)file->_ffmpeg)->start_time/1000);
benoitrigolleau@256 385 fprintf (tFile, "%d\n", timestamp);*/
ivand_qmul@125 386 // we did find an option, so we return the image data
ivand_qmul@125 387 return option->img;
ivand_qmul@125 388 }
ivand_qmul@125 389
ivand_qmul@125 390 int SDL_ffmpegReleaseVideo(SDL_ffmpegFile *file, SDL_Surface *bmp) {
ivand_qmul@125 391
ivand_qmul@125 392 // if there was no valid video stream, we should not release
ivand_qmul@125 393 if( !SDL_ffmpegValidVideo(file) || file->skipVideo) return -1;
ivand_qmul@125 394
ivand_qmul@125 395 // free surface
ivand_qmul@125 396 SDL_FreeSurface(bmp);
ivand_qmul@125 397
ivand_qmul@125 398 // release semaphore if needed
ivand_qmul@125 399 if( !SDL_SemValue(file->vs[file->videoStream]->sem) ) {
ivand_qmul@125 400 SDL_SemPost(file->vs[file->videoStream]->sem);
ivand_qmul@125 401 }
ivand_qmul@125 402
ivand_qmul@125 403 return 0;
ivand_qmul@125 404 }
ivand_qmul@125 405
ivand_qmul@125 406 SDL_ffmpegStream* SDL_ffmpegGetAudioStream(SDL_ffmpegFile *file, int audioID) {
ivand_qmul@125 407
ivand_qmul@125 408 // check if we have any audiostreams
ivand_qmul@125 409 if(!file->AStreams) return 0;
ivand_qmul@125 410
ivand_qmul@125 411 // check if the requested id is possible
ivand_qmul@125 412 if(audioID >= file->AStreams) return 0;
ivand_qmul@125 413
ivand_qmul@125 414 // return audio stream linked to audioID
ivand_qmul@125 415 return file->as[audioID];
ivand_qmul@125 416 }
ivand_qmul@125 417
ivand_qmul@125 418 int SDL_ffmpegSelectAudioStream(SDL_ffmpegFile* file, int audioID) {
ivand_qmul@125 419
ivand_qmul@125 420 // check if we have any audiostreams
ivand_qmul@125 421 if(!file->AStreams) return -1;
ivand_qmul@125 422
ivand_qmul@125 423 // check if the requested id is possible
ivand_qmul@125 424 if(audioID >= file->AStreams) return -1;
ivand_qmul@125 425
ivand_qmul@125 426 // set current audiostream to stream linked to audioID
ivand_qmul@125 427 file->audioStream = audioID;
ivand_qmul@125 428
ivand_qmul@125 429 return 0;
ivand_qmul@125 430 }
ivand_qmul@125 431
ivand_qmul@125 432 SDL_ffmpegStream* SDL_ffmpegGetVideoStream(SDL_ffmpegFile *file, int videoID) {
ivand_qmul@125 433
ivand_qmul@125 434 // check if we have any videostreams
ivand_qmul@125 435 if(!file->VStreams) return 0;
ivand_qmul@125 436
ivand_qmul@125 437 // check if the requested id is possible
ivand_qmul@125 438 if(videoID >= file->VStreams) return 0;
ivand_qmul@125 439
ivand_qmul@125 440 // return video stream linked to videoID
ivand_qmul@125 441 return file->vs[videoID];
ivand_qmul@125 442 }
ivand_qmul@125 443
ivand_qmul@125 444 int SDL_ffmpegSelectVideoStream(SDL_ffmpegFile* file, int videoID) {
ivand_qmul@125 445
ivand_qmul@125 446 // check if we have any videostreams
ivand_qmul@125 447 if(!file->VStreams) return -1;
ivand_qmul@125 448
ivand_qmul@125 449 // check if the requested id is possible
ivand_qmul@125 450 if(videoID >= file->VStreams) return -1;
ivand_qmul@125 451
ivand_qmul@125 452 // set current videostream to stream linked to videoID
ivand_qmul@125 453 file->videoStream = videoID;
ivand_qmul@125 454
ivand_qmul@125 455 return 0;
ivand_qmul@125 456 }
ivand_qmul@125 457
ivand_qmul@125 458 int SDL_ffmpegStartDecoding(SDL_ffmpegFile* file) {
ivand_qmul@125 459
ivand_qmul@125 460 // start a thread that continues to fill audio/video buffers
ivand_qmul@125 461 if(!file->threadID) file->threadID = SDL_CreateThread(SDL_ffmpegDecodeThread, file);
ivand_qmul@125 462
ivand_qmul@125 463 return 0;
ivand_qmul@125 464 }
ivand_qmul@125 465
ivand_qmul@125 466 int SDL_ffmpegStopDecoding(SDL_ffmpegFile* file) {
ivand_qmul@125 467
ivand_qmul@125 468 // stop decode thread
ivand_qmul@125 469 file->threadActive = 0;
ivand_qmul@125 470 if(file->threadID) SDL_WaitThread(file->threadID, 0);
ivand_qmul@125 471
ivand_qmul@125 472 // set threadID to zero, so we can check for concurrent threads
ivand_qmul@125 473 file->threadID = 0;
ivand_qmul@125 474
ivand_qmul@125 475 return -1;
ivand_qmul@125 476 }
ivand_qmul@125 477
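ivand_qmul@125 // decode thread: keeps reading packets from the file and filling the audio and
ivand_qmul@125 // video buffers until threadActive is cleared by SDL_ffmpegStopDecoding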
ivand_qmul@125 478 int SDL_ffmpegDecodeThread(void* data) {
ivand_qmul@125 479 static struct SwsContext *img_convert_ctx;
ivand_qmul@125 480 // unpack the void pointer
ivand_qmul@125 481 SDL_ffmpegFile* file = (SDL_ffmpegFile*)data;
ivand_qmul@125 482
ivand_qmul@125 483 // flag this thread as active, used for stopping
ivand_qmul@125 484 file->threadActive = 1;
ivand_qmul@125 485
ivand_qmul@125 486 // create a packet for our data
ivand_qmul@125 487 AVPacket pack;
ivand_qmul@125 488
ivand_qmul@125 489 // reserve some pointers for use in loop
ivand_qmul@125 490 AVFrame *inFrame, *inFrameRGB;
lbarthelemy@170 491 uint8_t *inVideoBuffer = NULL;
ivand_qmul@125 492
ivand_qmul@125 493 // allocate a frame
ivand_qmul@125 494 inFrame = avcodec_alloc_frame();
ivand_qmul@125 495
ivand_qmul@125 496 // allocate another frame for unknown->RGB conversion
ivand_qmul@125 497 inFrameRGB = avcodec_alloc_frame();
ivand_qmul@125 498
ivand_qmul@125 499 if(SDL_ffmpegValidVideo(file)) {
ivand_qmul@125 500 // allocate buffer
lbarthelemy@170 501 inVideoBuffer = (uint8_t*)malloc( avpicture_get_size(file->vs[file->videoStream]->pixFmt,
ivand_qmul@125 502 file->vs[file->videoStream]->width,
ivand_qmul@125 503 file->vs[file->videoStream]->height) );
ivand_qmul@125 504
ivand_qmul@125 505 // put buffer into our reserved frame
ivand_qmul@125 506 avpicture_fill( (AVPicture*)inFrameRGB,
ivand_qmul@125 507 inVideoBuffer,
ivand_qmul@125 508 file->vs[file->videoStream]->pixFmt,
ivand_qmul@125 509 file->vs[file->videoStream]->width,
ivand_qmul@125 510 file->vs[file->videoStream]->height);
ivand_qmul@125 511 }
ivand_qmul@125 512
ivand_qmul@125 513 // allocate temporary audiobuffer
ivand_qmul@125 514 int16_t *samples = (int16_t*)malloc( AVCODEC_MAX_AUDIO_FRAME_SIZE );
ivand_qmul@125 515
ivand_qmul@125 516 // reserve integer for use in loop
ivand_qmul@125 517 int got_frame;
ivand_qmul@125 518
ivand_qmul@125 519 while(file->threadActive) {
ivand_qmul@125 520
ivand_qmul@125 521 // read a packet from the file
ivand_qmul@125 522 if(av_read_frame((AVFormatContext *)(file->_ffmpeg), &pack) < 0) {
ivand_qmul@125 523 // thread is idle
ivand_qmul@125 524 SDL_Delay(10);
ivand_qmul@125 525 continue;
ivand_qmul@125 526 }
ivand_qmul@125 527 if (file->skipAudio && SDL_ffmpegValidAudio(file) && pack.stream_index == file->as[file->audioStream]->id){
ivand_qmul@125 528 SDL_Delay(1);
ivand_qmul@125 529 continue;
ivand_qmul@125 530 }
ivand_qmul@125 531
ivand_qmul@125 532 // we got a packet, lets handle it
ivand_qmul@125 533
ivand_qmul@125 534 // let's start by entering the video semaphore
ivand_qmul@125 535 SDL_SemWait(file->decode);
ivand_qmul@125 536
ivand_qmul@125 537 // If it's an audio packet from our stream...
ivand_qmul@125 538 if( SDL_ffmpegValidAudio(file) && pack.stream_index == file->as[file->audioStream]->id && !file->skipAudio) {
ivand_qmul@125 539
ivand_qmul@125 540 uint8_t *data = pack.data;
ivand_qmul@125 541 int size = pack.size;
ivand_qmul@125 542 int len;
ivand_qmul@125 543
ivand_qmul@125 544 while(size > 0 && file->threadActive) {
ivand_qmul@125 545
ivand_qmul@125 546 // Decode the packet
ivand_qmul@125 547 len = avcodec_decode_audio((AVCodecContext *)(file->as[file->audioStream]->_ffmpeg), samples, &got_frame, data, size);
ivand_qmul@125 548
ivand_qmul@125 549 // if error, we skip the frame
ivand_qmul@125 550 if(len < 0 || !got_frame) {
ivand_qmul@125 551 size = 0;
ivand_qmul@125 552 break;
ivand_qmul@125 553 }
ivand_qmul@125 554
ivand_qmul@125 555 // advance past the bytes the decoder consumed from the packet
ivand_qmul@125 556 data += len;
ivand_qmul@125 557 size -= len;
ivand_qmul@125 558
ivand_qmul@125 559 // if the audiobuffer is full, the thread waits
ivand_qmul@125 560 while( file->as[file->audioStream]->size + got_frame > SDL_FFMPEG_MAX_BUFFERED_SAMPLES &&
ivand_qmul@125 561 file->threadActive) {
ivand_qmul@125 562 SDL_Delay(5);
ivand_qmul@125 563 }
ivand_qmul@125 564
ivand_qmul@125 565 // write an audiopts
ivand_qmul@125 566 int64_t audiopts = pack.pts * file->as[file->audioStream]->timeBase;
ivand_qmul@125 567
ivand_qmul@125 568 // if the audioBuffer is empty
ivand_qmul@125 569 if(!file->as[file->audioStream]->size) {
ivand_qmul@125 570
ivand_qmul@125 571 // we set a new pts
ivand_qmul@125 572 file->as[file->audioStream]->hardPts = file->as[file->audioStream]->pts = audiopts;
ivand_qmul@125 573
ivand_qmul@125 574 // we set totalBytes to zero, as this represents the number
ivand_qmul@125 575 // of bytes that were played since our last 'hardPts'
ivand_qmul@125 576 file->as[file->audioStream]->totalBytes = 0;
ivand_qmul@125 577 }
ivand_qmul@125 578
ivand_qmul@125 579 // no need to store old samples
ivand_qmul@125 580 if(audiopts >= SDL_ffmpegGetPosition(file)) {
ivand_qmul@125 581
ivand_qmul@125 582 // enter audio semaphore
ivand_qmul@125 583 SDL_SemWait(file->as[file->audioStream]->sem);
ivand_qmul@125 584
ivand_qmul@125 585 // copy data from temporary buffer to streambuffer
ivand_qmul@125 586 memcpy(file->as[file->audioStream]->audio+file->as[file->audioStream]->size, samples, got_frame);
ivand_qmul@125 587
ivand_qmul@125 588 // set the new size of the audiobuffer
ivand_qmul@125 589 file->as[file->audioStream]->size += got_frame;
ivand_qmul@125 590
ivand_qmul@125 591 // we leave the audio semaphore
ivand_qmul@125 592 SDL_SemPost(file->as[file->audioStream]->sem);
ivand_qmul@125 593 }
ivand_qmul@125 594 }
ivand_qmul@125 595 }
ivand_qmul@125 596
ivand_qmul@125 597 // If it's a video packet from our video stream...
ivand_qmul@125 598 if( SDL_ffmpegValidVideo(file) && pack.stream_index == file->vs[file->videoStream]->id && !file->skipVideo) {
ivand_qmul@125 599
ivand_qmul@125 600 got_frame = 0;
ivand_qmul@129 601 //Time1=av_gettime();
ivand_qmul@125 602 // Decode the packet
ivand_qmul@125 603 avcodec_decode_video((AVCodecContext *)(file->vs[file->videoStream]->_ffmpeg), inFrame, &got_frame, pack.data, pack.size);
ivand_qmul@125 604
ivand_qmul@125 605 if(got_frame) {
ivand_qmul@125 606
ivand_qmul@125 607 // create imagebuffer
ivand_qmul@125 608 bufferImage *buf = (bufferImage*)malloc( sizeof(bufferImage) );
ivand_qmul@125 609
ivand_qmul@125 610 // write timestamp into the buffer
ivand_qmul@125 611 buf->timestamp = file->vs[file->videoStream]->timeBase * pack.dts;
ivand_qmul@125 612
ivand_qmul@125 613 // useful when dealing with B frames
ivand_qmul@125 614 if(pack.dts == AV_NOPTS_VALUE) {
ivand_qmul@125 615 // if we did not get a valid timestamp, we make one up based on the last
ivand_qmul@125 616 // valid timestamp + the duration of a frame
ivand_qmul@125 617 buf->timestamp = file->vs[file->videoStream]->lastTimeStamp + file->vs[file->videoStream]->timeBase;
ivand_qmul@125 618 }
ivand_qmul@125 619
ivand_qmul@125 620 // if new timestamp is from future, we proceed
ivand_qmul@125 621 // if(buf->timestamp >= SDL_ffmpegGetPosition(file))
ivand_qmul@125 622 // {
benoitrigolleau@256 623 int w=zoomWivan;//(int)(zoomFivan*320+0.5);
benoitrigolleau@256 624 int h=zoomHivan;//(int)(zoomFivan*240+0.5);
lbarthelemy@170 625 //if ((w>file->vs[file->videoStream]->width)||(h>file->vs[file->videoStream]->height)){
lbarthelemy@170 626 // w=file->vs[file->videoStream]->width;
lbarthelemy@170 627 // h=file->vs[file->videoStream]->height;
lbarthelemy@170 628 //}
lbarthelemy@169 629 // Be sure we have a multiple of 4
lbarthelemy@169 630 w &= 0xFFFFFFFC;
lbarthelemy@169 631 h &= 0xFFFFFFFC;
ivand_qmul@150 632 if (img_convert_ctx == NULL) {
lbarthelemy@169 633
ivand_qmul@125 634 img_convert_ctx = sws_getContext(file->vs[file->videoStream]->width, file->vs[file->videoStream]->height,
ivand_qmul@125 635 ((AVCodecContext*)file->vs[file->videoStream]->_ffmpeg)->pix_fmt,
ivand_qmul@150 636 w,h,
ivand_qmul@125 637 file->vs[file->videoStream]->pixFmt,
lbarthelemy@169 638 SWS_FAST_BILINEAR, NULL, NULL, NULL);
ivand_qmul@125 639 if (img_convert_ctx == NULL) {
ivand_qmul@125 640 fprintf(stderr, "Cannot initialize the conversion context\n");
ivand_qmul@125 641 exit(1);
ivand_qmul@125 642 }
ivand_qmul@125 643 }
lbarthelemy@170 644
lbarthelemy@170 645 // check whether the buffer still matches the requested output width
lbarthelemy@170 646 if (inFrameRGB->linesize[0]/3 != w ) {
lbarthelemy@170 647 av_free(inFrameRGB);
lbarthelemy@170 648 free(inVideoBuffer);
lbarthelemy@170 649 //avcodec_default_release_buffer(img_convert_ctx , inFrameRGB);
lbarthelemy@170 650 inFrameRGB = avcodec_alloc_frame();
lbarthelemy@170 651 // allocate buffer
lbarthelemy@170 652 inVideoBuffer = (uint8_t*)malloc( avpicture_get_size(file->vs[file->videoStream]->pixFmt,
lbarthelemy@170 653 w,
lbarthelemy@170 654 h) );
lbarthelemy@170 655
lbarthelemy@170 656 // put buffer into our reserved frame
lbarthelemy@170 657 avpicture_fill( (AVPicture*)inFrameRGB,
lbarthelemy@170 658 inVideoBuffer,
lbarthelemy@170 659 file->vs[file->videoStream]->pixFmt,
lbarthelemy@170 660 w,
lbarthelemy@170 661 h);
lbarthelemy@170 662
lbarthelemy@170 663 }
lbarthelemy@170 664
ivand_qmul@150 665 ((AVPicture*)inFrameRGB)->linesize[0]=(int)w*3;
lbarthelemy@170 666 sws_scale(img_convert_ctx,
lbarthelemy@170 667 ((AVPicture*)inFrame)->data, ((AVPicture*)inFrame)->linesize, 0, file->vs[file->videoStream]->height,
lbarthelemy@170 668 ((AVPicture*)inFrameRGB)->data, ((AVPicture*)inFrameRGB)->linesize);
lbarthelemy@170 669
lbarthelemy@170 670 sws_freeContext(img_convert_ctx);
lbarthelemy@170 671 img_convert_ctx=NULL;
lbarthelemy@170 672
lbarthelemy@170 673 // we convert whatever type of data we got to RGB24
ivand_qmul@125 674 /* img_convert((AVPicture*)inFrameRGB,
ivand_qmul@125 675 file->vs[file->videoStream]->pixFmt,
ivand_qmul@125 676 (AVPicture*)inFrame,
ivand_qmul@125 677 ((AVCodecContext*)file->vs[file->videoStream]->_ffmpeg)->pix_fmt,
ivand_qmul@125 678 file->vs[file->videoStream]->width,
ivand_qmul@125 679 file->vs[file->videoStream]->height);
ivand_qmul@125 680 */
lbarthelemy@170 681
ivand_qmul@125 682 // allocate image room
ivand_qmul@125 683 buf->img = SDL_CreateRGBSurface(SDL_SWSURFACE,
ivand_qmul@150 684 w,
ivand_qmul@150 685 h,
ivand_qmul@125 686 24, 0x0000FF, 0x00FF00, 0xFF0000, 0);
ivand_qmul@125 687 // copy image data to image room
ivand_qmul@125 688 memcpy(buf->img->pixels, inFrameRGB->data[0],
ivand_qmul@150 689 w*h* 3);
ivand_qmul@129 690 file->timebase=buf->timestamp-file->vs[file->videoStream]->lastTimeStamp;
ivand_qmul@125 691 // we write the lastTimestamp we got
ivand_qmul@125 692 file->vs[file->videoStream]->lastTimeStamp = buf->timestamp;
ivand_qmul@125 693
ivand_qmul@129 694 //int i;
ivand_qmul@125 695 int again = 1;
ivand_qmul@129 696 //Time=av_gettime()-Time1;
ivand_qmul@125 697
ivand_qmul@129 698 //fprintf (pFile, "%d \n",Time);
ivand_qmul@125 699 // keep trying to fit in buffer, until the data was actually placed in the buffer
ivand_qmul@125 700 while(again && file->threadActive) {
ivand_qmul@125 701
ivand_qmul@125 702 // we enter the video semaphore
ivand_qmul@125 703 SDL_SemWait(file->vs[file->videoStream]->sem);
ivand_qmul@125 704
ivand_qmul@125 705 // check whether the ring buffer still has room
ivand_qmul@125 706 // for another frame
ivand_qmul@129 707 //for(i=0; i<SDL_FFMPEG_MAX_BUFFERED_FRAMES; i++) {
ivand_qmul@125 708 // if this place in the buffer is empty we write our new frame
ivand_qmul@129 709 if((file->vs[file->videoStream]->writeImage - file->vs[file->videoStream]->readImage) < SDL_FFMPEG_MAX_BUFFERED_FRAMES) {
ivand_qmul@129 710 file->vs[file->videoStream]->imageBuffer[file->vs[file->videoStream]->writeImage%SDL_FFMPEG_MAX_BUFFERED_FRAMES] = buf;
ivand_qmul@129 711 file->vs[file->videoStream]->writeImage++;
ivand_qmul@125 712 // we placed our image in the buffer, moving on
ivand_qmul@125 713 again = 0;
ivand_qmul@129 714
ivand_qmul@125 715 }
ivand_qmul@129 716 //}
ivand_qmul@125 717
ivand_qmul@125 718 // we leave the video semaphore
ivand_qmul@125 719 SDL_SemPost(file->vs[file->videoStream]->sem);
ivand_qmul@129 720
ivand_qmul@125 721 // frames aren't being released every ms, so we can take some
ivand_qmul@125 722 // time before we try and fit our new image again
ivand_qmul@129 723 if(again)
ivand_qmul@129 724 {
ivand_qmul@129 725 SDL_SemPost(file->decode);
ivand_qmul@129 726 SDL_Delay(3);
ivand_qmul@129 727 SDL_SemWait(file->decode);
ivand_qmul@129 728 }
ivand_qmul@125 729 }
ivand_qmul@125 730 // }
ivand_qmul@125 731 //else {
ivand_qmul@125 732 // // if our decoded frame was too old, we don't bother putting
ivand_qmul@125 733 // // it in our buffer
ivand_qmul@125 734 // free( buf );
ivand_qmul@125 735 // }
ivand_qmul@125 736 }
ivand_qmul@125 737 }
ivand_qmul@125 738 // we leave the decode semaphore
ivand_qmul@125 739 SDL_SemPost(file->decode);
ivand_qmul@125 740 if ((file->skipAudio)&&(file->delay))
ivand_qmul@125 741 SDL_Delay(3);
ivand_qmul@125 742 }
lbarthelemy@170 743
lbarthelemy@170 744 if (inVideoBuffer)
lbarthelemy@170 745 {
lbarthelemy@170 746 free(inVideoBuffer);
lbarthelemy@170 747 inVideoBuffer = NULL;
lbarthelemy@170 748 }
lbarthelemy@170 749
ivand_qmul@125 750 // if we stop this thread, we can release the packet we reserved
ivand_qmul@125 751 av_free_packet(&pack);
lbarthelemy@170 752 free(samples);
lbarthelemy@170 753 av_free(inFrameRGB);
lbarthelemy@170 754 av_free(inFrame);
ivand_qmul@125 755
ivand_qmul@125 756 return 0;
ivand_qmul@125 757 }
ivand_qmul@125 758
ivand_qmul@125 759 int SDL_ffmpegSeek(SDL_ffmpegFile* file, int64_t timestamp) {
ivand_qmul@125 760
ivand_qmul@125 761 // if the seekposition is out of bounds, return
ivand_qmul@125 762 if(timestamp >= SDL_ffmpegGetDuration(file)) return -1;
ivand_qmul@125 763
ivand_qmul@125 764 // start by flushing the buffers
ivand_qmul@125 765 SDL_ffmpegFlush(file);
ivand_qmul@125 766
ivand_qmul@125 767 // we enter the decode semaphore so the decode thread cannot be working on
ivand_qmul@125 768 // data we are trying to flush
ivand_qmul@125 769 SDL_SemWait(file->decode);
ivand_qmul@125 770
ivand_qmul@125 771 // if the stream has an offset, add it to the start time
ivand_qmul@125 772 int64_t startOffset = 0;
ivand_qmul@125 773 if(((AVFormatContext*)file->_ffmpeg)->start_time != AV_NOPTS_VALUE) {
ivand_qmul@125 774 // inFormatCtx->start_time is in AV_TIME_BASE fractional seconds
ivand_qmul@125 775 startOffset = ((AVFormatContext*)file->_ffmpeg)->start_time;
ivand_qmul@125 776 }
ivand_qmul@125 777 //if (file->skipAudio) startOffset=0;
ivand_qmul@125 778 // calculate the final timestamp for the seek action; this is in AV_TIME_BASE fractional seconds
ivand_qmul@125 779 startOffset += (timestamp * AV_TIME_BASE) / 1000;
ivand_qmul@125 780
ivand_qmul@125 781 // do the actual seeking, AVSEEK_FLAG_BACKWARD means we jump to the point
ivand_qmul@125 782 // closest to the point we want, resulting in an earlier position if the jump
ivand_qmul@125 783 // could not go to the exact point we wanted
ivand_qmul@125 784 if(av_seek_frame((AVFormatContext *)(file->_ffmpeg), -1, startOffset, AVSEEK_FLAG_BACKWARD|AVSEEK_FLAG_ANY) >= 0) {
ivand_qmul@125 785 SDL_Delay(5);
ivand_qmul@125 786 // set some values in our file so we know where to start playing
ivand_qmul@125 787 file->offset = timestamp;
ivand_qmul@125 788 file->startTime = av_gettime()/1000;//SDL_GetTicks();
ivand_qmul@125 789
ivand_qmul@125 790 // if we have a valid video, we probably have some data we want to flush
ivand_qmul@125 791 if( SDL_ffmpegValidVideo(file) && !file->skipVideo) {
ivand_qmul@125 792
ivand_qmul@125 793 // flushing happens inside the semaphore so as not to interfere with the
ivand_qmul@125 794 // decoding thread
ivand_qmul@125 795 SDL_SemWait(file->vs[file->videoStream]->sem);
ivand_qmul@125 796 avcodec_flush_buffers((AVCodecContext *)(file->vs[file->videoStream]->_ffmpeg));
ivand_qmul@125 797 SDL_SemPost(file->vs[file->videoStream]->sem);
ivand_qmul@125 798 }
ivand_qmul@125 799
ivand_qmul@125 800 // same goes for audio: if there is data, we flush it
ivand_qmul@125 801 if( SDL_ffmpegValidAudio(file)&& !file->skipAudio ) {
ivand_qmul@125 802
ivand_qmul@125 803 // make sure this is done thread-safe, so inside the appropriate
ivand_qmul@125 804 // semaphore
ivand_qmul@125 805 SDL_SemWait(file->as[file->audioStream]->sem);
ivand_qmul@125 806 avcodec_flush_buffers((AVCodecContext *)(file->as[file->audioStream]->_ffmpeg));
ivand_qmul@125 807 SDL_SemPost(file->as[file->audioStream]->sem);
ivand_qmul@125 808 }
ivand_qmul@125 809
ivand_qmul@125 810 // then there is our flush call
ivand_qmul@125 811 SDL_ffmpegFlush(file);
ivand_qmul@125 812
ivand_qmul@125 813 // and we are done, lets release the decode semaphore so the decode
ivand_qmul@125 814 // thread can move on, filling buffer from our new position
ivand_qmul@125 815 SDL_SemPost(file->decode);
ivand_qmul@125 816
ivand_qmul@125 817 return 0;
ivand_qmul@125 818 }
ivand_qmul@125 819
ivand_qmul@125 820 // if, for some reason, we could not seek, we still should flush our buffers
ivand_qmul@125 821 SDL_ffmpegFlush(file);
ivand_qmul@125 822
ivand_qmul@125 823 // and release our lock on the decodethread
ivand_qmul@125 824 SDL_SemPost(file->decode);
ivand_qmul@125 825
ivand_qmul@125 826 return -1;
ivand_qmul@125 827 }
ivand_qmul@125 828
ivand_qmul@125 829 int SDL_ffmpegSeekRelative(SDL_ffmpegFile *file, int64_t timestamp) {
ivand_qmul@125 830
ivand_qmul@125 831 // same thing as normal seek, just take into account the current position
ivand_qmul@125 832 return SDL_ffmpegSeek(file, SDL_ffmpegGetPosition(file) + timestamp);
ivand_qmul@125 833 }
ivand_qmul@125 834
ivand_qmul@125 835 int SDL_ffmpegFlush(SDL_ffmpegFile *file) {
ivand_qmul@125 836
ivand_qmul@125 837 // if we have a valid audio stream, we flush it
ivand_qmul@125 838 if( SDL_ffmpegValidAudio(file)&& !file->skipAudio ) {
ivand_qmul@125 839
ivand_qmul@125 840 // flush audiobuffer from semaphore, be thread-safe!
ivand_qmul@125 841 SDL_SemWait(file->as[file->audioStream]->sem);
ivand_qmul@125 842
ivand_qmul@125 843 file->as[file->audioStream]->size = 0;
ivand_qmul@125 844
ivand_qmul@125 845 SDL_SemPost(file->as[file->audioStream]->sem);
ivand_qmul@125 846 }
ivand_qmul@125 847
ivand_qmul@125 848 // if we have a valid video stream, we flush some more
ivand_qmul@125 849 if( SDL_ffmpegValidVideo(file) && !file->skipVideo) {
ivand_qmul@125 850
ivand_qmul@125 851 // flush videobuffer
ivand_qmul@125 852 int i;
ivand_qmul@125 853
ivand_qmul@125 854 // again, be thread safe!
ivand_qmul@125 855 SDL_SemWait(file->vs[file->videoStream]->sem);
ivand_qmul@125 856
ivand_qmul@125 857 // make sure we delete all frames from buffer
ivand_qmul@125 858 for(i=0; i<SDL_FFMPEG_MAX_BUFFERED_FRAMES; i++) {
ivand_qmul@125 859
ivand_qmul@125 860 // if this entry does not exist, continue
ivand_qmul@125 861 if(!file->vs[file->videoStream]->imageBuffer[i]) continue;
ivand_qmul@125 862
ivand_qmul@125 863 // free the actual image data
ivand_qmul@125 864 SDL_FreeSurface( file->vs[file->videoStream]->imageBuffer[i]->img );
ivand_qmul@125 865
ivand_qmul@125 866 // and free the struct containing it
ivand_qmul@125 867 free( file->vs[file->videoStream]->imageBuffer[i] );
ivand_qmul@125 868
ivand_qmul@125 869 // set position in buffer to 0, so we know it is empty
ivand_qmul@125 870 file->vs[file->videoStream]->imageBuffer[i] = 0;
ivand_qmul@125 871 }
ivand_qmul@129 872 file->vs[file->videoStream]->writeImage=0;
ivand_qmul@129 873 file->vs[file->videoStream]->readImage=0;
ivand_qmul@125 874 SDL_SemPost(file->vs[file->videoStream]->sem);
ivand_qmul@125 875 }
ivand_qmul@125 876
ivand_qmul@125 877 return 0;
ivand_qmul@125 878 }
ivand_qmul@125 879
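ivand_qmul@125 // SDL_ffmpegGetAudio hands the caller at most *len bytes from the audio buffer
ivand_qmul@125 // and updates the stream pts; the audio semaphore stays locked until
ivand_qmul@125 // SDL_ffmpegReleaseAudio is called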
ivand_qmul@125 880 int8_t* SDL_ffmpegGetAudio(SDL_ffmpegFile *file, int *len) {
ivand_qmul@125 881
ivand_qmul@125 882 // no valid audio, means no audio to get
ivand_qmul@125 883 if( !SDL_ffmpegValidAudio(file) || file->pause||file->skipAudio ) return 0;
ivand_qmul@125 884
ivand_qmul@125 885 // working on audiobuffer should always be done from semaphore
ivand_qmul@125 886 SDL_SemWait(file->as[file->audioStream]->sem);
ivand_qmul@125 887
ivand_qmul@125 888 // if we ask for more audiodata than we can give, we send what we can
ivand_qmul@125 889 // actually give, writing the number of bytes into len
ivand_qmul@125 890 if(*len > file->as[file->audioStream]->size) *len = file->as[file->audioStream]->size;
ivand_qmul@125 891
ivand_qmul@125 892 // decrease the size of our audiobuffer by len
ivand_qmul@125 893 file->as[file->audioStream]->size -= *len;
ivand_qmul@125 894
ivand_qmul@125 895 // len represents the number of bytes we sent, so we increase the total
ivand_qmul@125 896 file->as[file->audioStream]->totalBytes += *len;
ivand_qmul@125 897
ivand_qmul@125 898 // the videooffset makes sure we are always in sync with the audio
ivand_qmul@125 899 // it is actually the difference between the position where we are in the
ivand_qmul@125 900 // stream (GetPosition) and where we should be (pts)
ivand_qmul@125 901 // we use the same offset when selecting the current videoframe
ivand_qmul@125 902 file->videoOffset = SDL_ffmpegGetPosition(file) - file->as[file->audioStream]->pts;
ivand_qmul@125 903
ivand_qmul@125 904 // we calculate the new pts for our audiodata based on the hardPts
ivand_qmul@125 905 // (that is the one we got from ffmpeg) and then calculating how far we
ivand_qmul@125 906 // have come since
ivand_qmul@125 907 file->as[file->audioStream]->pts = file->as[file->audioStream]->hardPts;
ivand_qmul@125 908 // since we use 16 bits per sample, we divide totalBytes by 2 before dividing by samplerate
ivand_qmul@125 909 file->as[file->audioStream]->pts += ((double)file->as[file->audioStream]->totalBytes / (2 * file->as[file->audioStream]->channels)) / (file->as[file->audioStream]->sampleRate / 1000.0);
ivand_qmul@125 910
ivand_qmul@125 911 // we return the audiobuffer, notice we are still in the audiosemaphore!
ivand_qmul@125 912 // we only leave this by calling SDL_ffmpegReleaseAudio
ivand_qmul@125 913 return file->as[file->audioStream]->audio;
ivand_qmul@125 914 }
ivand_qmul@125 915
ivand_qmul@125 916 int SDL_ffmpegReleaseAudio(SDL_ffmpegFile *file, int len) {
ivand_qmul@125 917
ivand_qmul@125 918 // no audio, means no releasing
ivand_qmul@125 919 if( !SDL_ffmpegValidAudio(file) || file->skipAudio) return -1;
ivand_qmul@125 920
ivand_qmul@125 921 // this call should be paired with SDL_ffmpegGetAudio, as it provides us
ivand_qmul@125 922 // with the correct length so we move the correct amount of data
ivand_qmul@125 923 memmove( file->as[file->audioStream]->audio,
ivand_qmul@125 924 file->as[file->audioStream]->audio+len,
ivand_qmul@125 925 file->as[file->audioStream]->size );
ivand_qmul@125 926
ivand_qmul@125 927 // work on audiodata is done, so we release the semaphore
ivand_qmul@125 928 SDL_SemPost(file->as[file->audioStream]->sem);
ivand_qmul@125 929
ivand_qmul@125 930 return 0;
ivand_qmul@125 931 }
ivand_qmul@125 932
ivand_qmul@125 933 int64_t SDL_ffmpegGetPosition(SDL_ffmpegFile *file) {
ivand_qmul@125 934 //MainWindow * MWinsA=MainWindow::instance();
ivand_qmul@125 935
ivand_qmul@125 936 if (file->skipAudio){
ivand_qmul@125 937 return (av_gettime()/1000+ file->offset - file->startTime);
ivand_qmul@125 938 //int64_t pos=MWinsA->Get_CurAudioTime();
ivand_qmul@125 939 //return (pos + ((AVFormatContext*)file->_ffmpeg)->start_time/1000);//SDL_GetTicks();
ivand_qmul@125 940 }
ivand_qmul@125 941 else
ivand_qmul@125 942 return (av_gettime()/1000+ file->offset - file->startTime);//SDL_GetTicks();
ivand_qmul@125 943 // return the current playposition of our file
ivand_qmul@125 944
ivand_qmul@125 945 }
ivand_qmul@125 946
ivand_qmul@125 947 SDL_AudioSpec* SDL_ffmpegGetAudioSpec(SDL_ffmpegFile *file, int samples, void *callback) {
ivand_qmul@125 948
ivand_qmul@125 949 // create audio spec
ivand_qmul@125 950 SDL_AudioSpec *spec = (SDL_AudioSpec*)malloc( sizeof(SDL_AudioSpec) );
ivand_qmul@125 951
ivand_qmul@125 952 if(spec) {
ivand_qmul@125 953 spec->format = AUDIO_S16SYS;
ivand_qmul@125 954 spec->samples = samples;
ivand_qmul@125 955 spec->userdata = file;
ivand_qmul@125 956 spec->callback = (void (__cdecl *)(void *,Uint8 *,int))(callback);
ivand_qmul@125 957 spec->freq = 48000;
ivand_qmul@125 958 spec->channels = 2;
ivand_qmul@125 959
ivand_qmul@125 960 // if we have a valid audiofile, we can use its data to create a
ivand_qmul@125 961 // more appropriate audio spec
ivand_qmul@125 962 if( SDL_ffmpegValidAudio(file) && !file->skipAudio ) {
ivand_qmul@125 963 spec->freq = file->as[file->audioStream]->sampleRate;
ivand_qmul@125 964 spec->channels = file->as[file->audioStream]->channels;
ivand_qmul@125 965 }
ivand_qmul@125 966 }
ivand_qmul@125 967
ivand_qmul@125 968 return spec;
ivand_qmul@125 969 }
ivand_qmul@125 970
ivand_qmul@125 971 int64_t SDL_ffmpegGetDuration(SDL_ffmpegFile *file) {
ivand_qmul@125 972
ivand_qmul@125 973 // returns the duration of the entire file, please note that ffmpeg doesn't
ivand_qmul@125 974 // always get this value right! so don't bet your life on it...
ivand_qmul@125 975 return ((AVFormatContext*)file->_ffmpeg)->duration / (AV_TIME_BASE / 1000);
ivand_qmul@125 976 }
ivand_qmul@125 977
ivand_qmul@125 978 int SDL_ffmpegGetVideoSize(SDL_ffmpegFile *file, int *w, int *h) {
ivand_qmul@125 979
ivand_qmul@125 980 if(!w || !h) return -1;
ivand_qmul@125 981
ivand_qmul@125 982 // if we have a valid video file selected, we use it
ivand_qmul@125 983 // if not, we send default values and return.
ivand_qmul@125 984 // by checking the return value you can check if you got a valid size
ivand_qmul@125 985 if( SDL_ffmpegValidVideo(file) && !file->skipVideo) {
ivand_qmul@125 986 *w = file->vs[file->videoStream]->width;
ivand_qmul@125 987 *h = file->vs[file->videoStream]->height;
ivand_qmul@125 988 return 0;
ivand_qmul@125 989 }
ivand_qmul@125 990
ivand_qmul@125 991 *w = 320;
ivand_qmul@125 992 *h = 240;
ivand_qmul@125 993 return -1;
ivand_qmul@125 994 }
ivand_qmul@125 995
ivand_qmul@125 996 int SDL_ffmpegValidAudio(SDL_ffmpegFile* file) {
ivand_qmul@125 997
ivand_qmul@125 998 // this function is used to check if we selected a valid audio stream
ivand_qmul@125 999 if(file->audioStream < 0 || file->audioStream >= file->AStreams) return 0;
ivand_qmul@125 1000
ivand_qmul@125 1001 return 1;
ivand_qmul@125 1002 }
ivand_qmul@125 1003
ivand_qmul@125 1004 int SDL_ffmpegValidVideo(SDL_ffmpegFile* file) {
ivand_qmul@125 1005
ivand_qmul@125 1006 // this function is used to check if we selected a valid video stream
ivand_qmul@125 1007 if(file->videoStream < 0 || file->videoStream >= file->VStreams) return 0;
ivand_qmul@125 1008
ivand_qmul@125 1009 return 1;
ivand_qmul@125 1010 }
ivand_qmul@125 1011
ivand_qmul@125 1012 int SDL_ffmpegPause(SDL_ffmpegFile *file, int state) {
ivand_qmul@125 1013
ivand_qmul@125 1014 // by putting 0 into state, we play the file
ivand_qmul@125 1015 // this behaviour is analogous to SDL audio
ivand_qmul@125 1016 file->pause = state;
ivand_qmul@125 1017
ivand_qmul@125 1018 if(!file->pause) {
ivand_qmul@125 1019 file->startTime = av_gettime()/1000;//SDL_GetTicks();
ivand_qmul@125 1020 }
ivand_qmul@125 1021
ivand_qmul@125 1022 return 0;
ivand_qmul@125 1023 }
ivand_qmul@125 1024
ivand_qmul@125 1025 int SDL_ffmpegGetState(SDL_ffmpegFile *file) {
ivand_qmul@125 1026 return file->pause;
ivand_qmul@125 1027 }