annotate ffmpeg/libavcodec/crystalhd.c @ 13:844d341cf643 tip

rev   line source
yading@10 1 /*
yading@10 2 * - CrystalHD decoder module -
yading@10 3 *
yading@10 4 * Copyright(C) 2010,2011 Philip Langdale <ffmpeg.philipl@overt.org>
yading@10 5 *
yading@10 6 * This file is part of FFmpeg.
yading@10 7 *
yading@10 8 * FFmpeg is free software; you can redistribute it and/or
yading@10 9 * modify it under the terms of the GNU Lesser General Public
yading@10 10 * License as published by the Free Software Foundation; either
yading@10 11 * version 2.1 of the License, or (at your option) any later version.
yading@10 12 *
yading@10 13 * FFmpeg is distributed in the hope that it will be useful,
yading@10 14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
yading@10 15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
yading@10 16 * Lesser General Public License for more details.
yading@10 17 *
yading@10 18 * You should have received a copy of the GNU Lesser General Public
yading@10 19 * License along with FFmpeg; if not, write to the Free Software
yading@10 20 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
yading@10 21 */
yading@10 22
yading@10 23 /*
yading@10 24 * - Principles of Operation -
yading@10 25 *
yading@10 26 * The CrystalHD decoder operates at the bitstream level - which is an even
yading@10 27 * higher level than the decoding hardware you typically see in modern GPUs.
yading@10 28 * This means it has a very simple interface, in principle. You feed demuxed
yading@10 29 * packets in one end and get decoded picture (fields/frames) out the other.
yading@10 30 *
yading@10 31 * Of course, nothing is ever that simple. Due, at the very least, to b-frame
yading@10 32 * dependencies in the supported formats, the hardware has a delay between
yading@10 33 * when a packet goes in, and when a picture comes out. Furthermore, this delay
yading@10 34 * is not just a function of time, but also of how many additional frames
yading@10 35 * must be fed into the decoder to satisfy the b-frame dependencies.
yading@10 36 *
yading@10 37 * As such, a pipeline will build up that is roughly equivalent to the required
yading@10 38 * DPB for the file being played. If that was all it took, things would still
yading@10 39 * be simple - so, of course, it isn't.
yading@10 40 *
yading@10 41 * The hardware has a way of indicating that a picture is ready to be copied out,
yading@10 42 * but this is unreliable - sometimes the copy attempt will still fail. Based on
yading@10 43 * testing, the code therefore waits until 3 pictures are ready before starting
yading@10 44 * to copy out - and this has the effect of extending the pipeline.
yading@10 45 *
yading@10 46 * Moreover, while it is tempting to say that once the decoder starts outputting
yading@10 47 * frames, the software should never fail to return a frame from a decode(),
yading@10 48 * this is a hard assertion to make, because the stream may switch between
yading@10 49 * differently encoded content (number of b-frames, interlacing, etc) which
yading@10 50 * might require a longer pipeline than before. If that happened, you could
yading@10 51 * deadlock trying to retrieve a frame that can't be decoded without feeding
yading@10 52 * in additional packets.
yading@10 53 *
yading@10 54 * As such, the code will return in the event that a picture cannot be copied
yading@10 55 * out, leading to an increase in the length of the pipeline. This, in turn,
yading@10 56 * means we have to be sensitive to the time it takes to decode a picture;
yading@10 57 * We do not want to give up just because the hardware needed a little more
yading@10 58 * time to prepare the picture! For this reason, there are delays included
yading@10 59 * in the decode() path that ensure that, under normal conditions, the hardware
yading@10 60 * will only fail to return a frame if it really needs additional packets to
yading@10 61 * complete the decoding.
yading@10 62 *
yading@10 63 * Finally, to be explicit, we do not want the pipeline to grow without bound
yading@10 64 * for two reasons: 1) The hardware can only buffer a finite number of packets,
yading@10 65 * and 2) The client application may not be able to cope with arbitrarily long
yading@10 66 * delays in the video path relative to the audio path. For example, MPlayer
yading@10 67 * can only handle a 20 picture delay (although this is arbitrary, and needs
yading@10 68 * to be extended to fully support the CrystalHD where the delay could be up
yading@10 69 * to 32 pictures - consider PAFF H.264 content with 16 b-frames, each coded as two fields).
yading@10 70 */
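/*
 * Illustrative sketch (not from the original source): the calling pattern the
 * above implies for a hypothetical client of this decoder. read_packet() and
 * display() are placeholder names, not real API.
 *
 *     while (read_packet(&pkt) >= 0) {
 *         avcodec_decode_video2(avctx, frame, &got_frame, &pkt);
 *         // got_frame may stay 0 while the pipeline fills (roughly one DPB
 *         // deep, plus the 3-picture copy-out threshold described above).
 *         if (got_frame)
 *             display(frame);
 *     }
 */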
yading@10 71
yading@10 72 /*****************************************************************************
yading@10 73 * Includes
yading@10 74 ****************************************************************************/
yading@10 75
yading@10 76 #define _XOPEN_SOURCE 600
yading@10 77 #include <inttypes.h>
yading@10 78 #include <stdio.h>
yading@10 79 #include <stdlib.h>
yading@10 80 #include <unistd.h>
yading@10 81
yading@10 82 #include <libcrystalhd/bc_dts_types.h>
yading@10 83 #include <libcrystalhd/bc_dts_defs.h>
yading@10 84 #include <libcrystalhd/libcrystalhd_if.h>
yading@10 85
yading@10 86 #include "avcodec.h"
yading@10 87 #include "h264.h"
yading@10 88 #include "internal.h"
yading@10 89 #include "libavutil/imgutils.h"
yading@10 90 #include "libavutil/intreadwrite.h"
yading@10 91 #include "libavutil/opt.h"
yading@10 92
yading@10 93 /** Timeout parameter passed to DtsProcOutput() in us */
yading@10 94 #define OUTPUT_PROC_TIMEOUT 50
yading@10 95 /** Step between fake timestamps passed to hardware in units of 100ns */
yading@10 96 #define TIMESTAMP_UNIT 100000
yading@10 97 /** Initial value in us of the wait in decode() */
yading@10 98 #define BASE_WAIT 10000
yading@10 99 /** Increment in us to adjust wait in decode() */
yading@10 100 #define WAIT_UNIT 1000
yading@10 101
yading@10 102
yading@10 103 /*****************************************************************************
yading@10 104 * Module private data
yading@10 105 ****************************************************************************/
yading@10 106
yading@10 107 typedef enum {
yading@10 108 RET_ERROR = -1,
yading@10 109 RET_OK = 0,
yading@10 110 RET_COPY_AGAIN = 1,
yading@10 111 RET_SKIP_NEXT_COPY = 2,
yading@10 112 RET_COPY_NEXT_FIELD = 3,
yading@10 113 } CopyRet;
yading@10 114
yading@10 115 typedef struct OpaqueList {
yading@10 116 struct OpaqueList *next;
yading@10 117 uint64_t fake_timestamp;
yading@10 118 uint64_t reordered_opaque;
yading@10 119 uint8_t pic_type;
yading@10 120 } OpaqueList;
yading@10 121
yading@10 122 typedef struct {
yading@10 123 AVClass *av_class;
yading@10 124 AVCodecContext *avctx;
yading@10 125 AVFrame *pic;
yading@10 126 HANDLE dev;
yading@10 127
yading@10 128 uint8_t *orig_extradata;
yading@10 129 uint32_t orig_extradata_size;
yading@10 130
yading@10 131 AVBitStreamFilterContext *bsfc;
yading@10 132 AVCodecParserContext *parser;
yading@10 133
yading@10 134 uint8_t is_70012;
yading@10 135 uint8_t *sps_pps_buf;
yading@10 136 uint32_t sps_pps_size;
yading@10 137 uint8_t is_nal;
yading@10 138 uint8_t output_ready;
yading@10 139 uint8_t need_second_field;
yading@10 140 uint8_t skip_next_output;
yading@10 141 uint64_t decode_wait;
yading@10 142
yading@10 143 uint64_t last_picture;
yading@10 144
yading@10 145 OpaqueList *head;
yading@10 146 OpaqueList *tail;
yading@10 147
yading@10 148 /* Options */
yading@10 149 uint32_t sWidth;
yading@10 150 uint8_t bframe_bug;
yading@10 151 } CHDContext;
yading@10 152
yading@10 153 static const AVOption options[] = {
yading@10 154 { "crystalhd_downscale_width",
yading@10 155 "Turn on downscaling to the specified width",
yading@10 156 offsetof(CHDContext, sWidth),
yading@10 157 AV_OPT_TYPE_INT, {.i64 = 0}, 0, UINT32_MAX,
yading@10 158 AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM, },
yading@10 159 { NULL, },
yading@10 160 };
yading@10 161
yading@10 162
yading@10 163 /*****************************************************************************
yading@10 164 * Helper functions
yading@10 165 ****************************************************************************/
yading@10 166
yading@10 167 static inline BC_MEDIA_SUBTYPE id2subtype(CHDContext *priv, enum AVCodecID id)
yading@10 168 {
yading@10 169 switch (id) {
yading@10 170 case AV_CODEC_ID_MPEG4:
yading@10 171 return BC_MSUBTYPE_DIVX;
yading@10 172 case AV_CODEC_ID_MSMPEG4V3:
yading@10 173 return BC_MSUBTYPE_DIVX311;
yading@10 174 case AV_CODEC_ID_MPEG2VIDEO:
yading@10 175 return BC_MSUBTYPE_MPEG2VIDEO;
yading@10 176 case AV_CODEC_ID_VC1:
yading@10 177 return BC_MSUBTYPE_VC1;
yading@10 178 case AV_CODEC_ID_WMV3:
yading@10 179 return BC_MSUBTYPE_WMV3;
yading@10 180 case AV_CODEC_ID_H264:
yading@10 181 return priv->is_nal ? BC_MSUBTYPE_AVC1 : BC_MSUBTYPE_H264;
yading@10 182 default:
yading@10 183 return BC_MSUBTYPE_INVALID;
yading@10 184 }
yading@10 185 }
yading@10 186
yading@10 187 static inline void print_frame_info(CHDContext *priv, BC_DTS_PROC_OUT *output)
yading@10 188 {
yading@10 189 av_log(priv->avctx, AV_LOG_VERBOSE, "\tYBuffSz: %u\n", output->YbuffSz);
yading@10 190 av_log(priv->avctx, AV_LOG_VERBOSE, "\tYBuffDoneSz: %u\n",
yading@10 191 output->YBuffDoneSz);
yading@10 192 av_log(priv->avctx, AV_LOG_VERBOSE, "\tUVBuffDoneSz: %u\n",
yading@10 193 output->UVBuffDoneSz);
yading@10 194 av_log(priv->avctx, AV_LOG_VERBOSE, "\tTimestamp: %"PRIu64"\n",
yading@10 195 output->PicInfo.timeStamp);
yading@10 196 av_log(priv->avctx, AV_LOG_VERBOSE, "\tPicture Number: %u\n",
yading@10 197 output->PicInfo.picture_number);
yading@10 198 av_log(priv->avctx, AV_LOG_VERBOSE, "\tWidth: %u\n",
yading@10 199 output->PicInfo.width);
yading@10 200 av_log(priv->avctx, AV_LOG_VERBOSE, "\tHeight: %u\n",
yading@10 201 output->PicInfo.height);
yading@10 202 av_log(priv->avctx, AV_LOG_VERBOSE, "\tChroma: 0x%03x\n",
yading@10 203 output->PicInfo.chroma_format);
yading@10 204 av_log(priv->avctx, AV_LOG_VERBOSE, "\tPulldown: %u\n",
yading@10 205 output->PicInfo.pulldown);
yading@10 206 av_log(priv->avctx, AV_LOG_VERBOSE, "\tFlags: 0x%08x\n",
yading@10 207 output->PicInfo.flags);
yading@10 208 av_log(priv->avctx, AV_LOG_VERBOSE, "\tFrame Rate/Res: %u\n",
yading@10 209 output->PicInfo.frame_rate);
yading@10 210 av_log(priv->avctx, AV_LOG_VERBOSE, "\tAspect Ratio: %u\n",
yading@10 211 output->PicInfo.aspect_ratio);
yading@10 212 av_log(priv->avctx, AV_LOG_VERBOSE, "\tColor Primaries: %u\n",
yading@10 213 output->PicInfo.colour_primaries);
yading@10 214 av_log(priv->avctx, AV_LOG_VERBOSE, "\tMetaData: %u\n",
yading@10 215 output->PicInfo.picture_meta_payload);
yading@10 216 av_log(priv->avctx, AV_LOG_VERBOSE, "\tSession Number: %u\n",
yading@10 217 output->PicInfo.sess_num);
yading@10 218 av_log(priv->avctx, AV_LOG_VERBOSE, "\tycom: %u\n",
yading@10 219 output->PicInfo.ycom);
yading@10 220 av_log(priv->avctx, AV_LOG_VERBOSE, "\tCustom Aspect: %u\n",
yading@10 221 output->PicInfo.custom_aspect_ratio_width_height);
yading@10 222 av_log(priv->avctx, AV_LOG_VERBOSE, "\tFrames to Drop: %u\n",
yading@10 223 output->PicInfo.n_drop);
yading@10 224 av_log(priv->avctx, AV_LOG_VERBOSE, "\tH264 Valid Fields: 0x%08x\n",
yading@10 225 output->PicInfo.other.h264.valid);
yading@10 226 }
yading@10 227
yading@10 228
yading@10 229 /*****************************************************************************
yading@10 230 * OpaqueList functions
yading@10 231 ****************************************************************************/
yading@10 232
yading@10 233 static uint64_t opaque_list_push(CHDContext *priv, uint64_t reordered_opaque,
yading@10 234 uint8_t pic_type)
yading@10 235 {
yading@10 236 OpaqueList *newNode = av_mallocz(sizeof (OpaqueList));
yading@10 237 if (!newNode) {
yading@10 238 av_log(priv->avctx, AV_LOG_ERROR,
yading@10 239 "Unable to allocate new node in OpaqueList.\n");
yading@10 240 return 0;
yading@10 241 }
yading@10 242 if (!priv->head) {
yading@10 243 newNode->fake_timestamp = TIMESTAMP_UNIT;
yading@10 244 priv->head = newNode;
yading@10 245 } else {
yading@10 246 newNode->fake_timestamp = priv->tail->fake_timestamp + TIMESTAMP_UNIT;
yading@10 247 priv->tail->next = newNode;
yading@10 248 }
yading@10 249 priv->tail = newNode;
yading@10 250 newNode->reordered_opaque = reordered_opaque;
yading@10 251 newNode->pic_type = pic_type;
yading@10 252
yading@10 253 return newNode->fake_timestamp;
yading@10 254 }
yading@10 255
yading@10 256 /*
yading@10 257 * The OpaqueList is built in decode order, while elements will be removed
yading@10 258 * in presentation order. If frames are reordered, this means we must be
yading@10 259 * able to remove elements that are not the first element.
yading@10 260 *
yading@10 261 * Returned node must be freed by caller.
yading@10 262 */
yading@10 263 static OpaqueList *opaque_list_pop(CHDContext *priv, uint64_t fake_timestamp)
yading@10 264 {
yading@10 265 OpaqueList *node = priv->head;
yading@10 266
yading@10 267 if (!priv->head) {
yading@10 268 av_log(priv->avctx, AV_LOG_ERROR,
yading@10 269 "CrystalHD: Attempted to query non-existent timestamps.\n");
yading@10 270 return NULL;
yading@10 271 }
yading@10 272
yading@10 273 /*
yading@10 274 * The first element is special-cased because we have to manipulate
yading@10 275 * the head pointer rather than the previous element in the list.
yading@10 276 */
yading@10 277 if (priv->head->fake_timestamp == fake_timestamp) {
yading@10 278 priv->head = node->next;
yading@10 279
yading@10 280 if (!priv->head || !priv->head->next)
yading@10 281 priv->tail = priv->head;
yading@10 282
yading@10 283 node->next = NULL;
yading@10 284 return node;
yading@10 285 }
yading@10 286
yading@10 287 /*
yading@10 288 * The list is processed at arm's length so that we have the
yading@10 289 * previous element available to rewrite its next pointer.
yading@10 290 */
yading@10 291 while (node->next) {
yading@10 292 OpaqueList *current = node->next;
yading@10 293 if (current->fake_timestamp == fake_timestamp) {
yading@10 294 node->next = current->next;
yading@10 295
yading@10 296 if (!node->next)
yading@10 297 priv->tail = node;
yading@10 298
yading@10 299 current->next = NULL;
yading@10 300 return current;
yading@10 301 } else {
yading@10 302 node = current;
yading@10 303 }
yading@10 304 }
yading@10 305
yading@10 306 av_log(priv->avctx, AV_LOG_VERBOSE,
yading@10 307 "CrystalHD: Couldn't match fake_timestamp.\n");
yading@10 308 return NULL;
yading@10 309 }
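/*
 * Illustrative pairing (sketch, mirroring the callers further down): push when
 * a packet is submitted to the hardware, pop when the corresponding picture
 * comes back out.
 *
 *     uint64_t fake_pts = opaque_list_push(priv, avctx->pkt->pts, pic_type);
 *     ...
 *     OpaqueList *node = opaque_list_pop(priv, output->PicInfo.timeStamp);
 *     if (node) {
 *         pkt_pts = node->reordered_opaque;
 *         av_free(node);
 *     }
 */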
yading@10 310
yading@10 311
yading@10 312 /*****************************************************************************
yading@10 313 * Video decoder API function definitions
yading@10 314 ****************************************************************************/
yading@10 315
yading@10 316 static void flush(AVCodecContext *avctx)
yading@10 317 {
yading@10 318 CHDContext *priv = avctx->priv_data;
yading@10 319
yading@10 320 avctx->has_b_frames = 0;
yading@10 321 priv->last_picture = -1;
yading@10 322 priv->output_ready = 0;
yading@10 323 priv->need_second_field = 0;
yading@10 324 priv->skip_next_output = 0;
yading@10 325 priv->decode_wait = BASE_WAIT;
yading@10 326
yading@10 327 av_frame_unref (priv->pic);
yading@10 328
yading@10 329 /* Flush mode 4 flushes all software and hardware buffers. */
yading@10 330 DtsFlushInput(priv->dev, 4);
yading@10 331 }
yading@10 332
yading@10 333
yading@10 334 static av_cold int uninit(AVCodecContext *avctx)
yading@10 335 {
yading@10 336 CHDContext *priv = avctx->priv_data;
yading@10 337 HANDLE device;
yading@10 338
yading@10 339 device = priv->dev;
yading@10 340 DtsStopDecoder(device);
yading@10 341 DtsCloseDecoder(device);
yading@10 342 DtsDeviceClose(device);
yading@10 343
yading@10 344 /*
yading@10 345 * Restore original extradata, so that if the decoder is
yading@10 346 * reinitialised, the bitstream detection and filtering
yading@10 347 * will work as expected.
yading@10 348 */
yading@10 349 if (priv->orig_extradata) {
yading@10 350 av_free(avctx->extradata);
yading@10 351 avctx->extradata = priv->orig_extradata;
yading@10 352 avctx->extradata_size = priv->orig_extradata_size;
yading@10 353 priv->orig_extradata = NULL;
yading@10 354 priv->orig_extradata_size = 0;
yading@10 355 }
yading@10 356
yading@10 357 av_parser_close(priv->parser);
yading@10 358 if (priv->bsfc) {
yading@10 359 av_bitstream_filter_close(priv->bsfc);
yading@10 360 }
yading@10 361
yading@10 362 av_free(priv->sps_pps_buf);
yading@10 363
yading@10 364 av_frame_free (&priv->pic);
yading@10 365
yading@10 366 if (priv->head) {
yading@10 367 OpaqueList *node = priv->head;
yading@10 368 while (node) {
yading@10 369 OpaqueList *next = node->next;
yading@10 370 av_free(node);
yading@10 371 node = next;
yading@10 372 }
yading@10 373 }
yading@10 374
yading@10 375 return 0;
yading@10 376 }
yading@10 377
yading@10 378
yading@10 379 static av_cold int init(AVCodecContext *avctx)
yading@10 380 {
yading@10 381 CHDContext* priv;
yading@10 382 BC_STATUS ret;
yading@10 383 BC_INFO_CRYSTAL version;
yading@10 384 BC_INPUT_FORMAT format = {
yading@10 385 .FGTEnable = FALSE,
yading@10 386 .Progressive = TRUE,
yading@10 387 .OptFlags = 0x80000000 | vdecFrameRate59_94 | 0x40,
yading@10 388 .width = avctx->width,
yading@10 389 .height = avctx->height,
yading@10 390 };
yading@10 391
yading@10 392 BC_MEDIA_SUBTYPE subtype;
yading@10 393
yading@10 394 uint32_t mode = DTS_PLAYBACK_MODE |
yading@10 395 DTS_LOAD_FILE_PLAY_FW |
yading@10 396 DTS_SKIP_TX_CHK_CPB |
yading@10 397 DTS_PLAYBACK_DROP_RPT_MODE |
yading@10 398 DTS_SINGLE_THREADED_MODE |
yading@10 399 DTS_DFLT_RESOLUTION(vdecRESOLUTION_1080p23_976);
yading@10 400
yading@10 401 av_log(avctx, AV_LOG_VERBOSE, "CrystalHD Init for %s\n",
yading@10 402 avctx->codec->name);
yading@10 403
yading@10 404 avctx->pix_fmt = AV_PIX_FMT_YUYV422;
yading@10 405
yading@10 406 /* Initialize the library */
yading@10 407 priv = avctx->priv_data;
yading@10 408 priv->avctx = avctx;
yading@10 409 priv->is_nal = avctx->extradata_size > 0 && *(avctx->extradata) == 1;
yading@10 410 priv->last_picture = -1;
yading@10 411 priv->decode_wait = BASE_WAIT;
yading@10 412 priv->pic = av_frame_alloc();
yading@10 413
yading@10 414 subtype = id2subtype(priv, avctx->codec->id);
yading@10 415 switch (subtype) {
yading@10 416 case BC_MSUBTYPE_AVC1:
yading@10 417 {
yading@10 418 uint8_t *dummy_p;
yading@10 419 int dummy_int;
yading@10 420
yading@10 421 /* Back up the extradata so it can be restored at close time. */
yading@10 422 priv->orig_extradata = av_malloc(avctx->extradata_size);
yading@10 423 if (!priv->orig_extradata) {
yading@10 424 av_log(avctx, AV_LOG_ERROR,
yading@10 425 "Failed to allocate copy of extradata\n");
yading@10 426 return AVERROR(ENOMEM);
yading@10 427 }
yading@10 428 priv->orig_extradata_size = avctx->extradata_size;
yading@10 429 memcpy(priv->orig_extradata, avctx->extradata, avctx->extradata_size);
yading@10 430
yading@10 431 priv->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
yading@10 432 if (!priv->bsfc) {
yading@10 433 av_log(avctx, AV_LOG_ERROR,
yading@10 434 "Cannot open the h264_mp4toannexb BSF!\n");
yading@10 435 return AVERROR_BSF_NOT_FOUND;
yading@10 436 }
yading@10 437 av_bitstream_filter_filter(priv->bsfc, avctx, NULL, &dummy_p,
yading@10 438 &dummy_int, NULL, 0, 0);
yading@10 439 }
yading@10 440 subtype = BC_MSUBTYPE_H264;
yading@10 441 // Fall-through
yading@10 442 case BC_MSUBTYPE_H264:
yading@10 443 format.startCodeSz = 4;
yading@10 444 // Fall-through
yading@10 445 case BC_MSUBTYPE_VC1:
yading@10 446 case BC_MSUBTYPE_WVC1:
yading@10 447 case BC_MSUBTYPE_WMV3:
yading@10 448 case BC_MSUBTYPE_WMVA:
yading@10 449 case BC_MSUBTYPE_MPEG2VIDEO:
yading@10 450 case BC_MSUBTYPE_DIVX:
yading@10 451 case BC_MSUBTYPE_DIVX311:
yading@10 452 format.pMetaData = avctx->extradata;
yading@10 453 format.metaDataSz = avctx->extradata_size;
yading@10 454 break;
yading@10 455 default:
yading@10 456 av_log(avctx, AV_LOG_ERROR, "CrystalHD: Unknown codec name\n");
yading@10 457 return AVERROR(EINVAL);
yading@10 458 }
yading@10 459 format.mSubtype = subtype;
yading@10 460
yading@10 461 if (priv->sWidth) {
yading@10 462 format.bEnableScaling = 1;
yading@10 463 format.ScalingParams.sWidth = priv->sWidth;
yading@10 464 }
yading@10 465
yading@10 466 /* Get a decoder instance */
yading@10 467 av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: starting up\n");
yading@10 468 // Initialize the Link and Decoder devices
yading@10 469 ret = DtsDeviceOpen(&priv->dev, mode);
yading@10 470 if (ret != BC_STS_SUCCESS) {
yading@10 471 av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: DtsDeviceOpen failed\n");
yading@10 472 goto fail;
yading@10 473 }
yading@10 474
yading@10 475 ret = DtsCrystalHDVersion(priv->dev, &version);
yading@10 476 if (ret != BC_STS_SUCCESS) {
yading@10 477 av_log(avctx, AV_LOG_VERBOSE,
yading@10 478 "CrystalHD: DtsCrystalHDVersion failed\n");
yading@10 479 goto fail;
yading@10 480 }
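/* Assumption based on the checks below: a device id of 0 identifies the BCM70012. */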
yading@10 481 priv->is_70012 = version.device == 0;
yading@10 482
yading@10 483 if (priv->is_70012 &&
yading@10 484 (subtype == BC_MSUBTYPE_DIVX || subtype == BC_MSUBTYPE_DIVX311)) {
yading@10 485 av_log(avctx, AV_LOG_VERBOSE,
yading@10 486 "CrystalHD: BCM70012 doesn't support MPEG4-ASP/DivX/Xvid\n");
yading@10 487 goto fail;
yading@10 488 }
yading@10 489
yading@10 490 ret = DtsSetInputFormat(priv->dev, &format);
yading@10 491 if (ret != BC_STS_SUCCESS) {
yading@10 492 av_log(avctx, AV_LOG_ERROR, "CrystalHD: SetInputFormat failed\n");
yading@10 493 goto fail;
yading@10 494 }
yading@10 495
yading@10 496 ret = DtsOpenDecoder(priv->dev, BC_STREAM_TYPE_ES);
yading@10 497 if (ret != BC_STS_SUCCESS) {
yading@10 498 av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsOpenDecoder failed\n");
yading@10 499 goto fail;
yading@10 500 }
yading@10 501
yading@10 502 ret = DtsSetColorSpace(priv->dev, OUTPUT_MODE422_YUY2);
yading@10 503 if (ret != BC_STS_SUCCESS) {
yading@10 504 av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsSetColorSpace failed\n");
yading@10 505 goto fail;
yading@10 506 }
yading@10 507 ret = DtsStartDecoder(priv->dev);
yading@10 508 if (ret != BC_STS_SUCCESS) {
yading@10 509 av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartDecoder failed\n");
yading@10 510 goto fail;
yading@10 511 }
yading@10 512 ret = DtsStartCapture(priv->dev);
yading@10 513 if (ret != BC_STS_SUCCESS) {
yading@10 514 av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartCapture failed\n");
yading@10 515 goto fail;
yading@10 516 }
yading@10 517
yading@10 518 if (avctx->codec->id == AV_CODEC_ID_H264) {
yading@10 519 priv->parser = av_parser_init(avctx->codec->id);
yading@10 520 if (!priv->parser)
yading@10 521 av_log(avctx, AV_LOG_WARNING,
yading@10 522 "Cannot open the h.264 parser! Interlaced h.264 content "
yading@10 523 "will not be detected reliably.\n");
yading@10 524 if (priv->parser) priv->parser->flags = PARSER_FLAG_COMPLETE_FRAMES;
yading@10 525 }
yading@10 526 av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Init complete.\n");
yading@10 527
yading@10 528 return 0;
yading@10 529
yading@10 530 fail:
yading@10 531 uninit(avctx);
yading@10 532 return -1;
yading@10 533 }
yading@10 534
yading@10 535
yading@10 536 static inline CopyRet copy_frame(AVCodecContext *avctx,
yading@10 537 BC_DTS_PROC_OUT *output,
yading@10 538 void *data, int *got_frame)
yading@10 539 {
yading@10 540 BC_STATUS ret;
yading@10 541 BC_DTS_STATUS decoder_status = { 0, };
yading@10 542 uint8_t trust_interlaced;
yading@10 543 uint8_t interlaced;
yading@10 544
yading@10 545 CHDContext *priv = avctx->priv_data;
yading@10 546 int64_t pkt_pts = AV_NOPTS_VALUE;
yading@10 547 uint8_t pic_type = 0;
yading@10 548
yading@10 549 uint8_t bottom_field = (output->PicInfo.flags & VDEC_FLAG_BOTTOMFIELD) ==
yading@10 550 VDEC_FLAG_BOTTOMFIELD;
yading@10 551 uint8_t bottom_first = !!(output->PicInfo.flags & VDEC_FLAG_BOTTOM_FIRST);
yading@10 552
yading@10 553 int width = output->PicInfo.width;
yading@10 554 int height = output->PicInfo.height;
yading@10 555 int bwidth;
yading@10 556 uint8_t *src = output->Ybuff;
yading@10 557 int sStride;
yading@10 558 uint8_t *dst;
yading@10 559 int dStride;
yading@10 560
yading@10 561 if (output->PicInfo.timeStamp != 0) {
yading@10 562 OpaqueList *node = opaque_list_pop(priv, output->PicInfo.timeStamp);
yading@10 563 if (node) {
yading@10 564 pkt_pts = node->reordered_opaque;
yading@10 565 pic_type = node->pic_type;
yading@10 566 av_free(node);
yading@10 567 } else {
yading@10 568 /*
yading@10 569 * We will encounter a situation where a timestamp cannot be
yading@10 570 * popped if a second field is being returned. In this case,
yading@10 571 * each field has the same timestamp and the first one will
yading@10 572 * cause it to be popped. To keep subsequent calculations
yading@10 573 * simple, pic_type should be set to a FIELD value - it doesn't
yading@10 574 * matter which, but I chose BOTTOM.
yading@10 575 */
yading@10 576 pic_type = PICT_BOTTOM_FIELD;
yading@10 577 }
yading@10 578 av_log(avctx, AV_LOG_VERBOSE, "output \"pts\": %"PRIu64"\n",
yading@10 579 output->PicInfo.timeStamp);
yading@10 580 av_log(avctx, AV_LOG_VERBOSE, "output picture type %d\n",
yading@10 581 pic_type);
yading@10 582 }
yading@10 583
yading@10 584 ret = DtsGetDriverStatus(priv->dev, &decoder_status);
yading@10 585 if (ret != BC_STS_SUCCESS) {
yading@10 586 av_log(avctx, AV_LOG_ERROR,
yading@10 587 "CrystalHD: GetDriverStatus failed: %u\n", ret);
yading@10 588 return RET_ERROR;
yading@10 589 }
yading@10 590
yading@10 591 /*
yading@10 592 * For most content, we can trust the interlaced flag returned
yading@10 593 * by the hardware, but sometimes we can't. These are the
yading@10 594 * conditions under which we can trust the flag:
yading@10 595 *
yading@10 596 * 1) It's not h.264 content
yading@10 597 * 2) The UNKNOWN_SRC flag is not set
yading@10 598 * 3) We know we're expecting a second field
yading@10 599 * 4) The hardware reports this picture and the next picture
yading@10 600 * have the same picture number.
yading@10 601 *
yading@10 602 * Note that there can still be interlaced content that will
yading@10 603 * fail this check, if the hardware hasn't decoded the next
yading@10 604 * picture or if there is a corruption in the stream. (In either
yading@10 605 * case a 0 will be returned for the next picture number)
yading@10 606 */
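/*
 * Assumption from the usage below: picNumFlags carries the next picture's
 * number, with bit 30 (0x40000000) used as a flag that must be masked off.
 */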
yading@10 607 trust_interlaced = avctx->codec->id != AV_CODEC_ID_H264 ||
yading@10 608 !(output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC) ||
yading@10 609 priv->need_second_field ||
yading@10 610 (decoder_status.picNumFlags & ~0x40000000) ==
yading@10 611 output->PicInfo.picture_number;
yading@10 612
yading@10 613 /*
yading@10 614 * If we got a false negative for trust_interlaced on the first field,
yading@10 615 * we will realise our mistake here when we see that the picture number is that
yading@10 616 * of the previous picture. We cannot recover the frame and should discard the
yading@10 617 * second field to keep the correct number of output frames.
yading@10 618 */
yading@10 619 if (output->PicInfo.picture_number == priv->last_picture && !priv->need_second_field) {
yading@10 620 av_log(avctx, AV_LOG_WARNING,
yading@10 621 "Incorrectly guessed progressive frame. Discarding second field\n");
yading@10 622 /* Returning without providing a picture. */
yading@10 623 return RET_OK;
yading@10 624 }
yading@10 625
yading@10 626 interlaced = (output->PicInfo.flags & VDEC_FLAG_INTERLACED_SRC) &&
yading@10 627 trust_interlaced;
yading@10 628
yading@10 629 if (!trust_interlaced && (decoder_status.picNumFlags & ~0x40000000) == 0) {
yading@10 630 av_log(avctx, AV_LOG_VERBOSE,
yading@10 631 "Next picture number unknown. Assuming progressive frame.\n");
yading@10 632 }
yading@10 633
yading@10 634 av_log(avctx, AV_LOG_VERBOSE, "Interlaced state: %d | trust_interlaced %d\n",
yading@10 635 interlaced, trust_interlaced);
yading@10 636
yading@10 637 if (priv->pic->data[0] && !priv->need_second_field)
yading@10 638 av_frame_unref(priv->pic);
yading@10 639
yading@10 640 priv->need_second_field = interlaced && !priv->need_second_field;
yading@10 641
yading@10 642 if (!priv->pic->data[0]) {
yading@10 643 if (ff_get_buffer(avctx, priv->pic, AV_GET_BUFFER_FLAG_REF) < 0)
yading@10 644 return RET_ERROR;
yading@10 645 }
yading@10 646
yading@10 647 bwidth = av_image_get_linesize(avctx->pix_fmt, width, 0);
yading@10 648 if (priv->is_70012) {
yading@10 649 int pStride;
yading@10 650
yading@10 651 if (width <= 720)
yading@10 652 pStride = 720;
yading@10 653 else if (width <= 1280)
yading@10 654 pStride = 1280;
yading@10 655 else pStride = 1920;
yading@10 656 sStride = av_image_get_linesize(avctx->pix_fmt, pStride, 0);
yading@10 657 } else {
yading@10 658 sStride = bwidth;
yading@10 659 }
yading@10 660
yading@10 661 dStride = priv->pic->linesize[0];
yading@10 662 dst = priv->pic->data[0];
yading@10 663
yading@10 664 av_log(priv->avctx, AV_LOG_VERBOSE, "CrystalHD: Copying out frame\n");
yading@10 665
yading@10 666 if (interlaced) {
yading@10 667 int dY = 0;
yading@10 668 int sY = 0;
yading@10 669
yading@10 670 height /= 2;
yading@10 671 if (bottom_field) {
yading@10 672 av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: bottom field\n");
yading@10 673 dY = 1;
yading@10 674 } else {
yading@10 675 av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: top field\n");
yading@10 676 dY = 0;
yading@10 677 }
yading@10 678
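/* Interleave the field into every other destination line: dY advances twice
 * per source row (once in the for header, once inside the body). */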
yading@10 679 for (sY = 0; sY < height; dY++, sY++) {
yading@10 680 memcpy(&(dst[dY * dStride]), &(src[sY * sStride]), bwidth);
yading@10 681 dY++;
yading@10 682 }
yading@10 683 } else {
yading@10 684 av_image_copy_plane(dst, dStride, src, sStride, bwidth, height);
yading@10 685 }
yading@10 686
yading@10 687 priv->pic->interlaced_frame = interlaced;
yading@10 688 if (interlaced)
yading@10 689 priv->pic->top_field_first = !bottom_first;
yading@10 690
yading@10 691 priv->pic->pkt_pts = pkt_pts;
yading@10 692
yading@10 693 if (!priv->need_second_field) {
yading@10 694 *got_frame = 1;
yading@10 695 if ((ret = av_frame_ref(data, priv->pic)) < 0) {
yading@10 696 return ret;
yading@10 697 }
yading@10 698 }
yading@10 699
yading@10 700 /*
yading@10 701 * Two types of PAFF content have been observed. One form causes the
yading@10 702 * hardware to return a field pair and the other individual fields,
yading@10 703 * even though the input is always individual fields. We must skip
yading@10 704 * copying on the next decode() call to maintain pipeline length in
yading@10 705 * the first case.
yading@10 706 */
yading@10 707 if (!interlaced && (output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC) &&
yading@10 708 (pic_type == PICT_TOP_FIELD || pic_type == PICT_BOTTOM_FIELD)) {
yading@10 709 av_log(priv->avctx, AV_LOG_VERBOSE, "Fieldpair from two packets.\n");
yading@10 710 return RET_SKIP_NEXT_COPY;
yading@10 711 }
yading@10 712
yading@10 713 /*
yading@10 714 * The logic here is purely based on empirical testing with samples.
yading@10 715 * If we need a second field, it could come from a second input packet,
yading@10 716 * or it could come from the same field-pair input packet at the current
yading@10 717 * field. In the first case, we should return and wait for the next time
yading@10 718 * round to get the second field, while in the second case, we should
yading@10 719 * ask the decoder for it immediately.
yading@10 720 *
yading@10 721 * Testing has shown that we are dealing with the fieldpair -> two fields
yading@10 722 * case if the VDEC_FLAG_UNKNOWN_SRC is not set or if the input picture
yading@10 723 * type was PICT_FRAME (in this second case, the flag might still be set)
yading@10 724 */
yading@10 725 return priv->need_second_field &&
yading@10 726 (!(output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC) ||
yading@10 727 pic_type == PICT_FRAME) ?
yading@10 728 RET_COPY_NEXT_FIELD : RET_OK;
yading@10 729 }
yading@10 730
yading@10 731
yading@10 732 static inline CopyRet receive_frame(AVCodecContext *avctx,
yading@10 733 void *data, int *got_frame)
yading@10 734 {
yading@10 735 BC_STATUS ret;
yading@10 736 BC_DTS_PROC_OUT output = {
yading@10 737 .PicInfo.width = avctx->width,
yading@10 738 .PicInfo.height = avctx->height,
yading@10 739 };
yading@10 740 CHDContext *priv = avctx->priv_data;
yading@10 741 HANDLE dev = priv->dev;
yading@10 742
yading@10 743 *got_frame = 0;
yading@10 744
yading@10 745 // Request decoded data from the driver
yading@10 746 ret = DtsProcOutputNoCopy(dev, OUTPUT_PROC_TIMEOUT, &output);
yading@10 747 if (ret == BC_STS_FMT_CHANGE) {
yading@10 748 av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Initial format change\n");
yading@10 749 avctx->width = output.PicInfo.width;
yading@10 750 avctx->height = output.PicInfo.height;
yading@10 751 switch ( output.PicInfo.aspect_ratio ) {
yading@10 752 case vdecAspectRatioSquare:
yading@10 753 avctx->sample_aspect_ratio = (AVRational) { 1, 1};
yading@10 754 break;
yading@10 755 case vdecAspectRatio12_11:
yading@10 756 avctx->sample_aspect_ratio = (AVRational) { 12, 11};
yading@10 757 break;
yading@10 758 case vdecAspectRatio10_11:
yading@10 759 avctx->sample_aspect_ratio = (AVRational) { 10, 11};
yading@10 760 break;
yading@10 761 case vdecAspectRatio16_11:
yading@10 762 avctx->sample_aspect_ratio = (AVRational) { 16, 11};
yading@10 763 break;
yading@10 764 case vdecAspectRatio40_33:
yading@10 765 avctx->sample_aspect_ratio = (AVRational) { 40, 33};
yading@10 766 break;
yading@10 767 case vdecAspectRatio24_11:
yading@10 768 avctx->sample_aspect_ratio = (AVRational) { 24, 11};
yading@10 769 break;
yading@10 770 case vdecAspectRatio20_11:
yading@10 771 avctx->sample_aspect_ratio = (AVRational) { 20, 11};
yading@10 772 break;
yading@10 773 case vdecAspectRatio32_11:
yading@10 774 avctx->sample_aspect_ratio = (AVRational) { 32, 11};
yading@10 775 break;
yading@10 776 case vdecAspectRatio80_33:
yading@10 777 avctx->sample_aspect_ratio = (AVRational) { 80, 33};
yading@10 778 break;
yading@10 779 case vdecAspectRatio18_11:
yading@10 780 avctx->sample_aspect_ratio = (AVRational) { 18, 11};
yading@10 781 break;
yading@10 782 case vdecAspectRatio15_11:
yading@10 783 avctx->sample_aspect_ratio = (AVRational) { 15, 11};
yading@10 784 break;
yading@10 785 case vdecAspectRatio64_33:
yading@10 786 avctx->sample_aspect_ratio = (AVRational) { 64, 33};
yading@10 787 break;
yading@10 788 case vdecAspectRatio160_99:
yading@10 789 avctx->sample_aspect_ratio = (AVRational) {160, 99};
yading@10 790 break;
yading@10 791 case vdecAspectRatio4_3:
yading@10 792 avctx->sample_aspect_ratio = (AVRational) { 4, 3};
yading@10 793 break;
yading@10 794 case vdecAspectRatio16_9:
yading@10 795 avctx->sample_aspect_ratio = (AVRational) { 16, 9};
yading@10 796 break;
yading@10 797 case vdecAspectRatio221_1:
yading@10 798 avctx->sample_aspect_ratio = (AVRational) {221, 1};
yading@10 799 break;
yading@10 800 }
yading@10 801 return RET_COPY_AGAIN;
yading@10 802 } else if (ret == BC_STS_SUCCESS) {
yading@10 803 int copy_ret = -1;
yading@10 804 if (output.PoutFlags & BC_POUT_FLAGS_PIB_VALID) {
yading@10 805 if (priv->last_picture == -1) {
yading@10 806 /*
yading@10 807 * Init to one less, so that the incrementing code doesn't
yading@10 808 * need to be special-cased.
yading@10 809 */
yading@10 810 priv->last_picture = output.PicInfo.picture_number - 1;
yading@10 811 }
yading@10 812
yading@10 813 if (avctx->codec->id == AV_CODEC_ID_MPEG4 &&
yading@10 814 output.PicInfo.timeStamp == 0 && priv->bframe_bug) {
yading@10 815 av_log(avctx, AV_LOG_VERBOSE,
yading@10 816 "CrystalHD: Not returning packed frame twice.\n");
yading@10 817 priv->last_picture++;
yading@10 818 DtsReleaseOutputBuffs(dev, NULL, FALSE);
yading@10 819 return RET_COPY_AGAIN;
yading@10 820 }
yading@10 821
yading@10 822 print_frame_info(priv, &output);
yading@10 823
yading@10 824 if (priv->last_picture + 1 < output.PicInfo.picture_number) {
yading@10 825 av_log(avctx, AV_LOG_WARNING,
yading@10 826 "CrystalHD: Picture Number discontinuity\n");
yading@10 827 /*
yading@10 828 * Have we lost frames? If so, we need to shrink the
yading@10 829 * pipeline length appropriately.
yading@10 830 *
yading@10 831 * XXX: I have no idea what the semantics of this situation
yading@10 832 * are so I don't even know if we've lost frames or which
yading@10 833 * ones.
yading@10 834 *
yading@10 835 * In any case, only warn the first time.
yading@10 836 */
yading@10 837 priv->last_picture = output.PicInfo.picture_number - 1;
yading@10 838 }
yading@10 839
yading@10 840 copy_ret = copy_frame(avctx, &output, data, got_frame);
yading@10 841 if (*got_frame > 0) {
yading@10 842 avctx->has_b_frames--;
yading@10 843 priv->last_picture++;
yading@10 844 av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Pipeline length: %u\n",
yading@10 845 avctx->has_b_frames);
yading@10 846 }
yading@10 847 } else {
yading@10 848 /*
yading@10 849 * An invalid frame has been consumed.
yading@10 850 */
yading@10 851 av_log(avctx, AV_LOG_ERROR, "CrystalHD: ProcOutput succeeded with "
yading@10 852 "invalid PIB\n");
yading@10 853 avctx->has_b_frames--;
yading@10 854 copy_ret = RET_OK;
yading@10 855 }
yading@10 856 DtsReleaseOutputBuffs(dev, NULL, FALSE);
yading@10 857
yading@10 858 return copy_ret;
yading@10 859 } else if (ret == BC_STS_BUSY) {
yading@10 860 return RET_COPY_AGAIN;
yading@10 861 } else {
yading@10 862 av_log(avctx, AV_LOG_ERROR, "CrystalHD: ProcOutput failed %d\n", ret);
yading@10 863 return RET_ERROR;
yading@10 864 }
yading@10 865 }
yading@10 866
yading@10 867
yading@10 868 static int decode(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
yading@10 869 {
yading@10 870 BC_STATUS ret;
yading@10 871 BC_DTS_STATUS decoder_status = { 0, };
yading@10 872 CopyRet rec_ret;
yading@10 873 CHDContext *priv = avctx->priv_data;
yading@10 874 HANDLE dev = priv->dev;
yading@10 875 uint8_t *in_data = avpkt->data;
yading@10 876 int len = avpkt->size;
yading@10 877 int free_data = 0;
yading@10 878 uint8_t pic_type = 0;
yading@10 879
yading@10 880 av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: decode_frame\n");
yading@10 881
yading@10 882 if (avpkt->size == 7 && !priv->bframe_bug) {
yading@10 883 /*
yading@10 884 * The use of a drop frame triggers the bug
yading@10 885 */
yading@10 886 av_log(avctx, AV_LOG_INFO,
yading@10 887 "CrystalHD: Enabling work-around for packed b-frame bug\n");
yading@10 888 priv->bframe_bug = 1;
yading@10 889 } else if (avpkt->size == 8 && priv->bframe_bug) {
yading@10 890 /*
yading@10 891 * Delay frames don't trigger the bug
yading@10 892 */
yading@10 893 av_log(avctx, AV_LOG_INFO,
yading@10 894 "CrystalHD: Disabling work-around for packed b-frame bug\n");
yading@10 895 priv->bframe_bug = 0;
yading@10 896 }
yading@10 897
yading@10 898 if (len) {
yading@10 899 int32_t tx_free = (int32_t)DtsTxFreeSize(dev);
yading@10 900
yading@10 901 if (priv->parser) {
yading@10 902 int ret = 0;
yading@10 903
yading@10 904 if (priv->bsfc) {
yading@10 905 ret = av_bitstream_filter_filter(priv->bsfc, avctx, NULL,
yading@10 906 &in_data, &len,
yading@10 907 avpkt->data, len, 0);
yading@10 908 }
yading@10 909 free_data = ret > 0;
yading@10 910
yading@10 911 if (ret >= 0) {
yading@10 912 uint8_t *pout;
yading@10 913 int psize;
yading@10 914 int index;
yading@10 915 H264Context *h = priv->parser->priv_data;
yading@10 916
yading@10 917 index = av_parser_parse2(priv->parser, avctx, &pout, &psize,
yading@10 918 in_data, len, avctx->pkt->pts,
yading@10 919 avctx->pkt->dts, 0);
yading@10 920 if (index < 0) {
yading@10 921 av_log(avctx, AV_LOG_WARNING,
yading@10 922 "CrystalHD: Failed to parse h.264 packet to "
yading@10 923 "detect interlacing.\n");
yading@10 924 } else if (index != len) {
yading@10 925 av_log(avctx, AV_LOG_WARNING,
yading@10 926 "CrystalHD: Failed to parse h.264 packet "
yading@10 927 "completely. Interlaced frames may be "
yading@10 928 "incorrectly detected.\n");
yading@10 929 } else {
yading@10 930 av_log(avctx, AV_LOG_VERBOSE,
yading@10 931 "CrystalHD: parser picture type %d\n",
yading@10 932 h->picture_structure);
yading@10 933 pic_type = h->picture_structure;
yading@10 934 }
yading@10 935 } else {
yading@10 936 av_log(avctx, AV_LOG_WARNING,
yading@10 937 "CrystalHD: mp4toannexb filter failed to filter "
yading@10 938 "packet. Interlaced frames may be incorrectly "
yading@10 939 "detected.\n");
yading@10 940 }
yading@10 941 }
yading@10 942
yading@10 943 if (len < tx_free - 1024) {
yading@10 944 /*
yading@10 945 * Despite being notionally opaque, either libcrystalhd or
yading@10 946 * the hardware itself will mangle pts values that are too
yading@10 947 * small or too large. The docs claim it should be in units
yading@10 948 * of 100ns. Given that we're nominally dealing with a black
yading@10 949 * box on both sides, any transform we do has no guarantee of
yading@10 950 * avoiding mangling so we need to build a mapping to values
yading@10 951 * we know will not be mangled.
yading@10 952 */
yading@10 953 uint64_t pts = opaque_list_push(priv, avctx->pkt->pts, pic_type);
yading@10 954 if (!pts) {
yading@10 955 if (free_data) {
yading@10 956 av_freep(&in_data);
yading@10 957 }
yading@10 958 return AVERROR(ENOMEM);
yading@10 959 }
yading@10 960 av_log(priv->avctx, AV_LOG_VERBOSE,
yading@10 961 "input \"pts\": %"PRIu64"\n", pts);
yading@10 962 ret = DtsProcInput(dev, in_data, len, pts, 0);
yading@10 963 if (free_data) {
yading@10 964 av_freep(&in_data);
yading@10 965 }
yading@10 966 if (ret == BC_STS_BUSY) {
yading@10 967 av_log(avctx, AV_LOG_WARNING,
yading@10 968 "CrystalHD: ProcInput returned busy\n");
yading@10 969 usleep(BASE_WAIT);
yading@10 970 return AVERROR(EBUSY);
yading@10 971 } else if (ret != BC_STS_SUCCESS) {
yading@10 972 av_log(avctx, AV_LOG_ERROR,
yading@10 973 "CrystalHD: ProcInput failed: %u\n", ret);
yading@10 974 return -1;
yading@10 975 }
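/* has_b_frames doubles as the pipeline-length counter: incremented for each
 * packet fed in, decremented when a picture is returned to the caller. */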
yading@10 976 avctx->has_b_frames++;
yading@10 977 } else {
yading@10 978 av_log(avctx, AV_LOG_WARNING, "CrystalHD: Input buffer full\n");
yading@10 979 len = 0; // We didn't consume any bytes.
yading@10 980 }
yading@10 981 } else {
yading@10 982 av_log(avctx, AV_LOG_INFO, "CrystalHD: No more input data\n");
yading@10 983 }
yading@10 984
yading@10 985 if (priv->skip_next_output) {
yading@10 986 av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Skipping next output.\n");
yading@10 987 priv->skip_next_output = 0;
yading@10 988 avctx->has_b_frames--;
yading@10 989 return len;
yading@10 990 }
yading@10 991
yading@10 992 ret = DtsGetDriverStatus(dev, &decoder_status);
yading@10 993 if (ret != BC_STS_SUCCESS) {
yading@10 994 av_log(avctx, AV_LOG_ERROR, "CrystalHD: GetDriverStatus failed\n");
yading@10 995 return -1;
yading@10 996 }
yading@10 997
yading@10 998 /*
yading@10 999 * No frames ready. Don't try to extract.
yading@10 1000 *
yading@10 1001 * Empirical testing shows that ReadyListCount can be a damn lie,
yading@10 1002 * and ProcOut still fails when count > 0. The same testing showed
yading@10 1003 * that two more iterations were needed before ProcOutput would
yading@10 1004 * succeed.
yading@10 1005 */
yading@10 1006 if (priv->output_ready < 2) {
yading@10 1007 if (decoder_status.ReadyListCount != 0)
yading@10 1008 priv->output_ready++;
yading@10 1009 usleep(BASE_WAIT);
yading@10 1010 av_log(avctx, AV_LOG_INFO, "CrystalHD: Filling pipeline.\n");
yading@10 1011 return len;
yading@10 1012 } else if (decoder_status.ReadyListCount == 0) {
yading@10 1013 /*
yading@10 1014 * After the pipeline is established, if we encounter a lack of frames
yading@10 1015 * that probably means we're not giving the hardware enough time to
yading@10 1016 * decode them, so start increasing the wait time at the end of a
yading@10 1017 * decode call.
yading@10 1018 */
yading@10 1019 usleep(BASE_WAIT);
yading@10 1020 priv->decode_wait += WAIT_UNIT;
yading@10 1021 av_log(avctx, AV_LOG_INFO, "CrystalHD: No frames ready. Returning\n");
yading@10 1022 return len;
yading@10 1023 }
yading@10 1024
yading@10 1025 do {
yading@10 1026 rec_ret = receive_frame(avctx, data, got_frame);
yading@10 1027 if (rec_ret == RET_OK && *got_frame == 0) {
yading@10 1028 /*
yading@10 1029 * This case is for when the encoded fields are stored
yading@10 1030 * separately and we get a separate avpkt for each one. To keep
yading@10 1031 * the pipeline stable, we should return nothing and wait for
yading@10 1032 * the next time round to grab the second field.
yading@10 1033 * H.264 PAFF is an example of this.
yading@10 1034 */
yading@10 1035 av_log(avctx, AV_LOG_VERBOSE, "Returning after first field.\n");
yading@10 1036 avctx->has_b_frames--;
yading@10 1037 } else if (rec_ret == RET_COPY_NEXT_FIELD) {
yading@10 1038 /*
yading@10 1039 * This case is for when the encoded fields are stored in a
yading@10 1040 * single avpkt but the hardware returns them separately. Unless
yading@10 1041 * we grab the second field before returning, we'll slip another
yading@10 1042 * frame in the pipeline and if that happens a lot, we're sunk.
yading@10 1043 * So we have to get that second field now.
yading@10 1044 * Interlaced mpeg2 and vc1 are examples of this.
yading@10 1045 */
yading@10 1046 av_log(avctx, AV_LOG_VERBOSE, "Trying to get second field.\n");
yading@10 1047 while (1) {
yading@10 1048 usleep(priv->decode_wait);
yading@10 1049 ret = DtsGetDriverStatus(dev, &decoder_status);
yading@10 1050 if (ret == BC_STS_SUCCESS &&
yading@10 1051 decoder_status.ReadyListCount > 0) {
yading@10 1052 rec_ret = receive_frame(avctx, data, got_frame);
yading@10 1053 if ((rec_ret == RET_OK && *got_frame > 0) ||
yading@10 1054 rec_ret == RET_ERROR)
yading@10 1055 break;
yading@10 1056 }
yading@10 1057 }
yading@10 1058 av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Got second field.\n");
yading@10 1059 } else if (rec_ret == RET_SKIP_NEXT_COPY) {
yading@10 1060 /*
yading@10 1061 * Two input packets got turned into a field pair. Gawd.
yading@10 1062 */
yading@10 1063 av_log(avctx, AV_LOG_VERBOSE,
yading@10 1064 "Don't output on next decode call.\n");
yading@10 1065 priv->skip_next_output = 1;
yading@10 1066 }
yading@10 1067 /*
yading@10 1068 * If rec_ret == RET_COPY_AGAIN, that means that either we just handled
yading@10 1069 * a FMT_CHANGE event and need to go around again for the actual frame,
yading@10 1070 * we got a busy status and need to try again, or we're dealing with
yading@10 1071 * packed b-frames, where the hardware strangely returns the packed
yading@10 1072 * p-frame twice. We choose to keep the second copy as it carries the
yading@10 1073 * valid pts.
yading@10 1074 */
yading@10 1075 } while (rec_ret == RET_COPY_AGAIN);
yading@10 1076 usleep(priv->decode_wait);
yading@10 1077 return len;
yading@10 1078 }
yading@10 1079
yading@10 1080
yading@10 1081 #if CONFIG_H264_CRYSTALHD_DECODER
yading@10 1082 static AVClass h264_class = {
yading@10 1083 "h264_crystalhd",
yading@10 1084 av_default_item_name,
yading@10 1085 options,
yading@10 1086 LIBAVUTIL_VERSION_INT,
yading@10 1087 };
yading@10 1088
yading@10 1089 AVCodec ff_h264_crystalhd_decoder = {
yading@10 1090 .name = "h264_crystalhd",
yading@10 1091 .type = AVMEDIA_TYPE_VIDEO,
yading@10 1092 .id = AV_CODEC_ID_H264,
yading@10 1093 .priv_data_size = sizeof(CHDContext),
yading@10 1094 .init = init,
yading@10 1095 .close = uninit,
yading@10 1096 .decode = decode,
yading@10 1097 .capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
yading@10 1098 .flush = flush,
yading@10 1099 .long_name = NULL_IF_CONFIG_SMALL("H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (CrystalHD acceleration)"),
yading@10 1100 .pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE},
yading@10 1101 .priv_class = &h264_class,
yading@10 1102 };
yading@10 1103 #endif
yading@10 1104
yading@10 1105 #if CONFIG_MPEG2_CRYSTALHD_DECODER
yading@10 1106 static AVClass mpeg2_class = {
yading@10 1107 "mpeg2_crystalhd",
yading@10 1108 av_default_item_name,
yading@10 1109 options,
yading@10 1110 LIBAVUTIL_VERSION_INT,
yading@10 1111 };
yading@10 1112
yading@10 1113 AVCodec ff_mpeg2_crystalhd_decoder = {
yading@10 1114 .name = "mpeg2_crystalhd",
yading@10 1115 .type = AVMEDIA_TYPE_VIDEO,
yading@10 1116 .id = AV_CODEC_ID_MPEG2VIDEO,
yading@10 1117 .priv_data_size = sizeof(CHDContext),
yading@10 1118 .init = init,
yading@10 1119 .close = uninit,
yading@10 1120 .decode = decode,
yading@10 1121 .capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
yading@10 1122 .flush = flush,
yading@10 1123 .long_name = NULL_IF_CONFIG_SMALL("MPEG-2 Video (CrystalHD acceleration)"),
yading@10 1124 .pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE},
yading@10 1125 .priv_class = &mpeg2_class,
yading@10 1126 };
yading@10 1127 #endif
yading@10 1128
yading@10 1129 #if CONFIG_MPEG4_CRYSTALHD_DECODER
yading@10 1130 static AVClass mpeg4_class = {
yading@10 1131 "mpeg4_crystalhd",
yading@10 1132 av_default_item_name,
yading@10 1133 options,
yading@10 1134 LIBAVUTIL_VERSION_INT,
yading@10 1135 };
yading@10 1136
yading@10 1137 AVCodec ff_mpeg4_crystalhd_decoder = {
yading@10 1138 .name = "mpeg4_crystalhd",
yading@10 1139 .type = AVMEDIA_TYPE_VIDEO,
yading@10 1140 .id = AV_CODEC_ID_MPEG4,
yading@10 1141 .priv_data_size = sizeof(CHDContext),
yading@10 1142 .init = init,
yading@10 1143 .close = uninit,
yading@10 1144 .decode = decode,
yading@10 1145 .capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
yading@10 1146 .flush = flush,
yading@10 1147 .long_name = NULL_IF_CONFIG_SMALL("MPEG-4 Part 2 (CrystalHD acceleration)"),
yading@10 1148 .pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE},
yading@10 1149 .priv_class = &mpeg4_class,
yading@10 1150 };
yading@10 1151 #endif
yading@10 1152
yading@10 1153 #if CONFIG_MSMPEG4_CRYSTALHD_DECODER
yading@10 1154 static AVClass msmpeg4_class = {
yading@10 1155 "msmpeg4_crystalhd",
yading@10 1156 av_default_item_name,
yading@10 1157 options,
yading@10 1158 LIBAVUTIL_VERSION_INT,
yading@10 1159 };
yading@10 1160
yading@10 1161 AVCodec ff_msmpeg4_crystalhd_decoder = {
yading@10 1162 .name = "msmpeg4_crystalhd",
yading@10 1163 .type = AVMEDIA_TYPE_VIDEO,
yading@10 1164 .id = AV_CODEC_ID_MSMPEG4V3,
yading@10 1165 .priv_data_size = sizeof(CHDContext),
yading@10 1166 .init = init,
yading@10 1167 .close = uninit,
yading@10 1168 .decode = decode,
yading@10 1169 .capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_EXPERIMENTAL,
yading@10 1170 .flush = flush,
yading@10 1171 .long_name = NULL_IF_CONFIG_SMALL("MPEG-4 Part 2 Microsoft variant version 3 (CrystalHD acceleration)"),
yading@10 1172 .pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE},
yading@10 1173 .priv_class = &msmpeg4_class,
yading@10 1174 };
yading@10 1175 #endif
yading@10 1176
yading@10 1177 #if CONFIG_VC1_CRYSTALHD_DECODER
yading@10 1178 static AVClass vc1_class = {
yading@10 1179 "vc1_crystalhd",
yading@10 1180 av_default_item_name,
yading@10 1181 options,
yading@10 1182 LIBAVUTIL_VERSION_INT,
yading@10 1183 };
yading@10 1184
yading@10 1185 AVCodec ff_vc1_crystalhd_decoder = {
yading@10 1186 .name = "vc1_crystalhd",
yading@10 1187 .type = AVMEDIA_TYPE_VIDEO,
yading@10 1188 .id = AV_CODEC_ID_VC1,
yading@10 1189 .priv_data_size = sizeof(CHDContext),
yading@10 1190 .init = init,
yading@10 1191 .close = uninit,
yading@10 1192 .decode = decode,
yading@10 1193 .capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
yading@10 1194 .flush = flush,
yading@10 1195 .long_name = NULL_IF_CONFIG_SMALL("SMPTE VC-1 (CrystalHD acceleration)"),
yading@10 1196 .pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE},
yading@10 1197 .priv_class = &vc1_class,
yading@10 1198 };
yading@10 1199 #endif
yading@10 1200
yading@10 1201 #if CONFIG_WMV3_CRYSTALHD_DECODER
yading@10 1202 static AVClass wmv3_class = {
yading@10 1203 "wmv3_crystalhd",
yading@10 1204 av_default_item_name,
yading@10 1205 options,
yading@10 1206 LIBAVUTIL_VERSION_INT,
yading@10 1207 };
yading@10 1208
yading@10 1209 AVCodec ff_wmv3_crystalhd_decoder = {
yading@10 1210 .name = "wmv3_crystalhd",
yading@10 1211 .type = AVMEDIA_TYPE_VIDEO,
yading@10 1212 .id = AV_CODEC_ID_WMV3,
yading@10 1213 .priv_data_size = sizeof(CHDContext),
yading@10 1214 .init = init,
yading@10 1215 .close = uninit,
yading@10 1216 .decode = decode,
yading@10 1217 .capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
yading@10 1218 .flush = flush,
yading@10 1219 .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 (CrystalHD acceleration)"),
yading@10 1220 .pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE},
yading@10 1221 .priv_class = &wmv3_class,
yading@10 1222 };
yading@10 1223 #endif