ffmpeg/libavcodec/utvideoenc.c

/*
 * Ut Video encoder
 * Copyright (c) 2012 Jan Ekström
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Ut Video encoder
 */

#include "libavutil/intreadwrite.h"
#include "avcodec.h"
#include "internal.h"
#include "bytestream.h"
#include "put_bits.h"
#include "dsputil.h"
#include "mathops.h"
#include "utvideo.h"
#include "huffman.h"

/* Compare huffentry symbols */
static int huff_cmp_sym(const void *a, const void *b)
{
    const HuffEntry *aa = a, *bb = b;
    return aa->sym - bb->sym;
}

static av_cold int utvideo_encode_close(AVCodecContext *avctx)
{
    UtvideoContext *c = avctx->priv_data;
    int i;

    av_freep(&avctx->coded_frame);
    av_freep(&c->slice_bits);
    for (i = 0; i < 4; i++)
        av_freep(&c->slice_buffer[i]);

    return 0;
}

static av_cold int utvideo_encode_init(AVCodecContext *avctx)
{
    UtvideoContext *c = avctx->priv_data;
    int i;
    uint32_t original_format;

    c->avctx           = avctx;
    c->frame_info_size = 4;
    c->slice_stride    = FFALIGN(avctx->width, 32);

    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGB24:
        c->planes        = 3;
        avctx->codec_tag = MKTAG('U', 'L', 'R', 'G');
        original_format  = UTVIDEO_RGB;
        break;
    case AV_PIX_FMT_RGBA:
        c->planes        = 4;
        avctx->codec_tag = MKTAG('U', 'L', 'R', 'A');
        original_format  = UTVIDEO_RGBA;
        break;
    case AV_PIX_FMT_YUV420P:
        if (avctx->width & 1 || avctx->height & 1) {
            av_log(avctx, AV_LOG_ERROR,
                   "4:2:0 video requires even width and height.\n");
            return AVERROR_INVALIDDATA;
        }
        c->planes        = 3;
        avctx->codec_tag = MKTAG('U', 'L', 'Y', '0');
        original_format  = UTVIDEO_420;
        break;
    case AV_PIX_FMT_YUV422P:
        if (avctx->width & 1) {
            av_log(avctx, AV_LOG_ERROR,
                   "4:2:2 video requires even width.\n");
            return AVERROR_INVALIDDATA;
        }
        c->planes        = 3;
        avctx->codec_tag = MKTAG('U', 'L', 'Y', '2');
        original_format  = UTVIDEO_422;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown pixel format: %d\n",
               avctx->pix_fmt);
        return AVERROR_INVALIDDATA;
    }

    ff_dsputil_init(&c->dsp, avctx);

    /* Check the prediction method, and error out if unsupported */
    if (avctx->prediction_method < 0 || avctx->prediction_method > 4) {
        av_log(avctx, AV_LOG_WARNING,
               "Prediction method %d is not supported in Ut Video.\n",
               avctx->prediction_method);
        return AVERROR_OPTION_NOT_FOUND;
    }

    if (avctx->prediction_method == FF_PRED_PLANE) {
        av_log(avctx, AV_LOG_ERROR,
               "Plane prediction is not supported in Ut Video.\n");
        return AVERROR_OPTION_NOT_FOUND;
    }

    /* Convert from libavcodec prediction type to Ut Video's */
    c->frame_pred = ff_ut_pred_order[avctx->prediction_method];

    if (c->frame_pred == PRED_GRADIENT) {
        av_log(avctx, AV_LOG_ERROR, "Gradient prediction is not supported.\n");
        return AVERROR_OPTION_NOT_FOUND;
    }

    avctx->coded_frame = avcodec_alloc_frame();

    if (!avctx->coded_frame) {
        av_log(avctx, AV_LOG_ERROR, "Could not allocate frame.\n");
        utvideo_encode_close(avctx);
        return AVERROR(ENOMEM);
    }

    /* extradata size is 4 * 32bit */
    avctx->extradata_size = 16;

    avctx->extradata = av_mallocz(avctx->extradata_size +
                                  FF_INPUT_BUFFER_PADDING_SIZE);

    if (!avctx->extradata) {
        av_log(avctx, AV_LOG_ERROR, "Could not allocate extradata.\n");
        utvideo_encode_close(avctx);
        return AVERROR(ENOMEM);
    }

    for (i = 0; i < c->planes; i++) {
        c->slice_buffer[i] = av_malloc(c->slice_stride * (avctx->height + 2) +
                                       FF_INPUT_BUFFER_PADDING_SIZE);
        if (!c->slice_buffer[i]) {
            av_log(avctx, AV_LOG_ERROR, "Cannot allocate temporary buffer 1.\n");
            utvideo_encode_close(avctx);
            return AVERROR(ENOMEM);
        }
    }

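    /*
     * The extradata consists of four 32-bit fields: encoder version,
     * original pixel format, frame info size and the encoding flags,
     * written below in that order.
     */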
    /*
     * Set the version of the encoder.
     * Last byte is "implementation ID", which is
     * obtained from the creator of the format.
     * Libavcodec has been assigned with the ID 0xF0.
     */
    AV_WB32(avctx->extradata, MKTAG(1, 0, 0, 0xF0));

    /*
     * Set the "original format"
     * Not used for anything during decoding.
     */
    AV_WL32(avctx->extradata + 4, original_format);

    /* Write 4 as the 'frame info size' */
    AV_WL32(avctx->extradata + 8, c->frame_info_size);

    /*
     * Set how many slices are going to be used.
     * Set one slice for now.
     */
    c->slices = 1;

    /* Set compression mode */
    c->compression = COMP_HUFF;

    /*
     * Set the encoding flags:
     * - Slice count minus 1
     * - Interlaced encoding mode flag, set to zero for now.
     * - Compression mode (none/huff)
     * And write the flags.
     */
    c->flags  = (c->slices - 1) << 24;
    c->flags |= 0 << 11; // bit field to signal interlaced encoding mode
    c->flags |= c->compression;

    AV_WL32(avctx->extradata + 12, c->flags);

    return 0;
}

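/*
 * Convert packed RGB(A) input into Ut Video's pseudo-planar layout:
 * plane 0 holds G as-is, planes 1 and 2 hold B and R as differences
 * from G offset by 0x80, and plane 3 (if present) holds alpha untouched.
 * The output starts two lines into each slice buffer (k = 2 * dst_stride),
 * which is the offset encode_plane() later reads the mangled data from.
 */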
static void mangle_rgb_planes(uint8_t *dst[4], int dst_stride, uint8_t *src,
                              int step, int stride, int width, int height)
{
    int i, j;
    int k = 2 * dst_stride;
    unsigned int g;

    for (j = 0; j < height; j++) {
        if (step == 3) {
            for (i = 0; i < width * step; i += step) {
                g         = src[i + 1];
                dst[0][k] = g;
                g        += 0x80;
                dst[1][k] = src[i + 2] - g;
                dst[2][k] = src[i + 0] - g;
                k++;
            }
        } else {
            for (i = 0; i < width * step; i += step) {
                g         = src[i + 1];
                dst[0][k] = g;
                g        += 0x80;
                dst[1][k] = src[i + 2] - g;
                dst[2][k] = src[i + 0] - g;
                dst[3][k] = src[i + 3];
                k++;
            }
        }
        k   += dst_stride - width;
        src += stride;
    }
}

/* Write data to a plane, no prediction applied */
static void write_plane(uint8_t *src, uint8_t *dst, int stride,
                        int width, int height)
{
    int i, j;

    for (j = 0; j < height; j++) {
        for (i = 0; i < width; i++)
            *dst++ = src[i];

        src += stride;
    }
}

/* Write data to a plane with left prediction */
static void left_predict(uint8_t *src, uint8_t *dst, int stride,
                         int width, int height)
{
    int i, j;
    uint8_t prev;

    prev = 0x80; /* Set the initial value */
    for (j = 0; j < height; j++) {
        for (i = 0; i < width; i++) {
            *dst++ = src[i] - prev;
            prev   = src[i];
        }
        src += stride;
    }
}

/* Write data to a plane with median prediction */
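/*
 * The predictor used below is the HuffYUV-style median of the left,
 * top and left + top - topleft neighbours, as computed by
 * dsp.sub_hfyu_median_prediction().
 */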
static void median_predict(UtvideoContext *c, uint8_t *src, uint8_t *dst, int stride,
                           int width, int height)
{
    int i, j;
    int A, B;
    uint8_t prev;

    /* First line uses left neighbour prediction */
    prev = 0x80; /* Set the initial value */
    for (i = 0; i < width; i++) {
        *dst++ = src[i] - prev;
        prev   = src[i];
    }

    if (height == 1)
        return;

    src += stride;

    /*
     * Second line uses top prediction for the first sample,
     * and median for the rest.
     */
    A = B = 0;

    /* Rest of the coded part uses median prediction */
    for (j = 1; j < height; j++) {
        c->dsp.sub_hfyu_median_prediction(dst, src - stride, src, width, &A, &B);
        dst += width;
        src += stride;
    }
}

/* Count the usage of values in a plane */
static void count_usage(uint8_t *src, int width,
                        int height, uint64_t *counts)
{
    int i, j;

    for (j = 0; j < height; j++) {
        for (i = 0; i < width; i++) {
            counts[src[i]]++;
        }
        src += width;
    }
}

/* Calculate the actual huffman codes from the code lengths */
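/*
 * The codes are canonical: entries are first sorted by code length,
 * unused symbols (length 255) are skipped, and codes are assigned
 * starting from the longest length by accumulating left-aligned values
 * in a 32-bit word, each symbol keeping its top 'len' bits. The table
 * is then re-sorted by symbol so it can be indexed directly when encoding.
 */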
static void calculate_codes(HuffEntry *he)
{
    int last, i;
    uint32_t code;

    qsort(he, 256, sizeof(*he), ff_ut_huff_cmp_len);

    last = 255;
    while (he[last].len == 255 && last)
        last--;

    code = 1;
    for (i = last; i >= 0; i--) {
        he[i].code = code >> (32 - he[i].len);
        code      += 0x80000000u >> (he[i].len - 1);
    }

    qsort(he, 256, sizeof(*he), huff_cmp_sym);
}

/* Write huffman bit codes to a memory block */
static int write_huff_codes(uint8_t *src, uint8_t *dst, int dst_size,
                            int width, int height, HuffEntry *he)
{
    PutBitContext pb;
    int i, j;
    int count;

    init_put_bits(&pb, dst, dst_size);

    /* Write the codes */
    for (j = 0; j < height; j++) {
        for (i = 0; i < width; i++)
            put_bits(&pb, he[src[i]].len, he[src[i]].code);

        src += width;
    }

    /* Pad output to a 32bit boundary */
    count = put_bits_count(&pb) & 0x1F;

    if (count)
        put_bits(&pb, 32 - count, 0);

    /* Get the amount of bits written */
    count = put_bits_count(&pb);

    /* Flush the rest with zeroes */
    flush_put_bits(&pb);

    return count;
}

static int encode_plane(AVCodecContext *avctx, uint8_t *src,
                        uint8_t *dst, int stride,
                        int width, int height, PutByteContext *pb)
{
    UtvideoContext *c = avctx->priv_data;
    uint8_t  lengths[256];
    uint64_t counts[256] = { 0 };

    HuffEntry he[256];

    uint32_t offset = 0, slice_len = 0;
    int      i, sstart, send = 0;
    int      symbol;

    /* Do prediction / make planes */
    switch (c->frame_pred) {
    case PRED_NONE:
        for (i = 0; i < c->slices; i++) {
            sstart = send;
            send   = height * (i + 1) / c->slices;
            write_plane(src + sstart * stride, dst + sstart * width,
                        stride, width, send - sstart);
        }
        break;
    case PRED_LEFT:
        for (i = 0; i < c->slices; i++) {
            sstart = send;
            send   = height * (i + 1) / c->slices;
            left_predict(src + sstart * stride, dst + sstart * width,
                         stride, width, send - sstart);
        }
        break;
    case PRED_MEDIAN:
        for (i = 0; i < c->slices; i++) {
            sstart = send;
            send   = height * (i + 1) / c->slices;
            median_predict(c, src + sstart * stride, dst + sstart * width,
                           stride, width, send - sstart);
        }
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown prediction mode: %d\n",
               c->frame_pred);
        return AVERROR_OPTION_NOT_FOUND;
    }

    /* Count the usage of values */
    count_usage(dst, width, height, counts);

    /* Check for a special case where only one symbol was used */
    for (symbol = 0; symbol < 256; symbol++) {
        /* If non-zero count is found, see if it matches width * height */
        if (counts[symbol]) {
            /* Special case if only one symbol was used */
            if (counts[symbol] == width * (int64_t)height) {
                /*
                 * Write a zero for the single symbol
                 * used in the plane, else 0xFF.
                 */
                for (i = 0; i < 256; i++) {
                    if (i == symbol)
                        bytestream2_put_byte(pb, 0);
                    else
                        bytestream2_put_byte(pb, 0xFF);
                }

                /* Write zeroes for lengths */
                for (i = 0; i < c->slices; i++)
                    bytestream2_put_le32(pb, 0);

                /* And that's all for that plane folks */
                return 0;
            }
            break;
        }
    }

    /* Calculate huffman lengths */
    ff_huff_gen_len_table(lengths, counts);

    /*
     * Write the plane's header into the output packet:
     * - huffman code lengths (256 bytes)
     * - slice end offsets (gotten from the slice lengths)
     */
    for (i = 0; i < 256; i++) {
        bytestream2_put_byte(pb, lengths[i]);

        he[i].len = lengths[i];
        he[i].sym = i;
    }

    /* Calculate the huffman codes themselves */
    calculate_codes(he);

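    /*
     * The rest of the plane consists of one 32-bit little-endian slice
     * end offset per slice followed by the slices' bitstreams. Each loop
     * iteration below writes one offset, seeks forward to the slice's
     * final position to write its data, then seeks back to continue
     * the offset table.
     */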
    send = 0;
    for (i = 0; i < c->slices; i++) {
        sstart = send;
        send   = height * (i + 1) / c->slices;

        /*
         * Write the huffman codes to a buffer,
         * get the offset in bits and convert to bytes.
         */
        offset += write_huff_codes(dst + sstart * width, c->slice_bits,
                                   width * (send - sstart), width,
                                   send - sstart, he) >> 3;

        slice_len = offset - slice_len;

        /* Byteswap the written huffman codes */
        c->dsp.bswap_buf((uint32_t *) c->slice_bits,
                         (uint32_t *) c->slice_bits,
                         slice_len >> 2);

        /* Write the offset to the stream */
        bytestream2_put_le32(pb, offset);

        /* Seek to the data part of the packet */
        bytestream2_seek_p(pb, 4 * (c->slices - i - 1) +
                           offset - slice_len, SEEK_CUR);

        /* Write the slices' data into the output packet */
        bytestream2_put_buffer(pb, c->slice_bits, slice_len);

        /* Seek back to the slice offsets */
        bytestream2_seek_p(pb, -4 * (c->slices - i - 1) - offset,
                           SEEK_CUR);

        slice_len = offset;
    }

    /* And at the end seek to the end of written slice(s) */
    bytestream2_seek_p(pb, offset, SEEK_CUR);

    return 0;
}

static int utvideo_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                                const AVFrame *pic, int *got_packet)
{
    UtvideoContext *c = avctx->priv_data;
    PutByteContext pb;

    uint32_t frame_info;

    uint8_t *dst;

    int width = avctx->width, height = avctx->height;
    int i, ret = 0;

    /* Allocate a new packet if needed, and set it to the pointer dst */
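    /* The size estimate reserves, per plane, room for the 256-byte length
     * table, one 32-bit offset per slice and a plane's worth of data,
     * plus 4 bytes of frame info. */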
    ret = ff_alloc_packet2(avctx, pkt, (256 + 4 * c->slices + width * height) *
                           c->planes + 4);

    if (ret < 0)
        return ret;

    dst = pkt->data;

    bytestream2_init_writer(&pb, dst, pkt->size);

    av_fast_malloc(&c->slice_bits, &c->slice_bits_size,
                   width * height + FF_INPUT_BUFFER_PADDING_SIZE);

    if (!c->slice_bits) {
        av_log(avctx, AV_LOG_ERROR, "Cannot allocate temporary buffer 2.\n");
        return AVERROR(ENOMEM);
    }

    /* In case of RGB, mangle the planes to Ut Video's format */
    if (avctx->pix_fmt == AV_PIX_FMT_RGBA || avctx->pix_fmt == AV_PIX_FMT_RGB24)
        mangle_rgb_planes(c->slice_buffer, c->slice_stride, pic->data[0],
                          c->planes, pic->linesize[0], width, height);

    /* Deal with the planes */
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_RGBA:
        for (i = 0; i < c->planes; i++) {
            ret = encode_plane(avctx, c->slice_buffer[i] + 2 * c->slice_stride,
                               c->slice_buffer[i], c->slice_stride,
                               width, height, &pb);

            if (ret) {
                av_log(avctx, AV_LOG_ERROR, "Error encoding plane %d.\n", i);
                return ret;
            }
        }
        break;
    case AV_PIX_FMT_YUV422P:
        for (i = 0; i < c->planes; i++) {
            ret = encode_plane(avctx, pic->data[i], c->slice_buffer[0],
                               pic->linesize[i], width >> !!i, height, &pb);

            if (ret) {
                av_log(avctx, AV_LOG_ERROR, "Error encoding plane %d.\n", i);
                return ret;
            }
        }
        break;
    case AV_PIX_FMT_YUV420P:
        for (i = 0; i < c->planes; i++) {
            ret = encode_plane(avctx, pic->data[i], c->slice_buffer[0],
                               pic->linesize[i], width >> !!i, height >> !!i,
                               &pb);

            if (ret) {
                av_log(avctx, AV_LOG_ERROR, "Error encoding plane %d.\n", i);
                return ret;
            }
        }
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown pixel format: %d\n",
               avctx->pix_fmt);
        return AVERROR_INVALIDDATA;
    }

    /*
     * Write frame information (LE 32bit unsigned)
     * into the output packet.
     * Contains the prediction method.
     */
    frame_info = c->frame_pred << 8;
    bytestream2_put_le32(&pb, frame_info);

    /*
     * At least currently Ut Video is IDR only.
     * Set flags accordingly.
     */
    avctx->coded_frame->key_frame = 1;
    avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;

    pkt->size   = bytestream2_tell_p(&pb);
    pkt->flags |= AV_PKT_FLAG_KEY;

    /* Packet should be done */
    *got_packet = 1;

    return 0;
}

AVCodec ff_utvideo_encoder = {
    .name           = "utvideo",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_UTVIDEO,
    .priv_data_size = sizeof(UtvideoContext),
    .init           = utvideo_encode_init,
    .encode2        = utvideo_encode_frame,
    .close          = utvideo_encode_close,
    .pix_fmts       = (const enum AVPixelFormat[]) {
                          AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV422P,
                          AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE
                      },
    .long_name      = NULL_IF_CONFIG_SMALL("Ut Video"),
};