/*
 * Flash Compatible Streaming Format muxer
 * Copyright (c) 2000 Fabrice Bellard
 * Copyright (c) 2003 Tinic Uro
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavcodec/put_bits.h"
#include "libavutil/avassert.h"
#include "avformat.h"
#include "swf.h"

static void put_swf_tag(AVFormatContext *s, int tag)
{
    SWFContext *swf = s->priv_data;
    AVIOContext *pb = s->pb;

    swf->tag_pos = avio_tell(pb);
    swf->tag = tag;
    /* reserve some room for the tag */
    if (tag & TAG_LONG) {
        avio_wl16(pb, 0);
        avio_wl32(pb, 0);
    } else {
        avio_wl16(pb, 0);
    }
}

static void put_swf_end_tag(AVFormatContext *s)
{
    SWFContext *swf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos;
    int tag_len, tag;

    pos = avio_tell(pb);
    tag_len = pos - swf->tag_pos - 2;
    tag = swf->tag;
    avio_seek(pb, swf->tag_pos, SEEK_SET);
    if (tag & TAG_LONG) {
        tag &= ~TAG_LONG;
        avio_wl16(pb, (tag << 6) | 0x3f);
        avio_wl32(pb, tag_len - 4);
    } else {
        av_assert0(tag_len < 0x3f);
        avio_wl16(pb, (tag << 6) | tag_len);
    }
    avio_seek(pb, pos, SEEK_SET);
}

static inline void max_nbits(int *nbits_ptr, int val)
{
    int n;

    if (val == 0)
        return;
    val = abs(val);
    n = 1;
    while (val != 0) {
        n++;
        val >>= 1;
    }
    if (n > *nbits_ptr)
        *nbits_ptr = n;
}

static void put_swf_rect(AVIOContext *pb,
                         int xmin, int xmax, int ymin, int ymax)
{
    PutBitContext p;
    uint8_t buf[256];
    int nbits, mask;

    init_put_bits(&p, buf, sizeof(buf));

    nbits = 0;
    max_nbits(&nbits, xmin);
    max_nbits(&nbits, xmax);
    max_nbits(&nbits, ymin);
    max_nbits(&nbits, ymax);
    mask = (1 << nbits) - 1;

    /* rectangle info */
    put_bits(&p, 5, nbits);
    put_bits(&p, nbits, xmin & mask);
    put_bits(&p, nbits, xmax & mask);
    put_bits(&p, nbits, ymin & mask);
    put_bits(&p, nbits, ymax & mask);

    flush_put_bits(&p);
    avio_write(pb, buf, put_bits_ptr(&p) - p.buf);
}
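
/*
 * Write one straight-edge shape record into an open PutBitContext: two flag
 * bits (edge record, straight edge), a 4-bit field holding nbits - 2, then
 * either a general line (dx and dy) or an axis-aligned line carrying only
 * the non-zero delta.
 */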

static void put_swf_line_edge(PutBitContext *pb, int dx, int dy)
{
    int nbits, mask;

    put_bits(pb, 1, 1); /* edge */
    put_bits(pb, 1, 1); /* line select */
    nbits = 2;
    max_nbits(&nbits, dx);
    max_nbits(&nbits, dy);

    mask = (1 << nbits) - 1;
    put_bits(pb, 4, nbits - 2); /* 16 bits precision */
    if (dx == 0) {
        put_bits(pb, 1, 0);
        put_bits(pb, 1, 1);
        put_bits(pb, nbits, dy & mask);
    } else if (dy == 0) {
        put_bits(pb, 1, 0);
        put_bits(pb, 1, 0);
        put_bits(pb, nbits, dx & mask);
    } else {
        put_bits(pb, 1, 1);
        put_bits(pb, nbits, dx & mask);
        put_bits(pb, nbits, dy & mask);
    }
}

#define FRAC_BITS 16

static void put_swf_matrix(AVIOContext *pb,
                           int a, int b, int c, int d, int tx, int ty)
{
    PutBitContext p;
    uint8_t buf[256];
    int nbits;

    init_put_bits(&p, buf, sizeof(buf));

    put_bits(&p, 1, 1); /* a, d present */
    nbits = 1;
    max_nbits(&nbits, a);
    max_nbits(&nbits, d);
    put_bits(&p, 5, nbits); /* nb bits */
    put_bits(&p, nbits, a);
    put_bits(&p, nbits, d);

    put_bits(&p, 1, 1); /* b, c present */
    nbits = 1;
    max_nbits(&nbits, c);
    max_nbits(&nbits, b);
    put_bits(&p, 5, nbits); /* nb bits */
    put_bits(&p, nbits, c);
    put_bits(&p, nbits, b);

    nbits = 1;
    max_nbits(&nbits, tx);
    max_nbits(&nbits, ty);
    put_bits(&p, 5, nbits); /* nb bits */
    put_bits(&p, nbits, tx);
    put_bits(&p, nbits, ty);

    flush_put_bits(&p);
    avio_write(pb, buf, put_bits_ptr(&p) - p.buf);
}
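
/*
 * Write the SWF file header and the per-stream setup tags: the "FWS"
 * signature, a version byte chosen from the muxer name and the selected
 * codecs, a dummy file size, the stage rectangle in twips (pixels * 20),
 * the frame rate as 8.8 fixed point and a dummy frame count.  The
 * placeholders written here are patched in swf_write_trailer() when the
 * output is seekable.
 */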

static int swf_write_header(AVFormatContext *s)
{
    SWFContext *swf = s->priv_data;
    AVIOContext *pb = s->pb;
    PutBitContext p;
    uint8_t buf1[256];
    int i, width, height, rate, rate_base;
    int version;

    swf->sound_samples = 0;
    swf->swf_frame_number = 0;
    swf->video_frame_number = 0;

    for(i=0;i<s->nb_streams;i++) {
        AVCodecContext *enc = s->streams[i]->codec;
        if (enc->codec_type == AVMEDIA_TYPE_AUDIO) {
            if (swf->audio_enc) {
                av_log(s, AV_LOG_ERROR, "SWF muxer only supports 1 audio stream\n");
                return AVERROR_INVALIDDATA;
            }
            if (enc->codec_id == AV_CODEC_ID_MP3) {
                if (!enc->frame_size) {
                    av_log(s, AV_LOG_ERROR, "audio frame size not set\n");
                    return -1;
                }
                swf->audio_enc = enc;
                swf->audio_fifo = av_fifo_alloc(AUDIO_FIFO_SIZE);
                if (!swf->audio_fifo)
                    return AVERROR(ENOMEM);
            } else {
                av_log(s, AV_LOG_ERROR, "SWF muxer only supports MP3\n");
                return -1;
            }
        } else {
            if (swf->video_enc) {
                av_log(s, AV_LOG_ERROR, "SWF muxer only supports 1 video stream\n");
                return AVERROR_INVALIDDATA;
            }
            if (enc->codec_id == AV_CODEC_ID_VP6F ||
                enc->codec_id == AV_CODEC_ID_FLV1 ||
                enc->codec_id == AV_CODEC_ID_MJPEG) {
                swf->video_enc = enc;
            } else {
                av_log(s, AV_LOG_ERROR, "SWF muxer only supports VP6, FLV1 and MJPEG\n");
                return -1;
            }
        }
    }

    if (!swf->video_enc) {
        /* currently, cannot work correctly if audio only */
        width = 320;
        height = 200;
        rate = 10;
        rate_base = 1;
    } else {
        width = swf->video_enc->width;
        height = swf->video_enc->height;
        rate = swf->video_enc->time_base.den;
        rate_base = swf->video_enc->time_base.num;
    }

    if (!swf->audio_enc)
        swf->samples_per_frame = (44100. * rate_base) / rate;
    else
        swf->samples_per_frame = (swf->audio_enc->sample_rate * rate_base) / rate;

    avio_write(pb, "FWS", 3);

    if (!strcmp("avm2", s->oformat->name))
        version = 9;
    else if (swf->video_enc && swf->video_enc->codec_id == AV_CODEC_ID_VP6F)
        version = 8; /* version 8 and above support VP6 codec */
    else if (swf->video_enc && swf->video_enc->codec_id == AV_CODEC_ID_FLV1)
        version = 6; /* version 6 and above support FLV1 codec */
    else
        version = 4; /* version 4 for mpeg audio support */
    avio_w8(pb, version);

    avio_wl32(pb, DUMMY_FILE_SIZE); /* dummy size (will be patched if not streamed) */

    put_swf_rect(pb, 0, width * 20, 0, height * 20);
    avio_wl16(pb, (rate * 256) / rate_base); /* frame rate */
    swf->duration_pos = avio_tell(pb);
    avio_wl16(pb, (uint16_t)(DUMMY_DURATION * (int64_t)rate / rate_base)); /* frame count */

    /* avm2/swf v9 (also v8?) files require a file attribute tag */
    if (version == 9) {
        put_swf_tag(s, TAG_FILEATTRIBUTES);
        avio_wl32(pb, 1<<3); /* set ActionScript v3/AVM2 flag */
        put_swf_end_tag(s);
    }
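
    /* For MJPEG the picture is shown through a DefineShape tag: a rectangle
     * filled with a clipped bitmap (BITMAP_ID).  swf_write_video() then
     * replaces that bitmap with a fresh JPEG for every frame, and the shape
     * is scaled from pixels to twips (factor 20) by its placement matrix. */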
    /* define a shape with the jpeg inside */
    if (swf->video_enc && swf->video_enc->codec_id == AV_CODEC_ID_MJPEG) {
        put_swf_tag(s, TAG_DEFINESHAPE);

        avio_wl16(pb, SHAPE_ID); /* ID of shape */
        /* bounding rectangle */
        put_swf_rect(pb, 0, width, 0, height);
        /* style info */
        avio_w8(pb, 1);           /* one fill style */
        avio_w8(pb, 0x41);        /* clipped bitmap fill */
        avio_wl16(pb, BITMAP_ID); /* bitmap ID */
        /* position of the bitmap */
        put_swf_matrix(pb, (int)(1.0 * (1 << FRAC_BITS)), 0,
                       0, (int)(1.0 * (1 << FRAC_BITS)), 0, 0);
        avio_w8(pb, 0);           /* no line style */

        /* shape drawing */
        init_put_bits(&p, buf1, sizeof(buf1));
        put_bits(&p, 4, 1); /* one fill bit */
        put_bits(&p, 4, 0); /* zero line bit */

        put_bits(&p, 1, 0); /* not an edge */
        put_bits(&p, 5, FLAG_MOVETO | FLAG_SETFILL0);
        put_bits(&p, 5, 1); /* nbits */
        put_bits(&p, 1, 0); /* X */
        put_bits(&p, 1, 0); /* Y */
        put_bits(&p, 1, 1); /* set fill style 1 */

        /* draw the rectangle! */
        put_swf_line_edge(&p, width, 0);
        put_swf_line_edge(&p, 0, height);
        put_swf_line_edge(&p, -width, 0);
        put_swf_line_edge(&p, 0, -height);

        /* end of shape */
        put_bits(&p, 1, 0); /* not an edge */
        put_bits(&p, 5, 0);

        flush_put_bits(&p);
        avio_write(pb, buf1, put_bits_ptr(&p) - p.buf);

        put_swf_end_tag(s);
    }

    if (swf->audio_enc && swf->audio_enc->codec_id == AV_CODEC_ID_MP3) {
        int v = 0;

        /* start sound */
        put_swf_tag(s, TAG_STREAMHEAD2);
        switch(swf->audio_enc->sample_rate) {
        case 11025: v |= 1 << 2; break;
        case 22050: v |= 2 << 2; break;
        case 44100: v |= 3 << 2; break;
        default:
            /* not supported */
            av_log(s, AV_LOG_ERROR, "swf does not support that sample rate, choose from (44100, 22050, 11025).\n");
            return -1;
        }
        v |= 0x02; /* 16 bit playback */
        if (swf->audio_enc->channels == 2)
            v |= 0x01; /* stereo playback */
        avio_w8(s->pb, v);
        v |= 0x20; /* mp3 compressed */
        avio_w8(s->pb, v);
        avio_wl16(s->pb, swf->samples_per_frame); /* avg samples per frame */
        avio_wl16(s->pb, 0);

        put_swf_end_tag(s);
    }

    avio_flush(s->pb);
    return 0;
}
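
/*
 * Write one video frame.  VP6F/FLV1 packets go into a TAG_VIDEOSTREAM object
 * (defined and placed on the first frame, then updated with TAG_PLACEOBJECT2)
 * followed by a long TAG_VIDEOFRAME; MJPEG packets replace the bitmap filling
 * the shape defined in swf_write_header().  Any buffered MP3 data is flushed
 * as a TAG_STREAMBLOCK just before TAG_SHOWFRAME, and the function is also
 * called with a NULL buffer to generate frames for audio-only files.
 */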

static int swf_write_video(AVFormatContext *s,
                           AVCodecContext *enc, const uint8_t *buf, int size)
{
    SWFContext *swf = s->priv_data;
    AVIOContext *pb = s->pb;

    /* Flash Player limit */
    if (swf->swf_frame_number == 16000)
        av_log(enc, AV_LOG_INFO, "warning: Flash Player limit of 16000 frames reached\n");

    if (enc->codec_id == AV_CODEC_ID_VP6F ||
        enc->codec_id == AV_CODEC_ID_FLV1) {
        if (swf->video_frame_number == 0) {
            /* create a new video object */
            put_swf_tag(s, TAG_VIDEOSTREAM);
            avio_wl16(pb, VIDEO_ID);
            swf->vframes_pos = avio_tell(pb);
            avio_wl16(pb, 15000); /* hard flash player limit */
            avio_wl16(pb, enc->width);
            avio_wl16(pb, enc->height);
            avio_w8(pb, 0);
            avio_w8(pb, ff_codec_get_tag(ff_swf_codec_tags, enc->codec_id));
            put_swf_end_tag(s);

            /* place the video object for the first time */
            put_swf_tag(s, TAG_PLACEOBJECT2);
            avio_w8(pb, 0x36);
            avio_wl16(pb, 1);
            avio_wl16(pb, VIDEO_ID);
            put_swf_matrix(pb, 1 << FRAC_BITS, 0, 0, 1 << FRAC_BITS, 0, 0);
            avio_wl16(pb, swf->video_frame_number);
            avio_write(pb, "video", 5);
            avio_w8(pb, 0x00);
            put_swf_end_tag(s);
        } else {
            /* mark the character for update */
            put_swf_tag(s, TAG_PLACEOBJECT2);
            avio_w8(pb, 0x11);
            avio_wl16(pb, 1);
            avio_wl16(pb, swf->video_frame_number);
            put_swf_end_tag(s);
        }

        /* set video frame data */
        put_swf_tag(s, TAG_VIDEOFRAME | TAG_LONG);
        avio_wl16(pb, VIDEO_ID);
        avio_wl16(pb, swf->video_frame_number++);
        avio_write(pb, buf, size);
        put_swf_end_tag(s);
    } else if (enc->codec_id == AV_CODEC_ID_MJPEG) {
        if (swf->swf_frame_number > 0) {
            /* remove the shape */
            put_swf_tag(s, TAG_REMOVEOBJECT);
            avio_wl16(pb, SHAPE_ID); /* shape ID */
            avio_wl16(pb, 1); /* depth */
            put_swf_end_tag(s);

            /* free the bitmap */
            put_swf_tag(s, TAG_FREECHARACTER);
            avio_wl16(pb, BITMAP_ID);
            put_swf_end_tag(s);
        }

        put_swf_tag(s, TAG_JPEG2 | TAG_LONG);

        avio_wl16(pb, BITMAP_ID); /* ID of the image */

        /* a dummy jpeg header seems to be required */
        avio_wb32(pb, 0xffd8ffd9);
        /* write the jpeg image */
        avio_write(pb, buf, size);

        put_swf_end_tag(s);

        /* draw the shape */

        put_swf_tag(s, TAG_PLACEOBJECT);
        avio_wl16(pb, SHAPE_ID); /* shape ID */
        avio_wl16(pb, 1); /* depth */
        put_swf_matrix(pb, 20 << FRAC_BITS, 0, 0, 20 << FRAC_BITS, 0, 0);
        put_swf_end_tag(s);
    }

    swf->swf_frame_number++;

    /* streaming sound always should be placed just before showframe tags */
    if (swf->audio_enc && av_fifo_size(swf->audio_fifo)) {
        int frame_size = av_fifo_size(swf->audio_fifo);
        put_swf_tag(s, TAG_STREAMBLOCK | TAG_LONG);
        avio_wl16(pb, swf->sound_samples);
        avio_wl16(pb, 0); // seek samples
        av_fifo_generic_read(swf->audio_fifo, pb, frame_size, (void*)avio_write);
        put_swf_end_tag(s);

        /* update FIFO */
        swf->sound_samples = 0;
    }

    /* output the frame */
    put_swf_tag(s, TAG_SHOWFRAME);
    put_swf_end_tag(s);

    return 0;
}

static int swf_write_audio(AVFormatContext *s,
                           AVCodecContext *enc, uint8_t *buf, int size)
{
    SWFContext *swf = s->priv_data;

    /* Flash Player limit */
    if (swf->swf_frame_number == 16000)
        av_log(enc, AV_LOG_INFO, "warning: Flash Player limit of 16000 frames reached\n");

    if (av_fifo_size(swf->audio_fifo) + size > AUDIO_FIFO_SIZE) {
        av_log(s, AV_LOG_ERROR, "audio fifo too small to mux audio essence\n");
        return -1;
    }

    av_fifo_generic_write(swf->audio_fifo, buf, size, NULL);
    swf->sound_samples += enc->frame_size;

    /* if audio only stream make sure we add swf frames */
    if (!swf->video_enc)
        swf_write_video(s, enc, 0, 0);

    return 0;
}

static int swf_write_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVCodecContext *codec = s->streams[pkt->stream_index]->codec;
    if (codec->codec_type == AVMEDIA_TYPE_AUDIO)
        return swf_write_audio(s, codec, pkt->data, pkt->size);
    else
        return swf_write_video(s, codec, pkt->data, pkt->size);
}
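
/*
 * Finish the file: free the audio FIFO, emit the TAG_END marker and, if the
 * output is seekable, patch the placeholders written earlier (total file
 * size, duration in frames and the TAG_VIDEOSTREAM frame count).
 */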

static int swf_write_trailer(AVFormatContext *s)
{
    SWFContext *swf = s->priv_data;
    AVIOContext *pb = s->pb;
    AVCodecContext *enc, *video_enc;
    int file_size, i;

    video_enc = NULL;
    for(i=0;i<s->nb_streams;i++) {
        enc = s->streams[i]->codec;
        if (enc->codec_type == AVMEDIA_TYPE_VIDEO)
            video_enc = enc;
        else {
            av_fifo_free(swf->audio_fifo);
            swf->audio_fifo = NULL;
        }
    }

    put_swf_tag(s, TAG_END);
    put_swf_end_tag(s);

    /* patch file size and number of frames if not streamed */
    if (s->pb->seekable && video_enc) {
        file_size = avio_tell(pb);
        avio_seek(pb, 4, SEEK_SET);
        avio_wl32(pb, file_size);
        avio_seek(pb, swf->duration_pos, SEEK_SET);
        avio_wl16(pb, swf->video_frame_number);
        if (swf->vframes_pos) {
            avio_seek(pb, swf->vframes_pos, SEEK_SET);
            avio_wl16(pb, swf->video_frame_number);
        }
        avio_seek(pb, file_size, SEEK_SET);
    }
    return 0;
}

#if CONFIG_SWF_MUXER
AVOutputFormat ff_swf_muxer = {
    .name              = "swf",
    .long_name         = NULL_IF_CONFIG_SMALL("SWF (ShockWave Flash)"),
    .mime_type         = "application/x-shockwave-flash",
    .extensions        = "swf",
    .priv_data_size    = sizeof(SWFContext),
    .audio_codec       = AV_CODEC_ID_MP3,
    .video_codec       = AV_CODEC_ID_FLV1,
    .write_header      = swf_write_header,
    .write_packet      = swf_write_packet,
    .write_trailer     = swf_write_trailer,
    .flags             = AVFMT_TS_NONSTRICT,
};
#endif
#if CONFIG_AVM2_MUXER
AVOutputFormat ff_avm2_muxer = {
    .name              = "avm2",
    .long_name         = NULL_IF_CONFIG_SMALL("SWF (ShockWave Flash) (AVM2)"),
    .mime_type         = "application/x-shockwave-flash",
    .priv_data_size    = sizeof(SWFContext),
    .audio_codec       = AV_CODEC_ID_MP3,
    .video_codec       = AV_CODEC_ID_FLV1,
    .write_header      = swf_write_header,
    .write_packet      = swf_write_packet,
    .write_trailer     = swf_write_trailer,
    .flags             = AVFMT_TS_NONSTRICT,
};
#endif