author | Alexandra Hájková <alexandra.khirnova@gmail.com> | 2015-06-15 14:54:45 +0200
committer | Anton Khirnov <anton@khirnov.net> | 2015-06-28 10:16:40 +0200
commit | b08569a23948db107e5e6175cd4c695427d5339d (patch)
tree | 0391df849ba3cf07ba58e78cac89f43d7203f0c3 /libavformat
parent | 441e8ae5efd681055e5af6f4317fb60110de9dd0 (diff)
download | ffmpeg-b08569a23948db107e5e6175cd4c695427d5339d.tar.gz
lavf: Replace the ASF demuxer
The old one is the result of reverse engineering and guesswork.
The new one has been written following the now-available specification.
This work is part of the Outreach Program for Women Summer 2014 activities
for the Libav project.
The fate references had to be changed because the old demuxer truncates
the last frame in some cases, while the new one handles it properly.
The seek-test reference is changed because seeking works differently
in the new demuxer: when seeking, the packet is not read directly from
the stream but is instead constructed by the demuxer, which is why the
position in the reference is now -1 (see the sketch below).
Signed-off-by: Anton Khirnov <anton@khirnov.net>
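The position change can be pictured with a small standalone C sketch (an editor's illustration, not code from this patch): once a frame is rebuilt from payload fragments that came from different ASF data packets, there is no single byte offset in the file to attribute it to, so the packet's position is reported as -1, which is what the new seek-test reference reflects. `DemoPacket`, `add_fragment` and the offsets used below are invented for the illustration.

```c
/* Illustration only (not the demuxer's code): a frame reassembled from
 * several payload fragments has no single file offset, so its position
 * is left at -1, the usual "unknown position" convention for packets. */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef struct DemoPacket {
    uint8_t data[64];
    size_t  size;
    int64_t pos;          /* -1: the data does not map to one file offset */
} DemoPacket;

/* Append one payload fragment that was read at file offset 'off'. */
static void add_fragment(DemoPacket *pkt, const uint8_t *frag, size_t len,
                         int64_t off)
{
    (void)off;            /* the fragment's own offset is not kept */
    memcpy(pkt->data + pkt->size, frag, len);
    pkt->size += len;
    pkt->pos   = -1;      /* reconstructed data: report position as unknown */
}

int main(void)
{
    DemoPacket pkt = { .size = 0, .pos = -1 };
    const uint8_t a[] = { 0x01, 0x02 }, b[] = { 0x03, 0x04 };

    add_fragment(&pkt, a, sizeof(a), 4096);  /* fragment from one packet */
    add_fragment(&pkt, b, sizeof(b), 7300);  /* fragment from another packet */
    printf("size=%zu pos=%"PRId64"\n", pkt.size, pkt.pos);
    return 0;
}
```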
Diffstat (limited to 'libavformat')
-rw-r--r-- | libavformat/asf.h | 21
-rw-r--r-- | libavformat/asfdec.c | 2530
-rw-r--r-- | libavformat/asfenc.c | 21
-rw-r--r-- | libavformat/version.h | 4
4 files changed, 1375 insertions, 1201 deletions
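The rewritten demuxer parses the file as a sequence of objects, each introduced by a 16-byte GUID and a little-endian 64-bit object size, dispatching known GUIDs through its GUIDParseTable and skipping the rest (asf_read_unknown), as the diff below shows. The following is a minimal, self-contained sketch of that object walk under stated assumptions: it uses plain stdio rather than the demuxer's AVIOContext helpers, recognizes only the Header Object GUID, and the names `demo_guid` and `walk_objects` are invented for the example.

```c
/* Minimal sketch of the GUID + size framing that the new demuxer's
 * GUIDParseTable dispatch is built on; plain stdio is used here instead
 * of AVIOContext, and only the Header Object GUID is recognized. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef uint8_t demo_guid[16];

/* ASF Header Object GUID in on-disk byte order. */
static const demo_guid demo_asf_header = {
    0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11,
    0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C
};

static uint64_t read_le64(FILE *f)
{
    uint8_t b[8];
    uint64_t v = 0;

    if (fread(b, 1, sizeof(b), f) != sizeof(b))
        return 0;
    for (int i = 7; i >= 0; i--)
        v = (v << 8) | b[i];
    return v;
}

static int walk_objects(FILE *f)
{
    demo_guid g;

    while (fread(g, 1, sizeof(g), f) == sizeof(g)) {
        uint64_t size = read_le64(f);   /* object size, header included */

        if (size < 24)                  /* must cover GUID (16) + size (8) */
            return -1;
        if (!memcmp(g, demo_asf_header, sizeof(g)))
            printf("Header Object, %llu bytes\n", (unsigned long long)size);
        else
            printf("unknown object, skipping %llu bytes\n",
                   (unsigned long long)size);
        /* skip the object payload (sketch: assumes size fits in a long) */
        if (fseek(f, (long)(size - 24), SEEK_CUR) < 0)
            return -1;
    }
    return 0;
}

int main(int argc, char **argv)
{
    FILE *f = argc > 1 ? fopen(argv[1], "rb") : NULL;

    if (!f)
        return 1;
    walk_objects(f);
    fclose(f);
    return 0;
}
```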
diff --git a/libavformat/asf.h b/libavformat/asf.h index 2f6722adca..7ac28dbb30 100644 --- a/libavformat/asf.h +++ b/libavformat/asf.h @@ -28,27 +28,6 @@ #define PACKET_SIZE 3200 -typedef struct ASFStream { - int num; - unsigned char seq; - /* use for reading */ - AVPacket pkt; - int frag_offset; - int timestamp; - int64_t duration; - - int ds_span; /* descrambling */ - int ds_packet_size; - int ds_chunk_size; - - int64_t packet_pos; - - uint16_t stream_language_index; - - int palette_changed; - uint32_t palette[256]; -} ASFStream; - typedef struct ASFMainHeader { ff_asf_guid guid; ///< generated by client computer uint64_t file_size; /**< in bytes diff --git a/libavformat/asfdec.c b/libavformat/asfdec.c index 34e9036d7a..8f46098632 100644 --- a/libavformat/asfdec.c +++ b/libavformat/asfdec.c @@ -1,6 +1,6 @@ /* - * ASF compatible demuxer - * Copyright (c) 2000, 2001 Fabrice Bellard + * Microsoft Advanced Streaming Format demuxer + * Copyright (c) 2014 Alexandra Hájková * * This file is part of Libav. * @@ -19,8 +19,6 @@ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -#include <inttypes.h> - #include "libavutil/attributes.h" #include "libavutil/avassert.h" #include "libavutil/avstring.h" @@ -30,6 +28,8 @@ #include "libavutil/internal.h" #include "libavutil/mathematics.h" #include "libavutil/opt.h" +#include "libavutil/time_internal.h" + #include "avformat.h" #include "avio_internal.h" #include "avlanguage.h" @@ -39,153 +39,305 @@ #include "asf.h" #include "asfcrypt.h" +#define ASF_BOOL 0x2 +#define ASF_WORD 0x5 +#define ASF_GUID 0x6 +#define ASF_DWORD 0x3 +#define ASF_QWORD 0x4 +#define ASF_UNICODE 0x0 +#define ASF_FLAG_BROADCAST 0x1 +#define ASF_BYTE_ARRAY 0x1 +#define ASF_TYPE_AUDIO 0x2 +#define ASF_TYPE_VIDEO 0x1 +#define ASF_STREAM_NUM 0x7F +#define ASF_MAX_STREAMS 128 +#define BMP_HEADER_SIZE 40 +#define ASF_NUM_OF_PAYLOADS 0x3F +#define ASF_ERROR_CORRECTION_LENGTH_TYPE 0x60 +#define ASF_PACKET_ERROR_CORRECTION_DATA_SIZE 0x2 + +typedef struct GUIDParseTable { + const char *name; + ff_asf_guid guid; + int (*read_object)(AVFormatContext *, const struct GUIDParseTable *); + int is_subobject; +} GUIDParseTable; + +typedef struct ASFPacket { + AVPacket avpkt; + int64_t dts; + uint32_t frame_num; // ASF payloads with the same number are parts of the same frame + int flags; + int data_size; + int duration; + int size_left; + uint8_t stream_index; +} ASFPacket; + +typedef struct ASFStream { + uint8_t stream_index; // from packet header + int index; // stream index in AVFormatContext, set in asf_read_stream_properties + int type; + int indexed; // added index entries from the Simple Index Object or not + int8_t span; // for deinterleaving + uint16_t virtual_pkt_len; + uint16_t virtual_chunk_len; + int16_t lang_idx; + ASFPacket pkt; +} ASFStream; + +typedef struct ASFStreamData{ + char langs[32]; + AVDictionary *asf_met; // for storing per-stream metadata + AVRational aspect_ratio; +} ASFStreamData; + typedef struct ASFContext { - const AVClass *class; - int asfid2avid[128]; ///< conversion table from asf ID 2 AVStream ID - ASFStream streams[128]; ///< it's max number and it's not that big - uint32_t stream_bitrates[128]; ///< max number of streams, bitrate for each (for streaming) - AVRational dar[128]; - char stream_languages[128][6]; ///< max number of streams, language for each (RFC1766, e.g. 
en-US) - /* non streamed additonnal info */ - /* packet filling */ - int packet_size_left; - /* only for reading */ - uint64_t data_offset; ///< beginning of the first data packet - uint64_t data_object_offset; ///< data object offset (excl. GUID & size) - uint64_t data_object_size; ///< size of the data object - int index_read; - - ASFMainHeader hdr; - - int packet_flags; - int packet_property; - int packet_timestamp; - int packet_segsizetype; - int packet_segments; - int packet_seq; - int packet_replic_size; - int packet_key_frame; - int packet_padsize; - unsigned int packet_frag_offset; - unsigned int packet_frag_size; - int64_t packet_frag_timestamp; - int packet_multi_size; - int packet_obj_size; - int packet_time_delta; - int packet_time_start; - int64_t packet_pos; - - int stream_index; - - ASFStream *asf_st; ///< currently decoded stream - - int no_resync_search; - int export_xmp; + int data_reached; + int is_simple_index; // is simple index present or not 1/0 + int is_header; + + uint64_t preroll; + uint64_t nb_packets; // ASF packets + uint32_t packet_size; + int64_t send_time; + int duration; + + uint32_t b_flags; // flags with broadcast flag + uint32_t prop_flags; // file properties object flags + + uint64_t data_size; // data object size + uint64_t unknown_size; // size of the unknown object + + int64_t offset; // offset of the current object + + int64_t data_offset; + int64_t first_packet_offset; // packet offset + int64_t unknown_offset; // for top level header objects or subobjects without specified behavior + + // ASF file must not contain more than 128 streams according to the specification + ASFStream *asf_st[ASF_MAX_STREAMS]; + ASFStreamData asf_sd[ASF_MAX_STREAMS]; + int nb_streams; + + int stream_index; // from packet header, for the subpayload case + + // packet parameteres + uint64_t sub_header_offset; // offset of subplayload header + int64_t sub_dts; + uint8_t dts_delta; // for subpayloads + uint32_t packet_size_internal; // packet size stored inside ASFPacket, can be 0 + int64_t dts; + int64_t packet_offset; // offset of the current packet inside Data Object + uint32_t pad_len; // padding after payload + uint32_t rep_data_len; + + // packet state + uint64_t sub_left; // subpayloads left or not + int nb_sub; // number of subpayloads read so far from the current ASF packet + uint16_t mult_sub_len; // total length of subpayloads array inside multiple payload + uint64_t nb_mult_left; // multiple payloads left + int return_subpayload; + enum { + PARSE_PACKET_HEADER, + READ_SINGLE, + READ_MULTI, + READ_MULTI_SUB + } state; } ASFContext; -static const AVOption options[] = { - { "no_resync_search", "Don't try to resynchronize by looking for a certain optional start code", offsetof(ASFContext, no_resync_search), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, AV_OPT_FLAG_DECODING_PARAM }, - { "export_xmp", "Export full XMP metadata", offsetof(ASFContext, export_xmp), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, AV_OPT_FLAG_DECODING_PARAM }, - { NULL }, -}; +static int detect_unknown_subobject(AVFormatContext *s, int64_t offset, int64_t size); +static const GUIDParseTable *find_guid(ff_asf_guid guid); -static const AVClass asf_class = { - .class_name = "asf demuxer", - .item_name = av_default_item_name, - .option = options, - .version = LIBAVUTIL_VERSION_INT, -}; +static int asf_probe(AVProbeData *pd) +{ + /* check file header */ + if (!ff_guidcmp(pd->buf, &ff_asf_header)) + return AVPROBE_SCORE_MAX; + else + return 0; +} -#undef NDEBUG -#include <assert.h> +static void swap_guid(ff_asf_guid 
guid) +{ + FFSWAP(unsigned char, guid[0], guid[3]); + FFSWAP(unsigned char, guid[1], guid[2]); + FFSWAP(unsigned char, guid[4], guid[5]); + FFSWAP(unsigned char, guid[6], guid[7]); +} -#define ASF_MAX_STREAMS 127 -#define FRAME_HEADER_SIZE 17 -// Fix Me! FRAME_HEADER_SIZE may be different. +static void align_position(AVIOContext *pb, int64_t offset, uint64_t size) +{ + if (avio_tell(pb) != offset + size) + avio_seek(pb, offset + size, SEEK_SET); +} -static const ff_asf_guid index_guid = { - 0x90, 0x08, 0x00, 0x33, 0xb1, 0xe5, 0xcf, 0x11, 0x89, 0xf4, 0x00, 0xa0, 0xc9, 0x03, 0x49, 0xcb -}; +static int asf_read_unknown(AVFormatContext *s, const GUIDParseTable *g) +{ + ASFContext *asf = s->priv_data; + AVIOContext *pb = s->pb; + uint64_t size = avio_rl64(pb); + int ret; + + if (asf->is_header) + asf->unknown_size = size; + asf->is_header = 0; + if (!g->is_subobject) { + if (!(ret = strcmp(g->name, "Header Extension"))) + avio_skip(pb, 22); // skip reserved fields and Data Size + if ((ret = detect_unknown_subobject(s, asf->unknown_offset, + asf->unknown_size)) < 0) + return ret; + } else + avio_skip(pb, size - 24); -#ifdef DEBUG -static const ff_asf_guid stream_bitrate_guid = { /* (http://get.to/sdp) */ - 0xce, 0x75, 0xf8, 0x7b, 0x8d, 0x46, 0xd1, 0x11, 0x8d, 0x82, 0x00, 0x60, 0x97, 0xc9, 0xa2, 0xb2 -}; + return 0; +} + +static int get_asf_string(AVIOContext *pb, int maxlen, char *buf, int buflen) +{ + char *q = buf; + int ret = 0; + if (buflen <= 0) + return AVERROR(EINVAL); + while (ret + 1 < maxlen) { + uint8_t tmp; + uint32_t ch; + GET_UTF16(ch, (ret += 2) <= maxlen ? avio_rl16(pb) : 0, break;); + PUT_UTF8(ch, tmp, if (q - buf < buflen - 1) *q++ = tmp;) + } + *q = 0; -#define PRINT_IF_GUID(g, cmp) \ - if (!ff_guidcmp(g, &cmp)) \ - av_log(NULL, AV_LOG_TRACE, "(GUID: %s) ", # cmp) + return ret; +} -static void print_guid(ff_asf_guid *g) +static int asf_read_marker(AVFormatContext *s, const GUIDParseTable *g) { - int i; - PRINT_IF_GUID(g, ff_asf_header); - else PRINT_IF_GUID(g, ff_asf_file_header); - else PRINT_IF_GUID(g, ff_asf_stream_header); - else PRINT_IF_GUID(g, ff_asf_audio_stream); - else PRINT_IF_GUID(g, ff_asf_audio_conceal_none); - else PRINT_IF_GUID(g, ff_asf_video_stream); - else PRINT_IF_GUID(g, ff_asf_video_conceal_none); - else PRINT_IF_GUID(g, ff_asf_command_stream); - else PRINT_IF_GUID(g, ff_asf_comment_header); - else PRINT_IF_GUID(g, ff_asf_codec_comment_header); - else PRINT_IF_GUID(g, ff_asf_codec_comment1_header); - else PRINT_IF_GUID(g, ff_asf_data_header); - else PRINT_IF_GUID(g, index_guid); - else PRINT_IF_GUID(g, ff_asf_head1_guid); - else PRINT_IF_GUID(g, ff_asf_head2_guid); - else PRINT_IF_GUID(g, ff_asf_my_guid); - else PRINT_IF_GUID(g, ff_asf_ext_stream_header); - else PRINT_IF_GUID(g, ff_asf_extended_content_header); - else PRINT_IF_GUID(g, ff_asf_ext_stream_embed_stream_header); - else PRINT_IF_GUID(g, ff_asf_ext_stream_audio_stream); - else PRINT_IF_GUID(g, ff_asf_metadata_header); - else PRINT_IF_GUID(g, ff_asf_metadata_library_header); - else PRINT_IF_GUID(g, ff_asf_marker_header); - else PRINT_IF_GUID(g, stream_bitrate_guid); - else PRINT_IF_GUID(g, ff_asf_language_guid); - else - av_log(NULL, AV_LOG_TRACE, "(GUID: unknown) "); - for (i = 0; i < 16; i++) - av_log(NULL, AV_LOG_TRACE, " 0x%02x,", (*g)[i]); - av_log(NULL, AV_LOG_TRACE, "}\n"); + ASFContext *asf = s->priv_data; + AVIOContext *pb = s->pb; + uint64_t size = avio_rl64(pb); + int i, nb_markers, ret; + size_t len; + char name[1024]; + + avio_skip(pb, 8); + avio_skip(pb, 8); // skip reserved GUID + 
nb_markers = avio_rl32(pb); + avio_skip(pb, 2); // skip reserved field + len = avio_rl16(pb); + for (i = 0; i < len; i++) + avio_skip(pb, 1); + + for (i = 0; i < nb_markers; i++) { + int64_t pts; + + avio_skip(pb, 8); + pts = avio_rl64(pb); + pts -= asf->preroll * 10000; + avio_skip(pb, 2); // entry length + avio_skip(pb, 4); // send time + avio_skip(pb, 4); // flags + len = avio_rl32(pb); + + if ((ret = avio_get_str16le(pb, len, name, + sizeof(name))) < len) + avio_skip(pb, len - ret); + avpriv_new_chapter(s, i, (AVRational) { 1, 10000000 }, pts, + AV_NOPTS_VALUE, name); + } + align_position(pb, asf->offset, size); + + return 0; } -#undef PRINT_IF_GUID -#else -#define print_guid(g) -#endif -static int asf_probe(AVProbeData *pd) +static int asf_read_metadata(AVFormatContext *s, const char *title, uint16_t len, + unsigned char *ch, uint16_t buflen) { - /* check file header */ - if (!ff_guidcmp(pd->buf, &ff_asf_header)) - return AVPROBE_SCORE_MAX; - else - return 0; + AVIOContext *pb = s->pb; + + avio_get_str16le(pb, len, ch, buflen); + if (av_dict_set(&s->metadata, title, ch, 0) < 0) + av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n"); + + return 0; } -/* size of type 2 (BOOL) is 32bit for "Extended Content Description Object" - * but 16 bit for "Metadata Object" and "Metadata Library Object" */ -static int get_value(AVIOContext *pb, int type, int type2_size) +static int asf_read_value(AVFormatContext *s, uint8_t *name, uint16_t name_len, + uint16_t val_len, int type, AVDictionary **met) { + int ret; + uint8_t *value; + uint16_t buflen = 2 * val_len + 1; + AVIOContext *pb = s->pb; + + value = av_malloc(buflen); + if (!value) + return AVERROR(ENOMEM); + if (type == ASF_UNICODE) { + // get_asf_string reads UTF-16 and converts it to UTF-8 which needs longer buffer + if ((ret = get_asf_string(pb, val_len, value, buflen)) < 0) + goto failed; + if (av_dict_set(met, name, value, 0) < 0) + av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n"); + } else { + char buf[256]; + if (val_len > sizeof(buf)) + return AVERROR_INVALIDDATA; + if ((ret = avio_read(pb, value, val_len)) < 0) + goto failed; + if (ret < 2 * val_len) + value[ret] = '\0'; + else + value[2 * val_len - 1] = '\0'; + snprintf(buf, sizeof(buf), "%s", value); + if (av_dict_set(met, name, buf, 0) < 0) + av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n"); + } + av_freep(&value); + + return 0; + +failed: + av_freep(&value); + return ret; +} + +static int asf_read_generic_value(AVFormatContext *s, uint8_t *name, uint16_t name_len, + int type, AVDictionary **met) +{ + AVIOContext *pb = s->pb; + uint64_t value; + char buf[32]; + switch (type) { - case 2: - return (type2_size == 32) ? 
avio_rl32(pb) : avio_rl16(pb); - case 3: - return avio_rl32(pb); - case 4: - return avio_rl64(pb); - case 5: - return avio_rl16(pb); + case ASF_BOOL: + value = avio_rl32(pb); + break; + case ASF_DWORD: + value = avio_rl32(pb); + break; + case ASF_QWORD: + value = avio_rl64(pb); + break; + case ASF_WORD: + value = avio_rl16(pb); + break; default: - return INT_MIN; + av_freep(&name); + return AVERROR_INVALIDDATA; } + snprintf(buf, sizeof(buf), "%"PRIu64, value); + if (av_dict_set(met, name, buf, 0) < 0) + av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n"); + + return 0; } /* MSDN claims that this should be "compatible with the ID3 frame, APIC", * but in reality this is only loosely similar */ static int asf_read_picture(AVFormatContext *s, int len) { + ASFContext *asf = s->priv_data; AVPacket pkt = { 0 }; const CodecMime *mime = ff_id3v2_mime_tags; enum AVCodecID id = AV_CODEC_ID_NONE; @@ -193,6 +345,7 @@ static int asf_read_picture(AVFormatContext *s, int len) uint8_t *desc = NULL; AVStream *st = NULL; int ret, type, picsize, desc_len; + ASFStream *asf_st; /* type + picsize + mime + desc */ if (len < 1 + 4 + 2 + 2) { @@ -249,19 +402,28 @@ static int asf_read_picture(AVFormatContext *s, int len) ret = AVERROR(ENOMEM); goto fail; } + asf->asf_st[asf->nb_streams] = av_mallocz(sizeof(*asf_st)); + asf_st = asf->asf_st[asf->nb_streams]; + if (!asf_st) + return AVERROR(ENOMEM); + st->disposition |= AV_DISPOSITION_ATTACHED_PIC; - st->codec->codec_type = AVMEDIA_TYPE_VIDEO; + st->codec->codec_type = asf_st->type = AVMEDIA_TYPE_VIDEO; st->codec->codec_id = id; st->attached_pic = pkt; - st->attached_pic.stream_index = st->index; + st->attached_pic.stream_index = asf_st->index = st->index; st->attached_pic.flags |= AV_PKT_FLAG_KEY; - if (*desc) - av_dict_set(&st->metadata, "title", desc, AV_DICT_DONT_STRDUP_VAL); - else + asf->nb_streams++; + + if (*desc) { + if (av_dict_set(&st->metadata, "title", desc, AV_DICT_DONT_STRDUP_VAL) < 0) + av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n"); + } else av_freep(&desc); - av_dict_set(&st->metadata, "comment", ff_id3v2_picture_types[type], 0); + if (av_dict_set(&st->metadata, "comment", ff_id3v2_picture_types[type], 0) < 0) + av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n"); return 0; @@ -281,1235 +443,1248 @@ static void get_id3_tag(AVFormatContext *s, int len) ff_id3v2_free_extra_meta(&id3v2_extra_meta); } -static void get_tag(AVFormatContext *s, const char *key, int type, int len, int type2_size) +static int process_metadata(AVFormatContext *s, uint8_t *name, uint16_t name_len, + uint16_t val_len, uint16_t type, AVDictionary **met) { - ASFContext *asf = s->priv_data; - char *value = NULL; - int64_t off = avio_tell(s->pb); - - if ((unsigned)len >= (UINT_MAX - 1) / 2) - return; - - if (!asf->export_xmp && !strncmp(key, "xmp", 3)) - goto finish; + int ret; + ff_asf_guid guid; - value = av_malloc(2 * len + 1); - if (!value) - goto finish; - - if (type == 0) { // UTF16-LE - avio_get_str16le(s->pb, len, value, 2 * len + 1); - } else if (type == 1) { // byte array - if (!strcmp(key, "WM/Picture")) { // handle cover art - asf_read_picture(s, len); - } else if (!strcmp(key, "ID3")) { // handle ID3 tag - get_id3_tag(s, len); - } else { - av_log(s, AV_LOG_VERBOSE, "Unsupported byte array in tag %s.\n", key); + if (val_len) { + switch (type) { + case ASF_UNICODE: + asf_read_value(s, name, name_len, val_len, type, met); + break; + case ASF_BYTE_ARRAY: + if (!strcmp(name, "WM/Picture")) // handle cover art + asf_read_picture(s, val_len); + else if (!strcmp(name, "ID3")) 
// handle ID3 tag + get_id3_tag(s, val_len); + else + asf_read_value(s, name, name_len, val_len, type, met); + break; + case ASF_GUID: + ff_get_guid(s->pb, &guid); + break; + default: + if ((ret = asf_read_generic_value(s, name, name_len, type, met)) < 0) + return ret; + break; } - goto finish; - } else if (type > 1 && type <= 5) { // boolean or DWORD or QWORD or WORD - uint64_t num = get_value(s->pb, type, type2_size); - snprintf(value, len, "%"PRIu64, num); - } else if (type == 6) { // (don't) handle GUID - av_log(s, AV_LOG_DEBUG, "Unsupported GUID value in tag %s.\n", key); - goto finish; - } else { - av_log(s, AV_LOG_DEBUG, - "Unsupported value type %d in tag %s.\n", type, key); - goto finish; } - if (*value) - av_dict_set(&s->metadata, key, value, 0); + av_freep(&name); -finish: - av_freep(&value); - avio_seek(s->pb, off + len, SEEK_SET); + return 0; } -static int asf_read_file_properties(AVFormatContext *s, int64_t size) +static int asf_read_ext_content(AVFormatContext *s, const GUIDParseTable *g) { - ASFContext *asf = s->priv_data; - AVIOContext *pb = s->pb; + ASFContext *asf = s->priv_data; + AVIOContext *pb = s->pb; + uint64_t size = avio_rl64(pb); + uint16_t nb_desc = avio_rl16(pb); + int i, ret; - ff_get_guid(pb, &asf->hdr.guid); - asf->hdr.file_size = avio_rl64(pb); - asf->hdr.create_time = avio_rl64(pb); - avio_rl64(pb); /* number of packets */ - asf->hdr.play_time = avio_rl64(pb); - asf->hdr.send_time = avio_rl64(pb); - asf->hdr.preroll = avio_rl32(pb); - asf->hdr.ignore = avio_rl32(pb); - asf->hdr.flags = avio_rl32(pb); - asf->hdr.min_pktsize = avio_rl32(pb); - asf->hdr.max_pktsize = avio_rl32(pb); - if (asf->hdr.min_pktsize >= (1U << 29)) - return AVERROR_INVALIDDATA; - asf->hdr.max_bitrate = avio_rl32(pb); - s->packet_size = asf->hdr.max_pktsize; + for (i = 0; i < nb_desc; i++) { + uint16_t name_len, type, val_len; + uint8_t *name = NULL; + name_len = avio_rl16(pb); + if (!name_len) + return AVERROR_INVALIDDATA; + name = av_malloc(name_len); + if (!name) + return AVERROR(ENOMEM); + avio_get_str16le(pb, name_len, name, + name_len); + type = avio_rl16(pb); + val_len = avio_rl16(pb); + + if ((ret = process_metadata(s, name, name_len, val_len, type, &s->metadata)) < 0) + return ret; + } + + align_position(pb, asf->offset, size); return 0; } -static int asf_read_stream_properties(AVFormatContext *s, int64_t size) +static AVStream *find_stream(AVFormatContext *s, uint16_t st_num) { + AVStream *st = NULL; ASFContext *asf = s->priv_data; - AVIOContext *pb = s->pb; - AVStream *st; - ASFStream *asf_st; - ff_asf_guid g; - enum AVMediaType type; - int type_specific_size, sizeX; - unsigned int tag1; - int64_t pos1, pos2, start_time; - int test_for_ext_stream_audio, is_dvr_ms_audio = 0; + int i; - if (s->nb_streams == ASF_MAX_STREAMS) { - av_log(s, AV_LOG_ERROR, "too many streams\n"); - return AVERROR(EINVAL); + for (i = 0; i < asf->nb_streams; i++) { + if (asf->asf_st[i]->stream_index == st_num) { + st = s->streams[asf->asf_st[i]->index]; + break; + } } - pos1 = avio_tell(pb); - - st = avformat_new_stream(s, NULL); - if (!st) - return AVERROR(ENOMEM); - avpriv_set_pts_info(st, 32, 1, 1000); /* 32 bit pts in ms */ - asf_st = av_mallocz(sizeof(ASFStream)); - if (!asf_st) - return AVERROR(ENOMEM); - st->priv_data = asf_st; - st->start_time = 0; - start_time = asf->hdr.preroll; + return st; +} - asf_st->stream_language_index = 128; // invalid stream index means no language info +static void asf_store_aspect_ratio(AVFormatContext *s, uint8_t st_num, uint8_t *name) +{ + ASFContext *asf = 
s->priv_data; + AVIOContext *pb = s->pb; + uint16_t value = 0; - if (!(asf->hdr.flags & 0x01)) { // if we aren't streaming... - st->duration = asf->hdr.play_time / - (10000000 / 1000) - start_time; - } - ff_get_guid(pb, &g); + value = avio_rl16(pb); - test_for_ext_stream_audio = 0; - if (!ff_guidcmp(&g, &ff_asf_audio_stream)) { - type = AVMEDIA_TYPE_AUDIO; - } else if (!ff_guidcmp(&g, &ff_asf_video_stream)) { - type = AVMEDIA_TYPE_VIDEO; - } else if (!ff_guidcmp(&g, &ff_asf_jfif_media)) { - type = AVMEDIA_TYPE_VIDEO; - st->codec->codec_id = AV_CODEC_ID_MJPEG; - } else if (!ff_guidcmp(&g, &ff_asf_command_stream)) { - type = AVMEDIA_TYPE_DATA; - } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_embed_stream_header)) { - test_for_ext_stream_audio = 1; - type = AVMEDIA_TYPE_UNKNOWN; - } else { - return -1; - } - ff_get_guid(pb, &g); - avio_skip(pb, 8); /* total_size */ - type_specific_size = avio_rl32(pb); - avio_rl32(pb); - st->id = avio_rl16(pb) & 0x7f; /* stream id */ - // mapping of asf ID to AV stream ID; - asf->asfid2avid[st->id] = s->nb_streams - 1; - - avio_rl32(pb); - - if (test_for_ext_stream_audio) { - ff_get_guid(pb, &g); - if (!ff_guidcmp(&g, &ff_asf_ext_stream_audio_stream)) { - type = AVMEDIA_TYPE_AUDIO; - is_dvr_ms_audio = 1; - ff_get_guid(pb, &g); - avio_rl32(pb); - avio_rl32(pb); - avio_rl32(pb); - ff_get_guid(pb, &g); - avio_rl32(pb); - } + if (st_num < ASF_MAX_STREAMS) { + if (!strcmp(name, "AspectRatioX")) + asf->asf_sd[st_num].aspect_ratio.num = value; + else + asf->asf_sd[st_num].aspect_ratio.den = value; } +} - st->codec->codec_type = type; - if (type == AVMEDIA_TYPE_AUDIO) { - int ret = ff_get_wav_header(pb, st->codec, type_specific_size); - if (ret < 0) - return ret; - if (is_dvr_ms_audio) { - // codec_id and codec_tag are unreliable in dvr_ms - // files. Set them later by probing stream. - st->codec->codec_id = AV_CODEC_ID_PROBE; - st->codec->codec_tag = 0; - } - if (st->codec->codec_id == AV_CODEC_ID_AAC) - st->need_parsing = AVSTREAM_PARSE_NONE; - else - st->need_parsing = AVSTREAM_PARSE_FULL; - /* We have to init the frame size at some point .... 
*/ - pos2 = avio_tell(pb); - if (size >= (pos2 + 8 - pos1 + 24)) { - asf_st->ds_span = avio_r8(pb); - asf_st->ds_packet_size = avio_rl16(pb); - asf_st->ds_chunk_size = avio_rl16(pb); - avio_rl16(pb); // ds_data_size - avio_r8(pb); // ds_silence_data - } - if (asf_st->ds_span > 1) { - if (!asf_st->ds_chunk_size || - (asf_st->ds_packet_size / asf_st->ds_chunk_size <= 1) || - asf_st->ds_packet_size % asf_st->ds_chunk_size) - asf_st->ds_span = 0; // disable descrambling - } - } else if (type == AVMEDIA_TYPE_VIDEO && - size - (avio_tell(pb) - pos1 + 24) >= 51) { - avio_rl32(pb); - avio_rl32(pb); - avio_r8(pb); - avio_rl16(pb); /* size */ - sizeX = avio_rl32(pb); /* size */ - st->codec->width = avio_rl32(pb); - st->codec->height = avio_rl32(pb); - /* not available for asf */ - avio_rl16(pb); /* panes */ - st->codec->bits_per_coded_sample = avio_rl16(pb); /* depth */ - tag1 = avio_rl32(pb); - avio_skip(pb, 20); - if (sizeX > 40) { - st->codec->extradata_size = sizeX - 40; - st->codec->extradata = av_mallocz(st->codec->extradata_size + - FF_INPUT_BUFFER_PADDING_SIZE); - avio_read(pb, st->codec->extradata, st->codec->extradata_size); - } +static int asf_read_metadata_obj(AVFormatContext *s, const GUIDParseTable *g) +{ + ASFContext *asf = s->priv_data; + AVIOContext *pb = s->pb; + uint64_t size = avio_rl64(pb); + uint16_t nb_recs = avio_rl16(pb); // number of records in the Description Records list + int i, ret; - /* Extract palette from extradata if bpp <= 8 */ - /* This code assumes that extradata contains only palette */ - /* This is true for all paletted codecs implemented in libavcodec */ - if (st->codec->extradata_size && (st->codec->bits_per_coded_sample <= 8)) { -#if HAVE_BIGENDIAN - int i; - for (i = 0; i < FFMIN(st->codec->extradata_size, AVPALETTE_SIZE) / 4; i++) - asf_st->palette[i] = av_bswap32(((uint32_t *)st->codec->extradata)[i]); -#else - memcpy(asf_st->palette, st->codec->extradata, - FFMIN(st->codec->extradata_size, AVPALETTE_SIZE)); -#endif - asf_st->palette_changed = 1; - } + for (i = 0; i < nb_recs; i++) { + uint16_t name_len, buflen, type, val_len, st_num; + uint8_t *name = NULL; - st->codec->codec_tag = tag1; - st->codec->codec_id = ff_codec_get_id(ff_codec_bmp_tags, tag1); - if (tag1 == MKTAG('D', 'V', 'R', ' ')) { - st->need_parsing = AVSTREAM_PARSE_FULL; - /* issue658 contains wrong w/h and MS even puts a fake seq header - * with wrong w/h in extradata while a correct one is in the stream. 
- * maximum lameness */ - st->codec->width = - st->codec->height = 0; - av_freep(&st->codec->extradata); - st->codec->extradata_size = 0; + avio_skip(pb, 2); // skip reserved field + st_num = avio_rl16(pb); + name_len = avio_rl16(pb); + buflen = 2 * name_len + 1; + if (!name_len) + break; + type = avio_rl16(pb); + val_len = avio_rl32(pb); + name = av_malloc(name_len); + if (!name) + return AVERROR(ENOMEM); + avio_get_str16le(pb, name_len, name, + buflen); + + if (!strcmp(name, "AspectRatioX") || !strcmp(name, "AspectRatioY")) { + asf_store_aspect_ratio(s, st_num, name); + } else { + if (st_num < ASF_MAX_STREAMS) { + if ((ret = process_metadata(s, name, name_len, val_len, type, + &asf->asf_sd[st_num].asf_met)) < 0) + break; + } } - if (st->codec->codec_id == AV_CODEC_ID_H264) - st->need_parsing = AVSTREAM_PARSE_FULL_ONCE; } - pos2 = avio_tell(pb); - avio_skip(pb, size - (pos2 - pos1 + 24)); + align_position(pb, asf->offset, size); return 0; } -static int asf_read_ext_stream_properties(AVFormatContext *s, int64_t size) +static int asf_read_content_desc(AVFormatContext *s, const GUIDParseTable *g) { ASFContext *asf = s->priv_data; AVIOContext *pb = s->pb; - ff_asf_guid g; - int ext_len, payload_ext_ct, stream_ct, i; - uint32_t leak_rate, stream_num; - unsigned int stream_languageid_index; - - avio_rl64(pb); // starttime - avio_rl64(pb); // endtime - leak_rate = avio_rl32(pb); // leak-datarate - avio_rl32(pb); // bucket-datasize - avio_rl32(pb); // init-bucket-fullness - avio_rl32(pb); // alt-leak-datarate - avio_rl32(pb); // alt-bucket-datasize - avio_rl32(pb); // alt-init-bucket-fullness - avio_rl32(pb); // max-object-size - avio_rl32(pb); // flags (reliable,seekable,no_cleanpoints?,resend-live-cleanpoints, rest of bits reserved) - stream_num = avio_rl16(pb); // stream-num - - stream_languageid_index = avio_rl16(pb); // stream-language-id-index - if (stream_num < 128) - asf->streams[stream_num].stream_language_index = stream_languageid_index; - - avio_rl64(pb); // avg frametime in 100ns units - stream_ct = avio_rl16(pb); // stream-name-count - payload_ext_ct = avio_rl16(pb); // payload-extension-system-count - - if (stream_num < 128) - asf->stream_bitrates[stream_num] = leak_rate; - - for (i = 0; i < stream_ct; i++) { - avio_rl16(pb); - ext_len = avio_rl16(pb); - avio_skip(pb, ext_len); + int i; + static const char *const titles[] = + { "Title", "Author", "Copyright", "Description", "Rate" }; + uint16_t len[5], buflen[5] = { 0 }; + uint8_t *ch; + uint64_t size = avio_rl64(pb); + + for (i = 0; i < 5; i++) { + len[i] = avio_rl16(pb); + // utf8 string should be <= 2 * utf16 string, extra byte for the terminator + buflen[i] = 2 * len[i] + 1; } - for (i = 0; i < payload_ext_ct; i++) { - ff_get_guid(pb, &g); - avio_skip(pb, 2); - ext_len = avio_rl32(pb); - avio_skip(pb, ext_len); + for (i = 0; i < 5; i++) { + ch = av_malloc(buflen[i]); + if (!ch) + return(AVERROR(ENOMEM)); + asf_read_metadata(s, titles[i], len[i], ch, buflen[i]); + av_freep(&ch); } + align_position(pb, asf->offset, size); return 0; } -static int asf_read_content_desc(AVFormatContext *s, int64_t size) +static int asf_read_properties(AVFormatContext *s, const GUIDParseTable *g) { + ASFContext *asf = s->priv_data; AVIOContext *pb = s->pb; - int len1, len2, len3, len4, len5; - - len1 = avio_rl16(pb); - len2 = avio_rl16(pb); - len3 = avio_rl16(pb); - len4 = avio_rl16(pb); - len5 = avio_rl16(pb); - get_tag(s, "title", 0, len1, 32); - get_tag(s, "author", 0, len2, 32); - get_tag(s, "copyright", 0, len3, 32); - get_tag(s, "comment", 0, 
len4, 32); - avio_skip(pb, len5); + uint64_t creation_time; + + avio_rl64(pb); // read object size + avio_skip(pb, 16); // skip File ID + avio_skip(pb, 8); // skip File size + creation_time = avio_rl64(pb); + if (!(asf->b_flags & ASF_FLAG_BROADCAST)) { + struct tm tmbuf; + struct tm *tm; + char buf[64]; + + // creation date is in 100 ns units from 1 Jan 1601, conversion to s + creation_time /= 10000000; + // there are 11644473600 seconds between 1 Jan 1601 and 1 Jan 1970 + creation_time -= 11644473600; + tm = gmtime_r(&creation_time, &tmbuf); + if (tm) { + if (!strftime(buf, sizeof(buf), "%Y-%m-%d %H:%M:%S", tm)) + buf[0] = '\0'; + } else + buf[0] = '\0'; + if (buf[0]) { + if (av_dict_set(&s->metadata, "creation_time", buf, 0) < 0) + av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n"); + } + } + asf->nb_packets = avio_rl64(pb); + asf->duration = avio_rl64(pb) / 10000; // stream duration + avio_skip(pb, 8); // skip send duration + asf->preroll = avio_rl64(pb); + asf->duration -= asf->preroll; + asf->b_flags = avio_rl32(pb); + avio_skip(pb, 4); // skip minimal packet size + asf->packet_size = avio_rl32(pb); + avio_skip(pb, 4); // skip max_bitrate return 0; } -static int asf_read_ext_content_desc(AVFormatContext *s, int64_t size) +static int parse_video_info(AVIOContext *pb, AVStream *st) +{ + uint16_t size; + unsigned int tag; + + st->codec->width = avio_rl32(pb); + st->codec->height = avio_rl32(pb); + avio_skip(pb, 1); // skip reserved flags + size = avio_rl16(pb); // size of the Format Data + tag = ff_get_bmp_header(pb, st); + st->codec->codec_tag = tag; + st->codec->codec_id = ff_codec_get_id(ff_codec_bmp_tags, tag); + + if (size > BMP_HEADER_SIZE) { + int ret; + st->codec->extradata_size = size - BMP_HEADER_SIZE; + if (!(st->codec->extradata = av_malloc(st->codec->extradata_size + + FF_INPUT_BUFFER_PADDING_SIZE))) { + st->codec->extradata_size = 0; + return AVERROR(ENOMEM); + } + memset(st->codec->extradata + st->codec->extradata_size , 0, + FF_INPUT_BUFFER_PADDING_SIZE); + if ((ret = avio_read(pb, st->codec->extradata, + st->codec->extradata_size)) < 0) + return ret; + } + return 0; +} + +static int asf_read_stream_properties(AVFormatContext *s, const GUIDParseTable *g) { - AVIOContext *pb = s->pb; ASFContext *asf = s->priv_data; - int desc_count, i, ret; + AVIOContext *pb = s->pb; + uint64_t size; + uint32_t err_data_len, ts_data_len; // type specific data length + uint16_t flags; + ff_asf_guid stream_type; + enum AVMediaType type; + int i, ret; + uint8_t stream_index; + AVStream *st; + ASFStream *asf_st; - desc_count = avio_rl16(pb); - for (i = 0; i < desc_count; i++) { - int name_len, value_type, value_len; - char name[1024]; + // ASF file must not contain more than 128 streams according to the specification + if (asf->nb_streams >= ASF_MAX_STREAMS) + return AVERROR_INVALIDDATA; - name_len = avio_rl16(pb); - if (name_len % 2) // must be even, broken lavf versions wrote len-1 - name_len += 1; - if ((ret = avio_get_str16le(pb, name_len, name, sizeof(name))) < name_len) - avio_skip(pb, name_len - ret); - value_type = avio_rl16(pb); - value_len = avio_rl16(pb); - if (!value_type && value_len % 2) - value_len += 1; - /* My sample has that stream set to 0 maybe that mean the container. - * ASF stream count starts at 1. I am using 0 to the container value - * since it's unused. 
*/ - if (!strcmp(name, "AspectRatioX")) - asf->dar[0].num = get_value(s->pb, value_type, 32); - else if (!strcmp(name, "AspectRatioY")) - asf->dar[0].den = get_value(s->pb, value_type, 32); - else - get_tag(s, name, value_type, value_len, 32); + size = avio_rl64(pb); + ff_get_guid(pb, &stream_type); + if (!ff_guidcmp(&stream_type, &ff_asf_audio_stream)) + type = AVMEDIA_TYPE_AUDIO; + else if (!ff_guidcmp(&stream_type, &ff_asf_video_stream)) + type = AVMEDIA_TYPE_VIDEO; + else if (!ff_guidcmp(&stream_type, &ff_asf_jfif_media)) + type = AVMEDIA_TYPE_VIDEO; + else if (!ff_guidcmp(&stream_type, &ff_asf_command_stream)) + type = AVMEDIA_TYPE_DATA; + else if (!ff_guidcmp(&stream_type, + &ff_asf_ext_stream_embed_stream_header)) + type = AVMEDIA_TYPE_UNKNOWN; + else + return AVERROR_INVALIDDATA; + + ff_get_guid(pb, &stream_type); // error correction type + avio_skip(pb, 8); // skip the time offset + ts_data_len = avio_rl32(pb); + err_data_len = avio_rl32(pb); + flags = avio_rl16(pb); // bit 15 - Encrypted Content + + stream_index = flags & ASF_STREAM_NUM; + for (i = 0; i < asf->nb_streams; i++) + if (stream_index == asf->asf_st[i]->stream_index) { + av_log(s, AV_LOG_WARNING, + "Duplicate stream found, this stream will be ignored.\n"); + align_position(pb, asf->offset, size); + return 0; + } + + st = avformat_new_stream(s, NULL); + if (!st) + return AVERROR(ENOMEM); + avpriv_set_pts_info(st, 32, 1, 1000); // pts should be dword, in milliseconds + st->codec->codec_type = type; + asf->asf_st[asf->nb_streams] = av_mallocz(sizeof(*asf_st)); + if (!asf->asf_st[asf->nb_streams]) + return AVERROR(ENOMEM); + asf_st = asf->asf_st[asf->nb_streams]; + asf_st->stream_index = stream_index; + asf_st->index = st->index; + asf_st->indexed = 0; + st->id = flags & ASF_STREAM_NUM; + av_init_packet(&asf_st->pkt.avpkt); + asf_st->pkt.data_size = 0; + avio_skip(pb, 4); // skip reserved field + if (!ts_data_len) { + av_log(s, AV_LOG_WARNING, "Suspicious data found! 
ASF stream #%d will be ignored.\n", + asf_st->stream_index); + align_position(pb, asf->offset, size); + return 0; } + switch (type) { + case AVMEDIA_TYPE_AUDIO: + asf_st->type = AVMEDIA_TYPE_AUDIO; + if ((ret = ff_get_wav_header(pb, st->codec, ts_data_len)) < 0) + return ret; + break; + case AVMEDIA_TYPE_VIDEO: + asf_st->type = AVMEDIA_TYPE_VIDEO; + if ((ret = parse_video_info(pb, st)) < 0) + return ret; + break; + default: + avio_skip(pb, ts_data_len); + break; + } + + if (err_data_len) { + if (type == AVMEDIA_TYPE_AUDIO) { + uint8_t span = avio_r8(pb); + if (span > 1) { + asf_st->span = span; + asf_st->virtual_pkt_len = avio_rl16(pb); + asf_st->virtual_chunk_len = avio_rl16(pb); + avio_skip(pb, err_data_len - 5); + } else + avio_skip(pb, err_data_len - 1); + } else + avio_skip(pb, err_data_len); + } + + asf->nb_streams++; + align_position(pb, asf->offset, size); + return 0; } -static int asf_read_language_list(AVFormatContext *s, int64_t size) +static void set_language(AVFormatContext *s, const char *rfc1766, AVDictionary **met) +{ + // language abbr should contain at least 2 chars + if (rfc1766 && strlen(rfc1766) > 1) { + const char primary_tag[3] = { rfc1766[0], rfc1766[1], '\0' }; // ignore country code if any + const char *iso6392 = av_convert_lang_to(primary_tag, + AV_LANG_ISO639_2_BIBL); + if (iso6392) + if (av_dict_set(met, "language", iso6392, 0) < 0) + av_log(s, AV_LOG_WARNING, "av_dict_set failed.\n"); + } +} + +static int asf_read_ext_stream_properties(AVFormatContext *s, const GUIDParseTable *g) { - AVIOContext *pb = s->pb; ASFContext *asf = s->priv_data; - int j, ret; - int stream_count = avio_rl16(pb); - for (j = 0; j < stream_count; j++) { - char lang[6]; - unsigned int lang_len = avio_r8(pb); - if ((ret = avio_get_str16le(pb, lang_len, lang, - sizeof(lang))) < lang_len) - avio_skip(pb, lang_len - ret); - if (j < 128) - av_strlcpy(asf->stream_languages[j], lang, - sizeof(*asf->stream_languages)); + AVIOContext *pb = s->pb; + AVStream *st = NULL; + ff_asf_guid guid; + uint16_t nb_st_name, nb_pay_exts, st_num, lang_idx; + int i, ret; + uint32_t bitrate; + uint64_t start_time, end_time, time_per_frame; + uint64_t size = avio_rl64(pb); + + start_time = avio_rl64(pb); + end_time = avio_rl64(pb); + bitrate = avio_rl32(pb); + avio_skip(pb, 28); // skip some unused values + st_num = avio_rl16(pb); + st_num &= ASF_STREAM_NUM; + lang_idx = avio_rl16(pb); // Stream Language ID Index + for (i = 0; i < asf->nb_streams; i++) { + if (st_num == asf->asf_st[i]->stream_index) { + st = s->streams[asf->asf_st[i]->index]; + asf->asf_st[i]->lang_idx = lang_idx; + break; + } + } + time_per_frame = avio_rl64(pb); // average time per frame + if (st) { + st->start_time = start_time; + st->duration = end_time - start_time; + st->codec->bit_rate = bitrate; + st->avg_frame_rate.num = 10000000; + st->avg_frame_rate.den = time_per_frame; + } + nb_st_name = avio_rl16(pb); + nb_pay_exts = avio_rl16(pb); + for (i = 0; i < nb_st_name; i++) { + uint16_t len; + + avio_rl16(pb); // Language ID Index + len = avio_rl16(pb); + avio_skip(pb, len); + } + + for (i = 0; i < nb_pay_exts; i++) { + uint32_t len; + avio_skip(pb, 16); // Extension System ID + avio_skip(pb, 2); // Extension Data Size + len = avio_rl32(pb); + avio_skip(pb, len); + } + + if ((ret = ff_get_guid(pb, &guid)) < 0) { + align_position(pb, asf->offset, size); + + return 0; } + g = find_guid(guid); + if (g && !(strcmp(g->name, "Stream Properties"))) { + if ((ret = g->read_object(s, g)) < 0) + return ret; + } + + align_position(pb, asf->offset, 
size); return 0; } -static int asf_read_metadata(AVFormatContext *s, int64_t size) +static int asf_read_language_list(AVFormatContext *s, const GUIDParseTable *g) { - AVIOContext *pb = s->pb; - ASFContext *asf = s->priv_data; - int n, stream_num, name_len, value_len; - int ret, i; - n = avio_rl16(pb); - - for (i = 0; i < n; i++) { - char name[1024]; - int value_type; - - avio_rl16(pb); // lang_list_index - stream_num = avio_rl16(pb); - name_len = avio_rl16(pb); - value_type = avio_rl16(pb); /* value_type */ - value_len = avio_rl32(pb); - - if ((ret = avio_get_str16le(pb, name_len, name, sizeof(name))) < name_len) - avio_skip(pb, name_len - ret); - av_log(s, AV_LOG_TRACE, "%d stream %d name_len %2d type %d len %4d <%s>\n", - i, stream_num, name_len, value_type, value_len, name); - - if (!strcmp(name, "AspectRatioX")){ - int aspect_x = get_value(s->pb, value_type, 16); - if(stream_num < 128) - asf->dar[stream_num].num = aspect_x; - } else if(!strcmp(name, "AspectRatioY")){ - int aspect_y = get_value(s->pb, value_type, 16); - if(stream_num < 128) - asf->dar[stream_num].den = aspect_y; - } else { - get_tag(s, name, value_type, value_len, 16); + ASFContext *asf = s->priv_data; + AVIOContext *pb = s->pb; + int i, ret; + uint64_t size = avio_rl64(pb); + uint16_t nb_langs = avio_rl16(pb); + + if (nb_langs < ASF_MAX_STREAMS) { + for (i = 0; i < nb_langs; i++) { + size_t len; + len = avio_r8(pb); + if (!len) + len = 6; + if ((ret = get_asf_string(pb, len, asf->asf_sd[i].langs, + sizeof(asf->asf_sd[i].langs))) < 0) { + return ret; + } } } + align_position(pb, asf->offset, size); return 0; } -static int asf_read_marker(AVFormatContext *s, int64_t size) +// returns data object offset when reading this object for the first time +static int asf_read_data(AVFormatContext *s, const GUIDParseTable *g) { - AVIOContext *pb = s->pb; ASFContext *asf = s->priv_data; - int i, count, name_len, ret; - char name[1024]; + AVIOContext *pb = s->pb; + uint64_t size = asf->data_size = avio_rl64(pb); + int i; - avio_rl64(pb); // reserved 16 bytes - avio_rl64(pb); // ... - count = avio_rl32(pb); // markers count - avio_rl16(pb); // reserved 2 bytes - name_len = avio_rl16(pb); // name length - for (i = 0; i < name_len; i++) - avio_r8(pb); // skip the name - - for (i = 0; i < count; i++) { - int64_t pres_time; - int name_len; - - avio_rl64(pb); // offset, 8 bytes - pres_time = avio_rl64(pb); // presentation time - pres_time -= asf->hdr.preroll * 10000; - avio_rl16(pb); // entry length - avio_rl32(pb); // send time - avio_rl32(pb); // flags - name_len = avio_rl32(pb); // name length - if ((ret = avio_get_str16le(pb, name_len * 2, name, - sizeof(name))) < name_len) - avio_skip(pb, name_len - ret); - avpriv_new_chapter(s, i, (AVRational) { 1, 10000000 }, pres_time, - AV_NOPTS_VALUE, name); + if (!asf->data_reached && pb->seekable) { + asf->data_reached = 1; + asf->data_offset = asf->offset; + } + + for (i = 0; i < asf->nb_streams; i++) { + if (!(asf->b_flags & ASF_FLAG_BROADCAST)) + s->streams[i]->duration = asf->duration; } + asf->nb_mult_left = 0; + asf->sub_left = 0; + asf->state = PARSE_PACKET_HEADER; + asf->return_subpayload = 0; + asf->packet_size_internal = 0; + avio_skip(pb, 16); // skip File ID + size = avio_rl64(pb); // Total Data Packets + if (size != asf->nb_packets) + av_log(s, AV_LOG_WARNING, + "Number of Packets from File Properties Object is not equal to Total" + "Datapackets value! 
num of packets %"PRIu64" total num %"PRIu64".\n", + size, asf->nb_packets); + avio_skip(pb, 2); // skip reserved field + asf->first_packet_offset = avio_tell(pb); + align_position(pb, asf->offset, asf->data_size); return 0; } -static int asf_read_header(AVFormatContext *s) +static int asf_read_simple_index(AVFormatContext *s, const GUIDParseTable *g) { ASFContext *asf = s->priv_data; - ff_asf_guid g; AVIOContext *pb = s->pb; + AVStream *st = NULL; + uint64_t interval; // index entry time interval in 100 ns units, usually it's 1s + uint32_t pkt_num, nb_entries; + int32_t prev_pkt_num = -1; int i; - int64_t gsize; - - ff_get_guid(pb, &g); - if (ff_guidcmp(&g, &ff_asf_header)) - return -1; - avio_rl64(pb); - avio_rl32(pb); - avio_r8(pb); - avio_r8(pb); - memset(&asf->asfid2avid, -1, sizeof(asf->asfid2avid)); - for (;;) { - uint64_t gpos = avio_tell(pb); - ff_get_guid(pb, &g); - gsize = avio_rl64(pb); - print_guid(&g); - if (!ff_guidcmp(&g, &ff_asf_data_header)) { - asf->data_object_offset = avio_tell(pb); - /* If not streaming, gsize is not unlimited (how?), - * and there is enough space in the file.. */ - if (!(asf->hdr.flags & 0x01) && gsize >= 100) - asf->data_object_size = gsize - 24; - else - asf->data_object_size = (uint64_t)-1; + uint64_t size = avio_rl64(pb); + + // simple index objects should be ordered by stream number, this loop tries to find + // the first not indexed video stream + for (i = 0; i < asf->nb_streams; i++) { + if ((asf->asf_st[i]->type == AVMEDIA_TYPE_VIDEO) && !asf->asf_st[i]->indexed) { + asf->asf_st[i]->indexed = 1; + st = s->streams[asf->asf_st[i]->index]; break; } - if (gsize < 24) - return -1; - if (!ff_guidcmp(&g, &ff_asf_file_header)) { - int ret = asf_read_file_properties(s, gsize); - if (ret < 0) - return ret; - } else if (!ff_guidcmp(&g, &ff_asf_stream_header)) { - int ret = asf_read_stream_properties(s, gsize); - if (ret < 0) - return ret; - } else if (!ff_guidcmp(&g, &ff_asf_comment_header)) { - asf_read_content_desc(s, gsize); - } else if (!ff_guidcmp(&g, &ff_asf_language_guid)) { - asf_read_language_list(s, gsize); - } else if (!ff_guidcmp(&g, &ff_asf_extended_content_header)) { - asf_read_ext_content_desc(s, gsize); - } else if (!ff_guidcmp(&g, &ff_asf_metadata_header)) { - asf_read_metadata(s, gsize); - } else if (!ff_guidcmp(&g, &ff_asf_metadata_library_header)) { - asf_read_metadata(s, gsize); - } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_header)) { - asf_read_ext_stream_properties(s, gsize); - - // there could be a optional stream properties object to follow - // if so the next iteration will pick it up - continue; - } else if (!ff_guidcmp(&g, &ff_asf_head1_guid)) { - ff_get_guid(pb, &g); - avio_skip(pb, 6); - continue; - } else if (!ff_guidcmp(&g, &ff_asf_marker_header)) { - asf_read_marker(s, gsize); - } else if (pb->eof_reached) { - return -1; - } else { - if (!s->keylen) { - if (!ff_guidcmp(&g, &ff_asf_content_encryption)) { - av_log(s, AV_LOG_WARNING, - "DRM protected stream detected, decoding will likely fail!\n"); - } else if (!ff_guidcmp(&g, &ff_asf_ext_content_encryption)) { - av_log(s, AV_LOG_WARNING, - "Ext DRM protected stream detected, decoding will likely fail!\n"); - } else if (!ff_guidcmp(&g, &ff_asf_digital_signature)) { - av_log(s, AV_LOG_WARNING, - "Digital signature detected, decoding will likely fail!\n"); - } - } - } - if (avio_tell(pb) != gpos + gsize) - av_log(s, AV_LOG_DEBUG, - "gpos mismatch our pos=%"PRIu64", end=%"PRId64"\n", - avio_tell(pb) - gpos, gsize); - avio_seek(pb, gpos + gsize, SEEK_SET); } - ff_get_guid(pb, 
&g); - avio_rl64(pb); - avio_r8(pb); - avio_r8(pb); - if (pb->eof_reached) - return -1; - asf->data_offset = avio_tell(pb); - asf->packet_size_left = 0; - - for (i = 0; i < 128; i++) { - int stream_num = asf->asfid2avid[i]; - if (stream_num >= 0) { - AVStream *st = s->streams[stream_num]; - if (!st->codec->bit_rate) - st->codec->bit_rate = asf->stream_bitrates[i]; - if (asf->dar[i].num > 0 && asf->dar[i].den > 0) { - av_reduce(&st->sample_aspect_ratio.num, - &st->sample_aspect_ratio.den, - asf->dar[i].num, asf->dar[i].den, INT_MAX); - } else if ((asf->dar[0].num > 0) && (asf->dar[0].den > 0) && - // Use ASF container value if the stream doesn't set AR. - (st->codec->codec_type == AVMEDIA_TYPE_VIDEO)) - av_reduce(&st->sample_aspect_ratio.num, - &st->sample_aspect_ratio.den, - asf->dar[0].num, asf->dar[0].den, INT_MAX); - - av_log(s, AV_LOG_TRACE, "i=%d, st->codec->codec_type:%d, asf->dar %d:%d sar=%d:%d\n", - i, st->codec->codec_type, asf->dar[i].num, asf->dar[i].den, - st->sample_aspect_ratio.num, st->sample_aspect_ratio.den); - - // copy and convert language codes to the frontend - if (asf->streams[i].stream_language_index < 128) { - const char *rfc1766 = asf->stream_languages[asf->streams[i].stream_language_index]; - if (rfc1766 && strlen(rfc1766) > 1) { - const char primary_tag[3] = { rfc1766[0], rfc1766[1], '\0' }; // ignore country code if any - const char *iso6392 = av_convert_lang_to(primary_tag, - AV_LANG_ISO639_2_BIBL); - if (iso6392) - av_dict_set(&st->metadata, "language", iso6392, 0); - } - } + if (!st) { + avio_skip(pb, size - 24); // if there's no video stream, skip index object + return 0; + } + avio_skip(pb, 16); // skip File ID + interval = avio_rl64(pb); + avio_skip(pb, 4); + nb_entries = avio_rl32(pb); + for (i = 0; i < nb_entries; i++) { + pkt_num = avio_rl32(pb); + avio_skip(pb, 2); + if (prev_pkt_num != pkt_num) { + av_add_index_entry(st, asf->first_packet_offset + asf->packet_size * + pkt_num, av_rescale(interval, i, 10000), + asf->packet_size, 0, AVINDEX_KEYFRAME); + prev_pkt_num = pkt_num; } } - - ff_metadata_conv(&s->metadata, NULL, ff_asf_metadata_conv); + asf->is_simple_index = 1; + align_position(pb, asf->offset, size); return 0; } -#define DO_2BITS(bits, var, defval) \ - switch (bits & 3) { \ - case 3: \ - var = avio_rl32(pb); \ - rsize += 4; \ - break; \ - case 2: \ - var = avio_rl16(pb); \ - rsize += 2; \ - break; \ - case 1: \ - var = avio_r8(pb); \ - rsize++; \ - break; \ - default: \ - var = defval; \ - break; \ - } +static const GUIDParseTable gdef[] = { + { "Data", { 0x75, 0xB2, 0x26, 0x36, 0x66, 0x8E, 0x11, 0xCF, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C }, asf_read_data, 1 }, + { "Simple Index", { 0x33, 0x00, 0x08, 0x90, 0xE5, 0xB1, 0x11, 0xCF, 0x89, 0xF4, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xCB }, asf_read_simple_index, 1 }, + { "Content Description", { 0x75, 0xB2, 0x26, 0x33, 0x66 ,0x8E, 0x11, 0xCF, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C }, asf_read_content_desc, 1 }, + { "Extended Content Description", { 0xD2, 0xD0, 0xA4, 0x40, 0xE3, 0x07, 0x11, 0xD2, 0x97, 0xF0, 0x00, 0xA0, 0xC9, 0x5e, 0xA8, 0x50 }, asf_read_ext_content, 1 }, + { "Stream Bitrate Properties", { 0x7B, 0xF8, 0x75, 0xCE, 0x46, 0x8D, 0x11, 0xD1, 0x8D, 0x82, 0x00, 0x60, 0x97, 0xC9, 0xA2, 0xB2 }, asf_read_unknown, 1 }, + { "File Properties", { 0x8C, 0xAB, 0xDC, 0xA1, 0xA9, 0x47, 0x11, 0xCF, 0x8E, 0xE4, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65 }, asf_read_properties, 1 }, + { "Header Extension", { 0x5F, 0xBF, 0x03, 0xB5, 0xA9, 0x2E, 0x11, 0xCF, 0x8E, 0xE3, 0x00, 0xC0, 0x0C, 0x20, 0x53, 
0x65 }, asf_read_unknown, 0 }, + { "Stream Properties", { 0xB7, 0xDC, 0x07, 0x91, 0xA9, 0xB7, 0x11, 0xCF, 0x8E, 0xE6, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65 }, asf_read_stream_properties, 1 }, + { "Codec List", { 0x86, 0xD1, 0x52, 0x40, 0x31, 0x1D, 0x11, 0xD0, 0xA3, 0xA4, 0x00, 0xA0, 0xC9, 0x03, 0x48, 0xF6 }, asf_read_unknown, 1 }, + { "Marker", { 0xF4, 0x87, 0xCD, 0x01, 0xA9, 0x51, 0x11, 0xCF, 0x8E, 0xE6, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65 }, asf_read_marker, 1 }, + { "Script Command", { 0x1E, 0xFB, 0x1A, 0x30, 0x0B, 0x62, 0x11, 0xD0, 0xA3, 0x9B, 0x00, 0xA0, 0xC9, 0x03, 0x48, 0xF6 }, asf_read_unknown, 1 }, + { "Language List", { 0x7C, 0x43, 0x46, 0xa9, 0xef, 0xe0, 0x4B, 0xFC, 0xB2, 0x29, 0x39, 0x3e, 0xde, 0x41, 0x5c, 0x85 }, asf_read_language_list, 1}, + { "Padding", { 0x18, 0x06, 0xD4, 0x74, 0xCA, 0xDF, 0x45, 0x09, 0xA4, 0xBA, 0x9A, 0xAB, 0xCB, 0x96, 0xAA, 0xE8 }, asf_read_unknown, 1 }, + { "DRMv1 Header", { 0x22, 0x11, 0xB3, 0xFB, 0xBD, 0x23, 0x11, 0xD2, 0xB4, 0xB7, 0x00, 0xA0, 0xC9, 0x55, 0xFC, 0x6E }, asf_read_unknown, 1 }, + { "DRMv2 Header", { 0x29, 0x8A, 0xE6, 0x14, 0x26, 0x22, 0x4C, 0x17, 0xB9, 0x35, 0xDA, 0xE0, 0x7E, 0xE9, 0x28, 0x9c }, asf_read_unknown, 1 }, + { "Index", { 0xD6, 0xE2, 0x29, 0xD3, 0x35, 0xDA, 0x11, 0xD1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE }, asf_read_unknown, 1 }, + { "Media Object Index", { 0xFE, 0xB1, 0x03, 0xF8, 0x12, 0xAD, 0x4C, 0x64, 0x84, 0x0F, 0x2A, 0x1D, 0x2F, 0x7A, 0xD4, 0x8C }, asf_read_unknown, 1 }, + { "Timecode Index", { 0x3C, 0xB7, 0x3F, 0xD0, 0x0C, 0x4A, 0x48, 0x03, 0x95, 0x3D, 0xED, 0xF7, 0xB6, 0x22, 0x8F, 0x0C }, asf_read_unknown, 0 }, + { "Bitrate_Mutual_Exclusion", { 0xD6, 0xE2, 0x29, 0xDC, 0x35, 0xDA, 0x11, 0xD1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE }, asf_read_unknown, 1 }, + { "Error Correction", { 0x75, 0xB2, 0x26, 0x35, 0x66, 0x8E, 0x11, 0xCF, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C }, asf_read_unknown, 1 }, + { "Content Branding", { 0x22, 0x11, 0xB3, 0xFA, 0xBD, 0x23, 0x11, 0xD2, 0xB4, 0xB7, 0x00, 0xA0, 0xC9, 0x55, 0xFC, 0x6E }, asf_read_unknown, 1 }, + { "Content Encryption", { 0x22, 0x11, 0xB3, 0xFB, 0xBD, 0x23, 0x11, 0xD2, 0xB4, 0xB7, 0x00, 0xA0, 0xC9, 0x55, 0xFC, 0x6E }, asf_read_unknown, 1 }, + { "Extended Content Encryption", { 0x29, 0x8A, 0xE6, 0x14, 0x26, 0x22, 0x4C, 0x17, 0xB9, 0x35, 0xDA, 0xE0, 0x7E, 0xE9, 0x28, 0x9C }, asf_read_unknown, 1 }, + { "Digital Signature", { 0x22, 0x11, 0xB3, 0xFC, 0xBD, 0x23, 0x11, 0xD2, 0xB4, 0xB7, 0x00, 0xA0, 0xC9, 0x55, 0xFC, 0x6E }, asf_read_unknown, 1 }, + { "Extended Stream Properties", { 0x14, 0xE6, 0xA5, 0xCB, 0xC6, 0x72, 0x43, 0x32, 0x83, 0x99, 0xA9, 0x69, 0x52, 0x06, 0x5B, 0x5A }, asf_read_ext_stream_properties, 1 }, + { "Advanced Mutual Exclusion", { 0xA0, 0x86, 0x49, 0xCF, 0x47, 0x75, 0x46, 0x70, 0x8A, 0x16, 0x6E, 0x35, 0x35, 0x75, 0x66, 0xCD }, asf_read_unknown, 1 }, + { "Group Mutual Exclusion", { 0xD1, 0x46, 0x5A, 0x40, 0x5A, 0x79, 0x43, 0x38, 0xB7, 0x1B, 0xE3, 0x6B, 0x8F, 0xD6, 0xC2, 0x49 }, asf_read_unknown, 1}, + { "Stream Prioritization", { 0xD4, 0xFE, 0xD1, 0x5B, 0x88, 0xD3, 0x45, 0x4F, 0x81, 0xF0, 0xED, 0x5C, 0x45, 0x99, 0x9E, 0x24 }, asf_read_unknown, 1 }, + { "Bandwidth Sharing Object", { 0xA6, 0x96, 0x09, 0xE6, 0x51, 0x7B, 0x11, 0xD2, 0xB6, 0xAF, 0x00, 0xC0, 0x4F, 0xD9, 0x08, 0xE9 }, asf_read_unknown, 1 }, + { "Metadata", { 0xC5, 0xF8, 0xCB, 0xEA, 0x5B, 0xAF, 0x48, 0x77, 0x84, 0x67, 0xAA, 0x8C, 0x44, 0xFA, 0x4C, 0xCA }, asf_read_metadata_obj, 1 }, + { "Metadata Library", { 0x44, 0x23, 0x1C, 0x94, 0x94, 0x98, 0x49, 0xD1, 0xA1, 0x41, 0x1D, 0x13, 0x4E, 
0x45, 0x70, 0x54 }, asf_read_metadata_obj, 1 }, + { "Audio Spread", { 0xBF, 0xC3, 0xCD, 0x50, 0x61, 0x8F, 0x11, 0xCF, 0x8B, 0xB2, 0x00, 0xAA, 0x00, 0xB4, 0xE2, 0x20 }, asf_read_unknown, 1 }, + { "Index Parameters", { 0xD6, 0xE2, 0x29, 0xDF, 0x35, 0xDA, 0x11, 0xD1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE }, asf_read_unknown, 1 }, + { "Content Encryption System Windows Media DRM Network Devices", + { 0x7A, 0x07, 0x9B, 0xB6, 0xDA, 0XA4, 0x4e, 0x12, 0xA5, 0xCA, 0x91, 0xD3, 0x8D, 0xC1, 0x1A, 0x8D }, asf_read_unknown, 1 }, + { "Mutex Language", { 0xD6, 0xE2, 0x2A, 0x00, 0x25, 0xDA, 0x11, 0xD1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE }, asf_read_unknown, 1 }, + { "Mutex Bitrate", { 0xD6, 0xE2, 0x2A, 0x01, 0x25, 0xDA, 0x11, 0xD1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE }, asf_read_unknown, 1 }, + { "Mutex Unknown", { 0xD6, 0xE2, 0x2A, 0x02, 0x25, 0xDA, 0x11, 0xD1, 0x90, 0x34, 0x00, 0xA0, 0xC9, 0x03, 0x49, 0xBE }, asf_read_unknown, 1 }, + { "Bandwith Sharing Exclusive", { 0xAF, 0x60, 0x60, 0xAA, 0x51, 0x97, 0x11, 0xD2, 0xB6, 0xAF, 0x00, 0xC0, 0x4F, 0xD9, 0x08, 0xE9 }, asf_read_unknown, 1 }, + { "Bandwith Sharing Partial", { 0xAF, 0x60, 0x60, 0xAB, 0x51, 0x97, 0x11, 0xD2, 0xB6, 0xAF, 0x00, 0xC0, 0x4F, 0xD9, 0x08, 0xE9 }, asf_read_unknown, 1 }, + { "Payload Extension System Timecode", { 0x39, 0x95, 0x95, 0xEC, 0x86, 0x67, 0x4E, 0x2D, 0x8F, 0xDB, 0x98, 0x81, 0x4C, 0xE7, 0x6C, 0x1E }, asf_read_unknown, 1 }, + { "Payload Extension System File Name", { 0xE1, 0x65, 0xEC, 0x0E, 0x19, 0xED, 0x45, 0xD7, 0xB4, 0xA7, 0x25, 0xCB, 0xD1, 0xE2, 0x8E, 0x9B }, asf_read_unknown, 1 }, + { "Payload Extension System Content Type", { 0xD5, 0x90, 0xDC, 0x20, 0x07, 0xBC, 0x43, 0x6C, 0x9C, 0xF7, 0xF3, 0xBB, 0xFB, 0xF1, 0xA4, 0xDC }, asf_read_unknown, 1 }, + { "Payload Extension System Pixel Aspect Ratio", { 0x1, 0x1E, 0xE5, 0x54, 0xF9, 0xEA, 0x4B, 0xC8, 0x82, 0x1A, 0x37, 0x6B, 0x74, 0xE4, 0xC4, 0xB8 }, asf_read_unknown, 1 }, + { "Payload Extension System Sample Duration", { 0xC6, 0xBD, 0x94, 0x50, 0x86, 0x7F, 0x49, 0x07, 0x83, 0xA3, 0xC7, 0x79, 0x21, 0xB7, 0x33, 0xAD }, asf_read_unknown, 1 }, + { "Payload Extension System Encryption Sample ID", { 0x66, 0x98, 0xB8, 0x4E, 0x0A, 0xFA, 0x43, 0x30, 0xAE, 0xB2, 0x1C, 0x0A, 0x98, 0xD7, 0xA4, 0x4D }, asf_read_unknown, 1 }, + { "Payload Extension System Degradable JPEG", { 0x00, 0xE1, 0xAF, 0x06, 0x7B, 0xEC, 0x11, 0xD1, 0xA5, 0x82, 0x00, 0xC0, 0x4F, 0xC2, 0x9C, 0xFB }, asf_read_unknown, 1 }, +}; -/** - * Load a single ASF packet into the demuxer. 
- * @param s demux context - * @param pb context to read data from - * @return 0 on success, <0 on error - */ -static int asf_get_packet(AVFormatContext *s, AVIOContext *pb) +#define READ_LEN(flag, name, len) \ + do { \ + if ((flag) == name ## IS_BYTE) \ + len = avio_r8(pb); \ + else if ((flag) == name ## IS_WORD) \ + len = avio_rl16(pb); \ + else if ((flag) == name ## IS_DWORD) \ + len = avio_rl32(pb); \ + else \ + len = 0; \ + } while(0) + +static int asf_read_subpayload(AVFormatContext *s, AVPacket *pkt, int is_header) { ASFContext *asf = s->priv_data; - uint32_t packet_length, padsize; - int rsize = 8; - int c, d, e, off; - - // if we do not know packet size, allow skipping up to 32 kB - off = 32768; - if (asf->no_resync_search) - off = 3; - else if (s->packet_size > 0) - off = (avio_tell(pb) - s->internal->data_offset) % s->packet_size + 3; - - c = d = e = -1; - while (off-- > 0) { - c = d; - d = e; - e = avio_r8(pb); - if (c == 0x82 && !d && !e) - break; - } + AVIOContext *pb = s->pb; + uint8_t sub_len; + int ret, i; - if (c != 0x82) { - /* This code allows handling of -EAGAIN at packet boundaries (i.e. - * if the packet sync code above triggers -EAGAIN). This does not - * imply complete -EAGAIN handling support at random positions in - * the stream. */ - if (pb->error == AVERROR(EAGAIN)) - return AVERROR(EAGAIN); - if (!pb->eof_reached) - av_log(s, AV_LOG_ERROR, - "ff asf bad header %x at:%"PRId64"\n", c, avio_tell(pb)); + if (is_header) { + asf->dts_delta = avio_r8(pb); + if (asf->nb_mult_left) { + asf->mult_sub_len = avio_rl16(pb); // total + } + asf->sub_header_offset = avio_tell(pb); + asf->nb_sub = 0; + asf->sub_left = 1; } - if ((c & 0x8f) == 0x82) { - if (d || e) { - if (!pb->eof_reached) - av_log(s, AV_LOG_ERROR, "ff asf bad non zero\n"); - return -1; + sub_len = avio_r8(pb); + if ((ret = av_get_packet(pb, pkt, sub_len)) < 0) // each subpayload is entire frame + return ret; + for (i = 0; i < asf->nb_streams; i++) { + if (asf->stream_index == asf->asf_st[i]->stream_index) { + pkt->stream_index = asf->asf_st[i]->index; + break; } - c = avio_r8(pb); - d = avio_r8(pb); - rsize += 3; - } else if (!pb->eof_reached) { - avio_seek(pb, -1, SEEK_CUR); // FIXME } - - asf->packet_flags = c; - asf->packet_property = d; - - DO_2BITS(asf->packet_flags >> 5, packet_length, s->packet_size); - DO_2BITS(asf->packet_flags >> 1, padsize, 0); // sequence ignored - DO_2BITS(asf->packet_flags >> 3, padsize, 0); // padding length - - // the following checks prevent overflows and infinite loops - if (!packet_length || packet_length >= (1U << 29)) { - av_log(s, AV_LOG_ERROR, - "invalid packet_length %"PRIu32" at:%"PRId64"\n", - packet_length, avio_tell(pb)); - return -1; + asf->return_subpayload = 1; + if (!sub_len) + asf->return_subpayload = 0; + + if (sub_len) + asf->nb_sub++; + pkt->dts = asf->sub_dts + (asf->nb_sub - 1) * asf->dts_delta - asf->preroll; + if (asf->nb_mult_left && (avio_tell(pb) >= + (asf->sub_header_offset + asf->mult_sub_len))) { + asf->sub_left = 0; + asf->nb_mult_left--; } - if (padsize >= packet_length) { - av_log(s, AV_LOG_ERROR, - "invalid padsize %"PRIu32" at:%"PRId64"\n", padsize, avio_tell(pb)); - return -1; + if (avio_tell(pb) >= asf->packet_offset + asf->packet_size - asf->pad_len) { + asf->sub_left = 0; + if (!asf->nb_mult_left) { + avio_skip(pb, asf->pad_len); + if (avio_tell(pb) != asf->packet_offset + asf->packet_size) { + if (!asf->packet_size) + return AVERROR_INVALIDDATA; + av_log(s, AV_LOG_WARNING, + "Position %"PRId64" wrong, should be %"PRId64"\n", + 
avio_tell(pb), asf->packet_offset + asf->packet_size); + avio_seek(pb, asf->packet_offset + asf->packet_size, SEEK_SET); + } + } } - asf->packet_timestamp = avio_rl32(pb); - avio_rl16(pb); /* duration */ - // rsize has at least 11 bytes which have to be present - - if (asf->packet_flags & 0x01) { - asf->packet_segsizetype = avio_r8(pb); - rsize++; - asf->packet_segments = asf->packet_segsizetype & 0x3f; - } else { - asf->packet_segments = 1; - asf->packet_segsizetype = 0x80; - } - if (rsize > packet_length - padsize) { - asf->packet_size_left = 0; - av_log(s, AV_LOG_ERROR, - "invalid packet header length %d for pktlen %"PRIu32"-%"PRIu32" at %"PRId64"\n", - rsize, packet_length, padsize, avio_tell(pb)); - return -1; - } - asf->packet_size_left = packet_length - padsize - rsize; - if (packet_length < asf->hdr.min_pktsize) - padsize += asf->hdr.min_pktsize - packet_length; - asf->packet_padsize = padsize; - av_log(s, AV_LOG_TRACE, "packet: size=%d padsize=%d left=%d\n", - s->packet_size, asf->packet_padsize, asf->packet_size_left); return 0; } -/** - * - * @return <0 if error - */ -static int asf_read_frame_header(AVFormatContext *s, AVIOContext *pb) +static void reset_packet(ASFPacket *asf_pkt) +{ + asf_pkt->size_left = 0; + asf_pkt->data_size = 0; + asf_pkt->duration = 0; + asf_pkt->flags = 0; + asf_pkt->dts = 0; + asf_pkt->duration = 0; + av_free_packet(&asf_pkt->avpkt); + av_init_packet(&asf_pkt->avpkt); +} + +static int asf_read_multiple_payload(AVFormatContext *s, AVPacket *pkt, + ASFPacket *asf_pkt) { ASFContext *asf = s->priv_data; - int rsize = 1; - int num = avio_r8(pb); - int64_t ts0; - - asf->packet_segments--; - asf->packet_key_frame = num >> 7; - asf->stream_index = asf->asfid2avid[num & 0x7f]; - // sequence should be ignored! - DO_2BITS(asf->packet_property >> 4, asf->packet_seq, 0); - DO_2BITS(asf->packet_property >> 2, asf->packet_frag_offset, 0); - DO_2BITS(asf->packet_property, asf->packet_replic_size, 0); - av_log(asf, AV_LOG_TRACE, "key:%d stream:%d seq:%d offset:%d replic_size:%d\n", - asf->packet_key_frame, asf->stream_index, asf->packet_seq, - asf->packet_frag_offset, asf->packet_replic_size); - if (asf->packet_replic_size >= 8) { - asf->packet_obj_size = avio_rl32(pb); - if (asf->packet_obj_size >= (1 << 24) || asf->packet_obj_size <= 0) { - av_log(s, AV_LOG_ERROR, "packet_obj_size invalid\n"); - return -1; - } - asf->packet_frag_timestamp = avio_rl32(pb); // timestamp - if (asf->packet_replic_size >= 8 + 38 + 4) { - avio_skip(pb, 10); - ts0 = avio_rl64(pb); - avio_skip(pb, 8); - avio_skip(pb, 12); - avio_rl32(pb); - avio_skip(pb, asf->packet_replic_size - 8 - 38 - 4); - if (ts0 != -1) - asf->packet_frag_timestamp = ts0 / 10000; - else - asf->packet_frag_timestamp = AV_NOPTS_VALUE; + AVIOContext *pb = s->pb; + uint16_t pay_len; + unsigned char *p; + int ret; + int skip = 0; + + // if replicated lenght is 1, subpayloads are present + if (asf->rep_data_len == 1) { + asf->sub_left = 1; + asf->state = READ_MULTI_SUB; + pkt->flags = asf_pkt->flags; + if ((ret = asf_read_subpayload(s, pkt, 1)) < 0) + return ret; + } else { + if (!asf_pkt->data_size) { + asf_pkt->data_size = asf_pkt->size_left = avio_rl32(pb); // read media object size + if (asf_pkt->data_size <= 0) + return AVERROR_EOF; + if ((ret = av_new_packet(&asf_pkt->avpkt, asf_pkt->data_size)) < 0) + return ret; } else - avio_skip(pb, asf->packet_replic_size - 8); - rsize += asf->packet_replic_size; // FIXME - check validity - } else if (asf->packet_replic_size == 1) { - // multipacket - frag_offset is beginning 
timestamp - asf->packet_time_start = asf->packet_frag_offset; - asf->packet_frag_offset = 0; - asf->packet_frag_timestamp = asf->packet_timestamp; - - asf->packet_time_delta = avio_r8(pb); - rsize++; - } else if (asf->packet_replic_size != 0) { - av_log(s, AV_LOG_ERROR, "unexpected packet_replic_size of %d\n", - asf->packet_replic_size); - return -1; - } - if (asf->packet_flags & 0x01) { - DO_2BITS(asf->packet_segsizetype >> 6, asf->packet_frag_size, 0); // 0 is illegal - if (rsize > asf->packet_size_left) { - av_log(s, AV_LOG_ERROR, "packet_replic_size is invalid\n"); - return -1; - } else if (asf->packet_frag_size > asf->packet_size_left - rsize) { - if (asf->packet_frag_size > asf->packet_size_left - rsize + asf->packet_padsize) { - av_log(s, AV_LOG_ERROR, "packet_frag_size is invalid (%d-%d)\n", - asf->packet_size_left, rsize); - return -1; - } else { - int diff = asf->packet_frag_size - (asf->packet_size_left - rsize); - asf->packet_size_left += diff; - asf->packet_padsize -= diff; - } + avio_skip(pb, 4); // reading of media object size is already done + asf_pkt->dts = avio_rl32(pb); // read presentation time + if ((asf->rep_data_len - 8) > 0) + avio_skip(pb, asf->rep_data_len - 8); // skip replicated data + pay_len = avio_rl16(pb); // payload length should be WORD + if (pay_len > asf->packet_size) { + av_log(s, AV_LOG_ERROR, + "Error: invalid data packet size, pay_len %"PRIu16", " + "asf->packet_size %"PRIu32", offset %"PRId64".\n", + pay_len, asf->packet_size, avio_tell(pb)); + return AVERROR_INVALIDDATA; } - } else { - if (rsize > asf->packet_size_left) { - av_log(s, AV_LOG_ERROR, "packet_replic_size is invalid\n"); - return -1; + p = asf_pkt->avpkt.data + asf_pkt->data_size - asf_pkt->size_left; + if (pay_len > asf_pkt->size_left) { + av_log(s, AV_LOG_ERROR, + "Error: invalid buffer size, pay_len %d, data size left %d.\n", + pay_len, asf_pkt->size_left); + skip = pay_len - asf_pkt->size_left; + pay_len = asf_pkt->size_left; } - asf->packet_frag_size = asf->packet_size_left - rsize; - } - if (asf->packet_replic_size == 1) { - asf->packet_multi_size = asf->packet_frag_size; - if (asf->packet_multi_size > asf->packet_size_left) - return -1; + if ((ret = avio_read(pb, p, pay_len)) < 0) + return ret; + if (s->key && s->keylen == 20) + ff_asfcrypt_dec(s->key, p, ret); + avio_skip(pb, skip); + asf_pkt->size_left -= pay_len; + asf->nb_mult_left--; } - asf->packet_size_left -= rsize; return 0; } -/** - * Parse data from individual ASF packets (which were previously loaded - * with asf_get_packet()). 
- * @param s demux context - * @param pb context to read data from - * @param pkt pointer to store packet data into - * @return 0 if data was stored in pkt, <0 on error or 1 if more ASF - * packets need to be loaded (through asf_get_packet()) - */ -static int asf_parse_packet(AVFormatContext *s, AVIOContext *pb, AVPacket *pkt) +static int asf_read_single_payload(AVFormatContext *s, AVPacket *pkt, + ASFPacket *asf_pkt) { - ASFContext *asf = s->priv_data; - ASFStream *asf_st = 0; - for (;;) { - int ret; - - if (pb->eof_reached) + ASFContext *asf = s->priv_data; + AVIOContext *pb = s->pb; + int64_t offset; + uint64_t size; + unsigned char *p; + int ret; + + if (!asf_pkt->data_size) { + asf_pkt->data_size = asf_pkt->size_left = avio_rl32(pb); // read media object size + if (asf_pkt->data_size <= 0) return AVERROR_EOF; + if ((ret = av_new_packet(&asf_pkt->avpkt, asf_pkt->data_size)) < 0) + return ret; + } else + avio_skip(pb, 4); // skip media object size + asf_pkt->dts = avio_rl32(pb); // read presentation time + if ((asf->rep_data_len - 8) > 0) + avio_skip(pb, asf->rep_data_len - 8); // skip replicated data + offset = avio_tell(pb); + + // size of the payload - size of the packet without header and padding + if (asf->packet_size_internal) + size = asf->packet_size_internal - offset + asf->packet_offset - asf->pad_len; + else + size = asf->packet_size - offset + asf->packet_offset - asf->pad_len; + if (size > asf->packet_size) { + av_log(s, AV_LOG_ERROR, + "Error: invalid data packet size, offset %"PRId64".\n", + avio_tell(pb)); + return AVERROR_INVALIDDATA; + } + p = asf_pkt->avpkt.data + asf_pkt->data_size - asf_pkt->size_left; + if (size > asf_pkt->size_left) + return AVERROR_INVALIDDATA; + if (asf_pkt->size_left > size) + asf_pkt->size_left -= size; + else + asf_pkt->size_left = 0; + if ((ret = avio_read(pb, p, size)) < 0) + return ret; + if (s->key && s->keylen == 20) + ff_asfcrypt_dec(s->key, p, ret); + if (asf->packet_size_internal) + avio_skip(pb, asf->packet_size - asf->packet_size_internal); + avio_skip(pb, asf->pad_len); // skip padding - if (asf->packet_size_left < FRAME_HEADER_SIZE || - asf->packet_segments < 1) { - int ret = asf->packet_size_left + asf->packet_padsize; - - assert(ret >= 0); - /* fail safe */ - avio_skip(pb, ret); + return 0; +} - asf->packet_pos = avio_tell(pb); - if (asf->data_object_size != (uint64_t)-1 && - (asf->packet_pos - asf->data_object_offset >= asf->data_object_size)) - return AVERROR_EOF; /* Do not exceed the size of the data object */ - return 1; - } - if (asf->packet_time_start == 0) { - if (asf_read_frame_header(s, pb) < 0) { - asf->packet_segments = 0; - continue; - } - if (asf->stream_index < 0 || - s->streams[asf->stream_index]->discard >= AVDISCARD_ALL || - (!asf->packet_key_frame && - s->streams[asf->stream_index]->discard >= AVDISCARD_NONKEY)) { - asf->packet_time_start = 0; - /* unhandled packet (should not happen) */ - avio_skip(pb, asf->packet_frag_size); - asf->packet_size_left -= asf->packet_frag_size; - if (asf->stream_index < 0) - av_log(s, AV_LOG_ERROR, "ff asf skip %d (unknown stream)\n", - asf->packet_frag_size); - continue; +static int asf_read_payload(AVFormatContext *s, AVPacket *pkt) +{ + ASFContext *asf = s->priv_data; + AVIOContext *pb = s->pb; + int ret, i; + ASFPacket *asf_pkt = NULL; + + if (!asf->sub_left) { + uint32_t off_len, media_len; + uint8_t stream_num; + + stream_num = avio_r8(pb); + asf->stream_index = stream_num & ASF_STREAM_NUM; + for (i = 0; i < asf->nb_streams; i++) { + if (asf->stream_index == 
asf->asf_st[i]->stream_index) { + asf_pkt = &asf->asf_st[i]->pkt; + asf_pkt->stream_index = asf->asf_st[i]->index; + asf_pkt->dts = asf->dts; + break; } - asf->asf_st = s->streams[asf->stream_index]->priv_data; } - asf_st = asf->asf_st; - av_assert0(asf_st); - - if (!asf_st->frag_offset && asf->packet_frag_offset) { - av_log(s, AV_LOG_TRACE, "skipping asf data pkt with fragment offset for " - "stream:%d, expected:%d but got %d from pkt)\n", - asf->stream_index, asf_st->frag_offset, - asf->packet_frag_offset); - avio_skip(pb, asf->packet_frag_size); - asf->packet_size_left -= asf->packet_frag_size; - continue; - } - - if (asf->packet_replic_size == 1) { - // frag_offset is here used as the beginning timestamp - asf->packet_frag_timestamp = asf->packet_time_start; - asf->packet_time_start += asf->packet_time_delta; - asf->packet_obj_size = asf->packet_frag_size = avio_r8(pb); - asf->packet_size_left--; - asf->packet_multi_size--; - if (asf->packet_multi_size < asf->packet_obj_size) { - asf->packet_time_start = 0; - avio_skip(pb, asf->packet_multi_size); - asf->packet_size_left -= asf->packet_multi_size; - continue; - } - asf->packet_multi_size -= asf->packet_obj_size; + if (!asf_pkt) + return AVERROR_INVALIDDATA; + if (stream_num >> 7) + asf_pkt->flags |= AV_PKT_FLAG_KEY; + READ_LEN(asf->prop_flags & ASF_PL_MASK_MEDIA_OBJECT_NUMBER_LENGTH_FIELD_SIZE, + ASF_PL_FLAG_MEDIA_OBJECT_NUMBER_LENGTH_FIELD_, media_len); + READ_LEN(asf->prop_flags & ASF_PL_MASK_OFFSET_INTO_MEDIA_OBJECT_LENGTH_FIELD_SIZE, + ASF_PL_FLAG_OFFSET_INTO_MEDIA_OBJECT_LENGTH_FIELD_, off_len); + READ_LEN(asf->prop_flags & ASF_PL_MASK_REPLICATED_DATA_LENGTH_FIELD_SIZE, + ASF_PL_FLAG_REPLICATED_DATA_LENGTH_FIELD_, asf->rep_data_len); + if (asf_pkt->size_left && (asf_pkt->frame_num != media_len)) { + av_log(s, AV_LOG_WARNING, "Unfinished frame will be ignored\n"); + reset_packet(asf_pkt); } - if (asf_st->frag_offset + asf->packet_frag_size <= asf_st->pkt.size && - asf_st->frag_offset + asf->packet_frag_size > asf->packet_obj_size) { - av_log(s, AV_LOG_INFO, "ignoring invalid packet_obj_size (%d %d %d %d)\n", - asf_st->frag_offset, asf->packet_frag_size, - asf->packet_obj_size, asf_st->pkt.size); - asf->packet_obj_size = asf_st->pkt.size; + asf_pkt->frame_num = media_len; + asf->sub_dts = off_len; + if (asf->nb_mult_left) { + if ((ret = asf_read_multiple_payload(s, pkt, asf_pkt)) < 0) + return ret; + } else if (asf->rep_data_len == 1) { + asf->sub_left = 1; + asf->state = READ_SINGLE; + pkt->flags = asf_pkt->flags; + if ((ret = asf_read_subpayload(s, pkt, 1)) < 0) + return ret; + } else { + if ((ret = asf_read_single_payload(s, pkt, asf_pkt)) < 0) + return ret; } - - if (asf_st->pkt.size != asf->packet_obj_size || - // FIXME is this condition sufficient? 
- asf_st->frag_offset + asf->packet_frag_size > asf_st->pkt.size) { - if (asf_st->pkt.data) { - av_log(s, AV_LOG_INFO, - "freeing incomplete packet size %d, new %d\n", - asf_st->pkt.size, asf->packet_obj_size); - asf_st->frag_offset = 0; - av_free_packet(&asf_st->pkt); - } - /* new packet */ - av_new_packet(&asf_st->pkt, asf->packet_obj_size); - asf_st->seq = asf->packet_seq; - asf_st->pkt.dts = asf->packet_frag_timestamp - asf->hdr.preroll; - asf_st->pkt.stream_index = asf->stream_index; - asf_st->pkt.pos = asf_st->packet_pos = asf->packet_pos; - - if (asf_st->pkt.data && asf_st->palette_changed) { - uint8_t *pal; - pal = av_packet_new_side_data(&asf_st->pkt, AV_PKT_DATA_PALETTE, - AVPALETTE_SIZE); - if (!pal) { - av_log(s, AV_LOG_ERROR, "Cannot append palette to packet\n"); - } else { - memcpy(pal, asf_st->palette, AVPALETTE_SIZE); - asf_st->palette_changed = 0; - } + } else { + for (i = 0; i <= asf->nb_streams; i++) { + if (asf->stream_index == asf->asf_st[i]->stream_index) { + asf_pkt = &asf->asf_st[i]->pkt; + break; } - av_log(asf, AV_LOG_TRACE, "new packet: stream:%d key:%d packet_key:%d audio:%d size:%d\n", - asf->stream_index, asf->packet_key_frame, - asf_st->pkt.flags & AV_PKT_FLAG_KEY, - s->streams[asf->stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO, - asf->packet_obj_size); - if (s->streams[asf->stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO) - asf->packet_key_frame = 1; - if (asf->packet_key_frame) - asf_st->pkt.flags |= AV_PKT_FLAG_KEY; } + if (!asf_pkt) + return AVERROR_INVALIDDATA; + pkt->flags = asf_pkt->flags; + pkt->dts = asf_pkt->dts; + pkt->stream_index = asf->asf_st[i]->index; + if ((ret = asf_read_subpayload(s, pkt, 0)) < 0) // read subpayload without its header + return ret; + } - /* read data */ - av_log(asf, AV_LOG_TRACE, "READ PACKET s:%d os:%d o:%d,%d l:%d DATA:%p\n", - s->packet_size, asf_st->pkt.size, asf->packet_frag_offset, - asf_st->frag_offset, asf->packet_frag_size, asf_st->pkt.data); - asf->packet_size_left -= asf->packet_frag_size; - if (asf->packet_size_left < 0) - continue; + return 0; +} - if (asf->packet_frag_offset >= asf_st->pkt.size || - asf->packet_frag_size > asf_st->pkt.size - asf->packet_frag_offset) { - av_log(s, AV_LOG_ERROR, - "packet fragment position invalid %u,%u not in %u\n", - asf->packet_frag_offset, asf->packet_frag_size, - asf_st->pkt.size); - continue; +static int asf_read_packet_header(AVFormatContext *s) +{ + ASFContext *asf = s->priv_data; + AVIOContext *pb = s->pb; + uint64_t size; + uint32_t av_unused seq; + unsigned char error_flags, len_flags, pay_flags; + + asf->packet_offset = avio_tell(pb); + error_flags = avio_r8(pb); // read Error Correction Flags + if (error_flags & ASF_PACKET_FLAG_ERROR_CORRECTION_PRESENT) + if (!(error_flags & ASF_ERROR_CORRECTION_LENGTH_TYPE)) { + size = error_flags & ASF_PACKET_ERROR_CORRECTION_DATA_SIZE; + avio_skip(pb, size); } + len_flags = avio_r8(pb); + asf->prop_flags = avio_r8(pb); + READ_LEN(len_flags & ASF_PPI_MASK_PACKET_LENGTH_FIELD_SIZE, + ASF_PPI_FLAG_PACKET_LENGTH_FIELD_, asf->packet_size_internal); + READ_LEN(len_flags & ASF_PPI_MASK_SEQUENCE_FIELD_SIZE, + ASF_PPI_FLAG_SEQUENCE_FIELD_, seq); + READ_LEN(len_flags & ASF_PPI_MASK_PADDING_LENGTH_FIELD_SIZE, + ASF_PPI_FLAG_PADDING_LENGTH_FIELD_, asf->pad_len ); + asf->send_time = avio_rl32(pb); // send time + avio_skip(pb, 2); // skip duration + if (len_flags & ASF_PPI_FLAG_MULTIPLE_PAYLOADS_PRESENT) { // Multiple Payloads present + pay_flags = avio_r8(pb); + asf->nb_mult_left = (pay_flags & ASF_NUM_OF_PAYLOADS); + } - ret 
= avio_read(pb, asf_st->pkt.data + asf->packet_frag_offset, - asf->packet_frag_size); - if (ret != asf->packet_frag_size) { - if (ret < 0 || asf->packet_frag_offset + ret == 0) - return ret < 0 ? ret : AVERROR_EOF; - - if (asf_st->ds_span > 1) { - // scrambling, we can either drop it completely or fill the remainder - // TODO: should we fill the whole packet instead of just the current - // fragment? - memset(asf_st->pkt.data + asf->packet_frag_offset + ret, 0, - asf->packet_frag_size - ret); - ret = asf->packet_frag_size; - } else { - // no scrambling, so we can return partial packets - av_shrink_packet(&asf_st->pkt, asf->packet_frag_offset + ret); - } + return 0; +} + +static int asf_deinterleave(AVFormatContext *s, ASFPacket *asf_pkt, int st_num) +{ + ASFContext *asf = s->priv_data; + ASFStream *asf_st = asf->asf_st[st_num]; + unsigned char *p = asf_pkt->avpkt.data; + uint16_t pkt_len = asf->asf_st[st_num]->virtual_pkt_len; + uint16_t chunk_len = asf->asf_st[st_num]->virtual_chunk_len; + int nchunks = pkt_len / chunk_len; + AVPacket pkt; + int pos = 0, j, l, ret; + + + if ((ret = av_new_packet(&pkt, asf_pkt->data_size)) < 0) + return ret; + + while (asf_pkt->data_size >= asf_st->span * pkt_len + pos) { + if (pos >= asf_pkt->data_size) { + break; } - if (s->key && s->keylen == 20) - ff_asfcrypt_dec(s->key, asf_st->pkt.data + asf->packet_frag_offset, - ret); - asf_st->frag_offset += ret; - /* test if whole packet is read */ - if (asf_st->frag_offset == asf_st->pkt.size) { - // workaround for macroshit radio DVR-MS files - if (s->streams[asf->stream_index]->codec->codec_id == AV_CODEC_ID_MPEG2VIDEO && - asf_st->pkt.size > 100) { - int i; - for (i = 0; i < asf_st->pkt.size && !asf_st->pkt.data[i]; i++) - ; - if (i == asf_st->pkt.size) { - av_log(s, AV_LOG_DEBUG, "discarding ms fart\n"); - asf_st->frag_offset = 0; - av_free_packet(&asf_st->pkt); - continue; - } + for (l = 0; l < pkt_len; l++) { + if (pos >= asf_pkt->data_size) { + break; } - - /* return packet */ - if (asf_st->ds_span > 1) { - if (asf_st->pkt.size != asf_st->ds_packet_size * asf_st->ds_span) { - av_log(s, AV_LOG_ERROR, - "pkt.size != ds_packet_size * ds_span (%d %d %d)\n", - asf_st->pkt.size, asf_st->ds_packet_size, - asf_st->ds_span); - } else { - /* packet descrambling */ - AVBufferRef *buf = av_buffer_alloc(asf_st->pkt.size + - FF_INPUT_BUFFER_PADDING_SIZE); - if (buf) { - uint8_t *newdata = buf->data; - int offset = 0; - memset(newdata + asf_st->pkt.size, 0, - FF_INPUT_BUFFER_PADDING_SIZE); - while (offset < asf_st->pkt.size) { - int off = offset / asf_st->ds_chunk_size; - int row = off / asf_st->ds_span; - int col = off % asf_st->ds_span; - int idx = row + col * asf_st->ds_packet_size / asf_st->ds_chunk_size; - assert(offset + asf_st->ds_chunk_size <= asf_st->pkt.size); - assert(idx + 1 <= asf_st->pkt.size / asf_st->ds_chunk_size); - memcpy(newdata + offset, - asf_st->pkt.data + idx * asf_st->ds_chunk_size, - asf_st->ds_chunk_size); - offset += asf_st->ds_chunk_size; - } - av_buffer_unref(&asf_st->pkt.buf); - asf_st->pkt.buf = buf; - asf_st->pkt.data = buf->data; - } - } + for (j = 0; j < asf_st->span; j++) { + if ((pos + chunk_len) >= asf_pkt->data_size) + break; + memcpy(pkt.data + pos, + p + (j * nchunks + l) * chunk_len, + chunk_len); + pos += chunk_len; } - asf_st->frag_offset = 0; - *pkt = asf_st->pkt; -#if FF_API_DESTRUCT_PACKET -FF_DISABLE_DEPRECATION_WARNINGS - asf_st->pkt.destruct = NULL; -FF_ENABLE_DEPRECATION_WARNINGS -#endif - asf_st->pkt.buf = 0; - asf_st->pkt.size = 0; - asf_st->pkt.data = 0; - 
asf_st->pkt.side_data_elems = 0; - asf_st->pkt.side_data = NULL; - break; // packet completed } + p += asf_st->span * pkt_len; + if (p > asf_pkt->avpkt.data + asf_pkt->data_size) + break; } + av_free_packet(&asf_pkt->avpkt); + asf_pkt->avpkt = pkt; + return 0; } static int asf_read_packet(AVFormatContext *s, AVPacket *pkt) { ASFContext *asf = s->priv_data; + AVIOContext *pb = s->pb; + int ret, i; - for (;;) { - int ret; - - /* parse cached packets, if any */ - if ((ret = asf_parse_packet(s, s->pb, pkt)) <= 0) + if ((avio_tell(pb) >= asf->data_offset + asf->data_size) && + !(asf->b_flags & ASF_FLAG_BROADCAST)) + return AVERROR_EOF; + while (!pb->eof_reached) { + if (asf->state == PARSE_PACKET_HEADER) { + asf_read_packet_header(s); + if (!asf->nb_mult_left) + asf->state = READ_SINGLE; + else + asf->state = READ_MULTI; + } + if ((ret = asf_read_payload(s, pkt)) < 0) return ret; - if ((ret = asf_get_packet(s, s->pb)) < 0) - assert(asf->packet_size_left < FRAME_HEADER_SIZE || - asf->packet_segments < 1); - asf->packet_time_start = 0; + switch (asf->state) { + case READ_SINGLE: + if (!asf->sub_left) + asf->state = PARSE_PACKET_HEADER; + break; + case READ_MULTI_SUB: + if (!asf->sub_left && !asf->nb_mult_left) { + asf->state = PARSE_PACKET_HEADER; + if (!asf->return_subpayload) + avio_skip(pb, asf->pad_len); // skip padding + if (asf->packet_offset + asf->packet_size > avio_tell(pb)) + avio_seek(pb, asf->packet_offset + asf->packet_size, SEEK_SET); + } else if (!asf->sub_left) + asf->state = READ_MULTI; + break; + case READ_MULTI: + if (!asf->nb_mult_left) { + asf->state = PARSE_PACKET_HEADER; + if (!asf->return_subpayload) { + avio_skip(pb, asf->pad_len); // skip padding + } + if (asf->packet_offset + asf->packet_size > avio_tell(pb)) + avio_seek(pb, asf->packet_offset + asf->packet_size, SEEK_SET); + } + break; + } + if (asf->return_subpayload) { + asf->return_subpayload = 0; + return 0; + } + for (i = 0; i < s->nb_streams; i++) { + ASFPacket *asf_pkt = &asf->asf_st[i]->pkt; + if (asf_pkt && !asf_pkt->size_left && asf_pkt->data_size) { + if (asf->asf_st[i]->span > 1 && + asf->asf_st[i]->type == AVMEDIA_TYPE_AUDIO) + if ((ret = asf_deinterleave(s, asf_pkt, i)) < 0) + return ret; + av_packet_move_ref(pkt, &asf_pkt->avpkt); + pkt->stream_index = asf->asf_st[i]->index; + pkt->flags = asf_pkt->flags; + pkt->dts = asf_pkt->dts - asf->preroll; + asf_pkt->data_size = 0; + asf_pkt->frame_num = 0; + return 0; + } + } } + + if (pb->eof_reached) + return AVERROR_EOF; + + return 0; } -// Added to support seeking after packets have been read -// If information is not reset, read_packet fails due to -// leftover information from previous reads -static void asf_reset_header(AVFormatContext *s) +static int asf_read_close(AVFormatContext *s) { ASFContext *asf = s->priv_data; - ASFStream *asf_st; int i; - asf->packet_size_left = 0; - asf->packet_segments = 0; - asf->packet_flags = 0; - asf->packet_property = 0; - asf->packet_timestamp = 0; - asf->packet_segsizetype = 0; - asf->packet_segments = 0; - asf->packet_seq = 0; - asf->packet_replic_size = 0; - asf->packet_key_frame = 0; - asf->packet_padsize = 0; - asf->packet_frag_offset = 0; - asf->packet_frag_size = 0; - asf->packet_frag_timestamp = 0; - asf->packet_multi_size = 0; - asf->packet_obj_size = 0; - asf->packet_time_delta = 0; - asf->packet_time_start = 0; - - for (i = 0; i < s->nb_streams; i++) { - asf_st = s->streams[i]->priv_data; - if (!asf_st) - continue; - av_free_packet(&asf_st->pkt); - asf_st->frag_offset = 0; - asf_st->seq = 0; + for (i = 0; i < 
asf->nb_streams; i++) { + av_free_packet(&asf->asf_st[i]->pkt.avpkt); + av_freep(&asf->asf_st[i]); + av_dict_free(&asf->asf_sd[i].asf_met); } - asf->asf_st = NULL; + + return 0; } -static int asf_read_close(AVFormatContext *s) +static void reset_packet_state(AVFormatContext *s) { - asf_reset_header(s); + ASFContext *asf = s->priv_data; + int i; - return 0; + asf->state = PARSE_PACKET_HEADER; + asf->offset = 0; + asf->return_subpayload = 0; + asf->sub_left = 0; + asf->sub_header_offset = 0; + asf->packet_offset = asf->first_packet_offset; + asf->pad_len = 0; + asf->rep_data_len = 0; + asf->dts_delta = 0; + asf->mult_sub_len = 0; + asf->nb_mult_left = 0; + asf->nb_sub = 0; + asf->prop_flags = 0; + asf->sub_dts = 0; + asf->dts = 0; + for (i = 0; i < asf->nb_streams; i++) { + ASFPacket *pkt = &asf->asf_st[i]->pkt; + pkt->size_left = 0; + pkt->data_size = 0; + pkt->duration = 0; + pkt->flags = 0; + pkt->dts = 0; + pkt->duration = 0; + av_free_packet(&pkt->avpkt); + av_init_packet(&pkt->avpkt); + } } -static int64_t asf_read_pts(AVFormatContext *s, int stream_index, - int64_t *ppos, int64_t pos_limit) +/* + * Find a timestamp for the requested position within the payload + * where the pos (position) is the offset inside the Data Object. + * When position is not on the packet boundary, asf_read_timestamp tries + * to find the closest packet offset after this position. If this packet + * is a key frame, this packet timestamp is read and an index entry is created + * for the packet. If this packet belongs to the requested stream, + * asf_read_timestamp upgrades pos to the packet beginning offset and + * returns this packet's dts. So returned dts is the dts of the first key frame with + * matching stream number after given position. + */ +static int64_t asf_read_timestamp(AVFormatContext *s, int stream_index, + int64_t *pos, int64_t pos_limit) { - AVPacket pkt1, *pkt = &pkt1; - ASFStream *asf_st; - int64_t pts; - int64_t pos = *ppos; - int i; - int64_t start_pos[ASF_MAX_STREAMS]; - - for (i = 0; i < s->nb_streams; i++) - start_pos[i] = pos; - - if (s->packet_size > 0) - pos = (pos + s->packet_size - 1 - s->internal->data_offset) / - s->packet_size * s->packet_size + - s->internal->data_offset; - *ppos = pos; - avio_seek(s->pb, pos, SEEK_SET); - - asf_reset_header(s); - for (;;) { - if (asf_read_packet(s, pkt) < 0) { - av_log(s, AV_LOG_INFO, "asf_read_pts failed\n"); - return AV_NOPTS_VALUE; - } + ASFContext *asf = s->priv_data; + int64_t pkt_pos = *pos, pkt_offset, dts = AV_NOPTS_VALUE, data_end; + AVPacket pkt; + int n; - pts = pkt->dts; + data_end = asf->data_offset + asf->data_size; - av_free_packet(pkt); - if (pkt->flags & AV_PKT_FLAG_KEY) { - i = pkt->stream_index; + n = (pkt_pos - asf->first_packet_offset + asf->packet_size - 1) / + asf->packet_size; + n = av_clip(n, 0, ((data_end - asf->first_packet_offset) / asf->packet_size - 1)); + pkt_pos = asf->first_packet_offset + n * asf->packet_size; - asf_st = s->streams[i]->priv_data; - av_assert0(asf_st); + avio_seek(s->pb, pkt_pos, SEEK_SET); + pkt_offset = pkt_pos; -// assert((asf_st->packet_pos - s->data_offset) % s->packet_size == 0); - pos = asf_st->packet_pos; + reset_packet_state(s); + while (avio_tell(s->pb) < data_end) { - av_add_index_entry(s->streams[i], pos, pts, pkt->size, - pos - start_pos[i] + 1, AVINDEX_KEYFRAME); - start_pos[i] = asf_st->packet_pos + 1; + int i, ret, st_found; - if (pkt->stream_index == stream_index) - break; + av_init_packet(&pkt); + pkt_offset = avio_tell(s->pb); + if ((ret = asf_read_packet(s, &pkt)) < 0) { + 
dts = AV_NOPTS_VALUE; + return ret; + } + // ASFPacket may contain fragments of packets belonging to different streams, + // pkt_offset is the offset of the first fragment within it. + if ((pkt_offset >= (pkt_pos + asf->packet_size))) + pkt_pos += asf->packet_size; + for (i = 0; i < asf->nb_streams; i++) { + ASFStream *st = asf->asf_st[i]; + + st_found = 0; + if (pkt.flags & AV_PKT_FLAG_KEY) { + dts = pkt.dts; + if (dts) { + av_add_index_entry(s->streams[pkt.stream_index], pkt_pos, + dts, pkt.size, 0, AVINDEX_KEYFRAME); + if (stream_index == st->index) { + st_found = 1; + break; + } + } + } } + if (st_found) + break; + av_free_packet(&pkt); } + *pos = pkt_pos; - *ppos = pos; - return pts; + av_free_packet(&pkt); + return dts; } -static int asf_build_simple_index(AVFormatContext *s, int stream_index) +static int asf_read_seek(AVFormatContext *s, int stream_index, + int64_t timestamp, int flags) { - ff_asf_guid g; - ASFContext *asf = s->priv_data; - int64_t current_pos = avio_tell(s->pb); - int i, ret = 0; - - avio_seek(s->pb, asf->data_object_offset + asf->data_object_size, SEEK_SET); - if ((ret = ff_get_guid(s->pb, &g)) < 0) - goto end; - - /* the data object can be followed by other top-level objects, - * skip them until the simple index object is reached */ - while (ff_guidcmp(&g, &index_guid)) { - int64_t gsize = avio_rl64(s->pb); - if (gsize < 24 || s->pb->eof_reached) { - goto end; - } - avio_skip(s->pb, gsize - 24); - if ((ret = ff_get_guid(s->pb, &g)) < 0) - goto end; + ASFContext *asf = s->priv_data; + int idx, ret; + + if (s->streams[stream_index]->nb_index_entries && asf->is_simple_index) { + idx = av_index_search_timestamp(s->streams[stream_index], timestamp, flags); + if (idx < 0 || idx >= s->streams[stream_index]->nb_index_entries) + return AVERROR_INVALIDDATA; + avio_seek(s->pb, s->streams[stream_index]->index_entries[idx].pos, SEEK_SET); + } else { + if ((ret = ff_seek_frame_binary(s, stream_index, timestamp, flags)) < 0) + return ret; + + // asf_read_timestamp is called inside ff_seek_frame_binary and leaves state dirty, + // so reset_packet_state have to be called after it. 
+ reset_packet_state(s); } - { - int64_t itime, last_pos = -1; - int pct, ict; - int64_t av_unused gsize = avio_rl64(s->pb); - if ((ret = ff_get_guid(s->pb, &g)) < 0) - goto end; - itime = avio_rl64(s->pb); - pct = avio_rl32(s->pb); - ict = avio_rl32(s->pb); - av_log(s, AV_LOG_DEBUG, - "itime:0x%"PRIx64", pct:%d, ict:%d\n", itime, pct, ict); - - for (i = 0; i < ict; i++) { - int pktnum = avio_rl32(s->pb); - int pktct = avio_rl16(s->pb); - int64_t pos = s->internal->data_offset + s->packet_size * (int64_t)pktnum; - int64_t index_pts = FFMAX(av_rescale(itime, i, 10000) - asf->hdr.preroll, 0); - - if (pos != last_pos) { - av_log(s, AV_LOG_DEBUG, "pktnum:%d, pktct:%d pts: %"PRId64"\n", - pktnum, pktct, index_pts); - av_add_index_entry(s->streams[stream_index], pos, index_pts, - s->packet_size, 0, AVINDEX_KEYFRAME); - last_pos = pos; - } - } - asf->index_read = ict > 0; + return 0; +} + +static const GUIDParseTable *find_guid(ff_asf_guid guid) +{ + int j, ret; + const GUIDParseTable *g; + + swap_guid(guid); + g = gdef; + for (j = 0; j < FF_ARRAY_ELEMS(gdef); j++) { + if (!(ret = memcmp(guid, g->guid, sizeof(g->guid)))) + return g; + g++; } -end: - if (s->pb->eof_reached) - ret = 0; - avio_seek(s->pb, current_pos, SEEK_SET); - return ret; + + return NULL; } -static int asf_read_seek(AVFormatContext *s, int stream_index, - int64_t pts, int flags) +static int detect_unknown_subobject(AVFormatContext *s, int64_t offset, int64_t size) { ASFContext *asf = s->priv_data; - AVStream *st = s->streams[stream_index]; - int64_t pos; - int index, ret = 0; - - if (s->packet_size <= 0) - return -1; - - /* Try using the protocol's read_seek if available */ - if (s->pb) { - int ret = avio_seek_time(s->pb, stream_index, pts, flags); - if (ret >= 0) - asf_reset_header(s); - if (ret != AVERROR(ENOSYS)) + AVIOContext *pb = s->pb; + const GUIDParseTable *g = NULL; + ff_asf_guid guid; + int ret; + + while (avio_tell(pb) <= offset + size) { + asf->offset = avio_tell(pb); + if ((ret = ff_get_guid(pb, &guid)) < 0) return ret; + g = find_guid(guid); + if (g) { + if ((ret = g->read_object(s, g)) < 0) + return ret; + } else { + GUIDParseTable g2; + + g2.name = "Unknown"; + g2.is_subobject = 1; + asf_read_unknown(s, &g2); + } } - /* explicitly handle the case of seeking to 0 */ - if (!pts) { - asf_reset_header(s); - avio_seek(s->pb, s->internal->data_offset, SEEK_SET); - return 0; + return 0; +} + +static int asf_read_header(AVFormatContext *s) +{ + ASFContext *asf = s->priv_data; + AVIOContext *pb = s->pb; + const GUIDParseTable *g = NULL; + ff_asf_guid guid; + int i, ret; + uint64_t size; + + asf->preroll = 0; + asf->is_simple_index = 0; + ff_get_guid(pb, &guid); + if (ff_guidcmp(&guid, &ff_asf_header)) + return AVERROR_INVALIDDATA; + avio_skip(pb, 8); // skip header object size + avio_skip(pb, 6); // skip number of header objects and 2 reserved bytes + asf->data_reached = 0; + + /* 1 is here instead of pb->eof_reached because (when not streaming), Data are skipped + * for the first time, + * Index object is processed and got eof and then seeking back to the Data is performed. 
+ */ + while (1) { + // for the cases when object size is invalid + if (avio_tell(pb) == asf->offset) { + if (asf->data_reached) + avio_seek(pb, asf->first_packet_offset, SEEK_SET); + break; + } + asf->offset = avio_tell(pb); + if ((ret = ff_get_guid(pb, &guid)) < 0) { + if (ret == AVERROR_EOF && asf->data_reached) { + avio_seek(pb, asf->first_packet_offset, SEEK_SET); + break; + } else + return ret; + } + g = find_guid(guid); + if (g) { + asf->unknown_offset = asf->offset; + asf->is_header = 1; + if ((ret = g->read_object(s, g)) < 0) + return ret; + } else { + size = avio_rl64(pb); + align_position(pb, asf->offset, size); + } + if (asf->data_reached && !pb->seekable) + break; } - if (!asf->index_read) - ret = asf_build_simple_index(s, stream_index); + for (i = 0; i < asf->nb_streams; i++) { + const char *rfc1766 = asf->asf_sd[asf->asf_st[i]->lang_idx].langs; + AVStream *st = s->streams[asf->asf_st[i]->index]; + set_language(s, rfc1766, &st->metadata); + } - if (!ret && asf->index_read && st->index_entries) { - index = av_index_search_timestamp(st, pts, flags); - if (index >= 0) { - /* find the position */ - pos = st->index_entries[index].pos; + for (i = 0; i < ASF_MAX_STREAMS; i++) { + AVStream *st = NULL; - /* do the seek */ - av_log(s, AV_LOG_DEBUG, "SEEKTO: %"PRId64"\n", pos); - avio_seek(s->pb, pos, SEEK_SET); - asf_reset_header(s); - return 0; + st = find_stream(s, i); + if (st) { + av_dict_copy(&st->metadata, asf->asf_sd[i].asf_met, AV_DICT_IGNORE_SUFFIX); + if (asf->asf_sd[i].aspect_ratio.num > 0 && asf->asf_sd[i].aspect_ratio.den > 0) { + st->sample_aspect_ratio.num = asf->asf_sd[i].aspect_ratio.num; + st->sample_aspect_ratio.den = asf->asf_sd[i].aspect_ratio.den; + } } } - /* no index or seeking by index failed */ - if (ff_seek_frame_binary(s, stream_index, pts, flags) < 0) - return -1; - asf_reset_header(s); + return 0; } @@ -1521,8 +1696,7 @@ AVInputFormat ff_asf_demuxer = { .read_header = asf_read_header, .read_packet = asf_read_packet, .read_close = asf_read_close, + .read_timestamp = asf_read_timestamp, .read_seek = asf_read_seek, - .read_timestamp = asf_read_pts, .flags = AVFMT_NOBINSEARCH | AVFMT_NOGENSEARCH, - .priv_class = &asf_class, }; diff --git a/libavformat/asfenc.c b/libavformat/asfenc.c index 39ba3d9b92..f6608d5292 100644 --- a/libavformat/asfenc.c +++ b/libavformat/asfenc.c @@ -185,6 +185,27 @@ #define DATA_HEADER_SIZE 50 +typedef struct ASFStream { + int num; + unsigned char seq; + /* use for reading */ + AVPacket pkt; + int frag_offset; + int timestamp; + int64_t duration; + + int ds_span; /* descrambling */ + int ds_packet_size; + int ds_chunk_size; + + int64_t packet_pos; + + uint16_t stream_language_index; + + int palette_changed; + uint32_t palette[256]; +} ASFStream; + typedef struct ASFContext { uint32_t seqno; int is_streamed; diff --git a/libavformat/version.h b/libavformat/version.h index bc7ee454de..3491f3609b 100644 --- a/libavformat/version.h +++ b/libavformat/version.h @@ -30,8 +30,8 @@ #include "libavutil/version.h" #define LIBAVFORMAT_VERSION_MAJOR 56 -#define LIBAVFORMAT_VERSION_MINOR 20 -#define LIBAVFORMAT_VERSION_MICRO 1 +#define LIBAVFORMAT_VERSION_MINOR 21 +#define LIBAVFORMAT_VERSION_MICRO 0 #define LIBAVFORMAT_VERSION_INT AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, \ LIBAVFORMAT_VERSION_MINOR, \ |
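The READ_LEN macro above implements the ASF convention for variable-size length fields: a small type code, taken from the Length Type Flags byte or the payload Property Flags byte and masked out with the ASF_PPI_MASK_* and ASF_PL_MASK_* constants, selects whether a field is absent or stored as a little-endian byte, word or dword. A minimal standalone sketch of that decoding, assuming the usual coding of 0 = not present, 1 = byte, 2 = word, 3 = dword; the Reader type and read_len_field helper are illustrative only, the demuxer itself reads through AVIOContext:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

typedef struct Reader {
    const uint8_t *p;    /* current read position  */
    const uint8_t *end;  /* one past the last byte */
} Reader;

/* Read a little-endian length field whose size is selected by a 2-bit
 * type code, mirroring what READ_LEN expands to with avio_r8/rl16/rl32. */
static uint32_t read_len_field(Reader *r, unsigned type)
{
    static const unsigned sizes[4] = { 0, 1, 2, 4 };
    unsigned n = sizes[type & 3];
    unsigned i;
    uint32_t v = 0;

    if ((size_t)(r->end - r->p) < n)
        return 0;                       /* truncated input, treat as absent */
    for (i = 0; i < n; i++)
        v |= (uint32_t)*r->p++ << (8 * i);
    return v;
}

int main(void)
{
    /* hypothetical fields: a packet length stored as a word, padding as a byte */
    const uint8_t buf[] = { 0x80, 0x0c, 0x05 };
    Reader r = { buf, buf + sizeof(buf) };
    uint32_t packet_len = read_len_field(&r, 2);  /* word -> 0x0c80 (3200) */
    uint32_t pad_len    = read_len_field(&r, 1);  /* byte -> 5             */

    printf("packet_len=%"PRIu32" pad_len=%"PRIu32"\n", packet_len, pad_len);
    return 0;
}

In asf_read_packet_header and asf_read_payload the type codes come from len_flags and asf->prop_flags in exactly this way, which is why a single READ_LEN macro can read the packet length, sequence, padding length, media object number, offset and replicated data length fields.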
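asf_read_subpayload handles what the specification calls compressed payloads: when the replicated data length is 1, the offset-into-media-object field carries the presentation time of the first sub-payload, the single replicated byte is a per-sub-payload time delta, and every sub-payload is a complete frame. The dts assigned in the patch, pkt->dts = asf->sub_dts + (asf->nb_sub - 1) * asf->dts_delta - asf->preroll, is this helper applied to the (nb_sub - 1)-th sub-payload; compressed_subpayload_dts is an illustrative name, not a function in the patch:

#include <stdint.h>

/* dts of the i-th sub-payload (0-based) of a compressed ASF payload.
 * first_pts - presentation time taken from the offset-into-media-object field
 * delta     - the 1-byte presentation time delta of the payload
 * preroll   - the preroll the demuxer subtracts from every timestamp */
static int64_t compressed_subpayload_dts(int64_t first_pts, int delta,
                                         int i, int64_t preroll)
{
    return first_pts + (int64_t)i * delta - preroll;
}

Because such frames are reconstructed one sub-payload at a time, asf_read_packet can hand them out immediately through asf->return_subpayload instead of accumulating fragments in an ASFPacket.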
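asf_deinterleave undoes the audio interleaving advertised by the stream properties: the accumulated data holds span virtual packets of virtual_pkt_len bytes back to back, each split into chunks of virtual_chunk_len bytes, and decoding order takes one chunk from every virtual packet in turn. The copy in the patch, memcpy(pkt.data + pos, p + (j * nchunks + l) * chunk_len, chunk_len), amounts to the chunk permutation sketched below for one complete group; deinterleave_group is an illustrative helper, while the real function allocates a replacement AVPacket and adds bounds checks for partial groups at the end of the data:

#include <stdint.h>
#include <string.h>

/* Reorder one group of `span` virtual packets, each made of `nchunks`
 * chunks of `chunk_len` bytes.  Output chunk o is taken from input chunk
 * (o % span) * nchunks + (o / span), which is the reordering the nested
 * loops in asf_deinterleave apply chunk by chunk. */
static void deinterleave_group(uint8_t *dst, const uint8_t *src,
                               int span, int nchunks, int chunk_len)
{
    int o;

    for (o = 0; o < span * nchunks; o++) {
        int src_chunk = (o % span) * nchunks + (o / span);
        memcpy(dst + o * chunk_len, src + src_chunk * chunk_len, chunk_len);
    }
}

With span 2 and nchunks 3, for example, input chunks a0 a1 a2 b0 b1 b2 come out as a0 b0 a1 b1 a2 b2, matching the descrambling the old demuxer performed with its ds_span/ds_packet_size/ds_chunk_size fields.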