author     Anton Khirnov <anton@khirnov.net>    2012-10-06 12:10:34 +0200
committer  Anton Khirnov <anton@khirnov.net>    2012-10-08 07:13:26 +0200
commit     716d413c13981da15323c7a3821860536eefdbbb (patch)
tree       b15ebcded50b8edaa5b9fc8f261774043138e1fa /libavformat
parent     78071a1420b425dfb787ac739048f523007b8139 (diff)
download   ffmpeg-716d413c13981da15323c7a3821860536eefdbbb.tar.gz
Replace PIX_FMT_* -> AV_PIX_FMT_*, PixelFormat -> AVPixelFormat
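The commit is a mechanical rename of the public pixel-format API: enum PixelFormat becomes enum AVPixelFormat, and every PIX_FMT_* value gains an AV_ prefix. As a minimal sketch of how calling code maps from the old names to the new ones (illustrative only, not part of this commit; pick_default is a hypothetical helper):

    #include <libavutil/pixfmt.h>

    /* Before this commit, code used the unprefixed names:
     *     enum PixelFormat fmt = PIX_FMT_NONE;
     * After it, the same identifiers carry the AV_ prefix. */
    static enum AVPixelFormat pick_default(enum AVPixelFormat fmt)
    {
        if (fmt == AV_PIX_FMT_NONE)       /* was PIX_FMT_NONE */
            return AV_PIX_FMT_YUV420P;    /* was PIX_FMT_YUV420P */
        return fmt;
    }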
Diffstat (limited to 'libavformat')
-rw-r--r--  libavformat/bfi.c             2
-rw-r--r--  libavformat/bmv.c             2
-rw-r--r--  libavformat/dvenc.c           2
-rw-r--r--  libavformat/filmstripdec.c    2
-rw-r--r--  libavformat/filmstripenc.c    4
-rw-r--r--  libavformat/gif.c             6
-rw-r--r--  libavformat/gxfenc.c          6
-rw-r--r--  libavformat/img2dec.c         6
-rw-r--r--  libavformat/lxfdec.c          2
-rw-r--r--  libavformat/movenc.c         36
-rw-r--r--  libavformat/mtv.c             2
-rw-r--r--  libavformat/mxf.c            36
-rw-r--r--  libavformat/mxf.h             2
-rw-r--r--  libavformat/mxfdec.c         10
-rw-r--r--  libavformat/output-example.c 12
-rw-r--r--  libavformat/rawdec.c          4
-rw-r--r--  libavformat/rtpdec_xiph.c     6
-rw-r--r--  libavformat/rtpenc_jpeg.c     6
-rw-r--r--  libavformat/sdp.c             6
-rw-r--r--  libavformat/segafilm.c        2
-rw-r--r--  libavformat/siff.c            2
-rw-r--r--  libavformat/smacker.c         2
-rw-r--r--  libavformat/tmv.c             2
-rw-r--r--  libavformat/utils.c           2
-rw-r--r--  libavformat/yuv4mpeg.c       58
25 files changed, 110 insertions, 110 deletions
diff --git a/libavformat/bfi.c b/libavformat/bfi.c
index c1fd29e9f2..c520bea136 100644
--- a/libavformat/bfi.c
+++ b/libavformat/bfi.c
@@ -90,7 +90,7 @@ static int bfi_read_header(AVFormatContext * s)
     avpriv_set_pts_info(vstream, 32, 1, fps);
     vstream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
     vstream->codec->codec_id = AV_CODEC_ID_BFI;
-    vstream->codec->pix_fmt = PIX_FMT_PAL8;
+    vstream->codec->pix_fmt = AV_PIX_FMT_PAL8;
 
     /* Set up the audio codec now... */
     astream->codec->codec_type = AVMEDIA_TYPE_AUDIO;
diff --git a/libavformat/bmv.c b/libavformat/bmv.c
index 474f4e3bc3..fe5db3f004 100644
--- a/libavformat/bmv.c
+++ b/libavformat/bmv.c
@@ -50,7 +50,7 @@ static int bmv_read_header(AVFormatContext *s)
     st->codec->codec_id = AV_CODEC_ID_BMV_VIDEO;
     st->codec->width = 640;
     st->codec->height = 429;
-    st->codec->pix_fmt = PIX_FMT_PAL8;
+    st->codec->pix_fmt = AV_PIX_FMT_PAL8;
     avpriv_set_pts_info(st, 16, 1, 12);
     ast = avformat_new_stream(s, 0);
     if (!ast)
diff --git a/libavformat/dvenc.c b/libavformat/dvenc.c
index 577ba7c4da..27a444ea1f 100644
--- a/libavformat/dvenc.c
+++ b/libavformat/dvenc.c
@@ -137,7 +137,7 @@ static int dv_write_pack(enum dv_pack_type pack_id, DVMuxContext *c, uint8_t* buf)
              (1 << 3) | /* recording mode: 1 -- original */
              7;
     buf[3] = (1 << 7) | /* direction: 1 -- forward */
-             (c->sys->pix_fmt == PIX_FMT_YUV420P ? 0x20 : /* speed */
+             (c->sys->pix_fmt == AV_PIX_FMT_YUV420P ? 0x20 : /* speed */
              c->sys->ltc_divisor * 4);
     buf[4] = (1 << 7) | /* reserved -- always 1 */
              0x7f; /* genre category */
diff --git a/libavformat/filmstripdec.c b/libavformat/filmstripdec.c
index 39de974dc8..7c327e9324 100644
--- a/libavformat/filmstripdec.c
+++ b/libavformat/filmstripdec.c
@@ -62,7 +62,7 @@ static int read_header(AVFormatContext *s)
     avio_skip(pb, 2);
     st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
     st->codec->codec_id = AV_CODEC_ID_RAWVIDEO;
-    st->codec->pix_fmt = PIX_FMT_RGBA;
+    st->codec->pix_fmt = AV_PIX_FMT_RGBA;
     st->codec->codec_tag = 0; /* no fourcc */
     st->codec->width = avio_rb16(pb);
     st->codec->height = avio_rb16(pb);
diff --git a/libavformat/filmstripenc.c b/libavformat/filmstripenc.c
index d000c4f9f5..90d9a7685c 100644
--- a/libavformat/filmstripenc.c
+++ b/libavformat/filmstripenc.c
@@ -35,8 +35,8 @@ typedef struct {
 
 static int write_header(AVFormatContext *s)
 {
-    if (s->streams[0]->codec->pix_fmt != PIX_FMT_RGBA) {
-        av_log(s, AV_LOG_ERROR, "only PIX_FMT_RGBA is supported\n");
+    if (s->streams[0]->codec->pix_fmt != AV_PIX_FMT_RGBA) {
+        av_log(s, AV_LOG_ERROR, "only AV_PIX_FMT_RGBA is supported\n");
         return AVERROR_INVALIDDATA;
     }
     return 0;
diff --git a/libavformat/gif.c b/libavformat/gif.c
index f11b267033..eb2db46b3e 100644
--- a/libavformat/gif.c
+++ b/libavformat/gif.c
@@ -223,7 +223,7 @@ static int gif_image_write_image(AVIOContext *pb,
     put_bits(&p, 9, 0x0100); /* clear code */
 
     for (i = (left < GIF_CHUNKS) ? left : GIF_CHUNKS; i; i--) {
-        if (pix_fmt == PIX_FMT_RGB24) {
+        if (pix_fmt == AV_PIX_FMT_RGB24) {
             v = gif_clut_index(ptr[0], ptr[1], ptr[2]);
             ptr += 3;
         } else {
@@ -290,7 +290,7 @@ static int gif_write_header(AVFormatContext *s)
     //    rate = video_enc->time_base.den;
     }
 
-    if (video_enc->pix_fmt != PIX_FMT_RGB24) {
+    if (video_enc->pix_fmt != AV_PIX_FMT_RGB24) {
         av_log(s, AV_LOG_ERROR, "ERROR: gif only handles the rgb24 pixel format. Use -pix_fmt rgb24.\n");
         return AVERROR(EIO);
@@ -327,7 +327,7 @@ static int gif_write_video(AVFormatContext *s, AVCodecContext *enc,
     avio_w8(pb, 0x00);
 
     gif_image_write_image(pb, 0, 0, enc->width, enc->height,
-                          buf, enc->width * 3, PIX_FMT_RGB24);
+                          buf, enc->width * 3, AV_PIX_FMT_RGB24);
 
     avio_flush(s->pb);
     return 0;
diff --git a/libavformat/gxfenc.c b/libavformat/gxfenc.c
index 8adfb64de6..c5fb0aebaa 100644
--- a/libavformat/gxfenc.c
+++ b/libavformat/gxfenc.c
@@ -192,7 +192,7 @@ static int gxf_write_mpeg_auxiliary(AVIOContext *pb, AVStream *st)
     size = snprintf(buffer, 1024, "Ver 1\nBr %.6f\nIpg 1\nPpi %d\nBpiop %d\n"
                     "Pix 0\nCf %d\nCg %d\nSl %d\nnl16 %d\nVi 1\nf1 1\n",
                     (float)st->codec->bit_rate, sc->p_per_gop, sc->b_per_i_or_p,
-                    st->codec->pix_fmt == PIX_FMT_YUV422P ? 2 : 1, sc->first_gop_closed == 1,
+                    st->codec->pix_fmt == AV_PIX_FMT_YUV422P ? 2 : 1, sc->first_gop_closed == 1,
                     starting_line, (st->codec->height + 15) / 16);
     avio_w8(pb, TRACK_MPG_AUX);
     avio_w8(pb, size + 1);
@@ -471,7 +471,7 @@ static int gxf_write_umf_media_mpeg(AVIOContext *pb, AVStream *st)
 {
     GXFStreamContext *sc = st->priv_data;
 
-    if (st->codec->pix_fmt == PIX_FMT_YUV422P)
+    if (st->codec->pix_fmt == AV_PIX_FMT_YUV422P)
         avio_wl32(pb, 2);
     else
         avio_wl32(pb, 1); /* default to 420 */
@@ -718,7 +718,7 @@ static int gxf_write_header(AVFormatContext *s)
             media_info = 'M';
             break;
         case AV_CODEC_ID_DVVIDEO:
-            if (st->codec->pix_fmt == PIX_FMT_YUV422P) {
+            if (st->codec->pix_fmt == AV_PIX_FMT_YUV422P) {
                 sc->media_type += 2;
                 sc->track_type = 6;
                 gxf->flags |= 0x00002000;
diff --git a/libavformat/img2dec.c b/libavformat/img2dec.c
index 1151180b22..14f7785144 100644
--- a/libavformat/img2dec.c
+++ b/libavformat/img2dec.c
@@ -140,7 +140,7 @@ static int read_header(AVFormatContext *s1)
     int first_index, last_index, ret = 0;
     int width = 0, height = 0;
     AVStream *st;
-    enum PixelFormat pix_fmt = PIX_FMT_NONE;
+    enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
     AVRational framerate;
 
     s1->ctx_flags |= AVFMTCTX_NOHEADER;
@@ -150,7 +150,7 @@ static int read_header(AVFormatContext *s1)
         return AVERROR(ENOMEM);
     }
 
-    if (s->pixel_format && (pix_fmt = av_get_pix_fmt(s->pixel_format)) == PIX_FMT_NONE) {
+    if (s->pixel_format && (pix_fmt = av_get_pix_fmt(s->pixel_format)) == AV_PIX_FMT_NONE) {
         av_log(s1, AV_LOG_ERROR, "No such pixel format: %s.\n", s->pixel_format);
         return AVERROR(EINVAL);
     }
@@ -204,7 +204,7 @@ static int read_header(AVFormatContext *s1)
         st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
         st->codec->codec_id = ff_guess_image2_codec(s->path);
     }
-    if(st->codec->codec_type == AVMEDIA_TYPE_VIDEO && pix_fmt != PIX_FMT_NONE)
+    if(st->codec->codec_type == AVMEDIA_TYPE_VIDEO && pix_fmt != AV_PIX_FMT_NONE)
         st->codec->pix_fmt = pix_fmt;
 
     return 0;
diff --git a/libavformat/lxfdec.c b/libavformat/lxfdec.c
index afeab85554..e0eb935f8b 100644
--- a/libavformat/lxfdec.c
+++ b/libavformat/lxfdec.c
@@ -39,7 +39,7 @@ static const AVCodecTag lxf_tags[] = {
     { AV_CODEC_ID_DVVIDEO, 4 }, //DV25
     { AV_CODEC_ID_DVVIDEO, 5 }, //DVCPRO
     { AV_CODEC_ID_DVVIDEO, 6 }, //DVCPRO50
-    { AV_CODEC_ID_RAWVIDEO, 7 }, //PIX_FMT_ARGB, where alpha is used for chroma keying
+    { AV_CODEC_ID_RAWVIDEO, 7 }, //AV_PIX_FMT_ARGB, where alpha is used for chroma keying
     { AV_CODEC_ID_RAWVIDEO, 8 }, //16-bit chroma key
     { AV_CODEC_ID_MPEG2VIDEO, 9 }, //4:2:2 CBP ("Constrained Bytes per Gop")
     { AV_CODEC_ID_NONE, 0 },
diff --git a/libavformat/movenc.c b/libavformat/movenc.c
index 935b9a3ad7..50371cda96 100644
--- a/libavformat/movenc.c
+++ b/libavformat/movenc.c
@@ -778,10 +778,10 @@ static int mov_get_dv_codec_tag(AVFormatContext *s, MOVTrack *track)
     if (track->enc->width == 720) /* SD */
         if (track->enc->height == 480) /* NTSC */
-            if (track->enc->pix_fmt == PIX_FMT_YUV422P) tag = MKTAG('d','v','5','n');
+            if (track->enc->pix_fmt == AV_PIX_FMT_YUV422P) tag = MKTAG('d','v','5','n');
             else tag = MKTAG('d','v','c',' ');
-        else if (track->enc->pix_fmt == PIX_FMT_YUV422P) tag = MKTAG('d','v','5','p');
-        else if (track->enc->pix_fmt == PIX_FMT_YUV420P) tag = MKTAG('d','v','c','p');
+        else if (track->enc->pix_fmt == AV_PIX_FMT_YUV422P) tag = MKTAG('d','v','5','p');
+        else if (track->enc->pix_fmt == AV_PIX_FMT_YUV420P) tag = MKTAG('d','v','c','p');
         else tag = MKTAG('d','v','p','p');
     else if (track->enc->height == 720) /* HD 720 line */
         if (track->enc->time_base.den == 50) tag = MKTAG('d','v','h','q');
@@ -798,24 +798,24 @@ static int mov_get_dv_codec_tag(AVFormatContext *s, MOVTrack *track)
 }
 
 static const struct {
-    enum PixelFormat pix_fmt;
+    enum AVPixelFormat pix_fmt;
     uint32_t tag;
     unsigned bps;
 } mov_pix_fmt_tags[] = {
-    { PIX_FMT_YUYV422, MKTAG('y','u','v','s'), 0 },
-    { PIX_FMT_UYVY422, MKTAG('2','v','u','y'), 0 },
-    { PIX_FMT_RGB555BE,MKTAG('r','a','w',' '), 16 },
-    { PIX_FMT_RGB555LE,MKTAG('L','5','5','5'), 16 },
-    { PIX_FMT_RGB565LE,MKTAG('L','5','6','5'), 16 },
-    { PIX_FMT_RGB565BE,MKTAG('B','5','6','5'), 16 },
-    { PIX_FMT_GRAY16BE,MKTAG('b','1','6','g'), 16 },
-    { PIX_FMT_RGB24, MKTAG('r','a','w',' '), 24 },
-    { PIX_FMT_BGR24, MKTAG('2','4','B','G'), 24 },
-    { PIX_FMT_ARGB, MKTAG('r','a','w',' '), 32 },
-    { PIX_FMT_BGRA, MKTAG('B','G','R','A'), 32 },
-    { PIX_FMT_RGBA, MKTAG('R','G','B','A'), 32 },
-    { PIX_FMT_ABGR, MKTAG('A','B','G','R'), 32 },
-    { PIX_FMT_RGB48BE, MKTAG('b','4','8','r'), 48 },
+    { AV_PIX_FMT_YUYV422, MKTAG('y','u','v','s'), 0 },
+    { AV_PIX_FMT_UYVY422, MKTAG('2','v','u','y'), 0 },
+    { AV_PIX_FMT_RGB555BE,MKTAG('r','a','w',' '), 16 },
+    { AV_PIX_FMT_RGB555LE,MKTAG('L','5','5','5'), 16 },
+    { AV_PIX_FMT_RGB565LE,MKTAG('L','5','6','5'), 16 },
+    { AV_PIX_FMT_RGB565BE,MKTAG('B','5','6','5'), 16 },
+    { AV_PIX_FMT_GRAY16BE,MKTAG('b','1','6','g'), 16 },
+    { AV_PIX_FMT_RGB24, MKTAG('r','a','w',' '), 24 },
+    { AV_PIX_FMT_BGR24, MKTAG('2','4','B','G'), 24 },
+    { AV_PIX_FMT_ARGB, MKTAG('r','a','w',' '), 32 },
+    { AV_PIX_FMT_BGRA, MKTAG('B','G','R','A'), 32 },
+    { AV_PIX_FMT_RGBA, MKTAG('R','G','B','A'), 32 },
+    { AV_PIX_FMT_ABGR, MKTAG('A','B','G','R'), 32 },
+    { AV_PIX_FMT_RGB48BE, MKTAG('b','4','8','r'), 48 },
 };
 
 static int mov_get_rawvideo_codec_tag(AVFormatContext *s, MOVTrack *track)
diff --git a/libavformat/mtv.c b/libavformat/mtv.c
index 19f35705b7..572288894d 100644
--- a/libavformat/mtv.c
+++ b/libavformat/mtv.c
@@ -130,7 +130,7 @@ static int mtv_read_header(AVFormatContext *s)
     avpriv_set_pts_info(st, 64, 1, mtv->video_fps);
     st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
     st->codec->codec_id = AV_CODEC_ID_RAWVIDEO;
-    st->codec->pix_fmt = PIX_FMT_RGB565BE;
+    st->codec->pix_fmt = AV_PIX_FMT_RGB565BE;
     st->codec->width = mtv->img_width;
     st->codec->height = mtv->img_height;
     st->codec->sample_rate = mtv->video_fps;
diff --git a/libavformat/mxf.c b/libavformat/mxf.c
index 04e0bf4770..040d8a26c1 100644
--- a/libavformat/mxf.c
+++ b/libavformat/mxf.c
@@ -56,13 +56,13 @@ const MXFCodecUL ff_mxf_codec_uls[] = {
 };
 
 const MXFCodecUL ff_mxf_pixel_format_uls[] = {
-    { { 0x06,0x0E,0x2B,0x34,0x04,0x01,0x01,0x0A,0x04,0x01,0x02,0x01,0x01,0x02,0x01,0x01 }, 16, PIX_FMT_UYVY422 },
-    { { 0x06,0x0E,0x2B,0x34,0x04,0x01,0x01,0x0A,0x04,0x01,0x02,0x01,0x01,0x02,0x01,0x02 }, 16, PIX_FMT_YUYV422 },
-    { { 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 }, 0, PIX_FMT_NONE },
+    { { 0x06,0x0E,0x2B,0x34,0x04,0x01,0x01,0x0A,0x04,0x01,0x02,0x01,0x01,0x02,0x01,0x01 }, 16, AV_PIX_FMT_UYVY422 },
+    { { 0x06,0x0E,0x2B,0x34,0x04,0x01,0x01,0x0A,0x04,0x01,0x02,0x01,0x01,0x02,0x01,0x02 }, 16, AV_PIX_FMT_YUYV422 },
+    { { 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 }, 0, AV_PIX_FMT_NONE },
 };
 
 static const struct {
-    enum PixelFormat pix_fmt;
+    enum AVPixelFormat pix_fmt;
     const char data[16];
 } ff_mxf_pixel_layouts[] = {
     /**
@@ -74,24 +74,24 @@ static const struct {
      * Note: Do not use these for encoding descriptors for little-endian formats until we
      * get samples or official word from SMPTE on how/if those can be encoded.
      */
-    {PIX_FMT_ABGR, {'A', 8, 'B', 8, 'G', 8, 'R', 8 }},
-    {PIX_FMT_ARGB, {'A', 8, 'R', 8, 'G', 8, 'B', 8 }},
-    {PIX_FMT_BGR24, {'B', 8, 'G', 8, 'R', 8 }},
-    {PIX_FMT_BGRA, {'B', 8, 'G', 8, 'R', 8, 'A', 8 }},
-    {PIX_FMT_RGB24, {'R', 8, 'G', 8, 'B', 8 }},
-    {PIX_FMT_RGB444BE,{'F', 4, 'R', 4, 'G', 4, 'B', 4 }},
-    {PIX_FMT_RGB48BE, {'R', 8, 'r', 8, 'G', 8, 'g', 8, 'B', 8, 'b', 8 }},
-    {PIX_FMT_RGB48BE, {'R', 16, 'G', 16, 'B', 16 }},
-    {PIX_FMT_RGB48LE, {'r', 8, 'R', 8, 'g', 8, 'G', 8, 'b', 8, 'B', 8 }},
-    {PIX_FMT_RGB555BE,{'F', 1, 'R', 5, 'G', 5, 'B', 5 }},
-    {PIX_FMT_RGB565BE,{'R', 5, 'G', 6, 'B', 5 }},
-    {PIX_FMT_RGBA, {'R', 8, 'G', 8, 'B', 8, 'A', 8 }},
-    {PIX_FMT_PAL8, {'P', 8 }},
+    {AV_PIX_FMT_ABGR, {'A', 8, 'B', 8, 'G', 8, 'R', 8 }},
+    {AV_PIX_FMT_ARGB, {'A', 8, 'R', 8, 'G', 8, 'B', 8 }},
+    {AV_PIX_FMT_BGR24, {'B', 8, 'G', 8, 'R', 8 }},
+    {AV_PIX_FMT_BGRA, {'B', 8, 'G', 8, 'R', 8, 'A', 8 }},
+    {AV_PIX_FMT_RGB24, {'R', 8, 'G', 8, 'B', 8 }},
+    {AV_PIX_FMT_RGB444BE,{'F', 4, 'R', 4, 'G', 4, 'B', 4 }},
+    {AV_PIX_FMT_RGB48BE, {'R', 8, 'r', 8, 'G', 8, 'g', 8, 'B', 8, 'b', 8 }},
+    {AV_PIX_FMT_RGB48BE, {'R', 16, 'G', 16, 'B', 16 }},
+    {AV_PIX_FMT_RGB48LE, {'r', 8, 'R', 8, 'g', 8, 'G', 8, 'b', 8, 'B', 8 }},
+    {AV_PIX_FMT_RGB555BE,{'F', 1, 'R', 5, 'G', 5, 'B', 5 }},
+    {AV_PIX_FMT_RGB565BE,{'R', 5, 'G', 6, 'B', 5 }},
+    {AV_PIX_FMT_RGBA, {'R', 8, 'G', 8, 'B', 8, 'A', 8 }},
+    {AV_PIX_FMT_PAL8, {'P', 8 }},
 };
 
 static const int num_pixel_layouts = FF_ARRAY_ELEMS(ff_mxf_pixel_layouts);
 
-int ff_mxf_decode_pixel_layout(const char pixel_layout[16], enum PixelFormat *pix_fmt)
+int ff_mxf_decode_pixel_layout(const char pixel_layout[16], enum AVPixelFormat *pix_fmt)
 {
     int x;
diff --git a/libavformat/mxf.h b/libavformat/mxf.h
index 88322d1591..773f30f3a2 100644
--- a/libavformat/mxf.h
+++ b/libavformat/mxf.h
@@ -70,7 +70,7 @@ extern const MXFCodecUL ff_mxf_data_definition_uls[];
 extern const MXFCodecUL ff_mxf_codec_uls[];
 extern const MXFCodecUL ff_mxf_pixel_format_uls[];
 
-int ff_mxf_decode_pixel_layout(const char pixel_layout[16], enum PixelFormat *pix_fmt);
+int ff_mxf_decode_pixel_layout(const char pixel_layout[16], enum AVPixelFormat *pix_fmt);
 
 #define PRINT_KEY(pc, s, x) av_dlog(pc, "%s %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X\n", s, \
                                     (x)[0], (x)[1], (x)[2], (x)[3], (x)[4], (x)[5], (x)[6], (x)[7], (x)[8], (x)[9], (x)[10], (x)[11], (x)[12], (x)[13], (x)[14], (x)[15])
diff --git a/libavformat/mxfdec.c b/libavformat/mxfdec.c
index a630b5561a..9a61d0243e 100644
--- a/libavformat/mxfdec.c
+++ b/libavformat/mxfdec.c
@@ -144,7 +144,7 @@ typedef struct {
     int linked_track_id;
     uint8_t *extradata;
    int extradata_size;
-    enum PixelFormat pix_fmt;
+    enum AVPixelFormat pix_fmt;
 } MXFDescriptor;
 
 typedef struct {
@@ -798,7 +798,7 @@ static void mxf_read_pixel_layout(AVIOContext *pb, MXFDescriptor *descriptor)
 static int mxf_read_generic_descriptor(void *arg, AVIOContext *pb, int tag, int size, UID uid, int64_t klv_offset)
 {
     MXFDescriptor *descriptor = arg;
-    descriptor->pix_fmt = PIX_FMT_NONE;
+    descriptor->pix_fmt = AV_PIX_FMT_NONE;
     switch(tag) {
     case 0x3F01:
         descriptor->sub_descriptors_count = avio_rb32(pb);
@@ -1501,17 +1501,17 @@ static int mxf_parse_structural_metadata(MXFContext *mxf)
         }
         if (st->codec->codec_id == AV_CODEC_ID_RAWVIDEO) {
             st->codec->pix_fmt = descriptor->pix_fmt;
-            if (st->codec->pix_fmt == PIX_FMT_NONE) {
+            if (st->codec->pix_fmt == AV_PIX_FMT_NONE) {
                 pix_fmt_ul = mxf_get_codec_ul(ff_mxf_pixel_format_uls, &descriptor->essence_codec_ul);
                 st->codec->pix_fmt = pix_fmt_ul->id;
-                if (st->codec->pix_fmt == PIX_FMT_NONE) {
+                if (st->codec->pix_fmt == AV_PIX_FMT_NONE) {
                     /* support files created before RP224v10 by defaulting to UYVY422 if subsampling is 4:2:2 and component depth is 8-bit */
                     if (descriptor->horiz_subsampling == 2 && descriptor->vert_subsampling == 1 && descriptor->component_depth == 8) {
-                        st->codec->pix_fmt = PIX_FMT_UYVY422;
+                        st->codec->pix_fmt = AV_PIX_FMT_UYVY422;
                     }
                 }
             }
diff --git a/libavformat/output-example.c b/libavformat/output-example.c
index 1011c2c645..e4babb49a5 100644
--- a/libavformat/output-example.c
+++ b/libavformat/output-example.c
@@ -44,7 +44,7 @@
 #define STREAM_DURATION 5.0
 #define STREAM_FRAME_RATE 25 /* 25 images/s */
 #define STREAM_NB_FRAMES ((int)(STREAM_DURATION * STREAM_FRAME_RATE))
-#define STREAM_PIX_FMT PIX_FMT_YUV420P /* default pix_fmt */
+#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */
 
 static int sws_flags = SWS_BICUBIC;
 
@@ -234,7 +234,7 @@ static AVStream *add_video_stream(AVFormatContext *oc, enum AVCodecID codec_id)
     return st;
 }
 
-static AVFrame *alloc_picture(enum PixelFormat pix_fmt, int width, int height)
+static AVFrame *alloc_picture(enum AVPixelFormat pix_fmt, int width, int height)
 {
     AVFrame *picture;
     uint8_t *picture_buf;
@@ -289,8 +289,8 @@ static void open_video(AVFormatContext *oc, AVStream *st)
      * picture is needed too. It is then converted to the required
      * output format. */
     tmp_picture = NULL;
-    if (c->pix_fmt != PIX_FMT_YUV420P) {
-        tmp_picture = alloc_picture(PIX_FMT_YUV420P, c->width, c->height);
+    if (c->pix_fmt != AV_PIX_FMT_YUV420P) {
+        tmp_picture = alloc_picture(AV_PIX_FMT_YUV420P, c->width, c->height);
         if (!tmp_picture) {
             fprintf(stderr, "Could not allocate temporary picture\n");
             exit(1);
@@ -333,12 +333,12 @@ static void write_video_frame(AVFormatContext *oc, AVStream *st)
          * frames if using B-frames, so we get the last frames by
          * passing the same picture again. */
     } else {
-        if (c->pix_fmt != PIX_FMT_YUV420P) {
+        if (c->pix_fmt != AV_PIX_FMT_YUV420P) {
            /* as we only generate a YUV420P picture, we must convert it
             * to the codec pixel format if needed */
            if (img_convert_ctx == NULL) {
                img_convert_ctx = sws_getContext(c->width, c->height,
-                                                PIX_FMT_YUV420P,
+                                                AV_PIX_FMT_YUV420P,
                                                 c->width, c->height,
                                                 c->pix_fmt,
                                                 sws_flags, NULL, NULL, NULL);
diff --git a/libavformat/rawdec.c b/libavformat/rawdec.c
index a64c406bf1..7ea3d2b8fc 100644
--- a/libavformat/rawdec.c
+++ b/libavformat/rawdec.c
@@ -69,14 +69,14 @@ int ff_raw_read_header(AVFormatContext *s)
     case AVMEDIA_TYPE_VIDEO: {
         FFRawVideoDemuxerContext *s1 = s->priv_data;
         int width = 0, height = 0, ret = 0;
-        enum PixelFormat pix_fmt;
+        enum AVPixelFormat pix_fmt;
         AVRational framerate;
 
         if (s1->video_size && (ret = av_parse_video_size(&width, &height, s1->video_size)) < 0) {
             av_log(s, AV_LOG_ERROR, "Couldn't parse video size.\n");
             goto fail;
         }
-        if ((pix_fmt = av_get_pix_fmt(s1->pixel_format)) == PIX_FMT_NONE) {
+        if ((pix_fmt = av_get_pix_fmt(s1->pixel_format)) == AV_PIX_FMT_NONE) {
            av_log(s, AV_LOG_ERROR, "No such pixel format: %s.\n", s1->pixel_format);
            ret = AVERROR(EINVAL);
            goto fail;
        }
diff --git a/libavformat/rtpdec_xiph.c b/libavformat/rtpdec_xiph.c
index f3bfd52906..dc739eeb38 100644
--- a/libavformat/rtpdec_xiph.c
+++ b/libavformat/rtpdec_xiph.c
@@ -313,11 +313,11 @@ static int xiph_parse_fmtp_pair(AVStream* stream,
     if (!strcmp(attr, "sampling")) {
         if (!strcmp(value, "YCbCr-4:2:0")) {
-            codec->pix_fmt = PIX_FMT_YUV420P;
+            codec->pix_fmt = AV_PIX_FMT_YUV420P;
         } else if (!strcmp(value, "YCbCr-4:4:2")) {
-            codec->pix_fmt = PIX_FMT_YUV422P;
+            codec->pix_fmt = AV_PIX_FMT_YUV422P;
         } else if (!strcmp(value, "YCbCr-4:4:4")) {
-            codec->pix_fmt = PIX_FMT_YUV444P;
+            codec->pix_fmt = AV_PIX_FMT_YUV444P;
         } else {
             av_log(codec, AV_LOG_ERROR, "Unsupported pixel format %s\n", attr);
diff --git a/libavformat/rtpenc_jpeg.c b/libavformat/rtpenc_jpeg.c
index 53a5bd2238..04df6583df 100644
--- a/libavformat/rtpenc_jpeg.c
+++ b/libavformat/rtpenc_jpeg.c
@@ -29,7 +29,7 @@ void ff_rtp_send_jpeg(AVFormatContext *s1, const uint8_t *buf, int size)
     RTPMuxContext *s = s1->priv_data;
     const uint8_t *qtables = NULL;
     int nb_qtables = 0;
-    uint8_t type = 1; /* default pixel format is PIX_FMT_YUVJ420P */
+    uint8_t type = 1; /* default pixel format is AV_PIX_FMT_YUVJ420P */
     uint8_t w, h;
     uint8_t *p;
     int off = 0; /* fragment offset of the current JPEG frame */
@@ -44,9 +44,9 @@ void ff_rtp_send_jpeg(AVFormatContext *s1, const uint8_t *buf, int size)
     h = s1->streams[0]->codec->height >> 3;
 
     /* check if pixel format is not the normal 420 case */
-    if (s1->streams[0]->codec->pix_fmt == PIX_FMT_YUVJ422P) {
+    if (s1->streams[0]->codec->pix_fmt == AV_PIX_FMT_YUVJ422P) {
         type = 0;
-    } else if (s1->streams[0]->codec->pix_fmt == PIX_FMT_YUVJ420P) {
+    } else if (s1->streams[0]->codec->pix_fmt == AV_PIX_FMT_YUVJ420P) {
         type = 1;
     } else {
         av_log(s1, AV_LOG_ERROR, "Unsupported pixel format\n");
diff --git a/libavformat/sdp.c b/libavformat/sdp.c
index 3791930b59..91de413dc6 100644
--- a/libavformat/sdp.c
+++ b/libavformat/sdp.c
@@ -521,13 +521,13 @@ static char *sdp_write_media_attributes(char *buff, int size, AVCodecContext *c,
             return NULL;
 
         switch (c->pix_fmt) {
-        case PIX_FMT_YUV420P:
+        case AV_PIX_FMT_YUV420P:
             pix_fmt = "YCbCr-4:2:0";
             break;
-        case PIX_FMT_YUV422P:
+        case AV_PIX_FMT_YUV422P:
             pix_fmt = "YCbCr-4:2:2";
             break;
-        case PIX_FMT_YUV444P:
+        case AV_PIX_FMT_YUV444P:
             pix_fmt = "YCbCr-4:4:4";
             break;
         default:
diff --git a/libavformat/segafilm.c b/libavformat/segafilm.c
index 23eea64956..068d432083 100644
--- a/libavformat/segafilm.c
+++ b/libavformat/segafilm.c
@@ -150,7 +150,7 @@ static int film_read_header(AVFormatContext *s)
         if (film->video_type == AV_CODEC_ID_RAWVIDEO) {
             if (scratch[20] == 24) {
-                st->codec->pix_fmt = PIX_FMT_RGB24;
+                st->codec->pix_fmt = AV_PIX_FMT_RGB24;
             } else {
                 av_log(s, AV_LOG_ERROR, "raw video is using unhandled %dbpp\n", scratch[20]);
                 return -1;
diff --git a/libavformat/siff.c b/libavformat/siff.c
index 98b1d4f393..02af80bfab 100644
--- a/libavformat/siff.c
+++ b/libavformat/siff.c
@@ -124,7 +124,7 @@ static int siff_parse_vbv1(AVFormatContext *s, SIFFContext *c, AVIOContext *pb)
     st->codec->codec_tag = MKTAG('V', 'B', 'V', '1');
     st->codec->width = width;
     st->codec->height = height;
-    st->codec->pix_fmt = PIX_FMT_PAL8;
+    st->codec->pix_fmt = AV_PIX_FMT_PAL8;
     avpriv_set_pts_info(st, 16, 1, 12);
 
     c->cur_frame = 0;
diff --git a/libavformat/smacker.c b/libavformat/smacker.c
index 362a7e6c12..dcca1afd37 100644
--- a/libavformat/smacker.c
+++ b/libavformat/smacker.c
@@ -161,7 +161,7 @@ static int smacker_read_header(AVFormatContext *s)
     smk->videoindex = st->index;
     st->codec->width = smk->width;
     st->codec->height = smk->height;
-    st->codec->pix_fmt = PIX_FMT_PAL8;
+    st->codec->pix_fmt = AV_PIX_FMT_PAL8;
     st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
     st->codec->codec_id = AV_CODEC_ID_SMACKVIDEO;
     st->codec->codec_tag = smk->magic;
diff --git a/libavformat/tmv.c b/libavformat/tmv.c
index 077a3713bc..100e12fa53 100644
--- a/libavformat/tmv.c
+++ b/libavformat/tmv.c
@@ -124,7 +124,7 @@ static int tmv_read_header(AVFormatContext *s)
     vst->codec->codec_type = AVMEDIA_TYPE_VIDEO;
     vst->codec->codec_id = AV_CODEC_ID_TMV;
-    vst->codec->pix_fmt = PIX_FMT_PAL8;
+    vst->codec->pix_fmt = AV_PIX_FMT_PAL8;
     vst->codec->width = char_cols * 8;
     vst->codec->height = char_rows * 8;
     avpriv_set_pts_info(vst, 32, fps.den, fps.num);
diff --git a/libavformat/utils.c b/libavformat/utils.c
index b932322ed7..bba5b2ae35 100644
--- a/libavformat/utils.c
+++ b/libavformat/utils.c
@@ -2018,7 +2018,7 @@ static int has_codec_parameters(AVStream *st)
         break;
     case AVMEDIA_TYPE_VIDEO:
         val = avctx->width;
-        if (st->info->found_decoder >= 0 && avctx->pix_fmt == PIX_FMT_NONE)
+        if (st->info->found_decoder >= 0 && avctx->pix_fmt == AV_PIX_FMT_NONE)
             return 0;
         break;
     default:
diff --git a/libavformat/yuv4mpeg.c b/libavformat/yuv4mpeg.c
index 23fb129971..bdae17b93e 100644
--- a/libavformat/yuv4mpeg.c
+++ b/libavformat/yuv4mpeg.c
@@ -57,23 +57,23 @@ static int yuv4_generate_header(AVFormatContext *s, char* buf)
         inter = st->codec->coded_frame->top_field_first ? 't' : 'b';
 
     switch (st->codec->pix_fmt) {
-    case PIX_FMT_GRAY8:
+    case AV_PIX_FMT_GRAY8:
         colorspace = " Cmono";
         break;
-    case PIX_FMT_YUV411P:
+    case AV_PIX_FMT_YUV411P:
         colorspace = " C411 XYSCSS=411";
         break;
-    case PIX_FMT_YUV420P:
+    case AV_PIX_FMT_YUV420P:
         switch (st->codec->chroma_sample_location) {
         case AVCHROMA_LOC_TOPLEFT: colorspace = " C420paldv XYSCSS=420PALDV"; break;
         case AVCHROMA_LOC_LEFT: colorspace = " C420mpeg2 XYSCSS=420MPEG2"; break;
        default: colorspace = " C420jpeg XYSCSS=420JPEG"; break;
        }
        break;
-    case PIX_FMT_YUV422P:
+    case AV_PIX_FMT_YUV422P:
         colorspace = " C422 XYSCSS=422";
         break;
-    case PIX_FMT_YUV444P:
+    case AV_PIX_FMT_YUV444P:
         colorspace = " C444 XYSCSS=444";
         break;
     }
@@ -126,7 +126,7 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
         ptr += picture->linesize[0];
     }
 
-    if (st->codec->pix_fmt != PIX_FMT_GRAY8) {
+    if (st->codec->pix_fmt != AV_PIX_FMT_GRAY8) {
         // Adjust for smaller Cb and Cr planes
         avcodec_get_chroma_sub_sample(st->codec->pix_fmt, &h_chroma_shift, &v_chroma_shift);
@@ -155,13 +155,13 @@ static int yuv4_write_header(AVFormatContext *s)
     if (s->nb_streams != 1)
         return AVERROR(EIO);
 
-    if (s->streams[0]->codec->pix_fmt == PIX_FMT_YUV411P) {
+    if (s->streams[0]->codec->pix_fmt == AV_PIX_FMT_YUV411P) {
         av_log(s, AV_LOG_ERROR, "Warning: generating rarely used 4:1:1 YUV "
                "stream, some mjpegtools might not work.\n");
-    } else if ((s->streams[0]->codec->pix_fmt != PIX_FMT_YUV420P) &&
-               (s->streams[0]->codec->pix_fmt != PIX_FMT_YUV422P) &&
-               (s->streams[0]->codec->pix_fmt != PIX_FMT_GRAY8) &&
-               (s->streams[0]->codec->pix_fmt != PIX_FMT_YUV444P)) {
+    } else if ((s->streams[0]->codec->pix_fmt != AV_PIX_FMT_YUV420P) &&
+               (s->streams[0]->codec->pix_fmt != AV_PIX_FMT_YUV422P) &&
+               (s->streams[0]->codec->pix_fmt != AV_PIX_FMT_GRAY8) &&
+               (s->streams[0]->codec->pix_fmt != AV_PIX_FMT_YUV444P)) {
         av_log(s, AV_LOG_ERROR, "ERROR: yuv4mpeg only handles yuv444p, "
                "yuv422p, yuv420p, yuv411p and gray pixel formats. "
                "Use -pix_fmt to select one.\n");
@@ -199,7 +199,7 @@ static int yuv4_read_header(AVFormatContext *s)
     AVIOContext *pb = s->pb;
     int width = -1, height = -1, raten = 0, rated = 0, aspectn = 0, aspectd = 0;
-    enum PixelFormat pix_fmt = PIX_FMT_NONE, alt_pix_fmt = PIX_FMT_NONE;
+    enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE, alt_pix_fmt = AV_PIX_FMT_NONE;
     enum AVChromaLocation chroma_sample_location = AVCHROMA_LOC_UNSPECIFIED;
     AVStream *st;
     struct frame_attributes *s1 = s->priv_data;
@@ -236,29 +236,29 @@ static int yuv4_read_header(AVFormatContext *s)
                 break;
             case 'C': // Color space
                 if (strncmp("420jpeg", tokstart, 7) == 0) {
-                    pix_fmt = PIX_FMT_YUV420P;
+                    pix_fmt = AV_PIX_FMT_YUV420P;
                     chroma_sample_location = AVCHROMA_LOC_CENTER;
                 } else if (strncmp("420mpeg2", tokstart, 8) == 0) {
-                    pix_fmt = PIX_FMT_YUV420P;
+                    pix_fmt = AV_PIX_FMT_YUV420P;
                     chroma_sample_location = AVCHROMA_LOC_LEFT;
                 } else if (strncmp("420paldv", tokstart, 8) == 0) {
-                    pix_fmt = PIX_FMT_YUV420P;
+                    pix_fmt = AV_PIX_FMT_YUV420P;
                     chroma_sample_location = AVCHROMA_LOC_TOPLEFT;
                 } else if (strncmp("420", tokstart, 3) == 0) {
-                    pix_fmt = PIX_FMT_YUV420P;
+                    pix_fmt = AV_PIX_FMT_YUV420P;
                     chroma_sample_location = AVCHROMA_LOC_CENTER;
                 } else if (strncmp("411", tokstart, 3) == 0)
-                    pix_fmt = PIX_FMT_YUV411P;
+                    pix_fmt = AV_PIX_FMT_YUV411P;
                 else if (strncmp("422", tokstart, 3) == 0)
-                    pix_fmt = PIX_FMT_YUV422P;
+                    pix_fmt = AV_PIX_FMT_YUV422P;
                 else if (strncmp("444alpha", tokstart, 8) == 0 ) {
                     av_log(s, AV_LOG_ERROR, "Cannot handle 4:4:4:4 "
                            "YUV4MPEG stream.\n");
                     return -1;
                 } else if (strncmp("444", tokstart, 3) == 0)
-                    pix_fmt = PIX_FMT_YUV444P;
+                    pix_fmt = AV_PIX_FMT_YUV444P;
                 else if (strncmp("mono", tokstart, 4) == 0) {
-                    pix_fmt = PIX_FMT_GRAY8;
+                    pix_fmt = AV_PIX_FMT_GRAY8;
                 } else {
                     av_log(s, AV_LOG_ERROR, "YUV4MPEG stream contains an unknown "
                            "pixel format.\n");
@@ -306,17 +306,17 @@ static int yuv4_read_header(AVFormatContext *s)
                 // Older nonstandard pixel format representation
                 tokstart += 6;
                 if (strncmp("420JPEG", tokstart, 7) == 0)
-                    alt_pix_fmt = PIX_FMT_YUV420P;
+                    alt_pix_fmt = AV_PIX_FMT_YUV420P;
                 else if (strncmp("420MPEG2", tokstart, 8) == 0)
-                    alt_pix_fmt = PIX_FMT_YUV420P;
+                    alt_pix_fmt = AV_PIX_FMT_YUV420P;
                 else if (strncmp("420PALDV", tokstart, 8) == 0)
-                    alt_pix_fmt = PIX_FMT_YUV420P;
+                    alt_pix_fmt = AV_PIX_FMT_YUV420P;
                 else if (strncmp("411", tokstart, 3) == 0)
-                    alt_pix_fmt = PIX_FMT_YUV411P;
+                    alt_pix_fmt = AV_PIX_FMT_YUV411P;
                 else if (strncmp("422", tokstart, 3) == 0)
-                    alt_pix_fmt = PIX_FMT_YUV422P;
+                    alt_pix_fmt = AV_PIX_FMT_YUV422P;
                 else if (strncmp("444", tokstart, 3) == 0)
-                    alt_pix_fmt = PIX_FMT_YUV444P;
+                    alt_pix_fmt = AV_PIX_FMT_YUV444P;
             }
             while (tokstart < header_end && *tokstart != 0x20)
                 tokstart++;
@@ -329,9 +329,9 @@ static int yuv4_read_header(AVFormatContext *s)
         return -1;
     }
 
-    if (pix_fmt == PIX_FMT_NONE) {
-        if (alt_pix_fmt == PIX_FMT_NONE)
-            pix_fmt = PIX_FMT_YUV420P;
+    if (pix_fmt == AV_PIX_FMT_NONE) {
+        if (alt_pix_fmt == AV_PIX_FMT_NONE)
+            pix_fmt = AV_PIX_FMT_YUV420P;
         else
             pix_fmt = alt_pix_fmt;
     }