author     Michael Niedermayer <michaelni@gmx.at>    2014-04-09 14:36:47 +0200
committer  Michael Niedermayer <michaelni@gmx.at>    2014-04-09 15:18:46 +0200
commit     59a53842d360ddaf883a0415b11013038fa57da0 (patch)
tree       bec280eb85d158ae75e54268cef1ebdd47be563c /libavcodec/vc1dec.c
parent     60ef0c6f909703ae4dc021f6857d254c7badc1ec (diff)
parent     f6774f905fb3cfdc319523ac640be30b14c1bc55 (diff)
download   ffmpeg-59a53842d360ddaf883a0415b11013038fa57da0.tar.gz
Merge remote-tracking branch 'qatar/master'
* qatar/master:
mpegvideo: operate with pointers to AVFrames instead of whole structs
Conflicts:
libavcodec/h261dec.c
libavcodec/h263dec.c
libavcodec/intrax8.c
libavcodec/mpeg12enc.c
libavcodec/mpegvideo.c
libavcodec/mpegvideo.h
libavcodec/mpegvideo_enc.c
libavcodec/mpegvideo_motion.c
libavcodec/mpegvideo_xvmc.c
libavcodec/msmpeg4.c
libavcodec/ratecontrol.c
libavcodec/vaapi.c
libavcodec/vc1dec.c
libavcodec/vdpau_vc1.c
See: fc567ac49e17151f00f31b59030cd10f952612ef
Merged-by: Michael Niedermayer <michaelni@gmx.at>
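
For readers skimming the diff below: the merged libav change makes the mpegvideo Picture struct hold a pointer to a separately allocated AVFrame (presumably set up once with av_frame_alloc()) instead of embedding the frame by value, so accesses in vc1dec.c change from f.data/f.linesize to f->data/f->linesize, and address-of expressions such as &s->next_picture_ptr->f drop the &. The following is a minimal illustrative sketch of that pattern, using made-up struct and function names rather than the real mpegvideo definitions:

```c
/*
 * Illustrative sketch only -- ExamplePicture, example_init(), example_use(),
 * example_output() and example_close() are made-up names, not the real
 * mpegvideo Picture API. It shows the access-pattern change this merge
 * brings in: the frame lives behind a pointer instead of inside the struct.
 */
#include <stdint.h>
#include <libavutil/error.h>
#include <libavutil/frame.h>

typedef struct ExamplePicture {
    /* before the change: AVFrame f;  (frame embedded by value) */
    AVFrame *f;   /* after the change: pointer to a separately allocated frame */
} ExamplePicture;

static int example_init(ExamplePicture *pic)
{
    pic->f = av_frame_alloc();               /* owner allocates the frame once */
    return pic->f ? 0 : AVERROR(ENOMEM);
}

static void example_use(ExamplePicture *pic, uint8_t *dest[3])
{
    /* call sites switch from pic->f.linesize[0] to pic->f->linesize[0] ... */
    dest[0] += pic->f->linesize[0];
}

static int example_output(ExamplePicture *pic, AVFrame *out)
{
    /* ... and from av_frame_ref(out, &pic->f) to av_frame_ref(out, pic->f) */
    return av_frame_ref(out, pic->f);
}

static void example_close(ExamplePicture *pic)
{
    av_frame_free(&pic->f);                  /* frees the frame when decoding ends */
}
```

A likely motivation for the upstream change is that once Picture no longer embeds AVFrame by value, the frame can be managed entirely through the public av_frame_alloc()/av_frame_ref()/av_frame_free() API, and AVFrame can grow without affecting structs that merely point to it.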
Diffstat (limited to 'libavcodec/vc1dec.c')
-rw-r--r--   libavcodec/vc1dec.c   140
1 file changed, 70 insertions(+), 70 deletions(-)
diff --git a/libavcodec/vc1dec.c b/libavcodec/vc1dec.c
index f44176fd72..5b6c5d99cf 100644
--- a/libavcodec/vc1dec.c
+++ b/libavcodec/vc1dec.c
@@ -66,9 +66,9 @@ static void init_block_index(VC1Context *v)
     MpegEncContext *s = &v->s;
     ff_init_block_index(s);
     if (v->field_mode && !(v->second_field ^ v->tff)) {
-        s->dest[0] += s->current_picture_ptr->f.linesize[0];
-        s->dest[1] += s->current_picture_ptr->f.linesize[1];
-        s->dest[2] += s->current_picture_ptr->f.linesize[2];
+        s->dest[0] += s->current_picture_ptr->f->linesize[0];
+        s->dest[1] += s->current_picture_ptr->f->linesize[1];
+        s->dest[2] += s->current_picture_ptr->f->linesize[2];
     }
 }
 
@@ -341,7 +341,7 @@ static void vc1_mc_1mv(VC1Context *v, int dir)
 
     if ((!v->field_mode ||
          (v->ref_field_type[dir] == 1 && v->cur_field_type == 1)) &&
-        !v->s.last_picture.f.data[0])
+        !v->s.last_picture.f->data[0])
         return;
 
     mx = s->mv[dir][0][0];
@@ -373,24 +373,24 @@ static void vc1_mc_1mv(VC1Context *v, int dir)
     }
     if (!dir) {
         if (v->field_mode && (v->cur_field_type != v->ref_field_type[dir]) && v->second_field) {
-            srcY = s->current_picture.f.data[0];
-            srcU = s->current_picture.f.data[1];
-            srcV = s->current_picture.f.data[2];
+            srcY = s->current_picture.f->data[0];
+            srcU = s->current_picture.f->data[1];
+            srcV = s->current_picture.f->data[2];
             luty = v->curr_luty;
             lutuv = v->curr_lutuv;
             use_ic = *v->curr_use_ic;
         } else {
-            srcY = s->last_picture.f.data[0];
-            srcU = s->last_picture.f.data[1];
-            srcV = s->last_picture.f.data[2];
+            srcY = s->last_picture.f->data[0];
+            srcU = s->last_picture.f->data[1];
+            srcV = s->last_picture.f->data[2];
             luty = v->last_luty;
             lutuv = v->last_lutuv;
             use_ic = v->last_use_ic;
         }
     } else {
-        srcY = s->next_picture.f.data[0];
-        srcU = s->next_picture.f.data[1];
-        srcV = s->next_picture.f.data[2];
+        srcY = s->next_picture.f->data[0];
+        srcU = s->next_picture.f->data[1];
+        srcV = s->next_picture.f->data[2];
         luty = v->next_luty;
         lutuv = v->next_lutuv;
         use_ic = v->next_use_ic;
@@ -423,9 +423,9 @@ static void vc1_mc_1mv(VC1Context *v, int dir)
     srcV += uvsrc_y * s->uvlinesize + uvsrc_x;
 
     if (v->field_mode && v->ref_field_type[dir]) {
-        srcY += s->current_picture_ptr->f.linesize[0];
-        srcU += s->current_picture_ptr->f.linesize[1];
-        srcV += s->current_picture_ptr->f.linesize[2];
+        srcY += s->current_picture_ptr->f->linesize[0];
+        srcU += s->current_picture_ptr->f->linesize[1];
+        srcV += s->current_picture_ptr->f->linesize[2];
     }
 
     /* for grayscale we should not try to read from unknown area */
@@ -562,7 +562,7 @@ static void vc1_mc_4mv_luma(VC1Context *v, int n, int dir, int avg)
 
     if ((!v->field_mode ||
          (v->ref_field_type[dir] == 1 && v->cur_field_type == 1)) &&
-        !v->s.last_picture.f.data[0])
+        !v->s.last_picture.f->data[0])
         return;
 
     mx = s->mv[dir][n][0];
@@ -570,16 +570,16 @@ static void vc1_mc_4mv_luma(VC1Context *v, int n, int dir, int avg)
 
     if (!dir) {
         if (v->field_mode && (v->cur_field_type != v->ref_field_type[dir]) && v->second_field) {
-            srcY = s->current_picture.f.data[0];
+            srcY = s->current_picture.f->data[0];
             luty = v->curr_luty;
             use_ic = *v->curr_use_ic;
         } else {
-            srcY = s->last_picture.f.data[0];
+            srcY = s->last_picture.f->data[0];
             luty = v->last_luty;
             use_ic = v->last_use_ic;
        }
     } else {
-        srcY = s->next_picture.f.data[0];
+        srcY = s->next_picture.f->data[0];
         luty = v->next_luty;
         use_ic = v->next_use_ic;
     }
@@ -679,7 +679,7 @@ static void vc1_mc_4mv_luma(VC1Context *v, int n, int dir, int avg)
     srcY += src_y * s->linesize + src_x;
 
     if (v->field_mode && v->ref_field_type[dir])
-        srcY += s->current_picture_ptr->f.linesize[0];
+        srcY += s->current_picture_ptr->f->linesize[0];
 
     if (fieldmv && !(src_y & 1))
         v_edge_pos--;
@@ -809,7 +809,7 @@ static void vc1_mc_4mv_chroma(VC1Context *v, int dir)
     uint8_t (*lutuv)[256];
     int use_ic;
 
-    if (!v->field_mode && !v->s.last_picture.f.data[0])
+    if (!v->field_mode && !v->s.last_picture.f->data[0])
         return;
     if (s->flags & CODEC_FLAG_GRAY)
         return;
@@ -840,7 +840,7 @@ static void vc1_mc_4mv_chroma(VC1Context *v, int dir)
         if (dominant)
             chroma_ref_type = !v->cur_field_type;
     }
-    if (v->field_mode && chroma_ref_type == 1 && v->cur_field_type == 1 && !v->s.last_picture.f.data[0])
+    if (v->field_mode && chroma_ref_type == 1 && v->cur_field_type == 1 && !v->s.last_picture.f->data[0])
         return;
     s->current_picture.motion_val[1][s->block_index[0] + v->blocks_off][0] = tx;
     s->current_picture.motion_val[1][s->block_index[0] + v->blocks_off][1] = ty;
@@ -871,19 +871,19 @@ static void vc1_mc_4mv_chroma(VC1Context *v, int dir)
 
     if (!dir) {
         if (v->field_mode && (v->cur_field_type != chroma_ref_type) && v->second_field) {
-            srcU = s->current_picture.f.data[1];
-            srcV = s->current_picture.f.data[2];
+            srcU = s->current_picture.f->data[1];
+            srcV = s->current_picture.f->data[2];
             lutuv = v->curr_lutuv;
             use_ic = *v->curr_use_ic;
         } else {
-            srcU = s->last_picture.f.data[1];
-            srcV = s->last_picture.f.data[2];
+            srcU = s->last_picture.f->data[1];
+            srcV = s->last_picture.f->data[2];
             lutuv = v->last_lutuv;
             use_ic = v->last_use_ic;
         }
     } else {
-        srcU = s->next_picture.f.data[1];
-        srcV = s->next_picture.f.data[2];
+        srcU = s->next_picture.f->data[1];
+        srcV = s->next_picture.f->data[2];
         lutuv = v->next_lutuv;
         use_ic = v->next_use_ic;
     }
@@ -898,8 +898,8 @@ static void vc1_mc_4mv_chroma(VC1Context *v, int dir)
 
     if (v->field_mode) {
         if (chroma_ref_type) {
-            srcU += s->current_picture_ptr->f.linesize[1];
-            srcV += s->current_picture_ptr->f.linesize[2];
+            srcU += s->current_picture_ptr->f->linesize[1];
+            srcV += s->current_picture_ptr->f->linesize[2];
         }
     }
 
@@ -1004,13 +1004,13 @@ static void vc1_mc_4mv_chroma4(VC1Context *v, int dir, int dir2, int avg)
         uvsrc_x = av_clip(uvsrc_x, -8, s->avctx->coded_width >> 1);
         uvsrc_y = av_clip(uvsrc_y, -8, s->avctx->coded_height >> 1);
         if (i < 2 ? dir : dir2) {
-            srcU = s->next_picture.f.data[1];
-            srcV = s->next_picture.f.data[2];
+            srcU = s->next_picture.f->data[1];
+            srcV = s->next_picture.f->data[2];
             lutuv = v->next_lutuv;
             use_ic = v->next_use_ic;
         } else {
-            srcU = s->last_picture.f.data[1];
-            srcV = s->last_picture.f.data[2];
+            srcU = s->last_picture.f->data[1];
+            srcV = s->last_picture.f->data[2];
             lutuv = v->last_lutuv;
             use_ic = v->last_use_ic;
        }
@@ -1905,7 +1905,7 @@ static void vc1_interp_mc(VC1Context *v)
     int v_edge_pos = s->v_edge_pos >> v->field_mode;
     int use_ic = v->next_use_ic;
 
-    if (!v->field_mode && !v->s.next_picture.f.data[0])
+    if (!v->field_mode && !v->s.next_picture.f->data[0])
         return;
 
     mx = s->mv[1][0][0];
@@ -1921,9 +1921,9 @@ static void vc1_interp_mc(VC1Context *v)
         uvmx = uvmx + ((uvmx < 0) ? -(uvmx & 1) : (uvmx & 1));
         uvmy = uvmy + ((uvmy < 0) ? -(uvmy & 1) : (uvmy & 1));
     }
-    srcY = s->next_picture.f.data[0];
-    srcU = s->next_picture.f.data[1];
-    srcV = s->next_picture.f.data[2];
+    srcY = s->next_picture.f->data[0];
+    srcU = s->next_picture.f->data[1];
+    srcV = s->next_picture.f->data[2];
 
     src_x = s->mb_x * 16 + (mx >> 2);
     src_y = s->mb_y * 16 + (my >> 2);
@@ -1947,9 +1947,9 @@ static void vc1_interp_mc(VC1Context *v)
     srcV += uvsrc_y * s->uvlinesize + uvsrc_x;
 
     if (v->field_mode && v->ref_field_type[1]) {
-        srcY += s->current_picture_ptr->f.linesize[0];
-        srcU += s->current_picture_ptr->f.linesize[1];
-        srcV += s->current_picture_ptr->f.linesize[2];
+        srcY += s->current_picture_ptr->f->linesize[0];
+        srcU += s->current_picture_ptr->f->linesize[1];
+        srcV += s->current_picture_ptr->f->linesize[2];
     }
 
     /* for grayscale we should not try to read from unknown area */
@@ -5169,7 +5169,7 @@ static void vc1_decode_skip_blocks(VC1Context *v)
 {
     MpegEncContext *s = &v->s;
 
-    if (!v->s.last_picture.f.data[0])
+    if (!v->s.last_picture.f->data[0])
         return;
 
     ff_er_add_slice(&s->er, 0, s->start_mb_y, s->mb_width - 1, s->end_mb_y - 1, ER_MB_END);
@@ -5178,9 +5178,9 @@ static void vc1_decode_skip_blocks(VC1Context *v)
         s->mb_x = 0;
         init_block_index(v);
         ff_update_block_index(s);
-        memcpy(s->dest[0], s->last_picture.f.data[0] + s->mb_y * 16 * s->linesize, s->linesize * 16);
-        memcpy(s->dest[1], s->last_picture.f.data[1] + s->mb_y * 8 * s->uvlinesize, s->uvlinesize * 8);
-        memcpy(s->dest[2], s->last_picture.f.data[2] + s->mb_y * 8 * s->uvlinesize, s->uvlinesize * 8);
+        memcpy(s->dest[0], s->last_picture.f->data[0] + s->mb_y * 16 * s->linesize, s->linesize * 16);
+        memcpy(s->dest[1], s->last_picture.f->data[1] + s->mb_y * 8 * s->uvlinesize, s->uvlinesize * 8);
+        memcpy(s->dest[2], s->last_picture.f->data[2] + s->mb_y * 8 * s->uvlinesize, s->uvlinesize * 8);
         ff_mpeg_draw_horiz_band(s, s->mb_y * 16, 16);
         s->first_slice_line = 0;
     }
@@ -5383,15 +5383,15 @@ static void vc1_draw_sprites(VC1Context *v, SpriteData* sd)
                            v->sprite_output_frame->linesize[plane] * row;
 
             for (sprite = 0; sprite <= v->two_sprites; sprite++) {
-                uint8_t *iplane = s->current_picture.f.data[plane];
-                int iline = s->current_picture.f.linesize[plane];
+                uint8_t *iplane = s->current_picture.f->data[plane];
+                int iline = s->current_picture.f->linesize[plane];
                 int ycoord = yoff[sprite] + yadv[sprite] * row;
                 int yline = ycoord >> 16;
                 int next_line;
                 ysub[sprite] = ycoord & 0xFFFF;
                 if (sprite) {
-                    iplane = s->last_picture.f.data[plane];
-                    iline = s->last_picture.f.linesize[plane];
+                    iplane = s->last_picture.f->data[plane];
+                    iline = s->last_picture.f->linesize[plane];
                 }
                 next_line = FFMIN(yline + 1, (v->sprite_height >> !!plane) - 1) * iline;
                 if (!(xoff[sprite] & 0xFFFF) && xadv[sprite] == 1 << 16) {
@@ -5465,12 +5465,12 @@ static int vc1_decode_sprites(VC1Context *v, GetBitContext* gb)
     if (ret < 0)
         return ret;
 
-    if (!s->current_picture.f.data[0]) {
+    if (!s->current_picture.f->data[0]) {
        av_log(avctx, AV_LOG_ERROR, "Got no sprites\n");
        return -1;
    }
 
-    if (v->two_sprites && (!s->last_picture_ptr || !s->last_picture.f.data[0])) {
+    if (v->two_sprites && (!s->last_picture_ptr || !s->last_picture.f->data[0])) {
        av_log(avctx, AV_LOG_WARNING, "Need two sprites, only got one\n");
        v->two_sprites = 0;
    }
@@ -5488,7 +5488,7 @@ static void vc1_sprite_flush(AVCodecContext *avctx)
 {
     VC1Context *v = avctx->priv_data;
     MpegEncContext *s = &v->s;
-    AVFrame *f = &s->current_picture.f;
+    AVFrame *f = s->current_picture.f;
     int plane, i;
 
     /* Windows Media Image codecs have a convergence interval of two keyframes.
@@ -5808,7 +5808,7 @@ static int vc1_decode_frame(AVCodecContext *avctx, void *data,
     if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == VC1_CODE_ENDOFSEQ)) {
         /* special case for last picture */
         if (s->low_delay == 0 && s->next_picture_ptr) {
-            if ((ret = av_frame_ref(pict, &s->next_picture_ptr->f)) < 0)
+            if ((ret = av_frame_ref(pict, s->next_picture_ptr->f)) < 0)
                 return ret;
             s->next_picture_ptr = NULL;
 
@@ -6000,8 +6000,8 @@ static int vc1_decode_frame(AVCodecContext *avctx, void *data,
     }
 
     // for skipping the frame
-    s->current_picture.f.pict_type = s->pict_type;
-    s->current_picture.f.key_frame = s->pict_type == AV_PICTURE_TYPE_I;
+    s->current_picture.f->pict_type = s->pict_type;
+    s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I;
 
     /* skip B-frames if we don't have reference frames */
     if (s->last_picture_ptr == NULL && (s->pict_type == AV_PICTURE_TYPE_B || s->droppable)) {
@@ -6025,19 +6025,19 @@ static int vc1_decode_frame(AVCodecContext *avctx, void *data,
     }
 
     v->s.current_picture_ptr->field_picture = v->field_mode;
-    v->s.current_picture_ptr->f.interlaced_frame = (v->fcm != PROGRESSIVE);
-    v->s.current_picture_ptr->f.top_field_first = v->tff;
+    v->s.current_picture_ptr->f->interlaced_frame = (v->fcm != PROGRESSIVE);
+    v->s.current_picture_ptr->f->top_field_first = v->tff;
 
     // process pulldown flags
-    s->current_picture_ptr->f.repeat_pict = 0;
+    s->current_picture_ptr->f->repeat_pict = 0;
     // Pulldown flags are only valid when 'broadcast' has been set.
     // So ticks_per_frame will be 2
     if (v->rff) {
         // repeat field
-        s->current_picture_ptr->f.repeat_pict = 1;
+        s->current_picture_ptr->f->repeat_pict = 1;
     } else if (v->rptfrm) {
         // repeat frames
-        s->current_picture_ptr->f.repeat_pict = v->rptfrm * 2;
+        s->current_picture_ptr->f->repeat_pict = v->rptfrm * 2;
     }
 
     s->me.qpel_put = s->dsp.put_qpel_pixels_tab;
@@ -6071,7 +6071,7 @@ static int vc1_decode_frame(AVCodecContext *avctx, void *data,
                 av_log(avctx, AV_LOG_ERROR, "parsing header for second field failed");
                 goto err;
             }
-            v->s.current_picture_ptr->f.pict_type = v->s.pict_type;
+            v->s.current_picture_ptr->f->pict_type = v->s.pict_type;
 
             if (avctx->hwaccel->start_frame(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field) < 0)
                 goto err;
@@ -6096,9 +6096,9 @@ static int vc1_decode_frame(AVCodecContext *avctx, void *data,
     v->bits = buf_size * 8;
     v->end_mb_x = s->mb_width;
     if (v->field_mode) {
-        s->current_picture.f.linesize[0] <<= 1;
-        s->current_picture.f.linesize[1] <<= 1;
-        s->current_picture.f.linesize[2] <<= 1;
+        s->current_picture.f->linesize[0] <<= 1;
+        s->current_picture.f->linesize[1] <<= 1;
+        s->current_picture.f->linesize[2] <<= 1;
         s->linesize <<= 1;
         s->uvlinesize <<= 1;
     }
@@ -6168,9 +6168,9 @@ static int vc1_decode_frame(AVCodecContext *avctx, void *data,
     }
     if (v->field_mode) {
         v->second_field = 0;
-        s->current_picture.f.linesize[0] >>= 1;
-        s->current_picture.f.linesize[1] >>= 1;
-        s->current_picture.f.linesize[2] >>= 1;
+        s->current_picture.f->linesize[0] >>= 1;
+        s->current_picture.f->linesize[1] >>= 1;
+        s->current_picture.f->linesize[2] >>= 1;
         s->linesize >>= 1;
         s->uvlinesize >>= 1;
         if (v->s.pict_type != AV_PICTURE_TYPE_BI && v->s.pict_type != AV_PICTURE_TYPE_B) {
@@ -6205,12 +6205,12 @@ image:
         *got_frame = 1;
     } else {
         if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
-            if ((ret = av_frame_ref(pict, &s->current_picture_ptr->f)) < 0)
+            if ((ret = av_frame_ref(pict, s->current_picture_ptr->f)) < 0)
                 goto err;
             ff_print_debug_info(s, s->current_picture_ptr, pict);
             *got_frame = 1;
         } else if (s->last_picture_ptr != NULL) {
-            if ((ret = av_frame_ref(pict, &s->last_picture_ptr->f)) < 0)
+            if ((ret = av_frame_ref(pict, s->last_picture_ptr->f)) < 0)
                 goto err;
             ff_print_debug_info(s, s->last_picture_ptr, pict);
             *got_frame = 1;