author:    Michael Niedermayer <michaelni@gmx.at>  2013-05-16 17:00:10 +0200
committer: Michael Niedermayer <michaelni@gmx.at>  2013-05-16 17:03:52 +0200
commit:    af00d68af0f0bb18be0979a274e6e666360130f5
tree:      71cda577c3b564fc02e064ce2cfd39be2d361289
parent:    df003cbb565575fe539b06105b76a95db72e8720
parent:    3062ac4c47b18305aa55788133b4c8aaded12f11
Merge remote-tracking branch 'qatar/master'
* qatar/master:
vf_pad: use the name 's' for the pointer to the private context
vf_overlay: use the name 's' for the pointer to the private context
vf_lut: use the name 's' for the pointer to the private context
Conflicts:
libavfilter/vf_lut.c
libavfilter/vf_overlay.c
libavfilter/vf_pad.c
Merged-by: Michael Niedermayer <michaelni@gmx.at>
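
The change in all three filters is purely mechanical: the local pointer to the filter's private context (ctx->priv) is renamed from a filter-specific name (lut, over, pad) to the generic s; behaviour is unchanged. As a small illustration of the convention, this is the vf_lut.c uninit() callback as it reads after the merge, taken from the diff below with explanatory comments added:

    static av_cold void uninit(AVFilterContext *ctx)
    {
        LutContext *s = ctx->priv;   /* private context pointer is now always called "s" */
        int i;

        for (i = 0; i < 4; i++) {
            av_expr_free(s->comp_expr[i]);   /* free the parsed per-component expression */
            s->comp_expr[i] = NULL;
            av_freep(&s->comp_expr_str[i]);  /* and its source string */
        }
    }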
-rw-r--r--  libavfilter/vf_lut.c      | 108
-rw-r--r--  libavfilter/vf_overlay.c  | 208
-rw-r--r--  libavfilter/vf_pad.c      | 138

3 files changed, 227 insertions, 227 deletions
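
For orientation before the diff itself: vf_lut builds one 256-entry lookup table per component in config_props(), by evaluating the user expression for every input value and clipping to that component's [min, max] range, and then maps each pixel through the table in filter_frame(). A minimal standalone sketch of that lookup pattern in plain C, not the FFmpeg API; the function and variable names here are illustrative only:

    #include <stdint.h>
    #include <stddef.h>

    /* Map every byte of one plane through a precomputed 256-entry table.
     * vf_lut does the same per component, with the table filled from a
     * parsed expression and clipped to the component's value range. */
    static void apply_lut_plane(uint8_t *dst, ptrdiff_t dst_linesize,
                                const uint8_t *src, ptrdiff_t src_linesize,
                                int w, int h, const uint8_t lut[256])
    {
        for (int y = 0; y < h; y++) {
            for (int x = 0; x < w; x++)
                dst[x] = lut[src[x]];
            dst += dst_linesize;
            src += src_linesize;
        }
    }

    /* Example table: a simple negate, roughly what the "negval" expression
     * used by the negate filter produces for full-range 8-bit components. */
    static void build_negate_lut(uint8_t lut[256])
    {
        for (int v = 0; v < 256; v++)
            lut[v] = 255 - v;
    }

Precomputing the table once at configuration time keeps the per-pixel work in filter_frame() to a single array lookup.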
diff --git a/libavfilter/vf_lut.c b/libavfilter/vf_lut.c index f57c2845e6..7a6da5ec11 100644 --- a/libavfilter/vf_lut.c +++ b/libavfilter/vf_lut.c @@ -97,13 +97,13 @@ static const AVOption options[] = { static av_cold void uninit(AVFilterContext *ctx) { - LutContext *lut = ctx->priv; + LutContext *s = ctx->priv; int i; for (i = 0; i < 4; i++) { - av_expr_free(lut->comp_expr[i]); - lut->comp_expr[i] = NULL; - av_freep(&lut->comp_expr_str[i]); + av_expr_free(s->comp_expr[i]); + s->comp_expr[i] = NULL; + av_freep(&s->comp_expr_str[i]); } } @@ -125,10 +125,10 @@ static const enum AVPixelFormat all_pix_fmts[] = { RGB_FORMATS, YUV_FORMATS, AV_ static int query_formats(AVFilterContext *ctx) { - LutContext *lut = ctx->priv; + LutContext *s = ctx->priv; - const enum AVPixelFormat *pix_fmts = lut->is_rgb ? rgb_pix_fmts : - lut->is_yuv ? yuv_pix_fmts : all_pix_fmts; + const enum AVPixelFormat *pix_fmts = s->is_rgb ? rgb_pix_fmts : + s->is_yuv ? yuv_pix_fmts : all_pix_fmts; ff_set_common_formats(ctx, ff_make_format_list(pix_fmts)); return 0; @@ -139,9 +139,9 @@ static int query_formats(AVFilterContext *ctx) */ static double clip(void *opaque, double val) { - LutContext *lut = opaque; - double minval = lut->var_values[VAR_MINVAL]; - double maxval = lut->var_values[VAR_MAXVAL]; + LutContext *s = opaque; + double minval = s->var_values[VAR_MINVAL]; + double maxval = s->var_values[VAR_MAXVAL]; return av_clip(val, minval, maxval); } @@ -152,10 +152,10 @@ static double clip(void *opaque, double val) */ static double compute_gammaval(void *opaque, double gamma) { - LutContext *lut = opaque; - double val = lut->var_values[VAR_CLIPVAL]; - double minval = lut->var_values[VAR_MINVAL]; - double maxval = lut->var_values[VAR_MAXVAL]; + LutContext *s = opaque; + double val = s->var_values[VAR_CLIPVAL]; + double minval = s->var_values[VAR_MINVAL]; + double maxval = s->var_values[VAR_MAXVAL]; return pow((val-minval)/(maxval-minval), gamma) * (maxval-minval)+minval; } @@ -175,17 +175,17 @@ static const char * const funcs1_names[] = { static int config_props(AVFilterLink *inlink) { AVFilterContext *ctx = inlink->dst; - LutContext *lut = ctx->priv; + LutContext *s = ctx->priv; const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format); uint8_t rgba_map[4]; /* component index -> RGBA color index map */ int min[4], max[4]; int val, color, ret; - lut->hsub = desc->log2_chroma_w; - lut->vsub = desc->log2_chroma_h; + s->hsub = desc->log2_chroma_w; + s->vsub = desc->log2_chroma_h; - lut->var_values[VAR_W] = inlink->w; - lut->var_values[VAR_H] = inlink->h; + s->var_values[VAR_W] = inlink->w; + s->var_values[VAR_H] = inlink->h; switch (inlink->format) { case AV_PIX_FMT_YUV410P: @@ -207,49 +207,49 @@ static int config_props(AVFilterLink *inlink) max[0] = max[1] = max[2] = max[3] = 255; } - lut->is_yuv = lut->is_rgb = 0; - if (ff_fmt_is_in(inlink->format, yuv_pix_fmts)) lut->is_yuv = 1; - else if (ff_fmt_is_in(inlink->format, rgb_pix_fmts)) lut->is_rgb = 1; + s->is_yuv = s->is_rgb = 0; + if (ff_fmt_is_in(inlink->format, yuv_pix_fmts)) s->is_yuv = 1; + else if (ff_fmt_is_in(inlink->format, rgb_pix_fmts)) s->is_rgb = 1; - if (lut->is_rgb) { + if (s->is_rgb) { ff_fill_rgba_map(rgba_map, inlink->format); - lut->step = av_get_bits_per_pixel(desc) >> 3; + s->step = av_get_bits_per_pixel(desc) >> 3; } for (color = 0; color < desc->nb_components; color++) { double res; - int comp = lut->is_rgb ? rgba_map[color] : color; + int comp = s->is_rgb ? 
rgba_map[color] : color; /* create the parsed expression */ - ret = av_expr_parse(&lut->comp_expr[color], lut->comp_expr_str[color], + ret = av_expr_parse(&s->comp_expr[color], s->comp_expr_str[color], var_names, funcs1_names, funcs1, NULL, NULL, 0, ctx); if (ret < 0) { av_log(ctx, AV_LOG_ERROR, "Error when parsing the expression '%s' for the component %d and color %d.\n", - lut->comp_expr_str[comp], comp, color); + s->comp_expr_str[comp], comp, color); return AVERROR(EINVAL); } /* compute the lut */ - lut->var_values[VAR_MAXVAL] = max[color]; - lut->var_values[VAR_MINVAL] = min[color]; + s->var_values[VAR_MAXVAL] = max[color]; + s->var_values[VAR_MINVAL] = min[color]; for (val = 0; val < 256; val++) { - lut->var_values[VAR_VAL] = val; - lut->var_values[VAR_CLIPVAL] = av_clip(val, min[color], max[color]); - lut->var_values[VAR_NEGVAL] = - av_clip(min[color] + max[color] - lut->var_values[VAR_VAL], + s->var_values[VAR_VAL] = val; + s->var_values[VAR_CLIPVAL] = av_clip(val, min[color], max[color]); + s->var_values[VAR_NEGVAL] = + av_clip(min[color] + max[color] - s->var_values[VAR_VAL], min[color], max[color]); - res = av_expr_eval(lut->comp_expr[color], lut->var_values, lut); + res = av_expr_eval(s->comp_expr[color], s->var_values, s); if (isnan(res)) { av_log(ctx, AV_LOG_ERROR, "Error when evaluating the expression '%s' for the value %d for the component %d.\n", - lut->comp_expr_str[color], val, comp); + s->comp_expr_str[color], val, comp); return AVERROR(EINVAL); } - lut->lut[comp][val] = av_clip((int)res, min[color], max[color]); - av_log(ctx, AV_LOG_DEBUG, "val[%d][%d] = %d\n", comp, val, lut->lut[comp][val]); + s->lut[comp][val] = av_clip((int)res, min[color], max[color]); + av_log(ctx, AV_LOG_DEBUG, "val[%d][%d] = %d\n", comp, val, s->lut[comp][val]); } } @@ -259,7 +259,7 @@ static int config_props(AVFilterLink *inlink) static int filter_frame(AVFilterLink *inlink, AVFrame *in) { AVFilterContext *ctx = inlink->dst; - LutContext *lut = ctx->priv; + LutContext *s = ctx->priv; AVFilterLink *outlink = ctx->outputs[0]; AVFrame *out; uint8_t *inrow, *outrow, *inrow0, *outrow0; @@ -277,25 +277,25 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in) av_frame_copy_props(out, in); } - if (lut->is_rgb) { + if (s->is_rgb) { /* packed */ inrow0 = in ->data[0]; outrow0 = out->data[0]; for (i = 0; i < in->height; i ++) { int w = inlink->w; - const uint8_t (*tab)[256] = (const uint8_t (*)[256])lut->lut; + const uint8_t (*tab)[256] = (const uint8_t (*)[256])s->lut; inrow = inrow0; outrow = outrow0; for (j = 0; j < w; j++) { - switch (lut->step) { + switch (s->step) { case 4: outrow[3] = tab[3][inrow[3]]; // Fall-through case 3: outrow[2] = tab[2][inrow[2]]; // Fall-through case 2: outrow[1] = tab[1][inrow[1]]; // Fall-through default: outrow[0] = tab[0][inrow[0]]; } - outrow += lut->step; - inrow += lut->step; + outrow += s->step; + inrow += s->step; } inrow0 += in ->linesize[0]; outrow0 += out->linesize[0]; @@ -303,8 +303,8 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in) } else { /* planar */ for (plane = 0; plane < 4 && in->data[plane]; plane++) { - int vsub = plane == 1 || plane == 2 ? lut->vsub : 0; - int hsub = plane == 1 || plane == 2 ? lut->hsub : 0; + int vsub = plane == 1 || plane == 2 ? s->vsub : 0; + int hsub = plane == 1 || plane == 2 ? 
s->hsub : 0; int h = FF_CEIL_RSHIFT(inlink->h, vsub); int w = FF_CEIL_RSHIFT(inlink->w, hsub); @@ -312,7 +312,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in) outrow = out->data[plane]; for (i = 0; i < h; i++) { - const uint8_t *tab = lut->lut[plane]; + const uint8_t *tab = s->lut[plane]; for (j = 0; j < w; j++) outrow[j] = tab[inrow[j]]; inrow += in ->linesize[plane]; @@ -377,9 +377,9 @@ AVFILTER_DEFINE_CLASS(lutyuv); static av_cold int lutyuv_init(AVFilterContext *ctx) { - LutContext *lut = ctx->priv; + LutContext *s = ctx->priv; - lut->is_yuv = 1; + s->is_yuv = 1; return 0; } @@ -394,9 +394,9 @@ AVFILTER_DEFINE_CLASS(lutrgb); static av_cold int lutrgb_init(AVFilterContext *ctx) { - LutContext *lut = ctx->priv; + LutContext *s = ctx->priv; - lut->is_rgb = 1; + s->is_rgb = 1; return 0; } @@ -415,15 +415,15 @@ AVFILTER_DEFINE_CLASS(negate); static av_cold int negate_init(AVFilterContext *ctx) { - LutContext *lut = ctx->priv; + LutContext *s = ctx->priv; int i; - av_log(ctx, AV_LOG_DEBUG, "negate_alpha:%d\n", lut->negate_alpha); + av_log(ctx, AV_LOG_DEBUG, "negate_alpha:%d\n", s->negate_alpha); for (i = 0; i < 4; i++) { - lut->comp_expr_str[i] = av_strdup((i == 3 && !lut->negate_alpha) ? + s->comp_expr_str[i] = av_strdup((i == 3 && !s->negate_alpha) ? "val" : "negval"); - if (!lut->comp_expr_str[i]) { + if (!s->comp_expr_str[i]) { uninit(ctx); return AVERROR(ENOMEM); } diff --git a/libavfilter/vf_overlay.c b/libavfilter/vf_overlay.c index ec58dc4b66..478d6504fd 100644 --- a/libavfilter/vf_overlay.c +++ b/libavfilter/vf_overlay.c @@ -118,25 +118,25 @@ typedef struct { static av_cold int init(AVFilterContext *ctx) { - OverlayContext *over = ctx->priv; + OverlayContext *s = ctx->priv; - if (over->allow_packed_rgb) { + if (s->allow_packed_rgb) { av_log(ctx, AV_LOG_WARNING, "The rgb option is deprecated and is overriding the format option, use format instead\n"); - over->format = OVERLAY_FORMAT_RGB; + s->format = OVERLAY_FORMAT_RGB; } return 0; } static av_cold void uninit(AVFilterContext *ctx) { - OverlayContext *over = ctx->priv; + OverlayContext *s = ctx->priv; - av_frame_free(&over->overpicref); - ff_bufqueue_discard_all(&over->queue_main); - ff_bufqueue_discard_all(&over->queue_over); - av_expr_free(over->x_pexpr); over->x_pexpr = NULL; - av_expr_free(over->y_pexpr); over->y_pexpr = NULL; + av_frame_free(&s->overpicref); + ff_bufqueue_discard_all(&s->queue_main); + ff_bufqueue_discard_all(&s->queue_over); + av_expr_free(s->x_pexpr); s->x_pexpr = NULL; + av_expr_free(s->y_pexpr); s->y_pexpr = NULL; } static inline int normalize_xy(double d, int chroma_sub) @@ -148,13 +148,13 @@ static inline int normalize_xy(double d, int chroma_sub) static void eval_expr(AVFilterContext *ctx) { - OverlayContext *over = ctx->priv; + OverlayContext *s = ctx->priv; - over->var_values[VAR_X] = av_expr_eval(over->x_pexpr, over->var_values, NULL); - over->var_values[VAR_Y] = av_expr_eval(over->y_pexpr, over->var_values, NULL); - over->var_values[VAR_X] = av_expr_eval(over->x_pexpr, over->var_values, NULL); - over->x = normalize_xy(over->var_values[VAR_X], over->hsub); - over->y = normalize_xy(over->var_values[VAR_Y], over->vsub); + s->var_values[VAR_X] = av_expr_eval(s->x_pexpr, s->var_values, NULL); + s->var_values[VAR_Y] = av_expr_eval(s->y_pexpr, s->var_values, NULL); + s->var_values[VAR_X] = av_expr_eval(s->x_pexpr, s->var_values, NULL); + s->x = normalize_xy(s->var_values[VAR_X], s->hsub); + s->y = normalize_xy(s->var_values[VAR_Y], s->vsub); } static int set_expr(AVExpr **pexpr, const char 
*expr, const char *option, void *log_ctx) @@ -181,31 +181,31 @@ static int set_expr(AVExpr **pexpr, const char *expr, const char *option, void * static int process_command(AVFilterContext *ctx, const char *cmd, const char *args, char *res, int res_len, int flags) { - OverlayContext *over = ctx->priv; + OverlayContext *s = ctx->priv; int ret; if (!strcmp(cmd, "x")) - ret = set_expr(&over->x_pexpr, args, cmd, ctx); + ret = set_expr(&s->x_pexpr, args, cmd, ctx); else if (!strcmp(cmd, "y")) - ret = set_expr(&over->y_pexpr, args, cmd, ctx); + ret = set_expr(&s->y_pexpr, args, cmd, ctx); else ret = AVERROR(ENOSYS); if (ret < 0) return ret; - if (over->eval_mode == EVAL_MODE_INIT) { + if (s->eval_mode == EVAL_MODE_INIT) { eval_expr(ctx); av_log(ctx, AV_LOG_VERBOSE, "x:%f xi:%d y:%f yi:%d\n", - over->var_values[VAR_X], over->x, - over->var_values[VAR_Y], over->y); + s->var_values[VAR_X], s->x, + s->var_values[VAR_Y], s->y); } return ret; } static int query_formats(AVFilterContext *ctx) { - OverlayContext *over = ctx->priv; + OverlayContext *s = ctx->priv; /* overlay formats contains alpha, for avoiding conversion with alpha information loss */ static const enum AVPixelFormat main_pix_fmts_yuv420[] = { @@ -237,7 +237,7 @@ static int query_formats(AVFilterContext *ctx) AVFilterFormats *main_formats; AVFilterFormats *overlay_formats; - switch (over->format) { + switch (s->format) { case OVERLAY_FORMAT_YUV420: main_formats = ff_make_format_list(main_pix_fmts_yuv420); overlay_formats = ff_make_format_list(overlay_pix_fmts_yuv420); @@ -269,56 +269,56 @@ static const enum AVPixelFormat alpha_pix_fmts[] = { static int config_input_main(AVFilterLink *inlink) { - OverlayContext *over = inlink->dst->priv; + OverlayContext *s = inlink->dst->priv; const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(inlink->format); - av_image_fill_max_pixsteps(over->main_pix_step, NULL, pix_desc); + av_image_fill_max_pixsteps(s->main_pix_step, NULL, pix_desc); - over->hsub = pix_desc->log2_chroma_w; - over->vsub = pix_desc->log2_chroma_h; + s->hsub = pix_desc->log2_chroma_w; + s->vsub = pix_desc->log2_chroma_h; - over->main_is_packed_rgb = - ff_fill_rgba_map(over->main_rgba_map, inlink->format) >= 0; - over->main_has_alpha = ff_fmt_is_in(inlink->format, alpha_pix_fmts); + s->main_is_packed_rgb = + ff_fill_rgba_map(s->main_rgba_map, inlink->format) >= 0; + s->main_has_alpha = ff_fmt_is_in(inlink->format, alpha_pix_fmts); return 0; } static int config_input_overlay(AVFilterLink *inlink) { AVFilterContext *ctx = inlink->dst; - OverlayContext *over = inlink->dst->priv; + OverlayContext *s = inlink->dst->priv; int ret; const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(inlink->format); - av_image_fill_max_pixsteps(over->overlay_pix_step, NULL, pix_desc); + av_image_fill_max_pixsteps(s->overlay_pix_step, NULL, pix_desc); /* Finish the configuration by evaluating the expressions now when both inputs are configured. 
*/ - over->var_values[VAR_MAIN_W ] = over->var_values[VAR_MW] = ctx->inputs[MAIN ]->w; - over->var_values[VAR_MAIN_H ] = over->var_values[VAR_MH] = ctx->inputs[MAIN ]->h; - over->var_values[VAR_OVERLAY_W] = over->var_values[VAR_OW] = ctx->inputs[OVERLAY]->w; - over->var_values[VAR_OVERLAY_H] = over->var_values[VAR_OH] = ctx->inputs[OVERLAY]->h; - over->var_values[VAR_HSUB] = 1<<pix_desc->log2_chroma_w; - over->var_values[VAR_VSUB] = 1<<pix_desc->log2_chroma_h; - over->var_values[VAR_X] = NAN; - over->var_values[VAR_Y] = NAN; - over->var_values[VAR_N] = 0; - over->var_values[VAR_T] = NAN; - over->var_values[VAR_POS] = NAN; - - if ((ret = set_expr(&over->x_pexpr, over->x_expr, "x", ctx)) < 0 || - (ret = set_expr(&over->y_pexpr, over->y_expr, "y", ctx)) < 0) + s->var_values[VAR_MAIN_W ] = s->var_values[VAR_MW] = ctx->inputs[MAIN ]->w; + s->var_values[VAR_MAIN_H ] = s->var_values[VAR_MH] = ctx->inputs[MAIN ]->h; + s->var_values[VAR_OVERLAY_W] = s->var_values[VAR_OW] = ctx->inputs[OVERLAY]->w; + s->var_values[VAR_OVERLAY_H] = s->var_values[VAR_OH] = ctx->inputs[OVERLAY]->h; + s->var_values[VAR_HSUB] = 1<<pix_desc->log2_chroma_w; + s->var_values[VAR_VSUB] = 1<<pix_desc->log2_chroma_h; + s->var_values[VAR_X] = NAN; + s->var_values[VAR_Y] = NAN; + s->var_values[VAR_N] = 0; + s->var_values[VAR_T] = NAN; + s->var_values[VAR_POS] = NAN; + + if ((ret = set_expr(&s->x_pexpr, s->x_expr, "x", ctx)) < 0 || + (ret = set_expr(&s->y_pexpr, s->y_expr, "y", ctx)) < 0) return ret; - over->overlay_is_packed_rgb = - ff_fill_rgba_map(over->overlay_rgba_map, inlink->format) >= 0; - over->overlay_has_alpha = ff_fmt_is_in(inlink->format, alpha_pix_fmts); + s->overlay_is_packed_rgb = + ff_fill_rgba_map(s->overlay_rgba_map, inlink->format) >= 0; + s->overlay_has_alpha = ff_fmt_is_in(inlink->format, alpha_pix_fmts); - if (over->eval_mode == EVAL_MODE_INIT) { + if (s->eval_mode == EVAL_MODE_INIT) { eval_expr(ctx); av_log(ctx, AV_LOG_VERBOSE, "x:%f xi:%d y:%f yi:%d\n", - over->var_values[VAR_X], over->x, - over->var_values[VAR_Y], over->y); + s->var_values[VAR_X], s->x, + s->var_values[VAR_Y], s->y); } av_log(ctx, AV_LOG_VERBOSE, @@ -358,7 +358,7 @@ static void blend_image(AVFilterContext *ctx, AVFrame *dst, AVFrame *src, int x, int y) { - OverlayContext *over = ctx->priv; + OverlayContext *s = ctx->priv; int i, imax, j, jmax, k, kmax; const int src_w = src->width; const int src_h = src->height; @@ -369,19 +369,19 @@ static void blend_image(AVFilterContext *ctx, y >= dst_h || y+dst_h < 0) return; /* no intersection */ - if (over->main_is_packed_rgb) { + if (s->main_is_packed_rgb) { uint8_t alpha; ///< the amount of overlay to blend on to main - const int dr = over->main_rgba_map[R]; - const int dg = over->main_rgba_map[G]; - const int db = over->main_rgba_map[B]; - const int da = over->main_rgba_map[A]; - const int dstep = over->main_pix_step[0]; - const int sr = over->overlay_rgba_map[R]; - const int sg = over->overlay_rgba_map[G]; - const int sb = over->overlay_rgba_map[B]; - const int sa = over->overlay_rgba_map[A]; - const int sstep = over->overlay_pix_step[0]; - const int main_has_alpha = over->main_has_alpha; + const int dr = s->main_rgba_map[R]; + const int dg = s->main_rgba_map[G]; + const int db = s->main_rgba_map[B]; + const int da = s->main_rgba_map[A]; + const int dstep = s->main_pix_step[0]; + const int sr = s->overlay_rgba_map[R]; + const int sg = s->overlay_rgba_map[G]; + const int sb = s->overlay_rgba_map[B]; + const int sa = s->overlay_rgba_map[A]; + const int sstep = s->overlay_pix_step[0]; + const int 
main_has_alpha = s->main_has_alpha; uint8_t *s, *sp, *d, *dp; i = FFMAX(-y, 0); @@ -437,7 +437,7 @@ static void blend_image(AVFilterContext *ctx, sp += src->linesize[0]; } } else { - const int main_has_alpha = over->main_has_alpha; + const int main_has_alpha = s->main_has_alpha; if (main_has_alpha) { uint8_t alpha; ///< the amount of overlay to blend on to main uint8_t *s, *sa, *d, *da; @@ -475,8 +475,8 @@ static void blend_image(AVFilterContext *ctx, } } for (i = 0; i < 3; i++) { - int hsub = i ? over->hsub : 0; - int vsub = i ? over->vsub : 0; + int hsub = i ? s->hsub : 0; + int vsub = i ? s->vsub : 0; int src_wp = FF_CEIL_RSHIFT(src_w, hsub); int src_hp = FF_CEIL_RSHIFT(src_h, vsub); int dst_wp = FF_CEIL_RSHIFT(dst_w, hsub); @@ -544,7 +544,7 @@ static void blend_image(AVFilterContext *ctx, static int try_filter_frame(AVFilterContext *ctx, AVFrame *mainpic) { - OverlayContext *over = ctx->priv; + OverlayContext *s = ctx->priv; AVFilterLink *inlink = ctx->inputs[0]; AVFrame *next_overpic; int ret; @@ -552,23 +552,23 @@ static int try_filter_frame(AVFilterContext *ctx, AVFrame *mainpic) /* Discard obsolete overlay frames: if there is a next overlay frame with pts * before the main frame, we can drop the current overlay. */ while (1) { - next_overpic = ff_bufqueue_peek(&over->queue_over, 0); - if (!next_overpic && over->overlay_eof && !over->repeatlast) { - av_frame_free(&over->overpicref); + next_overpic = ff_bufqueue_peek(&s->queue_over, 0); + if (!next_overpic && s->overlay_eof && !s->repeatlast) { + av_frame_free(&s->overpicref); break; } if (!next_overpic || av_compare_ts(next_overpic->pts, ctx->inputs[OVERLAY]->time_base, mainpic->pts , ctx->inputs[MAIN]->time_base) > 0) break; - ff_bufqueue_get(&over->queue_over); - av_frame_free(&over->overpicref); - over->overpicref = next_overpic; + ff_bufqueue_get(&s->queue_over); + av_frame_free(&s->overpicref); + s->overpicref = next_overpic; } /* If there is no next frame and no EOF and the overlay frame is before * the main frame, we can not know yet if it will be superseded. */ - if (!over->queue_over.available && !over->overlay_eof && - (!over->overpicref || av_compare_ts(over->overpicref->pts, ctx->inputs[OVERLAY]->time_base, + if (!s->queue_over.available && !s->overlay_eof && + (!s->overpicref || av_compare_ts(s->overpicref->pts, ctx->inputs[OVERLAY]->time_base, mainpic->pts , ctx->inputs[MAIN]->time_base) < 0)) return AVERROR(EAGAIN); @@ -576,47 +576,47 @@ static int try_filter_frame(AVFilterContext *ctx, AVFrame *mainpic) * time of the main frame. */ av_dlog(ctx, "main_pts:%s main_pts_time:%s", av_ts2str(mainpic->pts), av_ts2timestr(mainpic->pts, &ctx->inputs[MAIN]->time_base)); - if (over->overpicref) + if (s->overpicref) av_dlog(ctx, " over_pts:%s over_pts_time:%s", - av_ts2str(over->overpicref->pts), av_ts2timestr(over->overpicref->pts, &ctx->inputs[OVERLAY]->time_base)); + av_ts2str(s->overpicref->pts), av_ts2timestr(s->overpicref->pts, &ctx->inputs[OVERLAY]->time_base)); av_dlog(ctx, "\n"); - if (over->overpicref) { - if (over->eval_mode == EVAL_MODE_FRAME) { + if (s->overpicref) { + if (s->eval_mode == EVAL_MODE_FRAME) { int64_t pos = av_frame_get_pkt_pos(mainpic); - over->var_values[VAR_N] = inlink->frame_count; - over->var_values[VAR_T] = mainpic->pts == AV_NOPTS_VALUE ? + s->var_values[VAR_N] = inlink->frame_count; + s->var_values[VAR_T] = mainpic->pts == AV_NOPTS_VALUE ? NAN : mainpic->pts * av_q2d(inlink->time_base); - over->var_values[VAR_POS] = pos == -1 ? NAN : pos; + s->var_values[VAR_POS] = pos == -1 ? 
NAN : pos; eval_expr(ctx); av_log(ctx, AV_LOG_DEBUG, "n:%f t:%f pos:%f x:%f xi:%d y:%f yi:%d\n", - over->var_values[VAR_N], over->var_values[VAR_T], over->var_values[VAR_POS], - over->var_values[VAR_X], over->x, - over->var_values[VAR_Y], over->y); + s->var_values[VAR_N], s->var_values[VAR_T], s->var_values[VAR_POS], + s->var_values[VAR_X], s->x, + s->var_values[VAR_Y], s->y); } if (!ctx->is_disabled) - blend_image(ctx, mainpic, over->overpicref, over->x, over->y); + blend_image(ctx, mainpic, s->overpicref, s->x, s->y); } ret = ff_filter_frame(ctx->outputs[0], mainpic); av_assert1(ret != AVERROR(EAGAIN)); - over->frame_requested = 0; + s->frame_requested = 0; return ret; } static int try_filter_next_frame(AVFilterContext *ctx) { - OverlayContext *over = ctx->priv; - AVFrame *next_mainpic = ff_bufqueue_peek(&over->queue_main, 0); + OverlayContext *s = ctx->priv; + AVFrame *next_mainpic = ff_bufqueue_peek(&s->queue_main, 0); int ret; if (!next_mainpic) return AVERROR(EAGAIN); if ((ret = try_filter_frame(ctx, next_mainpic)) == AVERROR(EAGAIN)) return ret; - ff_bufqueue_get(&over->queue_main); + ff_bufqueue_get(&s->queue_main); return ret; } @@ -631,7 +631,7 @@ static int flush_frames(AVFilterContext *ctx) static int filter_frame_main(AVFilterLink *inlink, AVFrame *inpicref) { AVFilterContext *ctx = inlink->dst; - OverlayContext *over = ctx->priv; + OverlayContext *s = ctx->priv; int ret; if ((ret = flush_frames(ctx)) < 0) @@ -639,10 +639,10 @@ static int filter_frame_main(AVFilterLink *inlink, AVFrame *inpicref) if ((ret = try_filter_frame(ctx, inpicref)) < 0) { if (ret != AVERROR(EAGAIN)) return ret; - ff_bufqueue_add(ctx, &over->queue_main, inpicref); + ff_bufqueue_add(ctx, &s->queue_main, inpicref); } - if (!over->overpicref) + if (!s->overpicref) return 0; flush_frames(ctx); @@ -652,12 +652,12 @@ static int filter_frame_main(AVFilterLink *inlink, AVFrame *inpicref) static int filter_frame_over(AVFilterLink *inlink, AVFrame *inpicref) { AVFilterContext *ctx = inlink->dst; - OverlayContext *over = ctx->priv; + OverlayContext *s = ctx->priv; int ret; if ((ret = flush_frames(ctx)) < 0) return ret; - ff_bufqueue_add(ctx, &over->queue_over, inpicref); + ff_bufqueue_add(ctx, &s->queue_over, inpicref); ret = try_filter_next_frame(ctx); return ret == AVERROR(EAGAIN) ? 0 : ret; } @@ -665,22 +665,22 @@ static int filter_frame_over(AVFilterLink *inlink, AVFrame *inpicref) static int request_frame(AVFilterLink *outlink) { AVFilterContext *ctx = outlink->src; - OverlayContext *over = ctx->priv; + OverlayContext *s = ctx->priv; int input, ret; if (!try_filter_next_frame(ctx)) return 0; - over->frame_requested = 1; - while (over->frame_requested) { + s->frame_requested = 1; + while (s->frame_requested) { /* TODO if we had a frame duration, we could guess more accurately */ - input = !over->overlay_eof && (over->queue_main.available || - over->queue_over.available < 2) ? + input = !s->overlay_eof && (s->queue_main.available || + s->queue_over.available < 2) ? 
OVERLAY : MAIN; ret = ff_request_frame(ctx->inputs[input]); /* EOF on main is reported immediately */ if (ret == AVERROR_EOF && input == OVERLAY) { - over->overlay_eof = 1; - if (over->shortest) + s->overlay_eof = 1; + if (s->shortest) return ret; if ((ret = try_filter_next_frame(ctx)) != AVERROR(EAGAIN)) return ret; diff --git a/libavfilter/vf_pad.c b/libavfilter/vf_pad.c index 34076799a6..0a2ab33958 100644 --- a/libavfilter/vf_pad.c +++ b/libavfilter/vf_pad.c @@ -96,9 +96,9 @@ typedef struct { static av_cold int init(AVFilterContext *ctx) { - PadContext *pad = ctx->priv; + PadContext *s = ctx->priv; - if (av_parse_color(pad->rgba_color, pad->color_str, -1, ctx) < 0) + if (av_parse_color(s->rgba_color, s->color_str, -1, ctx) < 0) return AVERROR(EINVAL); return 0; @@ -107,13 +107,13 @@ static av_cold int init(AVFilterContext *ctx) static int config_input(AVFilterLink *inlink) { AVFilterContext *ctx = inlink->dst; - PadContext *pad = ctx->priv; + PadContext *s = ctx->priv; int ret; double var_values[VARS_NB], res; char *expr; - ff_draw_init(&pad->draw, inlink->format, 0); - ff_draw_color(&pad->draw, &pad->color, pad->rgba_color); + ff_draw_init(&s->draw, inlink->format, 0); + ff_draw_color(&s->draw, &s->color, s->rgba_color); var_values[VAR_IN_W] = var_values[VAR_IW] = inlink->w; var_values[VAR_IN_H] = var_values[VAR_IH] = inlink->h; @@ -123,72 +123,72 @@ static int config_input(AVFilterLink *inlink) var_values[VAR_SAR] = inlink->sample_aspect_ratio.num ? (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1; var_values[VAR_DAR] = var_values[VAR_A] * var_values[VAR_SAR]; - var_values[VAR_HSUB] = 1 << pad->draw.hsub_max; - var_values[VAR_VSUB] = 1 << pad->draw.vsub_max; + var_values[VAR_HSUB] = 1 << s->draw.hsub_max; + var_values[VAR_VSUB] = 1 << s->draw.vsub_max; /* evaluate width and height */ - av_expr_parse_and_eval(&res, (expr = pad->w_expr), + av_expr_parse_and_eval(&res, (expr = s->w_expr), var_names, var_values, NULL, NULL, NULL, NULL, NULL, 0, ctx); - pad->w = var_values[VAR_OUT_W] = var_values[VAR_OW] = res; - if ((ret = av_expr_parse_and_eval(&res, (expr = pad->h_expr), + s->w = var_values[VAR_OUT_W] = var_values[VAR_OW] = res; + if ((ret = av_expr_parse_and_eval(&res, (expr = s->h_expr), var_names, var_values, NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0) goto eval_fail; - pad->h = var_values[VAR_OUT_H] = var_values[VAR_OH] = res; + s->h = var_values[VAR_OUT_H] = var_values[VAR_OH] = res; /* evaluate the width again, as it may depend on the evaluated output height */ - if ((ret = av_expr_parse_and_eval(&res, (expr = pad->w_expr), + if ((ret = av_expr_parse_and_eval(&res, (expr = s->w_expr), var_names, var_values, NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0) goto eval_fail; - pad->w = var_values[VAR_OUT_W] = var_values[VAR_OW] = res; + s->w = var_values[VAR_OUT_W] = var_values[VAR_OW] = res; /* evaluate x and y */ - av_expr_parse_and_eval(&res, (expr = pad->x_expr), + av_expr_parse_and_eval(&res, (expr = s->x_expr), var_names, var_values, NULL, NULL, NULL, NULL, NULL, 0, ctx); - pad->x = var_values[VAR_X] = res; - if ((ret = av_expr_parse_and_eval(&res, (expr = pad->y_expr), + s->x = var_values[VAR_X] = res; + if ((ret = av_expr_parse_and_eval(&res, (expr = s->y_expr), var_names, var_values, NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0) goto eval_fail; - pad->y = var_values[VAR_Y] = res; + s->y = var_values[VAR_Y] = res; /* evaluate x again, as it may depend on the evaluated y value */ - if ((ret = av_expr_parse_and_eval(&res, (expr = pad->x_expr), + if ((ret = 
av_expr_parse_and_eval(&res, (expr = s->x_expr), var_names, var_values, NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0) goto eval_fail; - pad->x = var_values[VAR_X] = res; + s->x = var_values[VAR_X] = res; /* sanity check params */ - if (pad->w < 0 || pad->h < 0 || pad->x < 0 || pad->y < 0) { + if (s->w < 0 || s->h < 0 || s->x < 0 || s->y < 0) { av_log(ctx, AV_LOG_ERROR, "Negative values are not acceptable.\n"); return AVERROR(EINVAL); } - if (!pad->w) - pad->w = inlink->w; - if (!pad->h) - pad->h = inlink->h; + if (!s->w) + s->w = inlink->w; + if (!s->h) + s->h = inlink->h; - pad->w = ff_draw_round_to_sub(&pad->draw, 0, -1, pad->w); - pad->h = ff_draw_round_to_sub(&pad->draw, 1, -1, pad->h); - pad->x = ff_draw_round_to_sub(&pad->draw, 0, -1, pad->x); - pad->y = ff_draw_round_to_sub(&pad->draw, 1, -1, pad->y); - pad->in_w = ff_draw_round_to_sub(&pad->draw, 0, -1, inlink->w); - pad->in_h = ff_draw_round_to_sub(&pad->draw, 1, -1, inlink->h); + s->w = ff_draw_round_to_sub(&s->draw, 0, -1, s->w); + s->h = ff_draw_round_to_sub(&s->draw, 1, -1, s->h); + s->x = ff_draw_round_to_sub(&s->draw, 0, -1, s->x); + s->y = ff_draw_round_to_sub(&s->draw, 1, -1, s->y); + s->in_w = ff_draw_round_to_sub(&s->draw, 0, -1, inlink->w); + s->in_h = ff_draw_round_to_sub(&s->draw, 1, -1, inlink->h); av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d -> w:%d h:%d x:%d y:%d color:0x%02X%02X%02X%02X\n", - inlink->w, inlink->h, pad->w, pad->h, pad->x, pad->y, - pad->rgba_color[0], pad->rgba_color[1], pad->rgba_color[2], pad->rgba_color[3]); + inlink->w, inlink->h, s->w, s->h, s->x, s->y, + s->rgba_color[0], s->rgba_color[1], s->rgba_color[2], s->rgba_color[3]); - if (pad->x < 0 || pad->y < 0 || - pad->w <= 0 || pad->h <= 0 || - (unsigned)pad->x + (unsigned)inlink->w > pad->w || - (unsigned)pad->y + (unsigned)inlink->h > pad->h) { + if (s->x < 0 || s->y < 0 || + s->w <= 0 || s->h <= 0 || + (unsigned)s->x + (unsigned)inlink->w > s->w || + (unsigned)s->y + (unsigned)inlink->h > s->h) { av_log(ctx, AV_LOG_ERROR, "Input area %d:%d:%d:%d not within the padded area 0:0:%d:%d or zero-sized\n", - pad->x, pad->y, pad->x + inlink->w, pad->y + inlink->h, pad->w, pad->h); + s->x, s->y, s->x + inlink->w, s->y + inlink->h, s->w, s->h); return AVERROR(EINVAL); } @@ -203,20 +203,20 @@ eval_fail: static int config_output(AVFilterLink *outlink) { - PadContext *pad = outlink->src->priv; + PadContext *s = outlink->src->priv; - outlink->w = pad->w; - outlink->h = pad->h; + outlink->w = s->w; + outlink->h = s->h; return 0; } static AVFrame *get_video_buffer(AVFilterLink *inlink, int w, int h) { - PadContext *pad = inlink->dst->priv; + PadContext *s = inlink->dst->priv; AVFrame *frame = ff_get_video_buffer(inlink->dst->outputs[0], - w + (pad->w - pad->in_w), - h + (pad->h - pad->in_h)); + w + (s->w - s->in_w), + h + (s->h - s->in_h)); int plane; if (!frame) @@ -226,10 +226,10 @@ static AVFrame *get_video_buffer(AVFilterLink *inlink, int w, int h) frame->height = h; for (plane = 0; plane < 4 && frame->data[plane]; plane++) { - int hsub = pad->draw.hsub[plane]; - int vsub = pad->draw.vsub[plane]; - frame->data[plane] += (pad->x >> hsub) * pad->draw.pixelstep[plane] + - (pad->y >> vsub) * frame->linesize[plane]; + int hsub = s->draw.hsub[plane]; + int vsub = s->draw.vsub[plane]; + frame->data[plane] += (s->x >> hsub) * s->draw.pixelstep[plane] + + (s->y >> vsub) * frame->linesize[plane]; } return frame; @@ -304,15 +304,15 @@ static int frame_needs_copy(PadContext *s, AVFrame *frame) static int filter_frame(AVFilterLink *inlink, AVFrame *in) { - PadContext *pad = 
inlink->dst->priv; + PadContext *s = inlink->dst->priv; AVFrame *out; - int needs_copy = frame_needs_copy(pad, in); + int needs_copy = frame_needs_copy(s, in); if (needs_copy) { av_log(inlink->dst, AV_LOG_DEBUG, "Direct padding impossible allocating new frame\n"); out = ff_get_video_buffer(inlink->dst->outputs[0], - FFMAX(inlink->w, pad->w), - FFMAX(inlink->h, pad->h)); + FFMAX(inlink->w, s->w), + FFMAX(inlink->h, s->h)); if (!out) { av_frame_free(&in); return AVERROR(ENOMEM); @@ -324,44 +324,44 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in) out = in; for (i = 0; i < 4 && out->data[i]; i++) { - int hsub = pad->draw.hsub[i]; - int vsub = pad->draw.vsub[i]; - out->data[i] -= (pad->x >> hsub) * pad->draw.pixelstep[i] + - (pad->y >> vsub) * out->linesize[i]; + int hsub = s->draw.hsub[i]; + int vsub = s->draw.vsub[i]; + out->data[i] -= (s->x >> hsub) * s->draw.pixelstep[i] + + (s->y >> vsub) * out->linesize[i]; } } /* top bar */ - if (pad->y) { - ff_fill_rectangle(&pad->draw, &pad->color, + if (s->y) { + ff_fill_rectangle(&s->draw, &s->color, out->data, out->linesize, - 0, 0, pad->w, pad->y); + 0, 0, s->w, s->y); } /* bottom bar */ - if (pad->h > pad->y + pad->in_h) { - ff_fill_rectangle(&pad->draw, &pad->color, + if (s->h > s->y + s->in_h) { + ff_fill_rectangle(&s->draw, &s->color, out->data, out->linesize, - 0, pad->y + pad->in_h, pad->w, pad->h - pad->y - pad->in_h); + 0, s->y + s->in_h, s->w, s->h - s->y - s->in_h); } /* left border */ - ff_fill_rectangle(&pad->draw, &pad->color, out->data, out->linesize, - 0, pad->y, pad->x, in->height); + ff_fill_rectangle(&s->draw, &s->color, out->data, out->linesize, + 0, s->y, s->x, in->height); if (needs_copy) { - ff_copy_rectangle2(&pad->draw, + ff_copy_rectangle2(&s->draw, out->data, out->linesize, in->data, in->linesize, - pad->x, pad->y, 0, 0, in->width, in->height); + s->x, s->y, 0, 0, in->width, in->height); } /* right border */ - ff_fill_rectangle(&pad->draw, &pad->color, out->data, out->linesize, - pad->x + pad->in_w, pad->y, pad->w - pad->x - pad->in_w, + ff_fill_rectangle(&s->draw, &s->color, out->data, out->linesize, + s->x + s->in_w, s->y, s->w - s->x - s->in_w, in->height); - out->width = pad->w; - out->height = pad->h; + out->width = s->w; + out->height = s->h; if (in != out) av_frame_free(&in); |
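
One detail worth calling out from the vf_overlay hunks: the evaluated x/y expressions are passed through normalize_xy() together with the main input's hsub/vsub, so the overlay position ends up aligned to the chroma grid of the pixel format. A hypothetical helper sketching that idea; the name, the INT_MAX sentinel and the exact rounding are illustrative assumptions, not the filter's actual normalize_xy() body, which is not shown in this diff:

    #include <limits.h>
    #include <math.h>

    /* Round a pixel coordinate down to a chroma-sample boundary.
     * log2_chroma corresponds to hsub (horizontal) or vsub (vertical);
     * for YUV 4:2:0 both are 1, so x and y are forced to even values.
     * NOTE: illustrative sketch only, not FFmpeg's implementation. */
    static int round_to_chroma_grid(double coord, int log2_chroma)
    {
        if (isnan(coord))
            return INT_MAX;   /* propagate "position not known yet" */
        return (int)coord & ~((1 << log2_chroma) - 1);
    }

Keeping the overlay position on the chroma grid avoids having to blend at fractional sample offsets in the subsampled U and V planes.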