143     return (int)d & ~((1 << chroma_sub) - 1);
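The mask in this return statement snaps the rounded coordinate down to a multiple of the chroma subsampling factor, so the overlay offset stays aligned with the chroma grid. A minimal sketch of just that masking step (the surrounding function is not reproduced here, and the helper name is illustrative):

/* Sketch only: with chroma_sub = 1 (4:2:0 / 4:2:2), ~((1 << 1) - 1) == ~1
 * clears the low bit, so 101 -> 100 and -3 -> -4; the coordinate is rounded
 * toward negative infinity onto the 2-pixel chroma grid. */
static int align_to_chroma_grid(int coord, int chroma_sub)
{
    return coord & ~((1 << chroma_sub) - 1);
}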
169 "Error when evaluating the expression '%s' for %s\n",
180                            char *res, int res_len, int flags)
185     if (!strcmp(cmd, "x"))
187     else if (!strcmp(cmd, "y"))
220 static const enum AVPixelFormat overlay_pix_fmts_yuv420[] = {
227 static const enum AVPixelFormat overlay_pix_fmts_yuv422[] = {
234 static const enum AVPixelFormat overlay_pix_fmts_yuv444[] = {
368 "main w:%d h:%d fmt:%s overlay w:%d h:%d fmt:%s\n",
394 #define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)
400 #define UNPREMULTIPLY_ALPHA(x, y) ((((x) << 16) - ((x) << 9) + (x)) / ((((x) + (y)) << 8) - ((x) + (y)) - (y) * (x)))
412     int i, imax, j, jmax;
413     const int src_w = src->width;
414     const int src_h = src->height;
415     const int dst_w = dst->width;
416     const int dst_h = dst->height;
434     for (imax = FFMIN(-y + dst_h, src_h); i < imax; i++) {
437 d = dp + (x+j) * dstep;
439         for (jmax = FFMIN(-x + dst_w, src_w); j < jmax; j++) {
444 if (main_has_alpha && alpha != 0 && alpha != 255) {
460                 d[dr] = is_straight ? FAST_DIV255(d[dr] * (255 - alpha) + S[sr] * alpha) :
462                 d[dg] = is_straight ? FAST_DIV255(d[dg] * (255 - alpha) + S[sg] * alpha) :
464                 d[db] = is_straight ? FAST_DIV255(d[db] * (255 - alpha) + S[sb] * alpha) :
467 if (main_has_alpha) {
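FAST_DIV255, defined at line 394 above, replaces the division by 255 in the per-channel blend d = (d*(255 - alpha) + s*alpha) / 255 with a multiply and a shift; for every value a blend term can take (0 to 255*255) it matches the exactly rounded quotient. A small self-contained check, reusing the macro from the listing:

#include <assert.h>
#include <stdio.h>

#define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)

int main(void)
{
    /* Verify the approximation against exact rounded division over the
     * whole range a blend term can take (0 .. 255*255). */
    for (int x = 0; x <= 255 * 255; x++)
        assert(FAST_DIV255(x) == (x + 127) / 255);

    /* Straight-alpha blend of one channel: d = 200, s = 50, alpha = 64. */
    int d = 200, s = 50, alpha = 64;
    printf("%d\n", FAST_DIV255(d * (255 - alpha) + s * alpha)); /* prints 162 */
    return 0;
}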
489                                        int src_w, int src_h,
490                                        int dst_w, int dst_h,
507 int jmax, j, k, kmax;
511     dp = dst->data[dst_plane]
515         dap = dst->data[3] + ((yp+j) << vsub) * dst->linesize[3];
517     for (jmax = FFMIN(-yp + dst_hp, src_hp); j < jmax; j++) {
519 d = dp + (xp+k) * dst_step;
522 da = dap + ((xp+k) << hsub);
524         for (kmax = FFMIN(-xp + dst_wp, src_wp); k < kmax; k++) {
525             int alpha_v, alpha_h, alpha;
528 if (hsub && vsub && j+1 < src_hp && k+1 < src_wp) {
529                 alpha = (a[0] + a[src->linesize[3]] +
531             } else if (hsub || vsub) {
532 alpha_h = hsub && k+1 < src_wp ?
533 (a[0] + a[1]) >> 1 : a[0];
534 alpha_v = vsub && j+1 < src_hp ?
535                           (a[0] + a[src->linesize[3]]) >> 1 : a[0];
536 alpha = (alpha_v + alpha_h) >> 1;
541 if (main_has_alpha && alpha != 0 && alpha != 255) {
544 if (hsub && vsub && j+1 < src_hp && k+1 < src_wp) {
545                     alpha_d = (da[0] + da[dst->linesize[3]] +
546                                da[1] + da[dst->linesize[3]+1]) >> 2;
547                 } else if (hsub || vsub) {
548 alpha_h = hsub && k+1 < src_wp ?
549 (da[0] + da[1]) >> 1 : da[0];
550 alpha_v = vsub && j+1 < src_hp ?
551                               (da[0] + da[dst->linesize[3]]) >> 1 : da[0];
552 alpha_d = (alpha_v + alpha_h) >> 1;
561                 *d = av_clip(FAST_DIV255((*d - 128) * (255 - alpha)) + *s - 128, -128, 128) + 128;
573         dap += (1 << vsub) * dst->linesize[3];
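Lines 528-536 above reduce the full-resolution overlay alpha to the chroma grid: when the plane is subsampled, the alpha applied to one chroma sample is the average of the 2 or 4 full-resolution alpha samples it covers, with fallbacks at the right and bottom edges. A minimal sketch of that reduction, where have_right and have_below stand in for the k+1 < src_wp and j+1 < src_hp bounds checks:

/* Sketch: average the overlay alpha samples that one subsampled chroma
 * sample covers; a points into the alpha plane, linesize is its stride. */
static int subsampled_alpha(const unsigned char *a, int linesize,
                            int hsub, int vsub, int have_right, int have_below)
{
    if (hsub && vsub && have_right && have_below)           /* full 2x2 block */
        return (a[0] + a[linesize] + a[1] + a[linesize + 1]) >> 2;
    if (hsub || vsub) {                                     /* 2x1 or 1x2 */
        int alpha_h = hsub && have_right ? (a[0] + a[1]) >> 1 : a[0];
        int alpha_v = vsub && have_below ? (a[0] + a[linesize]) >> 1 : a[0];
        return (alpha_v + alpha_h) >> 1;
    }
    return a[0];                                            /* no subsampling */
}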
578                             int src_w, int src_h,
579                             int dst_w, int dst_h,
584 int i, imax, j, jmax;
590     for (imax = FFMIN(-y + dst_h, src_h); i < imax; i++) {
595         for (jmax = FFMIN(-x + dst_w, src_w); j < jmax; j++) {
597 if (alpha != 0 && alpha != 255) {
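alpha_composite() (parameter lines 578-579 above) merges the overlay's alpha plane into the main frame's alpha plane; the guard at line 597 singles out the partially transparent samples, which are the only ones that need real blending arithmetic. For those, alpha compositing follows the standard 'over' rule, a_out = a_main + (1 - a_main) * a_overlay. A minimal 8-bit sketch of that rule, not the function's exact body:

#define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)

/* 'over' composite of two 8-bit alpha values (255 == fully opaque). */
static unsigned char composite_alpha(unsigned char a_main, unsigned char a_overlay)
{
    return a_main + FAST_DIV255((255 - a_main) * a_overlay);
}
/* composite_alpha(128, 128) == 192: two half-opaque layers are ~75% opaque. */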
627     const int src_w = src->width;
628     const int src_h = src->height;
629     const int dst_w = dst->width;
630     const int dst_h = dst->height;
632 blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 0, 0, 0, x, y, main_has_alpha,
634 blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 1, hsub, vsub, x, y, main_has_alpha,
636 blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 2, hsub, vsub, x, y, main_has_alpha,
651     const int src_w = src->width;
652     const int src_h = src->height;
653     const int dst_w = dst->width;
654     const int dst_h = dst->height;
656 blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 0, 0, 0, x, y, main_has_alpha,
658 blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 1, hsub, vsub, x, y, main_has_alpha,
660 blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 2, hsub, vsub, x, y, main_has_alpha,
888     int64_t pos = mainpic->pkt_pos;
907     if (s->x < mainpic->width && s->x + second->width >= 0 ||
927 #define OFFSET(x) offsetof(OverlayContext, x)
928 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
933     { "eof_action", "Action to take when encountering EOF from secondary input ",
985 .preinit = overlay_framesync_preinit,
989 .priv_class = &overlay_class,
993     .inputs        = avfilter_vf_overlay_inputs,
994     .outputs       = avfilter_vf_overlay_outputs,
static int activate(AVFilterContext *ctx)
int plane
Which of the 4 planes contains the component.
static void blend_image_yuva420_pm(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
static float alpha(float a)
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
This structure describes decoded (raw) audio or video data.
int64_t pkt_pos
reordered pos from the last AVPacket that has been input into the decoder
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Main libavfilter public API header.
packed RGB 8:8:8, 24bpp, RGBRGB...
int h
agreed upon image height
const AVPixFmtDescriptor * main_desc
format descriptor for main input
static const AVFilterPad avfilter_vf_overlay_inputs[]
static void blend_image_rgb_pm(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
int ff_framesync_configure(FFFrameSync *fs)
Configure a frame sync structure.
int av_expr_parse(AVExpr **expr, const char *s, const char *const *const_names, const char *const *func1_names, double(*const *funcs1)(void *, double), const char *const *func2_names, double(*const *funcs2)(void *, double, double), int log_offset, void *log_ctx)
Parse an expression.
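The overlay filter's x and y options go through this libavutil expression API (see also av_expr_eval() and av_expr_free() below). A minimal, self-contained sketch of the parse/eval/free cycle; the two constant names mirror the kind of names the filter exposes, but the list here is illustrative and error handling is reduced to a bare check:

#include <stdio.h>
#include "libavutil/eval.h"

int main(void)
{
    static const char *const names[] = { "main_w", "overlay_w", NULL };
    double values[] = { 1920, 320 };   /* values in the same order as names */
    AVExpr *e = NULL;

    if (av_expr_parse(&e, "main_w - overlay_w - 10", names,
                      NULL, NULL, NULL, NULL, 0, NULL) < 0)
        return 1;

    printf("x = %f\n", av_expr_eval(e, values, NULL));  /* 1590.000000 */
    av_expr_free(e);
    return 0;
}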
uint8_t log2_chroma_w
Amount to shift the luma width right to find the chroma width.
void av_image_fill_max_pixsteps(int max_pixsteps[4], int max_pixstep_comps[4], const AVPixFmtDescriptor *pixdesc)
Compute the max pixel step for each plane of an image with a format described by pixdesc.
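The input configuration of the filter uses this helper to derive the per-plane byte step of the negotiated pixel formats (the main_pix_step/overlay_pix_step arrays listed below). A small sketch printing the steps for a packed and a planar format:

#include <stdio.h>
#include "libavutil/imgutils.h"
#include "libavutil/pixdesc.h"

static void print_steps(enum AVPixelFormat fmt)
{
    int steps[4], comps[4];
    av_image_fill_max_pixsteps(steps, comps, av_pix_fmt_desc_get(fmt));
    printf("%-8s steps: %d %d %d %d\n", av_get_pix_fmt_name(fmt),
           steps[0], steps[1], steps[2], steps[3]);
}

int main(void)
{
    print_steps(AV_PIX_FMT_RGBA);     /* packed: 4 0 0 0 */
    print_steps(AV_PIX_FMT_YUV420P);  /* planar: 1 1 1 0 */
    return 0;
}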
const char * name
Pad name.
AVFilterContext * parent
Parent filter context.
AVFilterLink ** inputs
array of pointers to input links
static void blend_image_yuv422(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
#define av_assert0(cond)
assert() equivalent, that is always enabled.
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
AVComponentDescriptor comp[4]
Parameters that describe how pixels are packed.
static void blend_image_yuv444(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
timestamp utils, mostly useful for debugging/logging purposes
static const char *const var_names[]
int ff_framesync_init_dualinput(FFFrameSync *fs, AVFilterContext *parent)
Initialize a frame sync structure for dualinput.
double var_values[VAR_VARS_NB]
int ff_framesync_dualinput_get_writable(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
Same as ff_framesync_dualinput_get(), but make sure that f0 is writable.
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
uint8_t overlay_rgba_map[4]
static double av_q2d(AVRational a)
Convert an AVRational to a double.
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
static void blend_image_gbrap(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
#define AV_LOG_VERBOSE
Detailed information.
static void blend_image_yuva444(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
static void blend_image_yuva422_pm(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
A filter pad used for either input or output.
A link between two filters.
planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define UNPREMULTIPLY_ALPHA(x, y)
uint8_t log2_chroma_h
Amount to shift the luma height right to find the chroma height.
void ff_framesync_uninit(FFFrameSync *fs)
Free all memory currently allocated.
void(* blend_image)(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
static void alpha_composite(const AVFrame *src, const AVFrame *dst, int src_w, int src_h, int dst_w, int dst_h, int x, int y)
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
static void blend_image_yuv420_pm(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
void * priv
private data for use by the filter
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
AVRational time_base
Define the time base used by the PTS of the frames/samples which will pass through this link...
static int config_input_overlay(AVFilterLink *inlink)
int ff_framesync_activate(FFFrameSync *fs)
Examine the frames in the filter's input and try to produce output.
AVFilterFormats * in_formats
Lists of formats and channel layouts supported by the input and output filters respectively.
int(* on_event)(struct FFFrameSync *fs)
Callback called when a frame event is ready.
packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
int w
agreed upon image width
as above, but U and V bytes are swapped
static void blend_image_yuv422_pm(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
static const AVFilterPad avfilter_vf_overlay_outputs[]
static void blend_image_yuva444_pm(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
static av_always_inline void blend_image_planar_rgb(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int hsub, int vsub, int main_has_alpha, int x, int y, int is_straight)
packed RGB 8:8:8, 24bpp, BGRBGR...
AVFilterContext * src
source filter
static av_cold int init(AVFilterContext *ctx)
static const AVFilterPad inputs[]
static void blend_image_yuva422(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
int main_pix_step[4]
steps per pixel for each plane of the main output
static const AVFilterPad outputs[]
int format
agreed upon media format
int ff_fill_rgba_map(uint8_t *rgba_map, enum AVPixelFormat pix_fmt)
static const AVOption overlay_options[]
static void blend_image_rgba_pm(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
typedef void(RENAME(mix_any_func_type))
void av_expr_free(AVExpr *e)
Free a parsed expression previously created with av_expr_parse().
static av_cold void uninit(AVFilterContext *ctx)
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
static int set_expr(AVExpr **pexpr, const char *expr, const char *option, void *log_ctx)
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
int y
position of overlaid picture
static av_always_inline void blend_plane(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int src_w, int src_h, int dst_w, int dst_h, int i, int hsub, int vsub, int x, int y, int main_has_alpha, int dst_plane, int dst_offset, int dst_step, int straight, int yuv)
Describe the class of an AVClass context structure.
static void blend_image_rgb(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
uint8_t overlay_has_alpha
uint8_t overlay_is_packed_rgb
const char * name
Filter name.
static void blend_image_gbrp_pm(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
static int query_formats(AVFilterContext *ctx)
#define AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL
Same as AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC, except that the filter will have its filter_frame() c...
int overlay_pix_step[4]
steps per pixel for each plane of the overlay
AVFilterLink ** outputs
array of pointers to output links
static void blend_image_yuva420(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
int offset
Number of elements before the component of the first pixel.
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
static void blend_image_rgba(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
static int normalize_xy(double d, int chroma_sub)
static int config_input_main(AVFilterLink *inlink)
static av_always_inline void blend_image_yuv(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int hsub, int vsub, int main_has_alpha, int x, int y, int is_straight)
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
common internal and external API header
planar GBRA 4:4:4:4 32bpp
static void blend_image_yuv420(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
int vsub
chroma subsampling values
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
double av_expr_eval(AVExpr *e, const double *const_values, void *opaque)
Evaluate a previously parsed expression.
AVFilterContext * dst
dest filter
static void blend_image_gbrap_pm(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
static int config_output(AVFilterLink *outlink)
static void blend_image_yuv444_pm(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
uint8_t main_is_packed_rgb
static int process_command(AVFilterContext *ctx, const char *cmd, const char *args, char *res, int res_len, int flags)
FRAMESYNC_DEFINE_CLASS(overlay, OverlayContext, fs)
static av_always_inline void blend_image_packed_rgb(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int main_has_alpha, int x, int y, int is_straight)
Blend image in src to destination buffer dst at position (x, y).
static void eval_expr(AVFilterContext *ctx)
static int do_blend(FFFrameSync *fs)
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
AVPixelFormat
Pixel format.
static void blend_image_gbrp(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
#define AV_NOPTS_VALUE
Undefined timestamp value.
static enum AVPixelFormat alpha_pix_fmts[]
AVFilterFormats * out_formats
int step
Number of elements between 2 horizontally consecutive pixels.
simple arithmetic expression evaluator
#define AV_CEIL_RSHIFT(a, b)
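AV_CEIL_RSHIFT is a right shift that rounds up rather than down, which is what the subsampled-plane dimensions (via the log2_chroma_w/log2_chroma_h shifts above) need so that odd-sized frames do not lose their last chroma column or row. A stand-alone equivalent for non-negative values and a worked example; the real macro lives in libavutil:

#include <stdio.h>

/* Stand-in with the same effect as AV_CEIL_RSHIFT for non-negative a:
 * divide by 2^b, rounding the quotient up instead of down. */
#define CEIL_RSHIFT(a, b) (((a) + (1 << (b)) - 1) >> (b))

int main(void)
{
    /* Chroma width of a 1919-pixel-wide 4:2:0 frame (log2_chroma_w == 1). */
    printf("%d\n", CEIL_RSHIFT(1919, 1));  /* 960, not 959 */
    return 0;
}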