FFmpeg 4.0
vf_displace.c
/*
 * Copyright (c) 2013 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/imgutils.h"
#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
#include "avfilter.h"
#include "formats.h"
#include "framesync.h"
#include "internal.h"
#include "video.h"

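/*
 * Overview (summary of the code below): the filter takes three video inputs,
 * the source frame and two displacement maps (xmap, ymap). For every output
 * pixel it looks up the source at a displaced coordinate:
 *
 *     dst(x, y) = src(x + xmap(x, y) - 128, y + ymap(x, y) - 128)
 *
 * so a map value of 128 means "no displacement"; values above 128 fetch the
 * sample from further right/down, values below 128 from further left/up.
 * When the displaced coordinate falls outside the frame, the selected edge
 * mode decides how it is resolved.
 */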
enum EdgeMode {
    EDGE_BLANK,
    EDGE_SMEAR,
    EDGE_WRAP,
    EDGE_MIRROR,
    EDGE_NB
};

typedef struct DisplaceContext {
    const AVClass *class;
    int width[4], height[4];
    enum EdgeMode edge;
    int nb_planes;
    int nb_components;
    int step;
    uint8_t blank[4];
    FFFrameSync fs;

    void (*displace)(struct DisplaceContext *s, const AVFrame *in,
                     const AVFrame *xpic, const AVFrame *ypic, AVFrame *out);
} DisplaceContext;

#define OFFSET(x) offsetof(DisplaceContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM

static const AVOption displace_options[] = {
    { "edge", "set edge mode", OFFSET(edge), AV_OPT_TYPE_INT, {.i64=EDGE_SMEAR}, 0, EDGE_NB-1, FLAGS, "edge" },
    { "blank",  "", 0, AV_OPT_TYPE_CONST, {.i64=EDGE_BLANK},  0, 0, FLAGS, "edge" },
    { "smear",  "", 0, AV_OPT_TYPE_CONST, {.i64=EDGE_SMEAR},  0, 0, FLAGS, "edge" },
    { "wrap",   "", 0, AV_OPT_TYPE_CONST, {.i64=EDGE_WRAP},   0, 0, FLAGS, "edge" },
    { "mirror", "", 0, AV_OPT_TYPE_CONST, {.i64=EDGE_MIRROR}, 0, 0, FLAGS, "edge" },
    { NULL }
};

AVFILTER_DEFINE_CLASS(displace);

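/*
 * All formats advertised in query_formats() below are 8 bits per component;
 * both remap paths read the displacement maps as uint8_t with 128 as the
 * neutral value.
 */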
static int query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV440P,
        AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUVA420P,
        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ440P,
        AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ411P,
        AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV410P,
        AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRAP,
        AV_PIX_FMT_RGBA, AV_PIX_FMT_BGRA, AV_PIX_FMT_ARGB, AV_PIX_FMT_ABGR,
        AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24, AV_PIX_FMT_RGB0,
        AV_PIX_FMT_BGR0, AV_PIX_FMT_0RGB, AV_PIX_FMT_0BGR, AV_PIX_FMT_GRAY8,
        AV_PIX_FMT_NONE
    };

    return ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));
}

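/*
 * Planar path: each plane is remapped independently using the matching plane
 * of the x/y maps (so chroma planes are displaced by the maps' chroma
 * planes). Out-of-frame coordinates are handled per the edge mode:
 * EDGE_BLANK writes the blank value, EDGE_SMEAR clamps to the nearest edge,
 * EDGE_WRAP wraps around with a modulo, EDGE_MIRROR reflects back inside.
 */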
static void displace_planar(DisplaceContext *s, const AVFrame *in,
                            const AVFrame *xpic, const AVFrame *ypic,
                            AVFrame *out)
{
    int plane, x, y;

    for (plane = 0; plane < s->nb_planes; plane++) {
        const int h = s->height[plane];
        const int w = s->width[plane];
        const int dlinesize = out->linesize[plane];
        const int slinesize = in->linesize[plane];
        const int xlinesize = xpic->linesize[plane];
        const int ylinesize = ypic->linesize[plane];
        const uint8_t *src = in->data[plane];
        const uint8_t *ysrc = ypic->data[plane];
        const uint8_t *xsrc = xpic->data[plane];
        uint8_t *dst = out->data[plane];
        const uint8_t blank = s->blank[plane];

        for (y = 0; y < h; y++) {
            switch (s->edge) {
            case EDGE_BLANK:
                for (x = 0; x < w; x++) {
                    int Y = y + ysrc[x] - 128;
                    int X = x + xsrc[x] - 128;

                    if (Y < 0 || Y >= h || X < 0 || X >= w)
                        dst[x] = blank;
                    else
                        dst[x] = src[Y * slinesize + X];
                }
                break;
            case EDGE_SMEAR:
                for (x = 0; x < w; x++) {
                    int Y = av_clip(y + ysrc[x] - 128, 0, h - 1);
                    int X = av_clip(x + xsrc[x] - 128, 0, w - 1);
                    dst[x] = src[Y * slinesize + X];
                }
                break;
            case EDGE_WRAP:
                for (x = 0; x < w; x++) {
                    int Y = (y + ysrc[x] - 128) % h;
                    int X = (x + xsrc[x] - 128) % w;

                    if (Y < 0)
                        Y += h;
                    if (X < 0)
                        X += w;
                    dst[x] = src[Y * slinesize + X];
                }
                break;
            case EDGE_MIRROR:
                for (x = 0; x < w; x++) {
                    int Y = y + ysrc[x] - 128;
                    int X = x + xsrc[x] - 128;

                    if (Y < 0)
                        Y = (-Y) % h;
                    if (X < 0)
                        X = (-X) % w;
                    if (Y >= h)
                        Y = h - (Y % h) - 1;
                    if (X >= w)
                        X = w - (X % w) - 1;
                    dst[x] = src[Y * slinesize + X];
                }
                break;
            }

            ysrc += ylinesize;
            xsrc += xlinesize;
            dst += dlinesize;
        }
    }
}

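/*
 * Packed path: a single interleaved plane is remapped component by
 * component; 'step' is the number of bytes per packed pixel (set in
 * config_input() from av_get_padded_bits_per_pixel()), so component c of
 * the pixel at column x lives at offset x * step + c.
 */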
static void displace_packed(DisplaceContext *s, const AVFrame *in,
                            const AVFrame *xpic, const AVFrame *ypic,
                            AVFrame *out)
{
    const int step = s->step;
    const int h = s->height[0];
    const int w = s->width[0];
    const int dlinesize = out->linesize[0];
    const int slinesize = in->linesize[0];
    const int xlinesize = xpic->linesize[0];
    const int ylinesize = ypic->linesize[0];
    const uint8_t *src = in->data[0];
    const uint8_t *ysrc = ypic->data[0];
    const uint8_t *xsrc = xpic->data[0];
    const uint8_t *blank = s->blank;
    uint8_t *dst = out->data[0];
    int c, x, y;

    for (y = 0; y < h; y++) {
        switch (s->edge) {
        case EDGE_BLANK:
            for (x = 0; x < w; x++) {
                for (c = 0; c < s->nb_components; c++) {
                    int Y = y + (ysrc[x * step + c] - 128);
                    int X = x + (xsrc[x * step + c] - 128);

                    if (Y < 0 || Y >= h || X < 0 || X >= w)
                        dst[x * step + c] = blank[c];
                    else
                        dst[x * step + c] = src[Y * slinesize + X * step + c];
                }
            }
            break;
        case EDGE_SMEAR:
            for (x = 0; x < w; x++) {
                for (c = 0; c < s->nb_components; c++) {
                    int Y = av_clip(y + (ysrc[x * step + c] - 128), 0, h - 1);
                    int X = av_clip(x + (xsrc[x * step + c] - 128), 0, w - 1);

                    dst[x * step + c] = src[Y * slinesize + X * step + c];
                }
            }
            break;
        case EDGE_WRAP:
            for (x = 0; x < w; x++) {
                for (c = 0; c < s->nb_components; c++) {
                    int Y = (y + (ysrc[x * step + c] - 128)) % h;
                    int X = (x + (xsrc[x * step + c] - 128)) % w;

                    if (Y < 0)
                        Y += h;
                    if (X < 0)
                        X += w;
                    dst[x * step + c] = src[Y * slinesize + X * step + c];
                }
            }
            break;
        case EDGE_MIRROR:
            for (x = 0; x < w; x++) {
                for (c = 0; c < s->nb_components; c++) {
                    int Y = y + ysrc[x * step + c] - 128;
                    int X = x + xsrc[x * step + c] - 128;

                    if (Y < 0)
                        Y = (-Y) % h;
                    if (X < 0)
                        X = (-X) % w;
                    if (Y >= h)
                        Y = h - (Y % h) - 1;
                    if (X >= w)
                        X = w - (X % w) - 1;
                    dst[x * step + c] = src[Y * slinesize + X * step + c];
                }
            }
            break;
        }

        ysrc += ylinesize;
        xsrc += xlinesize;
        dst += dlinesize;
    }
}

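/*
 * Frame-sync callback: fetches the current source, xmap and ymap frames,
 * then either passes a clone of the source through (when the filter is
 * disabled on the timeline) or renders into a new buffer via s->displace().
 */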
static int process_frame(FFFrameSync *fs)
{
    AVFilterContext *ctx = fs->parent;
    DisplaceContext *s = fs->opaque;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out, *in, *xpic, *ypic;
    int ret;

    if ((ret = ff_framesync_get_frame(&s->fs, 0, &in,   0)) < 0 ||
        (ret = ff_framesync_get_frame(&s->fs, 1, &xpic, 0)) < 0 ||
        (ret = ff_framesync_get_frame(&s->fs, 2, &ypic, 0)) < 0)
        return ret;

    if (ctx->is_disabled) {
        out = av_frame_clone(in);
        if (!out)
            return AVERROR(ENOMEM);
    } else {
        out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
        if (!out)
            return AVERROR(ENOMEM);
        av_frame_copy_props(out, in);

        s->displace(s, in, xpic, ypic, out);
    }
    out->pts = av_rescale_q(in->pts, s->fs.time_base, outlink->time_base);

    return ff_filter_frame(outlink, out);
}

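/*
 * Per-input setup: pick the planar or packed remap routine and precompute
 * per-plane dimensions. The "blank" color used by EDGE_BLANK is left at its
 * zero default for RGB-like formats and set to Y=16, Cb=Cr=128 for YUV.
 */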
static int config_input(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->dst;
    DisplaceContext *s = ctx->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
    int vsub, hsub;

    s->nb_planes = av_pix_fmt_count_planes(inlink->format);
    s->nb_components = desc->nb_components;

    if (s->nb_planes > 1 || s->nb_components == 1)
        s->displace = displace_planar;
    else
        s->displace = displace_packed;

    if (!(desc->flags & AV_PIX_FMT_FLAG_RGB)) {
        s->blank[1] = s->blank[2] = 128;
        s->blank[0] = 16;
    }

    s->step = av_get_padded_bits_per_pixel(desc) >> 3;
    hsub = desc->log2_chroma_w;
    vsub = desc->log2_chroma_h;
    s->height[1] = s->height[2] = AV_CEIL_RSHIFT(inlink->h, vsub);
    s->height[0] = s->height[3] = inlink->h;
    s->width[1]  = s->width[2]  = AV_CEIL_RSHIFT(inlink->w, hsub);
    s->width[0]  = s->width[3]  = inlink->w;

    return 0;
}

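/*
 * Output/link setup: all three inputs must share pixel format and size.
 * The frame sync is driven by the source input (sync level 2, EXT_STOP on
 * both ends), while the two maps are secondary inputs (sync level 1) whose
 * last frame is extended indefinitely.
 */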
static int config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    DisplaceContext *s = ctx->priv;
    AVFilterLink *srclink = ctx->inputs[0];
    AVFilterLink *xlink = ctx->inputs[1];
    AVFilterLink *ylink = ctx->inputs[2];
    FFFrameSyncIn *in;
    int ret;

    if (srclink->format != xlink->format ||
        srclink->format != ylink->format) {
        av_log(ctx, AV_LOG_ERROR, "inputs must be of same pixel format\n");
        return AVERROR(EINVAL);
    }
    if (srclink->w != xlink->w ||
        srclink->h != xlink->h ||
        srclink->w != ylink->w ||
        srclink->h != ylink->h) {
        av_log(ctx, AV_LOG_ERROR, "First input link %s parameters "
               "(size %dx%d) do not match the corresponding "
               "second input link %s parameters (%dx%d) "
               "and/or third input link %s parameters (%dx%d)\n",
               ctx->input_pads[0].name, srclink->w, srclink->h,
               ctx->input_pads[1].name, xlink->w, xlink->h,
               ctx->input_pads[2].name, ylink->w, ylink->h);
        return AVERROR(EINVAL);
    }

    outlink->w = srclink->w;
    outlink->h = srclink->h;
    outlink->time_base = srclink->time_base;
    outlink->sample_aspect_ratio = srclink->sample_aspect_ratio;
    outlink->frame_rate = srclink->frame_rate;

    ret = ff_framesync_init(&s->fs, ctx, 3);
    if (ret < 0)
        return ret;

    in = s->fs.in;
    in[0].time_base = srclink->time_base;
    in[1].time_base = xlink->time_base;
    in[2].time_base = ylink->time_base;
    in[0].sync   = 2;
    in[0].before = EXT_STOP;
    in[0].after  = EXT_STOP;
    in[1].sync   = 1;
    in[1].before = EXT_NULL;
    in[1].after  = EXT_INFINITY;
    in[2].sync   = 1;
    in[2].before = EXT_NULL;
    in[2].after  = EXT_INFINITY;
    s->fs.opaque   = s;
    s->fs.on_event = process_frame;

    return ff_framesync_configure(&s->fs);
}

static int activate(AVFilterContext *ctx)
{
    DisplaceContext *s = ctx->priv;
    return ff_framesync_activate(&s->fs);
}

static av_cold void uninit(AVFilterContext *ctx)
{
    DisplaceContext *s = ctx->priv;

    ff_framesync_uninit(&s->fs);
}

static const AVFilterPad displace_inputs[] = {
    {
        .name         = "source",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_input,
    },
    {
        .name         = "xmap",
        .type         = AVMEDIA_TYPE_VIDEO,
    },
    {
        .name         = "ymap",
        .type         = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

static const AVFilterPad displace_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
    { NULL }
};

AVFilter ff_vf_displace = {
    .name          = "displace",
    .description   = NULL_IF_CONFIG_SMALL("Displace pixels."),
    .priv_size     = sizeof(DisplaceContext),
    .uninit        = uninit,
    .query_formats = query_formats,
    .activate      = activate,
    .inputs        = displace_inputs,
    .outputs       = displace_outputs,
    .priv_class    = &displace_class,
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL,
};
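/*
 * Illustrative usage (a sketch, not part of this file): with the ffmpeg CLI
 * the filter is fed three labelled inputs, e.g.
 *
 *     ffmpeg -i input.mp4 -i xmap.png -i ymap.png \
 *            -lavfi '[0:v][1:v][2:v]displace=edge=mirror' output.mp4
 *
 * where input.mp4, xmap.png and ymap.png are hypothetical file names; the
 * maps must be 8-bit images of the same size as the source, and mid-gray
 * (128) areas leave pixels in place.
 */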