#define MV_P_FOR  (1<<0)
#define MV_B_FOR  (1<<1)
#define MV_B_BACK (1<<2)
#define MV_TYPE_FOR  (1<<0)
#define MV_TYPE_BACK (1<<1)
#define FRAME_TYPE_I (1<<0)
#define FRAME_TYPE_P (1<<1)
#define FRAME_TYPE_B (1<<2)

#define OFFSET(x) offsetof(CodecViewContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
#define CONST(name, help, val, unit) { name, help, 0, AV_OPT_TYPE_CONST, {.i64=val}, 0, 0, FLAGS, unit }

static const AVOption codecview_options[] = {
    /* ... */
    CONST("pf", "forward predicted MVs of P-frames",  MV_P_FOR,  "mv"),
    CONST("bf", "forward predicted MVs of B-frames",  MV_B_FOR,  "mv"),
    CONST("bb", "backward predicted MVs of B-frames", MV_B_BACK, "mv"),
    /* ... */
};
/* Clip the segment (sx,sy)-(ex,ey) against 0..maxx on the first coordinate;
 * returns 1 if the segment lies entirely outside that range. */
static int clip_line(int *sx, int *sy, int *ex, int *ey, int maxx)
{
    if (*sx > *ex)
        return clip_line(ex, ey, sx, sy, maxx);

    if (*sx < 0) {
        if (*ex < 0)
            return 1;
        *sy = *ey + (*sy - *ey) * (int64_t)*ex / (*ex - *sx);
        *sx = 0;
    }

    if (*ex > maxx) {
        if (*sx > maxx)
            return 1;
        *ey = *sy + (*ey - *sy) * (int64_t)(maxx - *sx) / (*ex - *sx);
        *ex = maxx;
    }
    return 0;
}
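/*
 * Annotation (not original code): in both interpolation lines above, the
 * (int64_t) cast widens the intermediate product before the division, so the
 * multiplication cannot overflow a 32-bit int even for very large coordinates.
 */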
/**
 * Draw a line from (ex, ey) -> (sx, sy).
 */
static void draw_line(uint8_t *buf, int sx, int sy, int ex, int ey,
                      int w, int h, int stride, int color)
{
    int x, y, fr, f;

    if (clip_line(&sx, &sy, &ex, &ey, w - 1))
        return;
    if (clip_line(&sy, &sx, &ey, &ex, h - 1))
        return;

    sx = av_clip(sx, 0, w - 1);
    sy = av_clip(sy, 0, h - 1);
    ex = av_clip(ex, 0, w - 1);
    ey = av_clip(ey, 0, h - 1);

    buf[sy * stride + sx] += color;

    if (FFABS(ex - sx) > FFABS(ey - sy)) {
        /* mostly horizontal: step in x, interpolate y in 16.16 fixed point */
        /* ... swap endpoints so sx <= ex and rebase buf/ex (elided) ... */
        f = ((ey - sy) << 16) / ex;
        for (x = 0; x <= ex; x++) {
            y  = (x * f) >> 16;
            fr = (x * f) & 0xFFFF;
            buf[ y      * stride + x] += (color * (0x10000 - fr)) >> 16;
            if (fr)
                buf[(y + 1) * stride + x] += (color * fr) >> 16;
        }
    } else {
        /* mostly vertical: step in y, interpolate x */
        /* ... swap endpoints so sy <= ey and rebase buf/ey (elided) ... */
        f = ((ex - sx) << 16) / ey;
        for (y = 0; y <= ey; y++) {
            x  = (y * f) >> 16;
            fr = (y * f) & 0xFFFF;
            buf[y * stride + x    ] += (color * (0x10000 - fr)) >> 16;
            if (fr)
                buf[y * stride + x + 1] += (color * fr) >> 16;
        }
    }
}
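/*
 * Standalone sketch (not part of vf_codecview.c): the same 16.16 fixed-point
 * interpolation draw_line() uses, applied to a tiny 8x8 buffer. The slope f
 * carries 16 fractional bits; the fraction fr splits the "color" weight
 * between the two rows the ideal line passes between, which is what gives the
 * drawn vectors their anti-aliased look. All names here are illustrative.
 */
#include <stdio.h>
#include <stdint.h>
#include <string.h>

int main(void)
{
    enum { W = 8, H = 8 };
    uint8_t buf[H][W];
    int sx = 0, sy = 1, ex = 7, ey = 5, color = 100;
    int f = ((ey - sy) << 16) / (ex - sx);      /* slope in 16.16 fixed point */

    memset(buf, 0, sizeof(buf));
    for (int x = 0; x <= ex - sx; x++) {
        int y  = (x * f) >> 16;                 /* integer row below the line */
        int fr = (x * f) & 0xFFFF;              /* fractional distance to the next row */
        buf[sy + y][sx + x] += (color * (0x10000 - fr)) >> 16;
        if (fr)
            buf[sy + y + 1][sx + x] += (color * fr) >> 16;
    }

    /* print the resulting intensity grid */
    for (int y = 0; y < H; y++) {
        for (int x = 0; x < W; x++)
            printf("%4d", buf[y][x]);
        printf("\n");
    }
    return 0;
}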
/**
 * Draw an arrow from (ex, ey) -> (sx, sy).
 */
static void draw_arrow(uint8_t *buf, int sx, int sy, int ex,
                       int ey, int w, int h, int stride, int color,
                       int tail, int direction)
{
    int dx, dy;

    if (direction) {
        FFSWAP(int, sx, ex);
        FFSWAP(int, sy, ey);
    }

    sx = av_clip(sx, -100, w + 100);
    sy = av_clip(sy, -100, h + 100);
    ex = av_clip(ex, -100, w + 100);
    ey = av_clip(ey, -100, h + 100);

    dx = ex - sx;
    dy = ey - sy;

    if (dx * dx + dy * dy > 3 * 3) {
        int rx =  dx + dy;
        int ry = -dx + dy;
        int length = sqrt((rx * rx + ry * ry) << 8);

        rx = ROUNDED_DIV(rx * 3 << 4, length);
        ry = ROUNDED_DIV(ry * 3 << 4, length);

        if (tail) {
            rx = -rx;
            ry = -ry;
        }

        draw_line(buf, sx, sy, sx + rx, sy + ry, w, h, stride, color);
        draw_line(buf, sx, sy, sx - ry, sy + rx, w, h, stride, color);
    }
    draw_line(buf, sx, sy, ex, ey, w, h, stride, color);
}
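/*
 * Annotation (not original code): rx = dx + dy and ry = -dx + dy rotate the
 * shaft direction by 45 degrees (scaled by sqrt(2)); length =
 * sqrt((rx*rx + ry*ry) << 8) equals 16 * |(rx,ry)|, so
 * ROUNDED_DIV(rx * 3 << 4, length) reduces to roughly 3 * rx / |(rx,ry)|.
 * Each head stroke is therefore about 3 pixels long, at +/-45 degrees to the
 * shaft, and the final draw_line() call draws the shaft itself.
 */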
    /* inside filter_frame(): visualize the per-macroblock quantizer if requested */
    if (s->qp) {
        int qstride, qp_type;
        int8_t *qp_table = av_frame_get_qp_table(frame, &qstride, &qp_type);
        /* ... chroma plane pointers pu/pv, subsampled w/h and loop indices elided ... */
        for (y = 0; y < h; y++) {
            for (x = 0; x < w; x++) {
                const int qp = ff_norm_qscale(qp_table[(y >> 3) * qstride + (x >> 3)],
                                              qp_type) * 128 / 31;
                pu[x] = pv[x] = qp;
            }
            pu += frame->linesize[1];
            pv += frame->linesize[2];
        }
    }
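/*
 * Annotation (not original code): ff_norm_qscale() maps codec-specific
 * quantizers onto a common MPEG-1-like 1..31 scale, so qp * 128 / 31 spreads
 * the result over roughly 0..128; e.g. a normalized qscale of 8 becomes
 * 8 * 128 / 31 = 33. Writing that value into both chroma planes tints the
 * picture so that coarsely quantized blocks stand out.
 */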
    /* inside filter_frame(): draw the exported motion vectors */
    AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_MOTION_VECTORS);
    if (sd) {
        const AVMotionVector *mvs = (const AVMotionVector *)sd->data;
        /* ... is_iframe/is_pframe/is_bframe and is_fp/is_bp selection flags elided ... */
        for (i = 0; i < sd->size / sizeof(*mvs); i++) {
            const AVMotionVector *mv = &mvs[i];
            /* mv->source < 0: block comes from the past; > 0: from the future */
            const int direction = mv->source > 0;

            if ((!s->frame_type && (is_fp || is_bp)) ||
                is_iframe && is_fp || is_iframe && is_bp ||
                is_pframe && is_fp ||
                is_bframe && is_fp || is_bframe && is_bp)
                draw_arrow(frame->data[0], mv->dst_x, mv->dst_y,
                           mv->src_x, mv->src_y, frame->width, frame->height,
                           frame->linesize[0], 100, 0, direction);
        }
    }
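/*
 * Standalone sketch (not part of vf_codecview.c): reading the same
 * AV_FRAME_DATA_MOTION_VECTORS side data from application code, assuming the
 * decoder was opened with the "flags2" option set to "+export_mvs". The
 * function name is illustrative only.
 */
#include <stdio.h>
#include <libavutil/frame.h>
#include <libavutil/motion_vector.h>

static void dump_motion_vectors(const AVFrame *frame)
{
    AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_MOTION_VECTORS);
    if (!sd)
        return;  /* this frame carries no exported motion vectors */

    const AVMotionVector *mvs = (const AVMotionVector *)sd->data;
    for (size_t i = 0; i < sd->size / sizeof(*mvs); i++) {
        const AVMotionVector *mv = &mvs[i];
        printf("%2dx%-2d block %s: src (%4d,%4d) -> dst (%4d,%4d)\n",
               mv->w, mv->h, mv->source > 0 ? "from future" : "from past",
               mv->src_x, mv->src_y, mv->dst_x, mv->dst_y);
    }
}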
318 .
inputs = codecview_inputs,
320 .priv_class = &codecview_class,
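/*
 * Usage note (annotation, not part of the original listing): the mv option
 * values defined above (pf, bf, bb) select which motion vectors to draw, so
 * the documented way to visualize them is e.g.
 *     ffplay -flags2 +export_mvs input.mp4 -vf codecview=mv=pf+bf+bb
 * where -flags2 +export_mvs asks the decoder to attach motion-vector side
 * data to each frame.
 */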