/*
 * NOTE(review): the leading "41 ", "42 ", ... on each line is stale
 * line-number residue from an earlier extraction, not part of the code.
 *
 * Fixed-width vertical edge-extension kernels (MMX), one per picture
 * width 1..22 bytes; implemented in external assembly — TODO confirm
 * against the companion .asm source, which is not in this chunk.
 */
41 extern emu_edge_vfix_func ff_emu_edge_vfix1_mmx;
42 extern emu_edge_vfix_func ff_emu_edge_vfix2_mmx;
43 extern emu_edge_vfix_func ff_emu_edge_vfix3_mmx;
44 extern emu_edge_vfix_func ff_emu_edge_vfix4_mmx;
45 extern emu_edge_vfix_func ff_emu_edge_vfix5_mmx;
46 extern emu_edge_vfix_func ff_emu_edge_vfix6_mmx;
47 extern emu_edge_vfix_func ff_emu_edge_vfix7_mmx;
48 extern emu_edge_vfix_func ff_emu_edge_vfix8_mmx;
49 extern emu_edge_vfix_func ff_emu_edge_vfix9_mmx;
50 extern emu_edge_vfix_func ff_emu_edge_vfix10_mmx;
51 extern emu_edge_vfix_func ff_emu_edge_vfix11_mmx;
52 extern emu_edge_vfix_func ff_emu_edge_vfix12_mmx;
53 extern emu_edge_vfix_func ff_emu_edge_vfix13_mmx;
54 extern emu_edge_vfix_func ff_emu_edge_vfix14_mmx;
55 extern emu_edge_vfix_func ff_emu_edge_vfix15_mmx;
56 extern emu_edge_vfix_func ff_emu_edge_vfix16_mmx;
57 extern emu_edge_vfix_func ff_emu_edge_vfix17_mmx;
58 extern emu_edge_vfix_func ff_emu_edge_vfix18_mmx;
59 extern emu_edge_vfix_func ff_emu_edge_vfix19_mmx;
60 extern emu_edge_vfix_func ff_emu_edge_vfix20_mmx;
61 extern emu_edge_vfix_func ff_emu_edge_vfix21_mmx;
62 extern emu_edge_vfix_func ff_emu_edge_vfix22_mmx;
/* Width-indexed dispatch table: vfixtbl_mmx[w - 1] extends a picture of
 * width w (1..22) — see the vfix_tbl[w - 1] call in the core below.
 * NOTE(review): the closing "};" is missing from this chunk. */
64 static emu_edge_vfix_func *
const vfixtbl_mmx[22] = {
65 &ff_emu_edge_vfix1_mmx, &ff_emu_edge_vfix2_mmx, &ff_emu_edge_vfix3_mmx,
66 &ff_emu_edge_vfix4_mmx, &ff_emu_edge_vfix5_mmx, &ff_emu_edge_vfix6_mmx,
67 &ff_emu_edge_vfix7_mmx, &ff_emu_edge_vfix8_mmx, &ff_emu_edge_vfix9_mmx,
68 &ff_emu_edge_vfix10_mmx, &ff_emu_edge_vfix11_mmx, &ff_emu_edge_vfix12_mmx,
69 &ff_emu_edge_vfix13_mmx, &ff_emu_edge_vfix14_mmx, &ff_emu_edge_vfix15_mmx,
70 &ff_emu_edge_vfix16_mmx, &ff_emu_edge_vfix17_mmx, &ff_emu_edge_vfix18_mmx,
71 &ff_emu_edge_vfix19_mmx, &ff_emu_edge_vfix20_mmx, &ff_emu_edge_vfix21_mmx,
72 &ff_emu_edge_vfix22_mmx
/* Variable-width vertical fallback (MMX), used when w exceeds the
 * fixed-width table range. */
75 extern emu_edge_vvar_func ff_emu_edge_vvar_mmx;
/* SSE variants exist only for widths 16..22; narrower widths reuse the
 * MMX kernels in the table below — presumably because widths <= 15 fit
 * MMX registers anyway (TODO confirm against the .asm source). */
76 extern emu_edge_vfix_func ff_emu_edge_vfix16_sse;
77 extern emu_edge_vfix_func ff_emu_edge_vfix17_sse;
78 extern emu_edge_vfix_func ff_emu_edge_vfix18_sse;
79 extern emu_edge_vfix_func ff_emu_edge_vfix19_sse;
80 extern emu_edge_vfix_func ff_emu_edge_vfix20_sse;
81 extern emu_edge_vfix_func ff_emu_edge_vfix21_sse;
82 extern emu_edge_vfix_func ff_emu_edge_vfix22_sse;
/* Same layout as vfixtbl_mmx: entries 0..14 are the MMX kernels,
 * 15..21 the SSE ones. NOTE(review): closing "};" missing from chunk;
 * also note entries here omit the (semantically equivalent) "&" that
 * vfixtbl_mmx uses — cosmetic inconsistency only. */
83 static emu_edge_vfix_func *
const vfixtbl_sse[22] = {
84 ff_emu_edge_vfix1_mmx, ff_emu_edge_vfix2_mmx, ff_emu_edge_vfix3_mmx,
85 ff_emu_edge_vfix4_mmx, ff_emu_edge_vfix5_mmx, ff_emu_edge_vfix6_mmx,
86 ff_emu_edge_vfix7_mmx, ff_emu_edge_vfix8_mmx, ff_emu_edge_vfix9_mmx,
87 ff_emu_edge_vfix10_mmx, ff_emu_edge_vfix11_mmx, ff_emu_edge_vfix12_mmx,
88 ff_emu_edge_vfix13_mmx, ff_emu_edge_vfix14_mmx, ff_emu_edge_vfix15_mmx,
89 ff_emu_edge_vfix16_sse, ff_emu_edge_vfix17_sse, ff_emu_edge_vfix18_sse,
90 ff_emu_edge_vfix19_sse, ff_emu_edge_vfix20_sse, ff_emu_edge_vfix21_sse,
91 ff_emu_edge_vfix22_sse
/* SSE variable-width vertical fallback. */
93 extern emu_edge_vvar_func ff_emu_edge_vvar_sse;
/* Fixed-width horizontal edge-extension kernels (MMX), even widths
 * 2..22 only; odd widths are handled by rounding up — see the
 * (start_x - 1) >> 1 / (p - 1) >> 1 indexing in the core below. */
100 extern emu_edge_hfix_func ff_emu_edge_hfix2_mmx;
101 extern emu_edge_hfix_func ff_emu_edge_hfix4_mmx;
102 extern emu_edge_hfix_func ff_emu_edge_hfix6_mmx;
103 extern emu_edge_hfix_func ff_emu_edge_hfix8_mmx;
104 extern emu_edge_hfix_func ff_emu_edge_hfix10_mmx;
105 extern emu_edge_hfix_func ff_emu_edge_hfix12_mmx;
106 extern emu_edge_hfix_func ff_emu_edge_hfix14_mmx;
107 extern emu_edge_hfix_func ff_emu_edge_hfix16_mmx;
108 extern emu_edge_hfix_func ff_emu_edge_hfix18_mmx;
109 extern emu_edge_hfix_func ff_emu_edge_hfix20_mmx;
110 extern emu_edge_hfix_func ff_emu_edge_hfix22_mmx;
/* Index i handles width 2*(i+1). NOTE(review): closing "};" is missing
 * from this chunk. */
112 static emu_edge_hfix_func *
const hfixtbl_mmx[11] = {
113 ff_emu_edge_hfix2_mmx, ff_emu_edge_hfix4_mmx, ff_emu_edge_hfix6_mmx,
114 ff_emu_edge_hfix8_mmx, ff_emu_edge_hfix10_mmx, ff_emu_edge_hfix12_mmx,
115 ff_emu_edge_hfix14_mmx, ff_emu_edge_hfix16_mmx, ff_emu_edge_hfix18_mmx,
116 ff_emu_edge_hfix20_mmx, ff_emu_edge_hfix22_mmx
/* Variable-width horizontal fallback (MMX). */
119 extern emu_edge_hvar_func ff_emu_edge_hvar_mmx;
/* SSE2 horizontal kernels exist only for widths 16..22 (16-byte XMM
 * stores); narrower widths reuse the MMX kernels below. */
120 extern emu_edge_hfix_func ff_emu_edge_hfix16_sse2;
121 extern emu_edge_hfix_func ff_emu_edge_hfix18_sse2;
122 extern emu_edge_hfix_func ff_emu_edge_hfix20_sse2;
123 extern emu_edge_hfix_func ff_emu_edge_hfix22_sse2;
/* Same even-width indexing as hfixtbl_mmx; entries 0..6 MMX,
 * 7..10 SSE2. NOTE(review): closing "};" missing from this chunk. */
124 static emu_edge_hfix_func *
const hfixtbl_sse2[11] = {
125 ff_emu_edge_hfix2_mmx, ff_emu_edge_hfix4_mmx, ff_emu_edge_hfix6_mmx,
126 ff_emu_edge_hfix8_mmx, ff_emu_edge_hfix10_mmx, ff_emu_edge_hfix12_mmx,
127 ff_emu_edge_hfix14_mmx, ff_emu_edge_hfix16_sse2, ff_emu_edge_hfix18_sse2,
128 ff_emu_edge_hfix20_sse2, ff_emu_edge_hfix22_sse2
/* Variable-width horizontal fallback (SSE2). */
130 extern emu_edge_hvar_func ff_emu_edge_hvar_sse2;
/* AVX2 horizontal kernels, compiled only when the assembler supports
 * AVX2. NOTE(review): the next line fuses two original source lines
 * ("#if HAVE_AVX2_EXTERNAL" and the first extern) — extraction damage,
 * left as-is. */
131 #if HAVE_AVX2_EXTERNAL 132 extern emu_edge_hfix_func ff_emu_edge_hfix8_avx2;
133 extern emu_edge_hfix_func ff_emu_edge_hfix10_avx2;
134 extern emu_edge_hfix_func ff_emu_edge_hfix12_avx2;
135 extern emu_edge_hfix_func ff_emu_edge_hfix14_avx2;
136 extern emu_edge_hfix_func ff_emu_edge_hfix16_avx2;
137 extern emu_edge_hfix_func ff_emu_edge_hfix18_avx2;
138 extern emu_edge_hfix_func ff_emu_edge_hfix20_avx2;
139 extern emu_edge_hfix_func ff_emu_edge_hfix22_avx2;
/* Even-width table: widths 2..6 reuse MMX, 8..22 use AVX2.
 * NOTE(review): closing "};" (and presumably the matching #endif)
 * are in lines missing from this chunk. */
140 static emu_edge_hfix_func *
const hfixtbl_avx2[11] = {
141 ff_emu_edge_hfix2_mmx, ff_emu_edge_hfix4_mmx, ff_emu_edge_hfix6_mmx,
142 ff_emu_edge_hfix8_avx2, ff_emu_edge_hfix10_avx2, ff_emu_edge_hfix12_avx2,
143 ff_emu_edge_hfix14_avx2, ff_emu_edge_hfix16_avx2, ff_emu_edge_hfix18_avx2,
144 ff_emu_edge_hfix20_avx2, ff_emu_edge_hfix22_avx2
/* Variable-width horizontal fallback (AVX2). */
146 extern emu_edge_hvar_func ff_emu_edge_hvar_avx2;
/*
 * NOTE(review): fragment of the shared emulated-edge motion-compensation
 * core — the opening signature line and several interior lines are
 * missing from this chunk. What remains: clamp the source read window
 * to the valid picture, copy/extend rows via the supplied vertical
 * kernels, then replicate the left/right borders via the horizontal
 * kernels.
 */
150 ptrdiff_t dst_stride,
151 ptrdiff_t src_stride,
/* (block_w/block_h/src_x/src_y/w/h parameters sit in missing lines) */
155 emu_edge_vfix_func *
const *vfix_tbl,
156 emu_edge_vvar_func *v_extend_var,
157 emu_edge_hfix_func *
const *hfix_tbl,
158 emu_edge_hvar_func *h_extend_var)
160 x86_reg start_y, start_x, end_y, end_x, src_y_add = 0, p;
/* Vertical clamping: when the block lies fully above/below the picture,
 * rebase src and bias the replicated row via src_y_add. The governing
 * if-conditions are in lines missing from this chunk. */
168 src -= src_y*src_stride;
171 }
else if (src_y <= -block_h) {
172 src -= src_y*src_stride;
173 src_y_add = 1 - block_h;
/* Horizontal clamping, same idea for x. */
177 src += w - 1 - src_x;
179 }
else if (src_x <= -block_w) {
180 src += 1 - block_w - src_x;
/* Intersection of the requested block with the valid picture area. */
184 start_y =
FFMAX(0, -src_y);
185 start_x =
FFMAX(0, -src_x);
186 end_y =
FFMIN(block_h, h-src_y);
187 end_x =
FFMIN(block_w, w-src_x);
192 src += (src_y_add + start_y) * src_stride + start_x;
/* Vertical extension: the fixed-width kernel vfix_tbl[w - 1] when the
 * picture width fits the 22-entry table (guard condition is in a
 * missing line), else the variable-width kernel. */
195 vfix_tbl[w - 1](dst + start_x, dst_stride,
src, src_stride,
196 start_y, end_y, block_h);
198 v_extend_var(dst + start_x, dst_stride, src, src_stride,
199 start_y, end_y, block_h, w);
/* Left-border replication (start_x > 0): fixed kernel indexed by
 * (start_x - 1) >> 1 (even widths, rounded up), else variable. */
205 hfix_tbl[(start_x - 1) >> 1](dst, dst_stride, start_x, block_h);
207 h_extend_var(dst, dst_stride,
208, (start_x + 1) >> 1, block_h);
/* Right-border replication; p is presumably block_w - end_x — its
 * computation is in lines missing from this chunk. The -(p & 1) offset
 * lets even-width kernels serve odd widths by overlapping one column. */
216 hfix_tbl[(p - 1) >> 1](dst + end_x - (p & 1), dst_stride,
219 h_extend_var(dst + end_x - (p & 1), dst_stride,
220 -!(p & 1), (p + 1) >> 1, block_h);
/* NOTE(review): trailing part of the MMX entry point — the opening
 * signature line (and the "{") are missing from this chunk. */
227 ptrdiff_t buf_stride,
228 ptrdiff_t src_stride,
229 int block_w,
int block_h,
230 int src_x,
int src_y,
int w,
int h)
/* All-MMX configuration: MMX vertical and horizontal kernel tables. */
232 emulated_edge_mc(buf, src, buf_stride, src_stride, block_w, block_h,
233 src_x, src_y, w, h, vfixtbl_mmx, &ff_emu_edge_vvar_mmx,
234 hfixtbl_mmx, &ff_emu_edge_hvar_mmx);
/* NOTE(review): trailing part of the SSE entry point — opening
 * signature line missing from this chunk. */
238 ptrdiff_t buf_stride,
239 ptrdiff_t src_stride,
240 int block_w,
int block_h,
241 int src_x,
int src_y,
int w,
int h)
/* SSE vertical kernels, but horizontal stays MMX — intentional, not a
 * typo: no SSE (pre-SSE2) horizontal variants are declared above. */
243 emulated_edge_mc(buf, src, buf_stride, src_stride, block_w, block_h,
244 src_x, src_y, w, h, vfixtbl_sse, &ff_emu_edge_vvar_sse,
245 hfixtbl_mmx, &ff_emu_edge_hvar_mmx);
/* NOTE(review): trailing part of the SSE2 entry point — opening
 * signature line (and the "int h)" parameter line) missing from
 * this chunk. */
250 ptrdiff_t buf_stride,
251 ptrdiff_t src_stride,
252 int block_w,
int block_h,
253 int src_x,
int src_y,
int w,
/* SSE vertical kernels + SSE2 horizontal kernels. */
256 emulated_edge_mc(buf, src, buf_stride, src_stride, block_w, block_h,
257 src_x, src_y, w, h, vfixtbl_sse, &ff_emu_edge_vvar_sse,
258 hfixtbl_sse2, &ff_emu_edge_hvar_sse2);
/* NOTE(review): trailing part of the AVX2 entry point, guarded by
 * HAVE_AVX2_EXTERNAL; the next line fuses the #if with a signature
 * line (extraction damage, left as-is), and the matching #endif is in
 * lines missing from this chunk. */
261 #if HAVE_AVX2_EXTERNAL 263 ptrdiff_t buf_stride,
264 ptrdiff_t src_stride,
265 int block_w,
int block_h,
266 int src_x,
int src_y,
int w,
/* SSE vertical kernels (no AVX2 vertical variants are declared above)
 * + AVX2 horizontal kernels. */
269 emulated_edge_mc(buf, src, buf_stride, src_stride, block_w, block_h,
270 src_x, src_y, w, h, vfixtbl_sse, &ff_emu_edge_vvar_sse,
271 hfixtbl_avx2, &ff_emu_edge_hvar_avx2);
303 #if HAVE_AVX2_EXTERNAL #define EXTERNAL_MMX(flags)
Memory handling functions.
static atomic_int cpu_flags
void(* prefetch)(uint8_t *buf, ptrdiff_t stride, int h)
Prefetch memory into cache (if supported by hardware).
#define EXTERNAL_SSE(flags)
Macro definitions for various function/variable attributes.
void(* emulated_edge_mc)(uint8_t *dst, const uint8_t *src, ptrdiff_t dst_linesize, ptrdiff_t src_linesize, int block_w, int block_h, int src_x, int src_y, int w, int h)
Copy a rectangular area of samples to a temporary buffer and replicate the border samples...
#define av_assert2(cond)
assert() equivalent, that does lie in speed critical code.
#define EXTERNAL_SSE2(flags)
av_cold void ff_videodsp_init_x86(VideoDSPContext *ctx, int bpc)
simple assert() macros that are a bit more flexible than ISO C assert().
void ff_prefetch_3dnow(uint8_t *buf, ptrdiff_t stride, int h)
#define FFABS(a)
Absolute value, Note, INT_MIN / INT64_MIN result in undefined behavior as they are not representable ...
int av_get_cpu_flags(void)
Return the flags which specify extensions supported by the CPU.
#define EXTERNAL_AVX2(flags)
#define EXTERNAL_MMXEXT(flags)
common internal and external API header
Core video DSP helper functions.
#define EXTERNAL_AMD3DNOW(flags)
void ff_prefetch_mmxext(uint8_t *buf, ptrdiff_t stride, int h)