static int overlay_opencl_load(AVFilterContext *avctx,
                               enum AVPixelFormat main_format,
                               enum AVPixelFormat overlay_format)
{
    /* ... */
    int err, i, main_planes, overlay_planes;
    /* ... */

    /* Count the data planes used by each input format. */
    main_planes = overlay_planes = 0;
    for (i = 0; i < main_desc->nb_components; i++)
        main_planes    = FFMAX(main_planes,    main_desc->comp[i].plane + 1);
    for (i = 0; i < overlay_desc->nb_components; i++)
        overlay_planes = FFMAX(overlay_planes, overlay_desc->comp[i].plane + 1);

    /* ... */
        av_log(avctx, AV_LOG_WARNING, "Overlay position (%d, %d) "
               "does not match subsampling (%d, %d).\n",
               ctx->x_position, ctx->y_position,
               ctx->x_subsample, ctx->y_subsample);

    if (main_planes == overlay_planes) {
        if (main_desc->nb_components == overlay_desc->nb_components)
            kernel = "overlay_no_alpha";
        else
            kernel = "overlay_internal_alpha";
    } else {
        kernel = "overlay_external_alpha";
    }
105 "command queue: %d.\n", cle);
124 clReleaseKernel(ctx->
kernel);

static int overlay_opencl_blend(FFFrameSync *fs)
{
    /* ... */
    AVFrame *input_main, *input_overlay;
    /* ... */
    size_t global_work[2];
    /* ... */

        /* Build the program and kernel on first use, from the software
           formats behind the two hardware frames contexts. */
        err = overlay_opencl_load(avctx, main_fc->sw_format,
                                  overlay_fc->sw_format);
    /* ... */

    for (plane = 0; plane < ctx->nb_planes; plane++) {
        kernel_arg = 0;

        /* Kernel arguments for this plane: destination, main input, then
           the overlay input. */
        mem = (cl_mem)output->data[plane];
        cle = clSetKernelArg(ctx->kernel, kernel_arg++, sizeof(cl_mem), &mem);
        if (cle != CL_SUCCESS)
            goto fail_kernel_arg;

        mem = (cl_mem)input_main->data[plane];
        cle = clSetKernelArg(ctx->kernel, kernel_arg++, sizeof(cl_mem), &mem);
        if (cle != CL_SUCCESS)
            goto fail_kernel_arg;

        mem = (cl_mem)input_overlay->data[plane];
        cle = clSetKernelArg(ctx->kernel, kernel_arg++, sizeof(cl_mem), &mem);
        if (cle != CL_SUCCESS)
            goto fail_kernel_arg;
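        /* For AV_PIX_FMT_OPENCL hardware frames, each data[] pointer holds a
           cl_mem image handle rather than host memory, which is why the plane
           pointers above can be cast straight to cl_mem. */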

        /* With "overlay_external_alpha" the overlay's alpha values live in a
           separate plane, passed to the kernel as an extra image argument. */
        if (ctx->alpha_separate) {
            mem = (cl_mem)input_overlay->data[ctx->nb_planes];
            cle = clSetKernelArg(ctx->kernel, kernel_arg++, sizeof(cl_mem), &mem);
            if (cle != CL_SUCCESS)
                goto fail_kernel_arg;
        }

        /* Overlay position for this plane, scaled down for subsampled
           chroma planes. */
        x = ctx->x_position / (plane == 0 ? 1 : ctx->x_subsample);
        y = ctx->y_position / (plane == 0 ? 1 : ctx->y_subsample);

        cle = clSetKernelArg(ctx->kernel, kernel_arg++, sizeof(cl_int), &x);
        if (cle != CL_SUCCESS)
            goto fail_kernel_arg;
        cle = clSetKernelArg(ctx->kernel, kernel_arg++, sizeof(cl_int), &y);
        if (cle != CL_SUCCESS)
            goto fail_kernel_arg;

        if (ctx->alpha_separate) {
            /* The separate alpha plane is full resolution, so subsampled
               chroma planes scale their coordinates back up to sample it. */
            cl_int alpha_adj_x = plane == 0 ? 1 : ctx->x_subsample;
            cl_int alpha_adj_y = plane == 0 ? 1 : ctx->y_subsample;

            cle = clSetKernelArg(ctx->kernel, kernel_arg++, sizeof(cl_int), &alpha_adj_x);
            if (cle != CL_SUCCESS)
                goto fail_kernel_arg;
            cle = clSetKernelArg(ctx->kernel, kernel_arg++, sizeof(cl_int), &alpha_adj_y);
            if (cle != CL_SUCCESS)
                goto fail_kernel_arg;
        }

        /* ... */
        err = ff_opencl_filter_work_size_from_image(avctx, global_work,
                                                    output, plane, 0);
        /* ... */

        cle = clEnqueueNDRangeKernel(ctx->command_queue, ctx->kernel, 2, NULL,
                                     global_work, NULL, 0, NULL, NULL);
        if (cle != CL_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to enqueue "
                   "overlay kernel for plane %d: %d.\n", plane, cle);
            /* ... */
        }
    }

    cle = clFinish(ctx->command_queue);
    if (cle != CL_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to finish "
               "command queue: %d.\n", cle);
        /* ... */
    }
    /* ... */
}

static av_cold void overlay_opencl_uninit(AVFilterContext *avctx)
{
    /* ... */
    cle = clReleaseKernel(ctx->kernel);
    if (cle != CL_SUCCESS)
        av_log(avctx, AV_LOG_ERROR, "Failed to release "
               "kernel: %d.\n", cle);

    cle = clReleaseCommandQueue(ctx->command_queue);
    if (cle != CL_SUCCESS)
        av_log(avctx, AV_LOG_ERROR, "Failed to release "
               "command queue: %d.\n", cle);
    /* ... */
}
/* ... */

#define OFFSET(x) offsetof(OverlayOpenCLContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption overlay_opencl_options[] = {
    { "x", "Overlay x position",
      OFFSET(x_position), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
    { "y", "Overlay y position",
      OFFSET(y_position), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
    { NULL },
};

/* ... */

AVFilter ff_vf_overlay_opencl = {
    .name           = "overlay_opencl",
    /* ... */
    .priv_class     = &overlay_opencl_class,
    /* ... */
    .inputs         = overlay_opencl_inputs,
    .outputs        = overlay_opencl_outputs,
    /* ... */
};
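
/*
 * Example invocation (illustrative only; the exact formats and upload steps
 * depend on the inputs and the OpenCL device):
 *
 *   ffmpeg -init_hw_device opencl=gpu -filter_hw_device gpu \
 *       -i main.mp4 -i logo.png -filter_complex \
 *       "[0:v]format=yuv420p,hwupload[a];[1:v]format=yuva420p,hwupload[b];[a][b]overlay_opencl=x=32:y=32,hwdownload,format=yuv420p" \
 *       output.mp4
 */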