FFmpeg  4.0
hwcontext_qsv.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include <stdint.h>
20 #include <string.h>
21 
22 #include <mfx/mfxvideo.h>
23 
24 #include "config.h"
25 
26 #if CONFIG_VAAPI
27 #include "hwcontext_vaapi.h"
28 #endif
29 #if CONFIG_DXVA2
30 #include "hwcontext_dxva2.h"
31 #endif
32 
33 #include "buffer.h"
34 #include "common.h"
35 #include "hwcontext.h"
36 #include "hwcontext_internal.h"
37 #include "hwcontext_qsv.h"
38 #include "mem.h"
39 #include "pixfmt.h"
40 #include "pixdesc.h"
41 #include "time.h"
42 
43 typedef struct QSVDevicePriv {
46 
47 typedef struct QSVDeviceContext {
48  mfxHDL handle;
49  mfxHandleType handle_type;
50  mfxVersion ver;
51  mfxIMPL impl;
52 
53  enum AVHWDeviceType child_device_type;
54  enum AVPixelFormat child_pix_fmt;
56 
57 typedef struct QSVFramesContext {
58  mfxSession session_download;
59  mfxSession session_upload;
60 
62  mfxFrameSurface1 *surfaces_internal;
64 
65  // used in the frame allocator for non-opaque surfaces
66  mfxMemId *mem_ids;
67  // used in the opaque alloc request for opaque surfaces
68  mfxFrameSurface1 **surface_ptrs;
69 
70  mfxExtOpaqueSurfaceAlloc opaque_alloc;
71  mfxExtBuffer *ext_buffers[1];
73 
74 static const struct {
75  mfxHandleType handle_type;
79 #if CONFIG_VAAPI
80  { MFX_HANDLE_VA_DISPLAY, AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
81 #endif
82 #if CONFIG_DXVA2
83  { MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
84 #endif
85  { 0 },
86 };
87 
88 static const struct {
90  uint32_t fourcc;
92  { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
93  { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
94  { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
95 };
96 
98 {
99  int i;
100  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
101  if (supported_pixel_formats[i].pix_fmt == pix_fmt)
102  return supported_pixel_formats[i].fourcc;
103  }
104  return 0;
105 }
106 
108 {
109  AVQSVDeviceContext *hwctx = ctx->hwctx;
110  QSVDeviceContext *s = ctx->internal->priv;
111 
112  mfxStatus err;
113  int i;
114 
115  for (i = 0; supported_handle_types[i].handle_type; i++) {
116  err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
117  &s->handle);
118  if (err == MFX_ERR_NONE) {
119  s->handle_type = supported_handle_types[i].handle_type;
120  s->child_device_type = supported_handle_types[i].device_type;
121  s->child_pix_fmt = supported_handle_types[i].pix_fmt;
122  break;
123  }
124  }
125  if (!s->handle) {
126  av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
127  "from the session\n");
128  }
129 
130  err = MFXQueryIMPL(hwctx->session, &s->impl);
131  if (err == MFX_ERR_NONE)
132  err = MFXQueryVersion(hwctx->session, &s->ver);
133  if (err != MFX_ERR_NONE) {
134  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
135  return AVERROR_UNKNOWN;
136  }
137 
138  return 0;
139 }
140 
142 {
143  QSVFramesContext *s = ctx->internal->priv;
144 
145  if (s->session_download) {
146  MFXVideoVPP_Close(s->session_download);
147  MFXClose(s->session_download);
148  }
149  s->session_download = NULL;
150 
151  if (s->session_upload) {
152  MFXVideoVPP_Close(s->session_upload);
153  MFXClose(s->session_upload);
154  }
155  s->session_upload = NULL;
156 
157  av_freep(&s->mem_ids);
158  av_freep(&s->surface_ptrs);
161 }
162 
/* Intentionally empty AVBuffer release callback: the buffers handed out by
 * qsv_pool_alloc() reference surfaces owned by the frames context
 * (s->surfaces_internal), so releasing an individual buffer must not free
 * anything. */
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
{
}
166 
167 static AVBufferRef *qsv_pool_alloc(void *opaque, int size)
168 {
170  QSVFramesContext *s = ctx->internal->priv;
171  AVQSVFramesContext *hwctx = ctx->hwctx;
172 
173  if (s->nb_surfaces_used < hwctx->nb_surfaces) {
174  s->nb_surfaces_used++;
176  sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
177  }
178 
179  return NULL;
180 }
181 
183 {
184  AVQSVFramesContext *hwctx = ctx->hwctx;
185  QSVFramesContext *s = ctx->internal->priv;
186  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
187 
188  AVBufferRef *child_device_ref = NULL;
189  AVBufferRef *child_frames_ref = NULL;
190 
192  AVHWFramesContext *child_frames_ctx;
193 
194  int i, ret = 0;
195 
196  if (!device_priv->handle) {
197  av_log(ctx, AV_LOG_ERROR,
198  "Cannot create a non-opaque internal surface pool without "
199  "a hardware handle\n");
200  return AVERROR(EINVAL);
201  }
202 
203  child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
204  if (!child_device_ref)
205  return AVERROR(ENOMEM);
206  child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
207 
208 #if CONFIG_VAAPI
209  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
210  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
211  child_device_hwctx->display = (VADisplay)device_priv->handle;
212  }
213 #endif
214 #if CONFIG_DXVA2
215  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
216  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
217  child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
218  }
219 #endif
220 
221  ret = av_hwdevice_ctx_init(child_device_ref);
222  if (ret < 0) {
223  av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
224  goto fail;
225  }
226 
227  child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
228  if (!child_frames_ref) {
229  ret = AVERROR(ENOMEM);
230  goto fail;
231  }
232  child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
233 
234  child_frames_ctx->format = device_priv->child_pix_fmt;
235  child_frames_ctx->sw_format = ctx->sw_format;
236  child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
237  child_frames_ctx->width = FFALIGN(ctx->width, 16);
238  child_frames_ctx->height = FFALIGN(ctx->height, 16);
239 
240 #if CONFIG_DXVA2
241  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
242  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
243  if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
244  child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
245  else
246  child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
247  }
248 #endif
249 
250  ret = av_hwframe_ctx_init(child_frames_ref);
251  if (ret < 0) {
252  av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
253  goto fail;
254  }
255 
256 #if CONFIG_VAAPI
257  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
258  AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
259  for (i = 0; i < ctx->initial_pool_size; i++)
260  s->surfaces_internal[i].Data.MemId = child_frames_hwctx->surface_ids + i;
261  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
262  }
263 #endif
264 #if CONFIG_DXVA2
265  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
266  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
267  for (i = 0; i < ctx->initial_pool_size; i++)
268  s->surfaces_internal[i].Data.MemId = (mfxMemId)child_frames_hwctx->surfaces[i];
269  if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
270  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
271  else
272  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
273  }
274 #endif
275 
276  s->child_frames_ref = child_frames_ref;
277  child_frames_ref = NULL;
278 
279 fail:
280  av_buffer_unref(&child_device_ref);
281  av_buffer_unref(&child_frames_ref);
282  return ret;
283 }
284 
285 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
286 {
287  const AVPixFmtDescriptor *desc;
288  uint32_t fourcc;
289 
290  desc = av_pix_fmt_desc_get(ctx->sw_format);
291  if (!desc)
292  return AVERROR(EINVAL);
293 
294  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
295  if (!fourcc)
296  return AVERROR(EINVAL);
297 
298  surf->Info.BitDepthLuma = desc->comp[0].depth;
299  surf->Info.BitDepthChroma = desc->comp[0].depth;
300  surf->Info.Shift = desc->comp[0].depth > 8;
301 
302  if (desc->log2_chroma_w && desc->log2_chroma_h)
303  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
304  else if (desc->log2_chroma_w)
305  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
306  else
307  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
308 
309  surf->Info.FourCC = fourcc;
310  surf->Info.Width = FFALIGN(ctx->width, 16);
311  surf->Info.CropW = ctx->width;
312  surf->Info.Height = FFALIGN(ctx->height, 16);
313  surf->Info.CropH = ctx->height;
314  surf->Info.FrameRateExtN = 25;
315  surf->Info.FrameRateExtD = 1;
316  surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
317 
318  return 0;
319 }
320 
322 {
323  QSVFramesContext *s = ctx->internal->priv;
324  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
325 
326  int i, ret = 0;
327 
328  if (ctx->initial_pool_size <= 0) {
329  av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
330  return AVERROR(EINVAL);
331  }
332 
334  sizeof(*s->surfaces_internal));
335  if (!s->surfaces_internal)
336  return AVERROR(ENOMEM);
337 
338  for (i = 0; i < ctx->initial_pool_size; i++) {
339  ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
340  if (ret < 0)
341  return ret;
342  }
343 
344  if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
345  ret = qsv_init_child_ctx(ctx);
346  if (ret < 0)
347  return ret;
348  }
349 
350  ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
351  ctx, qsv_pool_alloc, NULL);
352  if (!ctx->internal->pool_internal)
353  return AVERROR(ENOMEM);
354 
355  frames_hwctx->surfaces = s->surfaces_internal;
356  frames_hwctx->nb_surfaces = ctx->initial_pool_size;
357 
358  return 0;
359 }
360 
361 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
362  mfxFrameAllocResponse *resp)
363 {
364  AVHWFramesContext *ctx = pthis;
365  QSVFramesContext *s = ctx->internal->priv;
366  AVQSVFramesContext *hwctx = ctx->hwctx;
367  mfxFrameInfo *i = &req->Info;
368  mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
369 
370  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
371  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
372  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
373  return MFX_ERR_UNSUPPORTED;
374  if (i->Width != i1->Width || i->Height != i1->Height ||
375  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
376  av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
377  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
378  i->Width, i->Height, i->FourCC, i->ChromaFormat,
379  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
380  return MFX_ERR_UNSUPPORTED;
381  }
382 
383  resp->mids = s->mem_ids;
384  resp->NumFrameActual = hwctx->nb_surfaces;
385 
386  return MFX_ERR_NONE;
387 }
388 
/* mfxFrameAllocator.Free: nothing to free here — the surface pool is owned
 * by the frames context, not by the allocator. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}
393 
/* mfxFrameAllocator.Lock: direct CPU mapping of the video-memory surfaces is
 * not supported; data transfers go through the VPP upload/download sessions
 * instead. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
398 
/* mfxFrameAllocator.Unlock: unsupported for the same reason as frame_lock —
 * surfaces are never CPU-mapped through this allocator. */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
403 
/* mfxFrameAllocator.GetHDL: the MemIds stored for this pool are already the
 * native child-device handles (VASurfaceID* / IDirect3DSurface9*), so the
 * mid can be returned directly as the handle. */
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}
409 
411  mfxSession *session, int upload)
412 {
413  QSVFramesContext *s = ctx->internal->priv;
414  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
415  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
416  int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
417 
418  mfxFrameAllocator frame_allocator = {
419  .pthis = ctx,
420  .Alloc = frame_alloc,
421  .Lock = frame_lock,
422  .Unlock = frame_unlock,
423  .GetHDL = frame_get_hdl,
424  .Free = frame_free,
425  };
426 
427  mfxVideoParam par;
428  mfxStatus err;
429 
430  err = MFXInit(device_priv->impl, &device_priv->ver, session);
431  if (err != MFX_ERR_NONE) {
432  av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
433  return AVERROR_UNKNOWN;
434  }
435 
436  if (device_priv->handle) {
437  err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
438  device_priv->handle);
439  if (err != MFX_ERR_NONE)
440  return AVERROR_UNKNOWN;
441  }
442 
443  if (!opaque) {
444  err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
445  if (err != MFX_ERR_NONE)
446  return AVERROR_UNKNOWN;
447  }
448 
449  memset(&par, 0, sizeof(par));
450 
451  if (opaque) {
452  par.ExtParam = s->ext_buffers;
453  par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
454  par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
455  MFX_IOPATTERN_IN_OPAQUE_MEMORY;
456  } else {
457  par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
458  MFX_IOPATTERN_IN_VIDEO_MEMORY;
459  }
460 
461  par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
462  MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
463  par.AsyncDepth = 1;
464 
465  par.vpp.In = frames_hwctx->surfaces[0].Info;
466 
467  /* Apparently VPP requires the frame rate to be set to some value, otherwise
468  * init will fail (probably for the framerate conversion filter). Since we
469  * are only doing data upload/download here, we just invent an arbitrary
470  * value */
471  par.vpp.In.FrameRateExtN = 25;
472  par.vpp.In.FrameRateExtD = 1;
473  par.vpp.Out = par.vpp.In;
474 
475  err = MFXVideoVPP_Init(*session, &par);
476  if (err != MFX_ERR_NONE) {
477  av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
478  "Surface upload/download will not be possible\n");
479  MFXClose(*session);
480  *session = NULL;
481  }
482 
483  return 0;
484 }
485 
487 {
488  QSVFramesContext *s = ctx->internal->priv;
489  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
490 
491  int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
492 
493  uint32_t fourcc;
494  int i, ret;
495 
496  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
497  if (!fourcc) {
498  av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
499  return AVERROR(ENOSYS);
500  }
501 
502  if (!ctx->pool) {
503  ret = qsv_init_pool(ctx, fourcc);
504  if (ret < 0) {
505  av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
506  return ret;
507  }
508  }
509 
510  if (opaque) {
511  s->surface_ptrs = av_mallocz_array(frames_hwctx->nb_surfaces,
512  sizeof(*s->surface_ptrs));
513  if (!s->surface_ptrs)
514  return AVERROR(ENOMEM);
515 
516  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
517  s->surface_ptrs[i] = frames_hwctx->surfaces + i;
518 
519  s->opaque_alloc.In.Surfaces = s->surface_ptrs;
520  s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
521  s->opaque_alloc.In.Type = frames_hwctx->frame_type;
522 
523  s->opaque_alloc.Out = s->opaque_alloc.In;
524 
525  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
526  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
527 
528  s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
529  } else {
530  s->mem_ids = av_mallocz_array(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
531  if (!s->mem_ids)
532  return AVERROR(ENOMEM);
533 
534  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
535  s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
536  }
537 
538  ret = qsv_init_internal_session(ctx, &s->session_download, 0);
539  if (ret < 0)
540  return ret;
541 
542  ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
543  if (ret < 0)
544  return ret;
545 
546  return 0;
547 }
548 
550 {
551  frame->buf[0] = av_buffer_pool_get(ctx->pool);
552  if (!frame->buf[0])
553  return AVERROR(ENOMEM);
554 
555  frame->data[3] = frame->buf[0]->data;
556  frame->format = AV_PIX_FMT_QSV;
557  frame->width = ctx->width;
558  frame->height = ctx->height;
559 
560  return 0;
561 }
562 
565  enum AVPixelFormat **formats)
566 {
567  enum AVPixelFormat *fmts;
568 
569  fmts = av_malloc_array(2, sizeof(*fmts));
570  if (!fmts)
571  return AVERROR(ENOMEM);
572 
573  fmts[0] = ctx->sw_format;
574  fmts[1] = AV_PIX_FMT_NONE;
575 
576  *formats = fmts;
577 
578  return 0;
579 }
580 
582  AVHWFramesContext *src_ctx, int flags)
583 {
584  AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
585  int i;
586 
587  switch (dst_ctx->device_ctx->type) {
588 #if CONFIG_VAAPI
590  {
591  AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
592  dst_hwctx->surface_ids = av_mallocz_array(src_hwctx->nb_surfaces,
593  sizeof(*dst_hwctx->surface_ids));
594  if (!dst_hwctx->surface_ids)
595  return AVERROR(ENOMEM);
596  for (i = 0; i < src_hwctx->nb_surfaces; i++)
597  dst_hwctx->surface_ids[i] =
598  *(VASurfaceID*)src_hwctx->surfaces[i].Data.MemId;
599  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
600  }
601  break;
602 #endif
603 #if CONFIG_DXVA2
605  {
606  AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
607  dst_hwctx->surfaces = av_mallocz_array(src_hwctx->nb_surfaces,
608  sizeof(*dst_hwctx->surfaces));
609  if (!dst_hwctx->surfaces)
610  return AVERROR(ENOMEM);
611  for (i = 0; i < src_hwctx->nb_surfaces; i++)
612  dst_hwctx->surfaces[i] =
613  (IDirect3DSurface9*)src_hwctx->surfaces[i].Data.MemId;
614  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
615  if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
616  dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
617  else
618  dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
619  }
620  break;
621 #endif
622  default:
623  return AVERROR(ENOSYS);
624  }
625 
626  return 0;
627 }
628 
630  AVFrame *dst, const AVFrame *src, int flags)
631 {
632  QSVFramesContext *s = ctx->internal->priv;
633  mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
634  AVHWFramesContext *child_frames_ctx;
635  const AVPixFmtDescriptor *desc;
636  uint8_t *child_data;
637  AVFrame *dummy;
638  int ret = 0;
639 
640  if (!s->child_frames_ref)
641  return AVERROR(ENOSYS);
642  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
643 
644  switch (child_frames_ctx->device_ctx->type) {
645 #if CONFIG_VAAPI
647  child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)surf->Data.MemId;
648  break;
649 #endif
650 #if CONFIG_DXVA2
652  child_data = surf->Data.MemId;
653  break;
654 #endif
655  default:
656  return AVERROR(ENOSYS);
657  }
658 
659  if (dst->format == child_frames_ctx->format) {
661  dst, src, NULL, NULL);
662  if (ret < 0)
663  return ret;
664 
665  dst->width = src->width;
666  dst->height = src->height;
667  dst->data[3] = child_data;
668 
669  return 0;
670  }
671 
673  if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
674  // This only supports mapping to software.
675  return AVERROR(ENOSYS);
676  }
677 
678  dummy = av_frame_alloc();
679  if (!dummy)
680  return AVERROR(ENOMEM);
681 
682  dummy->buf[0] = av_buffer_ref(src->buf[0]);
683  dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
684  if (!dummy->buf[0] || !dummy->hw_frames_ctx)
685  goto fail;
686 
687  dummy->format = child_frames_ctx->format;
688  dummy->width = src->width;
689  dummy->height = src->height;
690  dummy->data[3] = child_data;
691 
692  ret = av_hwframe_map(dst, dummy, flags);
693 
694 fail:
696 
697  return ret;
698 }
699 
701  const AVFrame *src)
702 {
703  QSVFramesContext *s = ctx->internal->priv;
704  AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
705  int download = !!src->hw_frames_ctx;
706  mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
707 
708  AVFrame *dummy;
709  int ret;
710 
711  dummy = av_frame_alloc();
712  if (!dummy)
713  return AVERROR(ENOMEM);
714 
715  dummy->format = child_frames_ctx->format;
716  dummy->width = src->width;
717  dummy->height = src->height;
718  dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
719  dummy->data[3] = surf->Data.MemId;
720  dummy->hw_frames_ctx = s->child_frames_ref;
721 
722  ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
723  av_hwframe_transfer_data(dummy, src, 0);
724 
725  dummy->buf[0] = NULL;
726  dummy->data[3] = NULL;
727  dummy->hw_frames_ctx = NULL;
728 
729  av_frame_free(&dummy);
730 
731  return ret;
732 }
733 
735  const AVFrame *src)
736 {
737  QSVFramesContext *s = ctx->internal->priv;
738  mfxFrameSurface1 out = {{ 0 }};
739  mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
740 
741  mfxSyncPoint sync = NULL;
742  mfxStatus err;
743 
744  if (!s->session_download) {
745  if (s->child_frames_ref)
746  return qsv_transfer_data_child(ctx, dst, src);
747 
748  av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
749  return AVERROR(ENOSYS);
750  }
751 
752  out.Info = in->Info;
753  out.Data.PitchLow = dst->linesize[0];
754  out.Data.Y = dst->data[0];
755  out.Data.U = dst->data[1];
756  out.Data.V = dst->data[2];
757  out.Data.A = dst->data[3];
758 
759  do {
760  err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
761  if (err == MFX_WRN_DEVICE_BUSY)
762  av_usleep(1);
763  } while (err == MFX_WRN_DEVICE_BUSY);
764 
765  if (err < 0 || !sync) {
766  av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
767  return AVERROR_UNKNOWN;
768  }
769 
770  do {
771  err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
772  } while (err == MFX_WRN_IN_EXECUTION);
773  if (err < 0) {
774  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
775  return AVERROR_UNKNOWN;
776  }
777 
778  return 0;
779 }
780 
782  const AVFrame *src)
783 {
784  QSVFramesContext *s = ctx->internal->priv;
785  mfxFrameSurface1 in = {{ 0 }};
786  mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
787 
788  mfxSyncPoint sync = NULL;
789  mfxStatus err;
790 
791  if (!s->session_upload) {
792  if (s->child_frames_ref)
793  return qsv_transfer_data_child(ctx, dst, src);
794 
795  av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
796  return AVERROR(ENOSYS);
797  }
798 
799  in.Info = out->Info;
800  in.Data.PitchLow = src->linesize[0];
801  in.Data.Y = src->data[0];
802  in.Data.U = src->data[1];
803  in.Data.V = src->data[2];
804  in.Data.A = src->data[3];
805 
806  do {
807  err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
808  if (err == MFX_WRN_DEVICE_BUSY)
809  av_usleep(1);
810  } while (err == MFX_WRN_DEVICE_BUSY);
811 
812  if (err < 0 || !sync) {
813  av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
814  return AVERROR_UNKNOWN;
815  }
816 
817  do {
818  err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
819  } while (err == MFX_WRN_IN_EXECUTION);
820  if (err < 0) {
821  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
822  return AVERROR_UNKNOWN;
823  }
824 
825  return 0;
826 }
827 
829  AVHWFramesContext *src_ctx, int flags)
830 {
831  QSVFramesContext *s = dst_ctx->internal->priv;
832  AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
833  int i;
834 
835  switch (src_ctx->device_ctx->type) {
836 #if CONFIG_VAAPI
838  {
839  AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
841  sizeof(*s->surfaces_internal));
842  if (!s->surfaces_internal)
843  return AVERROR(ENOMEM);
844  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
845  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
846  s->surfaces_internal[i].Data.MemId = src_hwctx->surface_ids + i;
847  }
848  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
849  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
850  }
851  break;
852 #endif
853 #if CONFIG_DXVA2
855  {
856  AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
858  sizeof(*s->surfaces_internal));
859  if (!s->surfaces_internal)
860  return AVERROR(ENOMEM);
861  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
862  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
863  s->surfaces_internal[i].Data.MemId = (mfxMemId)src_hwctx->surfaces[i];
864  }
865  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
866  if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
867  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
868  else
869  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
870  }
871  break;
872 #endif
873  default:
874  return AVERROR(ENOSYS);
875  }
876 
877  dst_hwctx->surfaces = s->surfaces_internal;
878 
879  return 0;
880 }
881 
882 static int qsv_map_to(AVHWFramesContext *dst_ctx,
883  AVFrame *dst, const AVFrame *src, int flags)
884 {
885  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
886  int i, err;
887 
888  for (i = 0; i < hwctx->nb_surfaces; i++) {
889 #if CONFIG_VAAPI
890  if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
891  (VASurfaceID)(uintptr_t)src->data[3])
892  break;
893 #endif
894 #if CONFIG_DXVA2
895  if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
896  (IDirect3DSurface9*)(uintptr_t)src->data[3])
897  break;
898 #endif
899  }
900  if (i >= hwctx->nb_surfaces) {
901  av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
902  "is not in the mapped frames context.\n");
903  return AVERROR(EINVAL);
904  }
905 
907  dst, src, NULL, NULL);
908  if (err)
909  return err;
910 
911  dst->width = src->width;
912  dst->height = src->height;
913  dst->data[3] = (uint8_t*)&hwctx->surfaces[i];
914 
915  return 0;
916 }
917 
919  const void *hwconfig,
920  AVHWFramesConstraints *constraints)
921 {
922  int i;
923 
925  sizeof(*constraints->valid_sw_formats));
926  if (!constraints->valid_sw_formats)
927  return AVERROR(ENOMEM);
928 
929  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
930  constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
931  constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
932 
933  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
934  if (!constraints->valid_hw_formats)
935  return AVERROR(ENOMEM);
936 
937  constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
938  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
939 
940  return 0;
941 }
942 
944 {
945  AVQSVDeviceContext *hwctx = ctx->hwctx;
946  QSVDevicePriv *priv = ctx->user_opaque;
947 
948  if (hwctx->session)
949  MFXClose(hwctx->session);
950 
952  av_freep(&priv);
953 }
954 
955 static mfxIMPL choose_implementation(const char *device)
956 {
957  static const struct {
958  const char *name;
959  mfxIMPL impl;
960  } impl_map[] = {
961  { "auto", MFX_IMPL_AUTO },
962  { "sw", MFX_IMPL_SOFTWARE },
963  { "hw", MFX_IMPL_HARDWARE },
964  { "auto_any", MFX_IMPL_AUTO_ANY },
965  { "hw_any", MFX_IMPL_HARDWARE_ANY },
966  { "hw2", MFX_IMPL_HARDWARE2 },
967  { "hw3", MFX_IMPL_HARDWARE3 },
968  { "hw4", MFX_IMPL_HARDWARE4 },
969  };
970 
971  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
972  int i;
973 
974  if (device) {
975  for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
976  if (!strcmp(device, impl_map[i].name)) {
977  impl = impl_map[i].impl;
978  break;
979  }
980  if (i == FF_ARRAY_ELEMS(impl_map))
981  impl = strtol(device, NULL, 0);
982  }
983 
984  return impl;
985 }
986 
988  mfxIMPL implementation,
990  int flags)
991 {
992  AVQSVDeviceContext *hwctx = ctx->hwctx;
993 
994  mfxVersion ver = { { 3, 1 } };
995  mfxHDL handle;
996  mfxHandleType handle_type;
997  mfxStatus err;
998  int ret;
999 
1000  switch (child_device_ctx->type) {
1001 #if CONFIG_VAAPI
1003  {
1004  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1005  handle_type = MFX_HANDLE_VA_DISPLAY;
1006  handle = (mfxHDL)child_device_hwctx->display;
1007  }
1008  break;
1009 #endif
1010 #if CONFIG_DXVA2
1012  {
1013  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1014  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1015  handle = (mfxHDL)child_device_hwctx->devmgr;
1016  }
1017  break;
1018 #endif
1019  default:
1020  ret = AVERROR(ENOSYS);
1021  goto fail;
1022  }
1023 
1024  err = MFXInit(implementation, &ver, &hwctx->session);
1025  if (err != MFX_ERR_NONE) {
1026  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1027  "%d.\n", err);
1028  ret = AVERROR_UNKNOWN;
1029  goto fail;
1030  }
1031 
1032  err = MFXQueryVersion(hwctx->session, &ver);
1033  if (err != MFX_ERR_NONE) {
1034  av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
1035  ret = AVERROR_UNKNOWN;
1036  goto fail;
1037  }
1038 
1039  av_log(ctx, AV_LOG_VERBOSE,
1040  "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
1041  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
1042 
1043  MFXClose(hwctx->session);
1044 
1045  err = MFXInit(implementation, &ver, &hwctx->session);
1046  if (err != MFX_ERR_NONE) {
1047  av_log(ctx, AV_LOG_ERROR,
1048  "Error initializing an MFX session: %d.\n", err);
1049  ret = AVERROR_UNKNOWN;
1050  goto fail;
1051  }
1052 
1053  err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
1054  if (err != MFX_ERR_NONE) {
1055  av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
1056  "%d\n", err);
1057  ret = AVERROR_UNKNOWN;
1058  goto fail;
1059  }
1060 
1061  ret = MFXQueryVersion(hwctx->session,&ver);
1062  if (ret == MFX_ERR_NONE) {
1063  av_log(ctx, AV_LOG_VERBOSE, "MFX compile/runtime API: %d.%d/%d.%d\n",
1064  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
1065  }
1066  return 0;
1067 
1068 fail:
1069  if (hwctx->session)
1070  MFXClose(hwctx->session);
1071  return ret;
1072 }
1073 
1076 {
1077  return qsv_device_derive_from_child(ctx, MFX_IMPL_HARDWARE_ANY,
1078  child_device_ctx, flags);
1079 }
1080 
1081 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
1082  AVDictionary *opts, int flags)
1083 {
1084  QSVDevicePriv *priv;
1085  enum AVHWDeviceType child_device_type;
1086  AVHWDeviceContext *child_device;
1087  AVDictionaryEntry *e;
1088 
1089  mfxIMPL impl;
1090  int ret;
1091 
1092  priv = av_mallocz(sizeof(*priv));
1093  if (!priv)
1094  return AVERROR(ENOMEM);
1095 
1096  ctx->user_opaque = priv;
1097  ctx->free = qsv_device_free;
1098 
1099  e = av_dict_get(opts, "child_device", NULL, 0);
1100 
1101  if (CONFIG_VAAPI)
1102  child_device_type = AV_HWDEVICE_TYPE_VAAPI;
1103  else if (CONFIG_DXVA2)
1104  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
1105  else {
1106  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1107  return AVERROR(ENOSYS);
1108  }
1109 
1110  ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
1111  e ? e->value : NULL, NULL, 0);
1112  if (ret < 0)
1113  return ret;
1114 
1115  child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
1116 
1117  impl = choose_implementation(device);
1118 
1119  return qsv_device_derive_from_child(ctx, impl, child_device, 0);
1120 }
1121 
1124  .name = "QSV",
1125 
1126  .device_hwctx_size = sizeof(AVQSVDeviceContext),
1127  .device_priv_size = sizeof(QSVDeviceContext),
1128  .frames_hwctx_size = sizeof(AVQSVFramesContext),
1129  .frames_priv_size = sizeof(QSVFramesContext),
1130 
1131  .device_create = qsv_device_create,
1132  .device_derive = qsv_device_derive,
1133  .device_init = qsv_device_init,
1134  .frames_get_constraints = qsv_frames_get_constraints,
1135  .frames_init = qsv_frames_init,
1136  .frames_uninit = qsv_frames_uninit,
1137  .frames_get_buffer = qsv_get_buffer,
1138  .transfer_get_formats = qsv_transfer_get_formats,
1139  .transfer_data_to = qsv_transfer_data_to,
1140  .transfer_data_from = qsv_transfer_data_from,
1141  .map_to = qsv_map_to,
1142  .map_from = qsv_map_from,
1143  .frames_derive_to = qsv_frames_derive_to,
1144  .frames_derive_from = qsv_frames_derive_from,
1145 
1146  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
1147 };
const char * name
Definition: avisynth_c.h:775
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:60
static const struct @257 supported_pixel_formats[]
#define NULL
Definition: coverity.c:32
const char * s
Definition: avisynth_c.h:768
uint32_t fourcc
Definition: hwcontext_qsv.c:90
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it...
Definition: buffer.c:125
int size
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2363
VAAPI-specific data associated with a frame pool.
This structure describes decoded (raw) audio or video data.
Definition: frame.h:218
static int qsv_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
mfxFrameSurface1 * surfaces_internal
Definition: hwcontext_qsv.c:62
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:49
mfxExtBuffer * ext_buffers[1]
Definition: hwcontext_qsv.c:71
This struct is allocated as AVHWFramesContext.hwctx.
Memory handling functions.
mfxHandleType handle_type
Definition: hwcontext_qsv.c:75
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:410
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:42
const char * desc
Definition: nvenc.c:65
An API-specific header for AV_HWDEVICE_TYPE_DXVA2.
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:228
mfxMemId * mem_ids
Definition: hwcontext_qsv.c:66
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:208
mfxHandleType handle_type
Definition: hwcontext_qsv.c:49
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
mfxVersion ver
Definition: hwcontext_qsv.c:50
#define src
Definition: vp8dsp.c:254
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
uint8_t log2_chroma_w
Amount to shift the luma width right to find the chroma width.
Definition: pixdesc.h:92
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
API-specific header for AV_HWDEVICE_TYPE_VAAPI.
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame...
Definition: frame.h:556
#define AV_PIX_FMT_P010
Definition: pixfmt.h:413
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
AVBufferPool * pool_internal
static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
enum AVHWDeviceType type
AVComponentDescriptor comp[4]
Parameters that describe how pixels are packed.
Definition: pixdesc.h:117
uint8_t
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:189
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:73
DWORD surface_type
The surface type (e.g.
#define CONFIG_DXVA2
Definition: config.h:513
enum AVHWDeviceType child_device_type
Definition: hwcontext_qsv.c:53
static int qsv_map_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
static AVFrame * frame
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:91
const char data[16]
Definition: mxf.c:90
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:40
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:571
static int flags
Definition: log.c:55
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:192
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
static int qsv_frames_init(AVHWFramesContext *ctx)
#define FFALIGN(x, a)
Definition: macros.h:48
#define av_log(a,...)
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
int width
Definition: frame.h:276
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
uint8_t log2_chroma_h
Amount to shift the luma height right to find the chroma height.
Definition: pixdesc.h:101
#define AVERROR(e)
Definition: error.h:43
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:202
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:85
enum AVHWDeviceType device_type
Definition: hwcontext_qsv.c:76
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:140
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:236
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
IDirect3DDeviceManager9 * devmgr
AVBufferRef * av_buffer_create(uint8_t *data, int size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:28
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:329
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:78
static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
#define fail()
Definition: checkasm.h:116
static int qsv_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
Copy data to or from a hw surface.
Definition: hwcontext.c:439
#define CONFIG_VAAPI
Definition: config.h:517
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:198
AVDictionary * opts
Definition: movenc.c:50
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:148
AVFormatContext * ctx
Definition: movenc.c:48
mfxFrameSurface1 ** surface_ptrs
Definition: hwcontext_qsv.c:68
int dummy
Definition: motion.c:64
mfxSession session_download
Definition: hwcontext_qsv.c:58
if(ret< 0)
Definition: vf_mcdeint.c:279
AVBufferPool * av_buffer_pool_init2(int size, void *opaque, AVBufferRef *(*alloc)(void *opaque, int size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:218
#define FF_ARRAY_ELEMS(a)
VADisplay display
The VADisplay handle, to be filled by the user.
mfxSession session
Definition: hwcontext_qsv.h:36
static void qsv_frames_uninit(AVHWFramesContext *ctx)
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames...
Definition: frame.h:291
AVBufferRef * av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
Allocate an AVHWDeviceContext for a given hardware type.
Definition: hwcontext.c:138
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
const HWContextType ff_hwcontext_type_qsv
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:432
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:249
mfxFrameSurface1 * surfaces
Definition: hwcontext_qsv.h:43
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:81
uint8_t * data
The data buffer.
Definition: buffer.h:89
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:161
uint8_t pi<< 24) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0f/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S16, int16_t,(*(const int16_t *) pi >> 8)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0f/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S32, int32_t,(*(const int32_t *) pi >> 24)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0f/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_FLT, float, av_clip_uint8(lrintf(*(const float *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, float, av_clip_int16(lrintf(*(const float *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, float, av_clipl_int32(llrintf(*(const float *) pi *(1U<< 31)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_DBL, double, av_clip_uint8(lrint(*(const double *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_DBL, double, av_clip_int16(lrint(*(const double *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_DBL, double, av_clipl_int32(llrint(*(const double *) pi *(1U<< 31)))) #define SET_CONV_FUNC_GROUP(ofmt, ifmt) static void set_generic_function(AudioConvert *ac) { } void ff_audio_convert_free(AudioConvert **ac) { if(! 
*ac) return;ff_dither_free(&(*ac) ->dc);av_freep(ac);} AudioConvert *ff_audio_convert_alloc(AVAudioResampleContext *avr, enum AVSampleFormat out_fmt, enum AVSampleFormat in_fmt, int channels, int sample_rate, int apply_map) { AudioConvert *ac;int in_planar, out_planar;ac=av_mallocz(sizeof(*ac));if(!ac) return NULL;ac->avr=avr;ac->out_fmt=out_fmt;ac->in_fmt=in_fmt;ac->channels=channels;ac->apply_map=apply_map;if(avr->dither_method !=AV_RESAMPLE_DITHER_NONE &&av_get_packed_sample_fmt(out_fmt)==AV_SAMPLE_FMT_S16 &&av_get_bytes_per_sample(in_fmt) > 2) { ac->dc=ff_dither_alloc(avr, out_fmt, in_fmt, channels, sample_rate, apply_map);if(!ac->dc) { av_free(ac);return NULL;} return ac;} in_planar=ff_sample_fmt_is_planar(in_fmt, channels);out_planar=ff_sample_fmt_is_planar(out_fmt, channels);if(in_planar==out_planar) { ac->func_type=CONV_FUNC_TYPE_FLAT;ac->planes=in_planar ? ac->channels :1;} else if(in_planar) ac->func_type=CONV_FUNC_TYPE_INTERLEAVE;else ac->func_type=CONV_FUNC_TYPE_DEINTERLEAVE;set_generic_function(ac);if(ARCH_AARCH64) ff_audio_convert_init_aarch64(ac);if(ARCH_ARM) ff_audio_convert_init_arm(ac);if(ARCH_X86) ff_audio_convert_init_x86(ac);return ac;} int ff_audio_convert(AudioConvert *ac, AudioData *out, AudioData *in) { int use_generic=1;int len=in->nb_samples;int p;if(ac->dc) { av_log(ac->avr, AV_LOG_TRACE, "%d samples - audio_convert: %s to %s (dithered)\", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt));return ff_convert_dither(ac-> in
int av_hwdevice_ctx_init(AVBufferRef *ref)
Finalize the device context before use.
Definition: hwcontext.c:196
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
Definition: hwcontext.c:689
static int qsv_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
static int qsv_init_internal_session(AVHWFramesContext *ctx, mfxSession *session, int upload)
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:123
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:218
static mfxIMPL choose_implementation(const char *device)
refcounted data buffer API
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:437
mfxExtOpaqueSurfaceAlloc opaque_alloc
Definition: hwcontext_qsv.c:70
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer. ...
Definition: pixfmt.h:133
AVHWFramesInternal * internal
Private data used internally by libavutil.
Definition: hwcontext.h:133
static AVBufferRef * qsv_pool_alloc(void *opaque, int size)
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:232
void * user_opaque
Arbitrary user data, to be used e.g.
Definition: hwcontext.h:108
static int qsv_device_derive_from_child(AVHWDeviceContext *ctx, mfxIMPL implementation, AVHWDeviceContext *child_device_ctx, int flags)
A reference to a data buffer.
Definition: buffer.h:81
static void qsv_device_free(AVHWDeviceContext *ctx)
common internal and external API header
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
Definition: hwcontext.c:741
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:243
static int qsv_device_derive(AVHWDeviceContext *ctx, AVHWDeviceContext *child_device_ctx, int flags)
AVBufferRef * av_buffer_ref(AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:93
static int qsv_device_init(AVHWDeviceContext *ctx)
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:71
enum AVPixelFormat child_pix_fmt
Definition: hwcontext_qsv.c:54
IDirect3DSurface9 ** surfaces
The surface pool.
AVHWFrameTransferDirection
Definition: hwcontext.h:394
pixel format definitions
AVBufferPool * pool
A pool from which the frames are allocated by av_hwframe_get_buffer().
Definition: hwcontext.h:189
AVHWDeviceType
Definition: hwcontext.h:27
This struct is allocated as AVHWDeviceContext.hwctx.
char * value
Definition: dict.h:87
static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
Definition: hwcontext_qsv.c:97
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:444
VAAPI connection details.
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
void(* free)(struct AVHWDeviceContext *ctx)
This field may be set by the caller before calling av_hwdevice_ctx_init().
Definition: hwcontext.h:103
int height
Definition: frame.h:276
FILE * out
Definition: movenc.c:54
#define av_freep(p)
VASurfaceID * surface_ids
The surfaces IDs of all surfaces in the pool after creation.
An API-specific header for AV_HWDEVICE_TYPE_QSV.
AVBufferRef * child_device_ctx
Definition: hwcontext_qsv.c:44
AVBufferRef * child_frames_ref
Definition: hwcontext_qsv.c:61
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:334
#define av_malloc_array(a, b)
static const struct @256 supported_handle_types[]
formats
Definition: signature.h:48
AVHWDeviceInternal * internal
Private data used internally by libavutil.
Definition: hwcontext.h:70
int depth
Number of bits in the component.
Definition: pixdesc.h:58
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
static int qsv_map_to(AVHWFramesContext *dst_ctx, AVFrame *dst, const AVFrame *src, int flags)
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:221
AVPixelFormat
Pixel format.
Definition: pixfmt.h:60
enum AVPixelFormat pix_fmt
Definition: hwcontext_qsv.c:77
mfxSession session_upload
Definition: hwcontext_qsv.c:59
void * av_mallocz_array(size_t nmemb, size_t size)
Allocate a memory block for an array with av_mallocz().
Definition: mem.c:191