FFmpeg 4.0
qsvdec.c
/*
 * Intel MediaSDK QSV codec-independent code
 *
 * copyright (c) 2013 Luca Barbato
 * copyright (c) 2015 Anton Khirnov <anton@khirnov.net>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>
#include <sys/types.h>

#include <mfx/mfxvideo.h>

#include "libavutil/common.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_qsv.h"
#include "libavutil/mem.h"
#include "libavutil/log.h"
#include "libavutil/pixdesc.h"
#include "libavutil/pixfmt.h"
#include "libavutil/time.h"

#include "avcodec.h"
#include "internal.h"
#include "qsv.h"
#include "qsv_internal.h"
#include "qsvdec.h"

const AVCodecHWConfigInternal *ff_qsv_hw_configs[] = {
    &(const AVCodecHWConfigInternal) {
        .public = {
            .pix_fmt     = AV_PIX_FMT_QSV,
            .methods     = AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX |
                           AV_CODEC_HW_CONFIG_METHOD_AD_HOC,
            .device_type = AV_HWDEVICE_TYPE_QSV,
        },
        .hwaccel = NULL,
    },
    NULL
};

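/* Select the mfxSession used for decoding: a caller-provided session, a
 * session derived from an AVHWFramesContext or AVHWDeviceContext, or an
 * internally created one. Any decoder previously opened on the chosen
 * session is closed before it is used. */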
static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession session,
                            AVBufferRef *hw_frames_ref, AVBufferRef *hw_device_ref)
{
    int ret;

    if (session) {
        q->session = session;
    } else if (hw_frames_ref) {
        if (q->internal_session) {
            MFXClose(q->internal_session);
            q->internal_session = NULL;
        }
        av_buffer_unref(&q->frames_ctx.hw_frames_ctx);

        q->frames_ctx.hw_frames_ctx = av_buffer_ref(hw_frames_ref);
        if (!q->frames_ctx.hw_frames_ctx)
            return AVERROR(ENOMEM);

        ret = ff_qsv_init_session_frames(avctx, &q->internal_session,
                                         &q->frames_ctx, q->load_plugins,
                                         q->iopattern == MFX_IOPATTERN_OUT_OPAQUE_MEMORY);
        if (ret < 0) {
            av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
            return ret;
        }

        q->session = q->internal_session;
    } else if (hw_device_ref) {
        if (q->internal_session) {
            MFXClose(q->internal_session);
            q->internal_session = NULL;
        }

        ret = ff_qsv_init_session_device(avctx, &q->internal_session,
                                         hw_device_ref, q->load_plugins);
        if (ret < 0)
            return ret;

        q->session = q->internal_session;
    } else {
        if (!q->internal_session) {
            ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
                                               q->load_plugins);
            if (ret < 0)
                return ret;
        }

        q->session = q->internal_session;
    }

    /* make sure the decoder is uninitialized */
    MFXVideoDECODE_Close(q->session);

    return 0;
}

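/* Configure and initialize the MFX decoder: allocate the async fifo, pick
 * the IO pattern (opaque/video/system memory), set up a session and fill
 * the mfxVideoParam from the codec context before MFXVideoDECODE_Init(). */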
static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
{
    const AVPixFmtDescriptor *desc;
    mfxSession session = NULL;
    int iopattern = 0;
    mfxVideoParam param = { 0 };
    int frame_width  = avctx->coded_width;
    int frame_height = avctx->coded_height;
    int ret;

    desc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
    if (!desc)
        return AVERROR_BUG;

    if (!q->async_fifo) {
        q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
                                      (sizeof(mfxSyncPoint*) + sizeof(QSVFrame*)));
        if (!q->async_fifo)
            return AVERROR(ENOMEM);
    }

    if (avctx->pix_fmt == AV_PIX_FMT_QSV && avctx->hwaccel_context) {
        AVQSVContext *user_ctx = avctx->hwaccel_context;
        session           = user_ctx->session;
        iopattern         = user_ctx->iopattern;
        q->ext_buffers    = user_ctx->ext_buffers;
        q->nb_ext_buffers = user_ctx->nb_ext_buffers;
    }

    if (avctx->hw_frames_ctx) {
        AVHWFramesContext    *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;

        if (!iopattern) {
            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
                iopattern = MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
            else if (frames_hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
                iopattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
        }
    }

    if (!iopattern)
        iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    q->iopattern = iopattern;

    ret = qsv_init_session(avctx, q, session, avctx->hw_frames_ctx, avctx->hw_device_ctx);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing an MFX session\n");
        return ret;
    }

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return ret;

    param.mfx.CodecId      = ret;
    param.mfx.CodecProfile = ff_qsv_profile_to_mfx(avctx->codec_id, avctx->profile);
    param.mfx.CodecLevel   = avctx->level == FF_LEVEL_UNKNOWN ? MFX_LEVEL_UNKNOWN : avctx->level;

    param.mfx.FrameInfo.BitDepthLuma   = desc->comp[0].depth;
    param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
    param.mfx.FrameInfo.Shift          = desc->comp[0].depth > 8;
    param.mfx.FrameInfo.FourCC         = q->fourcc;
    param.mfx.FrameInfo.Width          = frame_width;
    param.mfx.FrameInfo.Height         = frame_height;
    param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;

    switch (avctx->field_order) {
    case AV_FIELD_PROGRESSIVE:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
        break;
    case AV_FIELD_TT:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
        break;
    case AV_FIELD_BB:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_BFF;
        break;
    default:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_UNKNOWN;
        break;
    }

    param.IOPattern   = q->iopattern;
    param.AsyncDepth  = q->async_depth;
    param.ExtParam    = q->ext_buffers;
    param.NumExtParam = q->nb_ext_buffers;

    ret = MFXVideoDECODE_Init(q->session, &param);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error initializing the MFX video decoder");

    q->frame_info = param.mfx.FrameInfo;

    return 0;
}

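/* Bind an AVFrame to a QSVFrame: get a buffer from the caller, map it onto
 * an mfxFrameSurface1 (directly for AV_PIX_FMT_QSV, via the Data pointers
 * otherwise) and attach the decoded-frame-info ext buffer. */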
static int alloc_frame(AVCodecContext *avctx, QSVContext *q, QSVFrame *frame)
{
    int ret;

    ret = ff_get_buffer(avctx, frame->frame, AV_GET_BUFFER_FLAG_REF);
    if (ret < 0)
        return ret;

    if (frame->frame->format == AV_PIX_FMT_QSV) {
        frame->surface = *(mfxFrameSurface1*)frame->frame->data[3];
    } else {
        frame->surface.Info = q->frame_info;

        frame->surface.Data.PitchLow = frame->frame->linesize[0];
        frame->surface.Data.Y        = frame->frame->data[0];
        frame->surface.Data.UV       = frame->frame->data[1];
    }

    if (q->frames_ctx.mids) {
        ret = ff_qsv_find_surface_idx(&q->frames_ctx, frame);
        if (ret < 0)
            return ret;

        frame->surface.Data.MemId = &q->frames_ctx.mids[ret];
    }
    frame->surface.Data.ExtParam    = &frame->ext_param;
    frame->surface.Data.NumExtParam = 1;
    frame->ext_param                = (mfxExtBuffer*)&frame->dec_info;
    frame->dec_info.Header.BufferId = MFX_EXTBUFF_DECODED_FRAME_INFO;
    frame->dec_info.Header.BufferSz = sizeof(frame->dec_info);

    frame->used = 1;

    return 0;
}

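/* Release work frames that the SDK no longer locks and that are not queued
 * in the async fifo, so their buffers can be reused. */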
static void qsv_clear_unused_frames(QSVContext *q)
{
    QSVFrame *cur = q->work_frames;
    while (cur) {
        if (cur->used && !cur->surface.Data.Locked && !cur->queued) {
            cur->used = 0;
            av_frame_unref(cur->frame);
        }
        cur = cur->next;
    }
}

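/* Return a free input surface for DecodeFrameAsync(), reusing an unused
 * work frame when possible and growing the work_frames list otherwise. */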
static int get_surface(AVCodecContext *avctx, QSVContext *q, mfxFrameSurface1 **surf)
{
    QSVFrame *frame, **last;
    int ret;

    qsv_clear_unused_frames(q);

    frame = q->work_frames;
    last  = &q->work_frames;
    while (frame) {
        if (!frame->used) {
            ret = alloc_frame(avctx, q, frame);
            if (ret < 0)
                return ret;
            *surf = &frame->surface;
            return 0;
        }

        last  = &frame->next;
        frame = frame->next;
    }

    frame = av_mallocz(sizeof(*frame));
    if (!frame)
        return AVERROR(ENOMEM);
    frame->frame = av_frame_alloc();
    if (!frame->frame) {
        av_freep(&frame);
        return AVERROR(ENOMEM);
    }
    *last = frame;

    ret = alloc_frame(avctx, q, frame);
    if (ret < 0)
        return ret;

    *surf = &frame->surface;

    return 0;
}

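/* Map an output surface returned by the SDK back to the QSVFrame that owns it. */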
static QSVFrame *find_frame(QSVContext *q, mfxFrameSurface1 *surf)
{
    QSVFrame *cur = q->work_frames;
    while (cur) {
        if (surf == &cur->surface)
            return cur;
        cur = cur->next;
    }
    return NULL;
}

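/* Feed one packet to MFXVideoDECODE_DecodeFrameAsync() and, once the async
 * fifo is full or we are draining, sync and output the oldest decoded frame.
 * Returns the number of bytes consumed from the packet. */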
static int qsv_decode(AVCodecContext *avctx, QSVContext *q,
                      AVFrame *frame, int *got_frame,
                      AVPacket *avpkt)
{
    QSVFrame *out_frame;
    mfxFrameSurface1 *insurf;
    mfxFrameSurface1 *outsurf;
    mfxSyncPoint *sync;
    mfxBitstream bs = { { { 0 } } };
    int ret;

    if (avpkt->size) {
        bs.Data       = avpkt->data;
        bs.DataLength = avpkt->size;
        bs.MaxLength  = bs.DataLength;
        bs.TimeStamp  = avpkt->pts;
        if (avctx->field_order == AV_FIELD_PROGRESSIVE)
            bs.DataFlag |= MFX_BITSTREAM_COMPLETE_FRAME;
    }

    sync = av_mallocz(sizeof(*sync));
    if (!sync) {
        av_freep(&sync);
        return AVERROR(ENOMEM);
    }

    do {
        ret = get_surface(avctx, q, &insurf);
        if (ret < 0) {
            av_freep(&sync);
            return ret;
        }

        ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL,
                                              insurf, &outsurf, sync);
        if (ret == MFX_WRN_DEVICE_BUSY)
            av_usleep(500);

    } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_ERR_MORE_SURFACE);

    if (ret != MFX_ERR_NONE &&
        ret != MFX_ERR_MORE_DATA &&
        ret != MFX_WRN_VIDEO_PARAM_CHANGED &&
        ret != MFX_ERR_MORE_SURFACE) {
        av_freep(&sync);
        return ff_qsv_print_error(avctx, ret,
                                  "Error during QSV decoding.");
    }

    /* make sure we do not enter an infinite loop if the SDK
     * did not consume any data and did not return anything */
    if (!*sync && !bs.DataOffset) {
        bs.DataOffset = avpkt->size;
        ++q->zero_consume_run;
        if (q->zero_consume_run > 1)
            ff_qsv_print_warning(avctx, ret, "A decode call did not consume any data");
    } else {
        q->zero_consume_run = 0;
    }

    if (*sync) {
        QSVFrame *out_frame = find_frame(q, outsurf);

        if (!out_frame) {
            av_log(avctx, AV_LOG_ERROR,
                   "The returned surface does not correspond to any frame\n");
            av_freep(&sync);
            return AVERROR_BUG;
        }

        out_frame->queued = 1;
        av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_write(q->async_fifo, &sync,      sizeof(sync),      NULL);
    } else {
        av_freep(&sync);
    }

    if (!av_fifo_space(q->async_fifo) ||
        (!avpkt->size && av_fifo_size(q->async_fifo))) {
        AVFrame *src_frame;

        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_read(q->async_fifo, &sync,      sizeof(sync),      NULL);
        out_frame->queued = 0;

        if (avctx->pix_fmt != AV_PIX_FMT_QSV) {
            do {
                ret = MFXVideoCORE_SyncOperation(q->session, *sync, 1000);
            } while (ret == MFX_WRN_IN_EXECUTION);
        }

        av_freep(&sync);

        src_frame = out_frame->frame;

        ret = av_frame_ref(frame, src_frame);
        if (ret < 0)
            return ret;

        outsurf = &out_frame->surface;

#if FF_API_PKT_PTS
FF_DISABLE_DEPRECATION_WARNINGS
        frame->pkt_pts = outsurf->Data.TimeStamp;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
        frame->pts = outsurf->Data.TimeStamp;

        frame->repeat_pict =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_TRIPLING ? 4 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_DOUBLING ? 2 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_REPEATED ? 1 : 0;
        frame->top_field_first =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF;
        frame->interlaced_frame =
            !(outsurf->Info.PicStruct & MFX_PICSTRUCT_PROGRESSIVE);
        frame->pict_type = ff_qsv_map_pictype(out_frame->dec_info.FrameType);
        /* Deriving the keyframe flag from IDR units is only suitable for
         * H.264; for HEVC, IRAP frames are the key frames. */
        if (avctx->codec_id == AV_CODEC_ID_H264)
            frame->key_frame = !!(out_frame->dec_info.FrameType & MFX_FRAMETYPE_IDR);

        /* update the surface properties */
        if (avctx->pix_fmt == AV_PIX_FMT_QSV)
            ((mfxFrameSurface1*)frame->data[3])->Info = outsurf->Info;

        *got_frame = 1;
    }

    return bs.DataOffset;
}

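/* Free everything owned by the QSVContext: queued sync points, work frames,
 * the parser, the internal codec context and any MFX sessions/frame pools. */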
int ff_qsv_decode_close(QSVContext *q)
{
    QSVFrame *cur = q->work_frames;

    if (q->session)
        MFXVideoDECODE_Close(q->session);

    while (q->async_fifo && av_fifo_size(q->async_fifo)) {
        QSVFrame *out_frame;
        mfxSyncPoint *sync;

        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_read(q->async_fifo, &sync,      sizeof(sync),      NULL);

        av_freep(&sync);
    }

    while (cur) {
        q->work_frames = cur->next;
        av_frame_free(&cur->frame);
        av_freep(&cur);
        cur = q->work_frames;
    }

    av_fifo_free(q->async_fifo);
    q->async_fifo = NULL;

    av_parser_close(q->parser);
    avcodec_free_context(&q->avctx_internal);

    if (q->internal_session)
        MFXClose(q->internal_session);

    av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
    av_buffer_unref(&q->frames_ctx.mids_buf);

    return 0;
}

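/* Entry point used by the QSV decoders: run the parser over the packet to
 * detect format/dimension changes, (re)initialize the decoder when needed,
 * then hand the packet to qsv_decode(). */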
int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
                        AVFrame *frame, int *got_frame, AVPacket *pkt)
{
    uint8_t *dummy_data;
    int dummy_size;
    int ret;
    const AVPixFmtDescriptor *desc;

    if (!q->avctx_internal) {
        q->avctx_internal = avcodec_alloc_context3(NULL);
        if (!q->avctx_internal)
            return AVERROR(ENOMEM);

        q->parser = av_parser_init(avctx->codec_id);
        if (!q->parser)
            return AVERROR(ENOMEM);

        q->parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
        q->orig_pix_fmt   = AV_PIX_FMT_NONE;
    }

    if (!pkt->size)
        return qsv_decode(avctx, q, frame, got_frame, pkt);

    /* we assume the packets are already split properly and want
     * just the codec parameters here */
    av_parser_parse2(q->parser, q->avctx_internal,
                     &dummy_data, &dummy_size,
                     pkt->data, pkt->size, pkt->pts, pkt->dts,
                     pkt->pos);

    avctx->field_order = q->parser->field_order;
    /* TODO: flush delayed frames on reinit */
    if (q->parser->format != q->orig_pix_fmt ||
        FFALIGN(q->parser->coded_width, 16)  != FFALIGN(avctx->coded_width, 16) ||
        FFALIGN(q->parser->coded_height, 16) != FFALIGN(avctx->coded_height, 16)) {
        enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
                                           AV_PIX_FMT_NONE,
                                           AV_PIX_FMT_NONE };
        enum AVPixelFormat qsv_format;

        qsv_format = ff_qsv_map_pixfmt(q->parser->format, &q->fourcc);
        if (qsv_format < 0) {
            av_log(avctx, AV_LOG_ERROR,
                   "Decoding pixel format '%s' is not supported\n",
                   av_get_pix_fmt_name(q->parser->format));
            ret = AVERROR(ENOSYS);
            goto reinit_fail;
        }

        q->orig_pix_fmt     = q->parser->format;
        avctx->pix_fmt      = pix_fmts[1] = qsv_format;
        avctx->width        = q->parser->width;
        avctx->height       = q->parser->height;
        avctx->coded_width  = FFALIGN(q->parser->coded_width,  16);
        avctx->coded_height = FFALIGN(q->parser->coded_height, 16);
        avctx->level        = q->avctx_internal->level;
        avctx->profile      = q->avctx_internal->profile;

        ret = ff_get_format(avctx, pix_fmts);
        if (ret < 0)
            goto reinit_fail;

        avctx->pix_fmt = ret;

        desc = av_pix_fmt_desc_get(avctx->pix_fmt);
        if (!desc)
            goto reinit_fail;

        if (desc->comp[0].depth > 8) {
            avctx->coded_width  = FFALIGN(q->parser->coded_width,  32);
            avctx->coded_height = FFALIGN(q->parser->coded_height, 32);
        }

        ret = qsv_decode_init(avctx, q);
        if (ret < 0)
            goto reinit_fail;
    }

    return qsv_decode(avctx, q, frame, got_frame, pkt);

reinit_fail:
    q->orig_pix_fmt = q->parser->format = avctx->pix_fmt = AV_PIX_FMT_NONE;
    return ret;
}

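/* Reset the remembered pixel format so the next packet triggers a decoder
 * reinitialization. */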
void ff_qsv_decode_flush(AVCodecContext *avctx, QSVContext *q)
{
    q->orig_pix_fmt = AV_PIX_FMT_NONE;
}