qsv.c
1 /*
2  * Intel MediaSDK QSV encoder/decoder shared code
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 #include <mfxvideo.h>
22 #include <mfxjpeg.h>
23 #include <mfxvp8.h>
24 
25 #include <stdio.h>
26 #include <string.h>
27 
28 #include "libavutil/avstring.h"
29 #include "libavutil/common.h"
30 #include "libavutil/error.h"
31 #include "libavutil/hwcontext.h"
32 #include "libavutil/hwcontext_qsv.h"
33 #include "libavutil/avassert.h"
34 #include "libavutil/mem.h"
35 
36 #include "avcodec.h"
37 #include "qsv_internal.h"
38 #include "refstruct.h"
39 
40 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
41 #define QSV_HAVE_USER_PLUGIN !QSV_ONEVPL
42 #define QSV_HAVE_AUDIO !QSV_ONEVPL
43 
44 #if QSV_HAVE_USER_PLUGIN
45 #include <mfxplugin.h>
46 #endif
47 
48 #if QSV_ONEVPL
49 #include <mfxdispatcher.h>
50 #else
51 #define MFXUnload(a) do { } while(0)
52 #endif
53 
54 int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id)
55 {
56  switch (codec_id) {
57  case AV_CODEC_ID_H264:
58  return MFX_CODEC_AVC;
59  case AV_CODEC_ID_HEVC:
60  return MFX_CODEC_HEVC;
61  case AV_CODEC_ID_MPEG1VIDEO:
62  case AV_CODEC_ID_MPEG2VIDEO:
63  return MFX_CODEC_MPEG2;
64  case AV_CODEC_ID_VC1:
65  return MFX_CODEC_VC1;
66  case AV_CODEC_ID_VP8:
67  return MFX_CODEC_VP8;
68  case AV_CODEC_ID_MJPEG:
69  return MFX_CODEC_JPEG;
70  case AV_CODEC_ID_VP9:
71  return MFX_CODEC_VP9;
72 #if QSV_VERSION_ATLEAST(1, 34)
73  case AV_CODEC_ID_AV1:
74  return MFX_CODEC_AV1;
75 #endif
76 
77  default:
78  break;
79  }
80 
81  return AVERROR(ENOSYS);
82 }
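
A minimal usage sketch (not part of the original file): how a caller might resolve the libmfx CodecId before filling an mfxVideoParam. The helper name and its error handling are assumptions.

/* Hypothetical helper: resolve the mfx CodecId for an AVCodecContext. */
static int example_pick_mfx_codec(AVCodecContext *avctx, mfxU32 *mfx_codec_id)
{
    int ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return ret; /* AVERROR(ENOSYS): no MFX codec id for this AVCodecID */
    *mfx_codec_id = ret;
    return 0;
}
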
83 
84 static const struct {
85  int mfx_iopattern;
86  const char *desc;
87 } qsv_iopatterns[] = {
88  {MFX_IOPATTERN_IN_VIDEO_MEMORY, "input is video memory surface" },
89  {MFX_IOPATTERN_IN_SYSTEM_MEMORY, "input is system memory surface" },
90 #if QSV_HAVE_OPAQUE
91  {MFX_IOPATTERN_IN_OPAQUE_MEMORY, "input is opaque memory surface" },
92 #endif
93  {MFX_IOPATTERN_OUT_VIDEO_MEMORY, "output is video memory surface" },
94  {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
95 #if QSV_HAVE_OPAQUE
96  {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
97 #endif
98 };
99 
100 int ff_qsv_print_iopattern(void *log_ctx, int mfx_iopattern,
101  const char *extra_string)
102 {
103  const char *desc = NULL;
104 
105  for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
106  if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
107  desc = qsv_iopatterns[i].desc;
108  }
109  }
110  if (!desc)
111  desc = "unknown iopattern";
112 
113  av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
114  return 0;
115 }
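
A hedged example of how this logger is typically driven; the surrounding encoder context and the extra string are assumptions, not taken from this file.

/* Hypothetical call site: log the IOPattern negotiated for a session. */
static void example_log_iopattern(AVCodecContext *avctx, const mfxVideoParam *param)
{
    ff_qsv_print_iopattern(avctx, param->IOPattern, "Encoder used IOPattern");
}
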
116 
117 static const struct {
118  mfxStatus mfxerr;
119  int averr;
120  const char *desc;
121 } qsv_errors[] = {
122  { MFX_ERR_NONE, 0, "success" },
123  { MFX_ERR_UNKNOWN, AVERROR_UNKNOWN, "unknown error" },
124  { MFX_ERR_NULL_PTR, AVERROR(EINVAL), "NULL pointer" },
125  { MFX_ERR_UNSUPPORTED, AVERROR(ENOSYS), "unsupported" },
126  { MFX_ERR_MEMORY_ALLOC, AVERROR(ENOMEM), "failed to allocate memory" },
127  { MFX_ERR_NOT_ENOUGH_BUFFER, AVERROR(ENOMEM), "insufficient input/output buffer" },
128  { MFX_ERR_INVALID_HANDLE, AVERROR(EINVAL), "invalid handle" },
129  { MFX_ERR_LOCK_MEMORY, AVERROR(EIO), "failed to lock the memory block" },
130  { MFX_ERR_NOT_INITIALIZED, AVERROR_BUG, "not initialized" },
131  { MFX_ERR_NOT_FOUND, AVERROR(ENOSYS), "specified object was not found" },
132  /* the following 3 errors should always be handled explicitly, so those "mappings"
133  * are for completeness only */
134  { MFX_ERR_MORE_DATA, AVERROR_UNKNOWN, "expect more data at input" },
135  { MFX_ERR_MORE_SURFACE, AVERROR_UNKNOWN, "expect more surface at output" },
136  { MFX_ERR_MORE_BITSTREAM, AVERROR_UNKNOWN, "expect more bitstream at output" },
137  { MFX_ERR_ABORTED, AVERROR_UNKNOWN, "operation aborted" },
138  { MFX_ERR_DEVICE_LOST, AVERROR(EIO), "device lost" },
139  { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters" },
140  { MFX_ERR_INVALID_VIDEO_PARAM, AVERROR(EINVAL), "invalid video parameters" },
141  { MFX_ERR_UNDEFINED_BEHAVIOR, AVERROR_BUG, "undefined behavior" },
142  { MFX_ERR_DEVICE_FAILED, AVERROR(EIO), "device failed" },
143 #if QSV_HAVE_AUDIO
144  { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters" },
145  { MFX_ERR_INVALID_AUDIO_PARAM, AVERROR(EINVAL), "invalid audio parameters" },
146 #endif
147  { MFX_ERR_GPU_HANG, AVERROR(EIO), "GPU Hang" },
148  { MFX_ERR_REALLOC_SURFACE, AVERROR_UNKNOWN, "need bigger surface for output" },
149 
150  { MFX_WRN_IN_EXECUTION, 0, "operation in execution" },
151  { MFX_WRN_DEVICE_BUSY, 0, "device busy" },
152  { MFX_WRN_VIDEO_PARAM_CHANGED, 0, "video parameters changed" },
153  { MFX_WRN_PARTIAL_ACCELERATION, 0, "partial acceleration" },
154  { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0, "incompatible video parameters" },
155  { MFX_WRN_VALUE_NOT_CHANGED, 0, "value is saturated" },
156  { MFX_WRN_OUT_OF_RANGE, 0, "value out of range" },
157  { MFX_WRN_FILTER_SKIPPED, 0, "filter skipped" },
158 #if QSV_HAVE_AUDIO
159  { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0, "incompatible audio parameters" },
160 #endif
161 
162 #if QSV_VERSION_ATLEAST(1, 31)
163  { MFX_ERR_NONE_PARTIAL_OUTPUT, 0, "partial output" },
164 #endif
165 };
166 
167 /**
168  * Convert a libmfx error code into an FFmpeg error code.
169  */
170 static int qsv_map_error(mfxStatus mfx_err, const char **desc)
171 {
172  int i;
173  for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
174  if (qsv_errors[i].mfxerr == mfx_err) {
175  if (desc)
176  *desc = qsv_errors[i].desc;
177  return qsv_errors[i].averr;
178  }
179  }
180  if (desc)
181  *desc = "unknown error";
182  return AVERROR_UNKNOWN;
183 }
184 
185 int ff_qsv_print_error(void *log_ctx, mfxStatus err,
186  const char *error_string)
187 {
188  const char *desc;
189  int ret = qsv_map_error(err, &desc);
190  av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
191  return ret;
192 }
193 
194 int ff_qsv_print_warning(void *log_ctx, mfxStatus err,
195  const char *warning_string)
196 {
197  const char *desc;
198  int ret = qsv_map_error(err, &desc);
199  av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
200  return ret;
201 }
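
A small sketch of the usual calling pattern around these two helpers; the wrapper itself is illustrative and not part of FFmpeg.

/* Hypothetical wrapper: map an mfxStatus to an FFmpeg error, logging
 * negative statuses as errors and positive statuses as warnings. */
static int example_check_mfx_status(void *log_ctx, mfxStatus sts, const char *msg)
{
    if (sts < 0)
        return ff_qsv_print_error(log_ctx, sts, msg);
    if (sts > 0)
        ff_qsv_print_warning(log_ctx, sts, msg);
    return 0;
}
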
202 
203 enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc)
204 {
205  switch (fourcc) {
206  case MFX_FOURCC_NV12: return AV_PIX_FMT_NV12;
207  case MFX_FOURCC_P010: return AV_PIX_FMT_P010;
208  case MFX_FOURCC_P8: return AV_PIX_FMT_PAL8;
209  case MFX_FOURCC_A2RGB10: return AV_PIX_FMT_X2RGB10;
210  case MFX_FOURCC_RGB4: return AV_PIX_FMT_BGRA;
211  case MFX_FOURCC_YUY2: return AV_PIX_FMT_YUYV422;
212  case MFX_FOURCC_Y210: return AV_PIX_FMT_Y210;
213  case MFX_FOURCC_AYUV: return AV_PIX_FMT_VUYX;
214  case MFX_FOURCC_Y410: return AV_PIX_FMT_XV30;
215 #if QSV_VERSION_ATLEAST(1, 31)
216  case MFX_FOURCC_P016: return AV_PIX_FMT_P012;
217  case MFX_FOURCC_Y216: return AV_PIX_FMT_Y212;
218  case MFX_FOURCC_Y416: return AV_PIX_FMT_XV36;
219 #endif
220  }
221  return AV_PIX_FMT_NONE;
222 }
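
An illustrative sketch of where this mapping is typically needed; the helper is hypothetical.

/* Hypothetical helper: derive the software pixel format backing a surface;
 * returns AV_PIX_FMT_NONE for FourCCs this file does not know about. */
static enum AVPixelFormat example_surface_sw_format(const mfxFrameSurface1 *surf)
{
    return ff_qsv_map_fourcc(surf->Info.FourCC);
}
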
223 
224 int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc, uint16_t *shift)
225 {
226  switch (format) {
227  case AV_PIX_FMT_YUV420P:
228  case AV_PIX_FMT_YUVJ420P:
229  case AV_PIX_FMT_NV12:
230  *fourcc = MFX_FOURCC_NV12;
231  *shift = 0;
232  return AV_PIX_FMT_NV12;
233  case AV_PIX_FMT_YUV420P10:
234  case AV_PIX_FMT_P010:
235  *fourcc = MFX_FOURCC_P010;
236  *shift = 1;
237  return AV_PIX_FMT_P010;
238  case AV_PIX_FMT_X2RGB10:
239  *fourcc = MFX_FOURCC_A2RGB10;
240  *shift = 1;
241  return AV_PIX_FMT_X2RGB10;
242  case AV_PIX_FMT_BGRA:
243  *fourcc = MFX_FOURCC_RGB4;
244  *shift = 0;
245  return AV_PIX_FMT_BGRA;
246  case AV_PIX_FMT_YUV422P:
247  case AV_PIX_FMT_YUYV422:
248  *fourcc = MFX_FOURCC_YUY2;
249  *shift = 0;
250  return AV_PIX_FMT_YUYV422;
251  case AV_PIX_FMT_YUV422P10:
252  case AV_PIX_FMT_Y210:
253  *fourcc = MFX_FOURCC_Y210;
254  *shift = 1;
255  return AV_PIX_FMT_Y210;
256  case AV_PIX_FMT_VUYX:
257  *fourcc = MFX_FOURCC_AYUV;
258  *shift = 0;
259  return AV_PIX_FMT_VUYX;
260  case AV_PIX_FMT_XV30:
261  *fourcc = MFX_FOURCC_Y410;
262  *shift = 0;
263  return AV_PIX_FMT_XV30;
264 #if QSV_VERSION_ATLEAST(1, 31)
265  case AV_PIX_FMT_P012:
266  *fourcc = MFX_FOURCC_P016;
267  *shift = 1;
268  return AV_PIX_FMT_P012;
269  case AV_PIX_FMT_Y212:
270  *fourcc = MFX_FOURCC_Y216;
271  *shift = 1;
272  return AV_PIX_FMT_Y212;
273  case AV_PIX_FMT_XV36:
274  *fourcc = MFX_FOURCC_Y416;
275  *shift = 1;
276  return AV_PIX_FMT_XV36;
277 #endif
278  default:
279  return AVERROR(ENOSYS);
280  }
281 }
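
A hedged sketch showing how the returned fourcc/shift pair would be copied into an mfxFrameInfo; the helper and its error handling are assumptions.

/* Hypothetical call site: fill FourCC and Shift of an mfxFrameInfo. */
static int example_fill_fourcc(AVCodecContext *avctx, mfxFrameInfo *info)
{
    uint32_t fourcc;
    uint16_t shift;
    int ret = ff_qsv_map_pixfmt(avctx->pix_fmt, &fourcc, &shift);
    if (ret < 0)
        return ret; /* AVERROR(ENOSYS) for unsupported pixel formats */
    info->FourCC = fourcc;
    info->Shift  = shift; /* 1 for MSB-aligned 10/12-bit layouts such as P010 */
    return 0;
}
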
282 
283 int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
284 {
285  switch (frame->format) {
286  case AV_PIX_FMT_NV12:
287  case AV_PIX_FMT_P010:
288  case AV_PIX_FMT_P012:
289  surface->Data.Y = frame->data[0];
290  surface->Data.UV = frame->data[1];
291  /* The SDK checks Data.V when using system memory for VP9 encoding */
292  surface->Data.V = surface->Data.UV + 1;
293  break;
295  case AV_PIX_FMT_BGRA:
296  surface->Data.B = frame->data[0];
297  surface->Data.G = frame->data[0] + 1;
298  surface->Data.R = frame->data[0] + 2;
299  surface->Data.A = frame->data[0] + 3;
300  break;
301  case AV_PIX_FMT_YUYV422:
302  surface->Data.Y = frame->data[0];
303  surface->Data.U = frame->data[0] + 1;
304  surface->Data.V = frame->data[0] + 3;
305  break;
306 
307  case AV_PIX_FMT_Y210:
308  case AV_PIX_FMT_Y212:
309  surface->Data.Y16 = (mfxU16 *)frame->data[0];
310  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
311  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
312  break;
313 
314  case AV_PIX_FMT_VUYX:
315  surface->Data.V = frame->data[0];
316  surface->Data.U = frame->data[0] + 1;
317  surface->Data.Y = frame->data[0] + 2;
318  // Only set Data.A to a valid address, the SDK doesn't
319  // use the value from the frame.
320  surface->Data.A = frame->data[0] + 3;
321  break;
322 
323  case AV_PIX_FMT_XV30:
324  surface->Data.U = frame->data[0];
325  break;
326 
327  case AV_PIX_FMT_XV36:
328  surface->Data.U = frame->data[0];
329  surface->Data.Y = frame->data[0] + 2;
330  surface->Data.V = frame->data[0] + 4;
331  // Only set Data.A to a valid address, the SDK doesn't
332  // use the value from the frame.
333  surface->Data.A = frame->data[0] + 6;
334  break;
335 
336  default:
337  return AVERROR(ENOSYS);
338  }
339  surface->Data.PitchLow = frame->linesize[0];
340 
341  return 0;
342 }
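
A minimal sketch of the system-memory submission path this mapper serves; the wrapper is hypothetical and omits most of the Info setup a real encoder performs.

/* Hypothetical encoder path: wrap a system-memory AVFrame in an
 * mfxFrameSurface1 before handing it to the MFX encode call. */
static int example_wrap_frame(const AVFrame *frame, mfxFrameSurface1 *surf)
{
    int ret = ff_qsv_map_frame_to_surface(frame, surf);
    if (ret < 0)
        return ret; /* AVERROR(ENOSYS) for unsupported pixel formats */
    surf->Info.CropW = frame->width;
    surf->Info.CropH = frame->height;
    return 0;
}
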
343 
344 int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
345 {
346  int i;
347  for (i = 0; i < ctx->nb_mids; i++) {
348  QSVMid *mid = &ctx->mids[i];
349  mfxHDLPair *pair = (mfxHDLPair*)frame->surface.Data.MemId;
350  if ((mid->handle_pair->first == pair->first) &&
351  (mid->handle_pair->second == pair->second))
352  return i;
353  }
354  return AVERROR_BUG;
355 }
356 
357 enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct)
358 {
358 {
359  enum AVFieldOrder field = AV_FIELD_UNKNOWN;
360  switch (mfx_pic_struct & 0xF) {
361  case MFX_PICSTRUCT_PROGRESSIVE:
362  field = AV_FIELD_PROGRESSIVE;
363  break;
364  case MFX_PICSTRUCT_FIELD_TFF:
365  field = AV_FIELD_TT;
366  break;
367  case MFX_PICSTRUCT_FIELD_BFF:
368  field = AV_FIELD_BB;
369  break;
370  }
371 
372  return field;
373 }
374 
375 enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type)
376 {
377  enum AVPictureType type;
378  switch (mfx_pic_type & 0x7) {
379  case MFX_FRAMETYPE_I:
380  if (mfx_pic_type & MFX_FRAMETYPE_S)
381  type = AV_PICTURE_TYPE_SI;
382  else
383  type = AV_PICTURE_TYPE_I;
384  break;
385  case MFX_FRAMETYPE_B:
386  type = AV_PICTURE_TYPE_B;
387  break;
388  case MFX_FRAMETYPE_P:
389  if (mfx_pic_type & MFX_FRAMETYPE_S)
390  type = AV_PICTURE_TYPE_SP;
391  else
392  type = AV_PICTURE_TYPE_P;
393  break;
394  case MFX_FRAMETYPE_UNKNOWN:
395  type = AV_PICTURE_TYPE_NONE;
396  break;
397  default:
398  av_assert0(0);
399  }
400 
401  return type;
402 }
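
A hedged sketch showing both mapping helpers applied to a decoded surface; the wrapper is illustrative, and it assumes an FFmpeg version that provides AV_FRAME_FLAG_INTERLACED.

/* Hypothetical decoder path: translate libmfx picture metadata to the
 * output AVFrame. */
static void example_set_frame_props(AVFrame *frame, const mfxFrameSurface1 *surf,
                                    int mfx_frame_type)
{
    enum AVFieldOrder order = ff_qsv_map_picstruct(surf->Info.PicStruct);

    frame->pict_type = ff_qsv_map_pictype(mfx_frame_type);
    if (order == AV_FIELD_TT || order == AV_FIELD_BB)
        frame->flags |= AV_FRAME_FLAG_INTERLACED;
}
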
403 
404 static int qsv_load_plugins(mfxSession session, const char *load_plugins,
405  void *logctx)
406 {
407 #if QSV_HAVE_USER_PLUGIN
408  if (!load_plugins || !*load_plugins)
409  return 0;
410 
411  while (*load_plugins) {
412  mfxPluginUID uid;
413  mfxStatus ret;
414  int i, err = 0;
415 
416  char *plugin = av_get_token(&load_plugins, ":");
417  if (!plugin)
418  return AVERROR(ENOMEM);
419  if (strlen(plugin) != 2 * sizeof(uid.Data)) {
420  av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID length\n");
421  err = AVERROR(EINVAL);
422  goto load_plugin_fail;
423  }
424 
425  for (i = 0; i < sizeof(uid.Data); i++) {
426  err = sscanf(plugin + 2 * i, "%2hhx", uid.Data + i);
427  if (err != 1) {
428  av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID\n");
429  err = AVERROR(EINVAL);
430  goto load_plugin_fail;
431  }
432 
433  }
434 
435  ret = MFXVideoUSER_Load(session, &uid, 1);
436  if (ret < 0) {
437  char errorbuf[128];
438  snprintf(errorbuf, sizeof(errorbuf),
439  "Could not load the requested plugin '%s'", plugin);
440  err = ff_qsv_print_error(logctx, ret, errorbuf);
441  goto load_plugin_fail;
442  }
443 
444  if (*load_plugins)
445  load_plugins++;
446 load_plugin_fail:
447  av_freep(&plugin);
448  if (err < 0)
449  return err;
450  }
451 #endif
452 
453  return 0;
454 
455 }
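
For reference, the load_plugins string this parser accepts is a colon-separated list of 32-character hexadecimal plugin UIDs (16 bytes each). The UIDs below are placeholders, not real plugin identifiers.

/* Hypothetical example of the accepted syntax; both UIDs are dummies. */
static const char *example_load_plugins =
    "0123456789abcdef0123456789abcdef:fedcba9876543210fedcba9876543210";
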
456 
457 //This code is only required for Linux since a display handle is required.
458 //For Windows the session is complete and ready to use.
459 
460 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
461 static int ff_qsv_set_display_handle(AVCodecContext *avctx, QSVSession *qs)
462 {
463  AVDictionary *child_device_opts = NULL;
464  AVVAAPIDeviceContext *hwctx;
465  int ret;
466 
467  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
468  av_dict_set(&child_device_opts, "driver", "iHD", 0);
469 
470  ret = av_hwdevice_ctx_create(&qs->va_device_ref, AV_HWDEVICE_TYPE_VAAPI, NULL, child_device_opts, 0);
471  av_dict_free(&child_device_opts);
472  if (ret < 0) {
473  av_log(avctx, AV_LOG_ERROR, "Failed to create a VAAPI device.\n");
474  return ret;
475  } else {
476  qs->va_device_ctx = (AVHWDeviceContext*)qs->va_device_ref->data;
477  hwctx = qs->va_device_ctx->hwctx;
478 
479  ret = MFXVideoCORE_SetHandle(qs->session,
480  (mfxHandleType)MFX_HANDLE_VA_DISPLAY, (mfxHDL)hwctx->display);
481  if (ret < 0) {
482  return ff_qsv_print_error(avctx, ret, "Error during set display handle\n");
483  }
484  }
485 
486  return 0;
487 }
488 #endif //AVCODEC_QSV_LINUX_SESSION_HANDLE
489 
490 #if QSV_ONEVPL
491 static int qsv_new_mfx_loader(AVCodecContext *avctx,
492  mfxIMPL implementation,
493  mfxVersion *pver,
494  void **ploader)
495 {
496  mfxStatus sts;
497  mfxLoader loader = NULL;
498  mfxConfig cfg;
499  mfxVariant impl_value;
500 
501  loader = MFXLoad();
502  if (!loader) {
503  av_log(avctx, AV_LOG_ERROR, "Error creating a MFX loader\n");
504  goto fail;
505  }
506 
507  /* Create configurations for implementation */
508  cfg = MFXCreateConfig(loader);
509  if (!cfg) {
510  av_log(avctx, AV_LOG_ERROR, "Error creating a MFX configuration\n");
511  goto fail;
512  }
513 
514  impl_value.Type = MFX_VARIANT_TYPE_U32;
515  impl_value.Data.U32 = (implementation == MFX_IMPL_SOFTWARE) ?
516  MFX_IMPL_TYPE_SOFTWARE : MFX_IMPL_TYPE_HARDWARE;
517  sts = MFXSetConfigFilterProperty(cfg,
518  (const mfxU8 *)"mfxImplDescription.Impl", impl_value);
519  if (sts != MFX_ERR_NONE) {
520  av_log(avctx, AV_LOG_ERROR, "Error adding a MFX configuration "
521  "property: %d\n", sts);
522  goto fail;
523  }
524 
525  impl_value.Type = MFX_VARIANT_TYPE_U32;
526  impl_value.Data.U32 = pver->Version;
527  sts = MFXSetConfigFilterProperty(cfg,
528  (const mfxU8 *)"mfxImplDescription.ApiVersion.Version",
529  impl_value);
530  if (sts != MFX_ERR_NONE) {
531  av_log(avctx, AV_LOG_ERROR, "Error adding a MFX configuration "
532  "property: %d\n", sts);
533  goto fail;
534  }
535 
536  *ploader = loader;
537 
538  return 0;
539 
540 fail:
541  if (loader)
542  MFXUnload(loader);
543 
544  *ploader = NULL;
545  return AVERROR_UNKNOWN;
546 }
547 
548 static int qsv_create_mfx_session_from_loader(void *ctx, mfxLoader loader, mfxSession *psession)
549 {
550  mfxStatus sts;
551  mfxSession session = NULL;
552  uint32_t impl_idx = 0;
553 
554  while (1) {
555  /* Enumerate all implementations */
556  mfxImplDescription *impl_desc;
557 
558  sts = MFXEnumImplementations(loader, impl_idx,
559  MFX_IMPLCAPS_IMPLDESCSTRUCTURE,
560  (mfxHDL *)&impl_desc);
561  /* Failed to find an available implementation */
562  if (sts == MFX_ERR_NOT_FOUND)
563  break;
564  else if (sts != MFX_ERR_NONE) {
565  impl_idx++;
566  continue;
567  }
568 
569  sts = MFXCreateSession(loader, impl_idx, &session);
570  MFXDispReleaseImplDescription(loader, impl_desc);
571  if (sts == MFX_ERR_NONE)
572  break;
573 
574  impl_idx++;
575  }
576 
577  if (sts != MFX_ERR_NONE) {
578  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX session: %d.\n", sts);
579  goto fail;
580  }
581 
582  *psession = session;
583 
584  return 0;
585 
586 fail:
587  if (session)
588  MFXClose(session);
589 
590  *psession = NULL;
591  return AVERROR_UNKNOWN;
592 }
593 
594 static int qsv_create_mfx_session(AVCodecContext *avctx,
595  mfxIMPL implementation,
596  mfxVersion *pver,
597  int gpu_copy,
598  mfxSession *psession,
599  void **ploader)
600 {
601  mfxLoader loader = NULL;
602 
603  /* Don't create a new MFX loader if the input loader is valid */
604  if (*ploader == NULL) {
605  av_log(avctx, AV_LOG_VERBOSE,
606  "Use Intel(R) oneVPL to create MFX session, the required "
607  "implementation version is %d.%d\n",
608  pver->Major, pver->Minor);
609 
610  if (qsv_new_mfx_loader(avctx, implementation, pver, (void **)&loader))
611  goto fail;
612 
613  av_assert0(loader);
614  } else {
615  av_log(avctx, AV_LOG_VERBOSE,
616  "Use Intel(R) oneVPL to create MFX session with the specified MFX loader\n");
617 
618  loader = *ploader;
619  }
620 
621  if (qsv_create_mfx_session_from_loader(avctx, loader, psession))
622  goto fail;
623 
624  if (!*ploader)
625  *ploader = loader;
626 
627  return 0;
628 
629 fail:
630  if (!*ploader && loader)
631  MFXUnload(loader);
632 
633  return AVERROR_UNKNOWN;
634 }
635 
636 #else
637 
638 static int qsv_create_mfx_session(AVCodecContext *avctx,
639  mfxIMPL implementation,
640  mfxVersion *pver,
641  int gpu_copy,
642  mfxSession *psession,
643  void **ploader)
644 {
645  mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
646  mfxSession session = NULL;
647  mfxStatus sts;
648 
649  av_log(avctx, AV_LOG_VERBOSE,
650  "Use Intel(R) Media SDK to create MFX session, the required "
651  "implementation version is %d.%d\n",
652  pver->Major, pver->Minor);
653 
654  *psession = NULL;
655  *ploader = NULL;
656 
657  init_par.GPUCopy = gpu_copy;
658  init_par.Implementation = implementation;
659  init_par.Version = *pver;
660  sts = MFXInitEx(init_par, &session);
661  if (sts < 0)
662  return ff_qsv_print_error(avctx, sts,
663  "Error initializing a MFX session");
664  else if (sts > 0) {
665  ff_qsv_print_warning(avctx, sts,
666  "Warning in MFX initialization");
667  return AVERROR_UNKNOWN;
668  }
669 
670  *psession = session;
671 
672  return 0;
673 }
674 
675 #endif
676 
677 int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
678  const char *load_plugins, int gpu_copy)
679 {
680  mfxIMPL impls[] = {
681 #if CONFIG_D3D11VA
682  MFX_IMPL_AUTO_ANY | MFX_IMPL_VIA_D3D11,
683 #endif
684  MFX_IMPL_AUTO_ANY
685  };
686  mfxIMPL impl;
687  mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
688 
689  const char *desc;
690  int ret;
691 
692  for (int i = 0; i < FF_ARRAY_ELEMS(impls); i++) {
693  ret = qsv_create_mfx_session(avctx, impls[i], &ver, gpu_copy, &qs->session,
694  &qs->loader);
695 
696  if (ret == 0)
697  break;
698 
699  if (i == FF_ARRAY_ELEMS(impls) - 1)
700  return ret;
701  else
702  av_log(avctx, AV_LOG_ERROR, "The current mfx implementation is not "
703  "supported, trying the next mfx implementation.\n");
704  }
705 
706 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
707  ret = ff_qsv_set_display_handle(avctx, qs);
708  if (ret < 0)
709  return ret;
710 #endif
711 
712  ret = qsv_load_plugins(qs->session, load_plugins, avctx);
713  if (ret < 0) {
714  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
715  return ret;
716  }
717 
718  ret = MFXQueryIMPL(qs->session, &impl);
719  if (ret != MFX_ERR_NONE)
720  return ff_qsv_print_error(avctx, ret,
721  "Error querying the session attributes");
722 
723  switch (MFX_IMPL_BASETYPE(impl)) {
724  case MFX_IMPL_SOFTWARE:
725  desc = "software";
726  break;
727  case MFX_IMPL_HARDWARE:
728  case MFX_IMPL_HARDWARE2:
729  case MFX_IMPL_HARDWARE3:
730  case MFX_IMPL_HARDWARE4:
731  desc = "hardware accelerated";
732  break;
733  default:
734  desc = "unknown";
735  }
736 
737  av_log(avctx, AV_LOG_VERBOSE,
738  "Initialized an internal MFX session using %s implementation\n",
739  desc);
740 
741  return 0;
742 }
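
A minimal sketch of how a QSV encoder or decoder init path might drive this function; the gpu_copy value and the cleanup policy noted in the comment are assumptions.

/* Hypothetical codec init: open an internal MFX session without plugins. */
static int example_open_internal_session(AVCodecContext *avctx, QSVSession *qs)
{
    int ret = ff_qsv_init_internal_session(avctx, qs, NULL, MFX_GPUCOPY_DEFAULT);
    if (ret < 0)
        return ret;
    /* ... issue MFXVideo* calls on qs->session, then release it with
     * ff_qsv_close_internal_session(qs) ... */
    return 0;
}
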
743 
744 static void mids_buf_free(FFRefStructOpaque opaque, void *obj)
745 {
746  AVBufferRef *hw_frames_ref = opaque.nc;
747  av_buffer_unref(&hw_frames_ref);
748 }
749 
750 static QSVMid *qsv_create_mids(AVBufferRef *hw_frames_ref)
751 {
752  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
753  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
754  int nb_surfaces = frames_hwctx->nb_surfaces;
755 
756  AVBufferRef *hw_frames_ref1;
757  QSVMid *mids;
758  int i;
759 
760  hw_frames_ref1 = av_buffer_ref(hw_frames_ref);
761  if (!hw_frames_ref1)
762  return NULL;
763 
764  mids = ff_refstruct_alloc_ext(nb_surfaces * sizeof(*mids), 0,
765  hw_frames_ref1, mids_buf_free);
766  if (!mids) {
767  av_buffer_unref(&hw_frames_ref1);
768  return NULL;
769  }
770 
771  for (i = 0; i < nb_surfaces; i++) {
772  QSVMid *mid = &mids[i];
773  mid->handle_pair = (mfxHDLPair*)frames_hwctx->surfaces[i].Data.MemId;
774  mid->hw_frames_ref = hw_frames_ref1;
775  }
776 
777  return mids;
778 }
779 
780 static int qsv_setup_mids(mfxFrameAllocResponse *resp, AVBufferRef *hw_frames_ref,
781  QSVMid *mids)
782 {
783  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
784  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
785  int nb_surfaces = frames_hwctx->nb_surfaces;
786  int i;
787 
788  // the array is allocated two entries larger than the number of surfaces;
789  // the extra slots store the references to the frames context and the
790  // QSVMid array
791  resp->mids = av_calloc(nb_surfaces + 2, sizeof(*resp->mids));
792  if (!resp->mids)
793  return AVERROR(ENOMEM);
794 
795  for (i = 0; i < nb_surfaces; i++)
796  resp->mids[i] = &mids[i];
797  resp->NumFrameActual = nb_surfaces;
798 
799  resp->mids[resp->NumFrameActual] = (mfxMemId)av_buffer_ref(hw_frames_ref);
800  if (!resp->mids[resp->NumFrameActual]) {
801  av_freep(&resp->mids);
802  return AVERROR(ENOMEM);
803  }
804 
805  resp->mids[resp->NumFrameActual + 1] = ff_refstruct_ref(mids);
806 
807  return 0;
808 }
809 
810 static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
811  mfxFrameAllocResponse *resp)
812 {
813  QSVFramesContext *ctx = pthis;
814  int ret;
815 
816  /* this should only be called from an encoder or decoder and
817  * only allocates video memory frames */
818  if (!(req->Type & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET |
819  MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)) ||
820  !(req->Type & (MFX_MEMTYPE_FROM_DECODE | MFX_MEMTYPE_FROM_ENCODE)))
821  return MFX_ERR_UNSUPPORTED;
822 
823  if (req->Type & MFX_MEMTYPE_EXTERNAL_FRAME) {
824  /* external frames -- fill from the caller-supplied frames context */
825  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
826  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
827  mfxFrameInfo *i = &req->Info;
828  mfxFrameInfo *i1 = &frames_hwctx->surfaces[0].Info;
829 
830  if (i->Width > i1->Width || i->Height > i1->Height ||
831  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
832  av_log(ctx->logctx, AV_LOG_ERROR, "Mismatching surface properties in an "
833  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
834  i->Width, i->Height, i->FourCC, i->ChromaFormat,
835  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
836  return MFX_ERR_UNSUPPORTED;
837  }
838 
839  ret = qsv_setup_mids(resp, ctx->hw_frames_ctx, ctx->mids);
840  if (ret < 0) {
841  av_log(ctx->logctx, AV_LOG_ERROR,
842  "Error filling an external frame allocation request\n");
843  return MFX_ERR_MEMORY_ALLOC;
844  }
845  } else if (req->Type & MFX_MEMTYPE_INTERNAL_FRAME) {
846  /* internal frames -- allocate a new hw frames context */
847  AVHWFramesContext *ext_frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
848  mfxFrameInfo *i = &req->Info;
849 
850  AVBufferRef *frames_ref;
851  QSVMid *mids;
852  AVHWFramesContext *frames_ctx;
853  AVQSVFramesContext *frames_hwctx;
854 
855  frames_ref = av_hwframe_ctx_alloc(ext_frames_ctx->device_ref);
856  if (!frames_ref)
857  return MFX_ERR_MEMORY_ALLOC;
858 
859  frames_ctx = (AVHWFramesContext*)frames_ref->data;
860  frames_hwctx = frames_ctx->hwctx;
861 
862  frames_ctx->format = AV_PIX_FMT_QSV;
863  frames_ctx->sw_format = ff_qsv_map_fourcc(i->FourCC);
864  frames_ctx->width = i->Width;
865  frames_ctx->height = i->Height;
866  frames_ctx->initial_pool_size = req->NumFrameSuggested;
867 
868  frames_hwctx->frame_type = req->Type;
869 
870  ret = av_hwframe_ctx_init(frames_ref);
871  if (ret < 0) {
872  av_log(ctx->logctx, AV_LOG_ERROR,
873  "Error initializing a frames context for an internal frame "
874  "allocation request\n");
875  av_buffer_unref(&frames_ref);
876  return MFX_ERR_MEMORY_ALLOC;
877  }
878 
879  mids = qsv_create_mids(frames_ref);
880  if (!mids) {
881  av_buffer_unref(&frames_ref);
882  return MFX_ERR_MEMORY_ALLOC;
883  }
884 
885  ret = qsv_setup_mids(resp, frames_ref, mids);
886  ff_refstruct_unref(&mids);
887  av_buffer_unref(&frames_ref);
888  if (ret < 0) {
889  av_log(ctx->logctx, AV_LOG_ERROR,
890  "Error filling an internal frame allocation request\n");
891  return MFX_ERR_MEMORY_ALLOC;
892  }
893  } else {
894  return MFX_ERR_UNSUPPORTED;
895  }
896 
897  return MFX_ERR_NONE;
898 }
899 
900 static mfxStatus qsv_frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
901 {
902  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
903  ff_refstruct_unref(&resp->mids[resp->NumFrameActual + 1]);
904  av_freep(&resp->mids);
905  return MFX_ERR_NONE;
906 }
907 
908 static mfxStatus qsv_frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
909 {
910  QSVMid *qsv_mid = mid;
911  AVHWFramesContext *hw_frames_ctx = (AVHWFramesContext*)qsv_mid->hw_frames_ref->data;
912  AVQSVFramesContext *hw_frames_hwctx = hw_frames_ctx->hwctx;
913  int ret;
914 
915  if (qsv_mid->locked_frame)
916  return MFX_ERR_UNDEFINED_BEHAVIOR;
917 
918  /* Allocate a system memory frame that will hold the mapped data. */
919  qsv_mid->locked_frame = av_frame_alloc();
920  if (!qsv_mid->locked_frame)
921  return MFX_ERR_MEMORY_ALLOC;
922  qsv_mid->locked_frame->format = hw_frames_ctx->sw_format;
923 
924  /* wrap the provided handle in a hwaccel AVFrame */
925  qsv_mid->hw_frame = av_frame_alloc();
926  if (!qsv_mid->hw_frame)
927  goto fail;
928 
929  qsv_mid->hw_frame->data[3] = (uint8_t*)&qsv_mid->surf;
930  qsv_mid->hw_frame->format = AV_PIX_FMT_QSV;
931 
932  // doesn't really matter what buffer is used here
933  qsv_mid->hw_frame->buf[0] = av_buffer_alloc(1);
934  if (!qsv_mid->hw_frame->buf[0])
935  goto fail;
936 
937  qsv_mid->hw_frame->width = hw_frames_ctx->width;
938  qsv_mid->hw_frame->height = hw_frames_ctx->height;
939 
940  qsv_mid->hw_frame->hw_frames_ctx = av_buffer_ref(qsv_mid->hw_frames_ref);
941  if (!qsv_mid->hw_frame->hw_frames_ctx)
942  goto fail;
943 
944  qsv_mid->surf.Info = hw_frames_hwctx->surfaces[0].Info;
945  qsv_mid->surf.Data.MemId = qsv_mid->handle_pair;
946 
947  /* map the data to the system memory */
948  ret = av_hwframe_map(qsv_mid->locked_frame, qsv_mid->hw_frame,
949  AV_HWFRAME_MAP_DIRECT);
950  if (ret < 0)
951  goto fail;
952 
953  ptr->Pitch = qsv_mid->locked_frame->linesize[0];
954  ptr->Y = qsv_mid->locked_frame->data[0];
955  ptr->U = qsv_mid->locked_frame->data[1];
956  ptr->V = qsv_mid->locked_frame->data[1] + 1;
957 
958  return MFX_ERR_NONE;
959 fail:
960  av_frame_free(&qsv_mid->hw_frame);
961  av_frame_free(&qsv_mid->locked_frame);
962  return MFX_ERR_MEMORY_ALLOC;
963 }
964 
965 static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
966 {
967  QSVMid *qsv_mid = mid;
968 
969  av_frame_free(&qsv_mid->locked_frame);
970  av_frame_free(&qsv_mid->hw_frame);
971 
972  return MFX_ERR_NONE;
973 }
974 
975 static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
976 {
977  QSVMid *qsv_mid = (QSVMid*)mid;
978  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
979  mfxHDLPair *pair_src = (mfxHDLPair*)qsv_mid->handle_pair;
980 
981  pair_dst->first = pair_src->first;
982 
983  if (pair_src->second != (mfxMemId)MFX_INFINITE)
984  pair_dst->second = pair_src->second;
985  return MFX_ERR_NONE;
986 }
987 
988 int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
989  AVBufferRef *device_ref, const char *load_plugins,
990  int gpu_copy)
991 {
992  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref->data;
993  AVQSVDeviceContext *device_hwctx = device_ctx->hwctx;
994  mfxSession parent_session = device_hwctx->session;
995  void *loader = device_hwctx->loader;
996  mfxHDL handle = NULL;
997  int hw_handle_supported = 0;
998 
999  mfxSession session;
1000  mfxVersion ver;
1001  mfxIMPL impl;
1002  mfxHandleType handle_type;
1003  mfxStatus err;
1004  int ret;
1005 
1006  err = MFXQueryIMPL(parent_session, &impl);
1007  if (err == MFX_ERR_NONE)
1008  err = MFXQueryVersion(parent_session, &ver);
1009  if (err != MFX_ERR_NONE)
1010  return ff_qsv_print_error(avctx, err,
1011  "Error querying the session attributes");
1012 
1013  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl)) {
1014  handle_type = MFX_HANDLE_VA_DISPLAY;
1015  hw_handle_supported = 1;
1016  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl)) {
1017  handle_type = MFX_HANDLE_D3D11_DEVICE;
1018  hw_handle_supported = 1;
1019  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl)) {
1020  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1021  hw_handle_supported = 1;
1022  }
1023 
1024  if (hw_handle_supported) {
1025  err = MFXVideoCORE_GetHandle(parent_session, handle_type, &handle);
1026  if (err != MFX_ERR_NONE) {
1027  return ff_qsv_print_error(avctx, err,
1028  "Error getting handle session");
1029  }
1030  }
1031  if (!handle) {
1032  av_log(avctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
1033  "from the session\n");
1034  }
1035 
1036  ret = qsv_create_mfx_session(avctx, impl, &ver, gpu_copy, &session,
1037  &loader);
1038  if (ret)
1039  return ret;
1040 
1041  if (handle) {
1042  err = MFXVideoCORE_SetHandle(session, handle_type, handle);
1043  if (err != MFX_ERR_NONE)
1044  return ff_qsv_print_error(avctx, err,
1045  "Error setting a HW handle");
1046  }
1047 
1048  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
1049  err = MFXJoinSession(parent_session, session);
1050  if (err != MFX_ERR_NONE)
1051  return ff_qsv_print_error(avctx, err,
1052  "Error joining session");
1053  }
1054 
1055  ret = qsv_load_plugins(session, load_plugins, avctx);
1056  if (ret < 0) {
1057  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
1058  return ret;
1059  }
1060 
1061  *psession = session;
1062  return 0;
1063 }
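
A hedged sketch of a caller that already holds an AV_HWDEVICE_TYPE_QSV device reference; the wrapper and its parameter choices are illustrative.

/* Hypothetical call site: derive a child MFX session from a QSV device. */
static int example_session_from_device(AVCodecContext *avctx, AVBufferRef *device_ref,
                                       mfxSession *out_session)
{
    /* No plugins, default GPU copy behaviour. */
    return ff_qsv_init_session_device(avctx, out_session, device_ref, NULL, 0);
}
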
1064 
1065 int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *psession,
1066  QSVFramesContext *qsv_frames_ctx,
1067  const char *load_plugins, int opaque, int gpu_copy)
1068 {
1069  mfxFrameAllocator frame_allocator = {
1070  .pthis = qsv_frames_ctx,
1071  .Alloc = qsv_frame_alloc,
1072  .Lock = qsv_frame_lock,
1073  .Unlock = qsv_frame_unlock,
1074  .GetHDL = qsv_frame_get_hdl,
1075  .Free = qsv_frame_free,
1076  };
1077 
1078  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)qsv_frames_ctx->hw_frames_ctx->data;
1079  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
1080 
1081  mfxSession session;
1082  mfxStatus err;
1083 
1084  int ret;
1085 
1086  ret = ff_qsv_init_session_device(avctx, &session,
1087  frames_ctx->device_ref, load_plugins, gpu_copy);
1088  if (ret < 0)
1089  return ret;
1090 
1091  if (!opaque) {
1092  qsv_frames_ctx->logctx = avctx;
1093 
1094  /* allocate the memory ids for the external frames */
1095  ff_refstruct_unref(&qsv_frames_ctx->mids);
1096  qsv_frames_ctx->mids = qsv_create_mids(qsv_frames_ctx->hw_frames_ctx);
1097  if (!qsv_frames_ctx->mids)
1098  return AVERROR(ENOMEM);
1099  qsv_frames_ctx->nb_mids = frames_hwctx->nb_surfaces;
1100 
1101  err = MFXVideoCORE_SetFrameAllocator(session, &frame_allocator);
1102  if (err != MFX_ERR_NONE)
1103  return ff_qsv_print_error(avctx, err,
1104  "Error setting a frame allocator");
1105  }
1106 
1107  *psession = session;
1108  return 0;
1109 }
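
A minimal sketch of the frames-context path; it assumes the caller has already attached a valid AV_PIX_FMT_QSV frames context to qsv_frames_ctx->hw_frames_ctx.

/* Hypothetical decoder path: build a session whose frame allocator serves
 * surfaces from an existing QSV frames context (non-opaque memory). */
static int example_session_from_frames(AVCodecContext *avctx,
                                       QSVFramesContext *qsv_frames_ctx,
                                       mfxSession *out_session)
{
    return ff_qsv_init_session_frames(avctx, out_session, qsv_frames_ctx,
                                      NULL /* load_plugins */,
                                      0    /* opaque */,
                                      0    /* gpu_copy */);
}
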
1110 
1111 int ff_qsv_close_internal_session(QSVSession *qs)
1112 {
1113  if (qs->session) {
1114  MFXClose(qs->session);
1115  qs->session = NULL;
1116  }
1117 
1118  if (qs->loader) {
1119  MFXUnload(qs->loader);
1120  qs->loader = NULL;
1121  }
1122 
1123 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
1124  av_buffer_unref(&qs->va_device_ref);
1125 #endif
1126  return 0;
1127 }
1128 
1129 void ff_qsv_frame_add_ext_param(AVCodecContext *avctx, QSVFrame *frame,
1130  mfxExtBuffer * param)
1131 {
1132  int i;
1133 
1134  for (i = 0; i < frame->num_ext_params; i++) {
1135  mfxExtBuffer *ext_buffer = frame->ext_param[i];
1136 
1137  if (ext_buffer->BufferId == param->BufferId) {
1138  av_log(avctx, AV_LOG_WARNING, "A buffer with the same type has been "
1139  "added\n");
1140  return;
1141  }
1142  }
1143 
1144  if (frame->num_ext_params < QSV_MAX_FRAME_EXT_PARAMS) {
1145  frame->ext_param[frame->num_ext_params] = param;
1146  frame->num_ext_params++;
1147  frame->surface.Data.NumExtParam = frame->num_ext_params;
1148  } else {
1149  av_log(avctx, AV_LOG_WARNING, "Ignoring this extra buffer because there is "
1150  "not enough space\n");
1151  }
1152 
1153 
1154 }
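
A hedged sketch of attaching an extra buffer to a QSVFrame; the choice of mfxExtAVCEncodedFrameInfo is only an illustration of the Header setup the SDK expects, not how this file's callers use the function.

/* Hypothetical per-frame use: attach an encoder feedback buffer. */
static void example_attach_ext_param(AVCodecContext *avctx, QSVFrame *frame,
                                     mfxExtAVCEncodedFrameInfo *info)
{
    info->Header.BufferId = MFX_EXTBUFF_ENCODED_FRAME_INFO;
    info->Header.BufferSz = sizeof(*info);
    ff_qsv_frame_add_ext_param(avctx, frame, (mfxExtBuffer *)info);
}
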