vsrc_ddagrab.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0A00
#undef _WIN32_WINNT
#define _WIN32_WINNT 0x0A00
#endif

#include <windows.h>

#define COBJMACROS

#include <initguid.h>
#include <d3d11.h>
#include <dxgi1_2.h>
#if HAVE_IDXGIOUTPUT5
#include <dxgi1_5.h>
#endif

#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/time.h"
#include "libavutil/avstring.h"
#include "libavutil/avassert.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_d3d11va.h"
#include "compat/w32dlfcn.h"
#include "avfilter.h"
#include "internal.h"
#include "video.h"

#include "vsrc_ddagrab_shaders.h"

// avutil/time.h takes and returns time in microseconds
#define TIMER_RES 1000000
#define TIMER_RES64 INT64_C(1000000)

typedef struct DdagrabContext {
    const AVClass *class;

    AVBufferRef *device_ref;
    AVHWDeviceContext *device_ctx;
    AVD3D11VADeviceContext *device_hwctx;

    AVBufferRef *frames_ref;
    AVHWFramesContext *frames_ctx;
    AVD3D11VAFramesContext *frames_hwctx;

    DXGI_OUTPUT_DESC output_desc;
    IDXGIOutputDuplication *dxgi_outdupl;
    AVFrame *last_frame;

    int mouse_x, mouse_y;
    ID3D11Texture2D *mouse_texture;
    ID3D11ShaderResourceView* mouse_resource_view;
    ID3D11Texture2D *mouse_xor_texture;
    ID3D11ShaderResourceView* mouse_xor_resource_view;

    AVRational time_base;
    int64_t time_frame;
    int64_t time_timeout;
    int64_t first_pts;

    DXGI_FORMAT raw_format;
    int raw_width;
    int raw_height;

    ID3D11Texture2D *probed_texture;
    ID3D11Texture2D *buffer_texture;

    ID3D11VertexShader *vertex_shader;
    ID3D11InputLayout *input_layout;
    ID3D11PixelShader *pixel_shader;
    ID3D11Buffer *const_buffer;
    ID3D11SamplerState *sampler_state;
    ID3D11BlendState *blend_state;
    ID3D11BlendState *blend_state_xor;

    int        output_idx;
    int        draw_mouse;
    AVRational framerate;
    int        width;
    int        height;
    int        offset_x;
    int        offset_y;
    int        out_fmt;
    int        allow_fallback;
    int        force_fmt;
    int        dup_frames;
} DdagrabContext;

#define OFFSET(x) offsetof(DdagrabContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
static const AVOption ddagrab_options[] = {
    { "output_idx", "dda output index to capture", OFFSET(output_idx), AV_OPT_TYPE_INT,        { .i64 = 0    },       0, INT_MAX, FLAGS },
    { "draw_mouse", "draw the mouse pointer",      OFFSET(draw_mouse), AV_OPT_TYPE_BOOL,       { .i64 = 1    },       0,       1, FLAGS },
    { "framerate",  "set video frame rate",        OFFSET(framerate),  AV_OPT_TYPE_VIDEO_RATE, { .str = "30" },       0, INT_MAX, FLAGS },
    { "video_size", "set video frame size",        OFFSET(width),      AV_OPT_TYPE_IMAGE_SIZE, { .str = NULL },       0,       0, FLAGS },
    { "offset_x",   "capture area x offset",       OFFSET(offset_x),   AV_OPT_TYPE_INT,        { .i64 = 0    }, INT_MIN, INT_MAX, FLAGS },
    { "offset_y",   "capture area y offset",       OFFSET(offset_y),   AV_OPT_TYPE_INT,        { .i64 = 0    }, INT_MIN, INT_MAX, FLAGS },
    { "output_fmt", "desired output format",       OFFSET(out_fmt),    AV_OPT_TYPE_INT,        { .i64 = DXGI_FORMAT_B8G8R8A8_UNORM }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
    { "auto",       "let dda pick its preferred format",  0, AV_OPT_TYPE_CONST, { .i64 = 0                                 }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
    { "8bit",       "only output default 8 Bit format",   0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_B8G8R8A8_UNORM        }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
    { "bgra",       "only output 8 Bit BGRA",             0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_B8G8R8A8_UNORM        }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
    { "10bit",      "only output default 10 Bit format",  0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_R10G10B10A2_UNORM     }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
    { "x2bgr10",    "only output 10 Bit X2BGR10",         0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_R10G10B10A2_UNORM     }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
    { "16bit",      "only output default 16 Bit format",  0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_R16G16B16A16_FLOAT    }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
    { "rgbaf16",    "only output 16 Bit RGBAF16",         0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_R16G16B16A16_FLOAT    }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
    { "allow_fallback", "don't error on fallback to default 8 Bit format",
                                                   OFFSET(allow_fallback), AV_OPT_TYPE_BOOL,   { .i64 = 0    },       0,       1, FLAGS },
    { "force_fmt",  "exclude BGRA from format list (experimental, discouraged by Microsoft)",
                                                   OFFSET(force_fmt),  AV_OPT_TYPE_BOOL,       { .i64 = 0    },       0,       1, FLAGS },
    { "dup_frames", "duplicate frames to maintain framerate",
                                                   OFFSET(dup_frames), AV_OPT_TYPE_BOOL,       { .i64 = 1    },       0,       1, FLAGS },
    { NULL }
};
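
/*
 * Example usage (illustrative only, not part of this file): with the ffmpeg
 * CLI the filter is typically used as a lavfi input, e.g.
 *
 *   ffmpeg -f lavfi -i ddagrab=output_idx=0:framerate=60 -c:v h264_nvenc output.mkv
 *
 * The output frames stay on the GPU as AV_PIX_FMT_D3D11; append
 * ",hwdownload,format=bgra" to the filter graph to bring them into system
 * memory for software encoders.
 */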

AVFILTER_DEFINE_CLASS(ddagrab);

static inline void release_resource(void *resource)
{
    IUnknown **resp = (IUnknown**)resource;
    if (*resp) {
        IUnknown_Release(*resp);
        *resp = NULL;
    }
}
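
/*
 * Call pattern used throughout this file (sketch): pass the address of any
 * COM interface pointer, e.g. release_resource(&dda->mouse_texture);
 * the helper releases the object and NULLs the caller's pointer, so repeated
 * cleanup stays safe.
 */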

static av_cold void ddagrab_uninit(AVFilterContext *avctx)
{
    DdagrabContext *dda = avctx->priv;

    release_resource(&dda->blend_state_xor);
    release_resource(&dda->blend_state);
    release_resource(&dda->sampler_state);
    release_resource(&dda->const_buffer);
    release_resource(&dda->pixel_shader);
    release_resource(&dda->input_layout);
    release_resource(&dda->vertex_shader);

    release_resource(&dda->probed_texture);
    release_resource(&dda->buffer_texture);

    release_resource(&dda->dxgi_outdupl);
    release_resource(&dda->mouse_resource_view);
    release_resource(&dda->mouse_texture);
    release_resource(&dda->mouse_xor_resource_view);
    release_resource(&dda->mouse_xor_texture);

    av_frame_free(&dda->last_frame);
    av_buffer_unref(&dda->frames_ref);
    av_buffer_unref(&dda->device_ref);
}

static av_cold int init_dxgi_dda(AVFilterContext *avctx)
{
    DdagrabContext *dda = avctx->priv;
    IDXGIDevice *dxgi_device = NULL;
    IDXGIAdapter *dxgi_adapter = NULL;
    IDXGIOutput *dxgi_output = NULL;
    IDXGIOutput1 *dxgi_output1 = NULL;
#if HAVE_IDXGIOUTPUT5 && HAVE_DPI_AWARENESS_CONTEXT
    IDXGIOutput5 *dxgi_output5 = NULL;

    typedef DPI_AWARENESS_CONTEXT (*set_thread_dpi_t)(DPI_AWARENESS_CONTEXT);
    set_thread_dpi_t set_thread_dpi;
    HMODULE user32_module;
#endif
    int w, h;
    HRESULT hr;

    hr = ID3D11Device_QueryInterface(dda->device_hwctx->device, &IID_IDXGIDevice, (void**)&dxgi_device);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "Failed querying IDXGIDevice\n");
        return AVERROR_EXTERNAL;
    }

    hr = IDXGIDevice_GetParent(dxgi_device, &IID_IDXGIAdapter, (void**)&dxgi_adapter);
    IDXGIDevice_Release(dxgi_device);
    dxgi_device = NULL;
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "Failed getting parent IDXGIAdapter\n");
        return AVERROR_EXTERNAL;
    }

    hr = IDXGIAdapter_EnumOutputs(dxgi_adapter, dda->output_idx, &dxgi_output);
    IDXGIAdapter_Release(dxgi_adapter);
    dxgi_adapter = NULL;
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "Failed to enumerate DXGI output %d\n", dda->output_idx);
        return AVERROR_EXTERNAL;
    }

    hr = IDXGIOutput_GetDesc(dxgi_output, &dda->output_desc);
    if (FAILED(hr)) {
        IDXGIOutput_Release(dxgi_output);
        av_log(avctx, AV_LOG_ERROR, "Failed getting output description\n");
        return AVERROR_EXTERNAL;
    }

#if HAVE_IDXGIOUTPUT5 && HAVE_DPI_AWARENESS_CONTEXT
    user32_module = dlopen("user32.dll", 0);
    if (!user32_module) {
        av_log(avctx, AV_LOG_ERROR, "Failed loading user32.dll\n");
        return AVERROR_EXTERNAL;
    }

    set_thread_dpi = (set_thread_dpi_t)dlsym(user32_module, "SetThreadDpiAwarenessContext");

    if (set_thread_dpi)
        hr = IDXGIOutput_QueryInterface(dxgi_output, &IID_IDXGIOutput5, (void**)&dxgi_output5);

    if (set_thread_dpi && SUCCEEDED(hr)) {
        DPI_AWARENESS_CONTEXT prev_dpi_ctx;
        DXGI_FORMAT formats[] = {
            DXGI_FORMAT_B8G8R8A8_UNORM,
            DXGI_FORMAT_R10G10B10A2_UNORM,
            DXGI_FORMAT_R16G16B16A16_FLOAT,
        };
        int nb_formats = FF_ARRAY_ELEMS(formats);

        if (dda->out_fmt == DXGI_FORMAT_B8G8R8A8_UNORM) {
            formats[0] = dda->out_fmt;
            nb_formats = 1;
        } else if (dda->out_fmt) {
            formats[0] = dda->out_fmt;
            formats[1] = DXGI_FORMAT_B8G8R8A8_UNORM;
            nb_formats = dda->force_fmt ? 1 : 2;
        }

        IDXGIOutput_Release(dxgi_output);
        dxgi_output = NULL;

        prev_dpi_ctx = set_thread_dpi(DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE_V2);
        if (!prev_dpi_ctx)
            av_log(avctx, AV_LOG_WARNING, "Failed enabling DPI awareness for DDA\n");

        hr = IDXGIOutput5_DuplicateOutput1(dxgi_output5,
                                           (IUnknown*)dda->device_hwctx->device,
                                           0,
                                           nb_formats,
                                           formats,
                                           &dda->dxgi_outdupl);
        IDXGIOutput5_Release(dxgi_output5);
        dxgi_output5 = NULL;

        if (prev_dpi_ctx)
            set_thread_dpi(prev_dpi_ctx);

        dlclose(user32_module);
        user32_module = NULL;
        set_thread_dpi = NULL;

        av_log(avctx, AV_LOG_DEBUG, "Using IDXGIOutput5 interface\n");
    } else {
        dlclose(user32_module);
        user32_module = NULL;
        set_thread_dpi = NULL;

        av_log(avctx, AV_LOG_DEBUG, "Falling back to IDXGIOutput1\n");
#else
    {
#endif
        if (dda->out_fmt && dda->out_fmt != DXGI_FORMAT_B8G8R8A8_UNORM && (!dda->allow_fallback || dda->force_fmt)) {
            av_log(avctx, AV_LOG_ERROR, "Only 8 bit output supported with legacy API\n");
            return AVERROR(ENOTSUP);
        }

        hr = IDXGIOutput_QueryInterface(dxgi_output, &IID_IDXGIOutput1, (void**)&dxgi_output1);
        IDXGIOutput_Release(dxgi_output);
        dxgi_output = NULL;
        if (FAILED(hr)) {
            av_log(avctx, AV_LOG_ERROR, "Failed querying IDXGIOutput1\n");
            return AVERROR_EXTERNAL;
        }

        hr = IDXGIOutput1_DuplicateOutput(dxgi_output1,
                                          (IUnknown*)dda->device_hwctx->device,
                                          &dda->dxgi_outdupl);
        IDXGIOutput1_Release(dxgi_output1);
        dxgi_output1 = NULL;
    }

    if (hr == DXGI_ERROR_NOT_CURRENTLY_AVAILABLE) {
        av_log(avctx, AV_LOG_ERROR, "Too many open duplication sessions\n");
        return AVERROR(EBUSY);
    } else if (hr == DXGI_ERROR_UNSUPPORTED) {
        av_log(avctx, AV_LOG_ERROR, "Selected output not supported\n");
        return AVERROR_EXTERNAL;
    } else if (hr == E_INVALIDARG) {
        av_log(avctx, AV_LOG_ERROR, "Invalid output duplication argument\n");
        return AVERROR(EINVAL);
    } else if (hr == E_ACCESSDENIED) {
        av_log(avctx, AV_LOG_ERROR, "Desktop duplication access denied\n");
        return AVERROR(EPERM);
    } else if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "Failed duplicating output\n");
        return AVERROR_EXTERNAL;
    }

    w = dda->output_desc.DesktopCoordinates.right - dda->output_desc.DesktopCoordinates.left;
    h = dda->output_desc.DesktopCoordinates.bottom - dda->output_desc.DesktopCoordinates.top;
    av_log(avctx, AV_LOG_VERBOSE, "Opened dxgi output %d with dimensions %dx%d\n", dda->output_idx, w, h);

    return 0;
}

typedef struct ConstBufferData
{
    float width;
    float height;

    uint64_t padding;
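    /* Padding keeps sizeof(ConstBufferData) at 16 bytes: D3D11 requires
     * constant buffer sizes to be a multiple of 16. */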
} ConstBufferData;

static const D3D11_INPUT_ELEMENT_DESC vertex_shader_input_layout[] =
{
    { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0,  D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT,    0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 }
};
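
/*
 * For reference, this layout corresponds to a vertex shader input of roughly
 * the following shape (a sketch; the real, precompiled shader bytecode lives
 * in vsrc_ddagrab_shaders.h as vertex_shader_bytes):
 *
 *   struct VS_INPUT {
 *       float3 pos : POSITION;   // offset 0,  DXGI_FORMAT_R32G32B32_FLOAT
 *       float2 tex : TEXCOORD;   // offset 12, DXGI_FORMAT_R32G32_FLOAT
 *   };
 */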

static av_cold int init_render_resources(AVFilterContext *avctx)
{
    DdagrabContext *dda = avctx->priv;
    ID3D11Device *dev = dda->device_hwctx->device;
    D3D11_SAMPLER_DESC sampler_desc = { 0 };
    D3D11_BLEND_DESC blend_desc = { 0 };
    D3D11_BUFFER_DESC buffer_desc = { 0 };
    D3D11_SUBRESOURCE_DATA buffer_data = { 0 };
    ConstBufferData const_data = { 0 };
    HRESULT hr;

    hr = ID3D11Device_CreateVertexShader(dev,
                                         vertex_shader_bytes,
                                         sizeof(vertex_shader_bytes),
                                         NULL,
                                         &dda->vertex_shader);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "CreateVertexShader failed: %lx\n", hr);
        return AVERROR_EXTERNAL;
    }

    hr = ID3D11Device_CreateInputLayout(dev,
                                        vertex_shader_input_layout,
                                        FF_ARRAY_ELEMS(vertex_shader_input_layout),
                                        vertex_shader_bytes,
                                        sizeof(vertex_shader_bytes),
                                        &dda->input_layout);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "CreateInputLayout failed: %lx\n", hr);
        return AVERROR_EXTERNAL;
    }

    hr = ID3D11Device_CreatePixelShader(dev,
                                        pixel_shader_bytes,
                                        sizeof(pixel_shader_bytes),
                                        NULL,
                                        &dda->pixel_shader);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "CreatePixelShader failed: %lx\n", hr);
        return AVERROR_EXTERNAL;
    }

    const_data = (ConstBufferData){ dda->width, dda->height };

    buffer_data.pSysMem = &const_data;
    buffer_desc.ByteWidth = sizeof(const_data);
    buffer_desc.Usage = D3D11_USAGE_IMMUTABLE;
    buffer_desc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
    hr = ID3D11Device_CreateBuffer(dev,
                                   &buffer_desc,
                                   &buffer_data,
                                   &dda->const_buffer);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "CreateBuffer const buffer failed: %lx\n", hr);
        return AVERROR_EXTERNAL;
    }

    sampler_desc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
    sampler_desc.AddressU = D3D11_TEXTURE_ADDRESS_CLAMP;
    sampler_desc.AddressV = D3D11_TEXTURE_ADDRESS_CLAMP;
    sampler_desc.AddressW = D3D11_TEXTURE_ADDRESS_CLAMP;
    sampler_desc.ComparisonFunc = D3D11_COMPARISON_NEVER;
    hr = ID3D11Device_CreateSamplerState(dev,
                                         &sampler_desc,
                                         &dda->sampler_state);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "CreateSamplerState failed: %lx\n", hr);
        return AVERROR_EXTERNAL;
    }

    blend_desc.AlphaToCoverageEnable = FALSE;
    blend_desc.IndependentBlendEnable = FALSE;
    blend_desc.RenderTarget[0].BlendEnable = TRUE;
    blend_desc.RenderTarget[0].SrcBlend = D3D11_BLEND_SRC_ALPHA;
    blend_desc.RenderTarget[0].DestBlend = D3D11_BLEND_INV_SRC_ALPHA;
    blend_desc.RenderTarget[0].BlendOp = D3D11_BLEND_OP_ADD;
    blend_desc.RenderTarget[0].SrcBlendAlpha = D3D11_BLEND_ONE;
    blend_desc.RenderTarget[0].DestBlendAlpha = D3D11_BLEND_ZERO;
    blend_desc.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD;
    blend_desc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
    hr = ID3D11Device_CreateBlendState(dev,
                                       &blend_desc,
                                       &dda->blend_state);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "CreateBlendState failed: %lx\n", hr);
        return AVERROR_EXTERNAL;
    }

    blend_desc.RenderTarget[0].SrcBlend = D3D11_BLEND_INV_DEST_COLOR;
    blend_desc.RenderTarget[0].DestBlend = D3D11_BLEND_INV_SRC_COLOR;
    hr = ID3D11Device_CreateBlendState(dev,
                                       &blend_desc,
                                       &dda->blend_state_xor);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "CreateBlendState (xor) failed: %lx\n", hr);
        return AVERROR_EXTERNAL;
    }
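
    /* Sketch of what the two blend states above compute per pixel:
     *   blend_state:     out.rgb = src.rgb * src.a + dst.rgb * (1 - src.a)   (standard "over")
     *   blend_state_xor: out.rgb = src.rgb * (1 - dst.rgb) + dst.rgb * (1 - src.rgb)
     * With the xor state a white texel inverts the pixel underneath it and a
     * black texel leaves it untouched, which is what the XOR cursor masks need. */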

    return 0;
}

static av_cold int ddagrab_init(AVFilterContext *avctx)
{
    DdagrabContext *dda = avctx->priv;

    dda->last_frame = av_frame_alloc();
    if (!dda->last_frame)
        return AVERROR(ENOMEM);

    dda->mouse_x = -1;
    dda->mouse_y = -1;

    return 0;
}

static int create_d3d11_pointer_tex(AVFilterContext *avctx,
                                    uint8_t *buf,
                                    DXGI_OUTDUPL_POINTER_SHAPE_INFO *shape_info,
                                    ID3D11Texture2D **out_tex,
                                    ID3D11ShaderResourceView **res_view)
{
    DdagrabContext *dda = avctx->priv;
    D3D11_TEXTURE2D_DESC desc = { 0 };
    D3D11_SUBRESOURCE_DATA init_data = { 0 };
    D3D11_SHADER_RESOURCE_VIEW_DESC resource_desc = { 0 };
    HRESULT hr;

    desc.MipLevels = 1;
    desc.ArraySize = 1;
    desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
    desc.SampleDesc.Count = 1;
    desc.SampleDesc.Quality = 0;
    desc.Usage = D3D11_USAGE_IMMUTABLE;
    desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;

    desc.Width = shape_info->Width;
    desc.Height = shape_info->Height;

    init_data.pSysMem = buf;
    init_data.SysMemPitch = shape_info->Pitch;

    resource_desc.Format = desc.Format;
    resource_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
    resource_desc.Texture2D.MostDetailedMip = 0;
    resource_desc.Texture2D.MipLevels = 1;

    hr = ID3D11Device_CreateTexture2D(dda->device_hwctx->device,
                                      &desc,
                                      &init_data,
                                      out_tex);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "Failed creating pointer texture\n");
        return AVERROR_EXTERNAL;
    }

    hr = ID3D11Device_CreateShaderResourceView(dda->device_hwctx->device,
                                               (ID3D11Resource*)*out_tex,
                                               &resource_desc,
                                               res_view);
    if (FAILED(hr)) {
        release_resource(out_tex);
        av_log(avctx, AV_LOG_ERROR, "CreateShaderResourceView for mouse failed: %lx\n", hr);
        return AVERROR_EXTERNAL;
    }

    return 0;
}

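/*
 * DXGI delivers monochrome cursors as two stacked 1 bpp bitmaps: the AND mask
 * in the top half and the XOR mask in the bottom half of the shape buffer.
 * Per mask pixel, the pair means roughly:
 *
 *   AND  XOR   result on screen
 *    0    0    opaque black
 *    0    1    opaque white
 *    1    0    transparent (desktop shows through)
 *    1    1    invert the pixel underneath
 *
 * convert_mono_buffer() splits this into two premade BGRA images: a normal
 * one that is alpha-blended, and an "xor" one drawn with the inverting blend
 * state created in init_render_resources().
 */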
static int convert_mono_buffer(uint8_t *input, uint8_t **rgba_out, uint8_t **xor_out, int *_width, int *_height, int *_pitch)
{
    int width = *_width, height = *_height, pitch = *_pitch;
    int real_height = height / 2;
    int size = real_height * pitch;

    uint8_t *output = av_malloc(real_height * width * 4);
    uint8_t *output_xor = av_malloc(real_height * width * 4);

    int y, x;

    if (!output || !output_xor) {
        av_free(output);
        av_free(output_xor);
        return AVERROR(ENOMEM);
    }

    for (y = 0; y < real_height; y++) {
        for (x = 0; x < width; x++) {
            int in_pos = (y * pitch) + (x / 8);
            int out_pos = 4 * ((y * width) + x);
            int and_val = (input[in_pos] >> (7 - (x % 8))) & 1;
            int xor_val = (input[in_pos + size] >> (7 - (x % 8))) & 1;

            if (!and_val && !xor_val) {
                // solid black
                memset(&output[out_pos], 0, 4);
                output[out_pos + 3] = 0xFF;

                // transparent
                memset(&output_xor[out_pos], 0, 4);
            } else if (and_val && !xor_val) {
                // transparent
                memset(&output[out_pos], 0, 4);

                // transparent
                memset(&output_xor[out_pos], 0, 4);
            } else if (!and_val && xor_val) {
                // solid white
                memset(&output[out_pos], 0xFF, 4);

                // transparent
                memset(&output_xor[out_pos], 0, 4);
            } else if (and_val && xor_val) {
                // transparent
                memset(&output[out_pos], 0, 4);

                // solid white -> invert color
                memset(&output_xor[out_pos], 0xFF, 4);
            }
        }
    }

    *_pitch = width * 4;
    *_height = real_height;
    *rgba_out = output;
    *xor_out = output_xor;

    return 0;
}

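/*
 * For DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR the alpha byte of each
 * BGRA pixel acts as a mask flag rather than opacity: 0 means "replace the
 * screen pixel with this color", non-zero (0xFF in practice) means "XOR this
 * color with the screen". fixup_color_mask() turns that into two regular
 * BGRA images by rewriting the alpha channel: the normal image is opaque
 * where the mask is 0, the xor image is opaque where the mask is set.
 */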
static int fixup_color_mask(uint8_t *input, uint8_t **rgba_out, uint8_t **xor_out, int width, int height, int pitch)
{
    int size = height * pitch;
    uint8_t *output = av_malloc(size);
    uint8_t *output_xor = av_malloc(size);
    int x, y;

    if (!output || !output_xor) {
        av_free(output);
        av_free(output_xor);
        return AVERROR(ENOMEM);
    }

    memcpy(output, input, size);
    memcpy(output_xor, input, size);

    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++) {
            int pos = (y*pitch) + (4*x) + 3;
            output[pos] = input[pos] ? 0 : 0xFF;
            output_xor[pos] = input[pos] ? 0xFF : 0;
        }
    }

    *rgba_out = output;
    *xor_out = output_xor;

    return 0;
}

static int update_mouse_pointer(AVFilterContext *avctx, DXGI_OUTDUPL_FRAME_INFO *frame_info)
{
    DdagrabContext *dda = avctx->priv;
    HRESULT hr;
    int ret, ret2;

    if (frame_info->LastMouseUpdateTime.QuadPart == 0)
        return 0;

    if (frame_info->PointerPosition.Visible) {
        switch (dda->output_desc.Rotation) {
        case DXGI_MODE_ROTATION_ROTATE90:
            dda->mouse_x = frame_info->PointerPosition.Position.y;
            dda->mouse_y = dda->output_desc.DesktopCoordinates.right - dda->output_desc.DesktopCoordinates.left - frame_info->PointerPosition.Position.x - 1;
            break;
        case DXGI_MODE_ROTATION_ROTATE180:
            dda->mouse_x = dda->output_desc.DesktopCoordinates.right - dda->output_desc.DesktopCoordinates.left - frame_info->PointerPosition.Position.x - 1;
            dda->mouse_y = dda->output_desc.DesktopCoordinates.bottom - dda->output_desc.DesktopCoordinates.top - frame_info->PointerPosition.Position.y - 1;
            break;
        case DXGI_MODE_ROTATION_ROTATE270:
            dda->mouse_x = dda->output_desc.DesktopCoordinates.bottom - dda->output_desc.DesktopCoordinates.top - frame_info->PointerPosition.Position.y - 1;
            dda->mouse_y = frame_info->PointerPosition.Position.x;
            break;
        default:
            dda->mouse_x = frame_info->PointerPosition.Position.x;
            dda->mouse_y = frame_info->PointerPosition.Position.y;
        }
    } else {
        dda->mouse_x = dda->mouse_y = -1;
    }

    if (frame_info->PointerShapeBufferSize) {
        UINT size = frame_info->PointerShapeBufferSize;
        DXGI_OUTDUPL_POINTER_SHAPE_INFO shape_info;
        uint8_t *rgba_buf = NULL, *rgb_xor_buf = NULL;
        uint8_t *buf = av_malloc(size);
        if (!buf)
            return AVERROR(ENOMEM);

        hr = IDXGIOutputDuplication_GetFramePointerShape(dda->dxgi_outdupl,
                                                         size,
                                                         buf,
                                                         &size,
                                                         &shape_info);
        if (FAILED(hr)) {
            av_free(buf);
            av_log(avctx, AV_LOG_ERROR, "Failed getting pointer shape: %lx\n", hr);
            return AVERROR_EXTERNAL;
        }

        if (shape_info.Type == DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MONOCHROME) {
            ret = convert_mono_buffer(buf, &rgba_buf, &rgb_xor_buf, &shape_info.Width, &shape_info.Height, &shape_info.Pitch);
            av_freep(&buf);
            if (ret < 0)
                return ret;
        } else if (shape_info.Type == DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR) {
            ret = fixup_color_mask(buf, &rgba_buf, &rgb_xor_buf, shape_info.Width, shape_info.Height, shape_info.Pitch);
            av_freep(&buf);
            if (ret < 0)
                return ret;
        } else if (shape_info.Type == DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR) {
            rgba_buf = buf;
            buf = NULL;
        } else {
            av_log(avctx, AV_LOG_WARNING, "Unsupported pointer shape type: %d\n", (int)shape_info.Type);
            av_freep(&buf);
            return 0;
        }

        release_resource(&dda->mouse_resource_view);
        release_resource(&dda->mouse_texture);
        release_resource(&dda->mouse_xor_resource_view);
        release_resource(&dda->mouse_xor_texture);

        ret = create_d3d11_pointer_tex(avctx, rgba_buf, &shape_info, &dda->mouse_texture, &dda->mouse_resource_view);
        ret2 = rgb_xor_buf ? create_d3d11_pointer_tex(avctx, rgb_xor_buf, &shape_info, &dda->mouse_xor_texture, &dda->mouse_xor_resource_view) : 0;
        av_freep(&rgba_buf);
        av_freep(&rgb_xor_buf);
        if (ret < 0)
            return ret;
        if (ret2 < 0)
            return ret2;

        av_log(avctx, AV_LOG_VERBOSE, "Updated pointer shape texture\n");
    }

    return 0;
}

static int next_frame_internal(AVFilterContext *avctx, ID3D11Texture2D **desktop_texture, int need_frame)
{
    DXGI_OUTDUPL_FRAME_INFO frame_info;
    DdagrabContext *dda = avctx->priv;
    IDXGIResource *desktop_resource = NULL;
    HRESULT hr;
    int ret;

    hr = IDXGIOutputDuplication_AcquireNextFrame(
        dda->dxgi_outdupl,
        dda->time_timeout,
        &frame_info,
        &desktop_resource);
    if (hr == DXGI_ERROR_WAIT_TIMEOUT) {
        return AVERROR(EAGAIN);
    } else if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "AcquireNextFrame failed: %lx\n", hr);
        return AVERROR_EXTERNAL;
    }

    if (dda->draw_mouse) {
        ret = update_mouse_pointer(avctx, &frame_info);
        if (ret < 0)
            goto error;
    }

    if (!frame_info.LastPresentTime.QuadPart || !frame_info.AccumulatedFrames) {
        if (need_frame) {
            ret = AVERROR(EAGAIN);
            goto error;
        }

        // Unfortunately, we can't rely on the desktop_resource's format in this case.
        // The API might even return it in a format that was not in the initial
        // list of supported formats, and it can change/flicker randomly.
        // To work around this, return an internal copy of the last valid texture we got.
        release_resource(&desktop_resource);

        // The initial probing should make this impossible.
        if (!dda->buffer_texture) {
            av_log(avctx, AV_LOG_ERROR, "No buffer texture while operating!\n");
            ret = AVERROR_BUG;
            goto error;
        }

        av_log(avctx, AV_LOG_TRACE, "Returning internal buffer for a frame!\n");
        ID3D11Texture2D_AddRef(dda->buffer_texture);
        *desktop_texture = dda->buffer_texture;
        return 0;
    }

    hr = IDXGIResource_QueryInterface(desktop_resource, &IID_ID3D11Texture2D, (void**)desktop_texture);
    release_resource(&desktop_resource);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "DXGIResource QueryInterface failed\n");
        ret = AVERROR_EXTERNAL;
        goto error;
    }

    if (!dda->buffer_texture) {
        D3D11_TEXTURE2D_DESC desc;
        ID3D11Texture2D_GetDesc(*desktop_texture, &desc);
        desc.Usage = D3D11_USAGE_DEFAULT;
        desc.BindFlags = 0;
        desc.CPUAccessFlags = 0;
        desc.MiscFlags = 0;

        hr = ID3D11Device_CreateTexture2D(dda->device_hwctx->device, &desc, NULL, &dda->buffer_texture);
        if (FAILED(hr)) {
            release_resource(desktop_texture);
            av_log(avctx, AV_LOG_ERROR, "Failed creating internal buffer texture.\n");
            ret = AVERROR(ENOMEM);
            goto error;
        }
    }

    ID3D11DeviceContext_CopyResource(dda->device_hwctx->device_context,
                                     (ID3D11Resource*)dda->buffer_texture,
                                     (ID3D11Resource*)*desktop_texture);

    return 0;

error:
    release_resource(&desktop_resource);

    hr = IDXGIOutputDuplication_ReleaseFrame(dda->dxgi_outdupl);
    if (FAILED(hr))
        av_log(avctx, AV_LOG_ERROR, "DDA error ReleaseFrame failed!\n");

    return ret;
}
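
/*
 * probe_output_format() below grabs one frame up front purely to learn the
 * format and size DDA settled on; the texture is kept in dda->probed_texture
 * and handed out as the first real frame, and it also ensures that
 * dda->buffer_texture exists before regular operation starts.
 */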

static int probe_output_format(AVFilterContext *avctx)
{
    DdagrabContext *dda = avctx->priv;
    D3D11_TEXTURE2D_DESC desc;
    int ret;

    av_assert1(!dda->probed_texture);

    do {
        ret = next_frame_internal(avctx, &dda->probed_texture, 1);
    } while (ret == AVERROR(EAGAIN));
    if (ret < 0)
        return ret;

    ID3D11Texture2D_GetDesc(dda->probed_texture, &desc);

    dda->raw_format = desc.Format;
    dda->raw_width = desc.Width;
    dda->raw_height = desc.Height;

    if (dda->width <= 0)
        dda->width = dda->raw_width;
    if (dda->height <= 0)
        dda->height = dda->raw_height;

    return 0;
}

static av_cold int init_hwframes_ctx(AVFilterContext *avctx)
{
    DdagrabContext *dda = avctx->priv;
    int ret = 0;

    dda->frames_ref = av_hwframe_ctx_alloc(dda->device_ref);
    if (!dda->frames_ref)
        return AVERROR(ENOMEM);
    dda->frames_ctx = (AVHWFramesContext*)dda->frames_ref->data;
    dda->frames_hwctx = (AVD3D11VAFramesContext*)dda->frames_ctx->hwctx;

    dda->frames_ctx->format = AV_PIX_FMT_D3D11;
    dda->frames_ctx->width = dda->width;
    dda->frames_ctx->height = dda->height;

    switch (dda->raw_format) {
    case DXGI_FORMAT_B8G8R8A8_UNORM:
        av_log(avctx, AV_LOG_VERBOSE, "Probed 8 bit RGB frame format\n");
        dda->frames_ctx->sw_format = AV_PIX_FMT_BGRA;
        break;
    case DXGI_FORMAT_R10G10B10A2_UNORM:
        av_log(avctx, AV_LOG_VERBOSE, "Probed 10 bit RGB frame format\n");
        dda->frames_ctx->sw_format = AV_PIX_FMT_X2BGR10;
        break;
    case DXGI_FORMAT_R16G16B16A16_FLOAT:
        av_log(avctx, AV_LOG_VERBOSE, "Probed 16 bit float RGB frame format\n");
        dda->frames_ctx->sw_format = AV_PIX_FMT_RGBAF16;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unexpected texture output format!\n");
        return AVERROR_BUG;
    }

    if (dda->draw_mouse)
        dda->frames_hwctx->BindFlags |= D3D11_BIND_RENDER_TARGET;

    ret = av_hwframe_ctx_init(dda->frames_ref);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise hardware frames context: %d.\n", ret);
        goto fail;
    }

    return 0;
fail:
    av_buffer_unref(&dda->frames_ref);
    return ret;
}

static int ddagrab_config_props(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    DdagrabContext *dda = avctx->priv;
    int ret;

    if (avctx->hw_device_ctx) {
        dda->device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;

        if (dda->device_ctx->type != AV_HWDEVICE_TYPE_D3D11VA) {
            av_log(avctx, AV_LOG_ERROR, "Non-D3D11VA input hw_device_ctx\n");
            return AVERROR(EINVAL);
        }

        dda->device_ref = av_buffer_ref(avctx->hw_device_ctx);
        if (!dda->device_ref)
            return AVERROR(ENOMEM);

        av_log(avctx, AV_LOG_VERBOSE, "Using provided hw_device_ctx\n");
    } else {
        ret = av_hwdevice_ctx_create(&dda->device_ref, AV_HWDEVICE_TYPE_D3D11VA, NULL, NULL, 0);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to create D3D11VA device.\n");
            return ret;
        }

        dda->device_ctx = (AVHWDeviceContext*)dda->device_ref->data;

        av_log(avctx, AV_LOG_VERBOSE, "Created internal hw_device_ctx\n");
    }

    dda->device_hwctx = (AVD3D11VADeviceContext*)dda->device_ctx->hwctx;

    ret = init_dxgi_dda(avctx);
    if (ret < 0)
        return ret;

    ret = probe_output_format(avctx);
    if (ret < 0)
        return ret;

    if (dda->out_fmt && dda->raw_format != dda->out_fmt && (!dda->allow_fallback || dda->force_fmt)) {
        av_log(avctx, AV_LOG_ERROR, "Requested output format unavailable.\n");
        return AVERROR(ENOTSUP);
    }

    dda->width -= FFMAX(dda->width - dda->raw_width + dda->offset_x, 0);
    dda->height -= FFMAX(dda->height - dda->raw_height + dda->offset_y, 0);

    dda->time_base = av_inv_q(dda->framerate);
    dda->time_frame = av_gettime_relative() / av_q2d(dda->time_base);
    dda->time_timeout = av_rescale_q(1, dda->time_base, (AVRational) { 1, 1000 }) / 2;

    if (dda->draw_mouse) {
        ret = init_render_resources(avctx);
        if (ret < 0)
            return ret;
    }

    ret = init_hwframes_ctx(avctx);
    if (ret < 0)
        return ret;

    outlink->hw_frames_ctx = av_buffer_ref(dda->frames_ref);
    if (!outlink->hw_frames_ctx)
        return AVERROR(ENOMEM);

    outlink->w = dda->width;
    outlink->h = dda->height;
    outlink->time_base  = (AVRational){1, TIMER_RES};
    outlink->frame_rate = dda->framerate;

    return 0;
}

static int draw_mouse_pointer(AVFilterContext *avctx, AVFrame *frame)
{
    DdagrabContext *dda = avctx->priv;
    ID3D11DeviceContext *devctx = dda->device_hwctx->device_context;
    ID3D11Texture2D *frame_tex = (ID3D11Texture2D*)frame->data[0];
    D3D11_RENDER_TARGET_VIEW_DESC target_desc = { 0 };
    ID3D11RenderTargetView* target_view = NULL;
    ID3D11Buffer *mouse_vertex_buffer = NULL;
    D3D11_TEXTURE2D_DESC tex_desc;
    int num_vertices = 0;
    int x, y;
    HRESULT hr;
    int ret = 0;

    if (!dda->mouse_texture || dda->mouse_x < 0 || dda->mouse_y < 0)
        return 0;

    ID3D11Texture2D_GetDesc(dda->mouse_texture, &tex_desc);

    x = dda->mouse_x - dda->offset_x;
    y = dda->mouse_y - dda->offset_y;

    if (x >= dda->width || y >= dda->height ||
        -x >= (int)tex_desc.Width || -y >= (int)tex_desc.Height)
        return 0;

    target_desc.Format = dda->raw_format;
    target_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
    target_desc.Texture2D.MipSlice = 0;

    hr = ID3D11Device_CreateRenderTargetView(dda->device_hwctx->device,
                                             (ID3D11Resource*)frame_tex,
                                             &target_desc,
                                             &target_view);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "CreateRenderTargetView failed: %lx\n", hr);
        ret = AVERROR_EXTERNAL;
        goto end;
    }

    ID3D11DeviceContext_ClearState(devctx);

    {
        D3D11_VIEWPORT viewport = { 0 };
        viewport.Width = dda->width;
        viewport.Height = dda->height;
        viewport.MinDepth = 0.0f;
        viewport.MaxDepth = 1.0f;

        ID3D11DeviceContext_RSSetViewports(devctx, 1, &viewport);
    }

    {
        FLOAT vertices[] = {
            // x, y, z, u, v
            x                 , y + tex_desc.Height, 0.0f, 0.0f, 1.0f,
            x                 , y                  , 0.0f, 0.0f, 0.0f,
            x + tex_desc.Width, y + tex_desc.Height, 0.0f, 1.0f, 1.0f,
            x + tex_desc.Width, y                  , 0.0f, 1.0f, 0.0f,
        };
        UINT stride = sizeof(FLOAT) * 5;
        UINT offset = 0;

        D3D11_SUBRESOURCE_DATA init_data = { 0 };
        D3D11_BUFFER_DESC buf_desc = { 0 };

        switch (dda->output_desc.Rotation) {
        case DXGI_MODE_ROTATION_ROTATE90:
            vertices[ 0] = x;                   vertices[ 1] = y;
            vertices[ 5] = x;                   vertices[ 6] = y - tex_desc.Width;
            vertices[10] = x + tex_desc.Height; vertices[11] = y;
            vertices[15] = x + tex_desc.Height; vertices[16] = y - tex_desc.Width;
            vertices[ 3] = 0.0f; vertices[ 4] = 0.0f;
            vertices[ 8] = 1.0f; vertices[ 9] = 0.0f;
            vertices[13] = 0.0f; vertices[14] = 1.0f;
            vertices[18] = 1.0f; vertices[19] = 1.0f;
            break;
        case DXGI_MODE_ROTATION_ROTATE180:
            vertices[ 0] = x - tex_desc.Width;  vertices[ 1] = y;
            vertices[ 5] = x - tex_desc.Width;  vertices[ 6] = y - tex_desc.Height;
            vertices[10] = x;                   vertices[11] = y;
            vertices[15] = x;                   vertices[16] = y - tex_desc.Height;
            vertices[ 3] = 1.0f; vertices[ 4] = 0.0f;
            vertices[ 8] = 1.0f; vertices[ 9] = 1.0f;
            vertices[13] = 0.0f; vertices[14] = 0.0f;
            vertices[18] = 0.0f; vertices[19] = 1.0f;
            break;
        case DXGI_MODE_ROTATION_ROTATE270:
            vertices[ 0] = x - tex_desc.Height; vertices[ 1] = y + tex_desc.Width;
            vertices[ 5] = x - tex_desc.Height; vertices[ 6] = y;
            vertices[10] = x;                   vertices[11] = y + tex_desc.Width;
            vertices[15] = x;                   vertices[16] = y;
            vertices[ 3] = 1.0f; vertices[ 4] = 1.0f;
            vertices[ 8] = 0.0f; vertices[ 9] = 1.0f;
            vertices[13] = 1.0f; vertices[14] = 0.0f;
            vertices[18] = 0.0f; vertices[19] = 0.0f;
            break;
        default:
            break;
        }

        num_vertices = sizeof(vertices) / (sizeof(FLOAT) * 5);

        buf_desc.Usage = D3D11_USAGE_DEFAULT;
        buf_desc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
        buf_desc.ByteWidth = sizeof(vertices);
        init_data.pSysMem = vertices;

        hr = ID3D11Device_CreateBuffer(dda->device_hwctx->device,
                                       &buf_desc,
                                       &init_data,
                                       &mouse_vertex_buffer);
        if (FAILED(hr)) {
            av_log(avctx, AV_LOG_ERROR, "CreateBuffer failed: %lx\n", hr);
            ret = AVERROR_EXTERNAL;
            goto end;
        }

        ID3D11DeviceContext_IASetVertexBuffers(devctx, 0, 1, &mouse_vertex_buffer, &stride, &offset);
        ID3D11DeviceContext_IASetInputLayout(devctx, dda->input_layout);
        ID3D11DeviceContext_IASetPrimitiveTopology(devctx, D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP);
    }

    ID3D11DeviceContext_VSSetShader(devctx, dda->vertex_shader, NULL, 0);
    ID3D11DeviceContext_VSSetConstantBuffers(devctx, 0, 1, &dda->const_buffer);
    ID3D11DeviceContext_PSSetSamplers(devctx, 0, 1, &dda->sampler_state);
    ID3D11DeviceContext_PSSetShaderResources(devctx, 0, 1, &dda->mouse_resource_view);
    ID3D11DeviceContext_PSSetShader(devctx, dda->pixel_shader, NULL, 0);

    ID3D11DeviceContext_OMSetBlendState(devctx, dda->blend_state, NULL, 0xFFFFFFFF);
    ID3D11DeviceContext_OMSetRenderTargets(devctx, 1, &target_view, NULL);

    ID3D11DeviceContext_Draw(devctx, num_vertices, 0);

    if (dda->mouse_xor_resource_view) {
        ID3D11DeviceContext_PSSetShaderResources(devctx, 0, 1, &dda->mouse_xor_resource_view);
        ID3D11DeviceContext_OMSetBlendState(devctx, dda->blend_state_xor, NULL, 0xFFFFFFFF);

        ID3D11DeviceContext_Draw(devctx, num_vertices, 0);
    }

end:
    release_resource(&mouse_vertex_buffer);
    release_resource(&target_view);

    return ret;
}

static int ddagrab_request_frame(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    DdagrabContext *dda = avctx->priv;

    ID3D11Texture2D *cur_texture = NULL;
    D3D11_TEXTURE2D_DESC desc = { 0 };
    D3D11_BOX box = { 0 };

    int64_t time_frame = dda->time_frame;
    int64_t now, delay;
    AVFrame *frame = NULL;
    HRESULT hr;
    int ret;

    /* time_frame is in units of microseconds divided by the time_base.
     * This means that adding a clean 1M to it is the equivalent of adding
     * 1M*time_base microseconds to it, except it avoids all rounding error.
     * The only time rounding error occurs is when multiplying to calculate
     * the delay. So any rounding error there corrects itself over time.
     */
    time_frame += TIMER_RES64;
    for (;;) {
        now = av_gettime_relative();
        delay = time_frame * av_q2d(dda->time_base) - now;
        if (delay <= 0) {
            if (delay < -TIMER_RES64 * av_q2d(dda->time_base)) {
                time_frame += TIMER_RES64;
            }
            break;
        }
        av_usleep(delay);
    }
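
    /* Worked example (illustrative): with framerate=30, time_base is 1/30, so
     * time_frame advances by TIMER_RES64 (1,000,000) per output frame, which
     * corresponds to 1,000,000 * (1/30) ~= 33,333 microseconds of wall clock.
     * delay is the remaining wall-clock time until that target, and the loop
     * above sleeps it off before grabbing the next frame. */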

    if (!dda->first_pts)
        dda->first_pts = now;
    now -= dda->first_pts;

    if (!dda->probed_texture) {
        do {
            ret = next_frame_internal(avctx, &cur_texture, 0);
        } while (ret == AVERROR(EAGAIN) && !dda->dup_frames);
    } else {
        cur_texture = dda->probed_texture;
        dda->probed_texture = NULL;
        ret = 0;
    }

    if (ret == AVERROR(EAGAIN) && dda->last_frame->buf[0]) {
        frame = av_frame_alloc();
        if (!frame)
            return AVERROR(ENOMEM);

        ret = av_frame_ref(frame, dda->last_frame);
        if (ret < 0) {
            av_frame_free(&frame);
            return ret;
        }

        av_log(avctx, AV_LOG_DEBUG, "Duplicated output frame\n");

        goto frame_done;
    } else if (ret == AVERROR(EAGAIN)) {
        av_log(avctx, AV_LOG_VERBOSE, "Initial DDA AcquireNextFrame timeout!\n");
        return AVERROR(EAGAIN);
    } else if (ret < 0) {
        return ret;
    }

    // AcquireNextFrame sometimes has bursts of delay.
    // This increases accuracy of the timestamp, but might upset consumers due to more jittery framerate?
    now = av_gettime_relative() - dda->first_pts;

    ID3D11Texture2D_GetDesc(cur_texture, &desc);
    if (desc.Format != dda->raw_format ||
        (int)desc.Width != dda->raw_width ||
        (int)desc.Height != dda->raw_height) {
        av_log(avctx, AV_LOG_ERROR, "Output parameters changed!\n");
        ret = AVERROR_OUTPUT_CHANGED;
        goto fail;
    }

    frame = ff_get_video_buffer(outlink, dda->width, dda->height);
    if (!frame) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    box.left = dda->offset_x;
    box.top = dda->offset_y;
    box.right = box.left + dda->width;
    box.bottom = box.top + dda->height;
    box.front = 0;
    box.back = 1;

    ID3D11DeviceContext_CopySubresourceRegion(
        dda->device_hwctx->device_context,
        (ID3D11Resource*)frame->data[0], (UINT)(intptr_t)frame->data[1],
        0, 0, 0,
        (ID3D11Resource*)cur_texture, 0,
        &box);

    release_resource(&cur_texture);

    hr = IDXGIOutputDuplication_ReleaseFrame(dda->dxgi_outdupl);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "DDA ReleaseFrame failed!\n");
        ret = AVERROR_EXTERNAL;
        goto fail;
    }

    if (dda->draw_mouse) {
        ret = draw_mouse_pointer(avctx, frame);
        if (ret < 0)
            goto fail;
    }

    frame->sample_aspect_ratio = (AVRational){1, 1};

    if (desc.Format == DXGI_FORMAT_B8G8R8A8_UNORM ||
        desc.Format == DXGI_FORMAT_R10G10B10A2_UNORM) {
        // According to MSDN, all integer formats contain sRGB image data
        frame->color_range     = AVCOL_RANGE_JPEG;
        frame->color_primaries = AVCOL_PRI_BT709;
        frame->color_trc       = AVCOL_TRC_IEC61966_2_1;
        frame->colorspace      = AVCOL_SPC_RGB;
    } else if (desc.Format == DXGI_FORMAT_R16G16B16A16_FLOAT) {
        // According to MSDN, all floating point formats contain sRGB image data with linear 1.0 gamma.
        frame->color_range     = AVCOL_RANGE_JPEG;
        frame->color_primaries = AVCOL_PRI_BT709;
        frame->color_trc       = AVCOL_TRC_LINEAR;
        frame->colorspace      = AVCOL_SPC_RGB;
    } else {
        ret = AVERROR_BUG;
        goto fail;
    }

    ret = av_frame_replace(dda->last_frame, frame);
    if (ret < 0)
        return ret;

frame_done:
    frame->pts = now;
    dda->time_frame = time_frame;

    return ff_filter_frame(outlink, frame);

fail:
    if (frame)
        av_frame_free(&frame);

    if (cur_texture)
        IDXGIOutputDuplication_ReleaseFrame(dda->dxgi_outdupl);

    release_resource(&cur_texture);
    return ret;
}

static const AVFilterPad ddagrab_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = ddagrab_request_frame,
        .config_props  = ddagrab_config_props,
    },
};

const AVFilter ff_vsrc_ddagrab = {
    .name           = "ddagrab",
    .description    = NULL_IF_CONFIG_SMALL("Grab Windows Desktop images using Desktop Duplication API"),
    .priv_size      = sizeof(DdagrabContext),
    .priv_class     = &ddagrab_class,
    .init           = ddagrab_init,
    .uninit         = ddagrab_uninit,
    .inputs         = NULL,
    FILTER_OUTPUTS(ddagrab_outputs),
    FILTER_SINGLE_PIXFMT(AV_PIX_FMT_D3D11),
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
    .flags          = AVFILTER_FLAG_HWDEVICE,
};
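
/*
 * Note (not part of this file): ff_vsrc_ddagrab is picked up by name through
 * its extern declaration in libavfilter/allfilters.c, which is how the filter
 * becomes available as "ddagrab" in filter graphs.
 */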