FFmpeg
amfenc.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 #include "config_components.h"
21 
22 #include "libavutil/avassert.h"
23 #include "libavutil/imgutils.h"
24 #include "libavutil/hwcontext.h"
25 #if CONFIG_D3D11VA
26 #include "libavutil/hwcontext_d3d11va.h"
27 #endif
28 #if CONFIG_DXVA2
29 #define COBJMACROS
30 #include "libavutil/hwcontext_dxva2.h"
31 #endif
32 #include "libavutil/mem.h"
33 #include "libavutil/pixdesc.h"
34 #include "libavutil/time.h"
35 
36 #include "amfenc.h"
37 #include "encode.h"
38 #include "internal.h"
39 
40 #if CONFIG_D3D11VA
41 #include <d3d11.h>
42 #endif
43 
44 #ifdef _WIN32
45 #include "compat/w32dlfcn.h"
46 #else
47 #include <dlfcn.h>
48 #endif
49 
50 #define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"
51 
52 #define PTS_PROP L"PtsProp"
53 
54 const enum AVPixelFormat ff_amf_pix_fmts[] = {
55  AV_PIX_FMT_NV12,
56  AV_PIX_FMT_YUV420P,
57 #if CONFIG_D3D11VA
58  AV_PIX_FMT_D3D11,
59 #endif
60 #if CONFIG_DXVA2
61  AV_PIX_FMT_DXVA2_VLD,
62 #endif
63  AV_PIX_FMT_NONE
64 };
65 
66 typedef struct FormatMap {
67  enum AVPixelFormat av_format;
68  enum AMF_SURFACE_FORMAT amf_format;
69 } FormatMap;
70 
71 static const FormatMap format_map[] =
72 {
73  { AV_PIX_FMT_NONE, AMF_SURFACE_UNKNOWN },
74  { AV_PIX_FMT_NV12, AMF_SURFACE_NV12 },
75  { AV_PIX_FMT_BGR0, AMF_SURFACE_BGRA },
76  { AV_PIX_FMT_RGB0, AMF_SURFACE_RGBA },
77  { AV_PIX_FMT_GRAY8, AMF_SURFACE_GRAY8 },
78  { AV_PIX_FMT_YUV420P, AMF_SURFACE_YUV420P },
79  { AV_PIX_FMT_YUYV422, AMF_SURFACE_YUY2 },
80 };
81 
82 static enum AMF_SURFACE_FORMAT amf_av_to_amf_format(enum AVPixelFormat fmt)
83 {
84  int i;
85  for (i = 0; i < amf_countof(format_map); i++) {
86  if (format_map[i].av_format == fmt) {
87  return format_map[i].amf_format;
88  }
89  }
90  return AMF_SURFACE_UNKNOWN;
91 }
92 
93 static void AMF_CDECL_CALL AMFTraceWriter_Write(AMFTraceWriter *pThis,
94  const wchar_t *scope, const wchar_t *message)
95 {
96  AmfTraceWriter *tracer = (AmfTraceWriter*)pThis;
97  av_log(tracer->avctx, AV_LOG_DEBUG, "%ls: %ls", scope, message); // \n is provided from AMF
98 }
99 
100 static void AMF_CDECL_CALL AMFTraceWriter_Flush(AMFTraceWriter *pThis)
101 {
102 }
103 
104 static AMFTraceWriterVtbl tracer_vtbl =
105 {
106  .Write = AMFTraceWriter_Write,
107  .Flush = AMFTraceWriter_Flush,
108 };
109 
110 static int amf_load_library(AVCodecContext *avctx)
111 {
112  AmfContext *ctx = avctx->priv_data;
113  AMFInit_Fn init_fun;
114  AMFQueryVersion_Fn version_fun;
115  AMF_RESULT res;
116 
117  ctx->delayed_frame = av_frame_alloc();
118  if (!ctx->delayed_frame) {
119  return AVERROR(ENOMEM);
120  }
121  // hardcoded to current HW queue size - will auto-realloc if too small
122  ctx->timestamp_list = av_fifo_alloc2(avctx->max_b_frames + 16, sizeof(int64_t),
123  AV_FIFO_FLAG_AUTO_GROW);
124  if (!ctx->timestamp_list) {
125  return AVERROR(ENOMEM);
126  }
127  ctx->dts_delay = 0;
128 
129 
130  ctx->library = dlopen(AMF_DLL_NAMEA, RTLD_NOW | RTLD_LOCAL);
131  AMF_RETURN_IF_FALSE(ctx, ctx->library != NULL,
132  AVERROR_UNKNOWN, "DLL %s failed to open\n", AMF_DLL_NAMEA);
133 
134  init_fun = (AMFInit_Fn)dlsym(ctx->library, AMF_INIT_FUNCTION_NAME);
135  AMF_RETURN_IF_FALSE(ctx, init_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_INIT_FUNCTION_NAME);
136 
137  version_fun = (AMFQueryVersion_Fn)dlsym(ctx->library, AMF_QUERY_VERSION_FUNCTION_NAME);
138  AMF_RETURN_IF_FALSE(ctx, version_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_QUERY_VERSION_FUNCTION_NAME);
139 
140  res = version_fun(&ctx->version);
141  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_QUERY_VERSION_FUNCTION_NAME, res);
142  res = init_fun(AMF_FULL_VERSION, &ctx->factory);
143  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_INIT_FUNCTION_NAME, res);
144  res = ctx->factory->pVtbl->GetTrace(ctx->factory, &ctx->trace);
145  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetTrace() failed with error %d\n", res);
146  res = ctx->factory->pVtbl->GetDebug(ctx->factory, &ctx->debug);
147  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetDebug() failed with error %d\n", res);
148  return 0;
149 }
150 
151 #if CONFIG_D3D11VA
152 static int amf_init_from_d3d11_device(AVCodecContext *avctx, AVD3D11VADeviceContext *hwctx)
153 {
154  AmfContext *ctx = avctx->priv_data;
155  AMF_RESULT res;
156 
157  res = ctx->context->pVtbl->InitDX11(ctx->context, hwctx->device, AMF_DX11_1);
158  if (res != AMF_OK) {
159  if (res == AMF_NOT_SUPPORTED)
160  av_log(avctx, AV_LOG_ERROR, "AMF via D3D11 is not supported on the given device.\n");
161  else
162  av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on the given D3D11 device: %d.\n", res);
163  return AVERROR(ENODEV);
164  }
165 
166  return 0;
167 }
168 #endif
169 
170 #if CONFIG_DXVA2
171 static int amf_init_from_dxva2_device(AVCodecContext *avctx, AVDXVA2DeviceContext *hwctx)
172 {
173  AmfContext *ctx = avctx->priv_data;
174  HANDLE device_handle;
175  IDirect3DDevice9 *device;
176  HRESULT hr;
177  AMF_RESULT res;
178  int ret;
179 
180  hr = IDirect3DDeviceManager9_OpenDeviceHandle(hwctx->devmgr, &device_handle);
181  if (FAILED(hr)) {
182  av_log(avctx, AV_LOG_ERROR, "Failed to open device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
183  return AVERROR_EXTERNAL;
184  }
185 
186  hr = IDirect3DDeviceManager9_LockDevice(hwctx->devmgr, device_handle, &device, FALSE);
187  if (SUCCEEDED(hr)) {
188  IDirect3DDeviceManager9_UnlockDevice(hwctx->devmgr, device_handle, FALSE);
189  ret = 0;
190  } else {
191  av_log(avctx, AV_LOG_ERROR, "Failed to lock device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
192  ret = AVERROR_EXTERNAL;
193  }
194 
195  IDirect3DDeviceManager9_CloseDeviceHandle(hwctx->devmgr, device_handle);
196 
197  if (ret < 0)
198  return ret;
199 
200  res = ctx->context->pVtbl->InitDX9(ctx->context, device);
201 
202  IDirect3DDevice9_Release(device);
203 
204  if (res != AMF_OK) {
205  if (res == AMF_NOT_SUPPORTED)
206  av_log(avctx, AV_LOG_ERROR, "AMF via D3D9 is not supported on the given device.\n");
207  else
208  av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on given D3D9 device: %d.\n", res);
209  return AVERROR(ENODEV);
210  }
211 
212  return 0;
213 }
214 #endif
215 
216 static int amf_init_context(AVCodecContext *avctx)
217 {
218  AmfContext *ctx = avctx->priv_data;
219  AMFContext1 *context1 = NULL;
220  AMF_RESULT res;
221  av_unused int ret;
222 
223  ctx->hwsurfaces_in_queue = 0;
224  ctx->hwsurfaces_in_queue_max = 16;
225 
226  // configure AMF logger
227  // the return values of these functions indicate the old state and do not affect behaviour
228  ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, ctx->log_to_dbg != 0 );
229  if (ctx->log_to_dbg)
230  ctx->trace->pVtbl->SetWriterLevel(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, AMF_TRACE_TRACE);
231  ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_CONSOLE, 0);
232  ctx->trace->pVtbl->SetGlobalLevel(ctx->trace, AMF_TRACE_TRACE);
233 
234  // connect AMF logger to av_log
235  ctx->tracer.vtbl = &tracer_vtbl;
236  ctx->tracer.avctx = avctx;
237  ctx->trace->pVtbl->RegisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID,(AMFTraceWriter*)&ctx->tracer, 1);
238  ctx->trace->pVtbl->SetWriterLevel(ctx->trace, FFMPEG_AMF_WRITER_ID, AMF_TRACE_TRACE);
239 
240  res = ctx->factory->pVtbl->CreateContext(ctx->factory, &ctx->context);
241  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext() failed with error %d\n", res);
242 
243  // If a device was passed to the encoder, try to initialise from that.
244  if (avctx->hw_frames_ctx) {
245  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
246 
247  if (amf_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
248  av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
249  av_get_pix_fmt_name(frames_ctx->sw_format));
250  return AVERROR(EINVAL);
251  }
252 
253  switch (frames_ctx->device_ctx->type) {
254 #if CONFIG_D3D11VA
255  case AV_HWDEVICE_TYPE_D3D11VA:
256  ret = amf_init_from_d3d11_device(avctx, frames_ctx->device_ctx->hwctx);
257  if (ret < 0)
258  return ret;
259  break;
260 #endif
261 #if CONFIG_DXVA2
262  case AV_HWDEVICE_TYPE_DXVA2:
263  ret = amf_init_from_dxva2_device(avctx, frames_ctx->device_ctx->hwctx);
264  if (ret < 0)
265  return ret;
266  break;
267 #endif
268  default:
269  av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s frames context is not supported.\n",
270  av_hwdevice_get_type_name(frames_ctx->device_ctx->type));
271  return AVERROR(ENOSYS);
272  }
273 
274  ctx->hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx);
275  if (!ctx->hw_frames_ctx)
276  return AVERROR(ENOMEM);
277 
278  if (frames_ctx->initial_pool_size > 0)
279  ctx->hwsurfaces_in_queue_max = frames_ctx->initial_pool_size - 1;
280 
281  } else if (avctx->hw_device_ctx) {
282  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
283 
284  switch (device_ctx->type) {
285 #if CONFIG_D3D11VA
286  case AV_HWDEVICE_TYPE_D3D11VA:
287  ret = amf_init_from_d3d11_device(avctx, device_ctx->hwctx);
288  if (ret < 0)
289  return ret;
290  break;
291 #endif
292 #if CONFIG_DXVA2
293  case AV_HWDEVICE_TYPE_DXVA2:
294  ret = amf_init_from_dxva2_device(avctx, device_ctx->hwctx);
295  if (ret < 0)
296  return ret;
297  break;
298 #endif
299  default:
300  av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s device is not supported.\n",
301  av_hwdevice_get_type_name(device_ctx->type));
302  return AVERROR(ENOSYS);
303  }
304 
305  ctx->hw_device_ctx = av_buffer_ref(avctx->hw_device_ctx);
306  if (!ctx->hw_device_ctx)
307  return AVERROR(ENOMEM);
308 
309  } else {
310  res = ctx->context->pVtbl->InitDX11(ctx->context, NULL, AMF_DX11_1);
311  if (res == AMF_OK) {
312  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D11.\n");
313  } else {
314  res = ctx->context->pVtbl->InitDX9(ctx->context, NULL);
315  if (res == AMF_OK) {
316  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D9.\n");
317  } else {
318  AMFGuid guid = IID_AMFContext1();
319  res = ctx->context->pVtbl->QueryInterface(ctx->context, &guid, (void**)&context1);
320  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext1() failed with error %d\n", res);
321 
322  res = context1->pVtbl->InitVulkan(context1, NULL);
323  context1->pVtbl->Release(context1);
324  if (res != AMF_OK) {
325  if (res == AMF_NOT_SUPPORTED)
326  av_log(avctx, AV_LOG_ERROR, "AMF via Vulkan is not supported on the given device.\n");
327  else
328  av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on the given Vulkan device: %d.\n", res);
329  return AVERROR(ENOSYS);
330  }
331  av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via Vulkan.\n");
332  }
333  }
334  }
335  return 0;
336 }
337 
338 static int amf_init_encoder(AVCodecContext *avctx)
339 {
340  AmfContext *ctx = avctx->priv_data;
341  const wchar_t *codec_id = NULL;
342  AMF_RESULT res;
343  enum AVPixelFormat pix_fmt;
344 
345  switch (avctx->codec->id) {
346  case AV_CODEC_ID_H264:
347  codec_id = AMFVideoEncoderVCE_AVC;
348  break;
349  case AV_CODEC_ID_HEVC:
350  codec_id = AMFVideoEncoder_HEVC;
351  break;
352  case AV_CODEC_ID_AV1 :
353  codec_id = AMFVideoEncoder_AV1;
354  break;
355  default:
356  break;
357  }
358  AMF_RETURN_IF_FALSE(ctx, codec_id != NULL, AVERROR(EINVAL), "Codec %d is not supported\n", avctx->codec->id);
359 
360  if (ctx->hw_frames_ctx)
361  pix_fmt = ((AVHWFramesContext*)ctx->hw_frames_ctx->data)->sw_format;
362  else
363  pix_fmt = avctx->pix_fmt;
364 
365  ctx->format = amf_av_to_amf_format(pix_fmt);
366  AMF_RETURN_IF_FALSE(ctx, ctx->format != AMF_SURFACE_UNKNOWN, AVERROR(EINVAL),
367  "Format %s is not supported\n", av_get_pix_fmt_name(pix_fmt));
368 
369  res = ctx->factory->pVtbl->CreateComponent(ctx->factory, ctx->context, codec_id, &ctx->encoder);
370  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_ENCODER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", codec_id, res);
371 
372  return 0;
373 }
374 
375 int av_cold ff_amf_encode_close(AVCodecContext *avctx)
376 {
377  AmfContext *ctx = avctx->priv_data;
378 
379  if (ctx->delayed_surface) {
380  ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
381  ctx->delayed_surface = NULL;
382  }
383 
384  if (ctx->encoder) {
385  ctx->encoder->pVtbl->Terminate(ctx->encoder);
386  ctx->encoder->pVtbl->Release(ctx->encoder);
387  ctx->encoder = NULL;
388  }
389 
390  if (ctx->context) {
391  ctx->context->pVtbl->Terminate(ctx->context);
392  ctx->context->pVtbl->Release(ctx->context);
393  ctx->context = NULL;
394  }
395  av_buffer_unref(&ctx->hw_device_ctx);
396  av_buffer_unref(&ctx->hw_frames_ctx);
397 
398  if (ctx->trace) {
399  ctx->trace->pVtbl->UnregisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID);
400  }
401  if (ctx->library) {
402  dlclose(ctx->library);
403  ctx->library = NULL;
404  }
405  ctx->trace = NULL;
406  ctx->debug = NULL;
407  ctx->factory = NULL;
408  ctx->version = 0;
409  ctx->delayed_drain = 0;
410  av_frame_free(&ctx->delayed_frame);
411  av_fifo_freep2(&ctx->timestamp_list);
412 
413  return 0;
414 }
415 
416 static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame,
417  AMFSurface* surface)
418 {
419  AMFPlane *plane;
420  uint8_t *dst_data[4];
421  int dst_linesize[4];
422  int planes;
423  int i;
424 
425  planes = surface->pVtbl->GetPlanesCount(surface);
426  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
427 
428  for (i = 0; i < planes; i++) {
429  plane = surface->pVtbl->GetPlaneAt(surface, i);
430  dst_data[i] = plane->pVtbl->GetNative(plane);
431  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
432  }
433  av_image_copy2(dst_data, dst_linesize,
434  frame->data, frame->linesize, frame->format,
435  avctx->width, avctx->height);
436 
437  return 0;
438 }
439 
440 static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
441 {
442  AmfContext *ctx = avctx->priv_data;
443  int ret;
444  AMFVariantStruct var = {0};
445  int64_t timestamp = AV_NOPTS_VALUE;
446  int64_t size = buffer->pVtbl->GetSize(buffer);
447 
448  if ((ret = ff_get_encode_buffer(avctx, pkt, size, 0)) < 0) {
449  return ret;
450  }
451  memcpy(pkt->data, buffer->pVtbl->GetNative(buffer), size);
452 
453  switch (avctx->codec->id) {
454  case AV_CODEC_ID_H264:
455  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE, &var);
456  if(var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR) {
457  pkt->flags = AV_PKT_FLAG_KEY;
458  }
459  break;
460  case AV_CODEC_ID_HEVC:
461  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE, &var);
462  if (var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR) {
463  pkt->flags = AV_PKT_FLAG_KEY;
464  }
465  break;
466  case AV_CODEC_ID_AV1:
467  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE, &var);
468  if (var.int64Value == AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE_KEY) {
469  pkt->flags = AV_PKT_FLAG_KEY;
470  }
471  default:
472  break;
473  }
474 
475  buffer->pVtbl->GetProperty(buffer, PTS_PROP, &var);
476 
477  pkt->pts = var.int64Value; // original pts
478 
479 
480  AMF_RETURN_IF_FALSE(ctx, av_fifo_read(ctx->timestamp_list, &timestamp, 1) >= 0,
481  AVERROR_UNKNOWN, "timestamp_list is empty\n");
482 
483  // calc dts shift if max_b_frames > 0
484  if ((ctx->max_b_frames > 0 || ctx->pa_adaptive_mini_gop == 1) && ctx->dts_delay == 0) {
485  int64_t timestamp_last = AV_NOPTS_VALUE;
486  size_t can_read = av_fifo_can_read(ctx->timestamp_list);
487 
488  AMF_RETURN_IF_FALSE(ctx, can_read > 0, AVERROR_UNKNOWN,
489  "timestamp_list is empty while max_b_frames = %d\n", avctx->max_b_frames);
490  av_fifo_peek(ctx->timestamp_list, &timestamp_last, 1, can_read - 1);
491  if (timestamp < 0 || timestamp_last < AV_NOPTS_VALUE) {
492  return AVERROR(ERANGE);
493  }
494  ctx->dts_delay = timestamp_last - timestamp;
495  }
496  pkt->dts = timestamp - ctx->dts_delay;
497  return 0;
498 }
499 
500 // amfenc API implementation
501 int av_cold ff_amf_encode_init(AVCodecContext *avctx)
502 {
503  int ret;
504 
505  if ((ret = amf_load_library(avctx)) == 0) {
506  if ((ret = amf_init_context(avctx)) == 0) {
507  if ((ret = amf_init_encoder(avctx)) == 0) {
508  return 0;
509  }
510  }
511  }
512  ff_amf_encode_close(avctx);
513  return ret;
514 }
515 
516 static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
517 {
518  AMF_RESULT res;
519  AMFVariantStruct var;
520  res = AMFVariantInit(&var);
521  if (res == AMF_OK) {
522  AMFGuid guid_AMFInterface = IID_AMFInterface();
523  AMFInterface *amf_interface;
524  res = val->pVtbl->QueryInterface(val, &guid_AMFInterface, (void**)&amf_interface);
525 
526  if (res == AMF_OK) {
527  res = AMFVariantAssignInterface(&var, amf_interface);
528  amf_interface->pVtbl->Release(amf_interface);
529  }
530  if (res == AMF_OK) {
531  res = object->pVtbl->SetProperty(object, name, var);
532  }
533  AMFVariantClear(&var);
534  }
535  return res;
536 }
537 
538 static AMF_RESULT amf_get_property_buffer(AMFData *object, const wchar_t *name, AMFBuffer **val)
539 {
540  AMF_RESULT res;
541  AMFVariantStruct var;
542  res = AMFVariantInit(&var);
543  if (res == AMF_OK) {
544  res = object->pVtbl->GetProperty(object, name, &var);
545  if (res == AMF_OK) {
546  if (var.type == AMF_VARIANT_INTERFACE) {
547  AMFGuid guid_AMFBuffer = IID_AMFBuffer();
548  AMFInterface *amf_interface = AMFVariantInterface(&var);
549  res = amf_interface->pVtbl->QueryInterface(amf_interface, &guid_AMFBuffer, (void**)val);
550  } else {
551  res = AMF_INVALID_DATA_TYPE;
552  }
553  }
554  AMFVariantClear(&var);
555  }
556  return res;
557 }
558 
559 static AMFBuffer *amf_create_buffer_with_frame_ref(const AVFrame *frame, AMFContext *context)
560 {
561  AVFrame *frame_ref;
562  AMFBuffer *frame_ref_storage_buffer = NULL;
563  AMF_RESULT res;
564 
565  res = context->pVtbl->AllocBuffer(context, AMF_MEMORY_HOST, sizeof(frame_ref), &frame_ref_storage_buffer);
566  if (res == AMF_OK) {
567  frame_ref = av_frame_clone(frame);
568  if (frame_ref) {
569  memcpy(frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), &frame_ref, sizeof(frame_ref));
570  } else {
571  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
572  frame_ref_storage_buffer = NULL;
573  }
574  }
575  return frame_ref_storage_buffer;
576 }
577 
578 static void amf_release_buffer_with_frame_ref(AMFBuffer *frame_ref_storage_buffer)
579 {
580  AVFrame *frame_ref;
581  memcpy(&frame_ref, frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), sizeof(frame_ref));
582  av_frame_free(&frame_ref);
583  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
584 }
585 
586 int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
587 {
588  AmfContext *ctx = avctx->priv_data;
589  AMFSurface *surface;
590  AMF_RESULT res;
591  int ret;
592  AMF_RESULT res_query;
593  AMFData *data = NULL;
594  AVFrame *frame = ctx->delayed_frame;
595  int block_and_wait;
596  int query_output_data_flag = 0;
597  AMF_RESULT res_resubmit;
598 
599  if (!ctx->encoder)
600  return AVERROR(EINVAL);
601 
602  if (!frame->buf[0]) {
603  ret = ff_encode_get_frame(avctx, frame);
604  if (ret < 0 && ret != AVERROR_EOF)
605  return ret;
606  }
607 
608  if (!frame->buf[0]) { // submit drain
609  if (!ctx->eof) { // submit drain one time only
610  if (ctx->delayed_surface != NULL) {
611  ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
612  } else if(!ctx->delayed_drain) {
613  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
614  if (res == AMF_INPUT_FULL) {
615  ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
616  } else {
617  if (res == AMF_OK) {
618  ctx->eof = 1; // drain started
619  }
620  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Drain() failed with error %d\n", res);
621  }
622  }
623  }
624  } else if (!ctx->delayed_surface) { // submit frame
625  int hw_surface = 0;
626 
627  // prepare surface from frame
628  switch (frame->format) {
629 #if CONFIG_D3D11VA
630  case AV_PIX_FMT_D3D11:
631  {
632  static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
633  ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
634  int index = (intptr_t)frame->data[1]; // index is the slice in the texture array - set to tell AMF which slice to use
635 
636  av_assert0(frame->hw_frames_ctx && ctx->hw_frames_ctx &&
637  frame->hw_frames_ctx->data == ctx->hw_frames_ctx->data);
638 
639  texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
640 
641  res = ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
642  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
643 
644  hw_surface = 1;
645  }
646  break;
647 #endif
648 #if CONFIG_DXVA2
649  case AV_PIX_FMT_DXVA2_VLD:
650  {
651  IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
652 
653  res = ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
654  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
655 
656  hw_surface = 1;
657  }
658  break;
659 #endif
660  default:
661  {
662  res = ctx->context->pVtbl->AllocSurface(ctx->context, AMF_MEMORY_HOST, ctx->format, avctx->width, avctx->height, &surface);
663  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
664  amf_copy_surface(avctx, frame, surface);
665  }
666  break;
667  }
668 
669  if (hw_surface) {
670  AMFBuffer *frame_ref_storage_buffer;
671 
672  // input HW surfaces can be vertically aligned by 16; tell AMF the real size
673  surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
674 
675  frame_ref_storage_buffer = amf_create_buffer_with_frame_ref(frame, ctx->context);
676  AMF_RETURN_IF_FALSE(ctx, frame_ref_storage_buffer != NULL, AVERROR(ENOMEM), "create_buffer_with_frame_ref() returned NULL\n");
677 
678  res = amf_set_property_buffer(surface, L"av_frame_ref", frame_ref_storage_buffer);
679  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SetProperty failed for \"av_frame_ref\" with error %d\n", res);
680  ctx->hwsurfaces_in_queue++;
681  frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
682  }
683 
684  surface->pVtbl->SetPts(surface, frame->pts);
685  AMF_ASSIGN_PROPERTY_INT64(res, surface, PTS_PROP, frame->pts);
686 
687  switch (avctx->codec->id) {
688  case AV_CODEC_ID_H264:
689  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_AUD, !!ctx->aud);
690  break;
691  case AV_CODEC_ID_HEVC:
692  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, !!ctx->aud);
693  break;
694  //case AV_CODEC_ID_AV1 not supported
695  default:
696  break;
697  }
698 
699  // submit surface
700  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
701  if (res == AMF_INPUT_FULL) { // handle full queue
702  //store surface for later submission
703  ctx->delayed_surface = surface;
704  } else {
705  int64_t pts = frame->pts;
706  surface->pVtbl->Release(surface);
707  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
708 
709  av_frame_unref(frame);
710  ret = av_fifo_write(ctx->timestamp_list, &pts, 1);
711  if (ret < 0)
712  return ret;
713  }
714  }
715 
716 
717  do {
718  block_and_wait = 0;
719  // poll data
720  if (!avpkt->data && !avpkt->buf) {
721  res_query = ctx->encoder->pVtbl->QueryOutput(ctx->encoder, &data);
722  if (data) {
723  // copy data to packet
724  AMFBuffer *buffer;
725  AMFGuid guid = IID_AMFBuffer();
726  query_output_data_flag = 1;
727  data->pVtbl->QueryInterface(data, &guid, (void**)&buffer); // query for buffer interface
728  ret = amf_copy_buffer(avctx, avpkt, buffer);
729 
730  buffer->pVtbl->Release(buffer);
731 
732  if (data->pVtbl->HasProperty(data, L"av_frame_ref")) {
733  AMFBuffer* frame_ref_storage_buffer;
734  res = amf_get_property_buffer(data, L"av_frame_ref", &frame_ref_storage_buffer);
735  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetProperty failed for \"av_frame_ref\" with error %d\n", res);
736  amf_release_buffer_with_frame_ref(frame_ref_storage_buffer);
737  ctx->hwsurfaces_in_queue--;
738  }
739 
740  data->pVtbl->Release(data);
741 
742  AMF_RETURN_IF_FALSE(ctx, ret >= 0, ret, "amf_copy_buffer() failed with error %d\n", ret);
743  }
744  }
745  res_resubmit = AMF_OK;
746  if (ctx->delayed_surface != NULL) { // try to resubmit frame
747  res_resubmit = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)ctx->delayed_surface);
748  if (res_resubmit != AMF_INPUT_FULL) {
749  int64_t pts = ctx->delayed_surface->pVtbl->GetPts(ctx->delayed_surface);
750  ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
751  ctx->delayed_surface = NULL;
752  av_frame_unref(ctx->delayed_frame);
753  AMF_RETURN_IF_FALSE(ctx, res_resubmit == AMF_OK, AVERROR_UNKNOWN, "Repeated SubmitInput() failed with error %d\n", res_resubmit);
754 
755  ret = av_fifo_write(ctx->timestamp_list, &pts, 1);
756  if (ret < 0)
757  return ret;
758  }
759  } else if (ctx->delayed_drain) { // try to resubmit drain
760  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
761  if (res != AMF_INPUT_FULL) {
762  ctx->delayed_drain = 0;
763  ctx->eof = 1; // drain started
764  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated Drain() failed with error %d\n", res);
765  } else {
766  av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed drain submission got AMF_INPUT_FULL - should not happen\n");
767  }
768  }
769 
770  if (query_output_data_flag == 0) {
771  if (res_resubmit == AMF_INPUT_FULL || ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) || (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max)) {
772  block_and_wait = 1;
773  av_usleep(1000);
774  }
775  }
776  } while (block_and_wait);
777 
778  if (res_query == AMF_EOF) {
779  ret = AVERROR_EOF;
780  } else if (data == NULL) {
781  ret = AVERROR(EAGAIN);
782  } else {
783  ret = 0;
784  }
785  return ret;
786 }
787 
788 const AVCodecHWConfigInternal *const ff_amfenc_hw_configs[] = {
789 #if CONFIG_D3D11VA
790  HW_CONFIG_ENCODER_FRAMES(D3D11, D3D11VA),
791  HW_CONFIG_ENCODER_DEVICE(NONE, D3D11VA),
792 #endif
793 #if CONFIG_DXVA2
794  HW_CONFIG_ENCODER_FRAMES(DXVA2_VLD, DXVA2),
795  HW_CONFIG_ENCODER_DEVICE(NONE, DXVA2),
796 #endif
797  NULL,
798 };