FFmpeg
vf_hwmap.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/buffer.h"
#include "libavutil/hwcontext.h"
#include "libavutil/log.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"

typedef struct HWMapContext {
    const AVClass *class;

    AVBufferRef   *hwframes_ref;

    int            mode;
    char          *derive_device_type;
    int            reverse;
} HWMapContext;

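// Advertise all video formats on both links; what is actually usable is
// decided later, once the hardware frames context is known.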
static int hwmap_query_formats(AVFilterContext *avctx)
{
    int ret;

    if ((ret = ff_formats_ref(ff_all_formats(AVMEDIA_TYPE_VIDEO),
                              &avctx->inputs[0]->outcfg.formats)) < 0 ||
        (ret = ff_formats_ref(ff_all_formats(AVMEDIA_TYPE_VIDEO),
                              &avctx->outputs[0]->incfg.formats)) < 0)
        return ret;

    return 0;
}

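// Work out how mapping will be performed and prepare the output frames
// context: hardware to hardware (directly, via a derived device, or in
// reverse), hardware to software, or software to hardware when only a
// device is available.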
static int hwmap_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    HWMapContext      *ctx = avctx->priv;
    AVFilterLink   *inlink = avctx->inputs[0];
    AVHWFramesContext *hwfc;
    AVBufferRef *device;
    const AVPixFmtDescriptor *desc;
    int err, device_is_derived;

    av_log(avctx, AV_LOG_DEBUG, "Configure hwmap %s -> %s.\n",
           av_get_pix_fmt_name(inlink->format),
           av_get_pix_fmt_name(outlink->format));

    av_buffer_unref(&ctx->hwframes_ref);

    device = avctx->hw_device_ctx;
    device_is_derived = 0;

    if (inlink->hw_frames_ctx) {
        hwfc = (AVHWFramesContext*)inlink->hw_frames_ctx->data;

        if (ctx->derive_device_type) {
            enum AVHWDeviceType type;

            type = av_hwdevice_find_type_by_name(ctx->derive_device_type);
            if (type == AV_HWDEVICE_TYPE_NONE) {
                av_log(avctx, AV_LOG_ERROR, "Invalid device type.\n");
                err = AVERROR(EINVAL);
                goto fail;
            }

            err = av_hwdevice_ctx_create_derived(&device, type,
                                                 hwfc->device_ref, 0);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create derived "
                       "device context: %d.\n", err);
                goto fail;
            }
            device_is_derived = 1;
        }

        desc = av_pix_fmt_desc_get(outlink->format);
        if (!desc) {
            err = AVERROR(EINVAL);
            goto fail;
        }

        if (inlink->format == hwfc->format &&
            (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) &&
            !ctx->reverse) {
            // Map between two hardware formats (including the case of
            // undoing an existing mapping).

            if (!device) {
                av_log(avctx, AV_LOG_ERROR, "A device reference is "
                       "required to map to a hardware format.\n");
                err = AVERROR(EINVAL);
                goto fail;
            }

            err = av_hwframe_ctx_create_derived(&ctx->hwframes_ref,
                                                outlink->format,
                                                device,
                                                inlink->hw_frames_ctx,
                                                ctx->mode);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create derived "
                       "frames context: %d.\n", err);
                goto fail;
            }

        } else if (inlink->format == hwfc->format &&
                   (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) &&
                   ctx->reverse) {
            // Map between two hardware formats, but do it in reverse.
            // Make a new hwframe context for the target type, and then
            // overwrite the input hwframe context with a derived context
            // mapped from that back to the source type.
            AVBufferRef *source;
            AVHWFramesContext *frames;

            ctx->hwframes_ref = av_hwframe_ctx_alloc(device);
            if (!ctx->hwframes_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
            frames = (AVHWFramesContext*)ctx->hwframes_ref->data;

            frames->format    = outlink->format;
            frames->sw_format = hwfc->sw_format;
            frames->width     = hwfc->width;
            frames->height    = hwfc->height;

            if (avctx->extra_hw_frames >= 0)
                frames->initial_pool_size = 2 + avctx->extra_hw_frames;

            err = av_hwframe_ctx_init(ctx->hwframes_ref);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to initialise "
                       "target frames context: %d.\n", err);
                goto fail;
            }

            err = av_hwframe_ctx_create_derived(&source,
                                                inlink->format,
                                                hwfc->device_ref,
                                                ctx->hwframes_ref,
                                                ctx->mode);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create "
                       "derived source frames context: %d.\n", err);
                goto fail;
            }

            // Here is the naughty bit.  This overwriting changes what
            // ff_get_video_buffer() in the previous filter returns -
            // it will now give a frame allocated here mapped back to
            // the format it expects.  If there were any additional
            // constraints on the output frames there then this may
            // break nastily.
            av_buffer_unref(&inlink->hw_frames_ctx);
            inlink->hw_frames_ctx = source;

        } else if ((outlink->format == hwfc->format &&
                    inlink->format  == hwfc->sw_format) ||
                   inlink->format == hwfc->format) {
            // Map from a hardware format to a software format, or
            // undo an existing such mapping.

            ctx->hwframes_ref = av_buffer_ref(inlink->hw_frames_ctx);
            if (!ctx->hwframes_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }

        } else {
            // Non-matching formats - not supported.

            av_log(avctx, AV_LOG_ERROR, "Unsupported formats for "
                   "hwmap: from %s (%s) to %s.\n",
                   av_get_pix_fmt_name(inlink->format),
                   av_get_pix_fmt_name(hwfc->format),
                   av_get_pix_fmt_name(outlink->format));
            err = AVERROR(EINVAL);
            goto fail;
        }
    } else if (avctx->hw_device_ctx) {
        // Map from a software format to a hardware format.  This
        // creates a new hwframe context like hwupload, but then
        // returns frames mapped from that to the previous link in
        // order to fill them without an additional copy.

        if (!device) {
            av_log(avctx, AV_LOG_ERROR, "A device reference is "
                   "required to create new frames with reverse "
                   "mapping.\n");
            err = AVERROR(EINVAL);
            goto fail;
        }

        ctx->reverse = 1;

        ctx->hwframes_ref = av_hwframe_ctx_alloc(device);
        if (!ctx->hwframes_ref) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        hwfc = (AVHWFramesContext*)ctx->hwframes_ref->data;

        hwfc->format    = outlink->format;
        hwfc->sw_format = inlink->format;
        hwfc->width     = inlink->w;
        hwfc->height    = inlink->h;

        if (avctx->extra_hw_frames >= 0)
            hwfc->initial_pool_size = 2 + avctx->extra_hw_frames;

        err = av_hwframe_ctx_init(ctx->hwframes_ref);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to create frame "
                   "context for reverse mapping: %d.\n", err);
            goto fail;
        }

    } else {
        av_log(avctx, AV_LOG_ERROR, "Mapping requires a hardware "
               "context (a device, or frames on input).\n");
        return AVERROR(EINVAL);
    }

    outlink->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    if (device_is_derived)
        av_buffer_unref(&device);
    return 0;

fail:
    if (device_is_derived)
        av_buffer_unref(&device);
    av_buffer_unref(&ctx->hwframes_ref);
    return err;
}

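// get_buffer callback for the input link.  When reverse-mapping software
// frames into hardware, allocate a hardware frame on the output link and
// hand the previous filter a software mapping of it; otherwise fall back
// to the default allocator.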
static AVFrame *hwmap_get_buffer(AVFilterLink *inlink, int w, int h)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink  *outlink = avctx->outputs[0];
    HWMapContext      *ctx = avctx->priv;

    if (ctx->reverse && !inlink->hw_frames_ctx) {
        AVFrame *src, *dst;
        int err;

        src = ff_get_video_buffer(outlink, w, h);
        if (!src) {
            av_log(avctx, AV_LOG_ERROR, "Failed to allocate source "
                   "frame for software mapping.\n");
            return NULL;
        }

        dst = av_frame_alloc();
        if (!dst) {
            av_frame_free(&src);
            return NULL;
        }

        err = av_hwframe_map(dst, src, ctx->mode);
        if (err) {
            av_log(avctx, AV_LOG_ERROR, "Failed to map frame to "
                   "software: %d.\n", err);
            av_frame_free(&src);
            av_frame_free(&dst);
            return NULL;
        }

        av_frame_free(&src);
        return dst;
    } else {
        return ff_default_get_video_buffer(inlink, w, h);
    }
}

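// filter_frame callback: map each input frame to the output format, copy
// its properties onto the mapped frame, then free the input and send the
// mapping downstream.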
static int hwmap_filter_frame(AVFilterLink *link, AVFrame *input)
{
    AVFilterContext *avctx = link->dst;
    AVFilterLink  *outlink = avctx->outputs[0];
    HWMapContext      *ctx = avctx->priv;
    AVFrame *map = NULL;
    int err;

    av_log(ctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input->format),
           input->width, input->height, input->pts);

    map = av_frame_alloc();
    if (!map) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    map->format = outlink->format;
    map->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
    if (!map->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->reverse && !input->hw_frames_ctx) {
        // If we mapped backwards from hardware to software, we need
        // to attach the hardware frame context to the input frame to
        // make the mapping visible to av_hwframe_map().
        input->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
        if (!input->hw_frames_ctx) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }

    err = av_hwframe_map(map, input, ctx->mode);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to map frame: %d.\n", err);
        goto fail;
    }

    err = av_frame_copy_props(map, input);
    if (err < 0)
        goto fail;

    av_frame_free(&input);

    av_log(ctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(map->format),
           map->width, map->height, map->pts);

    return ff_filter_frame(outlink, map);

fail:
    av_frame_free(&input);
    av_frame_free(&map);
    return err;
}

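// Release the cached frames context reference.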
static av_cold void hwmap_uninit(AVFilterContext *avctx)
{
    HWMapContext *ctx = avctx->priv;

    av_buffer_unref(&ctx->hwframes_ref);
}

#define OFFSET(x) offsetof(HWMapContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption hwmap_options[] = {
    { "mode", "Frame mapping mode",
      OFFSET(mode), AV_OPT_TYPE_FLAGS,
      { .i64 = AV_HWFRAME_MAP_READ | AV_HWFRAME_MAP_WRITE },
      0, INT_MAX, FLAGS, .unit = "mode" },

    { "read", "Mapping should be readable",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_READ },
      INT_MIN, INT_MAX, FLAGS, .unit = "mode" },
    { "write", "Mapping should be writeable",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_WRITE },
      INT_MIN, INT_MAX, FLAGS, .unit = "mode" },
    { "overwrite", "Mapping will always overwrite the entire frame",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_OVERWRITE },
      INT_MIN, INT_MAX, FLAGS, .unit = "mode" },
    { "direct", "Mapping should not involve any copying",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_DIRECT },
      INT_MIN, INT_MAX, FLAGS, .unit = "mode" },

    { "derive_device", "Derive a new device of this type",
      OFFSET(derive_device_type), AV_OPT_TYPE_STRING,
      { .str = NULL }, 0, 0, FLAGS },
    { "reverse", "Map in reverse (create and allocate in the sink)",
      OFFSET(reverse), AV_OPT_TYPE_INT,
      { .i64 = 0 }, 0, 1, FLAGS },

    { NULL }
};
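/*
 * Illustrative option strings (examples only, not exhaustive):
 *   "mode=read+write"      - request a readable and writeable mapping
 *   "derive_device=vaapi"  - derive a device of the named type from the
 *                            input frames' device
 *   "reverse=1"            - create the frames in the sink and map them
 *                            backwards to the source
 */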

AVFILTER_DEFINE_CLASS(hwmap);

static const AVFilterPad hwmap_inputs[] = {
    {
        .name             = "default",
        .type             = AVMEDIA_TYPE_VIDEO,
        .get_buffer.video = hwmap_get_buffer,
        .filter_frame     = hwmap_filter_frame,
    },
};

static const AVFilterPad hwmap_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = hwmap_config_output,
    },
};

const AVFilter ff_vf_hwmap = {
    .name           = "hwmap",
    .description    = NULL_IF_CONFIG_SMALL("Map hardware frames"),
    .uninit         = hwmap_uninit,
    .priv_size      = sizeof(HWMapContext),
    .priv_class     = &hwmap_class,
    FILTER_INPUTS(hwmap_inputs),
    FILTER_OUTPUTS(hwmap_outputs),
    FILTER_QUERY_FUNC(hwmap_query_formats),
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
    .flags          = AVFILTER_FLAG_HWDEVICE,
};
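An illustrative usage sketch (the device path, file names and the unsharp_opencl filter are placeholders, not taken from this file): decode with VAAPI, filter on an OpenCL device derived from the same device, and map back for encoding:

    ffmpeg -init_hw_device vaapi=va:/dev/dri/renderD128 \
           -init_hw_device opencl=ocl@va \
           -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device va \
           -filter_hw_device ocl \
           -i input.mp4 \
           -vf 'hwmap,unsharp_opencl,hwmap=derive_device=vaapi:reverse=1' \
           -c:v h264_vaapi output.mp4

The first hwmap should take the first branch of hwmap_config_output, deriving an OpenCL frames context from the VAAPI frames on the filter device, while the second, with derive_device and reverse set, should take the reverse branch and give the OpenCL filter frames that are really allocated in the new VAAPI frames context.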