libavfilter/vaapi_vpp.c — shared VAAPI video processing (VPP) helper code used by the VAAPI filters (excerpts).
/* From ff_vaapi_vpp_pipeline_uninit(): release the per-pipeline VAAPI
 * objects (filter parameter buffers, then the context and config). */
for (i = 0; i < ctx->nb_filter_buffers; i++) {
    if (ctx->filter_buffers[i] != VA_INVALID_ID) {
        vaDestroyBuffer(ctx->hwctx->display, ctx->filter_buffers[i]);
        ctx->filter_buffers[i] = VA_INVALID_ID;
    }
}
ctx->nb_filter_buffers = 0;

if (ctx->va_context != VA_INVALID_ID) {
    vaDestroyContext(ctx->hwctx->display, ctx->va_context);
    ctx->va_context = VA_INVALID_ID;
}

if (ctx->va_config != VA_INVALID_ID) {
    vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
    ctx->va_config = VA_INVALID_ID;
}
/* From ff_vaapi_vpp_config_input(): the input link must carry a hardware
 * frames context, which is what associates the processing device. */
if (ctx->pipeline_uninit)
    ctx->pipeline_uninit(avctx);

if (!inlink->hw_frames_ctx) {
    av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
           "required to associate the processing device.\n");
    return AVERROR(EINVAL);
}

ctx->input_frames_ref = av_buffer_ref(inlink->hw_frames_ctx);
if (!ctx->input_frames_ref)
    return AVERROR(ENOMEM);
/* From ff_vaapi_vpp_config_output(): fall back to the input dimensions
 * when no explicit output size was requested. */
if (ctx->pipeline_uninit)
    ctx->pipeline_uninit(avctx);

if (!ctx->output_width)
    ctx->output_width  = avctx->inputs[0]->w;
if (!ctx->output_height)
    ctx->output_height = avctx->inputs[0]->h;

outlink->w = ctx->output_width;
outlink->h = ctx->output_height;
if (ctx->passthrough) {
    /* Passthrough: just propagate the input frames context. */
    if (inlink->hw_frames_ctx)
        outlink->hw_frames_ctx = av_buffer_ref(inlink->hw_frames_ctx);
    return 0;
}

ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
if (!ctx->device_ref)
    return AVERROR(ENOMEM);
vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                     VAEntrypointVideoProc, NULL, 0, &ctx->va_config);
if (vas != VA_STATUS_SUCCESS) {
    av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
           "config: %d (%s).\n", vas, vaErrorStr(vas));
    err = AVERROR(EIO);
    goto fail;
}
if (ctx->output_format == AV_PIX_FMT_NONE)
    ctx->output_format = ctx->input_frames->sw_format;

/* The requested output size is then checked against the device's hardware
 * frame constraints (av_hwdevice_get_hwframe_constraints()); on failure an
 * error of this form is logged:
 *     "size %dx%d (constraints: width %d-%d height %d-%d).\n"
 * with ctx->output_width, ctx->output_height and the constraint bounds. */
/* Configure the output AVHWFramesContext, then initialise it before use. */
output_frames->width  = ctx->output_width;
output_frames->height = ctx->output_height;

err = av_hwframe_ctx_init(outlink->hw_frames_ctx);
if (err < 0) {
    av_log(avctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
           "context for output: %d\n", err);
    goto fail;
}

va_frames = output_frames->hwctx;
vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                      ctx->output_width, ctx->output_height,
                      VA_PROGRESSIVE,
                      va_frames->surface_ids, va_frames->nb_surfaces,
                      &ctx->va_context);
if (vas != VA_STATUS_SUCCESS) {
    av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
           "context: %d (%s).\n", vas, vaErrorStr(vas));
    return AVERROR(EIO);
}
if (ctx->build_filter_params) {
    err = ctx->build_filter_params(avctx);
    if (err < 0)
        return err;
}
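A hedged sketch of how a filter built on these helpers typically drives them from its own config_output callback: the filter's private context embeds a VAAPIVPPContext (declared in vaapi_vpp.h), the callback fills in the requested output size, and ff_vaapi_vpp_config_output() then performs the setup shown above. The example_ name and the fixed size are illustrative, not taken from any particular filter.

static int example_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    VAAPIVPPContext *ctx   = avctx->priv;

    /* Hypothetical fixed size; a real filter derives this from its options. */
    ctx->output_width  = 1280;
    ctx->output_height = 720;

    return ff_vaapi_vpp_config_output(outlink);
}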
/* Table mapping each VAProcColorStandardType to the nearest AVColor*
 * description; the three numbers are the numeric values of the
 * corresponding AVColorPrimaries, AVColorTransferCharacteristic and
 * AVColorSpace enums. */
static const VAAPIColourProperties vaapi_colour_standard_map[] = {
    { VAProcColorStandardBT601,       5,  6,  5 },
    { VAProcColorStandardBT601,       6,  6,  6 },
    { VAProcColorStandardBT709,       1,  1,  1 },
    { VAProcColorStandardBT470M,      4,  4,  4 },
    { VAProcColorStandardBT470BG,     5,  5,  5 },
    { VAProcColorStandardSMPTE170M,   6,  6,  6 },
    { VAProcColorStandardSMPTE240M,   7,  7,  7 },
    { VAProcColorStandardGenericFilm, 8,  1,  1 },
#if VA_CHECK_VERSION(1, 1, 0)
    { VAProcColorStandardSRGB,        1, 13,  0 },
    { VAProcColorStandardXVYCC601,    1, 11,  5 },
    { VAProcColorStandardXVYCC709,    1, 11,  1 },
    { VAProcColorStandardBT2020,      9, 14,  9 },
#endif
};
static void vaapi_vpp_fill_colour_standard(VAAPIColourProperties *props,
                                           VAProcColorStandardType *vacs,
                                           int nb_vacs)
{
    int i, j, score, best_score, worst_score;
    VAProcColorStandardType best_standard;

#if VA_CHECK_VERSION(1, 3, 0)
    /* If the driver accepts explicit colour properties, prefer those. */
    for (i = 0; i < nb_vacs; i++) {
        if (vacs[i] == VAProcColorStandardExplicit) {
            /* ... */
        }
    }
#endif

    /* Otherwise score each standard the driver supports against the
     * frame's primaries / transfer / matrix and keep the closest match. */
    best_standard = VAProcColorStandardNone;
    /* ... */
    if (worst_score == 0) {
        /* No usable colour information to match against. */
        /* ... */
    }

    for (i = 0; i < nb_vacs; i++) {
        /* ... compute a mismatch score for vacs[i] ... */
        if (score < worst_score &&
            (best_score == -1 || score < best_score)) {
            /* ... */
        }
    }
    /* ... */
}
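The matching idea is easier to see in isolation. The sketch below is purely illustrative (the name, the weights and the handling of unspecified values are assumptions, not the file's exact code): a mismatch in the matrix coefficients is weighted more heavily than one in the transfer characteristic, which in turn outweighs the primaries.

static int example_standard_mismatch(const VAAPIColourProperties *want,
                                     const VAAPIColourProperties *have)
{
    return 4 * (want->colorspace      != have->colorspace) +
           2 * (want->color_trc       != have->color_trc)  +
               (want->color_primaries != have->color_primaries);
}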
#if VA_CHECK_VERSION(1, 1, 0)
/* From vaapi_vpp_fill_chroma_sample_location(): a table (names abridged
 * here) pairs each AVChromaLocation with the equivalent VA chroma siting
 * flags. */
static const struct {
    enum AVChromaLocation av;
    uint8_t va;
} chroma_location_map[] = {
    { AVCHROMA_LOC_LEFT,   VA_CHROMA_SITING_VERTICAL_CENTER |
                           VA_CHROMA_SITING_HORIZONTAL_LEFT },
    { AVCHROMA_LOC_CENTER, VA_CHROMA_SITING_VERTICAL_CENTER |
                           VA_CHROMA_SITING_HORIZONTAL_CENTER },
    /* ... the TOPLEFT/TOP and BOTTOMLEFT/BOTTOM entries follow the same
     * left/centre pattern with the matching vertical siting ... */
};
#endif

/* vaapi_vpp_fill_colour_properties() combines the colour-standard,
 * chroma-siting and colour-range mappings for one frame and logs the
 * result at DEBUG level, ending with:
 *     "to VA standard %d chroma siting %#x range %#x.\n"               */
/* vaapi_vpp_colour_properties(avctx, params, input_frame, output_frame):
 * query the driver's pipeline caps, then map the input and output frame
 * colour properties onto the standards the driver reports. */
VAAPIColourProperties input_props, output_props;
VAProcPipelineCaps caps;

vas = vaQueryVideoProcPipelineCaps(ctx->hwctx->display, ctx->va_context,
                                   ctx->filter_buffers,
                                   ctx->nb_filter_buffers, &caps);
if (vas != VA_STATUS_SUCCESS) {
    av_log(avctx, AV_LOG_ERROR, /* first part of the message abridged */
           "colour standard support: %d (%s).\n", vas, vaErrorStr(vas));
    return AVERROR(EIO);
}

/* Input side: derive the properties from input_frame and match them
 * against what the driver supports. */
vaapi_vpp_fill_colour_properties(avctx, &input_props,
                                 caps.input_color_standards,
                                 caps.num_input_color_standards);

/* Output side: likewise, taking e.g.
 *     .chroma_sample_location = output_frame->chroma_location,
 * from the frame being produced. */
vaapi_vpp_fill_colour_properties(avctx, &output_props,
                                 caps.output_color_standards,
                                 caps.num_output_color_standards);
/* Copy the mapped properties into the pipeline parameter buffer; newer
 * libva versions can carry the full colour description explicitly. */
#if VA_CHECK_VERSION(1, 3, 0)
/* ... */
if (output_standard) {
    /* ... */
}
#endif

#if VA_CHECK_VERSION(1, 1, 0)
params->input_color_properties = (VAProcColorProperties) {
    /* ... */
#if VA_CHECK_VERSION(1, 3, 0)
    .transfer_characteristics = input_props.color_trc,
    .matrix_coefficients      = input_props.colorspace,
#endif
};
params->output_color_properties = (VAProcColorProperties) {
    /* ... */
#if VA_CHECK_VERSION(1, 3, 0)
    .transfer_characteristics = output_props.color_trc,
    .matrix_coefficients      = output_props.colorspace,
#endif
};
#endif
/* ff_vaapi_vpp_init_params(): fill a default pipeline parameter buffer
 * covering the (cropped) input surface. */
ctx->input_region = (VARectangle) {
    /* ... */
    .width  = input_frame->width -
             (input_frame->crop_left + input_frame->crop_right),
    /* ... */
};

*params = (VAProcPipelineParameterBuffer) {
    .surface                 = ff_vaapi_vpp_get_surface_id(input_frame),
    .surface_region          = &ctx->input_region,
    .output_region           = NULL,
    .output_background_color = VAAPI_VPP_BACKGROUND_BLACK,
    .filter_flags            = VA_FRAME_PICTURE,
#if VA_CHECK_VERSION(1, 1, 0)
    .rotation_state = VA_ROTATION_NONE,
    .mirror_state   = VA_MIRROR_NONE,
#endif
};
/* ff_vaapi_vpp_make_param_buffers(): wrap a filter parameter block in a
 * VA buffer and record it in ctx->filter_buffers[]. */
vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                     type, size, count, (void *)data, &buffer);
if (vas != VA_STATUS_SUCCESS) {
    av_log(avctx, AV_LOG_ERROR, "Failed to create parameter "
           "buffer (type %d): %d (%s).\n",
           type, vas, vaErrorStr(vas));
    return AVERROR(EIO);
}

ctx->filter_buffers[ctx->nb_filter_buffers++] = buffer;
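Filters provide their own processing parameters through the build_filter_params hook seen earlier, and this helper is how those parameters reach the driver. A minimal sketch, assuming a bob-deinterlacing filter (the example_ name and the fixed algorithm choice are illustrative; a real filter first checks what the driver supports, e.g. via vaQueryVideoProcFilterCaps()):

static int example_build_filter_params(AVFilterContext *avctx)
{
    VAProcFilterParameterBufferDeinterlacing deint = {
        .type      = VAProcFilterDeinterlacing,
        .algorithm = VAProcDeinterlacingBob,
    };

    return ff_vaapi_vpp_make_param_buffers(avctx,
                                           VAProcFilterParameterBufferType,
                                           &deint, sizeof(deint), 1);
}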
/* vaapi_vpp_render_single_pipeline_buffer(): upload one pipeline parameter
 * buffer and queue it on the picture currently being rendered. */
static int vaapi_vpp_render_single_pipeline_buffer(AVFilterContext *avctx,
                                                   VAProcPipelineParameterBuffer *params,
                                                   VABufferID *params_id)
{
    VAAPIVPPContext *ctx = avctx->priv;
    VAStatus vas;

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcPipelineParameterBufferType,
                         sizeof(*params), 1, params, params_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        *params_id = VA_INVALID_ID;
        return AVERROR(EIO);
    }

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context, params_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    return 0;
}
/* ff_vaapi_vpp_render_pictures(): render 'cout' pipeline parameter buffers
 * onto the output surface inside one vaBeginPicture()/vaEndPicture() pair. */
VABufferID *params_ids;
VASurfaceID output_surface = ff_vaapi_vpp_get_surface_id(output_frame);

params_ids = av_malloc_array(cout, sizeof(*params_ids));
if (!params_ids)
    return AVERROR(ENOMEM);

for (int i = 0; i < cout; i++)
    params_ids[i] = VA_INVALID_ID;

vas = vaBeginPicture(ctx->hwctx->display,
                     ctx->va_context, output_surface);
if (vas != VA_STATUS_SUCCESS) {
    av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
           "%d (%s).\n", vas, vaErrorStr(vas));
    err = AVERROR(EIO);
    goto fail;
}

for (int i = 0; i < cout; i++) {
    err = vaapi_vpp_render_single_pipeline_buffer(avctx, &params_list[i],
                                                  &params_ids[i]);
    if (err < 0)
        goto fail_after_begin;
}

vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
if (vas != VA_STATUS_SUCCESS) {
    av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
           "%d (%s).\n", vas, vaErrorStr(vas));
    err = AVERROR(EIO);
    goto fail_after_render;
}

/* The parameter buffers are destroyed here only when the driver does not
 * already consume them in vaRenderPicture(): with VAAPI 1.x, or with
 * drivers flagged AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS. */
if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
    AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
    for (int i = 0; i < cout && params_ids[i] != VA_INVALID_ID; i++) {
        vas = vaDestroyBuffer(ctx->hwctx->display, params_ids[i]);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            /* And ignore. */
        }
    }
}

av_freep(&params_ids);
return 0;

fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_ids[0], 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    av_freep(&params_ids);
    return err;
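Putting the pieces together, the following is a hedged sketch of the filter_frame step a VAAPI filter typically implements on top of the helpers above (error paths trimmed to the essentials; the example_ name is hypothetical, and real filters also handle passthrough mode and invalid input surfaces):

static int example_filter_frame(AVFilterLink *inlink, AVFrame *input)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink *outlink  = avctx->outputs[0];
    VAProcPipelineParameterBuffer params;
    AVFrame *output;
    int err;

    output = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!output) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    err = av_frame_copy_props(output, input);
    if (err < 0)
        goto fail;

    err = ff_vaapi_vpp_init_params(avctx, &params, input, output);
    if (err < 0)
        goto fail;

    /* A real filter would add its own adjustments to 'params' here
     * (regions, references, filter buffers, ...). */
    err = ff_vaapi_vpp_render_picture(avctx, &params, output);
    if (err < 0)
        goto fail;

    av_frame_free(&input);
    return ff_filter_frame(outlink, output);

fail:
    av_frame_free(&input);
    av_frame_free(&output);
    return err;
}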
/* ff_vaapi_vpp_render_picture(avctx, params, output_frame) is the
 * single-buffer wrapper around ff_vaapi_vpp_render_pictures() above. */
/* ff_vaapi_vpp_ctx_init(): mark every VAAPI object ID as invalid so the
 * uninit paths above are safe to run at any point. */
ctx->va_config  = VA_INVALID_ID;
ctx->va_context = VA_INVALID_ID;
ctx->valid_ids  = 1;

for (i = 0; i < VAProcFilterCount; i++)
    ctx->filter_buffers[i] = VA_INVALID_ID;
ctx->nb_filter_buffers = 0;
/* ff_vaapi_vpp_ctx_uninit(): tear the pipeline down (if one was built)
 * and drop the remaining references. */
if (ctx->valid_ids && ctx->pipeline_uninit)
    ctx->pipeline_uninit(avctx);
/* ... */
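To close the loop, here is a hedged sketch of how these lifetime helpers are usually wired into a filter definition; example_init/example_uninit are hypothetical names, and example_build_filter_params refers to the sketch further up:

static av_cold int example_init(AVFilterContext *avctx)
{
    VAAPIVPPContext *ctx = avctx->priv;

    ff_vaapi_vpp_ctx_init(avctx);
    ctx->pipeline_uninit     = ff_vaapi_vpp_pipeline_uninit;
    ctx->build_filter_params = example_build_filter_params; /* optional hook */
    ctx->output_format       = AV_PIX_FMT_NONE; /* fall back to input sw_format */

    return 0;
}

static av_cold void example_uninit(AVFilterContext *avctx)
{
    ff_vaapi_vpp_ctx_uninit(avctx);
}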
enum AVColorTransferCharacteristic color_trc
void ff_vaapi_vpp_pipeline_uninit(AVFilterContext *avctx)
void ff_vaapi_vpp_ctx_init(AVFilterContext *avctx)
static int vaapi_vpp_frame_is_rgb(const AVFrame *frame)
static VASurfaceID ff_vaapi_vpp_get_surface_id(const AVFrame *frame)
enum AVColorRange color_range
MPEG vs JPEG YUV range.
AVPixelFormat
Pixel format.
Filter: the word "frame" indicates either a video frame or a group of audio samples, as stored in an AVFrame structure. Format: for each input and each output, the list of supported formats. For video that means pixel format; for audio that means channel layout and sample format. Format lists are references to shared objects: when the negotiation mechanism computes the intersection of the formats supported at each end of a link, all references to both lists are replaced with a reference to the intersection, and when a single format is eventually chosen for a link amongst the remaining ones, all references to the list are updated. That means that if a filter requires that its input and output have the same format amongst a supported list, all it has to do is use a reference to the same list of formats. query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism to try again later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
VAAPI hardware pipeline configuration details.
AVColorTransferCharacteristic
Color Transfer Characteristic.
int ff_vaapi_vpp_render_picture(AVFilterContext *avctx, VAProcPipelineParameterBuffer *params, AVFrame *output_frame)
void * av_hwdevice_hwconfig_alloc(AVBufferRef *ref)
Allocate a HW-specific configuration structure for a given HW device.
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
uint8_t * data
The data buffer.
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
The exact code depends on how similar the blocks are and how related they are to the rest of the filter; it needs to apply these operations to the correct inlink or outlink if there are several. Macros are available to factor that out when no extra processing is needed. inlink
enum AVColorPrimaries color_primaries
enum AVColorSpace colorspace
YUV colorspace type.
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
This structure describes decoded (raw) audio or video data.
@ AVCOL_RANGE_JPEG
Full range content.
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
int ff_vaapi_vpp_render_pictures(AVFilterContext *avctx, VAProcPipelineParameterBuffer *params_list, int cout, AVFrame *output_frame)
#define AV_LOG_VERBOSE
Detailed information.
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
AVColorPrimaries
Chromaticity coordinates of the source primaries.
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
int width
The allocated dimensions of the frames in this pool.
AVHWFramesConstraints * av_hwdevice_get_hwframe_constraints(AVBufferRef *ref, const void *hwconfig)
Get the constraints on HW frames given a device and the HW-specific configuration to be used with that device.
A link between two filters.
AVFilterFormatsConfig outcfg
Lists of supported formats / etc.
const char * av_chroma_location_name(enum AVChromaLocation location)
enum AVColorPrimaries color_primaries
This struct describes the constraints on hardware frames attached to a given device with a hardware-specific configuration.
VAConfigID config_id
ID of a VAAPI pipeline configuration.
enum AVChromaLocation chroma_location
VASurfaceID * surface_ids
The surface IDs of all surfaces in the pool after creation.
const char * av_color_space_name(enum AVColorSpace space)
void * priv
private data for use by the filter
int min_width
The minimum size of frames in this hw_frames_ctx.
it's the only field you need to keep, assuming you have a context. There is some magic you don't need to care about around this, just let it be. vf type
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e. state that is not tied to a concrete processing configuration.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define FF_ARRAY_ELEMS(a)
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
void av_hwframe_constraints_free(AVHWFramesConstraints **constraints)
Free an AVHWFrameConstraints structure.
enum AVColorSpace colorspace
enum AVColorRange color_range
@ AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS
The driver does not destroy parameter buffers when they are used by vaRenderPicture().
#define av_assert0(cond)
assert() equivalent, that is always enabled.
static enum AVPixelFormat pix_fmts[]
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
int ff_vaapi_vpp_make_param_buffers(AVFilterContext *avctx, int type, const void *data, size_t size, int count)
static void vaapi_vpp_fill_chroma_sample_location(VAAPIColourProperties *props)
const char * av_color_range_name(enum AVColorRange range)
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
static int vaapi_vpp_colour_properties(AVFilterContext *avctx, VAProcPipelineParameterBuffer *params, const AVFrame *input_frame, AVFrame *output_frame)
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
AVFilterLink ** inputs
array of pointers to input links
const char * av_color_primaries_name(enum AVColorPrimaries primaries)
int ff_vaapi_vpp_config_input(AVFilterLink *inlink)
static const VAAPIColourProperties vaapi_colour_standard_map[]
void ff_vaapi_vpp_ctx_uninit(AVFilterContext *avctx)
@ AVCOL_RANGE_UNSPECIFIED
int ff_vaapi_vpp_query_formats(AVFilterContext *avctx)
#define AV_PIX_FMT_FLAG_RGB
The pixel format contains RGB-like data (as opposed to YUV/grayscale).
@ AVCHROMA_LOC_UNSPECIFIED
int format
format of the frame, -1 if unknown or unset. Values correspond to enum AVPixelFormat for video frames, enum AVSampleFormat for audio.
AVFilterContext * src
source filter
AVFilterFormatsConfig incfg
Lists of supported formats / etc.
static int output_frame(H264Context *h, AVFrame *dst, H264Picture *srcp)
#define AVERROR_EXTERNAL
Generic error in an external library.
int max_width
The maximum size of frames in this hw_frames_ctx.
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
AVChromaLocation
Location of chroma samples.
#define i(width, name, range_min, range_max)
int w
agreed upon image width
#define av_malloc_array(a, b)
AVColorSpace
YUV colorspace type.
AVBufferRef * hw_frames_ctx
For hwaccel pixel formats, this should be a reference to the AVHWFramesContext describing the frames.
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
This struct describes a set or pool of "hardware" frames (i.e. those with data not located in normal system memory).
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
static void vaapi_vpp_fill_colour_range(VAAPIColourProperties *props)
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
static void vaapi_vpp_fill_colour_properties(AVFilterContext *avctx, VAAPIColourProperties *props, VAProcColorStandardType *vacs, int nb_vacs)
int ff_vaapi_vpp_config_output(AVFilterLink *outlink)
int h
agreed upon image height
The frame and frame reference mechanism is intended to avoid, as much as possible, expensive copies of frame data while still allowing the filters to produce correct results. The data is stored in buffers represented by AVFrame structures, and several references can point to the same frame buffer.
@ AVCHROMA_LOC_CENTER
MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0.
VAProcColorStandardType va_color_standard
uint8_t va_chroma_sample_location
enum AVChromaLocation chroma_sample_location
static void vaapi_vpp_fill_colour_standard(VAAPIColourProperties *props, VAProcColorStandardType *vacs, int nb_vacs)
int initial_pool_size
Initial size of the frame pool.
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes of an image.
VAAPI-specific data associated with a frame pool.
static int vaapi_vpp_render_single_pipeline_buffer(AVFilterContext *avctx, VAProcPipelineParameterBuffer *params, VABufferID *params_id)
enum AVColorTransferCharacteristic color_trc
#define VAAPI_VPP_BACKGROUND_BLACK
AVColorRange
Visual content value range.
const char * av_color_transfer_name(enum AVColorTransferCharacteristic transfer)
@ AVCHROMA_LOC_BOTTOMLEFT
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
int ff_filter_init_hw_frames(AVFilterContext *avctx, AVFilterLink *link, int default_pool_size)
Perform any additional setup required for hardware frames.
int ff_vaapi_vpp_init_params(AVFilterContext *avctx, VAProcPipelineParameterBuffer *params, const AVFrame *input_frame, AVFrame *output_frame)
AVFilterLink ** outputs
array of pointers to output links