#include "config_components.h"

#if !NVDECAPI_CHECK_VERSION(9, 0)
#define cudaVideoSurfaceFormat_YUV444 2
#define cudaVideoSurfaceFormat_YUV444_16Bit 3

#define CHECK_CU(x) FF_CUDA_CHECK_DL(logctx, decoder->cudl, x)

#if CONFIG_AV1_NVDEC_HWACCEL
static int map_chroma_format(enum AVPixelFormat pix_fmt)
{
    int shift_h = 0, shift_v = 0;

    if (av_pix_fmt_count_planes(pix_fmt) == 1)
        return cudaVideoChromaFormat_Monochrome;

    av_pix_fmt_get_chroma_sub_sample(pix_fmt, &shift_h, &shift_v);
    if (shift_h == 1 && shift_v == 1)
        return cudaVideoChromaFormat_420;
    else if (shift_h == 1 && shift_v == 0)
        return cudaVideoChromaFormat_422;
    else if (shift_h == 0 && shift_v == 0)
        return cudaVideoChromaFormat_444;
    return -1;
}
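As a hedged illustration of the mapping above (a standalone sketch, not part of nvdec.c; print_shifts is a made-up helper), the chroma shifts of a few common software formats and the cuvid format they select:

#include <stdio.h>
#include <libavutil/pixdesc.h>

/* Illustrative: AV_PIX_FMT_YUV420P has shifts (1,1) -> cudaVideoChromaFormat_420,
 * AV_PIX_FMT_YUV422P has (1,0) -> _422, AV_PIX_FMT_YUV444P has (0,0) -> _444. */
static void print_shifts(enum AVPixelFormat fmt)
{
    int shift_h = 0, shift_v = 0;
    av_pix_fmt_get_chroma_sub_sample(fmt, &shift_h, &shift_v);
    printf("%s: %d,%d\n", av_get_pix_fmt_name(fmt), shift_h, shift_v);
}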
static int nvdec_test_capabilities(NVDECDecoder *decoder,
                                   CUVIDDECODECREATEINFO *params,
                                   void *logctx)
{
    CUVIDDECODECAPS caps = { 0 };

    caps.eCodecType      = params->CodecType;
    caps.eChromaFormat   = params->ChromaFormat;
    caps.nBitDepthMinus8 = params->bitDepthMinus8;

    if (!decoder->cvdl->cuvidGetDecoderCaps) {
        av_log(logctx, AV_LOG_WARNING,
               "Used Nvidia driver is too old to perform a capability check.\n");
#if defined(_WIN32) || defined(__CYGWIN__)
               ". Continuing blind.\n");

           caps.bIsSupported ? "yes" : "no", caps.nMaxMBCount);
           caps.nMinWidth, caps.nMaxWidth);
           caps.nMinHeight, caps.nMaxHeight);

    if (!caps.bIsSupported) {

    if (params->ulWidth > caps.nMaxWidth || params->ulWidth < caps.nMinWidth) {
            (int)params->ulWidth, caps.nMinWidth, caps.nMaxWidth);

    if (params->ulHeight > caps.nMaxHeight || params->ulHeight < caps.nMinHeight) {
            (int)params->ulHeight, caps.nMinHeight, caps.nMaxHeight);

    if ((params->ulWidth * params->ulHeight) / 256 > caps.nMaxMBCount) {
            (int)(params->ulWidth * params->ulHeight) / 256, caps.nMaxMBCount);
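The last check above converts the surface area to 16x16 macroblocks (256 pixels each) before comparing it with the driver limit. A quick worked example of that arithmetic (the resolution is illustrative):

/* 1920 * 1080 = 2073600 pixels; 2073600 / 256 = 8100 macroblocks,
 * which must not exceed caps.nMaxMBCount for decoder creation to proceed. */
unsigned long mb_count = (1920UL * 1080UL) / 256;  /* == 8100 */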
    void *logctx = decoder->hw_device_ref->data;

    cuvid_free_functions(&decoder->cvdl);
static int nvdec_decoder_create(NVDECDecoder **out, AVBufferRef *hw_device_ref,
                                CUVIDDECODECREATEINFO *params, void *logctx)
{
    decoder->cuda_ctx = device_hwctx->cuda_ctx;
    decoder->cudl     = device_hwctx->internal->cuda_dl;
    decoder->stream   = device_hwctx->stream;

    ret = cuvid_load_functions(&decoder->cvdl, logctx);
    unsigned int *intp = obj;

    ctx->bitstream_len       = 0;
    ctx->bitstream_allocated = 0;

    ctx->slice_offsets_allocated = 0;
    if (!frames_ctx->pool) {
    CUVIDDECODECREATEINFO params = { 0 };

    int cuvid_codec_type, cuvid_chroma_format, chroma_444;

    if (cuvid_codec_type < 0) {

    if (cuvid_chroma_format < 0) {

    chroma_444 = ctx->supports_444 && cuvid_chroma_format == cudaVideoChromaFormat_444;
    if (!real_hw_frames_ref)

        cudaVideoSurfaceFormat_NV12;

        cudaVideoSurfaceFormat_P016;
    params.bitDepthMinus8 = sw_desc->comp[0].depth - 8;

    params.CodecType    = cuvid_codec_type;
    params.ChromaFormat = cuvid_chroma_format;

    if (params.ulNumDecodeSurfaces > 32) {
        av_log(avctx, AV_LOG_WARNING,
               "Using more than 32 (%d) decode surfaces might cause nvdec to fail.\n",
               (int)params.ulNumDecodeSurfaces);
    decoder->unsafe_output      = unsafe_output;
    decoder->real_hw_frames_ref = real_hw_frames_ref;
    real_hw_frames_ref = NULL;

    if (!ctx->decoder_pool) {
    void *logctx = decoder->hw_device_ref->data;
    CUdeviceptr devptr = (CUdeviceptr)opaque;
    CUVIDPROCPARAMS vpp = { 0 };

    unsigned int pitch, i;

    int shift_h = 0, shift_v = 0;

    vpp.progressive_frame = 1;
    vpp.output_stream     = decoder->stream;

    unmap_data->idx = cf->idx;
    if (ret < 0 || decoder->unsafe_output)

    ctx->bitstream_len = 0;

    cf->ref_idx = cf->idx = *cf->idx_ref;

    if (!cf->ref_idx_ref) {
        if (!cf->ref_idx_ref) {

    cf->ref_idx = *cf->ref_idx_ref;

    cf->ref_idx = cf->idx;
    void *logctx = avctx;
    CUVIDPICPARAMS *pp = &ctx->pic_params;

    pp->nBitstreamDataLen = ctx->bitstream_len;
    pp->pBitstreamData    = ctx->bitstream;
    pp->nNumSlices        = ctx->nb_slices;
    pp->pSliceDataOffsets = ctx->slice_offsets;

    ctx->bitstream_len = 0;

    tmp = av_fast_realloc(ctx->slice_offsets, &ctx->slice_offsets_allocated,
                          (ctx->nb_slices + 1) * sizeof(*ctx->slice_offsets));
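This call grows the slice offset array with av_fast_realloc, which only reallocates when the requested size exceeds the tracked capacity. A minimal, hedged sketch of the same pattern on a standalone array (grow_offsets and its variables are illustrative names, not from nvdec.c):

#include <stdint.h>
#include <libavutil/error.h>
#include <libavutil/mem.h>

/* Grow 'offsets' so it can hold nb + 1 entries; 'allocated' tracks the byte
 * capacity and is updated by av_fast_realloc when it actually reallocates. */
static int grow_offsets(uint32_t **offsets, unsigned int *allocated, int nb)
{
    void *tmp = av_fast_realloc(*offsets, allocated,
                                (nb + 1) * sizeof(**offsets));
    if (!tmp)
        return AVERROR(ENOMEM);
    *offsets = tmp;
    return 0;
}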
    int cuvid_codec_type, cuvid_chroma_format, chroma_444;

    if (cuvid_codec_type < 0) {

    if (cuvid_chroma_format < 0) {

    chroma_444 = supports_444 && cuvid_chroma_format == cudaVideoChromaFormat_444;

    switch (sw_desc->comp[0].depth) {
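The listing above is the hwaccel backend itself; on the application side, NVDEC is reached by attaching a CUDA device context to the decoder before opening it. A hedged, illustrative sketch (open_cuda_decoder is a made-up helper; error paths are shortened):

#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>

/* Create a CUDA device and hand it to an already-allocated decoder context
 * so the NVDEC hwaccel can be selected through get_format(). */
static int open_cuda_decoder(AVCodecContext *avctx, const AVCodec *codec)
{
    AVBufferRef *device = NULL;
    int ret = av_hwdevice_ctx_create(&device, AV_HWDEVICE_TYPE_CUDA,
                                     NULL, NULL, 0);
    if (ret < 0)
        return ret;
    avctx->hw_device_ctx = device; /* the codec context owns this reference now */
    return avcodec_open2(avctx, codec, NULL);
}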
const struct AVHWAccel * hwaccel
Hardware accelerator in use.
AVBufferPool * av_buffer_pool_init(size_t size, AVBufferRef *(*alloc)(size_t size))
Allocate and initialize a buffer pool.
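A minimal usage sketch of this pool API (illustrative values, not taken from nvdec.c; pool_demo is a made-up helper):

#include <libavutil/buffer.h>

/* Create a pool of 1 KiB buffers with the default allocator, draw one buffer,
 * then release both the buffer and the pool. */
static int pool_demo(void)
{
    AVBufferPool *pool = av_buffer_pool_init(1024, NULL);
    AVBufferRef *buf = pool ? av_buffer_pool_get(pool) : NULL;
    int ok = buf != NULL;
    av_buffer_unref(&buf);
    av_buffer_pool_uninit(&pool);
    return ok ? 0 : -1;
}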
#define AV_LOG_WARNING
Something somehow does not look correct.
@ AV_PIX_FMT_CUDA
HW acceleration through CUDA.
AVPixelFormat
Pixel format.
void * ff_refstruct_ref(void *obj)
Create a new reference to an object managed via this API, i.e.
static int map_avcodec_id(enum AVCodecID id)
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
uint8_t * data
The data buffer.
static int map_chroma_format(enum AVPixelFormat pix_fmt)
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
static void * ff_refstruct_alloc_ext(size_t size, unsigned flags, void *opaque, void(*free_cb)(FFRefStructOpaque opaque, void *obj))
A wrapper around ff_refstruct_alloc_ext_c() for the common case of a non-const qualified opaque.
int ff_nvdec_get_ref_idx(AVFrame *frame)
This struct stores per-frame lavc-internal data and is attached to it via private_ref.
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
This structure describes decoded (raw) audio or video data.
int av_frame_make_writable(AVFrame *frame)
Ensure that the frame data is writable, avoiding data copy if possible.
void(* free)(struct AVHWFramesContext *ctx)
This field may be set by the caller before calling av_hwframe_ctx_init().
int depth
Number of bits in the component.
#define AV_LOG_VERBOSE
Detailed information.
void(* hwaccel_priv_free)(void *priv)
RefStruct is an API for creating reference-counted objects with minimal overhead.
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
int width
The allocated dimensions of the frames in this pool.
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
static int nvdec_decoder_frame_init(FFRefStructOpaque opaque, void *obj)
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
int av_pix_fmt_count_planes(enum AVPixelFormat pix_fmt)
unsigned int * idx_ref
RefStruct reference.
int ff_nvdec_start_frame(AVCodecContext *avctx, AVFrame *frame)
int thread_count
thread count is used to decide how many independent tasks should be passed to execute()
static void ff_refstruct_pool_uninit(FFRefStructPool **poolp)
Mark the pool as being available for freeing.
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define cudaVideoSurfaceFormat_YUV444
#define AV_HWACCEL_FLAG_UNSAFE_OUTPUT
Some hardware decoders (namely nvdec) can either output direct decoder surfaces, or make an on-device...
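A hedged illustration of how a caller would opt in to this behaviour (assuming avctx is an allocated but not yet opened AVCodecContext):

/* Ask nvdec for direct decoder surfaces instead of copied output frames;
 * the caller must then return frames promptly, since the decoder surface
 * pool is small and fixed in size. */
avctx->hwaccel_flags |= AV_HWACCEL_FLAG_UNSAFE_OUTPUT;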
AVBufferPool * pool
A pool from which the frames are allocated by av_hwframe_get_buffer().
int ff_nvdec_start_frame_sep_ref(AVCodecContext *avctx, AVFrame *frame, int has_sep_ref)
void * av_fast_realloc(void *ptr, unsigned int *size, size_t min_size)
Reallocate the given buffer if it is not large enough, otherwise do nothing.
static void nvdec_decoder_frame_pool_free(FFRefStructOpaque opaque)
#define AV_PIX_FMT_YUV444P16
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
static void nvdec_decoder_free(FFRefStructOpaque unused, void *obj)
static enum AVPixelFormat pix_fmt
int ff_nvdec_simple_end_frame(AVCodecContext *avctx)
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
int ff_nvdec_decode_init(AVCodecContext *avctx)
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
AVBufferRef * hw_device_ref
struct AVCodecInternal * internal
Private context used for internal data.
void av_buffer_pool_uninit(AVBufferPool **ppool)
Mark the pool as being available for freeing.
static int nvdec_decoder_create(NVDECDecoder **out, AVBufferRef *hw_device_ref, CUVIDDECODECREATEINFO *params, void *logctx)
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
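A minimal, hedged sketch of wrapping an existing allocation with this function (wrap_array is an illustrative name, not from nvdec.c):

#include <libavutil/buffer.h>
#include <libavutil/mem.h>

/* Wrap a plain av_malloc'ed array in an AVBufferRef; av_buffer_default_free
 * releases it once the last reference is dropped. */
static AVBufferRef *wrap_array(size_t size)
{
    uint8_t *data = av_malloc(size);
    AVBufferRef *buf;
    if (!data)
        return NULL;
    buf = av_buffer_create(data, size, av_buffer_default_free, NULL, 0);
    if (!buf)
        av_free(data);
    return buf;
}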
AVCodecID
Identify the syntax and semantics of the bitstream.
#define cudaVideoSurfaceFormat_YUV444_16Bit
static AVBufferRef * nvdec_alloc_dummy(size_t size)
AVBufferRef * real_hw_frames_ref
int(* post_process)(void *logctx, AVFrame *frame)
The callback to perform some delayed processing on the frame right before it is returned to the calle...
void * hwaccel_priv_data
hwaccel-specific private data
unsigned int * ref_idx_ref
RefStruct reference.
static int nvdec_test_capabilities(NVDECDecoder *decoder, CUVIDDECODECREATEINFO *params, void *logctx)
static void nvdec_free_dummy(struct AVHWFramesContext *ctx)
int ff_nvdec_decode_uninit(AVCodecContext *avctx)
AVBufferRef * private_ref
AVBufferRef for internal use by a single libav* library.
struct NVDECDecoder * decoder
RefStruct reference.
static AVBufferRef * hw_device_ctx
static void nvdec_fdd_priv_free(void *priv)
int ff_nvdec_end_frame(AVCodecContext *avctx)
int hwaccel_flags
Bit set of AV_HWACCEL_FLAG_* flags, which affect hardware accelerated decoding (if active).
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
AVBufferRef * hw_device_ctx
A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/d...
int av_buffer_replace(AVBufferRef **pdst, const AVBufferRef *src)
Ensure dst refers to the same data as src.
AVBufferRef * hw_frames_ctx
A reference to the AVHWFramesContext describing the input (for encoding) or output (decoding) frames.
static int nvdec_retrieve_data(void *logctx, AVFrame *frame)
This struct describes a set or pool of "hardware" frames (i.e.
This struct is allocated as AVHWDeviceContext.hwctx.
int avcodec_get_hw_frames_parameters(AVCodecContext *avctx, AVBufferRef *device_ref, enum AVPixelFormat hw_pix_fmt, AVBufferRef **out_frames_ref)
Create and return a AVHWFramesContext with values adequate for hardware decoding.
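A hedged sketch of how a get_format() callback might use this helper to derive CUDA frame parameters (setup_cuda_frames is an illustrative name; error handling is shortened):

#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>
#include <libavutil/pixfmt.h>

/* Fill a frames context with parameters suitable for decoding to CUDA
 * surfaces, finalize it, and attach it to the codec context. */
static int setup_cuda_frames(AVCodecContext *avctx)
{
    AVBufferRef *frames_ref = NULL;
    int ret = avcodec_get_hw_frames_parameters(avctx, avctx->hw_device_ctx,
                                               AV_PIX_FMT_CUDA, &frames_ref);
    if (ret < 0)
        return ret;
    ret = av_hwframe_ctx_init(frames_ref);
    if (ret < 0) {
        av_buffer_unref(&frames_ref);
        return ret;
    }
    avctx->hw_frames_ctx = frames_ref;
    return 0;
}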
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
static int nvdec_init_hwframes(AVCodecContext *avctx, AVBufferRef **out_frames_ref, int dummy)
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
unsigned int nb_allocated
main external API structure.
int ff_nvdec_simple_decode_slice(AVCodecContext *avctx, const uint8_t *buffer, uint32_t size)
static void nvdec_unmap_mapped_frame(void *opaque, uint8_t *data)
int ff_nvdec_frame_params(AVCodecContext *avctx, AVBufferRef *hw_frames_ctx, int dpb_size, int supports_444)
static FFRefStructPool * ff_refstruct_pool_alloc_ext(size_t size, unsigned flags, void *opaque, int(*init_cb)(FFRefStructOpaque opaque, void *obj), void(*reset_cb)(FFRefStructOpaque opaque, void *obj), void(*free_entry_cb)(FFRefStructOpaque opaque, void *obj), void(*free_cb)(FFRefStructOpaque opaque))
A wrapper around ff_refstruct_pool_alloc_ext_c() for the common case of a non-const qualified opaque.
AVComponentDescriptor comp[4]
Parameters that describe how pixels are packed.
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
int coded_width
Bitstream width / height, may be different from width/height e.g.
int initial_pool_size
Initial size of the frame pool.
A reference to a data buffer.
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
void * hwaccel_priv
Per-frame private data for hwaccels.
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
enum AVPixelFormat sw_pix_fmt
Nominal unaccelerated pixel format, see AV_PIX_FMT_xxx.
@ AV_CODEC_ID_MPEG2VIDEO
preferred ID for MPEG-1/2 video decoding
enum AVPixelFormat pix_fmt
Supported pixel format.
void ff_refstruct_unref(void *objp)
Decrement the reference count of the underlying object and automatically free the object if there are...
void * ff_refstruct_pool_get(FFRefStructPool *pool)
Get an object from the pool, reusing an old one from the pool when available.