#define IS_VIDEO_MEMORY(mode)  (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
                                        MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)
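/*
 * Illustrative only (not part of qsvvpp.c): the three predicates classify a
 * MFX_MEMTYPE_* combination the way the IOPattern selection further down
 * does. "pool_type" is a stand-in for the frame_type stored in the QSV
 * frames context.
 */
static const char *memory_kind(int pool_type)
{
    if (IS_OPAQUE_MEMORY(pool_type))
        return "opaque";
    if (IS_VIDEO_MEMORY(pool_type))
        return "video";
    if (IS_SYSTEM_MEMORY(pool_type))
        return "system";
    return "unknown";
}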
static const mfxHandleType handle_types[] = {
    MFX_HANDLE_VA_DISPLAY,
    MFX_HANDLE_D3D9_DEVICE_MANAGER,
    MFX_HANDLE_D3D11_DEVICE,
};
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;

    if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
        /* ... hand out the cached input surface pointers ... */
    } else {
        /* ... hand out the cached output surface pointers ... */
    }
    return MFX_ERR_NONE;
}
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}

static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
static int pix_fmt_to_mfx_fourcc(int format)
{
    switch (format) {
    case AV_PIX_FMT_YUV420P: return MFX_FOURCC_YV12;
    case AV_PIX_FMT_NV12:    return MFX_FOURCC_NV12;
    case AV_PIX_FMT_YUYV422: return MFX_FOURCC_YUY2;
    case AV_PIX_FMT_BGRA:    return MFX_FOURCC_RGB4;
    }
    return MFX_FOURCC_NV12;
}
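/* Illustrative call (not in the source): look up the FourCC the SDK expects
 * for a given software pixel format. */
int fourcc = pix_fmt_to_mfx_fourcc(AV_PIX_FMT_NV12);   /* -> MFX_FOURCC_NV12 */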
static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
{
    switch (frame->format) {
    case AV_PIX_FMT_NV12:
        surface->Data.Y  = frame->data[0];
        surface->Data.UV = frame->data[1];
        break;
    case AV_PIX_FMT_YUV420P:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[1];
        surface->Data.V = frame->data[2];
        break;
    case AV_PIX_FMT_YUYV422:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[0] + 1;
        surface->Data.V = frame->data[0] + 3;
        break;
    case AV_PIX_FMT_BGRA:
        surface->Data.B = frame->data[0];
        surface->Data.G = frame->data[0] + 1;
        surface->Data.R = frame->data[0] + 2;
        surface->Data.A = frame->data[0] + 3;
        break;
    default:
        return MFX_ERR_UNSUPPORTED;
    }
    surface->Data.Pitch = frame->linesize[0];

    return 0;
}
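/*
 * Illustrative sketch (not the library's API): wrap a system-memory NV12
 * AVFrame in an mfxFrameSurface1, roughly the way the mapping above is used
 * for system-memory input. The wrapper name is hypothetical, and the real
 * code copies/pads the frame first when its buffer is not aligned as the SDK
 * requires.
 */
static int wrap_nv12_frame(const AVFrame *frame, mfxFrameSurface1 *surface)
{
    if (frame->format != AV_PIX_FMT_NV12)
        return AVERROR(EINVAL);

    *surface = (mfxFrameSurface1){ 0 };
    surface->Info.FourCC       = MFX_FOURCC_NV12;
    surface->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
    surface->Info.Width        = FFALIGN(frame->width,  32); /* allocation size */
    surface->Info.Height       = FFALIGN(frame->height, 32);
    surface->Info.CropW        = frame->width;               /* visible size */
    surface->Info.CropH        = frame->height;
    surface->Data.Y            = frame->data[0];
    surface->Data.UV           = frame->data[1];
    surface->Data.Pitch        = frame->linesize[0];
    return 0;
}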
    /* hardware frames: take the description from the first surface in the pool */
    frames_hwctx = frames_ctx->hwctx;
    *frameinfo   = frames_hwctx->surfaces[0].Info;

    /* system-memory frames: build the description from the link properties */
    frameinfo->CropX          = 0;
    frameinfo->CropY          = 0;
    frameinfo->Width          = FFALIGN(link->w, 32);
    frameinfo->Height         = FFALIGN(link->h, 32);
    frameinfo->PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;
    /* ... */
    frameinfo->BitDepthLuma   = desc->comp[0].depth;
    frameinfo->BitDepthChroma = desc->comp[0].depth;
    frameinfo->Shift          = desc->comp[0].depth > 8;
    if (desc->log2_chroma_w && desc->log2_chroma_h)
        frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
    else if (desc->log2_chroma_w)
        frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
    else
        frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;

    /* the crop rectangle always reflects the link's real dimensions */
    frameinfo->CropW = link->w;
    frameinfo->CropH = link->h;
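/*
 * Worked example (values chosen by hand, not from the source): for a
 * hypothetical 1920x1080 progressive NV12 link, the allocation size is
 * 32-aligned while the crop keeps the real picture size.
 */
mfxFrameInfo info = { 0 };
info.Width  = FFALIGN(1920, 32);   /* 1920, already a multiple of 32 */
info.Height = FFALIGN(1080, 32);   /* 1088 */
info.CropW  = 1920;
info.CropH  = 1080;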
        *list = (*list)->next;
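/*
 * A sketch of the free-list walk this line belongs to (the helper name and
 * exact bookkeeping are mine; the QSVFrame fields and Data.Locked are real):
 * frames whose surface the SDK no longer holds locked are released for reuse.
 */
static void release_unlocked_frames(QSVFrame *list)
{
    while (list) {
        if (list->surface && !list->surface->Data.Locked) {
            list->surface = NULL;
            av_frame_free(&list->frame);
        }
        list = list->next;
    }
}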
    if (!qsv_frame->frame)
        return NULL;
    /* ... */
    qsv_frame->surface->Info.PicStruct =
            !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
            (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
                                                 MFX_PICSTRUCT_FIELD_BFF);
    if (qsv_frame->frame->repeat_pict == 1)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
    else if (qsv_frame->frame->repeat_pict == 2)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
    else if (qsv_frame->frame->repeat_pict == 4)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;
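/* Illustration only: an interlaced, top-field-first picture with
 * repeat_pict == 1 ends up flagged as */
mfxU16 pic_struct = MFX_PICSTRUCT_FIELD_TFF | MFX_PICSTRUCT_FIELD_REPEATED;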
    if (!out_frame->frame)
        return NULL;
    /* ... */
    if (!out_frame->frame)
        return NULL;
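/*
 * For context, a sketch (the function name is hypothetical) of how an output
 * frame is obtained from the outlink's AVHWFramesContext; for AV_PIX_FMT_QSV
 * the resulting mfxFrameSurface1 pointer lives in frame->data[3].
 */
static AVFrame *alloc_hw_output(AVFilterLink *outlink)
{
    AVFrame *frame = av_frame_alloc();

    if (!frame)
        return NULL;
    if (av_hwframe_get_buffer(outlink->hw_frames_ctx, frame, 0) < 0) {
        av_frame_free(&frame);
        return NULL;
    }
    return frame;
}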
    in_frames_hwctx = frames_ctx->hwctx;
    /* ... */
    device_hwctx = device_ctx->hwctx;

    /* output pool type: opaque if the input pool is opaque, video memory otherwise */
    s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
                      MFX_MEMTYPE_OPAQUE_FRAME :
                      MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    /* ... */
    out_frames_hwctx = out_frames_ctx->hwctx;
    /* ... */
    for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
        s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;

    /* query the implementation and API version of the session handed to us */
    ret = MFXQueryIMPL(device_hwctx->session, &impl);
    if (ret == MFX_ERR_NONE)
        ret = MFXQueryVersion(device_hwctx->session, &ver);
    if (ret != MFX_ERR_NONE) {
        /* ... log the failure and return AVERROR_UNKNOWN ... */
    }

    /* probe for the device handle (VA display, D3D9 device manager or D3D11 device) */
    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
        ret = MFXVideoCORE_GetHandle(device_hwctx->session,
                                     handle_types[i], &handle);
        if (ret == MFX_ERR_NONE) {
            handle_type = handle_types[i];
            break;
        }
    }
    if (ret != MFX_ERR_NONE) {
        /* ... log the failure and return AVERROR_UNKNOWN ... */
    }

    /* create the filter's own session with the same implementation and version */
    ret = MFXInit(impl, &ver, &s->session);
    if (ret != MFX_ERR_NONE) {
        /* ... log the failure and return AVERROR_UNKNOWN ... */
    }

    ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
    if (ret != MFX_ERR_NONE)
        return AVERROR_UNKNOWN;
    /* ... */
    if (ret != MFX_ERR_NONE)
        return AVERROR_UNKNOWN;

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        /* opaque memory: describe the surface pools through an ext buffer */
        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        /* ... */
    } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
        /* video memory: install the thin allocator defined above */
        mfxFrameAllocator frame_allocator = {
            .pthis  = s,
            .Alloc  = frame_alloc,
            .Lock   = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free   = frame_free,
        };

        ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }
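/*
 * Minimal standalone sketch (not from qsvvpp.c) of the session lifetime the
 * code above relies on: MFXInit() with an implementation/version pair,
 * MFXClose() when done.
 */
mfxSession session = NULL;
mfxIMPL    impl    = MFX_IMPL_AUTO_ANY;
mfxVersion ver     = { { 1, 1 } };          /* {Minor, Major} -> API 1.1 */

if (MFXInit(impl, &ver, &session) == MFX_ERR_NONE) {
    /* ... use the session ... */
    MFXClose(session);
}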
    for (i = 0; i < param->num_crop; i++) {
        /* ... */
    }
    /* ... */
    if (IS_SYSTEM_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;

    if (IS_SYSTEM_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
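/* Illustrative combination (not from the source): video-memory input with
 * system-memory output would yield */
mfxU16 io_pattern = MFX_IOPATTERN_IN_VIDEO_MEMORY | MFX_IOPATTERN_OUT_SYSTEM_MEMORY;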
    do {
        do {
            ret = MFXVideoVPP_RunFrameVPPAsync(s->session, in_frame->surface,
                                               out_frame->surface, NULL, &sync);
            if (ret == MFX_WRN_DEVICE_BUSY)
                av_usleep(500);
        } while (ret == MFX_WRN_DEVICE_BUSY);

        if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
            if (ret == MFX_ERR_MORE_DATA)
                /* ... treated as "need more input", not as a failure ... */;
            break;
        }

        if (MFXVideoCORE_SyncOperation(s->session, sync, 1000) < 0) {
            /* the original logs a warning here and carries on */
        }

        filter_ret = s->filter_frame(outlink, out_frame->frame);
        if (filter_ret < 0) {
            /* ... */
        }
    } while (ret == MFX_ERR_MORE_SURFACE);
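/*
 * A sketch of how a concrete QSV filter is expected to drive this loop.
 * "MyQSVFilterContext", its "qsv" member and my_filter_frame() are
 * hypothetical stand-ins; the real filters keep their QSVVPPContext in the
 * filter's priv data and delegate in roughly this way.
 */
typedef struct MyQSVFilterContext {
    QSVVPPContext *qsv;
} MyQSVFilterContext;

static int my_filter_frame(AVFilterLink *inlink, AVFrame *frame)
{
    AVFilterContext    *ctx = inlink->dst;
    MyQSVFilterContext *s   = ctx->priv;
    int ret;

    /* submit the frame, run the VPP and forward every produced output
     * frame through the filter_frame callback stored in the context */
    ret = ff_qsvvpp_filter_frame(s->qsv, inlink, frame);
    av_frame_free(&frame);
    return ret;
}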