From do_lumakey_slice8(), the 8-bit per-slice worker:

const int slice_start = (frame->height * jobnr) / nb_jobs;
/* ... */
const int w = s->white;
/* ... */

for (y = slice_start; y < slice_end; y++) {
    for (x = 0; x < frame->width; x++) {
        if (luma[x] >= b && luma[x] <= w) {
            alpha[x] = 0;                       /* luma inside [b, w]: fully transparent */
        } else if (luma[x] > b - so && luma[x] < w + so) {
            if (luma[x] < b)                    /* softness ramp just below the keyed range */
                alpha[x] = 255 - (luma[x] - b + so) * 255 / so;
            else                                /* softness ramp just above the keyed range */
                alpha[x] = (luma[x] - w) * 255 / so;
        }
    }
    /* ... luma and alpha advance to the next row here ... */
}
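Read on its own, the loop maps each 8-bit luma sample to an alpha value: samples inside [b, w] are keyed out completely, samples within the softness band of width so on either side get a linear ramp, and all other samples keep their existing alpha. Below is a minimal standalone sketch of that mapping; the helper name and the test levels are illustrative only, not part of the filter.

#include <stdint.h>
#include <stdio.h>

/* Illustrative re-statement of the 8-bit luma-to-alpha mapping above.
 * b and w are the black/white key levels, so is the softness width,
 * all expressed on the 0..255 scale. */
static uint8_t luma_to_alpha8(int luma, int b, int w, int so, uint8_t old_alpha)
{
    if (luma >= b && luma <= w)
        return 0;                                   /* keyed out */
    if (so > 0 && luma > b - so && luma < b)
        return 255 - (luma - b + so) * 255 / so;    /* ramps 255 -> 0 as luma rises to b */
    if (so > 0 && luma > w && luma < w + so)
        return (luma - w) * 255 / so;               /* ramps 0 -> 255 as luma rises to w + so */
    return old_alpha;                               /* outside the key: alpha untouched */
}

int main(void)
{
    const int b = 96, w = 160, so = 32;             /* hypothetical key levels */
    for (int luma = 64; luma <= 192; luma += 16)
        printf("luma %3d -> alpha %3d\n", luma, luma_to_alpha8(luma, b, w, so, 255));
    return 0;
}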
From do_lumakey_slice16(), the high-bit-depth per-slice worker:

const int slice_start = (frame->height * jobnr) / nb_jobs;
/* ... */
uint16_t *alpha = (uint16_t *)(frame->data[3] + slice_start * frame->linesize[3]);
const uint16_t *luma = (const uint16_t *)(frame->data[0] + slice_start * frame->linesize[0]);
/* ... */
const int w = s->white;
/* ... */

for (y = slice_start; y < slice_end; y++) {
    for (x = 0; x < frame->width; x++) {
        if (luma[x] >= b && luma[x] <= w) {
            alpha[x] = 0;                       /* luma inside [b, w]: fully transparent */
        } else if (luma[x] > b - so && luma[x] < w + so) {
            if (luma[x] < b)                    /* softness ramp just below the keyed range */
                alpha[x] = m - (luma[x] - b + so) * m / so;
            else                                /* softness ramp just above the keyed range */
                alpha[x] = (luma[x] - w) * m / so;
        }
    }
    /* ... luma and alpha advance to the next row here (linesize is in bytes) ... */
}
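The high-bit-depth path differs mainly in its 16-bit sample pointers and in using the full-scale value m in place of the literal 255. As a worked example with hypothetical 10-bit settings m = 1023, b = 100 and so = 50: a sample with luma 60 lies in the lower softness band, so alpha = 1023 - (60 - 100 + 50) * 1023 / 50 = 1023 - 204 = 819 in integer arithmetic, and the ramp reaches 0 as luma rises to b = 100.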
s->max = (1 << depth) - 1;
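This assignment comes from config_input(), which derives the full-scale alpha value from the component depth reported by the pixel format descriptor: a depth of 10 gives max = (1 << 10) - 1 = 1023 and a depth of 16 gives 65535, while the 8-bit worker above simply hard-codes 255.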
#define OFFSET(x) offsetof(LumakeyContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
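OFFSET() and FLAGS are the usual shorthand for populating the lumakey_options[] table referenced further down in this listing. A sketch of the shape of one such entry follows; the option name, default and range here are hypothetical and may differ from the real table.

static const AVOption example_options[] = {
    { "threshold", "set the threshold value", OFFSET(threshold),
      AV_OPT_TYPE_DOUBLE, { .dbl = 0 }, 0, 1, FLAGS },
    { NULL }
};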
.priv_class = &lumakey_class,
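The .priv_class assignment sits inside the filter's AVFilter definition. A plausible sketch of that definition, based on the symbols cross-referenced below; field values other than .priv_class are assumptions, not a verbatim excerpt:

AVFilter ff_vf_lumakey = {
    .name          = "lumakey",
    .description   = NULL_IF_CONFIG_SMALL("Turns a certain luma into transparency."),
    .priv_size     = sizeof(LumakeyContext),
    .priv_class    = &lumakey_class,
    .query_formats = query_formats,
    .inputs        = lumakey_inputs,
    .outputs       = lumakey_outputs,
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,
};

A typical invocation keys near-black pixels out of one input and overlays the result on another, for example: ffmpeg -i fg.mp4 -i bg.mp4 -filter_complex "[0:v]lumakey=threshold=0:tolerance=0.05:softness=0.02[k];[1:v][k]overlay" out.mp4 (option names per the lumakey documentation; the values are arbitrary).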
Symbols and fields cross-referenced in this listing:

static int do_lumakey_slice8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
static int do_lumakey_slice16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
int (*do_lumakey_slice)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
static av_cold int query_formats(AVFilterContext *ctx)
static int config_input(AVFilterLink *inlink)
static int filter_frame(AVFilterLink *link, AVFrame *frame)
static const AVOption lumakey_options[]
static const AVFilterPad lumakey_inputs[]
static const AVFilterPad lumakey_outputs[]
static const AVFilterPad inputs[]
static const AVFilterPad outputs[]
AVFILTER_DEFINE_CLASS(lumakey)

int ff_filter_frame(AVFilterLink *link, AVFrame *frame): Send a frame of data to the next filter.
int ff_filter_get_nb_threads(AVFilterContext *ctx): Get number of threads for current filter instance.
int av_frame_make_writable(AVFrame *frame): Ensure that the frame data is writable, avoiding data copy if possible.
const AVPixFmtDescriptor *av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
avfilter_execute_func *execute
AVFilterInternal *internal: An opaque struct for libavfilter internal use.
void *priv: private data for use by the filter.
AVFilterLink **outputs: array of pointers to output links.
AVFilterContext *dst: dest filter.
int format: agreed upon media format.
const char *name: Pad name.
const char *name: Filter name.
AVFilterPad: A filter pad used for either input or output.
AVFilterLink: A link between two filters.
avfilter.h: Main libavfilter public API header.
AVClass: Describe the class of an AVClass context structure.

AVFrame: This structure describes decoded (raw) audio or video data.
uint8_t *data[AV_NUM_DATA_POINTERS]: pointer to the picture/channel planes.
int linesize[AV_NUM_DATA_POINTERS]: For video, size in bytes of each picture line.

AVPixelFormat: Pixel format.
AVPixFmtDescriptor: Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes of an image.
AVComponentDescriptor comp[4]: Parameters that describe how pixels are packed.
int depth: Number of bits in the component.
AV_PIX_FMT_YUVA420P: planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
AV_PIX_FMT_YUVA422P: planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
AV_PIX_FMT_YUVA444P: planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
#define AV_PIX_FMT_YUVA420P9 / AV_PIX_FMT_YUVA420P10 / AV_PIX_FMT_YUVA420P16
#define AV_PIX_FMT_YUVA422P9 / AV_PIX_FMT_YUVA422P10 / AV_PIX_FMT_YUVA422P16
#define AV_PIX_FMT_YUVA444P9 / AV_PIX_FMT_YUVA444P10 / AV_PIX_FMT_YUVA444P16

#define AVFILTER_FLAG_SLICE_THREADS: The filter supports multithreading by splitting frames into multiple parts and processing them concurrently.
#define AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC: Some filters support a generic "enable" expression option that can be used to enable or disable a filter.
#define NULL_IF_CONFIG_SMALL(x): Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
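Several of the symbols above (av_frame_make_writable, the execute callback, ff_filter_get_nb_threads and ff_filter_frame) are the pieces a slice-threaded filter wires together in its filter_frame callback. A sketch of how that typically looks for this filter, assuming the file's own includes and the LumakeyContext definition; the exact error handling is an assumption rather than a verbatim excerpt:

static int filter_frame(AVFilterLink *link, AVFrame *frame)
{
    AVFilterContext *ctx = link->dst;
    LumakeyContext *s = ctx->priv;
    int ret;

    /* The alpha plane is modified in place, so the frame must be writable. */
    if ((ret = av_frame_make_writable(frame)) < 0) {
        av_frame_free(&frame);
        return ret;
    }

    /* Run the depth-specific slice worker across the available threads. */
    ret = ctx->internal->execute(ctx, s->do_lumakey_slice, frame, NULL,
                                 FFMIN(frame->height, ff_filter_get_nb_threads(ctx)));
    if (ret < 0) {
        av_frame_free(&frame);
        return ret;
    }

    /* Pass the keyed frame on to the next filter in the graph. */
    return ff_filter_frame(ctx->outputs[0], frame);
}

Capping the slice count at the frame height keeps any worker from being handed an empty row range.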