FFmpeg
vf_colormatrix.c
Go to the documentation of this file.
1 /*
2  * ColorMatrix v2.2 for Avisynth 2.5.x
3  *
4  * Copyright (C) 2006-2007 Kevin Stone
5  *
6  * ColorMatrix 1.x is Copyright (C) Wilbert Dijkhof
7  *
8  * This program is free software; you can redistribute it and/or modify it
9  * under the terms of the GNU General Public License as published by the
10  * Free Software Foundation; either version 2 of the License, or (at your
11  * option) any later version.
12  *
13  * This program is distributed in the hope that it will be useful, but
14  * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15  * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16  * License for more details.
17  *
18  * You should have received a copy of the GNU General Public License
19  * along with this program; if not, write to the Free Software Foundation,
20  * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * ColorMatrix 2.0 is based on the original ColorMatrix filter by Wilbert
26  * Dijkhof. It adds the ability to convert between any of: Rec.709, FCC,
27  * Rec.601, and SMPTE 240M. It also makes pre and post clipping optional,
28  * adds an option to use scaled or non-scaled coefficients, and more...
29  */
30 
31 #include <float.h>
32 #include "avfilter.h"
33 #include "internal.h"
34 #include "video.h"
35 #include "libavutil/opt.h"
36 #include "libavutil/pixdesc.h"
37 
/* Convert a double coefficient to 16.16 fixed point, rounding half away
 * from zero; the DBL_EPSILON nudge on the negative side compensates for
 * representation error before truncation (presumably to keep exact .5
 * values symmetric — inherited from the original filter). */
#define NS(n) ((n) < 0 ? (int)((n)*65536.0-0.5+DBL_EPSILON) : (int)((n)*65536.0+0.5))
/* Clip an int into the 0..255 byte range. */
#define CB(n) av_clip_uint8(n)
40 
/* Luma coefficients per standard, ordered {Kg, Kb, Kr} (green, blue, red).
 * Row index matches enum ColorMode and the color_modes[] name table. */
static const double yuv_coeff_luma[5][3] = {
    { +0.7152, +0.0722, +0.2126 }, // Rec.709 (0)
    { +0.5900, +0.1100, +0.3000 }, // FCC (1)
    { +0.5870, +0.1140, +0.2990 }, // Rec.601 (ITU-R BT.470-2/SMPTE 170M) (2)
    { +0.7010, +0.0870, +0.2120 }, // SMPTE 240M (3)
    { +0.6780, +0.0593, +0.2627 }, // Rec.2020 (4)
};
48 
/* Supported color matrices. NONE (-1) means "auto-detect from the input
 * frame" for src and "unset" for dst. The non-negative values index both
 * yuv_coeff_luma[] and color_modes[], and the conversion mode is encoded
 * as source * 5 + dest, hence yuv_convert[25]. */
enum ColorMode {
    COLOR_MODE_NONE = -1,
    COLOR_MODE_BT709,
    COLOR_MODE_FCC,
    COLOR_MODE_BT601,
    COLOR_MODE_SMPTE240M,
    COLOR_MODE_BT2020,
    COLOR_MODE_COUNT
};
58 
59 typedef struct ColorMatrixContext {
60  const AVClass *class;
61  int yuv_convert[25][3][3];
63  int source, dest; ///< ColorMode
64  int mode;
65  int hsub, vsub;
67 
/* Per-frame job context handed to the slice workers via ff_filter_execute(). */
typedef struct ThreadData {
    AVFrame *dst;       // output frame (written by the workers)
    const AVFrame *src; // input frame (read-only)
    /* 16.16 fixed-point coefficients copied from yuv_convert[mode]:
     * c2/c3 weight U/V in the luma correction term, c4/c5 produce the
     * new U plane, c6/c7 the new V plane. */
    int c2;
    int c3;
    int c4;
    int c5;
    int c6;
    int c7;
} ThreadData;
78 
79 #define OFFSET(x) offsetof(ColorMatrixContext, x)
80 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
81 
82 static const AVOption colormatrix_options[] = {
83  { "src", "set source color matrix", OFFSET(source), AV_OPT_TYPE_INT, {.i64=COLOR_MODE_NONE}, COLOR_MODE_NONE, COLOR_MODE_COUNT-1, .flags=FLAGS, .unit="color_mode" },
84  { "dst", "set destination color matrix", OFFSET(dest), AV_OPT_TYPE_INT, {.i64=COLOR_MODE_NONE}, COLOR_MODE_NONE, COLOR_MODE_COUNT-1, .flags=FLAGS, .unit="color_mode" },
85  { "bt709", "set BT.709 colorspace", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_MODE_BT709}, .flags=FLAGS, .unit="color_mode" },
86  { "fcc", "set FCC colorspace ", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_MODE_FCC}, .flags=FLAGS, .unit="color_mode" },
87  { "bt601", "set BT.601 colorspace", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_MODE_BT601}, .flags=FLAGS, .unit="color_mode" },
88  { "bt470", "set BT.470 colorspace", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_MODE_BT601}, .flags=FLAGS, .unit="color_mode" },
89  { "bt470bg", "set BT.470 colorspace", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_MODE_BT601}, .flags=FLAGS, .unit="color_mode" },
90  { "smpte170m", "set SMTPE-170M colorspace", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_MODE_BT601}, .flags=FLAGS, .unit="color_mode" },
91  { "smpte240m", "set SMPTE-240M colorspace", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_MODE_SMPTE240M}, .flags=FLAGS, .unit="color_mode" },
92  { "bt2020", "set BT.2020 colorspace", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_MODE_BT2020}, .flags=FLAGS, .unit="color_mode" },
93  { NULL }
94 };
95 
96 AVFILTER_DEFINE_CLASS(colormatrix);
97 
/* Readability aliases for the forward 3x3 matrix `m` inside inverse3x3(). */
#define ma m[0][0]
#define mb m[0][1]
#define mc m[0][2]
#define md m[1][0]
#define me m[1][1]
#define mf m[1][2]
#define mg m[2][0]
#define mh m[2][1]
#define mi m[2][2]

/* Matching aliases for the inverse matrix `im` (inverse3x3()'s output). */
#define ima im[0][0]
#define imb im[0][1]
#define imc im[0][2]
#define imd im[1][0]
#define ime im[1][1]
#define imf im[1][2]
#define img im[2][0]
#define imh im[2][1]
#define imi im[2][2]
117 
/**
 * Invert a 3x3 matrix using the adjugate (cofactor) method: im = m^-1.
 * No singularity check — callers pass well-conditioned YUV matrices.
 * Written with explicit indices instead of the ma..imi macro aliases.
 */
static void inverse3x3(double im[3][3], double m[3][3])
{
    /* Determinant via cofactor expansion along the first row. */
    double det = m[0][0] * (m[1][1] * m[2][2] - m[1][2] * m[2][1])
               - m[0][1] * (m[1][0] * m[2][2] - m[1][2] * m[2][0])
               + m[0][2] * (m[1][0] * m[2][1] - m[1][1] * m[2][0]);
    const double rdet = 1.0 / det;

    im[0][0] = rdet * (m[1][1] * m[2][2] - m[1][2] * m[2][1]);
    im[0][1] = rdet * (m[0][2] * m[2][1] - m[0][1] * m[2][2]);
    im[0][2] = rdet * (m[0][1] * m[1][2] - m[0][2] * m[1][1]);
    im[1][0] = rdet * (m[1][2] * m[2][0] - m[1][0] * m[2][2]);
    im[1][1] = rdet * (m[0][0] * m[2][2] - m[0][2] * m[2][0]);
    im[1][2] = rdet * (m[0][2] * m[1][0] - m[0][0] * m[1][2]);
    im[2][0] = rdet * (m[1][0] * m[2][1] - m[1][1] * m[2][0]);
    im[2][1] = rdet * (m[0][1] * m[2][0] - m[0][0] * m[2][1]);
    im[2][2] = rdet * (m[0][0] * m[1][1] - m[0][1] * m[1][0]);
}
132 
/**
 * 3x3 matrix product: cm = yuv * rgb.
 * Used to chain "source YUV -> RGB" with "RGB -> destination YUV".
 */
static void solve_coefficients(double cm[3][3], double rgb[3][3], double yuv[3][3])
{
    for (int row = 0; row < 3; row++) {
        for (int col = 0; col < 3; col++) {
            double acc = 0.0;
            for (int k = 0; k < 3; k++)
                acc += yuv[row][k] * rgb[k][col];
            cm[row][col] = acc;
        }
    }
}
140 
142 {
143  ColorMatrixContext *color = ctx->priv;
144  double yuv_coeff[5][3][3];
145  double rgb_coeffd[5][3][3];
146  double yuv_convertd[25][3][3];
147  double bscale, rscale;
148  int v = 0;
149  int i, j, k;
150  for (i = 0; i < 5; i++) {
151  yuv_coeff[i][0][0] = yuv_coeff_luma[i][0];
152  yuv_coeff[i][0][1] = yuv_coeff_luma[i][1];
153  yuv_coeff[i][0][2] = yuv_coeff_luma[i][2];
154  bscale = 0.5 / (yuv_coeff[i][0][1] - 1.0);
155  rscale = 0.5 / (yuv_coeff[i][0][2] - 1.0);
156  yuv_coeff[i][1][0] = bscale * yuv_coeff[i][0][0];
157  yuv_coeff[i][1][1] = 0.5;
158  yuv_coeff[i][1][2] = bscale * yuv_coeff[i][0][2];
159  yuv_coeff[i][2][0] = rscale * yuv_coeff[i][0][0];
160  yuv_coeff[i][2][1] = rscale * yuv_coeff[i][0][1];
161  yuv_coeff[i][2][2] = 0.5;
162  }
163  for (i = 0; i < 5; i++)
164  inverse3x3(rgb_coeffd[i], yuv_coeff[i]);
165  for (i = 0; i < 5; i++) {
166  for (j = 0; j < 5; j++) {
167  solve_coefficients(yuv_convertd[v], rgb_coeffd[i], yuv_coeff[j]);
168  for (k = 0; k < 3; k++) {
169  color->yuv_convert[v][k][0] = NS(yuv_convertd[v][k][0]);
170  color->yuv_convert[v][k][1] = NS(yuv_convertd[v][k][1]);
171  color->yuv_convert[v][k][2] = NS(yuv_convertd[v][k][2]);
172  }
173  if (color->yuv_convert[v][0][0] != 65536 || color->yuv_convert[v][1][0] != 0 ||
174  color->yuv_convert[v][2][0] != 0) {
175  av_log(ctx, AV_LOG_ERROR, "error calculating conversion coefficients\n");
176  }
177  v++;
178  }
179  }
180 }
181 
/* Human-readable names indexed by enum ColorMode (valid for values >= 0). */
static const char * const color_modes[] = {"bt709", "fcc", "bt601", "smpte240m", "bt2020"};
183 
185 {
186  ColorMatrixContext *color = ctx->priv;
187 
188  if (color->dest == COLOR_MODE_NONE) {
189  av_log(ctx, AV_LOG_ERROR, "Unspecified destination color space\n");
190  return AVERROR(EINVAL);
191  }
192 
193  if (color->source == color->dest) {
194  av_log(ctx, AV_LOG_ERROR, "Source and destination color space must not be identical\n");
195  return AVERROR(EINVAL);
196  }
197 
199 
200  return 0;
201 }
202 
/**
 * Convert one horizontal slice of a packed UYVY422 frame.
 * Layout is U0 Y0 V0 Y1 (2 bytes/pixel), so the inner loop consumes
 * 4 bytes = 2 pixels sharing one U/V pair. All arithmetic is 16.16
 * fixed point: 1081344 == 16.5 << 16 (re-adds the luma offset plus
 * rounding), 8421376 == 128.5 << 16 (chroma offset plus rounding).
 */
static int process_slice_uyvy422(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    const ThreadData *td = arg;
    const AVFrame *src = td->src;
    AVFrame *dst = td->dst;
    const int height = src->height;
    const int width = src->width*2; // row width in bytes, not pixels
    const int src_pitch = src->linesize[0];
    const int dst_pitch = dst->linesize[0];
    const int slice_start = (height * jobnr ) / nb_jobs;
    const int slice_end = (height * (jobnr+1)) / nb_jobs;
    const unsigned char *srcp = src->data[0] + slice_start * src_pitch;
    unsigned char *dstp = dst->data[0] + slice_start * dst_pitch;
    const int c2 = td->c2;
    const int c3 = td->c3;
    const int c4 = td->c4;
    const int c5 = td->c5;
    const int c6 = td->c6;
    const int c7 = td->c7;
    int x, y;

    for (y = slice_start; y < slice_end; y++) {
        for (x = 0; x < width; x += 4) {
            const int u = srcp[x + 0] - 128;
            const int v = srcp[x + 2] - 128;
            const int uvval = c2 * u + c3 * v + 1081344;
            dstp[x + 0] = CB((c4 * u + c5 * v + 8421376) >> 16);       // new U
            dstp[x + 1] = CB((65536 * (srcp[x + 1] - 16) + uvval) >> 16); // Y0 corrected
            dstp[x + 2] = CB((c6 * u + c7 * v + 8421376) >> 16);       // new V
            dstp[x + 3] = CB((65536 * (srcp[x + 3] - 16) + uvval) >> 16); // Y1 corrected
        }
        srcp += src_pitch;
        dstp += dst_pitch;
    }

    return 0;
}
240 
241 static int process_slice_yuv444p(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
242 {
243  const ThreadData *td = arg;
244  const AVFrame *src = td->src;
245  AVFrame *dst = td->dst;
246  const int height = src->height;
247  const int width = src->width;
248  const int slice_start = (height * jobnr ) / nb_jobs;
249  const int slice_end = (height * (jobnr+1)) / nb_jobs;
250  const int src_pitchY = src->linesize[0];
251  const int src_pitchUV = src->linesize[1];
252  const unsigned char *srcpU = src->data[1] + slice_start * src_pitchUV;
253  const unsigned char *srcpV = src->data[2] + slice_start * src_pitchUV;
254  const unsigned char *srcpY = src->data[0] + slice_start * src_pitchY;
255  const int dst_pitchY = dst->linesize[0];
256  const int dst_pitchUV = dst->linesize[1];
257  unsigned char *dstpU = dst->data[1] + slice_start * dst_pitchUV;
258  unsigned char *dstpV = dst->data[2] + slice_start * dst_pitchUV;
259  unsigned char *dstpY = dst->data[0] + slice_start * dst_pitchY;
260  const int c2 = td->c2;
261  const int c3 = td->c3;
262  const int c4 = td->c4;
263  const int c5 = td->c5;
264  const int c6 = td->c6;
265  const int c7 = td->c7;
266  int x, y;
267 
268  for (y = slice_start; y < slice_end; y++) {
269  for (x = 0; x < width; x++) {
270  const int u = srcpU[x] - 128;
271  const int v = srcpV[x] - 128;
272  const int uvval = c2 * u + c3 * v + 1081344;
273  dstpY[x] = CB((65536 * (srcpY[x] - 16) + uvval) >> 16);
274  dstpU[x] = CB((c4 * u + c5 * v + 8421376) >> 16);
275  dstpV[x] = CB((c6 * u + c7 * v + 8421376) >> 16);
276  }
277  srcpY += src_pitchY;
278  dstpY += dst_pitchY;
279  srcpU += src_pitchUV;
280  srcpV += src_pitchUV;
281  dstpU += dst_pitchUV;
282  dstpV += dst_pitchUV;
283  }
284 
285  return 0;
286 }
287 
288 static int process_slice_yuv422p(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
289 {
290  const ThreadData *td = arg;
291  const AVFrame *src = td->src;
292  AVFrame *dst = td->dst;
293  const int height = src->height;
294  const int width = src->width;
295  const int slice_start = (height * jobnr ) / nb_jobs;
296  const int slice_end = (height * (jobnr+1)) / nb_jobs;
297  const int src_pitchY = src->linesize[0];
298  const int src_pitchUV = src->linesize[1];
299  const unsigned char *srcpU = src->data[1] + slice_start * src_pitchUV;
300  const unsigned char *srcpV = src->data[2] + slice_start * src_pitchUV;
301  const unsigned char *srcpY = src->data[0] + slice_start * src_pitchY;
302  const int dst_pitchY = dst->linesize[0];
303  const int dst_pitchUV = dst->linesize[1];
304  unsigned char *dstpU = dst->data[1] + slice_start * dst_pitchUV;
305  unsigned char *dstpV = dst->data[2] + slice_start * dst_pitchUV;
306  unsigned char *dstpY = dst->data[0] + slice_start * dst_pitchY;
307  const int c2 = td->c2;
308  const int c3 = td->c3;
309  const int c4 = td->c4;
310  const int c5 = td->c5;
311  const int c6 = td->c6;
312  const int c7 = td->c7;
313  int x, y;
314 
315  for (y = slice_start; y < slice_end; y++) {
316  for (x = 0; x < width; x += 2) {
317  const int u = srcpU[x >> 1] - 128;
318  const int v = srcpV[x >> 1] - 128;
319  const int uvval = c2 * u + c3 * v + 1081344;
320  dstpY[x + 0] = CB((65536 * (srcpY[x + 0] - 16) + uvval) >> 16);
321  dstpY[x + 1] = CB((65536 * (srcpY[x + 1] - 16) + uvval) >> 16);
322  dstpU[x >> 1] = CB((c4 * u + c5 * v + 8421376) >> 16);
323  dstpV[x >> 1] = CB((c6 * u + c7 * v + 8421376) >> 16);
324  }
325  srcpY += src_pitchY;
326  dstpY += dst_pitchY;
327  srcpU += src_pitchUV;
328  srcpV += src_pitchUV;
329  dstpU += dst_pitchUV;
330  dstpV += dst_pitchUV;
331  }
332 
333  return 0;
334 }
335 
/**
 * Convert one horizontal slice of a planar YUV 4:2:0 frame.
 * One chroma sample covers a 2x2 luma block, so slice bounds are computed
 * in chroma rows (height is the chroma-row count) and then scaled back to
 * luma rows; each iteration writes two luma rows (srcpY/dstpY and the
 * following srcpN/dstpN row). 16.16 fixed point as in the other slices.
 * NOTE(review): for odd frame heights FFALIGN rounds up, so the last
 * srcpN/dstpN row lies one past src->height — presumably relies on
 * FFmpeg's padded frame allocation; confirm before reusing elsewhere.
 */
static int process_slice_yuv420p(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    const ThreadData *td = arg;
    const AVFrame *src = td->src;
    AVFrame *dst = td->dst;
    const int height = FFALIGN(src->height, 2) >> 1; // chroma rows
    const int width = src->width;
    const int slice_start = ((height * jobnr ) / nb_jobs) << 1;   // first luma row
    const int slice_end = ((height * (jobnr+1)) / nb_jobs) << 1;  // one past last luma row
    const int src_pitchY = src->linesize[0];
    const int src_pitchUV = src->linesize[1];
    const int dst_pitchY = dst->linesize[0];
    const int dst_pitchUV = dst->linesize[1];
    const unsigned char *srcpY = src->data[0] + src_pitchY * slice_start;
    const unsigned char *srcpU = src->data[1] + src_pitchUV * (slice_start >> 1);
    const unsigned char *srcpV = src->data[2] + src_pitchUV * (slice_start >> 1);
    const unsigned char *srcpN = src->data[0] + src_pitchY * (slice_start + 1); // odd luma row
    unsigned char *dstpU = dst->data[1] + dst_pitchUV * (slice_start >> 1);
    unsigned char *dstpV = dst->data[2] + dst_pitchUV * (slice_start >> 1);
    unsigned char *dstpY = dst->data[0] + dst_pitchY * slice_start;
    unsigned char *dstpN = dst->data[0] + dst_pitchY * (slice_start + 1);
    const int c2 = td->c2;
    const int c3 = td->c3;
    const int c4 = td->c4;
    const int c5 = td->c5;
    const int c6 = td->c6;
    const int c7 = td->c7;
    int x, y;

    for (y = slice_start; y < slice_end; y += 2) {
        for (x = 0; x < width; x += 2) {
            const int u = srcpU[x >> 1] - 128;
            const int v = srcpV[x >> 1] - 128;
            const int uvval = c2 * u + c3 * v + 1081344; // shared luma correction
            dstpY[x + 0] = CB((65536 * (srcpY[x + 0] - 16) + uvval) >> 16);
            dstpY[x + 1] = CB((65536 * (srcpY[x + 1] - 16) + uvval) >> 16);
            dstpN[x + 0] = CB((65536 * (srcpN[x + 0] - 16) + uvval) >> 16);
            dstpN[x + 1] = CB((65536 * (srcpN[x + 1] - 16) + uvval) >> 16);
            dstpU[x >> 1] = CB((c4 * u + c5 * v + 8421376) >> 16);
            dstpV[x >> 1] = CB((c6 * u + c7 * v + 8421376) >> 16);
        }
        srcpY += src_pitchY << 1;
        dstpY += dst_pitchY << 1;
        srcpN += src_pitchY << 1;
        dstpN += dst_pitchY << 1;
        srcpU += src_pitchUV;
        srcpV += src_pitchUV;
        dstpU += dst_pitchUV;
        dstpV += dst_pitchUV;
    }

    return 0;
}
389 
391 {
392  AVFilterContext *ctx = inlink->dst;
393  ColorMatrixContext *color = ctx->priv;
394  const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(inlink->format);
395 
396  color->hsub = pix_desc->log2_chroma_w;
397  color->vsub = pix_desc->log2_chroma_h;
398 
399  av_log(ctx, AV_LOG_VERBOSE, "%s -> %s\n",
400  color_modes[color->source], color_modes[color->dest]);
401 
402  return 0;
403 }
404 
406 {
407  AVFilterContext *ctx = link->dst;
408  ColorMatrixContext *color = ctx->priv;
409  AVFilterLink *outlink = ctx->outputs[0];
410  AVFrame *out;
411  ThreadData td = {0};
412 
413  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
414  if (!out) {
415  av_frame_free(&in);
416  return AVERROR(ENOMEM);
417  }
419 
420  if (color->source == COLOR_MODE_NONE) {
421  enum AVColorSpace cs = in->colorspace;
422  enum ColorMode source;
423 
424  switch(cs) {
425  case AVCOL_SPC_BT709 : source = COLOR_MODE_BT709 ; break;
426  case AVCOL_SPC_FCC : source = COLOR_MODE_FCC ; break;
428  case AVCOL_SPC_BT470BG : source = COLOR_MODE_BT601 ; break;
432  default :
433  av_log(ctx, AV_LOG_ERROR, "Input frame does not specify a supported colorspace, and none has been specified as source either\n");
434  av_frame_free(&out);
435  return AVERROR(EINVAL);
436  }
437  color->mode = source * 5 + color->dest;
438  } else
439  color->mode = color->source * 5 + color->dest;
440 
441  switch(color->dest) {
442  case COLOR_MODE_BT709 : out->colorspace = AVCOL_SPC_BT709 ; break;
443  case COLOR_MODE_FCC : out->colorspace = AVCOL_SPC_FCC ; break;
444  case COLOR_MODE_SMPTE240M: out->colorspace = AVCOL_SPC_SMPTE240M ; break;
445  case COLOR_MODE_BT601 : out->colorspace = AVCOL_SPC_BT470BG ; break;
446  case COLOR_MODE_BT2020 : out->colorspace = AVCOL_SPC_BT2020_NCL; break;
447  }
448 
449  td.src = in;
450  td.dst = out;
451  td.c2 = color->yuv_convert[color->mode][0][1];
452  td.c3 = color->yuv_convert[color->mode][0][2];
453  td.c4 = color->yuv_convert[color->mode][1][1];
454  td.c5 = color->yuv_convert[color->mode][1][2];
455  td.c6 = color->yuv_convert[color->mode][2][1];
456  td.c7 = color->yuv_convert[color->mode][2][2];
457 
458  if (in->format == AV_PIX_FMT_YUV444P)
461  else if (in->format == AV_PIX_FMT_YUV422P)
464  else if (in->format == AV_PIX_FMT_YUV420P)
467  else
470 
471  av_frame_free(&in);
472  return ff_filter_frame(outlink, out);
473 }
474 
/* Single video input pad; the actual conversion runs in filter_frame(). */
static const AVFilterPad colormatrix_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_input,
        .filter_frame = filter_frame,
    },
};
483 
485  .name = "colormatrix",
486  .description = NULL_IF_CONFIG_SMALL("Convert color matrix."),
487  .priv_size = sizeof(ColorMatrixContext),
488  .init = init,
495  .priv_class = &colormatrix_class,
497 };
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:108
td
#define td
Definition: regdef.h:70
ColorMatrixContext::vsub
int vsub
Definition: vf_colormatrix.c:65
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
out
FILE * out
Definition: movenc.c:54
color
Definition: vf_paletteuse.c:511
imh
#define imh
Definition: vf_colormatrix.c:115
u
#define u(width, name, range_min, range_max)
Definition: cbs_h2645.c:250
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:978
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2964
ColorMatrixContext::yuv_convert
int yuv_convert[25][3][3]
Definition: vf_colormatrix.c:61
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
md
#define md
Definition: vf_colormatrix.c:101
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:100
AVFrame::colorspace
enum AVColorSpace colorspace
YUV colorspace type.
Definition: frame.h:667
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:340
ThreadData::c3
int c3
Definition: vf_colormatrix.c:72
pixdesc.h
AVOption
AVOption.
Definition: opt.h:251
mh
#define mh
Definition: vf_colormatrix.c:105
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
solve_coefficients
static void solve_coefficients(double cm[3][3], double rgb[3][3], double yuv[3][3])
Definition: vf_colormatrix.c:133
float.h
COLOR_MODE_COUNT
@ COLOR_MODE_COUNT
Definition: vf_colormatrix.c:56
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:170
video.h
AVCOL_SPC_BT2020_CL
@ AVCOL_SPC_BT2020_CL
ITU-R BT2020 constant luminance system.
Definition: pixfmt.h:611
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:361
AVCOL_SPC_BT470BG
@ AVCOL_SPC_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
Definition: pixfmt.h:605
rgb
Definition: rpzaenc.c:60
process_slice_uyvy422
static int process_slice_uyvy422(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_colormatrix.c:203
ColorMode
ColorMode
Definition: avf_showspectrum.c:54
ThreadData::c7
int c7
Definition: vf_colormatrix.c:76
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:47
ColorMatrixContext::dest
int dest
ColorMode.
Definition: vf_colormatrix.c:63
colormatrix_inputs
static const AVFilterPad colormatrix_inputs[]
Definition: vf_colormatrix.c:475
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
av_cold
#define av_cold
Definition: attributes.h:90
imd
#define imd
Definition: vf_colormatrix.c:111
ff_video_default_filterpad
const AVFilterPad ff_video_default_filterpad[1]
An AVFilterPad array whose only entry has name "default" and is of type AVMEDIA_TYPE_VIDEO.
Definition: video.c:36
width
#define width
ColorMatrixContext::mode
int mode
Definition: vf_colormatrix.c:64
process_slice_yuv420p
static int process_slice_yuv420p(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_colormatrix.c:336
mi
#define mi
Definition: vf_colormatrix.c:106
AVCOL_SPC_SMPTE170M
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
Definition: pixfmt.h:606
calc_coefficients
static void calc_coefficients(AVFilterContext *ctx)
Definition: vf_colormatrix.c:141
slice_end
static int slice_end(AVCodecContext *avctx, AVFrame *pict)
Handle slice ends.
Definition: mpeg12dec.c:1979
ctx
AVFormatContext * ctx
Definition: movenc.c:48
filter_frame
static int filter_frame(AVFilterLink *link, AVFrame *in)
Definition: vf_colormatrix.c:405
COLOR_MODE_FCC
@ COLOR_MODE_FCC
Definition: vf_colormatrix.c:52
mg
#define mg
Definition: vf_colormatrix.c:104
AVPixFmtDescriptor::log2_chroma_w
uint8_t log2_chroma_w
Amount to shift the luma width right to find the chroma width.
Definition: pixdesc.h:80
CB
#define CB(n)
Definition: vf_colormatrix.c:39
yuv_coeff_luma
static const double yuv_coeff_luma[5][3]
Definition: vf_colormatrix.c:41
inverse3x3
static void inverse3x3(double im[3][3], double m[3][3])
Definition: vf_colormatrix.c:118
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: internal.h:192
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
arg
const char * arg
Definition: jacosubdec.c:67
ColorMatrixContext::interlaced
int interlaced
Definition: vf_colormatrix.c:62
ThreadData::dst
AVFrame * dst
Definition: vf_blend.c:56
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
NULL
#define NULL
Definition: coverity.c:32
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:736
me
#define me
Definition: vf_colormatrix.c:102
process_slice_yuv422p
static int process_slice_yuv422p(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_colormatrix.c:288
ColorMatrixContext::hsub
int hsub
Definition: vf_colormatrix.c:65
COLOR_MODE_SMPTE240M
@ COLOR_MODE_SMPTE240M
Definition: vf_colormatrix.c:54
process_slice_yuv444p
static int process_slice_yuv444p(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_colormatrix.c:241
ThreadData::c6
int c6
Definition: vf_colormatrix.c:75
source
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a source
Definition: filter_design.txt:255
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:106
FILTER_PIXFMTS
#define FILTER_PIXFMTS(...)
Definition: internal.h:178
ThreadData::c2
int c2
Definition: vf_colormatrix.c:71
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:427
height
#define height
img
#define img
Definition: vf_colormatrix.c:114
config_input
static int config_input(AVFilterLink *inlink)
Definition: vf_colormatrix.c:390
mb
#define mb
Definition: vf_colormatrix.c:99
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(colormatrix)
internal.h
AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC
#define AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC
Some filters support a generic "enable" expression option that can be used to enable or disable a fil...
Definition: avfilter.h:147
COLOR_MODE_BT709
@ COLOR_MODE_BT709
Definition: vf_colormatrix.c:51
AVCOL_SPC_SMPTE240M
@ AVCOL_SPC_SMPTE240M
derived from 170M primaries and D65 white point, 170M is derived from BT470 System M's primaries
Definition: pixfmt.h:607
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_colormatrix.c:184
imc
#define imc
Definition: vf_colormatrix.c:110
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:255
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:610
AVColorSpace
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:599
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:786
mf
#define mf
Definition: vf_colormatrix.c:103
ThreadData
Used for passing data between threads.
Definition: dsddec.c:69
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:53
AVFilter
Filter definition.
Definition: avfilter.h:166
ff_vf_colormatrix
const AVFilter ff_vf_colormatrix
Definition: vf_colormatrix.c:484
color_modes
static const char *const color_modes[]
Definition: vf_colormatrix.c:182
COLOR_MODE_BT601
@ COLOR_MODE_BT601
Definition: vf_colormatrix.c:53
AV_PIX_FMT_UYVY422
@ AV_PIX_FMT_UYVY422
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
Definition: pixfmt.h:81
AVFrame::height
int height
Definition: frame.h:412
c2
static const uint64_t c2
Definition: murmur3.c:53
ima
#define ima
Definition: vf_colormatrix.c:108
colormatrix_options
static const AVOption colormatrix_options[]
Definition: vf_colormatrix.c:82
AVCOL_SPC_FCC
@ AVCOL_SPC_FCC
FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
Definition: pixfmt.h:604
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:225
avfilter.h
ThreadData::c4
int c4
Definition: vf_colormatrix.c:73
cm
#define cm
Definition: dvbsubdec.c:39
imf
#define imf
Definition: vf_colormatrix.c:113
COLOR_MODE_NONE
@ COLOR_MODE_NONE
Definition: vf_colormatrix.c:50
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
ColorMatrixContext::source
int source
Definition: vf_colormatrix.c:63
AVFilterContext
An instance of a filter.
Definition: avfilter.h:397
ColorMatrixContext
Definition: vf_colormatrix.c:59
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:117
ime
#define ime
Definition: vf_colormatrix.c:112
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
ThreadData::c5
int c5
Definition: vf_colormatrix.c:74
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
COLOR_MODE_BT2020
@ COLOR_MODE_BT2020
Definition: vf_colormatrix.c:55
imb
#define imb
Definition: vf_colormatrix.c:109
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: internal.h:193
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
ThreadData::src
const AVFrame * src
Definition: vf_colormatrix.c:70
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:385
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
ma
#define ma
Definition: vf_colormatrix.c:98
FLAGS
#define FLAGS
Definition: vf_colormatrix.c:80
NS
#define NS(n)
Definition: vf_colormatrix.c:38
ff_filter_execute
static av_always_inline int ff_filter_execute(AVFilterContext *ctx, avfilter_action_func *func, void *arg, int *ret, int nb_jobs)
Definition: internal.h:144
AVCOL_SPC_BT709
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
Definition: pixfmt.h:601
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:234
AVPixFmtDescriptor::log2_chroma_h
uint8_t log2_chroma_h
Amount to shift the luma height right to find the chroma height.
Definition: pixdesc.h:89
mc
#define mc
Definition: vf_colormatrix.c:100
OFFSET
#define OFFSET(x)
Definition: vf_colormatrix.c:79
imi
#define imi
Definition: vf_colormatrix.c:116