FFmpeg
vf_scale_npp.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 /**
20  * @file
21  * scale video filter
22  */
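/* Example usage, assuming an FFmpeg build configured with --enable-libnpp and
 * frames that are already in CUDA memory (e.g. decoded with -hwaccel cuda
 * -hwaccel_output_format cuda):
 *
 *   ffmpeg -hwaccel cuda -hwaccel_output_format cuda -i in.mp4 \
 *          -vf scale_npp=w=1280:h=720:format=nv12:interp_algo=lanczos \
 *          -c:v h264_nvenc out.mp4
 */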
23 
24 #include <nppi.h>
25 #include <stdio.h>
26 #include <string.h>
27 
28 #include "libavutil/hwcontext.h"
29 #include "libavutil/hwcontext_cuda_internal.h"
30 #include "libavutil/cuda_check.h"
31 #include "libavutil/internal.h"
32 #include "libavutil/mem.h"
33 #include "libavutil/opt.h"
34 #include "libavutil/parseutils.h"
35 #include "libavutil/eval.h"
36 #include "libavutil/pixdesc.h"
37 
38 #include "avfilter.h"
39 #include "filters.h"
40 #include "formats.h"
41 #include "scale_eval.h"
42 #include "video.h"
43 
44 #define CHECK_CU(x) FF_CUDA_CHECK_DL(ctx, device_hwctx->internal->cuda_dl, x)
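/* CHECK_CU() funnels calls into the dynamically loaded CUDA driver API through
 * ff_cuda_check(): on failure it logs the CUDA error via the filter context and
 * returns a negative AVERROR value, so callers can simply propagate the result. */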
45 
46 static const enum AVPixelFormat supported_formats[] = {
47  AV_PIX_FMT_YUV420P,
48  AV_PIX_FMT_NV12,
49  AV_PIX_FMT_YUV444P,
50  AV_PIX_FMT_YUVA420P,
51 };
52 
53 static const enum AVPixelFormat deinterleaved_formats[][2] = {
54  { AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P },
55 };
56 
57 enum ScaleStage {
58  STAGE_DEINTERLEAVE,
59  STAGE_RESIZE,
60  STAGE_INTERLEAVE,
61  STAGE_NB,
62 };
63 
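/* A scale_npp conversion is broken into up to three GPU stages: interleaved
 * chroma input (NV12) is first split into separate planes, every plane is then
 * resized, and the result is re-interleaved into the requested output format.
 * init_processing_chain() below decides which of these stages are actually
 * needed for a given input/output combination. */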
64 typedef struct NPPScaleStageContext {
65  int stage_needed;
66  enum AVPixelFormat in_fmt;
67  enum AVPixelFormat out_fmt;
68 
69  struct {
70  int width;
71  int height;
72  } planes_in[4], planes_out[4];
73 
74  AVBufferRef *frames_ctx;
75  AVFrame *frame;
76 } NPPScaleStageContext;
77 
78 static const char *const var_names[] = {
79  "in_w", "iw",
80  "in_h", "ih",
81  "out_w", "ow",
82  "out_h", "oh",
83  "a",
84  "sar",
85  "dar",
86  "n",
87  "t",
88 #if FF_API_FRAME_PKT
89  "pos",
90 #endif
91  "main_w",
92  "main_h",
93  "main_a",
94  "main_sar",
95  "main_dar", "mdar",
96  "main_n",
97  "main_t",
98 #if FF_API_FRAME_PKT
99  "main_pos",
100 #endif
101  NULL
102 };
103 
104 enum var_name {
105  VAR_IN_W, VAR_IW,
106  VAR_IN_H, VAR_IH,
107  VAR_OUT_W, VAR_OW,
108  VAR_OUT_H, VAR_OH,
109  VAR_A,
110  VAR_SAR,
111  VAR_DAR,
112  VAR_N,
113  VAR_T,
114 #if FF_API_FRAME_PKT
115  VAR_POS,
116 #endif
117  VAR_S2R_MAIN_W,
118  VAR_S2R_MAIN_H,
119  VAR_S2R_MAIN_A,
120  VAR_S2R_MAIN_SAR,
121  VAR_S2R_MAIN_DAR, VAR_S2R_MDAR,
122  VAR_S2R_MAIN_N,
123  VAR_S2R_MAIN_T,
124 #if FF_API_FRAME_PKT
125  VAR_S2R_MAIN_POS,
126 #endif
127  VARS_NB
128 };
129 
130 enum EvalMode {
131  EVAL_MODE_INIT,
132  EVAL_MODE_FRAME,
133  EVAL_MODE_NB
134 };
135 
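/* eval=init evaluates the width/height expressions once, when the output link
 * is configured; eval=frame re-evaluates them for each incoming frame, which
 * allows per-frame variables such as 'n' and 't' to be used. */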
136 typedef struct NPPScaleContext {
137  const AVClass *class;
138 
139  NPPScaleStageContext stages[STAGE_NB];
140  AVFrame *tmp_frame;
141  int passthrough;
142 
143  int shift_width, shift_height;
144 
145  /**
146  * New dimensions. Special values are:
147  * 0 = original width/height
148  * -1 = keep original aspect
149  */
150  int w, h;
151 
152  /**
153  * Output sw format. AV_PIX_FMT_NONE for no conversion.
154  */
155  enum AVPixelFormat format;
156 
157  char *w_expr; ///< width expression string
158  char *h_expr; ///< height expression string
159  char *format_str;
160 
161  int force_original_aspect_ratio;
162  int force_divisible_by;
163 
164  int interp_algo;
165 
166  char* size_str;
167 
168  AVExpr* w_pexpr;
169  AVExpr* h_pexpr;
170 
171  double var_values[VARS_NB];
172 
173  int eval_mode;
174 } NPPScaleContext;
175 
176 const FFFilter ff_vf_scale2ref_npp;
177 #define IS_SCALE2REF(ctx) ((ctx)->filter == &ff_vf_scale2ref_npp.p)
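/* scale_npp and scale2ref_npp are built from the same callbacks; IS_SCALE2REF()
 * tells them which filter they are currently running as, since scale2ref_npp
 * has a second "ref" input and additionally exposes the main_* variables. */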
178 
179 static int config_props(AVFilterLink *outlink);
180 
181 static int check_exprs(AVFilterContext *ctx)
182 {
183  NPPScaleContext* scale = ctx->priv;
184  unsigned vars_w[VARS_NB] = {0}, vars_h[VARS_NB] = {0};
185 
186  if (!scale->w_pexpr && !scale->h_pexpr)
187  return AVERROR(EINVAL);
188 
189  if (scale->w_pexpr)
190  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
191  if (scale->h_pexpr)
192  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
193 
194  if (vars_w[VAR_OUT_W] || vars_w[VAR_OW]) {
195  av_log(ctx, AV_LOG_ERROR, "Width expression cannot be self-referencing: '%s'.\n", scale->w_expr);
196  return AVERROR(EINVAL);
197  }
198 
199  if (vars_h[VAR_OUT_H] || vars_h[VAR_OH]) {
200  av_log(ctx, AV_LOG_ERROR, "Height expression cannot be self-referencing: '%s'.\n", scale->h_expr);
201  return AVERROR(EINVAL);
202  }
203 
204  if ((vars_w[VAR_OUT_H] || vars_w[VAR_OH]) &&
205  (vars_h[VAR_OUT_W] || vars_h[VAR_OW])) {
206  av_log(ctx, AV_LOG_WARNING, "Circular references detected for width '%s' and height '%s' - possibly invalid.\n", scale->w_expr, scale->h_expr);
207  }
208 
209  if (!IS_SCALE2REF(ctx) &&
210  (vars_w[VAR_S2R_MAIN_W] || vars_h[VAR_S2R_MAIN_W] ||
211  vars_w[VAR_S2R_MAIN_H] || vars_h[VAR_S2R_MAIN_H] ||
212  vars_w[VAR_S2R_MAIN_A] || vars_h[VAR_S2R_MAIN_A] ||
213  vars_w[VAR_S2R_MAIN_SAR] || vars_h[VAR_S2R_MAIN_SAR] ||
214  vars_w[VAR_S2R_MAIN_DAR] || vars_h[VAR_S2R_MAIN_DAR] ||
215  vars_w[VAR_S2R_MDAR] || vars_h[VAR_S2R_MDAR] ||
216  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
217  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T]
218 #if FF_API_FRAME_PKT
219  || vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]
220 #endif
221  )) {
222  av_log(ctx, AV_LOG_ERROR, "Expressions with scale2ref_npp variables are not valid in scale_npp filter.\n");
223  return AVERROR(EINVAL);
224  }
225 
226  if (scale->eval_mode == EVAL_MODE_INIT &&
227  (vars_w[VAR_N] || vars_h[VAR_N] ||
228  vars_w[VAR_T] || vars_h[VAR_T] ||
229 #if FF_API_FRAME_PKT
230  vars_w[VAR_POS] || vars_h[VAR_POS] ||
231 #endif
232  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
233  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T]
234 #if FF_API_FRAME_PKT
235  || vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]
236 #endif
237  ) ) {
238  av_log(ctx, AV_LOG_ERROR, "Expressions with frame variables 'n', 't', are not valid in init eval_mode.\n");
239  return AVERROR(EINVAL);
240  }
241 
242  return 0;
243 }
244 
245 static int nppscale_parse_expr(AVFilterContext* ctx, char* str_expr,
246  AVExpr** pexpr_ptr, const char* var,
247  const char* args)
248 {
249  NPPScaleContext* scale = ctx->priv;
250  int ret, is_inited = 0;
251  char* old_str_expr = NULL;
252  AVExpr* old_pexpr = NULL;
253 
254  if (str_expr) {
255  old_str_expr = av_strdup(str_expr);
256  if (!old_str_expr)
257  return AVERROR(ENOMEM);
258  av_opt_set(scale, var, args, 0);
259  }
260 
261  if (*pexpr_ptr) {
262  old_pexpr = *pexpr_ptr;
263  *pexpr_ptr = NULL;
264  is_inited = 1;
265  }
266 
267  ret = av_expr_parse(pexpr_ptr, args, var_names, NULL, NULL, NULL, NULL, 0,
268  ctx);
269  if (ret < 0) {
270  av_log(ctx, AV_LOG_ERROR, "Cannot parse expression for %s: '%s'\n", var,
271  args);
272  goto revert;
273  }
274 
275  ret = check_exprs(ctx);
276  if (ret < 0)
277  goto revert;
278 
279  if (is_inited && (ret = config_props(ctx->outputs[0])) < 0)
280  goto revert;
281 
282  av_expr_free(old_pexpr);
283  old_pexpr = NULL;
284  av_freep(&old_str_expr);
285 
286  return 0;
287 
288 revert:
289  av_expr_free(*pexpr_ptr);
290  *pexpr_ptr = NULL;
291  if (old_str_expr) {
292  av_opt_set(scale, var, old_str_expr, 0);
293  av_free(old_str_expr);
294  }
295  if (old_pexpr)
296  *pexpr_ptr = old_pexpr;
297 
298  return ret;
299 }
300 
301 static av_cold int nppscale_init(AVFilterContext *ctx)
302 {
303  NPPScaleContext* scale = ctx->priv;
304  int i, ret;
305 
306  if (!strcmp(scale->format_str, "same")) {
307  scale->format = AV_PIX_FMT_NONE;
308  } else {
309  scale->format = av_get_pix_fmt(scale->format_str);
310  if (scale->format == AV_PIX_FMT_NONE) {
311  av_log(ctx, AV_LOG_ERROR, "Unrecognized pixel format: %s\n", scale->format_str);
312  return AVERROR(EINVAL);
313  }
314  }
315 
316  if (scale->size_str && (scale->w_expr || scale->h_expr)) {
317  av_log(ctx, AV_LOG_ERROR,
318  "Size and width/height exprs cannot be set at the same time.\n");
319  return AVERROR(EINVAL);
320  }
321 
322  if (scale->w_expr && !scale->h_expr)
323  FFSWAP(char*, scale->w_expr, scale->size_str);
324 
325  if (scale->size_str) {
326  char buf[32];
327  ret = av_parse_video_size(&scale->w, &scale->h, scale->size_str);
328  if (0 > ret) {
329  av_log(ctx, AV_LOG_ERROR, "Invalid size '%s'\n", scale->size_str);
330  return ret;
331  }
332 
333  snprintf(buf, sizeof(buf) - 1, "%d", scale->w);
334  ret = av_opt_set(scale, "w", buf, 0);
335  if (ret < 0)
336  return ret;
337 
338  snprintf(buf, sizeof(buf) - 1, "%d", scale->h);
339  ret = av_opt_set(scale, "h", buf, 0);
340  if (ret < 0)
341  return ret;
342  }
343 
344  if (!scale->w_expr) {
345  ret = av_opt_set(scale, "w", "iw", 0);
346  if (ret < 0)
347  return ret;
348  }
349 
350  if (!scale->h_expr) {
351  ret = av_opt_set(scale, "h", "ih", 0);
352  if (ret < 0)
353  return ret;
354  }
355 
356  ret = nppscale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
357  if (ret < 0)
358  return ret;
359 
360  ret = nppscale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
361  if (ret < 0)
362  return ret;
363 
364  for (i = 0; i < FF_ARRAY_ELEMS(scale->stages); i++) {
365  scale->stages[i].frame = av_frame_alloc();
366  if (!scale->stages[i].frame)
367  return AVERROR(ENOMEM);
368  }
369  scale->tmp_frame = av_frame_alloc();
370  if (!scale->tmp_frame)
371  return AVERROR(ENOMEM);
372 
373  return 0;
374 }
375 
376 static int nppscale_eval_dimensions(AVFilterContext *ctx)
377 {
378  NPPScaleContext* scale = ctx->priv;
379  const char scale2ref = IS_SCALE2REF(ctx);
380  const AVFilterLink* inlink = ctx->inputs[scale2ref ? 1 : 0];
381  char* expr;
382  int eval_w, eval_h;
383  int ret;
384  double res;
385 
386  scale->var_values[VAR_IN_W] = scale->var_values[VAR_IW] = inlink->w;
387  scale->var_values[VAR_IN_H] = scale->var_values[VAR_IH] = inlink->h;
388  scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = NAN;
389  scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = NAN;
390  scale->var_values[VAR_A] = (double)inlink->w / inlink->h;
391  scale->var_values[VAR_SAR] = inlink->sample_aspect_ratio.num ?
392  (double)inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
393  scale->var_values[VAR_DAR] = scale->var_values[VAR_A] * scale->var_values[VAR_SAR];
394 
395  if (scale2ref) {
396  const AVFilterLink* main_link = ctx->inputs[0];
397 
398  scale->var_values[VAR_S2R_MAIN_W] = main_link->w;
399  scale->var_values[VAR_S2R_MAIN_H] = main_link->h;
400  scale->var_values[VAR_S2R_MAIN_A] = (double)main_link->w / main_link->h;
401  scale->var_values[VAR_S2R_MAIN_SAR] = main_link->sample_aspect_ratio.num ?
402  (double)main_link->sample_aspect_ratio.num / main_link->sample_aspect_ratio.den : 1;
403  scale->var_values[VAR_S2R_MAIN_DAR] = scale->var_values[VAR_S2R_MDAR] =
404  scale->var_values[VAR_S2R_MAIN_A] * scale->var_values[VAR_S2R_MAIN_SAR];
405  }
406 
407  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
408  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int)res == 0 ? inlink->w : (int)res;
409 
410  res = av_expr_eval(scale->h_pexpr, scale->var_values, NULL);
411  if (isnan(res)) {
412  expr = scale->h_expr;
413  ret = AVERROR(EINVAL);
414  goto fail;
415  }
416  eval_h = scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = (int)res == 0 ? inlink->h : (int)res;
417 
418  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
419  if (isnan(res)) {
420  expr = scale->w_expr;
421  ret = AVERROR(EINVAL);
422  goto fail;
423  }
424  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int)res == 0 ? inlink->w : (int)res;
425 
426  scale->w = eval_w;
427  scale->h = eval_h;
428 
429  return 0;
430 
431 fail:
432  av_log(ctx, AV_LOG_ERROR, "Error when evaluating the expression '%s'.\n",
433  expr);
434  return ret;
435 }
436 
437 static void nppscale_uninit(AVFilterContext *ctx)
438 {
439  NPPScaleContext *s = ctx->priv;
440  int i;
441 
442  for (i = 0; i < FF_ARRAY_ELEMS(s->stages); i++) {
443  av_frame_free(&s->stages[i].frame);
444  av_buffer_unref(&s->stages[i].frames_ctx);
445  }
446  av_frame_free(&s->tmp_frame);
447 
448  av_expr_free(s->w_pexpr);
449  av_expr_free(s->h_pexpr);
450  s->w_pexpr = s->h_pexpr = NULL;
451 }
452 
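/* Set up one processing stage: derive per-plane dimensions from the chroma
 * subsampling of the stage's input/output formats, allocate a CUDA frames
 * context for the stage output (dimensions aligned to 32) and grab one
 * intermediate frame from that pool. */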
453 static int init_stage(NPPScaleStageContext *stage, AVBufferRef *device_ctx)
454 {
455  AVBufferRef *out_ref = NULL;
456  AVHWFramesContext *out_ctx;
457  int in_sw, in_sh, out_sw, out_sh;
458  int ret, i;
459 
460  av_pix_fmt_get_chroma_sub_sample(stage->in_fmt, &in_sw, &in_sh);
461  av_pix_fmt_get_chroma_sub_sample(stage->out_fmt, &out_sw, &out_sh);
462  if (!stage->planes_out[0].width) {
463  stage->planes_out[0].width = stage->planes_in[0].width;
464  stage->planes_out[0].height = stage->planes_in[0].height;
465  }
466 
467  for (i = 1; i < FF_ARRAY_ELEMS(stage->planes_in); i++) {
468  stage->planes_in[i].width = stage->planes_in[0].width >> in_sw;
469  stage->planes_in[i].height = stage->planes_in[0].height >> in_sh;
470  stage->planes_out[i].width = stage->planes_out[0].width >> out_sw;
471  stage->planes_out[i].height = stage->planes_out[0].height >> out_sh;
472  }
473 
474  if (AV_PIX_FMT_YUVA420P == stage->in_fmt) {
475  stage->planes_in[3].width = stage->planes_in[0].width;
476  stage->planes_in[3].height = stage->planes_in[0].height;
477  stage->planes_out[3].width = stage->planes_out[0].width;
478  stage->planes_out[3].height = stage->planes_out[0].height;
479  }
480 
481  out_ref = av_hwframe_ctx_alloc(device_ctx);
482  if (!out_ref)
483  return AVERROR(ENOMEM);
484  out_ctx = (AVHWFramesContext*)out_ref->data;
485 
486  out_ctx->format = AV_PIX_FMT_CUDA;
487  out_ctx->sw_format = stage->out_fmt;
488  out_ctx->width = FFALIGN(stage->planes_out[0].width, 32);
489  out_ctx->height = FFALIGN(stage->planes_out[0].height, 32);
490 
491  ret = av_hwframe_ctx_init(out_ref);
492  if (ret < 0)
493  goto fail;
494 
495  av_frame_unref(stage->frame);
496  ret = av_hwframe_get_buffer(out_ref, stage->frame, 0);
497  if (ret < 0)
498  goto fail;
499 
500  stage->frame->width = stage->planes_out[0].width;
501  stage->frame->height = stage->planes_out[0].height;
502 
503  av_buffer_unref(&stage->frames_ctx);
504  stage->frames_ctx = out_ref;
505 
506  return 0;
507 fail:
508  av_buffer_unref(&out_ref);
509  return ret;
510 }
511 
512 static int format_is_supported(enum AVPixelFormat fmt)
513 {
514  int i;
515 
516  for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++)
517  if (supported_formats[i] == fmt)
518  return 1;
519  return 0;
520 }
521 
522 static enum AVPixelFormat get_deinterleaved_format(enum AVPixelFormat fmt)
523 {
524  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(fmt);
525  int i, planes;
526 
527  planes = av_pix_fmt_count_planes(fmt);
528  if (planes == desc->nb_components)
529  return fmt;
530  for (i = 0; i < FF_ARRAY_ELEMS(deinterleaved_formats); i++)
531  if (deinterleaved_formats[i][0] == fmt)
532  return deinterleaved_formats[i][1];
533  return AV_PIX_FMT_NONE;
534 }
535 
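/* Work out which stages this conversion needs: none at all (passthrough) when
 * size and software format are unchanged, otherwise a resize stage, plus
 * deinterleave/interleave stages whenever input or output uses interleaved
 * chroma (NV12). The "super" interpolation algorithm is only usable for
 * downscaling, so it is replaced by lanczos or cubic otherwise. */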
536 static int init_processing_chain(AVFilterContext *ctx, int in_width, int in_height,
537  int out_width, int out_height)
538 {
539  NPPScaleContext *s = ctx->priv;
540  FilterLink *inl = ff_filter_link(ctx->inputs[0]);
541  FilterLink *outl = ff_filter_link(ctx->outputs[0]);
542 
543  AVHWFramesContext *in_frames_ctx;
544 
545  enum AVPixelFormat in_format;
546  enum AVPixelFormat out_format;
547  enum AVPixelFormat in_deinterleaved_format;
548  enum AVPixelFormat out_deinterleaved_format;
549 
550  int i, ret, last_stage = -1;
551 
552  /* check that we have a hw context */
553  if (!inl->hw_frames_ctx) {
554  av_log(ctx, AV_LOG_ERROR, "No hw context provided on input\n");
555  return AVERROR(EINVAL);
556  }
557  in_frames_ctx = (AVHWFramesContext*)inl->hw_frames_ctx->data;
558  in_format = in_frames_ctx->sw_format;
559  out_format = (s->format == AV_PIX_FMT_NONE) ? in_format : s->format;
560 
561  if (!format_is_supported(in_format)) {
562  av_log(ctx, AV_LOG_ERROR, "Unsupported input format: %s\n",
563  av_get_pix_fmt_name(in_format));
564  return AVERROR(ENOSYS);
565  }
566  if (!format_is_supported(out_format)) {
567  av_log(ctx, AV_LOG_ERROR, "Unsupported output format: %s\n",
568  av_get_pix_fmt_name(out_format));
569  return AVERROR(ENOSYS);
570  }
571 
572  in_deinterleaved_format = get_deinterleaved_format(in_format);
573  out_deinterleaved_format = get_deinterleaved_format(out_format);
574  if (in_deinterleaved_format == AV_PIX_FMT_NONE ||
575  out_deinterleaved_format == AV_PIX_FMT_NONE)
576  return AVERROR_BUG;
577 
578  /* figure out which stages need to be done */
579  if (in_width != out_width || in_height != out_height ||
580  in_deinterleaved_format != out_deinterleaved_format) {
581  s->stages[STAGE_RESIZE].stage_needed = 1;
582 
583  if (s->interp_algo == NPPI_INTER_SUPER &&
584  (out_width > in_width && out_height > in_height)) {
585  s->interp_algo = NPPI_INTER_LANCZOS;
586  av_log(ctx, AV_LOG_WARNING, "super-sampling not supported for output dimensions, using lanczos instead.\n");
587  }
588  if (s->interp_algo == NPPI_INTER_SUPER &&
589  !(out_width < in_width && out_height < in_height)) {
590  s->interp_algo = NPPI_INTER_CUBIC;
591  av_log(ctx, AV_LOG_WARNING, "super-sampling not supported for output dimensions, using cubic instead.\n");
592  }
593  }
594 
595  if (!s->stages[STAGE_RESIZE].stage_needed && in_format == out_format)
596  s->passthrough = 1;
597 
598  if (!s->passthrough) {
599  if (in_format != in_deinterleaved_format)
600  s->stages[STAGE_DEINTERLEAVE].stage_needed = 1;
601  if (out_format != out_deinterleaved_format)
602  s->stages[STAGE_INTERLEAVE].stage_needed = 1;
603  }
604 
605  s->stages[STAGE_DEINTERLEAVE].in_fmt = in_format;
606  s->stages[STAGE_DEINTERLEAVE].out_fmt = in_deinterleaved_format;
607  s->stages[STAGE_DEINTERLEAVE].planes_in[0].width = in_width;
608  s->stages[STAGE_DEINTERLEAVE].planes_in[0].height = in_height;
609 
610  s->stages[STAGE_RESIZE].in_fmt = in_deinterleaved_format;
611  s->stages[STAGE_RESIZE].out_fmt = out_deinterleaved_format;
612  s->stages[STAGE_RESIZE].planes_in[0].width = in_width;
613  s->stages[STAGE_RESIZE].planes_in[0].height = in_height;
614  s->stages[STAGE_RESIZE].planes_out[0].width = out_width;
615  s->stages[STAGE_RESIZE].planes_out[0].height = out_height;
616 
617  s->stages[STAGE_INTERLEAVE].in_fmt = out_deinterleaved_format;
618  s->stages[STAGE_INTERLEAVE].out_fmt = out_format;
619  s->stages[STAGE_INTERLEAVE].planes_in[0].width = out_width;
620  s->stages[STAGE_INTERLEAVE].planes_in[0].height = out_height;
621 
622  /* init the hardware contexts */
623  for (i = 0; i < FF_ARRAY_ELEMS(s->stages); i++) {
624  if (!s->stages[i].stage_needed)
625  continue;
626 
627  ret = init_stage(&s->stages[i], in_frames_ctx->device_ref);
628  if (ret < 0)
629  return ret;
630 
631  last_stage = i;
632  }
633 
634  if (last_stage >= 0)
635  outl->hw_frames_ctx = av_buffer_ref(s->stages[last_stage].frames_ctx);
636  else
637  outl->hw_frames_ctx = av_buffer_ref(inl->hw_frames_ctx);
638 
639  if (!outl->hw_frames_ctx)
640  return AVERROR(ENOMEM);
641 
642  return 0;
643 }
644 
645 static int config_props(AVFilterLink *outlink)
646 {
647  AVFilterContext *ctx = outlink->src;
648  AVFilterLink *inlink0 = outlink->src->inputs[0];
649  AVFilterLink *inlink = IS_SCALE2REF(ctx) ?
650  outlink->src->inputs[1] :
651  outlink->src->inputs[0];
652  NPPScaleContext *s = ctx->priv;
653  int ret;
654 
655  if ((ret = nppscale_eval_dimensions(ctx)) < 0)
656  goto fail;
657 
658  ff_scale_adjust_dimensions(inlink, &s->w, &s->h,
659  s->force_original_aspect_ratio,
660  s->force_divisible_by);
661 
662  if (s->w > INT_MAX || s->h > INT_MAX ||
663  (s->h * inlink->w) > INT_MAX ||
664  (s->w * inlink->h) > INT_MAX)
665  av_log(ctx, AV_LOG_ERROR, "Rescaled value for width or height is too big.\n");
666 
667  outlink->w = s->w;
668  outlink->h = s->h;
669 
670  ret = init_processing_chain(ctx, inlink0->w, inlink0->h, outlink->w, outlink->h);
671  if (ret < 0)
672  return ret;
673 
674  av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d -> w:%d h:%d\n",
675  inlink->w, inlink->h, outlink->w, outlink->h);
676 
677  if (inlink->sample_aspect_ratio.num)
678  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h*inlink->w,
679  outlink->w*inlink->h},
680  inlink->sample_aspect_ratio);
681  else
682  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
683 
684  return 0;
685 
686 fail:
687  return ret;
688 }
689 
690 static int config_props_ref(AVFilterLink *outlink)
691 {
692  FilterLink *outl = ff_filter_link(outlink);
693  AVFilterLink *inlink = outlink->src->inputs[1];
694  FilterLink *inl = ff_filter_link(inlink);
695  FilterLink *ol = ff_filter_link(outlink);
696 
697  outlink->w = inlink->w;
698  outlink->h = inlink->h;
699  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
700  outlink->time_base = inlink->time_base;
701  ol->frame_rate = inl->frame_rate;
702 
703  outl->hw_frames_ctx = av_buffer_ref(inl->hw_frames_ctx);
704 
705  return 0;
706 }
707 
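/* Split interleaved chroma into separate planes; only NV12 -> YUV420P is
 * handled here, via nppiYCbCr420_8u_P2P3R(). */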
708 static int nppscale_deinterleave(AVFilterContext *ctx, NPPScaleStageContext *stage,
709  AVFrame *out, AVFrame *in)
710 {
711  AVHWFramesContext *in_frames_ctx = (AVHWFramesContext*)in->hw_frames_ctx->data;
712  NppStatus err;
713 
714  switch (in_frames_ctx->sw_format) {
715  case AV_PIX_FMT_NV12:
716  err = nppiYCbCr420_8u_P2P3R(in->data[0], in->linesize[0],
717  in->data[1], in->linesize[1],
718  out->data, out->linesize,
719  (NppiSize){ in->width, in->height });
720  break;
721  default:
722  return AVERROR_BUG;
723  }
724  if (err != NPP_SUCCESS) {
725  av_log(ctx, AV_LOG_ERROR, "NPP deinterleave error: %d\n", err);
726  return AVERROR_UNKNOWN;
727  }
728 
729  return 0;
730 }
731 
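/* Resize each plane of the planar input independently with
 * nppiResizeSqrPixel_8u_C1R(), using the interpolation algorithm selected
 * through the interp_algo option. */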
732 static int nppscale_resize(AVFilterContext *ctx, NPPScaleStageContext *stage,
733  AVFrame *out, AVFrame *in)
734 {
735  NPPScaleContext *s = ctx->priv;
736  NppStatus err;
737  int i;
738 
739  for (i = 0; i < FF_ARRAY_ELEMS(stage->planes_in) && i < FF_ARRAY_ELEMS(in->data) && in->data[i]; i++) {
740  int iw = stage->planes_in[i].width;
741  int ih = stage->planes_in[i].height;
742  int ow = stage->planes_out[i].width;
743  int oh = stage->planes_out[i].height;
744 
745  err = nppiResizeSqrPixel_8u_C1R(in->data[i], (NppiSize){ iw, ih },
746  in->linesize[i], (NppiRect){ 0, 0, iw, ih },
747  out->data[i], out->linesize[i],
748  (NppiRect){ 0, 0, ow, oh },
749  (double)ow / iw, (double)oh / ih,
750  0.0, 0.0, s->interp_algo);
751  if (err != NPP_SUCCESS) {
752  av_log(ctx, AV_LOG_ERROR, "NPP resize error: %d\n", err);
753  return AVERROR_UNKNOWN;
754  }
755  }
756 
757  return 0;
758 }
759 
760 static int nppscale_interleave(AVFilterContext *ctx, NPPScaleStageContext *stage,
761  AVFrame *out, AVFrame *in)
762 {
763  AVHWFramesContext *out_frames_ctx = (AVHWFramesContext*)out->hw_frames_ctx->data;
764  NppStatus err;
765 
766  switch (out_frames_ctx->sw_format) {
767  case AV_PIX_FMT_NV12:
768  err = nppiYCbCr420_8u_P3P2R((const uint8_t**)in->data,
769  in->linesize,
770  out->data[0], out->linesize[0],
771  out->data[1], out->linesize[1],
772  (NppiSize){ in->width, in->height });
773  break;
774  default:
775  return AVERROR_BUG;
776  }
777  if (err != NPP_SUCCESS) {
778  av_log(ctx, AV_LOG_ERROR, "NPP deinterleave error: %d\n", err);
779  return AVERROR_UNKNOWN;
780  }
781 
782  return 0;
783 }
784 
785 static int (*const nppscale_process[])(AVFilterContext *ctx, NPPScaleStageContext *stage,
786  AVFrame *out, AVFrame *in) = {
787  [STAGE_DEINTERLEAVE] = nppscale_deinterleave,
788  [STAGE_RESIZE] = nppscale_resize,
789  [STAGE_INTERLEAVE] = nppscale_interleave,
790 };
791 
792 static int nppscale_scale(AVFilterLink *link, AVFrame *out, AVFrame *in)
793 {
794  FilterLink *inl = ff_filter_link(link);
795  AVFilterContext *ctx = link->dst;
796  NPPScaleContext *s = ctx->priv;
797  AVFilterLink *outlink = ctx->outputs[0];
798  AVFrame *src = in;
799  char buf[32];
800  int i, ret, last_stage = -1;
801  int frame_changed;
802 
803  frame_changed = in->width != link->w ||
804  in->height != link->h ||
805  in->format != link->format ||
806  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
807  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num;
808 
809  if (s->eval_mode == EVAL_MODE_FRAME || frame_changed) {
810  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
811 
812  av_expr_count_vars(s->w_pexpr, vars_w, VARS_NB);
813  av_expr_count_vars(s->h_pexpr, vars_h, VARS_NB);
814 
815  if (s->eval_mode == EVAL_MODE_FRAME && !frame_changed && !IS_SCALE2REF(ctx) &&
816  !(vars_w[VAR_N] || vars_w[VAR_T]
817 #if FF_API_FRAME_PKT
818  || vars_w[VAR_POS]
819 #endif
820  ) &&
821  !(vars_h[VAR_N] || vars_h[VAR_T]
822 #if FF_API_FRAME_PKT
823  || vars_h[VAR_POS]
824 #endif
825  ) && s->w && s->h)
826  goto scale;
827 
828  if (s->eval_mode == EVAL_MODE_INIT) {
829  snprintf(buf, sizeof(buf)-1, "%d", outlink->w);
830  av_opt_set(s, "w", buf, 0);
831  snprintf(buf, sizeof(buf)-1, "%d", outlink->h);
832  av_opt_set(s, "h", buf, 0);
833 
834  ret = nppscale_parse_expr(ctx, NULL, &s->w_pexpr, "width", s->w_expr);
835  if (ret < 0)
836  return ret;
837 
838  ret = nppscale_parse_expr(ctx, NULL, &s->h_pexpr, "height", s->h_expr);
839  if (ret < 0)
840  return ret;
841  }
842 
843  if (IS_SCALE2REF(ctx)) {
844  s->var_values[VAR_S2R_MAIN_N] = inl->frame_count_out;
845  s->var_values[VAR_S2R_MAIN_T] = TS2T(in->pts, link->time_base);
846 #if FF_API_FRAME_PKT
847 FF_DISABLE_DEPRECATION_WARNINGS
848  s->var_values[VAR_S2R_MAIN_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
849 FF_ENABLE_DEPRECATION_WARNINGS
850 #endif
851  } else {
852  s->var_values[VAR_N] = inl->frame_count_out;
853  s->var_values[VAR_T] = TS2T(in->pts, link->time_base);
854 #if FF_API_FRAME_PKT
855 FF_DISABLE_DEPRECATION_WARNINGS
856  s->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
857 FF_ENABLE_DEPRECATION_WARNINGS
858 #endif
859  }
860 
861  link->format = in->format;
862  link->w = in->width;
863  link->h = in->height;
864 
865  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
866  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
867 
868  if ((ret = config_props(outlink)) < 0)
869  return ret;
870  }
871 
872 scale:
873  for (i = 0; i < FF_ARRAY_ELEMS(s->stages); i++) {
874  if (!s->stages[i].stage_needed)
875  continue;
876 
877  ret = nppscale_process[i](ctx, &s->stages[i], s->stages[i].frame, src);
878  if (ret < 0)
879  return ret;
880 
881  src = s->stages[i].frame;
882  last_stage = i;
883  }
884  if (last_stage < 0)
885  return AVERROR_BUG;
886 
887  ret = av_hwframe_get_buffer(src->hw_frames_ctx, s->tmp_frame, 0);
888  if (ret < 0)
889  return ret;
890 
891  s->tmp_frame->width = src->width;
892  s->tmp_frame->height = src->height;
893 
894  av_frame_move_ref(out, src);
895  av_frame_move_ref(src, s->tmp_frame);
896 
897  ret = av_frame_copy_props(out, in);
898  if (ret < 0)
899  return ret;
900 
901  if (out->width != in->width || out->height != in->height) {
902  av_frame_side_data_remove_by_props(&out->side_data, &out->nb_side_data,
903  AV_SIDE_DATA_PROP_SIZE_DEPENDENT);
904  }
905 
906  return 0;
907 }
908 
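/* filter_frame callback for the main input of both filters: hardware frames
 * are passed through untouched when no conversion is needed; otherwise the
 * CUDA context is made current, the configured stage chain is run and the
 * sample aspect ratio of the output frame is rescaled to preserve the
 * display aspect ratio. */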
909 static int nppscale_filter_frame(AVFilterLink *link, AVFrame *in)
910 {
911  AVFilterContext *ctx = link->dst;
912  NPPScaleContext *s = ctx->priv;
913  AVFilterLink *outlink = ctx->outputs[0];
914  FilterLink *l = ff_filter_link(outlink);
915  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)l->hw_frames_ctx->data;
916  AVCUDADeviceContext *device_hwctx = frames_ctx->device_ctx->hwctx;
917 
918  AVFrame *out = NULL;
919  CUcontext dummy;
920  int ret = 0;
921 
922  if (s->passthrough)
923  return ff_filter_frame(outlink, in);
924 
925  out = av_frame_alloc();
926  if (!out) {
927  ret = AVERROR(ENOMEM);
928  goto fail;
929  }
930 
931  ret = CHECK_CU(device_hwctx->internal->cuda_dl->cuCtxPushCurrent(device_hwctx->cuda_ctx));
932  if (ret < 0)
933  goto fail;
934 
935  ret = nppscale_scale(link, out, in);
936 
937  CHECK_CU(device_hwctx->internal->cuda_dl->cuCtxPopCurrent(&dummy));
938  if (ret < 0)
939  goto fail;
940 
941  av_reduce(&out->sample_aspect_ratio.num, &out->sample_aspect_ratio.den,
942  (int64_t)in->sample_aspect_ratio.num * outlink->h * link->w,
943  (int64_t)in->sample_aspect_ratio.den * outlink->w * link->h,
944  INT_MAX);
945 
946  av_frame_free(&in);
947  return ff_filter_frame(outlink, out);
948 fail:
949  av_frame_free(&in);
950  av_frame_free(&out);
951  return ret;
952 }
953 
954 static int nppscale_filter_frame_ref(AVFilterLink *link, AVFrame *in)
955 {
956  FilterLink *inl = ff_filter_link(link);
957  NPPScaleContext *scale = link->dst->priv;
958  AVFilterLink *outlink = link->dst->outputs[1];
959  int frame_changed;
960 
961  frame_changed = in->width != link->w ||
962  in->height != link->h ||
963  in->format != link->format ||
964  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
965  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num;
966 
967  if (frame_changed) {
968  link->format = in->format;
969  link->w = in->width;
970  link->h = in->height;
971  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
972  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
973 
974  config_props_ref(outlink);
975  }
976 
977  if (scale->eval_mode == EVAL_MODE_FRAME) {
978  scale->var_values[VAR_N] = inl->frame_count_out;
979  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
980 #if FF_API_FRAME_PKT
981 FF_DISABLE_DEPRECATION_WARNINGS
982  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
983 FF_ENABLE_DEPRECATION_WARNINGS
984 #endif
985  }
986 
987  return ff_filter_frame(outlink, in);
988 }
989 
990 static int request_frame(AVFilterLink *outlink)
991 {
992  return ff_request_frame(outlink->src->inputs[0]);
993 }
994 
995 static int request_frame_ref(AVFilterLink *outlink)
996 {
997  return ff_request_frame(outlink->src->inputs[1]);
998 }
999 
1000 #define OFFSET(x) offsetof(NPPScaleContext, x)
1001 #define FLAGS (AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM)
1002 static const AVOption options[] = {
1003  { "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = FLAGS },
1004  { "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = FLAGS },
1005  { "format", "Output pixel format", OFFSET(format_str), AV_OPT_TYPE_STRING, { .str = "same" }, .flags = FLAGS },
1006  { "s", "Output video size", OFFSET(size_str), AV_OPT_TYPE_STRING, { .str = NULL }, .flags = FLAGS },
1007 
1008  { "interp_algo", "Interpolation algorithm used for resizing", OFFSET(interp_algo), AV_OPT_TYPE_INT, { .i64 = NPPI_INTER_CUBIC }, 0, INT_MAX, FLAGS, .unit = "interp_algo" },
1009  { "nn", "nearest neighbour", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_NN }, 0, 0, FLAGS, .unit = "interp_algo" },
1010  { "linear", "linear", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_LINEAR }, 0, 0, FLAGS, .unit = "interp_algo" },
1011  { "cubic", "cubic", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_CUBIC }, 0, 0, FLAGS, .unit = "interp_algo" },
1012  { "cubic2p_bspline", "2-parameter cubic (B=1, C=0)", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_CUBIC2P_BSPLINE }, 0, 0, FLAGS, .unit = "interp_algo" },
1013  { "cubic2p_catmullrom", "2-parameter cubic (B=0, C=1/2)", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_CUBIC2P_CATMULLROM }, 0, 0, FLAGS, .unit = "interp_algo" },
1014  { "cubic2p_b05c03", "2-parameter cubic (B=1/2, C=3/10)", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_CUBIC2P_B05C03 }, 0, 0, FLAGS, .unit = "interp_algo" },
1015  { "super", "supersampling", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_SUPER }, 0, 0, FLAGS, .unit = "interp_algo" },
1016  { "lanczos", "Lanczos", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_LANCZOS }, 0, 0, FLAGS, .unit = "interp_algo" },
1017  { "force_original_aspect_ratio", "decrease or increase w/h if necessary to keep the original AR", OFFSET(force_original_aspect_ratio), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 2, FLAGS, .unit = "force_oar" },
1018  { "disable", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, FLAGS, .unit = "force_oar" },
1019  { "decrease", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, FLAGS, .unit = "force_oar" },
1020  { "increase", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 2 }, 0, 0, FLAGS, .unit = "force_oar" },
1021  { "force_divisible_by", "enforce that the output resolution is divisible by a defined integer when force_original_aspect_ratio is used", OFFSET(force_divisible_by), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, 256, FLAGS },
1022  { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, { .i64 = EVAL_MODE_INIT }, 0, EVAL_MODE_NB-1, FLAGS, .unit = "eval" },
1023  { "init", "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, { .i64 = EVAL_MODE_INIT }, 0, 0, FLAGS, .unit = "eval" },
1024  { "frame", "eval expressions during initialization and per-frame", 0, AV_OPT_TYPE_CONST, { .i64 = EVAL_MODE_FRAME }, 0, 0, FLAGS, .unit = "eval" },
1025  { NULL },
1026 };
1027 
1028 static const AVClass nppscale_class = {
1029  .class_name = "nppscale",
1030  .item_name = av_default_item_name,
1031  .option = options,
1032  .version = LIBAVUTIL_VERSION_INT,
1033  .category = AV_CLASS_CATEGORY_FILTER,
1034 };
1035 
1036 static const AVFilterPad nppscale_inputs[] = {
1037  {
1038  .name = "default",
1039  .type = AVMEDIA_TYPE_VIDEO,
1040  .filter_frame = nppscale_filter_frame,
1041  }
1042 };
1043 
1044 static const AVFilterPad nppscale_outputs[] = {
1045  {
1046  .name = "default",
1047  .type = AVMEDIA_TYPE_VIDEO,
1048  .config_props = config_props,
1049  }
1050 };
1051 
1052 const FFFilter ff_vf_scale_npp = {
1053  .p.name = "scale_npp",
1054  .p.description = NULL_IF_CONFIG_SMALL("NVIDIA Performance Primitives video "
1055  "scaling and format conversion"),
1056  .p.priv_class = &nppscale_class,
1057 
1058  .init = nppscale_init,
1059  .uninit = nppscale_uninit,
1060 
1061  .priv_size = sizeof(NPPScaleContext),
1062 
1063  FILTER_INPUTS(nppscale_inputs),
1064  FILTER_OUTPUTS(nppscale_outputs),
1065 
1066  FILTER_SINGLE_PIXFMT(AV_PIX_FMT_CUDA),
1067 
1068  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
1069 };
1070 
1071 static const AVFilterPad nppscale2ref_inputs[] = {
1072  {
1073  .name = "default",
1074  .type = AVMEDIA_TYPE_VIDEO,
1075  .filter_frame = nppscale_filter_frame,
1076  },
1077  {
1078  .name = "ref",
1079  .type = AVMEDIA_TYPE_VIDEO,
1080  .filter_frame = nppscale_filter_frame_ref,
1081  }
1082 };
1083 
1084 static const AVFilterPad nppscale2ref_outputs[] = {
1085  {
1086  .name = "default",
1087  .type = AVMEDIA_TYPE_VIDEO,
1088  .config_props = config_props,
1089  .request_frame= request_frame,
1090  },
1091  {
1092  .name = "ref",
1093  .type = AVMEDIA_TYPE_VIDEO,
1094  .config_props = config_props_ref,
1095  .request_frame= request_frame_ref,
1096  }
1097 };
1098 
1099 const FFFilter ff_vf_scale2ref_npp = {
1100  .p.name = "scale2ref_npp",
1101  .p.description = NULL_IF_CONFIG_SMALL("NVIDIA Performance Primitives video "
1102  "scaling and format conversion to the "
1103  "given reference."),
1104  .p.priv_class = &nppscale_class,
1105 
1106  .init = nppscale_init,
1107  .uninit = nppscale_uninit,
1108 
1109  .priv_size = sizeof(NPPScaleContext),
1110 
1111  FILTER_INPUTS(nppscale2ref_inputs),
1112  FILTER_OUTPUTS(nppscale2ref_outputs),
1113 
1114  FILTER_SINGLE_PIXFMT(AV_PIX_FMT_CUDA),
1115 
1116  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
1117 };