#include "libavutil/cpu.h"
#include "libavutil/common.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"
#include "yadif.h"

#undef NDEBUG
#include <assert.h>

typedef struct {
    /**
     * 0: output one frame for each frame
     * 1: output one frame for each field
     * 2: like 0, but skip the spatial interlacing check
     * 3: like 1, but skip the spatial interlacing check
     */
    int mode;

    /**
     *  0: assume top field first
     *  1: assume bottom field first
     * -1: auto-detect the field order from the frame flags
     */
    int parity;

    int frame_pending;

    AVFilterBufferRef *cur;
    AVFilterBufferRef *next;
    AVFilterBufferRef *prev;
    AVFilterBufferRef *out;
    void (*filter_line)(uint8_t *dst,
                        uint8_t *prev, uint8_t *cur, uint8_t *next,
                        int w, int prefs, int mrefs, int parity, int mode);

    const AVPixFmtDescriptor *csp;
} YADIFContext;

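/*
 * CHECK(j) scores the edge direction with slope j by comparing three pixel
 * pairs mirrored between the line above (mrefs) and the line below (prefs);
 * a lower score updates spatial_pred with the average along that direction.
 * CHECK deliberately leaves its braces open: the "}} }}" tokens inside FILTER
 * close the nested blocks, so the steeper direction (j = +/-2) is only tried
 * when j = +/-1 already improved the score.
 *
 * FILTER is the per-pixel yadif kernel: it forms the temporal prediction d
 * from the two reference fields, an edge-directed spatial prediction, and
 * clamps the spatial prediction into [d - diff, d + diff].  When mode >= 2
 * the extra temporal check using the pixels two lines away (b and f) is
 * skipped.
 */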
#define CHECK(j)\
    {   int score = FFABS(cur[mrefs-1+(j)] - cur[prefs-1-(j)])\
                  + FFABS(cur[mrefs  +(j)] - cur[prefs  -(j)])\
                  + FFABS(cur[mrefs+1+(j)] - cur[prefs+1-(j)]);\
        if (score < spatial_score) {\
            spatial_score= score;\
            spatial_pred= (cur[mrefs  +(j)] + cur[prefs  -(j)])>>1;\

#define FILTER \
    for (x = 0; x < w; x++) { \
        int c = cur[mrefs]; \
        int d = (prev2[0] + next2[0])>>1; \
        int e = cur[prefs]; \
        int temporal_diff0 = FFABS(prev2[0] - next2[0]); \
        int temporal_diff1 =(FFABS(prev[mrefs] - c) + FFABS(prev[prefs] - e) )>>1; \
        int temporal_diff2 =(FFABS(next[mrefs] - c) + FFABS(next[prefs] - e) )>>1; \
        int diff = FFMAX3(temporal_diff0>>1, temporal_diff1, temporal_diff2); \
        int spatial_pred = (c+e)>>1; \
        int spatial_score = FFABS(cur[mrefs-1] - cur[prefs-1]) + FFABS(c-e) \
                          + FFABS(cur[mrefs+1] - cur[prefs+1]) - 1; \
 \
        CHECK(-1) CHECK(-2) }} }} \
        CHECK( 1) CHECK( 2) }} }} \
 \
        if (mode < 2) { \
            int b = (prev2[2*mrefs] + next2[2*mrefs])>>1; \
            int f = (prev2[2*prefs] + next2[2*prefs])>>1; \
            int max = FFMAX3(d-e, d-c, FFMIN(b-c, f-e)); \
            int min = FFMIN3(d-e, d-c, FFMAX(b-c, f-e)); \
 \
            diff = FFMAX3(diff, min, -max); \
        } \
 \
        if (spatial_pred > d + diff) \
            spatial_pred = d + diff; \
        else if (spatial_pred < d - diff) \
            spatial_pred = d - diff; \
 \
        dst[0] = spatial_pred; \
 \
        dst++; \
        cur++; \
        prev++; \
        next++; \
        prev2++; \
        next2++; \
    }

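/* Plain C reference implementation of the line filter.  Depending on the
 * parity of the field being rebuilt, prev2/next2 select which pair of frames
 * supplies the temporal references for this line. */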
static void filter_line_c(uint8_t *dst,
                          uint8_t *prev, uint8_t *cur, uint8_t *next,
                          int w, int prefs, int mrefs, int parity, int mode)
{
    int x;
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur  : next;

    FILTER
}

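/* 16-bit variant: mrefs/prefs arrive as byte strides, so they are halved to
 * step in uint16_t units before running the same FILTER kernel. */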
static void filter_line_c_16bit(uint16_t *dst,
                                uint16_t *prev, uint16_t *cur, uint16_t *next,
                                int w, int prefs, int mrefs, int parity, int mode)
{
    int x;
    uint16_t *prev2 = parity ? prev : cur ;
    uint16_t *next2 = parity ? cur  : next;
    mrefs /= 2;
    prefs /= 2;

    FILTER
}

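/* Deinterlace one output picture.  For every plane (chroma planes are scaled
 * down by the subsampling factors), lines belonging to the field that has to
 * be interpolated are run through filter_line(), the remaining lines are
 * copied verbatim from the current frame.  Lines 1 and h-2 are forced to
 * mode 2 so the mode < 2 check never reads two lines outside the plane, and
 * the EMMS at the end clears the MMX state left behind by the asm filters. */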
static void filter(AVFilterContext *ctx, AVFilterBufferRef *dstpic,
                   int parity, int tff)
{
    YADIFContext *yadif = ctx->priv;
    int y, i;

    for (i = 0; i < yadif->csp->nb_components; i++) {
        int w = dstpic->video->w;
        int h = dstpic->video->h;
        int refs = yadif->cur->linesize[i];
        int df = (yadif->csp->comp[i].depth_minus1+1) / 8;

        if (i) {
            w >>= yadif->csp->log2_chroma_w;
            h >>= yadif->csp->log2_chroma_h;
        }

        for (y = 0; y < h; y++) {
            if ((y ^ parity) & 1) {
                uint8_t *prev = &yadif->prev->data[i][y*refs];
                uint8_t *cur  = &yadif->cur ->data[i][y*refs];
                uint8_t *next = &yadif->next->data[i][y*refs];
                uint8_t *dst  = &dstpic->data[i][y*dstpic->linesize[i]];
                int mode = y==1 || y+2==h ? 2 : yadif->mode;
                yadif->filter_line(dst, prev, cur, next, w,
                                   y+1<h ? refs : -refs, y ? -refs : refs,
                                   parity ^ tff, mode);
            } else {
                memcpy(&dstpic->data[i][y*dstpic->linesize[i]],
                       &yadif->cur->data[i][y*refs], w*df);
            }
        }
    }
#if HAVE_MMX
    __asm__ volatile("emms \n\t" : : : "memory");
#endif
}

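/* Over-allocate the input buffers: width and padded height are aligned to 32
 * and every plane pointer is moved down by one line, so filter_line() can
 * safely address one line above and below the visible picture. */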
static AVFilterBufferRef *get_video_buffer(AVFilterLink *link, int perms, int w, int h)
{
    AVFilterBufferRef *picref;
    int width  = FFALIGN(w,   32);
    int height = FFALIGN(h+2, 32);
    int i;

    picref = avfilter_default_get_video_buffer(link, perms, width, height);

    picref->video->w = w;
    picref->video->h = h;

    for (i = 0; i < 3; i++)
        picref->data[i] += picref->linesize[i];

    return picref;
}

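/* Emit one deinterlaced frame on the output link.  In field mode (mode&1)
 * this is called a second time with is_second set; the second frame gets a
 * pts halfway between the current and the next input frame, computed with
 * the overflow-free average (a&b) + ((a^b)>>1). */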
static void return_frame(AVFilterContext *ctx, int is_second)
{
    YADIFContext *yadif = ctx->priv;
    AVFilterLink *link = ctx->outputs[0];
    int tff;

    if (yadif->parity == -1) {
        tff = yadif->cur->video->interlaced ?
              yadif->cur->video->top_field_first : 1;
    } else {
        tff = yadif->parity^1;
    }

    if (is_second) {
        yadif->out = avfilter_get_video_buffer(link, AV_PERM_WRITE | AV_PERM_PRESERVE |
                                               AV_PERM_REUSE, link->w, link->h);
        avfilter_copy_buffer_ref_props(yadif->out, yadif->cur);
        yadif->out->video->interlaced = 0;
    }

    if (!yadif->csp)
        yadif->csp = &av_pix_fmt_descriptors[link->format];
    if (yadif->csp->comp[0].depth_minus1 == 15) /* cast: the 16-bit variant takes uint16_t pointers */
        yadif->filter_line = (void (*)(uint8_t *, uint8_t *, uint8_t *, uint8_t *,
                                       int, int, int, int, int))filter_line_c_16bit;

    filter(ctx, yadif->out, tff ^ !is_second, tff);

    if (is_second) {
        if (yadif->next->pts != AV_NOPTS_VALUE &&
            yadif->cur->pts != AV_NOPTS_VALUE) {
            yadif->out->pts =
                (yadif->next->pts&yadif->cur->pts) +
                ((yadif->next->pts^yadif->cur->pts)>>1);
        } else {
            yadif->out->pts = AV_NOPTS_VALUE;
        }
        avfilter_start_frame(ctx->outputs[0], yadif->out);
    }
    avfilter_draw_slice(ctx->outputs[0], 0, link->h, 1);
    avfilter_end_frame(ctx->outputs[0]);

    yadif->frame_pending = (yadif->mode&1) && !is_second;
}

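/* Input callback: shift the prev/cur/next window by one frame and, once a
 * current frame is available, allocate the output frame and forward
 * start_frame downstream.  The very first frame is duplicated into prev so
 * the filter always has three references to work with. */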
static void start_frame(AVFilterLink *link, AVFilterBufferRef *picref)
{
    AVFilterContext *ctx = link->dst;
    YADIFContext *yadif = ctx->priv;

    if (yadif->frame_pending)
        return_frame(ctx, 1);

    if (yadif->prev)
        avfilter_unref_buffer(yadif->prev);
    yadif->prev = yadif->cur;
    yadif->cur  = yadif->next;
    yadif->next = picref;

    if (!yadif->cur)
        return;

    if (!yadif->prev)
        yadif->prev = avfilter_ref_buffer(yadif->cur, AV_PERM_READ);

    yadif->out = avfilter_get_video_buffer(ctx->outputs[0], AV_PERM_WRITE | AV_PERM_PRESERVE |
                                           AV_PERM_REUSE, link->w, link->h);

    avfilter_copy_buffer_ref_props(yadif->out, yadif->cur);
    yadif->out->video->interlaced = 0;
    avfilter_start_frame(ctx->outputs[0], yadif->out);
}

static void end_frame(AVFilterLink *link)
{
    AVFilterContext *ctx = link->dst;
    YADIFContext *yadif = ctx->priv;

    if (!yadif->out)
        return;

    return_frame(ctx, 0);
}

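/* Output callback: if a second field is still pending, emit it; otherwise
 * keep requesting frames from the input until start_frame() has given us a
 * current frame to work on. */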
static int request_frame(AVFilterLink *link)
{
    AVFilterContext *ctx = link->src;
    YADIFContext *yadif = ctx->priv;

    if (yadif->frame_pending) {
        return_frame(ctx, 1);
        return 0;
    }

    do {
        int ret;

        if ((ret = avfilter_request_frame(link->src->inputs[0])))
            return ret;
    } while (!yadif->cur);

    return 0;
}

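/* Report how many frames can be pulled: the input count is doubled in field
 * mode, and one extra input frame is requested up front so yadif->next is
 * primed before the first poll succeeds. */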
static int poll_frame(AVFilterLink *link)
{
    YADIFContext *yadif = link->src->priv;
    int ret, val;

    if (yadif->frame_pending)
        return 1;

    val = avfilter_poll_frame(link->src->inputs[0]);

    if (val == 1 && !yadif->next) {
        if ((ret = avfilter_request_frame(link->src->inputs[0])) < 0)
            return ret;
        val = avfilter_poll_frame(link->src->inputs[0]);
    }
    assert(yadif->next || !val);

    return val * ((yadif->mode&1)+1);
}

static av_cold void uninit(AVFilterContext *ctx)
{
    YADIFContext *yadif = ctx->priv;

    if (yadif->prev) avfilter_unref_buffer(yadif->prev);
    if (yadif->cur ) avfilter_unref_buffer(yadif->cur );
    if (yadif->next) avfilter_unref_buffer(yadif->next);
}

static int query_formats(AVFilterContext *ctx)
{
    static const enum PixelFormat pix_fmts[] = {
        PIX_FMT_YUV420P,
        PIX_FMT_YUV422P,
        PIX_FMT_YUV444P,
        PIX_FMT_YUV410P,
        PIX_FMT_YUV411P,
        PIX_FMT_GRAY8,
        PIX_FMT_YUVJ420P,
        PIX_FMT_YUVJ422P,
        PIX_FMT_YUVJ444P,
        AV_NE( PIX_FMT_GRAY16BE,   PIX_FMT_GRAY16LE ),
        PIX_FMT_YUV440P,
        PIX_FMT_YUVJ440P,
        AV_NE( PIX_FMT_YUV420P16BE, PIX_FMT_YUV420P16LE ),
        AV_NE( PIX_FMT_YUV422P16BE, PIX_FMT_YUV422P16LE ),
        AV_NE( PIX_FMT_YUV444P16BE, PIX_FMT_YUV444P16LE ),
        PIX_FMT_NONE
    };

    avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(pix_fmts));

    return 0;
}

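/* Parse the filter arguments as "mode:parity" and pick the fastest available
 * filter_line implementation for this CPU.  For example (values chosen here
 * for illustration, not taken from the original source), an option string of
 * "1:-1" selects one output frame per field with automatic parity detection. */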
static av_cold int init(AVFilterContext *ctx, const char *args, void *opaque)
{
    YADIFContext *yadif = ctx->priv;
    av_unused int cpu_flags = av_get_cpu_flags();

    yadif->mode = 0;
    yadif->parity = -1;
    yadif->csp = NULL;

    if (args) sscanf(args, "%d:%d", &yadif->mode, &yadif->parity);

    yadif->filter_line = filter_line_c;
    if (HAVE_SSSE3 && cpu_flags & AV_CPU_FLAG_SSSE3)
        yadif->filter_line = ff_yadif_filter_line_ssse3;
    else if (HAVE_SSE && cpu_flags & AV_CPU_FLAG_SSE2)
        yadif->filter_line = ff_yadif_filter_line_sse2;
    else if (HAVE_MMX && cpu_flags & AV_CPU_FLAG_MMX)
        yadif->filter_line = ff_yadif_filter_line_mmx;

    av_log(ctx, AV_LOG_INFO, "mode:%d parity:%d\n", yadif->mode, yadif->parity);

    return 0;
}

static void null_draw_slice(AVFilterLink *link, int y, int h, int slice_dir) { }

AVFilter avfilter_vf_yadif = {
    .name          = "yadif",
    .description   = NULL_IF_CONFIG_SMALL("Deinterlace the input image"),

    .priv_size     = sizeof(YADIFContext),
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,

    .inputs    = (AVFilterPad[]) {{ .name             = "default",
                                    .type             = AVMEDIA_TYPE_VIDEO,
                                    .start_frame      = start_frame,
                                    .get_video_buffer = get_video_buffer,
                                    .draw_slice       = null_draw_slice,
                                    .end_frame        = end_frame, },
                                  { .name = NULL}},

    .outputs   = (AVFilterPad[]) {{ .name             = "default",
                                    .type             = AVMEDIA_TYPE_VIDEO,
                                    .poll_frame       = poll_frame,
                                    .request_frame    = request_frame, },
                                  { .name = NULL}},
};