FFmpeg
h264_slice.c
Go to the documentation of this file.
1 /*
2  * H.26L/H.264/AVC/JVT/14496-10/... decoder
3  * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * H.264 / AVC / MPEG-4 part10 codec.
25  * @author Michael Niedermayer <michaelni@gmx.at>
26  */
27 
28 #include "config_components.h"
29 
30 #include "libavutil/avassert.h"
31 #include "libavutil/mem.h"
32 #include "libavutil/pixdesc.h"
33 #include "libavutil/timecode.h"
34 #include "decode.h"
35 #include "cabac.h"
36 #include "cabac_functions.h"
37 #include "error_resilience.h"
38 #include "avcodec.h"
39 #include "h264.h"
40 #include "h264dec.h"
41 #include "h264data.h"
42 #include "h264chroma.h"
43 #include "h264_ps.h"
44 #include "golomb.h"
45 #include "mathops.h"
46 #include "mpegutils.h"
47 #include "rectangle.h"
48 #include "libavutil/refstruct.h"
49 #include "thread.h"
50 #include "threadframe.h"
51 
/* 4x4 coefficient scan order for field (interlaced) macroblocks, encoded
 * as "column + row * 4" offsets into the 4x4 block.
 * The array is declared one element larger than the 16 initializers;
 * the trailing element stays zero — presumably a guard for scan readers
 * that fetch one entry past the end (TODO confirm against users). */
static const uint8_t field_scan[16+1] = {
    0 + 0 * 4, 0 + 1 * 4, 1 + 0 * 4, 0 + 2 * 4,
    0 + 3 * 4, 1 + 1 * 4, 1 + 2 * 4, 1 + 3 * 4,
    2 + 0 * 4, 2 + 1 * 4, 2 + 2 * 4, 2 + 3 * 4,
    3 + 0 * 4, 3 + 1 * 4, 3 + 2 * 4, 3 + 3 * 4,
};
58 
/* 8x8 coefficient scan order for field (interlaced) macroblocks, encoded
 * as "column + row * 8" offsets. Declared 64+1 with the last element left
 * zero-initialized, matching the other scan tables in this file. */
static const uint8_t field_scan8x8[64+1] = {
    0 + 0 * 8, 0 + 1 * 8, 0 + 2 * 8, 1 + 0 * 8,
    1 + 1 * 8, 0 + 3 * 8, 0 + 4 * 8, 1 + 2 * 8,
    2 + 0 * 8, 1 + 3 * 8, 0 + 5 * 8, 0 + 6 * 8,
    0 + 7 * 8, 1 + 4 * 8, 2 + 1 * 8, 3 + 0 * 8,
    2 + 2 * 8, 1 + 5 * 8, 1 + 6 * 8, 1 + 7 * 8,
    2 + 3 * 8, 3 + 1 * 8, 4 + 0 * 8, 3 + 2 * 8,
    2 + 4 * 8, 2 + 5 * 8, 2 + 6 * 8, 2 + 7 * 8,
    3 + 3 * 8, 4 + 1 * 8, 5 + 0 * 8, 4 + 2 * 8,
    3 + 4 * 8, 3 + 5 * 8, 3 + 6 * 8, 3 + 7 * 8,
    4 + 3 * 8, 5 + 1 * 8, 6 + 0 * 8, 5 + 2 * 8,
    4 + 4 * 8, 4 + 5 * 8, 4 + 6 * 8, 4 + 7 * 8,
    5 + 3 * 8, 6 + 1 * 8, 6 + 2 * 8, 5 + 4 * 8,
    5 + 5 * 8, 5 + 6 * 8, 5 + 7 * 8, 6 + 3 * 8,
    7 + 0 * 8, 7 + 1 * 8, 6 + 4 * 8, 6 + 5 * 8,
    6 + 6 * 8, 6 + 7 * 8, 7 + 2 * 8, 7 + 3 * 8,
    7 + 4 * 8, 7 + 5 * 8, 7 + 6 * 8, 7 + 7 * 8,
};
77 
/* CAVLC variant of the 8x8 field scan: the same positions as
 * field_scan8x8 but reordered column-major for CAVLC coefficient
 * parsing (analogous to zigzag_scan8x8_cavlc below). */
static const uint8_t field_scan8x8_cavlc[64+1] = {
    0 + 0 * 8, 1 + 1 * 8, 2 + 0 * 8, 0 + 7 * 8,
    2 + 2 * 8, 2 + 3 * 8, 2 + 4 * 8, 3 + 3 * 8,
    3 + 4 * 8, 4 + 3 * 8, 4 + 4 * 8, 5 + 3 * 8,
    5 + 5 * 8, 7 + 0 * 8, 6 + 6 * 8, 7 + 4 * 8,
    0 + 1 * 8, 0 + 3 * 8, 1 + 3 * 8, 1 + 4 * 8,
    1 + 5 * 8, 3 + 1 * 8, 2 + 5 * 8, 4 + 1 * 8,
    3 + 5 * 8, 5 + 1 * 8, 4 + 5 * 8, 6 + 1 * 8,
    5 + 6 * 8, 7 + 1 * 8, 6 + 7 * 8, 7 + 5 * 8,
    0 + 2 * 8, 0 + 4 * 8, 0 + 5 * 8, 2 + 1 * 8,
    1 + 6 * 8, 4 + 0 * 8, 2 + 6 * 8, 5 + 0 * 8,
    3 + 6 * 8, 6 + 0 * 8, 4 + 6 * 8, 6 + 2 * 8,
    5 + 7 * 8, 6 + 4 * 8, 7 + 2 * 8, 7 + 6 * 8,
    1 + 0 * 8, 1 + 2 * 8, 0 + 6 * 8, 3 + 0 * 8,
    1 + 7 * 8, 3 + 2 * 8, 2 + 7 * 8, 4 + 2 * 8,
    3 + 7 * 8, 5 + 2 * 8, 4 + 7 * 8, 5 + 4 * 8,
    6 + 3 * 8, 6 + 5 * 8, 7 + 3 * 8, 7 + 7 * 8,
};
96 
// zigzag_scan8x8_cavlc[i] = zigzag_scan8x8[(i/4) + 16*(i%4)]
/* Progressive 8x8 zigzag scan reordered for CAVLC parsing; positions are
 * "column + row * 8" offsets. 64+1 elements as with the other tables. */
static const uint8_t zigzag_scan8x8_cavlc[64+1] = {
    0 + 0 * 8, 1 + 1 * 8, 1 + 2 * 8, 2 + 2 * 8,
    4 + 1 * 8, 0 + 5 * 8, 3 + 3 * 8, 7 + 0 * 8,
    3 + 4 * 8, 1 + 7 * 8, 5 + 3 * 8, 6 + 3 * 8,
    2 + 7 * 8, 6 + 4 * 8, 5 + 6 * 8, 7 + 5 * 8,
    1 + 0 * 8, 2 + 0 * 8, 0 + 3 * 8, 3 + 1 * 8,
    3 + 2 * 8, 0 + 6 * 8, 4 + 2 * 8, 6 + 1 * 8,
    2 + 5 * 8, 2 + 6 * 8, 6 + 2 * 8, 5 + 4 * 8,
    3 + 7 * 8, 7 + 3 * 8, 4 + 7 * 8, 7 + 6 * 8,
    0 + 1 * 8, 3 + 0 * 8, 0 + 4 * 8, 4 + 0 * 8,
    2 + 3 * 8, 1 + 5 * 8, 5 + 1 * 8, 5 + 2 * 8,
    1 + 6 * 8, 3 + 5 * 8, 7 + 1 * 8, 4 + 5 * 8,
    4 + 6 * 8, 7 + 4 * 8, 5 + 7 * 8, 6 + 7 * 8,
    0 + 2 * 8, 2 + 1 * 8, 1 + 3 * 8, 5 + 0 * 8,
    1 + 4 * 8, 2 + 4 * 8, 6 + 0 * 8, 4 + 3 * 8,
    0 + 7 * 8, 4 + 4 * 8, 7 + 2 * 8, 3 + 6 * 8,
    5 + 5 * 8, 6 + 5 * 8, 6 + 6 * 8, 7 + 7 * 8,
};
116 
117 static void release_unused_pictures(H264Context *h, int remove_current)
118 {
119  int i;
120 
121  /* release non reference frames */
122  for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
123  if (h->DPB[i].f->buf[0] && !h->DPB[i].reference &&
124  (remove_current || &h->DPB[i] != h->cur_pic_ptr)) {
125  ff_h264_unref_picture(&h->DPB[i]);
126  }
127  }
128 }
129 
130 static int alloc_scratch_buffers(H264SliceContext *sl, int linesize)
131 {
132  const H264Context *h = sl->h264;
133  int alloc_size = FFALIGN(FFABS(linesize) + 32, 32);
134 
135  av_fast_malloc(&sl->bipred_scratchpad, &sl->bipred_scratchpad_allocated, 16 * 6 * alloc_size);
136  // edge emu needs blocksize + filter length - 1
137  // (= 21x21 for H.264)
138  av_fast_malloc(&sl->edge_emu_buffer, &sl->edge_emu_buffer_allocated, alloc_size * 2 * 21);
139 
141  h->mb_width * 16 * 3 * sizeof(uint8_t) * 2);
143  h->mb_width * 16 * 3 * sizeof(uint8_t) * 2);
144 
145  if (!sl->bipred_scratchpad || !sl->edge_emu_buffer ||
146  !sl->top_borders[0] || !sl->top_borders[1]) {
149  av_freep(&sl->top_borders[0]);
150  av_freep(&sl->top_borders[1]);
151 
154  sl->top_borders_allocated[0] = 0;
155  sl->top_borders_allocated[1] = 0;
156  return AVERROR(ENOMEM);
157  }
158 
159  return 0;
160 }
161 
163 {
164  const int big_mb_num = h->mb_stride * (h->mb_height + 1) + 1;
165  const int mb_array_size = h->mb_stride * h->mb_height;
166  const int b4_stride = h->mb_width * 4 + 1;
167  const int b4_array_size = b4_stride * h->mb_height * 4;
168 
169  h->qscale_table_pool = av_refstruct_pool_alloc(big_mb_num + h->mb_stride, 0);
170  h->mb_type_pool = av_refstruct_pool_alloc((big_mb_num + h->mb_stride) *
171  sizeof(uint32_t), 0);
172  h->motion_val_pool = av_refstruct_pool_alloc(2 * (b4_array_size + 4) *
173  sizeof(int16_t), 0);
174  h->ref_index_pool = av_refstruct_pool_alloc(4 * mb_array_size, 0);
175 
176  if (!h->qscale_table_pool || !h->mb_type_pool || !h->motion_val_pool ||
177  !h->ref_index_pool) {
178  av_refstruct_pool_uninit(&h->qscale_table_pool);
179  av_refstruct_pool_uninit(&h->mb_type_pool);
180  av_refstruct_pool_uninit(&h->motion_val_pool);
181  av_refstruct_pool_uninit(&h->ref_index_pool);
182  return AVERROR(ENOMEM);
183  }
184 
185  return 0;
186 }
187 
189 {
190  int i, ret = 0;
191 
192  av_assert0(!pic->f->data[0]);
193 
194  if (h->sei.common.lcevc.info) {
195  HEVCSEILCEVC *lcevc = &h->sei.common.lcevc;
197  if (ret < 0)
198  return ret;
199  }
200 
201  pic->tf.f = pic->f;
202  ret = ff_thread_get_ext_buffer(h->avctx, &pic->tf,
203  pic->reference ? AV_GET_BUFFER_FLAG_REF : 0);
204  if (ret < 0)
205  goto fail;
206 
207  if (pic->needs_fg) {
208  pic->f_grain->format = pic->f->format;
209  pic->f_grain->width = pic->f->width;
210  pic->f_grain->height = pic->f->height;
211  ret = ff_thread_get_buffer(h->avctx, pic->f_grain, 0);
212  if (ret < 0)
213  goto fail;
214  }
215 
217  if (ret < 0)
218  goto fail;
219 
220  if (h->decode_error_flags_pool) {
221  pic->decode_error_flags = av_refstruct_pool_get(h->decode_error_flags_pool);
222  if (!pic->decode_error_flags)
223  goto fail;
225  }
226 
227  if (CONFIG_GRAY && !h->avctx->hwaccel && h->flags & AV_CODEC_FLAG_GRAY && pic->f->data[2]) {
228  int h_chroma_shift, v_chroma_shift;
230  &h_chroma_shift, &v_chroma_shift);
231 
232  for(i=0; i<AV_CEIL_RSHIFT(pic->f->height, v_chroma_shift); i++) {
233  memset(pic->f->data[1] + pic->f->linesize[1]*i,
234  0x80, AV_CEIL_RSHIFT(pic->f->width, h_chroma_shift));
235  memset(pic->f->data[2] + pic->f->linesize[2]*i,
236  0x80, AV_CEIL_RSHIFT(pic->f->width, h_chroma_shift));
237  }
238  }
239 
240  if (!h->qscale_table_pool) {
242  if (ret < 0)
243  goto fail;
244  }
245 
246  pic->qscale_table_base = av_refstruct_pool_get(h->qscale_table_pool);
247  pic->mb_type_base = av_refstruct_pool_get(h->mb_type_pool);
248  if (!pic->qscale_table_base || !pic->mb_type_base)
249  goto fail;
250 
251  pic->mb_type = pic->mb_type_base + 2 * h->mb_stride + 1;
252  pic->qscale_table = pic->qscale_table_base + 2 * h->mb_stride + 1;
253 
254  for (i = 0; i < 2; i++) {
255  pic->motion_val_base[i] = av_refstruct_pool_get(h->motion_val_pool);
256  pic->ref_index[i] = av_refstruct_pool_get(h->ref_index_pool);
257  if (!pic->motion_val_base[i] || !pic->ref_index[i])
258  goto fail;
259 
260  pic->motion_val[i] = pic->motion_val_base[i] + 4;
261  }
262 
263  pic->pps = av_refstruct_ref_c(h->ps.pps);
264 
265  pic->mb_width = h->mb_width;
266  pic->mb_height = h->mb_height;
267  pic->mb_stride = h->mb_stride;
268 
269  return 0;
270 fail:
272  return (ret < 0) ? ret : AVERROR(ENOMEM);
273 }
274 
276 {
277  int i;
278 
279  for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
280  if (!h->DPB[i].f->buf[0])
281  return i;
282  }
283  return AVERROR_INVALIDDATA;
284 }
285 
286 
/* True if pointer a lies within the object/array of `size` elements starting at b. */
#define IN_RANGE(a, b, size) (((void*)(a) >= (void*)(b)) && ((void*)(a) < (void*)((b) + (size))))

/* Translate a picture pointer that points into old_ctx's DPB into the
 * corresponding slot of new_ctx's DPB; any other pointer maps to NULL. */
#define REBASE_PICTURE(pic, new_ctx, old_ctx) \
    (((pic) && (pic) >= (old_ctx)->DPB && \
      (pic) < (old_ctx)->DPB + H264_MAX_PICTURE_COUNT) ? \
     &(new_ctx)->DPB[(pic) - (old_ctx)->DPB] : NULL)
293 
294 static void copy_picture_range(H264Picture **to, H264Picture *const *from, int count,
295  H264Context *new_base, const H264Context *old_base)
296 {
297  int i;
298 
299  for (i = 0; i < count; i++) {
300  av_assert1(!from[i] ||
301  IN_RANGE(from[i], old_base, 1) ||
302  IN_RANGE(from[i], old_base->DPB, H264_MAX_PICTURE_COUNT));
303  to[i] = REBASE_PICTURE(from[i], new_base, old_base);
304  }
305 }
306 
307 static void color_frame(AVFrame *frame, const int c[4])
308 {
310 
312 
313  for (int p = 0; p < desc->nb_components; p++) {
314  uint8_t *dst = frame->data[p];
315  int is_chroma = p == 1 || p == 2;
316  int bytes = is_chroma ? AV_CEIL_RSHIFT(frame->width, desc->log2_chroma_w) : frame->width;
317  int height = is_chroma ? AV_CEIL_RSHIFT(frame->height, desc->log2_chroma_h) : frame->height;
318  if (desc->comp[0].depth >= 9) {
319  ((uint16_t*)dst)[0] = c[p];
320  av_memcpy_backptr(dst + 2, 2, bytes - 2);
321  dst += frame->linesize[p];
322  for (int y = 1; y < height; y++) {
323  memcpy(dst, frame->data[p], 2*bytes);
324  dst += frame->linesize[p];
325  }
326  } else {
327  for (int y = 0; y < height; y++) {
328  memset(dst, c[p], bytes);
329  dst += frame->linesize[p];
330  }
331  }
332  }
333 }
334 
336 
338  const AVCodecContext *src)
339 {
340  H264Context *h = dst->priv_data, *h1 = src->priv_data;
341  int inited = h->context_initialized, err = 0;
342  int need_reinit = 0;
343  int i, ret;
344 
345  if (dst == src)
346  return 0;
347 
348  if (inited && !h1->ps.sps)
349  return AVERROR_INVALIDDATA;
350 
351  if (inited &&
352  (h->width != h1->width ||
353  h->height != h1->height ||
354  h->mb_width != h1->mb_width ||
355  h->mb_height != h1->mb_height ||
356  !h->ps.sps ||
357  h->ps.sps->bit_depth_luma != h1->ps.sps->bit_depth_luma ||
358  h->ps.sps->chroma_format_idc != h1->ps.sps->chroma_format_idc ||
359  h->ps.sps->vui.matrix_coeffs != h1->ps.sps->vui.matrix_coeffs)) {
360  need_reinit = 1;
361  }
362 
363  /* copy block_offset since frame_start may not be called */
364  memcpy(h->block_offset, h1->block_offset, sizeof(h->block_offset));
365 
366  // SPS/PPS
367  for (int i = 0; i < FF_ARRAY_ELEMS(h->ps.sps_list); i++)
368  av_refstruct_replace(&h->ps.sps_list[i], h1->ps.sps_list[i]);
369  for (int i = 0; i < FF_ARRAY_ELEMS(h->ps.pps_list); i++)
370  av_refstruct_replace(&h->ps.pps_list[i], h1->ps.pps_list[i]);
371 
372  av_refstruct_replace(&h->ps.pps, h1->ps.pps);
373  h->ps.sps = h1->ps.sps;
374 
375  if (need_reinit || !inited) {
376  h->width = h1->width;
377  h->height = h1->height;
378  h->mb_height = h1->mb_height;
379  h->mb_width = h1->mb_width;
380  h->mb_num = h1->mb_num;
381  h->mb_stride = h1->mb_stride;
382  h->b_stride = h1->b_stride;
383  h->x264_build = h1->x264_build;
384 
385  if (h->context_initialized || h1->context_initialized) {
386  if ((err = h264_slice_header_init(h)) < 0) {
387  av_log(h->avctx, AV_LOG_ERROR, "h264_slice_header_init() failed");
388  return err;
389  }
390  }
391 
392  /* copy block_offset since frame_start may not be called */
393  memcpy(h->block_offset, h1->block_offset, sizeof(h->block_offset));
394  }
395 
396  h->width_from_caller = h1->width_from_caller;
397  h->height_from_caller = h1->height_from_caller;
398  h->first_field = h1->first_field;
399  h->picture_structure = h1->picture_structure;
400  h->mb_aff_frame = h1->mb_aff_frame;
401  h->droppable = h1->droppable;
402 
403  for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
404  ret = ff_h264_replace_picture(&h->DPB[i], &h1->DPB[i]);
405  if (ret < 0)
406  return ret;
407  }
408 
409  h->cur_pic_ptr = REBASE_PICTURE(h1->cur_pic_ptr, h, h1);
410  ret = ff_h264_replace_picture(&h->cur_pic, &h1->cur_pic);
411  if (ret < 0)
412  return ret;
413 
414  h->enable_er = h1->enable_er;
415  h->workaround_bugs = h1->workaround_bugs;
416  h->droppable = h1->droppable;
417 
418  // extradata/NAL handling
419  h->is_avc = h1->is_avc;
420  h->nal_length_size = h1->nal_length_size;
421 
422  memcpy(&h->poc, &h1->poc, sizeof(h->poc));
423 
424  memcpy(h->short_ref, h1->short_ref, sizeof(h->short_ref));
425  memcpy(h->long_ref, h1->long_ref, sizeof(h->long_ref));
426  memcpy(h->delayed_pic, h1->delayed_pic, sizeof(h->delayed_pic));
427  memcpy(h->last_pocs, h1->last_pocs, sizeof(h->last_pocs));
428 
429  h->next_output_pic = h1->next_output_pic;
430  h->next_outputed_poc = h1->next_outputed_poc;
431  h->poc_offset = h1->poc_offset;
432 
433  memcpy(h->mmco, h1->mmco, sizeof(h->mmco));
434  h->nb_mmco = h1->nb_mmco;
435  h->mmco_reset = h1->mmco_reset;
436  h->explicit_ref_marking = h1->explicit_ref_marking;
437  h->long_ref_count = h1->long_ref_count;
438  h->short_ref_count = h1->short_ref_count;
439 
440  copy_picture_range(h->short_ref, h1->short_ref, 32, h, h1);
441  copy_picture_range(h->long_ref, h1->long_ref, 32, h, h1);
442  copy_picture_range(h->delayed_pic, h1->delayed_pic,
443  FF_ARRAY_ELEMS(h->delayed_pic), h, h1);
444 
445  h->frame_recovered = h1->frame_recovered;
446 
447  ret = ff_h2645_sei_ctx_replace(&h->sei.common, &h1->sei.common);
448  if (ret < 0)
449  return ret;
450 
451  h->sei.common.unregistered.x264_build = h1->sei.common.unregistered.x264_build;
452 
453  if (!h->cur_pic_ptr)
454  return 0;
455 
456  if (!h->droppable) {
458  h->poc.prev_poc_msb = h->poc.poc_msb;
459  h->poc.prev_poc_lsb = h->poc.poc_lsb;
460  }
461  h->poc.prev_frame_num_offset = h->poc.frame_num_offset;
462  h->poc.prev_frame_num = h->poc.frame_num;
463 
464  h->recovery_frame = h1->recovery_frame;
465  h->non_gray = h1->non_gray;
466 
467  return err;
468 }
469 
471  const AVCodecContext *src)
472 {
473  H264Context *h = dst->priv_data;
474  const H264Context *h1 = src->priv_data;
475 
476  h->is_avc = h1->is_avc;
477  h->nal_length_size = h1->nal_length_size;
478 
479  return 0;
480 }
481 
483 {
484  H264Picture *pic;
485  int i, ret;
486  const int pixel_shift = h->pixel_shift;
487 
488  if (!ff_thread_can_start_frame(h->avctx)) {
489  av_log(h->avctx, AV_LOG_ERROR, "Attempt to start a frame outside SETUP state\n");
490  return AVERROR_BUG;
491  }
492 
494  h->cur_pic_ptr = NULL;
495 
497  if (i < 0) {
498  av_log(h->avctx, AV_LOG_ERROR, "no frame buffer available\n");
499  return i;
500  }
501  pic = &h->DPB[i];
502 
503  pic->reference = h->droppable ? 0 : h->picture_structure;
504  pic->field_picture = h->picture_structure != PICT_FRAME;
505  pic->frame_num = h->poc.frame_num;
506  /*
507  * Zero key_frame here; IDR markings per slice in frame or fields are ORed
508  * in later.
509  * See decode_nal_units().
510  */
511  pic->f->flags &= ~AV_FRAME_FLAG_KEY;
512  pic->mmco_reset = 0;
513  pic->recovered = 0;
514  pic->invalid_gap = 0;
515  pic->sei_recovery_frame_cnt = h->sei.recovery_point.recovery_frame_cnt;
516 
517  pic->f->pict_type = h->slice_ctx[0].slice_type;
518 
519  pic->f->crop_left = h->crop_left;
520  pic->f->crop_right = h->crop_right;
521  pic->f->crop_top = h->crop_top;
522  pic->f->crop_bottom = h->crop_bottom;
523 
524  pic->needs_fg =
525  h->sei.common.film_grain_characteristics &&
526  h->sei.common.film_grain_characteristics->present &&
527  !h->avctx->hwaccel &&
528  !(h->avctx->export_side_data & AV_CODEC_EXPORT_DATA_FILM_GRAIN);
529 
530  if ((ret = alloc_picture(h, pic)) < 0)
531  return ret;
532 
533  h->cur_pic_ptr = pic;
534  ff_h264_unref_picture(&h->cur_pic);
535  if (CONFIG_ERROR_RESILIENCE) {
536  ff_h264_set_erpic(&h->er.cur_pic, NULL);
537  }
538 
539  if ((ret = ff_h264_ref_picture(&h->cur_pic, h->cur_pic_ptr)) < 0)
540  return ret;
541 
542  for (i = 0; i < h->nb_slice_ctx; i++) {
543  h->slice_ctx[i].linesize = h->cur_pic_ptr->f->linesize[0];
544  h->slice_ctx[i].uvlinesize = h->cur_pic_ptr->f->linesize[1];
545  }
546 
547  if (CONFIG_ERROR_RESILIENCE && h->enable_er) {
548  ff_er_frame_start(&h->er);
549  ff_h264_set_erpic(&h->er.last_pic, NULL);
550  ff_h264_set_erpic(&h->er.next_pic, NULL);
551  }
552 
553  for (i = 0; i < 16; i++) {
554  h->block_offset[i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 4 * pic->f->linesize[0] * ((scan8[i] - scan8[0]) >> 3);
555  h->block_offset[48 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 8 * pic->f->linesize[0] * ((scan8[i] - scan8[0]) >> 3);
556  }
557  for (i = 0; i < 16; i++) {
558  h->block_offset[16 + i] =
559  h->block_offset[32 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 4 * pic->f->linesize[1] * ((scan8[i] - scan8[0]) >> 3);
560  h->block_offset[48 + 16 + i] =
561  h->block_offset[48 + 32 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 8 * pic->f->linesize[1] * ((scan8[i] - scan8[0]) >> 3);
562  }
563 
564  /* We mark the current picture as non-reference after allocating it, so
565  * that if we break out due to an error it can be released automatically
566  * in the next ff_mpv_frame_start().
567  */
568  h->cur_pic_ptr->reference = 0;
569 
570  h->cur_pic_ptr->field_poc[0] = h->cur_pic_ptr->field_poc[1] = INT_MAX;
571 
572  h->next_output_pic = NULL;
573 
574  h->postpone_filter = 0;
575 
576  h->mb_aff_frame = h->ps.sps->mb_aff && (h->picture_structure == PICT_FRAME);
577 
578  if (h->sei.common.unregistered.x264_build >= 0)
579  h->x264_build = h->sei.common.unregistered.x264_build;
580 
581  assert(h->cur_pic_ptr->long_ref == 0);
582 
583  return 0;
584 }
585 
587  const uint8_t *src_y,
588  const uint8_t *src_cb, const uint8_t *src_cr,
589  int linesize, int uvlinesize,
590  int simple)
591 {
592  uint8_t *top_border;
593  int top_idx = 1;
594  const int pixel_shift = h->pixel_shift;
595  int chroma444 = CHROMA444(h);
596  int chroma422 = CHROMA422(h);
597 
598  src_y -= linesize;
599  src_cb -= uvlinesize;
600  src_cr -= uvlinesize;
601 
602  if (!simple && FRAME_MBAFF(h)) {
603  if (sl->mb_y & 1) {
604  if (!MB_MBAFF(sl)) {
605  top_border = sl->top_borders[0][sl->mb_x];
606  AV_COPY128(top_border, src_y + 15 * linesize);
607  if (pixel_shift)
608  AV_COPY128(top_border + 16, src_y + 15 * linesize + 16);
609  if (simple || !CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
610  if (chroma444) {
611  if (pixel_shift) {
612  AV_COPY128(top_border + 32, src_cb + 15 * uvlinesize);
613  AV_COPY128(top_border + 48, src_cb + 15 * uvlinesize + 16);
614  AV_COPY128(top_border + 64, src_cr + 15 * uvlinesize);
615  AV_COPY128(top_border + 80, src_cr + 15 * uvlinesize + 16);
616  } else {
617  AV_COPY128(top_border + 16, src_cb + 15 * uvlinesize);
618  AV_COPY128(top_border + 32, src_cr + 15 * uvlinesize);
619  }
620  } else if (chroma422) {
621  if (pixel_shift) {
622  AV_COPY128(top_border + 32, src_cb + 15 * uvlinesize);
623  AV_COPY128(top_border + 48, src_cr + 15 * uvlinesize);
624  } else {
625  AV_COPY64(top_border + 16, src_cb + 15 * uvlinesize);
626  AV_COPY64(top_border + 24, src_cr + 15 * uvlinesize);
627  }
628  } else {
629  if (pixel_shift) {
630  AV_COPY128(top_border + 32, src_cb + 7 * uvlinesize);
631  AV_COPY128(top_border + 48, src_cr + 7 * uvlinesize);
632  } else {
633  AV_COPY64(top_border + 16, src_cb + 7 * uvlinesize);
634  AV_COPY64(top_border + 24, src_cr + 7 * uvlinesize);
635  }
636  }
637  }
638  }
639  } else if (MB_MBAFF(sl)) {
640  top_idx = 0;
641  } else
642  return;
643  }
644 
645  top_border = sl->top_borders[top_idx][sl->mb_x];
646  /* There are two lines saved, the line above the top macroblock
647  * of a pair, and the line above the bottom macroblock. */
648  AV_COPY128(top_border, src_y + 16 * linesize);
649  if (pixel_shift)
650  AV_COPY128(top_border + 16, src_y + 16 * linesize + 16);
651 
652  if (simple || !CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
653  if (chroma444) {
654  if (pixel_shift) {
655  AV_COPY128(top_border + 32, src_cb + 16 * linesize);
656  AV_COPY128(top_border + 48, src_cb + 16 * linesize + 16);
657  AV_COPY128(top_border + 64, src_cr + 16 * linesize);
658  AV_COPY128(top_border + 80, src_cr + 16 * linesize + 16);
659  } else {
660  AV_COPY128(top_border + 16, src_cb + 16 * linesize);
661  AV_COPY128(top_border + 32, src_cr + 16 * linesize);
662  }
663  } else if (chroma422) {
664  if (pixel_shift) {
665  AV_COPY128(top_border + 32, src_cb + 16 * uvlinesize);
666  AV_COPY128(top_border + 48, src_cr + 16 * uvlinesize);
667  } else {
668  AV_COPY64(top_border + 16, src_cb + 16 * uvlinesize);
669  AV_COPY64(top_border + 24, src_cr + 16 * uvlinesize);
670  }
671  } else {
672  if (pixel_shift) {
673  AV_COPY128(top_border + 32, src_cb + 8 * uvlinesize);
674  AV_COPY128(top_border + 48, src_cr + 8 * uvlinesize);
675  } else {
676  AV_COPY64(top_border + 16, src_cb + 8 * uvlinesize);
677  AV_COPY64(top_border + 24, src_cr + 8 * uvlinesize);
678  }
679  }
680  }
681 }
682 
683 /**
684  * Initialize implicit_weight table.
685  * @param field 0/1 initialize the weight for interlaced MBAFF
686  * -1 initializes the rest
687  */
689 {
690  int ref0, ref1, i, cur_poc, ref_start, ref_count0, ref_count1;
691 
692  for (i = 0; i < 2; i++) {
693  sl->pwt.luma_weight_flag[i] = 0;
694  sl->pwt.chroma_weight_flag[i] = 0;
695  }
696 
697  if (field < 0) {
698  if (h->picture_structure == PICT_FRAME) {
699  cur_poc = h->cur_pic_ptr->poc;
700  } else {
701  cur_poc = h->cur_pic_ptr->field_poc[h->picture_structure - 1];
702  }
703  if (sl->ref_count[0] == 1 && sl->ref_count[1] == 1 && !FRAME_MBAFF(h) &&
704  sl->ref_list[0][0].poc + (int64_t)sl->ref_list[1][0].poc == 2LL * cur_poc) {
705  sl->pwt.use_weight = 0;
706  sl->pwt.use_weight_chroma = 0;
707  return;
708  }
709  ref_start = 0;
710  ref_count0 = sl->ref_count[0];
711  ref_count1 = sl->ref_count[1];
712  } else {
713  cur_poc = h->cur_pic_ptr->field_poc[field];
714  ref_start = 16;
715  ref_count0 = 16 + 2 * sl->ref_count[0];
716  ref_count1 = 16 + 2 * sl->ref_count[1];
717  }
718 
719  sl->pwt.use_weight = 2;
720  sl->pwt.use_weight_chroma = 2;
721  sl->pwt.luma_log2_weight_denom = 5;
723 
724  for (ref0 = ref_start; ref0 < ref_count0; ref0++) {
725  int64_t poc0 = sl->ref_list[0][ref0].poc;
726  for (ref1 = ref_start; ref1 < ref_count1; ref1++) {
727  int w = 32;
728  if (!sl->ref_list[0][ref0].parent->long_ref && !sl->ref_list[1][ref1].parent->long_ref) {
729  int poc1 = sl->ref_list[1][ref1].poc;
730  int td = av_clip_int8(poc1 - poc0);
731  if (td) {
732  int tb = av_clip_int8(cur_poc - poc0);
733  int tx = (16384 + (FFABS(td) >> 1)) / td;
734  int dist_scale_factor = (tb * tx + 32) >> 8;
735  if (dist_scale_factor >= -64 && dist_scale_factor <= 128)
736  w = 64 - dist_scale_factor;
737  }
738  }
739  if (field < 0) {
740  sl->pwt.implicit_weight[ref0][ref1][0] =
741  sl->pwt.implicit_weight[ref0][ref1][1] = w;
742  } else {
743  sl->pwt.implicit_weight[ref0][ref1][field] = w;
744  }
745  }
746  }
747 }
748 
749 /**
750  * initialize scan tables
751  */
753 {
754  int i;
755  for (i = 0; i < 16; i++) {
756 #define TRANSPOSE(x) ((x) >> 2) | (((x) << 2) & 0xF)
757  h->zigzag_scan[i] = TRANSPOSE(ff_zigzag_scan[i]);
758  h->field_scan[i] = TRANSPOSE(field_scan[i]);
759 #undef TRANSPOSE
760  }
761  for (i = 0; i < 64; i++) {
762 #define TRANSPOSE(x) ((x) >> 3) | (((x) & 7) << 3)
763  h->zigzag_scan8x8[i] = TRANSPOSE(ff_zigzag_direct[i]);
764  h->zigzag_scan8x8_cavlc[i] = TRANSPOSE(zigzag_scan8x8_cavlc[i]);
765  h->field_scan8x8[i] = TRANSPOSE(field_scan8x8[i]);
766  h->field_scan8x8_cavlc[i] = TRANSPOSE(field_scan8x8_cavlc[i]);
767 #undef TRANSPOSE
768  }
769  if (h->ps.sps->transform_bypass) { // FIXME same ugly
770  memcpy(h->zigzag_scan_q0 , ff_zigzag_scan , sizeof(h->zigzag_scan_q0 ));
771  memcpy(h->zigzag_scan8x8_q0 , ff_zigzag_direct , sizeof(h->zigzag_scan8x8_q0 ));
772  memcpy(h->zigzag_scan8x8_cavlc_q0 , zigzag_scan8x8_cavlc , sizeof(h->zigzag_scan8x8_cavlc_q0));
773  memcpy(h->field_scan_q0 , field_scan , sizeof(h->field_scan_q0 ));
774  memcpy(h->field_scan8x8_q0 , field_scan8x8 , sizeof(h->field_scan8x8_q0 ));
775  memcpy(h->field_scan8x8_cavlc_q0 , field_scan8x8_cavlc , sizeof(h->field_scan8x8_cavlc_q0 ));
776  } else {
777  memcpy(h->zigzag_scan_q0 , h->zigzag_scan , sizeof(h->zigzag_scan_q0 ));
778  memcpy(h->zigzag_scan8x8_q0 , h->zigzag_scan8x8 , sizeof(h->zigzag_scan8x8_q0 ));
779  memcpy(h->zigzag_scan8x8_cavlc_q0 , h->zigzag_scan8x8_cavlc , sizeof(h->zigzag_scan8x8_cavlc_q0));
780  memcpy(h->field_scan_q0 , h->field_scan , sizeof(h->field_scan_q0 ));
781  memcpy(h->field_scan8x8_q0 , h->field_scan8x8 , sizeof(h->field_scan8x8_q0 ));
782  memcpy(h->field_scan8x8_cavlc_q0 , h->field_scan8x8_cavlc , sizeof(h->field_scan8x8_cavlc_q0 ));
783  }
784 }
785 
/**
 * Build the candidate pixel-format list matching the active SPS (bit
 * depth and chroma sampling) and pick one.
 *
 * Hwaccel formats are appended before the corresponding software format,
 * so their position defines selection priority for ff_get_format().
 *
 * @param force_callback if zero and the currently negotiated avctx
 *                       pix_fmt is among the candidates, keep it without
 *                       re-invoking the get_format() callback
 * @return selected pixel format, or AVERROR_INVALIDDATA for an
 *         unsupported luma bit depth
 */
static enum AVPixelFormat get_pixel_format(H264Context *h, int force_callback)
{
/* number of hwaccel candidate slots, counted from the build config
 * (D3D11VA contributes two entries: legacy VLD + AV_PIX_FMT_D3D11) */
#define HWACCEL_MAX (CONFIG_H264_DXVA2_HWACCEL + \
                     (CONFIG_H264_D3D11VA_HWACCEL * 2) + \
                     CONFIG_H264_D3D12VA_HWACCEL + \
                     CONFIG_H264_NVDEC_HWACCEL + \
                     CONFIG_H264_VAAPI_HWACCEL + \
                     CONFIG_H264_VIDEOTOOLBOX_HWACCEL + \
                     CONFIG_H264_VDPAU_HWACCEL + \
                     CONFIG_H264_VULKAN_HWACCEL)
    // +2: one software format plus the AV_PIX_FMT_NONE terminator
    enum AVPixelFormat pix_fmts[HWACCEL_MAX + 2], *fmt = pix_fmts;

    switch (h->ps.sps->bit_depth_luma) {
    case 9:
        if (CHROMA444(h)) {
            if (h->avctx->colorspace == AVCOL_SPC_RGB) {
                *fmt++ = AV_PIX_FMT_GBRP9;
            } else
                *fmt++ = AV_PIX_FMT_YUV444P9;
        } else if (CHROMA422(h))
            *fmt++ = AV_PIX_FMT_YUV422P9;
        else
            *fmt++ = AV_PIX_FMT_YUV420P9;
        break;
    case 10:
#if CONFIG_H264_VIDEOTOOLBOX_HWACCEL
        if (h->avctx->colorspace != AVCOL_SPC_RGB)
            *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
#endif
#if CONFIG_H264_VULKAN_HWACCEL
        *fmt++ = AV_PIX_FMT_VULKAN;
#endif
#if CONFIG_H264_NVDEC_HWACCEL
        *fmt++ = AV_PIX_FMT_CUDA;
#endif
        if (CHROMA444(h)) {
            if (h->avctx->colorspace == AVCOL_SPC_RGB) {
                *fmt++ = AV_PIX_FMT_GBRP10;
            } else
                *fmt++ = AV_PIX_FMT_YUV444P10;
        } else if (CHROMA422(h))
            *fmt++ = AV_PIX_FMT_YUV422P10;
        else {
#if CONFIG_H264_VAAPI_HWACCEL
            // Just add as candidate. Whether VAProfileH264High10 usable or
            // not is decided by vaapi_decode_make_config() defined in FFmpeg
            // and vaQueryCodingProfile() defined in libva.
            *fmt++ = AV_PIX_FMT_VAAPI;
#endif
            *fmt++ = AV_PIX_FMT_YUV420P10;
        }
        break;
    case 12:
#if CONFIG_H264_VULKAN_HWACCEL
        *fmt++ = AV_PIX_FMT_VULKAN;
#endif
        if (CHROMA444(h)) {
            if (h->avctx->colorspace == AVCOL_SPC_RGB) {
                *fmt++ = AV_PIX_FMT_GBRP12;
            } else
                *fmt++ = AV_PIX_FMT_YUV444P12;
        } else if (CHROMA422(h))
            *fmt++ = AV_PIX_FMT_YUV422P12;
        else
            *fmt++ = AV_PIX_FMT_YUV420P12;
        break;
    case 14:
        if (CHROMA444(h)) {
            if (h->avctx->colorspace == AVCOL_SPC_RGB) {
                *fmt++ = AV_PIX_FMT_GBRP14;
            } else
                *fmt++ = AV_PIX_FMT_YUV444P14;
        } else if (CHROMA422(h))
            *fmt++ = AV_PIX_FMT_YUV422P14;
        else
            *fmt++ = AV_PIX_FMT_YUV420P14;
        break;
    case 8:
#if CONFIG_H264_VDPAU_HWACCEL
        *fmt++ = AV_PIX_FMT_VDPAU;
#endif
#if CONFIG_H264_VULKAN_HWACCEL
        *fmt++ = AV_PIX_FMT_VULKAN;
#endif
#if CONFIG_H264_NVDEC_HWACCEL
        *fmt++ = AV_PIX_FMT_CUDA;
#endif
#if CONFIG_H264_VIDEOTOOLBOX_HWACCEL
        if (h->avctx->colorspace != AVCOL_SPC_RGB)
            *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
#endif
        if (CHROMA444(h)) {
            if (h->avctx->colorspace == AVCOL_SPC_RGB)
                *fmt++ = AV_PIX_FMT_GBRP;
            else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
                *fmt++ = AV_PIX_FMT_YUVJ444P;
            else
                *fmt++ = AV_PIX_FMT_YUV444P;
        } else if (CHROMA422(h)) {
            if (h->avctx->color_range == AVCOL_RANGE_JPEG)
                *fmt++ = AV_PIX_FMT_YUVJ422P;
            else
                *fmt++ = AV_PIX_FMT_YUV422P;
        } else {
#if CONFIG_H264_DXVA2_HWACCEL
            *fmt++ = AV_PIX_FMT_DXVA2_VLD;
#endif
#if CONFIG_H264_D3D11VA_HWACCEL
            *fmt++ = AV_PIX_FMT_D3D11VA_VLD;
            *fmt++ = AV_PIX_FMT_D3D11;
#endif
#if CONFIG_H264_D3D12VA_HWACCEL
            *fmt++ = AV_PIX_FMT_D3D12;
#endif
#if CONFIG_H264_VAAPI_HWACCEL
            *fmt++ = AV_PIX_FMT_VAAPI;
#endif
            if (h->avctx->color_range == AVCOL_RANGE_JPEG)
                *fmt++ = AV_PIX_FMT_YUVJ420P;
            else
                *fmt++ = AV_PIX_FMT_YUV420P;
        }
        break;
    default:
        av_log(h->avctx, AV_LOG_ERROR,
               "Unsupported bit depth %d\n", h->ps.sps->bit_depth_luma);
        return AVERROR_INVALIDDATA;
    }

    *fmt = AV_PIX_FMT_NONE;

    // keep the already-negotiated format when possible to avoid a
    // get_format() round-trip
    for (int i = 0; pix_fmts[i] != AV_PIX_FMT_NONE; i++)
        if (pix_fmts[i] == h->avctx->pix_fmt && !force_callback)
            return pix_fmts[i];
    return ff_get_format(h->avctx, pix_fmts);
}
922 
923 /* export coded and cropped frame dimensions to AVCodecContext */
925 {
926  const SPS *sps = h->ps.sps;
927  int cr = sps->crop_right;
928  int cl = sps->crop_left;
929  int ct = sps->crop_top;
930  int cb = sps->crop_bottom;
931  int width = h->width - (cr + cl);
932  int height = h->height - (ct + cb);
933  av_assert0(sps->crop_right + sps->crop_left < (unsigned)h->width);
934  av_assert0(sps->crop_top + sps->crop_bottom < (unsigned)h->height);
935 
936  /* handle container cropping */
937  if (h->width_from_caller > 0 && h->height_from_caller > 0 &&
938  !sps->crop_top && !sps->crop_left &&
939  FFALIGN(h->width_from_caller, 16) == FFALIGN(width, 16) &&
940  FFALIGN(h->height_from_caller, 16) == FFALIGN(height, 16) &&
941  h->width_from_caller <= width &&
942  h->height_from_caller <= height) {
943  width = h->width_from_caller;
944  height = h->height_from_caller;
945  cl = 0;
946  ct = 0;
947  cr = h->width - width;
948  cb = h->height - height;
949  } else {
950  h->width_from_caller = 0;
951  h->height_from_caller = 0;
952  }
953 
954  h->avctx->coded_width = h->width;
955  h->avctx->coded_height = h->height;
956  h->avctx->width = width;
957  h->avctx->height = height;
958  h->crop_right = cr;
959  h->crop_left = cl;
960  h->crop_top = ct;
961  h->crop_bottom = cb;
962 }
963 
965 {
966  const SPS *sps = h->ps.sps;
967  int i, ret;
968 
969  if (!sps) {
971  goto fail;
972  }
973 
974  ff_set_sar(h->avctx, sps->vui.sar);
975  av_pix_fmt_get_chroma_sub_sample(h->avctx->pix_fmt,
976  &h->chroma_x_shift, &h->chroma_y_shift);
977 
978  if (sps->timing_info_present_flag) {
979  int64_t den = sps->time_scale;
980  if (h->x264_build < 44U)
981  den *= 2;
982  av_reduce(&h->avctx->framerate.den, &h->avctx->framerate.num,
983  sps->num_units_in_tick * 2, den, 1 << 30);
984  }
985 
987 
988  h->first_field = 0;
989  h->prev_interlaced_frame = 1;
990 
993  if (ret < 0) {
994  av_log(h->avctx, AV_LOG_ERROR, "Could not allocate memory\n");
995  goto fail;
996  }
997 
998  if (sps->bit_depth_luma < 8 || sps->bit_depth_luma > 14 ||
999  sps->bit_depth_luma == 11 || sps->bit_depth_luma == 13
1000  ) {
1001  av_log(h->avctx, AV_LOG_ERROR, "Unsupported bit depth %d\n",
1002  sps->bit_depth_luma);
1004  goto fail;
1005  }
1006 
1007  h->cur_bit_depth_luma =
1008  h->avctx->bits_per_raw_sample = sps->bit_depth_luma;
1009  h->cur_chroma_format_idc = sps->chroma_format_idc;
1010  h->pixel_shift = sps->bit_depth_luma > 8;
1011  h->chroma_format_idc = sps->chroma_format_idc;
1012  h->bit_depth_luma = sps->bit_depth_luma;
1013 
1014  ff_h264dsp_init(&h->h264dsp, sps->bit_depth_luma,
1015  sps->chroma_format_idc);
1016  ff_h264chroma_init(&h->h264chroma, sps->bit_depth_chroma);
1017  ff_h264qpel_init(&h->h264qpel, sps->bit_depth_luma);
1018  ff_h264_pred_init(&h->hpc, AV_CODEC_ID_H264, sps->bit_depth_luma,
1019  sps->chroma_format_idc);
1020  ff_videodsp_init(&h->vdsp, sps->bit_depth_luma);
1021 
1022  if (!HAVE_THREADS || !(h->avctx->active_thread_type & FF_THREAD_SLICE)) {
1023  ff_h264_slice_context_init(h, &h->slice_ctx[0]);
1024  } else {
1025  for (i = 0; i < h->nb_slice_ctx; i++) {
1026  H264SliceContext *sl = &h->slice_ctx[i];
1027 
1028  sl->h264 = h;
1029  sl->intra4x4_pred_mode = h->intra4x4_pred_mode + i * 8 * 2 * h->mb_stride;
1030  sl->mvd_table[0] = h->mvd_table[0] + i * 8 * 2 * h->mb_stride;
1031  sl->mvd_table[1] = h->mvd_table[1] + i * 8 * 2 * h->mb_stride;
1032 
1034  }
1035  }
1036 
1037  h->context_initialized = 1;
1038 
1039  return 0;
1040 fail:
1042  h->context_initialized = 0;
1043  return ret;
1044 }
1045 
1047 {
1048  switch (a) {
1052  default:
1053  return a;
1054  }
1055 }
1056 
1057 static int h264_init_ps(H264Context *h, const H264SliceContext *sl, int first_slice)
1058 {
1059  const SPS *sps;
1060  int needs_reinit = 0, must_reinit, ret;
1061 
1062  if (first_slice)
1063  av_refstruct_replace(&h->ps.pps, h->ps.pps_list[sl->pps_id]);
1064 
1065  if (h->ps.sps != h->ps.pps->sps) {
1066  h->ps.sps = h->ps.pps->sps;
1067 
1068  if (h->mb_width != h->ps.sps->mb_width ||
1069  h->mb_height != h->ps.sps->mb_height ||
1070  h->cur_bit_depth_luma != h->ps.sps->bit_depth_luma ||
1071  h->cur_chroma_format_idc != h->ps.sps->chroma_format_idc
1072  )
1073  needs_reinit = 1;
1074 
1075  if (h->bit_depth_luma != h->ps.sps->bit_depth_luma ||
1076  h->chroma_format_idc != h->ps.sps->chroma_format_idc)
1077  needs_reinit = 1;
1078  }
1079  sps = h->ps.sps;
1080 
1081  must_reinit = (h->context_initialized &&
1082  ( 16*sps->mb_width != h->avctx->coded_width
1083  || 16*sps->mb_height != h->avctx->coded_height
1084  || h->cur_bit_depth_luma != sps->bit_depth_luma
1085  || h->cur_chroma_format_idc != sps->chroma_format_idc
1086  || h->mb_width != sps->mb_width
1087  || h->mb_height != sps->mb_height
1088  ));
1089  if (h->avctx->pix_fmt == AV_PIX_FMT_NONE
1090  || (non_j_pixfmt(h->avctx->pix_fmt) != non_j_pixfmt(get_pixel_format(h, 0))))
1091  must_reinit = 1;
1092 
1093  if (first_slice && av_cmp_q(sps->vui.sar, h->avctx->sample_aspect_ratio))
1094  must_reinit = 1;
1095 
1096  if (!h->setup_finished) {
1097  h->avctx->profile = ff_h264_get_profile(sps);
1098  h->avctx->level = sps->level_idc;
1099  h->avctx->refs = sps->ref_frame_count;
1100 
1101  h->mb_width = sps->mb_width;
1102  h->mb_height = sps->mb_height;
1103  h->mb_num = h->mb_width * h->mb_height;
1104  h->mb_stride = h->mb_width + 1;
1105 
1106  h->b_stride = h->mb_width * 4;
1107 
1108  h->chroma_y_shift = sps->chroma_format_idc <= 1; // 400 uses yuv420p
1109 
1110  h->width = 16 * h->mb_width;
1111  h->height = 16 * h->mb_height;
1112 
1113  init_dimensions(h);
1114 
1115  if (sps->vui.video_signal_type_present_flag) {
1116  h->avctx->color_range = sps->vui.video_full_range_flag > 0 ? AVCOL_RANGE_JPEG
1117  : AVCOL_RANGE_MPEG;
1118  if (sps->vui.colour_description_present_flag) {
1119  if (h->avctx->colorspace != sps->vui.matrix_coeffs)
1120  needs_reinit = 1;
1121  h->avctx->color_primaries = sps->vui.colour_primaries;
1122  h->avctx->color_trc = sps->vui.transfer_characteristics;
1123  h->avctx->colorspace = sps->vui.matrix_coeffs;
1124  }
1125  }
1126 
1127  if (h->sei.common.alternative_transfer.present &&
1128  av_color_transfer_name(h->sei.common.alternative_transfer.preferred_transfer_characteristics) &&
1129  h->sei.common.alternative_transfer.preferred_transfer_characteristics != AVCOL_TRC_UNSPECIFIED) {
1130  h->avctx->color_trc = h->sei.common.alternative_transfer.preferred_transfer_characteristics;
1131  }
1132  }
1133  h->avctx->chroma_sample_location = sps->vui.chroma_location;
1134 
1135  if (!h->context_initialized || must_reinit || needs_reinit) {
1136  int flush_changes = h->context_initialized;
1137  h->context_initialized = 0;
1138  if (sl != h->slice_ctx) {
1139  av_log(h->avctx, AV_LOG_ERROR,
1140  "changing width %d -> %d / height %d -> %d on "
1141  "slice %d\n",
1142  h->width, h->avctx->coded_width,
1143  h->height, h->avctx->coded_height,
1144  h->current_slice + 1);
1145  return AVERROR_INVALIDDATA;
1146  }
1147 
1148  av_assert1(first_slice);
1149 
1150  if (flush_changes)
1152 
1153  if ((ret = get_pixel_format(h, 1)) < 0)
1154  return ret;
1155  h->avctx->pix_fmt = ret;
1156 
1157  av_log(h->avctx, AV_LOG_VERBOSE, "Reinit context to %dx%d, "
1158  "pix_fmt: %s\n", h->width, h->height, av_get_pix_fmt_name(h->avctx->pix_fmt));
1159 
1160  if ((ret = h264_slice_header_init(h)) < 0) {
1161  av_log(h->avctx, AV_LOG_ERROR,
1162  "h264_slice_header_init() failed\n");
1163  return ret;
1164  }
1165  }
1166 
1167  return 0;
1168 }
1169 
1171 {
1172  const SPS *sps = h->ps.sps;
1173  H264Picture *cur = h->cur_pic_ptr;
1174  AVFrame *out = cur->f;
1175  int interlaced_frame = 0, top_field_first = 0;
1176  int ret;
1177 
1178  out->flags &= ~AV_FRAME_FLAG_INTERLACED;
1179  out->repeat_pict = 0;
1180 
1181  /* Signal interlacing information externally. */
1182  /* Prioritize picture timing SEI information over used
1183  * decoding process if it exists. */
1184  if (h->sei.picture_timing.present) {
1185  int ret = ff_h264_sei_process_picture_timing(&h->sei.picture_timing, sps,
1186  h->avctx);
1187  if (ret < 0) {
1188  av_log(h->avctx, AV_LOG_ERROR, "Error processing a picture timing SEI\n");
1189  if (h->avctx->err_recognition & AV_EF_EXPLODE)
1190  return ret;
1191  h->sei.picture_timing.present = 0;
1192  }
1193  }
1194 
1195  if (sps->pic_struct_present_flag && h->sei.picture_timing.present) {
1196  const H264SEIPictureTiming *pt = &h->sei.picture_timing;
1197  switch (pt->pic_struct) {
1199  break;
1202  interlaced_frame = 1;
1203  break;
1207  interlaced_frame = 1;
1208  else
1209  // try to flag soft telecine progressive
1210  interlaced_frame = !!h->prev_interlaced_frame;
1211  break;
1214  /* Signal the possibility of telecined film externally
1215  * (pic_struct 5,6). From these hints, let the applications
1216  * decide if they apply deinterlacing. */
1217  out->repeat_pict = 1;
1218  break;
1220  out->repeat_pict = 2;
1221  break;
1223  out->repeat_pict = 4;
1224  break;
1225  }
1226 
1227  if ((pt->ct_type & 3) &&
1228  pt->pic_struct <= H264_SEI_PIC_STRUCT_BOTTOM_TOP)
1229  interlaced_frame = ((pt->ct_type & (1 << 1)) != 0);
1230  } else {
1231  /* Derive interlacing flag from used decoding process. */
1232  interlaced_frame = !!FIELD_OR_MBAFF_PICTURE(h);
1233  }
1234  h->prev_interlaced_frame = interlaced_frame;
1235 
1236  if (cur->field_poc[0] != cur->field_poc[1]) {
1237  /* Derive top_field_first from field pocs. */
1238  top_field_first = (cur->field_poc[0] < cur->field_poc[1]);
1239  } else {
1240  if (sps->pic_struct_present_flag && h->sei.picture_timing.present) {
1241  /* Use picture timing SEI information. Even if it is a
1242  * information of a past frame, better than nothing. */
1243  if (h->sei.picture_timing.pic_struct == H264_SEI_PIC_STRUCT_TOP_BOTTOM ||
1244  h->sei.picture_timing.pic_struct == H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP)
1245  top_field_first = 1;
1246  } else if (interlaced_frame) {
1247  /* Default to top field first when pic_struct_present_flag
1248  * is not set but interlaced frame detected */
1249  top_field_first = 1;
1250  } // else
1251  /* Most likely progressive */
1252  }
1253 
1254  out->flags |= (AV_FRAME_FLAG_INTERLACED * interlaced_frame) |
1255  (AV_FRAME_FLAG_TOP_FIELD_FIRST * top_field_first);
1256 
1257  ret = ff_h2645_sei_to_frame(out, &h->sei.common, AV_CODEC_ID_H264, h->avctx,
1258  &sps->vui, sps->bit_depth_luma, sps->bit_depth_chroma,
1259  cur->poc + (unsigned)(h->poc_offset << 5));
1260  if (ret < 0)
1261  return ret;
1262 
1263  if (h->sei.picture_timing.timecode_cnt > 0) {
1264  uint32_t *tc_sd;
1265  char tcbuf[AV_TIMECODE_STR_SIZE];
1266  AVFrameSideData *tcside;
1268  sizeof(uint32_t)*4, &tcside);
1269  if (ret < 0)
1270  return ret;
1271 
1272  if (tcside) {
1273  tc_sd = (uint32_t*)tcside->data;
1274  tc_sd[0] = h->sei.picture_timing.timecode_cnt;
1275 
1276  for (int i = 0; i < tc_sd[0]; i++) {
1277  int drop = h->sei.picture_timing.timecode[i].dropframe;
1278  int hh = h->sei.picture_timing.timecode[i].hours;
1279  int mm = h->sei.picture_timing.timecode[i].minutes;
1280  int ss = h->sei.picture_timing.timecode[i].seconds;
1281  int ff = h->sei.picture_timing.timecode[i].frame;
1282 
1283  tc_sd[i + 1] = av_timecode_get_smpte(h->avctx->framerate, drop, hh, mm, ss, ff);
1284  av_timecode_make_smpte_tc_string2(tcbuf, h->avctx->framerate, tc_sd[i + 1], 0, 0);
1285  av_dict_set(&out->metadata, "timecode", tcbuf, 0);
1286  }
1287  }
1288  h->sei.picture_timing.timecode_cnt = 0;
1289  }
1290 
1291  return 0;
1292 }
1293 
1295 {
1296  const SPS *sps = h->ps.sps;
1297  H264Picture *out = h->cur_pic_ptr;
1298  H264Picture *cur = h->cur_pic_ptr;
1299  int i, pics, out_of_order, out_idx;
1300 
1301  cur->mmco_reset = h->mmco_reset;
1302  h->mmco_reset = 0;
1303 
1304  if (sps->bitstream_restriction_flag ||
1305  h->avctx->strict_std_compliance >= FF_COMPLIANCE_STRICT) {
1306  h->avctx->has_b_frames = FFMAX(h->avctx->has_b_frames, sps->num_reorder_frames);
1307  }
1308 
1309  for (i = 0; 1; i++) {
1310  if(i == H264_MAX_DPB_FRAMES || cur->poc < h->last_pocs[i]){
1311  if(i)
1312  h->last_pocs[i-1] = cur->poc;
1313  break;
1314  } else if(i) {
1315  h->last_pocs[i-1]= h->last_pocs[i];
1316  }
1317  }
1318  out_of_order = H264_MAX_DPB_FRAMES - i;
1319  if( cur->f->pict_type == AV_PICTURE_TYPE_B
1320  || (h->last_pocs[H264_MAX_DPB_FRAMES-2] > INT_MIN && h->last_pocs[H264_MAX_DPB_FRAMES-1] - (int64_t)h->last_pocs[H264_MAX_DPB_FRAMES-2] > 2))
1321  out_of_order = FFMAX(out_of_order, 1);
1322  if (out_of_order == H264_MAX_DPB_FRAMES) {
1323  av_log(h->avctx, AV_LOG_VERBOSE, "Invalid POC %d<%d\n", cur->poc, h->last_pocs[0]);
1324  for (i = 1; i < H264_MAX_DPB_FRAMES; i++)
1325  h->last_pocs[i] = INT_MIN;
1326  h->last_pocs[0] = cur->poc;
1327  cur->mmco_reset = 1;
1328  } else if(h->avctx->has_b_frames < out_of_order && !sps->bitstream_restriction_flag){
1329  int loglevel = h->avctx->frame_num > 1 ? AV_LOG_WARNING : AV_LOG_VERBOSE;
1330  av_log(h->avctx, loglevel, "Increasing reorder buffer to %d\n", out_of_order);
1331  h->avctx->has_b_frames = out_of_order;
1332  }
1333 
1334  pics = 0;
1335  while (h->delayed_pic[pics])
1336  pics++;
1337 
1339 
1340  h->delayed_pic[pics++] = cur;
1341  if (cur->reference == 0)
1342  cur->reference = DELAYED_PIC_REF;
1343 
1344  out = h->delayed_pic[0];
1345  out_idx = 0;
1346  for (i = 1; h->delayed_pic[i] &&
1347  !(h->delayed_pic[i]->f->flags & AV_FRAME_FLAG_KEY) &&
1348  !h->delayed_pic[i]->mmco_reset;
1349  i++)
1350  if (h->delayed_pic[i]->poc < out->poc) {
1351  out = h->delayed_pic[i];
1352  out_idx = i;
1353  }
1354  if (h->avctx->has_b_frames == 0 &&
1355  ((h->delayed_pic[0]->f->flags & AV_FRAME_FLAG_KEY) || h->delayed_pic[0]->mmco_reset))
1356  h->next_outputed_poc = INT_MIN;
1357  out_of_order = out->poc < h->next_outputed_poc;
1358 
1359  if (out_of_order || pics > h->avctx->has_b_frames) {
1360  out->reference &= ~DELAYED_PIC_REF;
1361  for (i = out_idx; h->delayed_pic[i]; i++)
1362  h->delayed_pic[i] = h->delayed_pic[i + 1];
1363  }
1364  if (!out_of_order && pics > h->avctx->has_b_frames) {
1365  h->next_output_pic = out;
1366  if (out_idx == 0 && h->delayed_pic[0] && ((h->delayed_pic[0]->f->flags & AV_FRAME_FLAG_KEY) || h->delayed_pic[0]->mmco_reset)) {
1367  h->next_outputed_poc = INT_MIN;
1368  } else
1369  h->next_outputed_poc = out->poc;
1370 
1371  // We have reached an recovery point and all frames after it in
1372  // display order are "recovered".
1373  h->frame_recovered |= out->recovered;
1374 
1375  out->recovered |= h->frame_recovered & FRAME_RECOVERED_SEI;
1376 
1377  if (!out->recovered) {
1378  if (!(h->avctx->flags & AV_CODEC_FLAG_OUTPUT_CORRUPT) &&
1379  !(h->avctx->flags2 & AV_CODEC_FLAG2_SHOW_ALL)) {
1380  h->next_output_pic = NULL;
1381  } else {
1382  out->f->flags |= AV_FRAME_FLAG_CORRUPT;
1383  }
1384  }
1385  } else {
1386  av_log(h->avctx, AV_LOG_DEBUG, "no picture %s\n", out_of_order ? "ooo" : "");
1387  }
1388 
1389  return 0;
1390 }
1391 
1392 /* This function is called right after decoding the slice header for a first
1393  * slice in a field (or a frame). It decides whether we are decoding a new frame
1394  * or a second field in a pair and does the necessary setup.
1395  */
1397  const H2645NAL *nal, int first_slice)
1398 {
1399  int i;
1400  const SPS *sps;
1401 
1402  int last_pic_structure, last_pic_droppable, ret;
1403 
1404  ret = h264_init_ps(h, sl, first_slice);
1405  if (ret < 0)
1406  return ret;
1407 
1408  sps = h->ps.sps;
1409 
1410  if (sps->bitstream_restriction_flag &&
1411  h->avctx->has_b_frames < sps->num_reorder_frames) {
1412  h->avctx->has_b_frames = sps->num_reorder_frames;
1413  }
1414 
1415  last_pic_droppable = h->droppable;
1416  last_pic_structure = h->picture_structure;
1417  h->droppable = (nal->ref_idc == 0);
1418  h->picture_structure = sl->picture_structure;
1419 
1420  h->poc.frame_num = sl->frame_num;
1421  h->poc.poc_lsb = sl->poc_lsb;
1422  h->poc.delta_poc_bottom = sl->delta_poc_bottom;
1423  h->poc.delta_poc[0] = sl->delta_poc[0];
1424  h->poc.delta_poc[1] = sl->delta_poc[1];
1425 
1426  if (nal->type == H264_NAL_IDR_SLICE)
1427  h->poc_offset = sl->idr_pic_id;
1428  else if (h->picture_intra_only)
1429  h->poc_offset = 0;
1430 
1431  /* Shorten frame num gaps so we don't have to allocate reference
1432  * frames just to throw them away */
1433  if (h->poc.frame_num != h->poc.prev_frame_num) {
1434  int unwrap_prev_frame_num = h->poc.prev_frame_num;
1435  int max_frame_num = 1 << sps->log2_max_frame_num;
1436 
1437  if (unwrap_prev_frame_num > h->poc.frame_num)
1438  unwrap_prev_frame_num -= max_frame_num;
1439 
1440  if ((h->poc.frame_num - unwrap_prev_frame_num) > sps->ref_frame_count) {
1441  unwrap_prev_frame_num = (h->poc.frame_num - sps->ref_frame_count) - 1;
1442  if (unwrap_prev_frame_num < 0)
1443  unwrap_prev_frame_num += max_frame_num;
1444 
1445  h->poc.prev_frame_num = unwrap_prev_frame_num;
1446  }
1447  }
1448 
1449  /* See if we have a decoded first field looking for a pair...
1450  * Here, we're using that to see if we should mark previously
1451  * decode frames as "finished".
1452  * We have to do that before the "dummy" in-between frame allocation,
1453  * since that can modify h->cur_pic_ptr. */
1454  if (h->first_field) {
1455  int last_field = last_pic_structure == PICT_BOTTOM_FIELD;
1456  av_assert0(h->cur_pic_ptr);
1457  av_assert0(h->cur_pic_ptr->f->buf[0]);
1458  assert(h->cur_pic_ptr->reference != DELAYED_PIC_REF);
1459 
1460  /* Mark old field/frame as completed */
1461  if (h->cur_pic_ptr->tf.owner[last_field] == h->avctx) {
1462  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, last_field);
1463  }
1464 
1465  /* figure out if we have a complementary field pair */
1466  if (!FIELD_PICTURE(h) || h->picture_structure == last_pic_structure) {
1467  /* Previous field is unmatched. Don't display it, but let it
1468  * remain for reference if marked as such. */
1469  if (last_pic_structure != PICT_FRAME) {
1470  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
1471  last_pic_structure == PICT_TOP_FIELD);
1472  }
1473  } else {
1474  if (h->cur_pic_ptr->frame_num != h->poc.frame_num) {
1475  /* This and previous field were reference, but had
1476  * different frame_nums. Consider this field first in
1477  * pair. Throw away previous field except for reference
1478  * purposes. */
1479  if (last_pic_structure != PICT_FRAME) {
1480  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
1481  last_pic_structure == PICT_TOP_FIELD);
1482  }
1483  } else {
1484  /* Second field in complementary pair */
1485  if (!((last_pic_structure == PICT_TOP_FIELD &&
1486  h->picture_structure == PICT_BOTTOM_FIELD) ||
1487  (last_pic_structure == PICT_BOTTOM_FIELD &&
1488  h->picture_structure == PICT_TOP_FIELD))) {
1489  av_log(h->avctx, AV_LOG_ERROR,
1490  "Invalid field mode combination %d/%d\n",
1491  last_pic_structure, h->picture_structure);
1492  h->picture_structure = last_pic_structure;
1493  h->droppable = last_pic_droppable;
1494  return AVERROR_INVALIDDATA;
1495  } else if (last_pic_droppable != h->droppable) {
1496  avpriv_request_sample(h->avctx,
1497  "Found reference and non-reference fields in the same frame, which");
1498  h->picture_structure = last_pic_structure;
1499  h->droppable = last_pic_droppable;
1500  return AVERROR_PATCHWELCOME;
1501  }
1502  }
1503  }
1504  }
1505 
1506  while (h->poc.frame_num != h->poc.prev_frame_num && !h->first_field &&
1507  h->poc.frame_num != (h->poc.prev_frame_num + 1) % (1 << sps->log2_max_frame_num)) {
1508  const H264Picture *prev = h->short_ref_count ? h->short_ref[0] : NULL;
1509  av_log(h->avctx, AV_LOG_DEBUG, "Frame num gap %d %d\n",
1510  h->poc.frame_num, h->poc.prev_frame_num);
1511  if (!sps->gaps_in_frame_num_allowed_flag)
1512  for(i=0; i<FF_ARRAY_ELEMS(h->last_pocs); i++)
1513  h->last_pocs[i] = INT_MIN;
1514  ret = h264_frame_start(h);
1515  if (ret < 0) {
1516  h->first_field = 0;
1517  return ret;
1518  }
1519 
1520  h->poc.prev_frame_num++;
1521  h->poc.prev_frame_num %= 1 << sps->log2_max_frame_num;
1522  h->cur_pic_ptr->frame_num = h->poc.prev_frame_num;
1523  h->cur_pic_ptr->invalid_gap = !sps->gaps_in_frame_num_allowed_flag;
1524  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 0);
1525  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 1);
1526 
1527  h->explicit_ref_marking = 0;
1529  if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
1530  return ret;
1531  /* Error concealment: If a ref is missing, copy the previous ref
1532  * in its place.
1533  * FIXME: Avoiding a memcpy would be nice, but ref handling makes
1534  * many assumptions about there being no actual duplicates.
1535  * FIXME: This does not copy padding for out-of-frame motion
1536  * vectors. Given we are concealing a lost frame, this probably
1537  * is not noticeable by comparison, but it should be fixed. */
1538  if (h->short_ref_count) {
1539  int c[4] = {
1540  1<<(h->ps.sps->bit_depth_luma-1),
1541  1<<(h->ps.sps->bit_depth_chroma-1),
1542  1<<(h->ps.sps->bit_depth_chroma-1),
1543  -1
1544  };
1545 
1546  if (prev &&
1547  h->short_ref[0]->f->width == prev->f->width &&
1548  h->short_ref[0]->f->height == prev->f->height &&
1549  h->short_ref[0]->f->format == prev->f->format) {
1550  ff_thread_await_progress(&prev->tf, INT_MAX, 0);
1551  if (prev->field_picture)
1552  ff_thread_await_progress(&prev->tf, INT_MAX, 1);
1553  ff_thread_release_ext_buffer(&h->short_ref[0]->tf);
1554  h->short_ref[0]->tf.f = h->short_ref[0]->f;
1555  ret = ff_thread_ref_frame(&h->short_ref[0]->tf, &prev->tf);
1556  if (ret < 0)
1557  return ret;
1558  h->short_ref[0]->poc = prev->poc + 2U;
1559  h->short_ref[0]->gray = prev->gray;
1560  ff_thread_report_progress(&h->short_ref[0]->tf, INT_MAX, 0);
1561  if (h->short_ref[0]->field_picture)
1562  ff_thread_report_progress(&h->short_ref[0]->tf, INT_MAX, 1);
1563  } else if (!h->frame_recovered) {
1564  if (!h->avctx->hwaccel)
1565  color_frame(h->short_ref[0]->f, c);
1566  h->short_ref[0]->gray = 1;
1567  }
1568  h->short_ref[0]->frame_num = h->poc.prev_frame_num;
1569  }
1570  }
1571 
1572  /* See if we have a decoded first field looking for a pair...
1573  * We're using that to see whether to continue decoding in that
1574  * frame, or to allocate a new one. */
1575  if (h->first_field) {
1576  av_assert0(h->cur_pic_ptr);
1577  av_assert0(h->cur_pic_ptr->f->buf[0]);
1578  assert(h->cur_pic_ptr->reference != DELAYED_PIC_REF);
1579 
1580  /* figure out if we have a complementary field pair */
1581  if (!FIELD_PICTURE(h) || h->picture_structure == last_pic_structure) {
1582  /* Previous field is unmatched. Don't display it, but let it
1583  * remain for reference if marked as such. */
1584  h->missing_fields ++;
1585  h->cur_pic_ptr = NULL;
1586  h->first_field = FIELD_PICTURE(h);
1587  } else {
1588  h->missing_fields = 0;
1589  if (h->cur_pic_ptr->frame_num != h->poc.frame_num) {
1590  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
1591  h->picture_structure==PICT_BOTTOM_FIELD);
1592  /* This and the previous field had different frame_nums.
1593  * Consider this field first in pair. Throw away previous
1594  * one except for reference purposes. */
1595  h->first_field = 1;
1596  h->cur_pic_ptr = NULL;
1597  } else if (h->cur_pic_ptr->reference & DELAYED_PIC_REF) {
1598  /* This frame was already output, we cannot draw into it
1599  * anymore.
1600  */
1601  h->first_field = 1;
1602  h->cur_pic_ptr = NULL;
1603  } else {
1604  /* Second field in complementary pair */
1605  h->first_field = 0;
1606  }
1607  }
1608  } else {
1609  /* Frame or first field in a potentially complementary pair */
1610  h->first_field = FIELD_PICTURE(h);
1611  }
1612 
1613  if (!FIELD_PICTURE(h) || h->first_field) {
1614  if (h264_frame_start(h) < 0) {
1615  h->first_field = 0;
1616  return AVERROR_INVALIDDATA;
1617  }
1618  } else {
1619  int field = h->picture_structure == PICT_BOTTOM_FIELD;
1621  h->cur_pic_ptr->tf.owner[field] = h->avctx;
1622  }
1623  /* Some macroblocks can be accessed before they're available in case
1624  * of lost slices, MBAFF or threading. */
1625  if (FIELD_PICTURE(h)) {
1626  for(i = (h->picture_structure == PICT_BOTTOM_FIELD); i<h->mb_height; i++)
1627  memset(h->slice_table + i*h->mb_stride, -1, (h->mb_stride - (i+1==h->mb_height)) * sizeof(*h->slice_table));
1628  } else {
1629  memset(h->slice_table, -1,
1630  (h->mb_height * h->mb_stride - 1) * sizeof(*h->slice_table));
1631  }
1632 
1633  ret = ff_h264_init_poc(h->cur_pic_ptr->field_poc, &h->cur_pic_ptr->poc,
1634  h->ps.sps, &h->poc, h->picture_structure, nal->ref_idc);
1635  if (ret < 0)
1636  return ret;
1637 
1638  memcpy(h->mmco, sl->mmco, sl->nb_mmco * sizeof(*h->mmco));
1639  h->nb_mmco = sl->nb_mmco;
1640  h->explicit_ref_marking = sl->explicit_ref_marking;
1641 
1642  h->picture_idr = nal->type == H264_NAL_IDR_SLICE;
1643 
1644  if (h->sei.recovery_point.recovery_frame_cnt >= 0) {
1645  const int sei_recovery_frame_cnt = h->sei.recovery_point.recovery_frame_cnt;
1646 
1647  if (h->poc.frame_num != sei_recovery_frame_cnt || sl->slice_type_nos != AV_PICTURE_TYPE_I)
1648  h->valid_recovery_point = 1;
1649 
1650  if ( h->recovery_frame < 0
1651  || av_zero_extend(h->recovery_frame - h->poc.frame_num, h->ps.sps->log2_max_frame_num) > sei_recovery_frame_cnt) {
1652  h->recovery_frame = av_zero_extend(h->poc.frame_num + sei_recovery_frame_cnt, h->ps.sps->log2_max_frame_num);
1653 
1654  if (!h->valid_recovery_point)
1655  h->recovery_frame = h->poc.frame_num;
1656  }
1657  }
1658 
1659  h->cur_pic_ptr->f->flags |= AV_FRAME_FLAG_KEY * !!(nal->type == H264_NAL_IDR_SLICE);
1660 
1661  if (nal->type == H264_NAL_IDR_SLICE) {
1662  h->cur_pic_ptr->recovered |= FRAME_RECOVERED_IDR;
1663  // If we have an IDR, all frames after it in decoded order are
1664  // "recovered".
1665  h->frame_recovered |= FRAME_RECOVERED_IDR;
1666  }
1667 
1668  if (h->recovery_frame == h->poc.frame_num && nal->ref_idc) {
1669  h->recovery_frame = -1;
1670  h->cur_pic_ptr->recovered |= FRAME_RECOVERED_SEI;
1671  }
1672 
1673 #if 1
1674  h->cur_pic_ptr->recovered |= h->frame_recovered;
1675 #else
1676  h->cur_pic_ptr->recovered |= !!(h->frame_recovered & FRAME_RECOVERED_IDR);
1677 #endif
1678 
1679  /* Set the frame properties/side data. Only done for the second field in
1680  * field coded frames, since some SEI information is present for each field
1681  * and is merged by the SEI parsing code. */
1682  if (!FIELD_PICTURE(h) || !h->first_field || h->missing_fields > 1) {
1684  if (ret < 0)
1685  return ret;
1686 
1688  if (ret < 0)
1689  return ret;
1690  }
1691 
1692  return 0;
1693 }
1694 
1696  const H2645NAL *nal)
1697 {
1698  const SPS *sps;
1699  const PPS *pps;
1700  int ret;
1701  unsigned int slice_type, tmp, i;
1702  int field_pic_flag, bottom_field_flag;
1703  int first_slice = sl == h->slice_ctx && !h->current_slice;
1704  int picture_structure;
1705 
1706  if (first_slice)
1707  av_assert0(!h->setup_finished);
1708 
1709  sl->first_mb_addr = get_ue_golomb_long(&sl->gb);
1710 
1711  slice_type = get_ue_golomb_31(&sl->gb);
1712  if (slice_type > 9) {
1713  av_log(h->avctx, AV_LOG_ERROR,
1714  "slice type %d too large at %d\n",
1715  slice_type, sl->first_mb_addr);
1716  return AVERROR_INVALIDDATA;
1717  }
1718  if (slice_type > 4) {
1719  slice_type -= 5;
1720  sl->slice_type_fixed = 1;
1721  } else
1722  sl->slice_type_fixed = 0;
1723 
1724  slice_type = ff_h264_golomb_to_pict_type[slice_type];
1725  sl->slice_type = slice_type;
1726  sl->slice_type_nos = slice_type & 3;
1727 
1728  if (nal->type == H264_NAL_IDR_SLICE &&
1730  av_log(h->avctx, AV_LOG_ERROR, "A non-intra slice in an IDR NAL unit.\n");
1731  return AVERROR_INVALIDDATA;
1732  }
1733 
1734  sl->pps_id = get_ue_golomb(&sl->gb);
1735  if (sl->pps_id >= MAX_PPS_COUNT) {
1736  av_log(h->avctx, AV_LOG_ERROR, "pps_id %u out of range\n", sl->pps_id);
1737  return AVERROR_INVALIDDATA;
1738  }
1739  if (!h->ps.pps_list[sl->pps_id]) {
1740  av_log(h->avctx, AV_LOG_ERROR,
1741  "non-existing PPS %u referenced\n",
1742  sl->pps_id);
1743  return AVERROR_INVALIDDATA;
1744  }
1745  pps = h->ps.pps_list[sl->pps_id];
1746  sps = pps->sps;
1747 
1748  sl->frame_num = get_bits(&sl->gb, sps->log2_max_frame_num);
1749  if (!first_slice) {
1750  if (h->poc.frame_num != sl->frame_num) {
1751  av_log(h->avctx, AV_LOG_ERROR, "Frame num change from %d to %d\n",
1752  h->poc.frame_num, sl->frame_num);
1753  return AVERROR_INVALIDDATA;
1754  }
1755  }
1756 
1757  sl->mb_mbaff = 0;
1758 
1759  if (sps->frame_mbs_only_flag) {
1760  picture_structure = PICT_FRAME;
1761  } else {
1762  if (!sps->direct_8x8_inference_flag && slice_type == AV_PICTURE_TYPE_B) {
1763  av_log(h->avctx, AV_LOG_ERROR, "This stream was generated by a broken encoder, invalid 8x8 inference\n");
1764  return -1;
1765  }
1766  field_pic_flag = get_bits1(&sl->gb);
1767  if (field_pic_flag) {
1768  bottom_field_flag = get_bits1(&sl->gb);
1769  picture_structure = PICT_TOP_FIELD + bottom_field_flag;
1770  } else {
1771  picture_structure = PICT_FRAME;
1772  }
1773  }
1774  sl->picture_structure = picture_structure;
1775  sl->mb_field_decoding_flag = picture_structure != PICT_FRAME;
1776 
1777  if (picture_structure == PICT_FRAME) {
1778  sl->curr_pic_num = sl->frame_num;
1779  sl->max_pic_num = 1 << sps->log2_max_frame_num;
1780  } else {
1781  sl->curr_pic_num = 2 * sl->frame_num + 1;
1782  sl->max_pic_num = 1 << (sps->log2_max_frame_num + 1);
1783  }
1784 
1785  if (nal->type == H264_NAL_IDR_SLICE) {
1786  unsigned idr_pic_id = get_ue_golomb_long(&sl->gb);
1787  if (idr_pic_id < 65536) {
1788  sl->idr_pic_id = idr_pic_id;
1789  } else
1790  av_log(h->avctx, AV_LOG_WARNING, "idr_pic_id is invalid\n");
1791  }
1792 
1793  sl->poc_lsb = 0;
1794  sl->delta_poc_bottom = 0;
1795  if (sps->poc_type == 0) {
1796  sl->poc_lsb = get_bits(&sl->gb, sps->log2_max_poc_lsb);
1797 
1798  if (pps->pic_order_present == 1 && picture_structure == PICT_FRAME)
1799  sl->delta_poc_bottom = get_se_golomb(&sl->gb);
1800  }
1801 
1802  sl->delta_poc[0] = sl->delta_poc[1] = 0;
1803  if (sps->poc_type == 1 && !sps->delta_pic_order_always_zero_flag) {
1804  sl->delta_poc[0] = get_se_golomb(&sl->gb);
1805 
1806  if (pps->pic_order_present == 1 && picture_structure == PICT_FRAME)
1807  sl->delta_poc[1] = get_se_golomb(&sl->gb);
1808  }
1809 
1810  sl->redundant_pic_count = 0;
1811  if (pps->redundant_pic_cnt_present)
1812  sl->redundant_pic_count = get_ue_golomb(&sl->gb);
1813 
1814  if (sl->slice_type_nos == AV_PICTURE_TYPE_B)
1815  sl->direct_spatial_mv_pred = get_bits1(&sl->gb);
1816 
1818  &sl->gb, pps, sl->slice_type_nos,
1819  picture_structure, h->avctx);
1820  if (ret < 0)
1821  return ret;
1822 
1823  if (sl->slice_type_nos != AV_PICTURE_TYPE_I) {
1825  if (ret < 0) {
1826  sl->ref_count[1] = sl->ref_count[0] = 0;
1827  return ret;
1828  }
1829  }
1830 
1831  sl->pwt.use_weight = 0;
1832  for (i = 0; i < 2; i++) {
1833  sl->pwt.luma_weight_flag[i] = 0;
1834  sl->pwt.chroma_weight_flag[i] = 0;
1835  }
1836  if ((pps->weighted_pred && sl->slice_type_nos == AV_PICTURE_TYPE_P) ||
1837  (pps->weighted_bipred_idc == 1 &&
1840  sl->slice_type_nos, &sl->pwt,
1841  picture_structure, h->avctx);
1842  if (ret < 0)
1843  return ret;
1844  }
1845 
1846  sl->explicit_ref_marking = 0;
1847  if (nal->ref_idc) {
1848  ret = ff_h264_decode_ref_pic_marking(sl, &sl->gb, nal, h->avctx);
1849  if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
1850  return AVERROR_INVALIDDATA;
1851  }
1852 
1853  if (sl->slice_type_nos != AV_PICTURE_TYPE_I && pps->cabac) {
1854  tmp = get_ue_golomb_31(&sl->gb);
1855  if (tmp > 2) {
1856  av_log(h->avctx, AV_LOG_ERROR, "cabac_init_idc %u overflow\n", tmp);
1857  return AVERROR_INVALIDDATA;
1858  }
1859  sl->cabac_init_idc = tmp;
1860  }
1861 
1862  sl->last_qscale_diff = 0;
1863  tmp = pps->init_qp + (unsigned)get_se_golomb(&sl->gb);
1864  if (tmp > 51 + 6 * (sps->bit_depth_luma - 8)) {
1865  av_log(h->avctx, AV_LOG_ERROR, "QP %u out of range\n", tmp);
1866  return AVERROR_INVALIDDATA;
1867  }
1868  sl->qscale = tmp;
1869  sl->chroma_qp[0] = get_chroma_qp(pps, 0, sl->qscale);
1870  sl->chroma_qp[1] = get_chroma_qp(pps, 1, sl->qscale);
1871  // FIXME qscale / qp ... stuff
1872  if (sl->slice_type == AV_PICTURE_TYPE_SP)
1873  get_bits1(&sl->gb); /* sp_for_switch_flag */
1874  if (sl->slice_type == AV_PICTURE_TYPE_SP ||
1876  get_se_golomb(&sl->gb); /* slice_qs_delta */
1877 
1878  sl->deblocking_filter = 1;
1879  sl->slice_alpha_c0_offset = 0;
1880  sl->slice_beta_offset = 0;
1881  if (pps->deblocking_filter_parameters_present) {
1882  tmp = get_ue_golomb_31(&sl->gb);
1883  if (tmp > 2) {
1884  av_log(h->avctx, AV_LOG_ERROR,
1885  "deblocking_filter_idc %u out of range\n", tmp);
1886  return AVERROR_INVALIDDATA;
1887  }
1888  sl->deblocking_filter = tmp;
1889  if (sl->deblocking_filter < 2)
1890  sl->deblocking_filter ^= 1; // 1<->0
1891 
1892  if (sl->deblocking_filter) {
1893  int slice_alpha_c0_offset_div2 = get_se_golomb(&sl->gb);
1894  int slice_beta_offset_div2 = get_se_golomb(&sl->gb);
1895  if (slice_alpha_c0_offset_div2 > 6 ||
1896  slice_alpha_c0_offset_div2 < -6 ||
1897  slice_beta_offset_div2 > 6 ||
1898  slice_beta_offset_div2 < -6) {
1899  av_log(h->avctx, AV_LOG_ERROR,
1900  "deblocking filter parameters %d %d out of range\n",
1901  slice_alpha_c0_offset_div2, slice_beta_offset_div2);
1902  return AVERROR_INVALIDDATA;
1903  }
1904  sl->slice_alpha_c0_offset = slice_alpha_c0_offset_div2 * 2;
1905  sl->slice_beta_offset = slice_beta_offset_div2 * 2;
1906  }
1907  }
1908 
1909  return 0;
1910 }
1911 
1912 /* do all the per-slice initialization needed before we can start decoding the
1913  * actual MBs */
// NOTE(review): the doxygen extraction dropped line 1914 here, which carried
// the start of the signature (presumably "static int h264_slice_init(
// H264Context *h, H264SliceContext *sl,") — confirm against the original file.
1915  const H2645NAL *nal)
1916 {
1917  int i, j, ret = 0;
1918 
// An IDR access unit must contain only IDR slices; reject mixed NAL types.
1919  if (h->picture_idr && nal->type != H264_NAL_IDR_SLICE) {
1920  av_log(h->avctx, AV_LOG_ERROR, "Invalid mix of IDR and non-IDR slices\n");
1921  return AVERROR_INVALIDDATA;
1922  }
1923 
// Validate first_mb_in_slice against the picture's MB count (the shift
// accounts for field/MBAFF coding where one address covers an MB pair).
1924  av_assert1(h->mb_num == h->mb_width * h->mb_height);
1925  if (sl->first_mb_addr << FIELD_OR_MBAFF_PICTURE(h) >= h->mb_num ||
1926  sl->first_mb_addr >= h->mb_num) {
1927  av_log(h->avctx, AV_LOG_ERROR, "first_mb_in_slice overflow\n");
1928  return AVERROR_INVALIDDATA;
1929  }
// Derive the resync (slice-start) MB coordinates from the linear address.
// NOTE(review): line 1932 (the shift amount closing this statement, presumably
// "FIELD_OR_MBAFF_PICTURE(h);") is missing from this extraction.
1930  sl->resync_mb_x = sl->mb_x = sl->first_mb_addr % h->mb_width;
1931  sl->resync_mb_y = sl->mb_y = (sl->first_mb_addr / h->mb_width) <<
1933  if (h->picture_structure == PICT_BOTTOM_FIELD)
1934  sl->resync_mb_y = sl->mb_y = sl->mb_y + 1;
1935  av_assert1(sl->mb_y < h->mb_height);
1936 
1937  ret = ff_h264_build_ref_list(h, sl);
1938  if (ret < 0)
1939  return ret;
1940 
// Implicit weighted bi-prediction: build the weight tables (frame level plus
// both field parities for MBAFF). NOTE(review): line 1942 with the rest of
// this condition is missing from this extraction.
1941  if (h->ps.pps->weighted_bipred_idc == 2 &&
1943  implicit_weight_table(h, sl, -1);
1944  if (FRAME_MBAFF(h)) {
1945  implicit_weight_table(h, sl, 0);
1946  implicit_weight_table(h, sl, 1);
1947  }
1948  }
1949 
// NOTE(review): lines 1950-1951 and 1953 are missing from this extraction;
// the statement guarded by !h->setup_finished is not visible here — confirm
// against the original h264_slice.c.
1952  if (!h->setup_finished)
1954 
// Translate the user's skip_loop_filter discard level into disabling the
// deblocking filter for this slice. NOTE(review): lines 1959 and 1961 (the
// slice-type halves of the NONINTRA/BIDIR conditions) are missing here.
1955  if (h->avctx->skip_loop_filter >= AVDISCARD_ALL ||
1956  (h->avctx->skip_loop_filter >= AVDISCARD_NONKEY &&
1957  h->nal_unit_type != H264_NAL_IDR_SLICE) ||
1958  (h->avctx->skip_loop_filter >= AVDISCARD_NONINTRA &&
1960  (h->avctx->skip_loop_filter >= AVDISCARD_BIDIR &&
1962  (h->avctx->skip_loop_filter >= AVDISCARD_NONREF &&
1963  nal->ref_idc == 0))
1964  sl->deblocking_filter = 0;
1965 
// With multiple slice contexts, deblocking across slice boundaries cannot run
// in parallel: either restrict filtering to within the slice (FAST flag) or
// postpone all filtering until the slices have been decoded.
1966  if (sl->deblocking_filter == 1 && h->nb_slice_ctx > 1) {
1967  if (h->avctx->flags2 & AV_CODEC_FLAG2_FAST) {
1968  /* Cheat slightly for speed:
1969  * Do not bother to deblock across slices. */
1970  sl->deblocking_filter = 2;
1971  } else {
1972  h->postpone_filter = 1;
1973  }
1974  }
// qp_thresh: at or below this QP the loop filter provably does nothing (used
// by fill_filter_caches to skip work). NOTE(review): line 1976 of this
// expression is missing from this extraction.
1975  sl->qp_thresh = 15 -
1977  FFMAX3(0,
1978  h->ps.pps->chroma_qp_index_offset[0],
1979  h->ps.pps->chroma_qp_index_offset[1]) +
1980  6 * (h->ps.sps->bit_depth_luma - 8);
1981 
1982  sl->slice_num = ++h->current_slice;
1983 
// Record the resync row per (wrapped) slice number and warn when slice
// numbers wrap around MAX_SLICES with colliding rows — the ref2frm tables
// below are indexed modulo MAX_SLICES, so collisions can cause artifacts.
1984  if (sl->slice_num)
1985  h->slice_row[(sl->slice_num-1)&(MAX_SLICES-1)]= sl->resync_mb_y;
1986  if ( h->slice_row[sl->slice_num&(MAX_SLICES-1)] + 3 >= sl->resync_mb_y
1987  && h->slice_row[sl->slice_num&(MAX_SLICES-1)] <= sl->resync_mb_y
1988  && sl->slice_num >= MAX_SLICES) {
1989  //in case of ASO this check needs to be updated depending on how we decide to assign slice numbers in this case
1990  av_log(h->avctx, AV_LOG_WARNING, "Possibly too many slices (%d >= %d), increase MAX_SLICES and recompile if there are artifacts\n", sl->slice_num, MAX_SLICES);
1991  }
1992 
// Build the ref2frm remap tables: map each reference-list index to a stable
// per-frame id by matching frame buffers (short refs first, then long refs;
// 60 marks "not found"). Entries [0]/[1] and [18]/[19] are -1 sentinels.
1993  for (j = 0; j < 2; j++) {
1994  int id_list[16];
1995  int *ref2frm = h->ref2frm[sl->slice_num & (MAX_SLICES - 1)][j];
1996  for (i = 0; i < 16; i++) {
1997  id_list[i] = 60;
1998  if (j < sl->list_count && i < sl->ref_count[j] &&
1999  sl->ref_list[j][i].parent->f->buf[0]) {
2000  int k;
2001  const AVBuffer *buf = sl->ref_list[j][i].parent->f->buf[0]->buffer;
2002  for (k = 0; k < h->short_ref_count; k++)
2003  if (h->short_ref[k]->f->buf[0]->buffer == buf) {
2004  id_list[i] = k;
2005  break;
2006  }
2007  for (k = 0; k < h->long_ref_count; k++)
2008  if (h->long_ref[k] && h->long_ref[k]->f->buf[0]->buffer == buf) {
2009  id_list[i] = h->short_ref_count + k;
2010  break;
2011  }
2012  }
2013  }
2014 
2015  ref2frm[0] =
2016  ref2frm[1] = -1;
2017  for (i = 0; i < 16; i++)
2018  ref2frm[i + 2] = 4 * id_list[i] + (sl->ref_list[j][i].reference & 3);
2019  ref2frm[18 + 0] =
2020  ref2frm[18 + 1] = -1;
2021  for (i = 16; i < 48; i++)
2022  ref2frm[i + 4] = 4 * id_list[(i - 16) >> 1] +
2023  (sl->ref_list[j][i].reference & 3);
2024  }
2025 
// Propagate "gray" (unreconstructed) status: intra slices clear it for the
// current picture, inter slices inherit it from any gray reference.
2026  if (sl->slice_type_nos == AV_PICTURE_TYPE_I) {
2027  h->cur_pic_ptr->gray = 0;
2028  h->non_gray = 1;
2029  } else {
2030  int gray = 0;
2031  for (j = 0; j < sl->list_count; j++) {
2032  for (i = 0; i < sl->ref_count[j]; i++) {
2033  gray |= sl->ref_list[j][i].parent->gray;
2034  }
2035  }
2036  h->cur_pic_ptr->gray = gray;
2037  }
2038 
2039  if (h->avctx->debug & FF_DEBUG_PICT_INFO) {
2040  av_log(h->avctx, AV_LOG_DEBUG,
2041  "slice:%d %c mb:%d %c%s%s frame:%d poc:%d/%d ref:%d/%d qp:%d loop:%d:%d:%d weight:%d%s %s\n",
2042  sl->slice_num,
2043  (h->picture_structure == PICT_FRAME ? 'F' : h->picture_structure == PICT_TOP_FIELD ? 'T' : 'B'),
2044  sl->mb_y * h->mb_width + sl->mb_x,
// NOTE(review): line 2045 (the slice-type character argument) is missing here.
2046  sl->slice_type_fixed ? " fix" : "",
2047  nal->type == H264_NAL_IDR_SLICE ? " IDR" : "",
2048  h->poc.frame_num,
2049  h->cur_pic_ptr->field_poc[0],
2050  h->cur_pic_ptr->field_poc[1],
2051  sl->ref_count[0], sl->ref_count[1],
2052  sl->qscale,
2053  sl->deblocking_filter,
// NOTE(review): line 2054 (the alpha/beta offset arguments) is missing here.
2055  sl->pwt.use_weight,
2056  sl->pwt.use_weight == 1 && sl->pwt.use_weight_chroma ? "c" : "",
2057  sl->slice_type == AV_PICTURE_TYPE_B ? (sl->direct_spatial_mv_pred ? "SPAT" : "TEMP") : "");
2058  }
2059 
2060  return 0;
2061 }
2062 
// Parse one slice NAL, detect picture/field boundaries, validate parameter-set
// consistency across slices, and queue the slice context for decoding.
// NOTE(review): line 2063 with the signature (presumably
// "int ff_h264_queue_decode_slice(H264Context *h, const H2645NAL *nal)") is
// missing from this extraction — confirm against the original file.
2064 {
2065  H264SliceContext *sl = h->slice_ctx + h->nb_slice_ctx_queued;
2066  int first_slice = sl == h->slice_ctx && !h->current_slice;
2067  int ret;
2068 
2069  sl->gb = nal->gb;
2070 
2071  ret = h264_slice_header_parse(h, sl, nal);
2072  if (ret < 0)
2073  return ret;
2074 
2075  // discard redundant pictures
2076  if (sl->redundant_pic_count > 0) {
2077  sl->ref_count[0] = sl->ref_count[1] = 0;
2078  return 0;
2079  }
2080 
// A new picture/field starting after the frame was finalized means the stream
// carries more fields than the picture structure allows.
2081  if (sl->first_mb_addr == 0 || !h->current_slice) {
2082  if (h->setup_finished) {
2083  av_log(h->avctx, AV_LOG_ERROR, "Too many fields\n");
2084  return AVERROR_INVALIDDATA;
2085  }
2086  }
2087 
2088  if (sl->first_mb_addr == 0) { // FIXME better field boundary detection
2089  if (h->current_slice) {
2090  // this slice starts a new field
2091  // first decode any pending queued slices
2092  if (h->nb_slice_ctx_queued) {
2093  H264SliceContext tmp_ctx;
2094 
// NOTE(review): line 2095 (presumably
// "ret = ff_h264_execute_decode_slices(h);") is missing from this extraction.
2096  if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
2097  return ret;
2098 
// Swap the freshly parsed context into slot 0 so decoding of the new
// field starts from h->slice_ctx[0].
2099  memcpy(&tmp_ctx, h->slice_ctx, sizeof(tmp_ctx));
2100  memcpy(h->slice_ctx, sl, sizeof(tmp_ctx));
2101  memcpy(sl, &tmp_ctx, sizeof(tmp_ctx));
2102  sl = h->slice_ctx;
2103  }
2104 
// Close out the previous field/frame before starting the new one.
2105  if (h->cur_pic_ptr && FIELD_PICTURE(h) && h->first_field) {
2106  ret = ff_h264_field_end(h, h->slice_ctx, 1);
2107  if (ret < 0)
2108  return ret;
2109  } else if (h->cur_pic_ptr && !FIELD_PICTURE(h) && !h->first_field && h->nal_unit_type == H264_NAL_IDR_SLICE) {
2110  av_log(h->avctx, AV_LOG_WARNING, "Broken frame packetizing\n");
2111  ret = ff_h264_field_end(h, h->slice_ctx, 1);
// Mark both fields fully decoded so waiting frame threads do not stall
// on the abandoned picture.
2112  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 0);
2113  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 1);
2114  h->cur_pic_ptr = NULL;
2115  if (ret < 0)
2116  return ret;
2117  } else
2118  return AVERROR_INVALIDDATA;
2119  }
2120 
2121  if (!h->first_field) {
2122  if (h->cur_pic_ptr && !h->droppable) {
2123  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
2124  h->picture_structure == PICT_BOTTOM_FIELD);
2125  }
2126  h->cur_pic_ptr = NULL;
2127  }
2128  }
2129 
2130  if (!h->current_slice)
2131  av_assert0(sl == h->slice_ctx);
2132 
// Honor the user's skip_frame setting before committing to this picture.
2133  if (h->current_slice == 0 && !h->first_field) {
2134  if (
2135  (h->avctx->skip_frame >= AVDISCARD_NONREF && !h->nal_ref_idc) ||
2136  (h->avctx->skip_frame >= AVDISCARD_BIDIR && sl->slice_type_nos == AV_PICTURE_TYPE_B) ||
2137  (h->avctx->skip_frame >= AVDISCARD_NONINTRA && sl->slice_type_nos != AV_PICTURE_TYPE_I) ||
2138  (h->avctx->skip_frame >= AVDISCARD_NONKEY && h->nal_unit_type != H264_NAL_IDR_SLICE && h->sei.recovery_point.recovery_frame_cnt < 0) ||
2139  h->avctx->skip_frame >= AVDISCARD_ALL) {
2140  return 0;
2141  }
2142  }
2143 
// Parameter sets must stay consistent across all slices of one picture.
2144  if (!first_slice) {
2145  const PPS *pps = h->ps.pps_list[sl->pps_id];
2146 
2147  if (h->ps.pps->sps_id != pps->sps_id ||
2148  h->ps.pps->transform_8x8_mode != pps->transform_8x8_mode /*||
2149  (h->setup_finished && h->ps.pps != pps)*/) {
2150  av_log(h->avctx, AV_LOG_ERROR, "PPS changed between slices\n");
2151  return AVERROR_INVALIDDATA;
2152  }
2153  if (h->ps.sps != pps->sps) {
2154  av_log(h->avctx, AV_LOG_ERROR,
2155  "SPS changed in the middle of the frame\n");
2156  return AVERROR_INVALIDDATA;
2157  }
2158  }
2159 
2160  if (h->current_slice == 0) {
2161  ret = h264_field_start(h, sl, nal, first_slice);
2162  if (ret < 0)
2163  return ret;
2164  } else {
// Subsequent slices must not change field mode or droppability mid-picture.
2165  if (h->picture_structure != sl->picture_structure ||
2166  h->droppable != (nal->ref_idc == 0)) {
2167  av_log(h->avctx, AV_LOG_ERROR,
2168  "Changing field mode (%d -> %d) between slices is not allowed\n",
2169  h->picture_structure, sl->picture_structure);
2170  return AVERROR_INVALIDDATA;
2171  } else if (!h->cur_pic_ptr) {
2172  av_log(h->avctx, AV_LOG_ERROR,
2173  "unset cur_pic_ptr on slice %d\n",
2174  h->current_slice + 1);
2175  return AVERROR_INVALIDDATA;
2176  }
2177  }
2178 
2179  ret = h264_slice_init(h, sl, nal);
2180  if (ret < 0)
2181  return ret;
2182 
2183  h->nb_slice_ctx_queued++;
2184 
2185  return 0;
2186 }
2187 
// Map an AVPictureType slice type to the H.264 slice_type code:
// P=0, B=1, I=2, SP=3, SI=4; any other type is invalid.
// NOTE(review): the signature line (2188, presumably
// "int ff_h264_get_slice_type(const H264SliceContext *sl)") is missing from
// this extraction — confirm against the original file.
2189 {
2190  switch (sl->slice_type) {
2191  case AV_PICTURE_TYPE_P:
2192  return 0;
2193  case AV_PICTURE_TYPE_B:
2194  return 1;
2195  case AV_PICTURE_TYPE_I:
2196  return 2;
2197  case AV_PICTURE_TYPE_SP:
2198  return 3;
2199  case AV_PICTURE_TYPE_SI:
2200  return 4;
2201  default:
2202  return AVERROR_INVALIDDATA;
2203  }
2204 }
2205 
// Fill the per-MB motion-vector (mv_cache) and reference (ref_cache) caches
// used by the loop filter for one reference list, pulling neighbour data from
// the top and left macroblocks.
// NOTE(review): line 2206 with the start of the signature (presumably
// "static void fill_filter_caches_inter(const H264Context *h,") is missing
// from this extraction — confirm against the original file.
2207  H264SliceContext *sl,
2208  int mb_type, int top_xy,
2209  const int left_xy[LEFT_MBS],
2210  int top_type,
2211  const int left_type[LEFT_MBS],
2212  int mb_xy, int list)
2213 {
2214  int b_stride = h->b_stride;
2215  int16_t(*mv_dst)[2] = &sl->mv_cache[list][scan8[0]];
2216  int8_t *ref_cache = &sl->ref_cache[list][scan8[0]];
2217  if (IS_INTER(mb_type) || IS_DIRECT(mb_type)) {
// Top-neighbour row of the caches (one row above scan8[0]).
2218  if (USES_LIST(top_type, list)) {
2219  const int b_xy = h->mb2b_xy[top_xy] + 3 * b_stride;
2220  const int b8_xy = 4 * top_xy + 2;
2221  const int *ref2frm = &h->ref2frm[h->slice_table[top_xy] & (MAX_SLICES - 1)][list][(MB_MBAFF(sl) ? 20 : 2)];
2222  AV_COPY128(mv_dst - 1 * 8, h->cur_pic.motion_val[list][b_xy + 0]);
2223  ref_cache[0 - 1 * 8] =
2224  ref_cache[1 - 1 * 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 0]];
2225  ref_cache[2 - 1 * 8] =
2226  ref_cache[3 - 1 * 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 1]];
2227  } else {
2228  AV_ZERO128(mv_dst - 1 * 8);
2229  AV_WN32A(&ref_cache[0 - 1 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2230  }
2231 
// Left-neighbour column — only when current and left interlacing match.
2232  if (!IS_INTERLACED(mb_type ^ left_type[LTOP])) {
2233  if (USES_LIST(left_type[LTOP], list)) {
2234  const int b_xy = h->mb2b_xy[left_xy[LTOP]] + 3;
2235  const int b8_xy = 4 * left_xy[LTOP] + 1;
2236  const int *ref2frm = &h->ref2frm[h->slice_table[left_xy[LTOP]] & (MAX_SLICES - 1)][list][(MB_MBAFF(sl) ? 20 : 2)];
2237  AV_COPY32(mv_dst - 1 + 0, h->cur_pic.motion_val[list][b_xy + b_stride * 0]);
2238  AV_COPY32(mv_dst - 1 + 8, h->cur_pic.motion_val[list][b_xy + b_stride * 1]);
2239  AV_COPY32(mv_dst - 1 + 16, h->cur_pic.motion_val[list][b_xy + b_stride * 2]);
2240  AV_COPY32(mv_dst - 1 + 24, h->cur_pic.motion_val[list][b_xy + b_stride * 3]);
2241  ref_cache[-1 + 0] =
2242  ref_cache[-1 + 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 2 * 0]];
2243  ref_cache[-1 + 16] =
2244  ref_cache[-1 + 24] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 2 * 1]];
2245  } else {
2246  AV_ZERO32(mv_dst - 1 + 0);
2247  AV_ZERO32(mv_dst - 1 + 8);
2248  AV_ZERO32(mv_dst - 1 + 16);
2249  AV_ZERO32(mv_dst - 1 + 24);
2250  ref_cache[-1 + 0] =
2251  ref_cache[-1 + 8] =
2252  ref_cache[-1 + 16] =
2253  ref_cache[-1 + 24] = LIST_NOT_USED;
2254  }
2255  }
2256  }
2257 
// Current MB does not use this list: clear its 4x4 cache region and return.
2258  if (!USES_LIST(mb_type, list)) {
2259  fill_rectangle(mv_dst, 4, 4, 8, pack16to32(0, 0), 4);
2260  AV_WN32A(&ref_cache[0 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2261  AV_WN32A(&ref_cache[1 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2262  AV_WN32A(&ref_cache[2 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2263  AV_WN32A(&ref_cache[3 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2264  return;
2265  }
2266 
// Remap the current MB's four 8x8-block reference indices through ref2frm and
// splat each pair across a cache row.
2267  {
2268  const int8_t *ref = &h->cur_pic.ref_index[list][4 * mb_xy];
2269  const int *ref2frm = &h->ref2frm[sl->slice_num & (MAX_SLICES - 1)][list][(MB_MBAFF(sl) ? 20 : 2)];
2270  uint32_t ref01 = (pack16to32(ref2frm[ref[0]], ref2frm[ref[1]]) & 0x00FF00FF) * 0x0101;
2271  uint32_t ref23 = (pack16to32(ref2frm[ref[2]], ref2frm[ref[3]]) & 0x00FF00FF) * 0x0101;
2272  AV_WN32A(&ref_cache[0 * 8], ref01);
2273  AV_WN32A(&ref_cache[1 * 8], ref01);
2274  AV_WN32A(&ref_cache[2 * 8], ref23);
2275  AV_WN32A(&ref_cache[3 * 8], ref23);
2276  }
2277 
// Copy the current MB's 4x4 motion vectors row by row into the cache.
2278  {
2279  int16_t(*mv_src)[2] = &h->cur_pic.motion_val[list][4 * sl->mb_x + 4 * sl->mb_y * b_stride];
2280  AV_COPY128(mv_dst + 8 * 0, mv_src + 0 * b_stride);
2281  AV_COPY128(mv_dst + 8 * 1, mv_src + 1 * b_stride);
2282  AV_COPY128(mv_dst + 8 * 2, mv_src + 2 * b_stride);
2283  AV_COPY128(mv_dst + 8 * 3, mv_src + 3 * b_stride);
2284  }
2285 }
2286 
2287 /**
2288  * @return non zero if the loop filter can be skipped
2289  */
2290 static int fill_filter_caches(const H264Context *h, H264SliceContext *sl, int mb_type)
2291 {
2292  const int mb_xy = sl->mb_xy;
2293  int top_xy, left_xy[LEFT_MBS];
2294  int top_type, left_type[LEFT_MBS];
2295  const uint8_t *nnz;
2296  uint8_t *nnz_cache;
2297 
// Locate the top and left neighbour MBs, adjusting for MBAFF pair layout
// (field/frame MB pairs change which MB counts as the vertical neighbour).
2298  top_xy = mb_xy - (h->mb_stride << MB_FIELD(sl));
2299 
2300  left_xy[LBOT] = left_xy[LTOP] = mb_xy - 1;
2301  if (FRAME_MBAFF(h)) {
2302  const int left_mb_field_flag = IS_INTERLACED(h->cur_pic.mb_type[mb_xy - 1]);
2303  const int curr_mb_field_flag = IS_INTERLACED(mb_type);
2304  if (sl->mb_y & 1) {
2305  if (left_mb_field_flag != curr_mb_field_flag)
2306  left_xy[LTOP] -= h->mb_stride;
2307  } else {
2308  if (curr_mb_field_flag)
2309  top_xy += h->mb_stride &
2310  (((h->cur_pic.mb_type[top_xy] >> 7) & 1) - 1);
2311  if (left_mb_field_flag != curr_mb_field_flag)
2312  left_xy[LBOT] += h->mb_stride;
2313  }
2314  }
2315 
2316  sl->top_mb_xy = top_xy;
2317  sl->left_mb_xy[LTOP] = left_xy[LTOP];
2318  sl->left_mb_xy[LBOT] = left_xy[LBOT];
2319  {
2320  /* For sufficiently low qp, filtering wouldn't do anything.
2321  * This is a conservative estimate: could also check beta_offset
2322  * and more accurate chroma_qp. */
2323  int qp_thresh = sl->qp_thresh; // FIXME strictly we should store qp_thresh for each mb of a slice
2324  int qp = h->cur_pic.qscale_table[mb_xy];
2325  if (qp <= qp_thresh &&
2326  (left_xy[LTOP] < 0 ||
2327  ((qp + h->cur_pic.qscale_table[left_xy[LTOP]] + 1) >> 1) <= qp_thresh) &&
2328  (top_xy < 0 ||
2329  ((qp + h->cur_pic.qscale_table[top_xy] + 1) >> 1) <= qp_thresh)) {
2330  if (!FRAME_MBAFF(h))
2331  return 1;
2332  if ((left_xy[LTOP] < 0 ||
2333  ((qp + h->cur_pic.qscale_table[left_xy[LBOT]] + 1) >> 1) <= qp_thresh) &&
2334  (top_xy < h->mb_stride ||
2335  ((qp + h->cur_pic.qscale_table[top_xy - h->mb_stride] + 1) >> 1) <= qp_thresh))
2336  return 1;
2337  }
2338  }
2339 
// Zero out neighbour types that belong to another slice (deblocking mode 2 =
// no cross-slice filtering) or that lie outside any decoded slice (0xFFFF).
2340  top_type = h->cur_pic.mb_type[top_xy];
2341  left_type[LTOP] = h->cur_pic.mb_type[left_xy[LTOP]];
2342  left_type[LBOT] = h->cur_pic.mb_type[left_xy[LBOT]];
2343  if (sl->deblocking_filter == 2) {
2344  if (h->slice_table[top_xy] != sl->slice_num)
2345  top_type = 0;
2346  if (h->slice_table[left_xy[LBOT]] != sl->slice_num)
2347  left_type[LTOP] = left_type[LBOT] = 0;
2348  } else {
2349  if (h->slice_table[top_xy] == 0xFFFF)
2350  top_type = 0;
2351  if (h->slice_table[left_xy[LBOT]] == 0xFFFF)
2352  left_type[LTOP] = left_type[LBOT] = 0;
2353  }
2354  sl->top_type = top_type;
2355  sl->left_type[LTOP] = left_type[LTOP];
2356  sl->left_type[LBOT] = left_type[LBOT];
2357 
// Intra MBs are always filtered; no MV/ref caches are needed for them.
2358  if (IS_INTRA(mb_type))
2359  return 0;
2360 
2361  fill_filter_caches_inter(h, sl, mb_type, top_xy, left_xy,
2362  top_type, left_type, mb_xy, 0);
2363  if (sl->list_count == 2)
2364  fill_filter_caches_inter(h, sl, mb_type, top_xy, left_xy,
2365  top_type, left_type, mb_xy, 1);
2366 
// Load non-zero-coefficient counts for the current MB and its neighbours into
// the cache layout the filter expects.
2367  nnz = h->non_zero_count[mb_xy];
2368  nnz_cache = sl->non_zero_count_cache;
2369  AV_COPY32(&nnz_cache[4 + 8 * 1], &nnz[0]);
2370  AV_COPY32(&nnz_cache[4 + 8 * 2], &nnz[4]);
2371  AV_COPY32(&nnz_cache[4 + 8 * 3], &nnz[8]);
2372  AV_COPY32(&nnz_cache[4 + 8 * 4], &nnz[12]);
2373  sl->cbp = h->cbp_table[mb_xy];
2374 
2375  if (top_type) {
2376  nnz = h->non_zero_count[top_xy];
2377  AV_COPY32(&nnz_cache[4 + 8 * 0], &nnz[3 * 4]);
2378  }
2379 
2380  if (left_type[LTOP]) {
2381  nnz = h->non_zero_count[left_xy[LTOP]];
2382  nnz_cache[3 + 8 * 1] = nnz[3 + 0 * 4];
2383  nnz_cache[3 + 8 * 2] = nnz[3 + 1 * 4];
2384  nnz_cache[3 + 8 * 3] = nnz[3 + 2 * 4];
2385  nnz_cache[3 + 8 * 4] = nnz[3 + 3 * 4];
2386  }
2387 
2388  /* CAVLC 8x8dct requires NNZ values for residual decoding that differ
2389  * from what the loop filter needs */
2390  if (!CABAC(h) && h->ps.pps->transform_8x8_mode) {
2391  if (IS_8x8DCT(top_type)) {
2392  nnz_cache[4 + 8 * 0] =
2393  nnz_cache[5 + 8 * 0] = (h->cbp_table[top_xy] & 0x4000) >> 12;
2394  nnz_cache[6 + 8 * 0] =
2395  nnz_cache[7 + 8 * 0] = (h->cbp_table[top_xy] & 0x8000) >> 12;
2396  }
2397  if (IS_8x8DCT(left_type[LTOP])) {
2398  nnz_cache[3 + 8 * 1] =
2399  nnz_cache[3 + 8 * 2] = (h->cbp_table[left_xy[LTOP]] & 0x2000) >> 12; // FIXME check MBAFF
2400  }
2401  if (IS_8x8DCT(left_type[LBOT])) {
2402  nnz_cache[3 + 8 * 3] =
2403  nnz_cache[3 + 8 * 4] = (h->cbp_table[left_xy[LBOT]] & 0x8000) >> 12; // FIXME check MBAFF
2404  }
2405 
// For an 8x8-transform MB, each quadrant's nnz is derived from one CBP bit.
2406  if (IS_8x8DCT(mb_type)) {
2407  nnz_cache[scan8[0]] =
2408  nnz_cache[scan8[1]] =
2409  nnz_cache[scan8[2]] =
2410  nnz_cache[scan8[3]] = (sl->cbp & 0x1000) >> 12;
2411 
2412  nnz_cache[scan8[0 + 4]] =
2413  nnz_cache[scan8[1 + 4]] =
2414  nnz_cache[scan8[2 + 4]] =
2415  nnz_cache[scan8[3 + 4]] = (sl->cbp & 0x2000) >> 12;
2416 
2417  nnz_cache[scan8[0 + 8]] =
2418  nnz_cache[scan8[1 + 8]] =
2419  nnz_cache[scan8[2 + 8]] =
2420  nnz_cache[scan8[3 + 8]] = (sl->cbp & 0x4000) >> 12;
2421 
2422  nnz_cache[scan8[0 + 12]] =
2423  nnz_cache[scan8[1 + 12]] =
2424  nnz_cache[scan8[2 + 12]] =
2425  nnz_cache[scan8[3 + 12]] = (sl->cbp & 0x8000) >> 12;
2426  }
2427  }
2428 
2429  return 0;
2430 }
2431 
// Run the deblocking filter over MB columns [start_x, end_x) of the current
// row (covering both rows of an MBAFF pair), then restore the per-slice
// position and chroma QP state it clobbers while iterating.
2432 static void loop_filter(const H264Context *h, H264SliceContext *sl, int start_x, int end_x)
2433 {
2434  uint8_t *dest_y, *dest_cb, *dest_cr;
2435  int linesize, uvlinesize, mb_x, mb_y;
2436  const int end_mb_y = sl->mb_y + FRAME_MBAFF(h);
2437  const int old_slice_type = sl->slice_type;
2438  const int pixel_shift = h->pixel_shift;
2439  const int block_h = 16 >> h->chroma_y_shift;
2440 
// Filtering is deferred to ff_h264_execute_decode_slices() in this mode.
2441  if (h->postpone_filter)
2442  return;
2443 
2444  if (sl->deblocking_filter) {
2445  for (mb_x = start_x; mb_x < end_x; mb_x++)
2446  for (mb_y = end_mb_y - FRAME_MBAFF(h); mb_y <= end_mb_y; mb_y++) {
2447  int mb_xy, mb_type;
2448  mb_xy = sl->mb_xy = mb_x + mb_y * h->mb_stride;
2449  mb_type = h->cur_pic.mb_type[mb_xy];
2450 
2451  if (FRAME_MBAFF(h))
2452  sl->mb_mbaff =
2453  sl->mb_field_decoding_flag = !!IS_INTERLACED(mb_type);
2454 
2455  sl->mb_x = mb_x;
2456  sl->mb_y = mb_y;
// Compute the luma/chroma destination pointers for this MB.
2457  dest_y = h->cur_pic.f->data[0] +
2458  ((mb_x << pixel_shift) + mb_y * sl->linesize) * 16;
2459  dest_cb = h->cur_pic.f->data[1] +
2460  (mb_x << pixel_shift) * (8 << CHROMA444(h)) +
2461  mb_y * sl->uvlinesize * block_h;
2462  dest_cr = h->cur_pic.f->data[2] +
2463  (mb_x << pixel_shift) * (8 << CHROMA444(h)) +
2464  mb_y * sl->uvlinesize * block_h;
2465  // FIXME simplify above
2466 
// Field MBs use doubled strides; the odd field starts one line up.
2467  if (MB_FIELD(sl)) {
2468  linesize = sl->mb_linesize = sl->linesize * 2;
2469  uvlinesize = sl->mb_uvlinesize = sl->uvlinesize * 2;
2470  if (mb_y & 1) { // FIXME move out of this function?
2471  dest_y -= sl->linesize * 15;
2472  dest_cb -= sl->uvlinesize * (block_h - 1);
2473  dest_cr -= sl->uvlinesize * (block_h - 1);
2474  }
2475  } else {
2476  linesize = sl->mb_linesize = sl->linesize;
2477  uvlinesize = sl->mb_uvlinesize = sl->uvlinesize;
2478  }
2479  backup_mb_border(h, sl, dest_y, dest_cb, dest_cr, linesize,
2480  uvlinesize, 0);
// fill_filter_caches() returns non-zero when filtering is a no-op here.
2481  if (fill_filter_caches(h, sl, mb_type))
2482  continue;
2483  sl->chroma_qp[0] = get_chroma_qp(h->ps.pps, 0, h->cur_pic.qscale_table[mb_xy]);
2484  sl->chroma_qp[1] = get_chroma_qp(h->ps.pps, 1, h->cur_pic.qscale_table[mb_xy]);
2485 
2486  if (FRAME_MBAFF(h)) {
2487  ff_h264_filter_mb(h, sl, mb_x, mb_y, dest_y, dest_cb, dest_cr,
2488  linesize, uvlinesize);
2489  } else {
2490  ff_h264_filter_mb_fast(h, sl, mb_x, mb_y, dest_y, dest_cb,
2491  dest_cr, linesize, uvlinesize);
2492  }
2493  }
2494  }
// Restore the state mutated by the loop above.
2495  sl->slice_type = old_slice_type;
2496  sl->mb_x = end_x;
2497  sl->mb_y = end_mb_y - FRAME_MBAFF(h);
2498  sl->chroma_qp[0] = get_chroma_qp(h->ps.pps, 0, sl->qscale);
2499  sl->chroma_qp[1] = get_chroma_qp(h->ps.pps, 1, sl->qscale);
2500 }
2501 
// Predict the MBAFF field-decoding flag from the left neighbour (preferred)
// or the top neighbour, restricted to MBs of the same slice; defaults to
// frame mode (0) when neither neighbour qualifies.
// NOTE(review): line 2502 with the signature (presumably "static void
// predict_field_decoding_flag(const H264Context *h, H264SliceContext *sl)")
// is missing from this extraction — confirm against the original file.
2503 {
2504  const int mb_xy = sl->mb_x + sl->mb_y * h->mb_stride;
2505  int mb_type = (h->slice_table[mb_xy - 1] == sl->slice_num) ?
2506  h->cur_pic.mb_type[mb_xy - 1] :
2507  (h->slice_table[mb_xy - h->mb_stride] == sl->slice_num) ?
2508  h->cur_pic.mb_type[mb_xy - h->mb_stride] : 0;
2509  sl->mb_mbaff = sl->mb_field_decoding_flag = IS_INTERLACED(mb_type) ? 1 : 0;
2510 }
2511 
2512 /**
2513  * Draw edges and report progress for the last MB row.
2514  */
// NOTE(review): line 2515 with the signature (presumably "static void
// decode_finish_row(const H264Context *h, H264SliceContext *sl)") is missing
// from this extraction — confirm against the original file.
2516 {
2517  int top = 16 * (sl->mb_y >> FIELD_PICTURE(h));
2518  int pic_height = 16 * h->mb_height >> FIELD_PICTURE(h);
2519  int height = 16 << FRAME_MBAFF(h);
2520  int deblock_border = (16 + 4) << FRAME_MBAFF(h);
2521 
// Deblocking may still modify rows above the just-decoded band, so shift the
// reported band up by the filter's reach (and extend it to the bottom once
// the last row is complete).
2522  if (sl->deblocking_filter) {
2523  if ((top + height) >= pic_height)
2524  height += deblock_border;
2525  top -= deblock_border;
2526  }
2527 
2528  if (top >= pic_height || (top + height) < 0)
2529  return;
2530 
// Clamp the band to the picture.
2531  height = FFMIN(height, pic_height - top);
2532  if (top < 0) {
2533  height = top + height;
2534  top = 0;
2535  }
2536 
2537  ff_h264_draw_horiz_band(h, sl, top, height);
2538 
// Skip progress reporting for droppable frames or after decode errors.
2539  if (h->droppable || h->er.error_occurred)
2540  return;
2541 
2542  ff_thread_report_progress(&h->cur_pic_ptr->tf, top + height - 1,
2543  h->picture_structure == PICT_BOTTOM_FIELD);
2544 }
2545 
// Thin wrapper around ff_er_add_slice() that is a no-op when error resilience
// is disabled at runtime or not compiled in.
// NOTE(review): line 2546 with the start of the signature (presumably
// "static void er_add_slice(H264SliceContext *sl,") is missing from this
// extraction — confirm against the original file.
2547  int startx, int starty,
2548  int endx, int endy, int status)
2549 {
2550  if (!sl->h264->enable_er)
2551  return;
2552 
2553  if (CONFIG_ERROR_RESILIENCE) {
2554  ff_er_add_slice(sl->er, startx, starty, endx, endy, status);
2555  }
2556 }
2557 
// Worker entry point (run via avctx->execute): decode every MB of one slice,
// choosing the CABAC or CAVLC path per the PPS, running the loop filter and
// reporting row progress as rows complete. Returns 0 or a negative AVERROR.
2558 static int decode_slice(struct AVCodecContext *avctx, void *arg)
2559 {
2560  H264SliceContext *sl = arg;
2561  const H264Context *h = sl->h264;
2562  int lf_x_start = sl->mb_x;
2563  int orig_deblock = sl->deblocking_filter;
2564  int ret;
2565 
2566  sl->linesize = h->cur_pic_ptr->f->linesize[0];
2567  sl->uvlinesize = h->cur_pic_ptr->f->linesize[1];
2568 
2569  ret = alloc_scratch_buffers(sl, sl->linesize);
2570  if (ret < 0)
2571  return ret;
2572 
2573  sl->mb_skip_run = -1;
2574 
2575  av_assert0(h->block_offset[15] == (4 * ((scan8[15] - scan8[0]) & 7) << h->pixel_shift) + 4 * sl->linesize * ((scan8[15] - scan8[0]) >> 3));
2576 
// When filtering is postponed globally, disable it inside this slice pass;
// restored from orig_deblock at "finish".
2577  if (h->postpone_filter)
2578  sl->deblocking_filter = 0;
2579 
2580  sl->is_complex = FRAME_MBAFF(h) || h->picture_structure != PICT_FRAME ||
2581  (CONFIG_GRAY && (h->flags & AV_CODEC_FLAG_GRAY));
2582 
// Flag an error if the MBs preceding this slice's start were never fully
// decoded (gap in coverage), so error concealment can kick in.
2583  if (!(h->avctx->active_thread_type & FF_THREAD_SLICE) && h->picture_structure == PICT_FRAME && sl->er->error_status_table) {
2584  const int start_i = av_clip(sl->resync_mb_x + sl->resync_mb_y * h->mb_width, 0, h->mb_num - 1);
2585  if (start_i) {
2586  int prev_status = sl->er->error_status_table[sl->er->mb_index2xy[start_i - 1]];
2587  prev_status &= ~ VP_START;
2588  if (prev_status != (ER_MV_END | ER_DC_END | ER_AC_END))
2589  sl->er->error_occurred = 1;
2590  }
2591  }
2592 
2593  if (h->ps.pps->cabac) {
2594  /* realign */
2595  align_get_bits(&sl->gb);
2596 
2597  /* init cabac */
// NOTE(review): line 2598 (presumably
// "ret = ff_init_cabac_decoder(&sl->cabac,") is missing from this extraction.
2599  sl->gb.buffer + get_bits_count(&sl->gb) / 8,
2600  (get_bits_left(&sl->gb) + 7) / 8);
2601  if (ret < 0)
2602  return ret;
2603 
// NOTE(review): line 2604 (presumably "ff_h264_init_cabac_states(h, sl);")
// is missing from this extraction.
2605 
2606  for (;;) {
2607  int ret, eos;
// Guard against this slice running into the MBs of the next queued slice.
2608  if (sl->mb_x + sl->mb_y * h->mb_width >= sl->next_slice_idx) {
2609  av_log(h->avctx, AV_LOG_ERROR, "Slice overlaps with next at %d\n",
2610  sl->next_slice_idx);
2611  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2612  sl->mb_y, ER_MB_ERROR);
2613  return AVERROR_INVALIDDATA;
2614  }
2615 
2616  ret = ff_h264_decode_mb_cabac(h, sl);
2617 
2618  if (ret >= 0)
2619  ff_h264_hl_decode_mb(h, sl);
2620 
2621  // FIXME optimal? or let mb_decode decode 16x32 ?
2622  if (ret >= 0 && FRAME_MBAFF(h)) {
2623  sl->mb_y++;
2624 
2625  ret = ff_h264_decode_mb_cabac(h, sl);
2626 
2627  if (ret >= 0)
2628  ff_h264_hl_decode_mb(h, sl);
2629  sl->mb_y--;
2630  }
2631  eos = get_cabac_terminate(&sl->cabac);
2632 
// Workaround for encoders that truncate the final CABAC bytes: treat a
// small overread as normal slice end instead of an error.
2633  if ((h->workaround_bugs & FF_BUG_TRUNCATED) &&
2634  sl->cabac.bytestream > sl->cabac.bytestream_end + 2) {
2635  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x - 1,
2636  sl->mb_y, ER_MB_END);
2637  if (sl->mb_x >= lf_x_start)
2638  loop_filter(h, sl, lf_x_start, sl->mb_x + 1);
2639  goto finish;
2640  }
2641  if (sl->cabac.bytestream > sl->cabac.bytestream_end + 2 )
2642  av_log(h->avctx, AV_LOG_DEBUG, "bytestream overread %"PTRDIFF_SPECIFIER"\n", sl->cabac.bytestream_end - sl->cabac.bytestream);
2643  if (ret < 0 || sl->cabac.bytestream > sl->cabac.bytestream_end + 4) {
2644  av_log(h->avctx, AV_LOG_ERROR,
2645  "error while decoding MB %d %d, bytestream %"PTRDIFF_SPECIFIER"\n",
2646  sl->mb_x, sl->mb_y,
2647  sl->cabac.bytestream_end - sl->cabac.bytestream);
2648  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2649  sl->mb_y, ER_MB_ERROR);
2650  return AVERROR_INVALIDDATA;
2651  }
2652 
// End of row: filter the completed MBs, report progress, advance rows.
2653  if (++sl->mb_x >= h->mb_width) {
2654  loop_filter(h, sl, lf_x_start, sl->mb_x);
2655  sl->mb_x = lf_x_start = 0;
2656  decode_finish_row(h, sl);
2657  ++sl->mb_y;
2658  if (FIELD_OR_MBAFF_PICTURE(h)) {
2659  ++sl->mb_y;
2660  if (FRAME_MBAFF(h) && sl->mb_y < h->mb_height)
// NOTE(review): line 2661 (presumably
// "predict_field_decoding_flag(h, sl);") is missing from this extraction.
2662  }
2663  }
2664 
2665  if (eos || sl->mb_y >= h->mb_height) {
2666  ff_tlog(h->avctx, "slice end %d %d\n",
2667  get_bits_count(&sl->gb), sl->gb.size_in_bits);
2668  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x - 1,
2669  sl->mb_y, ER_MB_END);
2670  if (sl->mb_x > lf_x_start)
2671  loop_filter(h, sl, lf_x_start, sl->mb_x);
2672  goto finish;
2673  }
2674  }
2675  } else {
// CAVLC path: same MB loop, but slice end is detected by bitstream
// exhaustion rather than a CABAC terminate symbol.
2676  for (;;) {
2677  int ret;
2678 
2679  if (sl->mb_x + sl->mb_y * h->mb_width >= sl->next_slice_idx) {
2680  av_log(h->avctx, AV_LOG_ERROR, "Slice overlaps with next at %d\n",
2681  sl->next_slice_idx);
2682  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2683  sl->mb_y, ER_MB_ERROR);
2684  return AVERROR_INVALIDDATA;
2685  }
2686 
2687  ret = ff_h264_decode_mb_cavlc(h, sl);
2688 
2689  if (ret >= 0)
2690  ff_h264_hl_decode_mb(h, sl);
2691 
2692  // FIXME optimal? or let mb_decode decode 16x32 ?
2693  if (ret >= 0 && FRAME_MBAFF(h)) {
2694  sl->mb_y++;
2695  ret = ff_h264_decode_mb_cavlc(h, sl);
2696 
2697  if (ret >= 0)
2698  ff_h264_hl_decode_mb(h, sl);
2699  sl->mb_y--;
2700  }
2701 
2702  if (ret < 0) {
2703  av_log(h->avctx, AV_LOG_ERROR,
2704  "error while decoding MB %d %d\n", sl->mb_x, sl->mb_y);
2705  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2706  sl->mb_y, ER_MB_ERROR);
2707  return ret;
2708  }
2709 
2710  if (++sl->mb_x >= h->mb_width) {
2711  loop_filter(h, sl, lf_x_start, sl->mb_x);
2712  sl->mb_x = lf_x_start = 0;
2713  decode_finish_row(h, sl);
2714  ++sl->mb_y;
2715  if (FIELD_OR_MBAFF_PICTURE(h)) {
2716  ++sl->mb_y;
2717  if (FRAME_MBAFF(h) && sl->mb_y < h->mb_height)
// NOTE(review): line 2718 (presumably
// "predict_field_decoding_flag(h, sl);") is missing from this extraction.
2719  }
2720  if (sl->mb_y >= h->mb_height) {
2721  ff_tlog(h->avctx, "slice end %d %d\n",
2722  get_bits_count(&sl->gb), sl->gb.size_in_bits);
2723 
// Leftover bits are tolerated unless AV_EF_AGGRESSIVE is set.
2724  if ( get_bits_left(&sl->gb) == 0
2725  || get_bits_left(&sl->gb) > 0 && !(h->avctx->err_recognition & AV_EF_AGGRESSIVE)) {
2726  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
2727  sl->mb_x - 1, sl->mb_y, ER_MB_END);
2728 
2729  goto finish;
2730  } else {
2731  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
2732  sl->mb_x, sl->mb_y, ER_MB_END);
2733 
2734  return AVERROR_INVALIDDATA;
2735  }
2736  }
2737  }
2738 
// Out of bits mid-picture: exact exhaustion is a clean slice end,
// anything else (negative bits left) is an error.
2739  if (get_bits_left(&sl->gb) <= 0 && sl->mb_skip_run <= 0) {
2740  ff_tlog(h->avctx, "slice end %d %d\n",
2741  get_bits_count(&sl->gb), sl->gb.size_in_bits);
2742 
2743  if (get_bits_left(&sl->gb) == 0) {
2744  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
2745  sl->mb_x - 1, sl->mb_y, ER_MB_END);
2746  if (sl->mb_x > lf_x_start)
2747  loop_filter(h, sl, lf_x_start, sl->mb_x);
2748 
2749  goto finish;
2750  } else {
2751  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2752  sl->mb_y, ER_MB_ERROR);
2753 
2754  return AVERROR_INVALIDDATA;
2755  }
2756  }
2757  }
2758  }
2759 
2760 finish:
2761  sl->deblocking_filter = orig_deblock;
2762  return 0;
2763 }
2764 
2765 /**
2766  * Call decode_slice() for each context.
2767  *
2768  * @param h h264 master context
2769  */
// NOTE(review): line 2770 with the signature (presumably
// "int ff_h264_execute_decode_slices(H264Context *h)") is missing from this
// extraction — confirm against the original file.
2771 {
2772  AVCodecContext *const avctx = h->avctx;
2773  H264SliceContext *sl;
2774  int context_count = h->nb_slice_ctx_queued;
2775  int ret = 0;
2776  int i, j;
2777 
2778  h->slice_ctx[0].next_slice_idx = INT_MAX;
2779 
// Hardware acceleration decodes elsewhere; nothing to do without queued
// slices either.
2780  if (h->avctx->hwaccel || context_count < 1)
2781  return 0;
2782 
2783  av_assert0(context_count && h->slice_ctx[context_count - 1].mb_y < h->mb_height);
2784 
2785  if (context_count == 1) {
2786 
// Single slice: it may decode up to the end of the picture.
2787  h->slice_ctx[0].next_slice_idx = h->mb_width * h->mb_height;
2788  h->postpone_filter = 0;
2789 
2790  ret = decode_slice(avctx, &h->slice_ctx[0]);
2791  h->mb_y = h->slice_ctx[0].mb_y;
2792  if (ret < 0)
2793  goto finish;
2794  } else {
2795  av_assert0(context_count > 0);
// Compute each slice's end boundary (start of the nearest following slice)
// so parallel workers can detect overlap.
2796  for (i = 0; i < context_count; i++) {
2797  int next_slice_idx = h->mb_width * h->mb_height;
2798  int slice_idx;
2799 
2800  sl = &h->slice_ctx[i];
2801 
2802  /* make sure none of those slices overlap */
2803  slice_idx = sl->mb_y * h->mb_width + sl->mb_x;
2804  for (j = 0; j < context_count; j++) {
2805  H264SliceContext *sl2 = &h->slice_ctx[j];
2806  int slice_idx2 = sl2->mb_y * h->mb_width + sl2->mb_x;
2807 
2808  if (i == j || slice_idx2 < slice_idx)
2809  continue;
2810  next_slice_idx = FFMIN(next_slice_idx, slice_idx2);
2811  }
2812  sl->next_slice_idx = next_slice_idx;
2813  }
2814 
2815  avctx->execute(avctx, decode_slice, h->slice_ctx,
2816  NULL, context_count, sizeof(h->slice_ctx[0]));
2817 
2818  /* pull back stuff from slices to master context */
2819  sl = &h->slice_ctx[context_count - 1];
2820  h->mb_y = sl->mb_y;
2821 
// Run the deblocking passes that were deferred while the slices decoded
// in parallel (cross-slice filtering needs all slices finished).
2822  if (h->postpone_filter) {
2823  h->postpone_filter = 0;
2824 
2825  for (i = 0; i < context_count; i++) {
2826  int y_end, x_end;
2827 
2828  sl = &h->slice_ctx[i];
2829  y_end = FFMIN(sl->mb_y + 1, h->mb_height);
2830  x_end = (sl->mb_y >= h->mb_height) ? h->mb_width : sl->mb_x;
2831 
2832  for (j = sl->resync_mb_y; j < y_end; j += 1 + FIELD_OR_MBAFF_PICTURE(h)) {
2833  sl->mb_y = j;
2834  loop_filter(h, sl, j > sl->resync_mb_y ? 0 : sl->resync_mb_x,
2835  j == y_end - 1 ? x_end : h->mb_width);
2836  }
2837  }
2838  }
2839  }
2840 
2841 finish:
2842  h->nb_slice_ctx_queued = 0;
2843  return ret;
2844 }
PICT_FRAME
#define PICT_FRAME
Definition: mpegutils.h:33
er_add_slice
static void er_add_slice(H264SliceContext *sl, int startx, int starty, int endx, int endy, int status)
Definition: h264_slice.c:2546
ff_h264_filter_mb_fast
void ff_h264_filter_mb_fast(const H264Context *h, H264SliceContext *sl, int mb_x, int mb_y, uint8_t *img_y, uint8_t *img_cb, uint8_t *img_cr, unsigned int linesize, unsigned int uvlinesize)
Definition: h264_loopfilter.c:416
h264_slice_header_init
static int h264_slice_header_init(H264Context *h)
Definition: h264_slice.c:964
implicit_weight_table
static void implicit_weight_table(const H264Context *h, H264SliceContext *sl, int field)
Initialize implicit_weight table.
Definition: h264_slice.c:688
H264SliceContext::mb_xy
int mb_xy
Definition: h264dec.h:234
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:216
AV_TIMECODE_STR_SIZE
#define AV_TIMECODE_STR_SIZE
Definition: timecode.h:33
AV_PIX_FMT_CUDA
@ AV_PIX_FMT_CUDA
HW acceleration through CUDA.
Definition: pixfmt.h:260
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
H264SliceContext::ref_cache
int8_t ref_cache[2][5 *8]
Definition: h264dec.h:302
ff_h264_free_tables
void ff_h264_free_tables(H264Context *h)
Definition: h264dec.c:141
AV_EF_EXPLODE
#define AV_EF_EXPLODE
abort decoding on minor error detection
Definition: defs.h:51
av_clip
#define av_clip
Definition: common.h:100
h264_init_ps
static int h264_init_ps(H264Context *h, const H264SliceContext *sl, int first_slice)
Definition: h264_slice.c:1057
H264SliceContext::max_pic_num
int max_pic_num
Definition: h264dec.h:334
H264SliceContext::nb_mmco
int nb_mmco
Definition: h264dec.h:325
get_bits_left
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:678
CHROMA422
#define CHROMA422(h)
Definition: h264dec.h:91
FF_BUG_TRUNCATED
#define FF_BUG_TRUNCATED
Definition: avcodec.h:1341
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
cabac.h
H264Picture::poc
int poc
frame POC
Definition: h264dec.h:135
h264_export_frame_props
static int h264_export_frame_props(H264Context *h)
Definition: h264_slice.c:1170
H264Picture::f
AVFrame * f
Definition: h264dec.h:115
ff_get_format
int ff_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Select the (possibly hardware accelerated) pixel format.
Definition: decode.c:1203
out
FILE * out
Definition: movenc.c:55
cb
static double cb(void *priv, double x, double y)
Definition: vf_geq.c:247
av_clip_int8
#define av_clip_int8
Definition: common.h:109
zigzag_scan8x8_cavlc
static const uint8_t zigzag_scan8x8_cavlc[64+1]
Definition: h264_slice.c:98
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:3341
ff_h264_ref_picture
int ff_h264_ref_picture(H264Picture *dst, const H264Picture *src)
Definition: h264_picture.c:108
ff_thread_can_start_frame
int ff_thread_can_start_frame(AVCodecContext *avctx)
Definition: pthread_frame.c:1012
ff_h2645_sei_to_frame
int ff_h2645_sei_to_frame(AVFrame *frame, H2645SEI *sei, enum AVCodecID codec_id, AVCodecContext *avctx, const H2645VUI *vui, unsigned bit_depth_luma, unsigned bit_depth_chroma, int seed)
Definition: h2645_sei.c:718
H264Picture::ref_index
int8_t * ref_index[2]
RefStruct reference.
Definition: h264dec.h:132
int64_t
long long int64_t
Definition: coverity.c:34
HWACCEL_MAX
#define HWACCEL_MAX
MB_MBAFF
#define MB_MBAFF(h)
Definition: h264dec.h:64
H264SliceContext::mvd_table
uint8_t(*[2] mvd_table)[2]
Definition: h264dec.h:315
get_bits_count
static int get_bits_count(const GetBitContext *s)
Definition: get_bits.h:249
H264_SEI_PIC_STRUCT_TOP_BOTTOM
@ H264_SEI_PIC_STRUCT_TOP_BOTTOM
3: top field, bottom field, in that order
Definition: h264_sei.h:35
color_frame
static void color_frame(AVFrame *frame, const int c[4])
Definition: h264_slice.c:307
H264Picture::pps
const PPS * pps
Definition: h264dec.h:158
HEVCSEILCEVC::info
AVBufferRef * info
Definition: h2645_sei.h:54
AV_FRAME_DATA_S12M_TIMECODE
@ AV_FRAME_DATA_S12M_TIMECODE
Timecode which conforms to SMPTE ST 12-1.
Definition: frame.h:152
GetBitContext::size_in_bits
int size_in_bits
Definition: get_bits.h:111
H2645NAL::ref_idc
int ref_idc
H.264 only, nal_ref_idc.
Definition: h2645_parse.h:57
ff_h264_slice_context_init
void ff_h264_slice_context_init(H264Context *h, H264SliceContext *sl)
Init slice context.
Definition: h264dec.c:265
ERContext::mb_index2xy
int * mb_index2xy
Definition: error_resilience.h:61
predict_field_decoding_flag
static void predict_field_decoding_flag(const H264Context *h, H264SliceContext *sl)
Definition: h264_slice.c:2502
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:410
pixdesc.h
AVFrame::width
int width
Definition: frame.h:482
w
uint8_t w
Definition: llviddspenc.c:38
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:750
get_ue_golomb
static int get_ue_golomb(GetBitContext *gb)
Read an unsigned Exp-Golomb code in the range 0 to 8190.
Definition: golomb.h:53
ff_h264_update_thread_context
int ff_h264_update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
Definition: h264_slice.c:337
alloc_scratch_buffers
static int alloc_scratch_buffers(H264SliceContext *sl, int linesize)
Definition: h264_slice.c:130
AVCOL_TRC_UNSPECIFIED
@ AVCOL_TRC_UNSPECIFIED
Definition: pixfmt.h:647
FRAME_RECOVERED_IDR
#define FRAME_RECOVERED_IDR
We have seen an IDR, so all the following frames in coded order are correctly decodable.
Definition: h264dec.h:525
decode_finish_row
static void decode_finish_row(const H264Context *h, H264SliceContext *sl)
Draw edges and report progress for the last MB row.
Definition: h264_slice.c:2515
H264SliceContext::ref_count
unsigned int ref_count[2]
num_ref_idx_l0/1_active_minus1 + 1
Definition: h264dec.h:270
FF_COMPLIANCE_STRICT
#define FF_COMPLIANCE_STRICT
Strictly conform to all the things in the spec no matter what consequences.
Definition: defs.h:59
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:528
ff_er_frame_start
void ff_er_frame_start(ERContext *s)
Definition: error_resilience.c:791
H264Picture::qscale_table
int8_t * qscale_table
Definition: h264dec.h:121
H264SliceContext::left_mb_xy
int left_mb_xy[LEFT_MBS]
Definition: h264dec.h:214
AV_PIX_FMT_D3D11VA_VLD
@ AV_PIX_FMT_D3D11VA_VLD
HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView p...
Definition: pixfmt.h:254
H264PredWeightTable::use_weight_chroma
int use_weight_chroma
Definition: h264_parse.h:71
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:226
PICT_BOTTOM_FIELD
#define PICT_BOTTOM_FIELD
Definition: mpegutils.h:32
AVCOL_SPC_RGB
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
Definition: pixfmt.h:674
AV_WN32A
#define AV_WN32A(p, v)
Definition: intreadwrite.h:534
ff_h264_update_thread_context_for_user
int ff_h264_update_thread_context_for_user(AVCodecContext *dst, const AVCodecContext *src)
Definition: h264_slice.c:470
ff_er_add_slice
void ff_er_add_slice(ERContext *s, int startx, int starty, int endx, int endy, int status)
Add a slice.
Definition: error_resilience.c:826
find_unused_picture
static int find_unused_picture(const H264Context *h)
Definition: h264_slice.c:275
AVFrame::flags
int flags
Frame flags, a combination of AV_FRAME_FLAGS.
Definition: frame.h:654
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
ff_h264_pred_weight_table
int ff_h264_pred_weight_table(GetBitContext *gb, const SPS *sps, const int *ref_count, int slice_type_nos, H264PredWeightTable *pwt, int picture_structure, void *logctx)
Definition: h264_parse.c:30
FRAME_RECOVERED_SEI
#define FRAME_RECOVERED_SEI
Sufficient number of frames have been decoded since a SEI recovery point, so all the following frames...
Definition: h264dec.h:530
H264SliceContext::is_complex
int is_complex
Definition: h264dec.h:241
ER_DC_END
#define ER_DC_END
Definition: error_resilience.h:33
ff_h264_decode_ref_pic_list_reordering
int ff_h264_decode_ref_pic_list_reordering(H264SliceContext *sl, void *logctx)
Definition: h264_refs.c:426
mpegutils.h
AVFrame::buf
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:587
H264Picture::invalid_gap
int invalid_gap
Definition: h264dec.h:154
av_timecode_get_smpte
uint32_t av_timecode_get_smpte(AVRational rate, int drop, int hh, int mm, int ss, int ff)
Convert sei info to SMPTE 12M binary representation.
Definition: timecode.c:70
HEVCSEILCEVC
Definition: h2645_sei.h:53
thread.h
ThreadFrame::f
AVFrame * f
Definition: threadframe.h:28
AV_PIX_FMT_VULKAN
@ AV_PIX_FMT_VULKAN
Vulkan hardware images.
Definition: pixfmt.h:379
FF_DEBUG_PICT_INFO
#define FF_DEBUG_PICT_INFO
Definition: avcodec.h:1375
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:431
H264SliceContext::mb_x
int mb_x
Definition: h264dec.h:233
AV_FRAME_FLAG_TOP_FIELD_FIRST
#define AV_FRAME_FLAG_TOP_FIELD_FIRST
A flag to mark frames where the top field is displayed first if the content is interlaced.
Definition: frame.h:638
H264Picture::frame_num
int frame_num
frame_num (raw frame_num from slice header)
Definition: h264dec.h:136
H264SliceContext::next_slice_idx
int next_slice_idx
Definition: h264dec.h:239
H264SliceContext
Definition: h264dec.h:180
golomb.h
exp golomb vlc stuff
MB_FIELD
#define MB_FIELD(sl)
Definition: h264dec.h:65
get_bits
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:318
ff_h264_filter_mb
void ff_h264_filter_mb(const H264Context *h, H264SliceContext *sl, int mb_x, int mb_y, uint8_t *img_y, uint8_t *img_cb, uint8_t *img_cr, unsigned int linesize, unsigned int uvlinesize)
Definition: h264_loopfilter.c:716
H264SliceContext::mv_cache
int16_t mv_cache[2][5 *8][2]
Motion vector cache.
Definition: h264dec.h:301
AV_CODEC_FLAG_OUTPUT_CORRUPT
#define AV_CODEC_FLAG_OUTPUT_CORRUPT
Output even those frames that might be corrupted.
Definition: avcodec.h:221
USES_LIST
#define USES_LIST(a, list)
Definition: h264dec.h:103
AV_PIX_FMT_GBRP14
#define AV_PIX_FMT_GBRP14
Definition: pixfmt.h:546
finish
static void finish(void)
Definition: movenc.c:374
get_chroma_qp
static av_always_inline int get_chroma_qp(const PPS *pps, int t, int qscale)
Get the chroma qp.
Definition: h264dec.h:675
H264Picture::mmco_reset
int mmco_reset
MMCO_RESET set this 1.
Definition: h264dec.h:137
fail
#define fail()
Definition: checkasm.h:194
AV_PIX_FMT_GBRP10
#define AV_PIX_FMT_GBRP10
Definition: pixfmt.h:544
timecode.h
h264_select_output_frame
static int h264_select_output_frame(H264Context *h)
Definition: h264_slice.c:1294
AV_PIX_FMT_YUV422P9
#define AV_PIX_FMT_YUV422P9
Definition: pixfmt.h:526
CABACContext::bytestream
const uint8_t * bytestream
Definition: cabac.h:45
av_pix_fmt_get_chroma_sub_sample
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
Definition: pixdesc.c:3369
ff_videodsp_init
av_cold void ff_videodsp_init(VideoDSPContext *ctx, int bpc)
Definition: videodsp.c:39
H264Picture::mb_stride
int mb_stride
Definition: h264dec.h:161
IN_RANGE
#define IN_RANGE(a, b, size)
Definition: h264_slice.c:287
scan8
static const uint8_t scan8[16 *3+3]
Definition: h264_parse.h:40
ff_h264_flush_change
void ff_h264_flush_change(H264Context *h)
Definition: h264dec.c:452
ff_h264qpel_init
av_cold void ff_h264qpel_init(H264QpelContext *c, int bit_depth)
Definition: h264qpel.c:49
MAX_SLICES
#define MAX_SLICES
Definition: d3d12va_hevc.c:33
ff_h264_sei_process_picture_timing
int ff_h264_sei_process_picture_timing(H264SEIPictureTiming *h, const SPS *sps, void *logctx)
Parse the contents of a picture timing message given an active SPS.
Definition: h264_sei.c:64
h264_frame_start
static int h264_frame_start(H264Context *h)
Definition: h264_slice.c:482
H264SliceContext::deblocking_filter
int deblocking_filter
disable_deblocking_filter_idc with 1 <-> 0
Definition: h264dec.h:196
H264PredWeightTable::luma_log2_weight_denom
int luma_log2_weight_denom
Definition: h264_parse.h:72
ss
#define ss(width, name, subs,...)
Definition: cbs_vp9.c:202
H264Picture::f_grain
AVFrame * f_grain
Definition: h264dec.h:118
H264SliceContext::picture_structure
int picture_structure
Definition: h264dec.h:243
ff_h264_golomb_to_pict_type
const uint8_t ff_h264_golomb_to_pict_type[5]
Definition: h264data.c:37
release_unused_pictures
static void release_unused_pictures(H264Context *h, int remove_current)
Definition: h264_slice.c:117
H264PredWeightTable::use_weight
int use_weight
Definition: h264_parse.h:70
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
H264SliceContext::direct_spatial_mv_pred
int direct_spatial_mv_pred
Definition: h264dec.h:254
H264SliceContext::slice_num
int slice_num
Definition: h264dec.h:185
pack16to32
static av_always_inline uint32_t pack16to32(unsigned a, unsigned b)
Definition: h264_parse.h:127
refstruct.h
ff_frame_new_side_data_from_buf
int ff_frame_new_side_data_from_buf(const AVCodecContext *avctx, AVFrame *frame, enum AVFrameSideDataType type, AVBufferRef **buf)
Similar to ff_frame_new_side_data, but using an existing buffer ref.
Definition: decode.c:2041
non_j_pixfmt
static enum AVPixelFormat non_j_pixfmt(enum AVPixelFormat a)
Definition: h264_slice.c:1046
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:531
ff_h264_init_cabac_states
void ff_h264_init_cabac_states(const H264Context *h, H264SliceContext *sl)
Definition: h264_cabac.c:1262
ff_h264_hl_decode_mb
void ff_h264_hl_decode_mb(const H264Context *h, H264SliceContext *sl)
Definition: h264_mb.c:800
avassert.h
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AV_FRAME_FLAG_KEY
#define AV_FRAME_FLAG_KEY
A flag to mark frames that are keyframes.
Definition: frame.h:625
ff_thread_report_progress
void ff_thread_report_progress(ThreadFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
Definition: pthread_frame.c:626
av_memcpy_backptr
void av_memcpy_backptr(uint8_t *dst, int back, int cnt)
Overlapping memcpy() implementation.
Definition: mem.c:447
AV_PIX_FMT_YUVJ422P
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:86
H264Picture::qscale_table_base
int8_t * qscale_table_base
RefStruct reference.
Definition: h264dec.h:120
ff_h264_queue_decode_slice
int ff_h264_queue_decode_slice(H264Context *h, const H2645NAL *nal)
Submit a slice for decoding.
Definition: h264_slice.c:2063
H264Context::DPB
H264Picture DPB[H264_MAX_PICTURE_COUNT]
Definition: h264dec.h:349
ff_hwaccel_frame_priv_alloc
int ff_hwaccel_frame_priv_alloc(AVCodecContext *avctx, void **hwaccel_picture_private)
Allocate a hwaccel frame private data if the provided avctx uses a hwaccel method that needs it.
Definition: decode.c:2155
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
H264PredWeightTable::chroma_log2_weight_denom
int chroma_log2_weight_denom
Definition: h264_parse.h:73
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:60
AV_ZERO32
#define AV_ZERO32(d)
Definition: intreadwrite.h:662
AV_GET_BUFFER_FLAG_REF
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
Definition: avcodec.h:411
FIELD_PICTURE
#define FIELD_PICTURE(h)
Definition: h264dec.h:67
ff_thread_get_buffer
int ff_thread_get_buffer(AVCodecContext *avctx, AVFrame *f, int flags)
Wrapper around get_buffer() for frame-multithreaded codecs.
Definition: pthread_frame.c:1048
ff_h264_execute_ref_pic_marking
int ff_h264_execute_ref_pic_marking(H264Context *h)
Execute the reference picture marking (memory management control operations).
Definition: h264_refs.c:605
ff_h264_decode_ref_pic_marking
int ff_h264_decode_ref_pic_marking(H264SliceContext *sl, GetBitContext *gb, const H2645NAL *nal, void *logctx)
Definition: h264_refs.c:827
from
const char * from
Definition: jacosubdec.c:66
to
const char * to
Definition: webvttdec.c:35
h264_slice_header_parse
static int h264_slice_header_parse(const H264Context *h, H264SliceContext *sl, const H2645NAL *nal)
Definition: h264_slice.c:1695
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
H264PredWeightTable::chroma_weight_flag
int chroma_weight_flag[2]
7.4.3.2 chroma_weight_lX_flag
Definition: h264_parse.h:75
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:298
h264data.h
AV_PIX_FMT_YUV420P9
#define AV_PIX_FMT_YUV420P9
Definition: pixfmt.h:525
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:231
PICT_TOP_FIELD
#define PICT_TOP_FIELD
Definition: mpegutils.h:31
decode.h
field_scan8x8_cavlc
static const uint8_t field_scan8x8_cavlc[64+1]
Definition: h264_slice.c:78
H264SliceContext::slice_alpha_c0_offset
int slice_alpha_c0_offset
Definition: h264dec.h:197
field
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this field
Definition: writing_filters.txt:78
AVFrame::crop_right
size_t crop_right
Definition: frame.h:736
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
backup_mb_border
static av_always_inline void backup_mb_border(const H264Context *h, H264SliceContext *sl, const uint8_t *src_y, const uint8_t *src_cb, const uint8_t *src_cr, int linesize, int uvlinesize, int simple)
Definition: h264_slice.c:586
H264SliceContext::slice_type
int slice_type
Definition: h264dec.h:186
H264SliceContext::resync_mb_x
int resync_mb_x
Definition: h264dec.h:235
H264Picture::sei_recovery_frame_cnt
int sei_recovery_frame_cnt
Definition: h264dec.h:155
AVDISCARD_BIDIR
@ AVDISCARD_BIDIR
discard all bidirectional frames
Definition: defs.h:218
get_se_golomb
static int get_se_golomb(GetBitContext *gb)
read signed exp golomb code.
Definition: golomb.h:239
H2645NAL::type
int type
NAL unit type.
Definition: h2645_parse.h:52
AV_CODEC_ID_H264
@ AV_CODEC_ID_H264
Definition: codec_id.h:79
AV_PIX_FMT_YUVJ444P
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:87
H264Context::enable_er
int enable_er
Definition: h264dec.h:567
ff_h264_draw_horiz_band
void ff_h264_draw_horiz_band(const H264Context *h, H264SliceContext *sl, int y, int height)
Definition: h264dec.c:103
H264SliceContext::curr_pic_num
int curr_pic_num
Definition: h264dec.h:333
ff_thread_ref_frame
int ff_thread_ref_frame(ThreadFrame *dst, const ThreadFrame *src)
Definition: utils.c:848
arg
const char * arg
Definition: jacosubdec.c:67
FFABS
#define FFABS(a)
Absolute value, Note, INT_MIN / INT64_MIN result in undefined behavior as they are not representable ...
Definition: common.h:74
if
if(ret)
Definition: filter_design.txt:179
H264_MAX_DPB_FRAMES
@ H264_MAX_DPB_FRAMES
Definition: h264.h:76
AVDISCARD_ALL
@ AVDISCARD_ALL
discard all
Definition: defs.h:221
threadframe.h
GetBitContext::buffer
const uint8_t * buffer
Definition: get_bits.h:109
alloc_picture
static int alloc_picture(H264Context *h, H264Picture *pic)
Definition: h264_slice.c:188
PTRDIFF_SPECIFIER
#define PTRDIFF_SPECIFIER
Definition: internal.h:128
NULL
#define NULL
Definition: coverity.c:32
AV_COPY128
#define AV_COPY128(d, s)
Definition: intreadwrite.h:642
AVERROR_PATCHWELCOME
#define AVERROR_PATCHWELCOME
Not yet implemented in FFmpeg, patches welcome.
Definition: error.h:64
AV_COPY64
#define AV_COPY64(d, s)
Definition: intreadwrite.h:638
H264SliceContext::edge_emu_buffer
uint8_t * edge_emu_buffer
Definition: h264dec.h:286
H264Picture::mb_type_base
uint32_t * mb_type_base
RefStruct reference.
Definition: h264dec.h:126
tmp
static uint8_t tmp[20]
Definition: aes_ctr.c:47
ff_thread_await_progress
void ff_thread_await_progress(const ThreadFrame *f, int n, int field)
Wait for earlier decoding threads to finish reference pictures.
Definition: pthread_frame.c:649
SPS
Sequence parameter set.
Definition: h264_ps.h:44
H264Ref::parent
const H264Picture * parent
Definition: h264dec.h:177
TRANSPOSE
#define TRANSPOSE(x)
AV_PIX_FMT_YUVJ420P
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:85
ER_MB_ERROR
#define ER_MB_ERROR
Definition: error_resilience.h:36
ff_h264_decode_mb_cabac
int ff_h264_decode_mb_cabac(const H264Context *h, H264SliceContext *sl)
Decode a macroblock.
Definition: h264_cabac.c:1920
AV_PICTURE_TYPE_SI
@ AV_PICTURE_TYPE_SI
Switching Intra.
Definition: avutil.h:283
H264SliceContext::chroma_qp
int chroma_qp[2]
Definition: h264dec.h:191
AV_CODEC_FLAG2_FAST
#define AV_CODEC_FLAG2_FAST
Allow non spec compliant speedup tricks.
Definition: avcodec.h:337
get_bits1
static unsigned int get_bits1(GetBitContext *s)
Definition: get_bits.h:371
AV_PICTURE_TYPE_I
@ AV_PICTURE_TYPE_I
Intra.
Definition: avutil.h:279
PPS
Picture parameter set.
Definition: h264_ps.h:110
av_fast_mallocz
void av_fast_mallocz(void *ptr, unsigned int *size, size_t min_size)
Allocate and clear a buffer, reusing the given one if large enough.
Definition: mem.c:562
ff_thread_release_ext_buffer
void ff_thread_release_ext_buffer(ThreadFrame *f)
Unref a ThreadFrame.
Definition: pthread_frame.c:1077
ff_set_sar
int ff_set_sar(AVCodecContext *avctx, AVRational sar)
Check that the provided sample aspect ratio is valid and set it on the codec context.
Definition: utils.c:106
mathops.h
list
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining list
Definition: filter_design.txt:25
IS_INTERLACED
#define IS_INTERLACED(a)
Definition: mpegutils.h:77
av_refstruct_pool_get
void * av_refstruct_pool_get(AVRefStructPool *pool)
Get an object from the pool, reusing an old one from the pool when available.
Definition: refstruct.c:297
H264Picture::mb_height
int mb_height
Definition: h264dec.h:160
MAX_PPS_COUNT
#define MAX_PPS_COUNT
Definition: h264_ps.h:38
AV_PIX_FMT_D3D12
@ AV_PIX_FMT_D3D12
Hardware surfaces for Direct3D 12.
Definition: pixfmt.h:440
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:529
H264SliceContext::qscale
int qscale
Definition: h264dec.h:190
get_pixel_format
static enum AVPixelFormat get_pixel_format(H264Context *h, int force_callback)
Definition: h264_slice.c:786
fill_filter_caches
static int fill_filter_caches(const H264Context *h, H264SliceContext *sl, int mb_type)
Definition: h264_slice.c:2290
ERContext::error_occurred
int error_occurred
Definition: error_resilience.h:68
AV_ZERO128
#define AV_ZERO128(d)
Definition: intreadwrite.h:670
init_scan_tables
static void init_scan_tables(H264Context *h)
initialize scan tables
Definition: h264_slice.c:752
AV_PIX_FMT_GBRP9
#define AV_PIX_FMT_GBRP9
Definition: pixfmt.h:543
H264SliceContext::top_borders_allocated
int top_borders_allocated[2]
Definition: h264dec.h:290
AV_PICTURE_TYPE_SP
@ AV_PICTURE_TYPE_SP
Switching Predicted.
Definition: avutil.h:284
FIELD_OR_MBAFF_PICTURE
#define FIELD_OR_MBAFF_PICTURE(h)
Definition: h264dec.h:84
H264SliceContext::mb_skip_run
int mb_skip_run
Definition: h264dec.h:240
h264_ps.h
init_dimensions
static void init_dimensions(H264Context *h)
Definition: h264_slice.c:924
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
H264SliceContext::top_type
int top_type
Definition: h264dec.h:217
AVFrame::crop_bottom
size_t crop_bottom
Definition: frame.h:734
H264SliceContext::resync_mb_y
int resync_mb_y
Definition: h264dec.h:236
H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM
@ H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM
6: bottom field, top field, bottom field repeated, in that order
Definition: h264_sei.h:38
DELAYED_PIC_REF
#define DELAYED_PIC_REF
Value of Picture.reference when Picture is not a reference picture, but is held for delayed output.
Definition: diracdec.c:69
H264SEIPictureTiming
Definition: h264_sei.h:54
H264SliceContext::cabac
CABACContext cabac
Cabac.
Definition: h264dec.h:320
H264SliceContext::redundant_pic_count
int redundant_pic_count
Definition: h264dec.h:247
AVFrame::crop_left
size_t crop_left
Definition: frame.h:735
IS_INTRA
#define IS_INTRA(x, y)
AVDISCARD_NONKEY
@ AVDISCARD_NONKEY
discard all frames except keyframes
Definition: defs.h:220
AVFrame::pict_type
enum AVPictureType pict_type
Picture type of the frame.
Definition: frame.h:502
ff_zigzag_scan
const uint8_t ff_zigzag_scan[16+1]
Definition: mathtables.c:109
H264Picture::reference
int reference
Definition: h264dec.h:152
AV_CODEC_FLAG_GRAY
#define AV_CODEC_FLAG_GRAY
Only decode/encode grayscale.
Definition: avcodec.h:302
CABAC
#define CABAC(h)
Definition: h264_cabac.c:28
LEFT_MBS
#define LEFT_MBS
Definition: h264dec.h:68
height
#define height
Definition: dsp.h:85
dst
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t int int16_t * dst
Definition: dsp.h:83
rectangle.h
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:424
H264SliceContext::mb_uvlinesize
ptrdiff_t mb_uvlinesize
Definition: h264dec.h:231
VP_START
#define VP_START
current MB is the first after a resync marker
Definition: error_resilience.h:28
AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:533
H264SliceContext::pwt
H264PredWeightTable pwt
Definition: h264dec.h:200
AV_FRAME_DATA_LCEVC
@ AV_FRAME_DATA_LCEVC
Raw LCEVC payload data, as a uint8_t array, with NAL emulation bytes intact.
Definition: frame.h:236
H264Picture::tf
ThreadFrame tf
Definition: h264dec.h:116
H264Picture::mb_type
uint32_t * mb_type
Definition: h264dec.h:127
H264Picture::decode_error_flags
atomic_int * decode_error_flags
RefStruct reference; its pointee is shared between decoding threads.
Definition: h264dec.h:164
ff_h264_decode_mb_cavlc
int ff_h264_decode_mb_cavlc(const H264Context *h, H264SliceContext *sl)
Decode a macroblock.
Definition: h264_cavlc.c:665
H264_SEI_PIC_STRUCT_BOTTOM_TOP
@ H264_SEI_PIC_STRUCT_BOTTOM_TOP
4: bottom field, top field, in that order
Definition: h264_sei.h:36
H264Picture::recovered
int recovered
picture at IDR or recovery point + recovery count
Definition: h264dec.h:153
H2645NAL::gb
GetBitContext gb
Definition: h2645_parse.h:47
H264SliceContext::top_mb_xy
int top_mb_xy
Definition: h264dec.h:212
H264SliceContext::qp_thresh
int qp_thresh
QP threshold to skip loopfilter.
Definition: h264dec.h:192
ff_frame_new_side_data
int ff_frame_new_side_data(const AVCodecContext *avctx, AVFrame *frame, enum AVFrameSideDataType type, size_t size, AVFrameSideData **psd)
Wrapper around av_frame_new_side_data, which rejects side data overridden by the demuxer.
Definition: decode.c:2003
H264Picture::gray
int gray
Definition: h264dec.h:166
H2645NAL
Definition: h2645_parse.h:34
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:535
H264SliceContext::top_borders
uint8_t(*[2] top_borders)[(16 *3) *2]
Definition: h264dec.h:287
AVFrameSideData::data
uint8_t * data
Definition: frame.h:267
h264chroma.h
FF_THREAD_SLICE
#define FF_THREAD_SLICE
Decode more than one part of a single frame at once.
Definition: avcodec.h:1573
H264SliceContext::cbp
int cbp
Definition: h264dec.h:258
gray
The official guide to swscale for confused that consecutive non overlapping rectangles of slice_bottom special converter These generally are unscaled converters of common like for each output line the vertical scaler pulls lines from a ring buffer When the ring buffer does not contain the wanted then it is pulled from the input slice through the input converter and horizontal scaler The result is also stored in the ring buffer to serve future vertical scaler requests When no more output can be generated because lines from a future slice would be then all remaining lines in the current slice are horizontally scaled and put in the ring buffer[This is done for luma and chroma, each with possibly different numbers of lines per picture.] Input to YUV Converter When the input to the main path is not planar bits per component YUV or bit gray
Definition: swscale.txt:52
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:497
H264SliceContext::left_type
int left_type[LEFT_MBS]
Definition: h264dec.h:219
ff_h264_direct_ref_list_init
void ff_h264_direct_ref_list_init(const H264Context *const h, H264SliceContext *sl)
Definition: h264_direct.c:120
H264SliceContext::mb_y
int mb_y
Definition: h264dec.h:233
H264PredWeightTable::implicit_weight
int implicit_weight[48][48][2]
Definition: h264_parse.h:79
decode_slice
static int decode_slice(struct AVCodecContext *avctx, void *arg)
Definition: h264_slice.c:2558
H264SliceContext::explicit_ref_marking
int explicit_ref_marking
Definition: h264dec.h:326
a
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:41
pt
int pt
Definition: rtp.c:35
H264SliceContext::uvlinesize
ptrdiff_t uvlinesize
Definition: h264dec.h:229
AVBufferRef::buffer
AVBuffer * buffer
Definition: buffer.h:83
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
H264SliceContext::slice_type_nos
int slice_type_nos
S free slice type (SI/SP are remapped to I/P)
Definition: h264dec.h:187
H264SliceContext::delta_poc_bottom
int delta_poc_bottom
Definition: h264dec.h:331
copy_picture_range
static void copy_picture_range(H264Picture **to, H264Picture *const *from, int count, H264Context *new_base, const H264Context *old_base)
Definition: h264_slice.c:294
av_zero_extend
#define av_zero_extend
Definition: common.h:151
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:126
FRAME_MBAFF
#define FRAME_MBAFF(h)
Definition: h264dec.h:66
IS_DIRECT
#define IS_DIRECT(a)
Definition: mpegutils.h:78
H264_SEI_PIC_STRUCT_FRAME
@ H264_SEI_PIC_STRUCT_FRAME
0: frame
Definition: h264_sei.h:32
get_cabac_terminate
static int av_unused get_cabac_terminate(CABACContext *c)
Definition: cabac_functions.h:187
H264_SEI_PIC_STRUCT_FRAME_TRIPLING
@ H264_SEI_PIC_STRUCT_FRAME_TRIPLING
8: frame tripling
Definition: h264_sei.h:40
field_scan
static const uint8_t field_scan[16+1]
Definition: h264_slice.c:52
loop_filter
static void loop_filter(const H264Context *h, H264SliceContext *sl, int start_x, int end_x)
Definition: h264_slice.c:2432
ff_init_cabac_decoder
int ff_init_cabac_decoder(CABACContext *c, const uint8_t *buf, int buf_size)
Definition: cabac.c:162
H264SliceContext::mb_mbaff
int mb_mbaff
mb_aff_frame && mb_field_decoding_flag
Definition: h264dec.h:245
field_scan8x8
static const uint8_t field_scan8x8[64+1]
Definition: h264_slice.c:59
AV_PIX_FMT_VDPAU
@ AV_PIX_FMT_VDPAU
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:194
av_get_picture_type_char
char av_get_picture_type_char(enum AVPictureType pict_type)
Return a single letter to describe the given picture type pict_type.
Definition: utils.c:40
AV_PIX_FMT_VIDEOTOOLBOX
@ AV_PIX_FMT_VIDEOTOOLBOX
hardware decoding through Videotoolbox
Definition: pixfmt.h:305
LIST_NOT_USED
#define LIST_NOT_USED
Definition: h264dec.h:398
H264Picture::field_picture
int field_picture
whether or not picture was encoded in separate fields
Definition: h264dec.h:145
h264dec.h
H264SliceContext::poc_lsb
int poc_lsb
Definition: h264dec.h:330
H264SliceContext::first_mb_addr
unsigned int first_mb_addr
Definition: h264dec.h:237
ff_h264_direct_dist_scale_factor
void ff_h264_direct_dist_scale_factor(const H264Context *const h, H264SliceContext *sl)
Definition: h264_direct.c:61
H264Picture::needs_fg
int needs_fg
whether picture needs film grain synthesis (see f_grain)
Definition: h264dec.h:156
AVBuffer
A reference counted buffer type.
Definition: buffer_internal.h:38
H264Context
H264Context.
Definition: h264dec.h:340
AVDISCARD_NONINTRA
@ AVDISCARD_NONINTRA
discard all non intra frames
Definition: defs.h:219
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
av_timecode_make_smpte_tc_string2
char * av_timecode_make_smpte_tc_string2(char *buf, AVRational rate, uint32_t tcsmpte, int prevent_df, int skip_field)
Get the timecode string from the SMPTE timecode format.
Definition: timecode.c:131
AV_CODEC_FLAG2_SHOW_ALL
#define AV_CODEC_FLAG2_SHOW_ALL
Show all frames before the first keyframe.
Definition: avcodec.h:360
AV_FRAME_FLAG_CORRUPT
#define AV_FRAME_FLAG_CORRUPT
The frame data may be corrupted, e.g.
Definition: frame.h:621
H264_SEI_PIC_STRUCT_FRAME_DOUBLING
@ H264_SEI_PIC_STRUCT_FRAME_DOUBLING
7: frame doubling
Definition: h264_sei.h:39
H264SliceContext::frame_num
int frame_num
Definition: h264dec.h:328
AV_PIX_FMT_GBRP12
#define AV_PIX_FMT_GBRP12
Definition: pixfmt.h:545
ff_h264_execute_decode_slices
int ff_h264_execute_decode_slices(H264Context *h)
Call decode_slice() for each context.
Definition: h264_slice.c:2770
H264SliceContext::mb_linesize
ptrdiff_t mb_linesize
may be equal to s->linesize or s->linesize * 2, for mbaff
Definition: h264dec.h:230
av_assert1
#define av_assert1(cond)
assert() equivalent, that does not lie in speed critical code.
Definition: avassert.h:56
av_always_inline
#define av_always_inline
Definition: attributes.h:49
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
cabac_functions.h
AV_COPY32
#define AV_COPY32(d, s)
Definition: intreadwrite.h:634
ff_h264_replace_picture
int ff_h264_replace_picture(H264Picture *dst, const H264Picture *src)
Definition: h264_picture.c:135
ff_h264_parse_ref_count
int ff_h264_parse_ref_count(int *plist_count, int ref_count[2], GetBitContext *gb, const PPS *pps, int slice_type_nos, int picture_structure, void *logctx)
Definition: h264_parse.c:222
ff_h264_alloc_tables
int ff_h264_alloc_tables(H264Context *h)
Allocate tables.
Definition: h264dec.c:186
ff_thread_get_ext_buffer
int ff_thread_get_ext_buffer(AVCodecContext *avctx, ThreadFrame *f, int flags)
Wrapper around ff_get_buffer() for frame-multithreaded codecs.
Definition: pthread_frame.c:1056
AV_FRAME_FLAG_INTERLACED
#define AV_FRAME_FLAG_INTERLACED
A flag to mark frames whose content is interlaced.
Definition: frame.h:633
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:733
AV_PIX_FMT_YUV444P9
#define AV_PIX_FMT_YUV444P9
Definition: pixfmt.h:527
H264SliceContext::list_count
unsigned int list_count
Definition: h264dec.h:271
avcodec.h
H264SliceContext::h264
const struct H264Context * h264
Definition: h264dec.h:181
av_cmp_q
static int av_cmp_q(AVRational a, AVRational b)
Compare two rationals.
Definition: rational.h:89
ff_h264dsp_init
av_cold void ff_h264dsp_init(H264DSPContext *c, const int bit_depth, const int chroma_format_idc)
Definition: h264dsp.c:66
ff_zigzag_direct
const uint8_t ff_zigzag_direct[64]
Definition: mathtables.c:98
ret
ret
Definition: filter_design.txt:187
AV_EF_AGGRESSIVE
#define AV_EF_AGGRESSIVE
consider things that a sane encoder/muxer should not do as an error
Definition: defs.h:56
ff_h264_init_poc
int ff_h264_init_poc(int pic_field_poc[2], int *pic_poc, const SPS *sps, H264POCContext *pc, int picture_structure, int nal_ref_idc)
Definition: h264_parse.c:280
ff_h264_get_profile
int ff_h264_get_profile(const SPS *sps)
Compute profile from profile_idc and constraint_set?_flags.
Definition: h264_parse.c:533
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
h264_field_start
static int h264_field_start(H264Context *h, const H264SliceContext *sl, const H2645NAL *nal, int first_slice)
Definition: h264_slice.c:1396
H264SliceContext::last_qscale_diff
int last_qscale_diff
Definition: h264dec.h:193
sps
static int FUNC() sps(CodedBitstreamContext *ctx, RWContext *rw, H264RawSPS *current)
Definition: cbs_h264_syntax_template.c:260
align_get_bits
static const uint8_t * align_get_bits(GetBitContext *s)
Definition: get_bits.h:544
av_refstruct_pool_alloc
AVRefStructPool * av_refstruct_pool_alloc(size_t size, unsigned flags)
Equivalent to av_refstruct_pool_alloc(size, flags, NULL, NULL, NULL, NULL, NULL)
Definition: refstruct.c:335
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:532
U
#define U(x)
Definition: vpx_arith.h:37
AV_PIX_FMT_YUV422P14
#define AV_PIX_FMT_YUV422P14
Definition: pixfmt.h:537
H264SliceContext::pps_id
unsigned int pps_id
Definition: h264dec.h:281
H264SliceContext::linesize
ptrdiff_t linesize
Definition: h264dec.h:229
H264SliceContext::slice_beta_offset
int slice_beta_offset
Definition: h264dec.h:198
AVCodecContext
main external API structure.
Definition: avcodec.h:431
AVFrame::height
int height
Definition: frame.h:482
get_ue_golomb_31
static int get_ue_golomb_31(GetBitContext *gb)
read unsigned exp golomb code, constraint to a max of 31.
Definition: golomb.h:120
status
ov_status_e status
Definition: dnn_backend_openvino.c:100
ff_h264_build_ref_list
int ff_h264_build_ref_list(H264Context *h, H264SliceContext *sl)
Definition: h264_refs.c:294
H264Picture::motion_val_base
int16_t(*[2] motion_val_base)[2]
RefStruct reference.
Definition: h264dec.h:123
AVCodecContext::execute
int(* execute)(struct AVCodecContext *c, int(*func)(struct AVCodecContext *c2, void *arg), void *arg2, int *ret, int count, int size)
The codec may call this to execute several independent things.
Definition: avcodec.h:1591
av_refstruct_ref_c
const void * av_refstruct_ref_c(const void *obj)
Analog of av_refstruct_ref(), but for constant objects.
Definition: refstruct.c:149
H264SliceContext::bipred_scratchpad
uint8_t * bipred_scratchpad
Definition: h264dec.h:285
ff_h264_pred_init
av_cold void ff_h264_pred_init(H264PredContext *h, int codec_id, const int bit_depth, int chroma_format_idc)
Set the intra prediction function pointers.
Definition: h264pred.c:437
H264Picture::field_poc
int field_poc[2]
top/bottom POC
Definition: h264dec.h:134
AV_PICTURE_TYPE_B
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
Definition: avutil.h:281
H264SliceContext::mmco
MMCO mmco[H264_MAX_MMCO_COUNT]
Definition: h264dec.h:324
av_refstruct_replace
void av_refstruct_replace(void *dstp, const void *src)
Ensure *dstp refers to the same object as src.
Definition: refstruct.c:160
error_resilience.h
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
H264Picture::mb_width
int mb_width
Definition: h264dec.h:160
ff_h264_unref_picture
void ff_h264_unref_picture(H264Picture *pic)
Definition: h264_picture.c:39
fill_rectangle
static void fill_rectangle(int x, int y, int w, int h)
Definition: ffplay.c:825
H264Picture
Definition: h264dec.h:114
ERContext::error_status_table
uint8_t * error_status_table
Definition: error_resilience.h:69
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:117
AV_PIX_FMT_FLAG_PLANAR
#define AV_PIX_FMT_FLAG_PLANAR
At least one pixel component is not in the first data plane.
Definition: pixdesc.h:132
pps
uint64_t pps
Definition: dovi_rpuenc.c:35
h264_slice_init
static int h264_slice_init(H264Context *h, H264SliceContext *sl, const H2645NAL *nal)
Definition: h264_slice.c:1914
ff_h264chroma_init
av_cold void ff_h264chroma_init(H264ChromaContext *c, int bit_depth)
Definition: h264chroma.c:41
ff_h264_field_end
int ff_h264_field_end(H264Context *h, H264SliceContext *sl, int in_setup)
Definition: h264_picture.c:189
H264_NAL_IDR_SLICE
@ H264_NAL_IDR_SLICE
Definition: h264.h:39
CABACContext::bytestream_end
const uint8_t * bytestream_end
Definition: cabac.h:46
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:78
init_table_pools
static int init_table_pools(H264Context *h)
Definition: h264_slice.c:162
H264SliceContext::ref_list
H264Ref ref_list[2][48]
0..15: frame refs, 16..47: mbaff field refs.
Definition: h264dec.h:272
LBOT
#define LBOT
Definition: h264dec.h:70
H264SliceContext::non_zero_count_cache
uint8_t non_zero_count_cache[15 *8]
non zero coeff count cache.
Definition: h264dec.h:296
AV_PIX_FMT_GBRP
@ AV_PIX_FMT_GBRP
planar GBR 4:4:4 24bpp
Definition: pixfmt.h:165
desc
const char * desc
Definition: libsvtav1.c:79
AV_PICTURE_TYPE_P
@ AV_PICTURE_TYPE_P
Predicted.
Definition: avutil.h:280
IS_INTER
#define IS_INTER(a)
Definition: mpegutils.h:73
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:77
mem.h
get_ue_golomb_long
static unsigned get_ue_golomb_long(GetBitContext *gb)
Read an unsigned Exp-Golomb code in the range 0 to UINT32_MAX-1.
Definition: golomb.h:104
H264Context::nal_length_size
int nal_length_size
Number of bytes used for nal length (1, 2 or 4)
Definition: h264dec.h:458
ff_h2645_sei_ctx_replace
int ff_h2645_sei_ctx_replace(H2645SEI *dst, const H2645SEI *src)
Definition: h2645_sei.c:521
avpriv_request_sample
#define avpriv_request_sample(...)
Definition: tableprint_vlc.h:37
ER_MB_END
#define ER_MB_END
Definition: error_resilience.h:37
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:265
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
H264SliceContext::er
ERContext * er
Definition: h264dec.h:183
H264_SEI_PIC_STRUCT_BOTTOM_FIELD
@ H264_SEI_PIC_STRUCT_BOTTOM_FIELD
2: bottom field
Definition: h264_sei.h:34
H264Picture::hwaccel_picture_private
void * hwaccel_picture_private
RefStruct reference for hardware accelerator private data.
Definition: h264dec.h:130
ER_MV_END
#define ER_MV_END
Definition: error_resilience.h:34
H264SliceContext::idr_pic_id
int idr_pic_id
Definition: h264dec.h:329
av_refstruct_pool_uninit
static void av_refstruct_pool_uninit(AVRefStructPool **poolp)
Mark the pool as being available for freeing.
Definition: refstruct.h:292
fill_filter_caches_inter
static av_always_inline void fill_filter_caches_inter(const H264Context *h, H264SliceContext *sl, int mb_type, int top_xy, const int left_xy[LEFT_MBS], int top_type, const int left_type[LEFT_MBS], int mb_xy, int list)
Definition: h264_slice.c:2206
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
ff_tlog
#define ff_tlog(a,...)
Definition: tableprint_vlc.h:29
cr
static double cr(void *priv, double x, double y)
Definition: vf_geq.c:248
AVFrame::crop_top
size_t crop_top
Definition: frame.h:733
H264SliceContext::gb
GetBitContext gb
Definition: h264dec.h:182
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:86
av_fast_malloc
void av_fast_malloc(void *ptr, unsigned int *size, size_t min_size)
Allocate a buffer, reusing the given one if large enough.
Definition: mem.c:557
H264SliceContext::intra4x4_pred_mode
int8_t * intra4x4_pred_mode
Definition: h264dec.h:209
FFMAX3
#define FFMAX3(a, b, c)
Definition: macros.h:48
LTOP
#define LTOP
Definition: h264dec.h:69
h264.h
AVERROR_BUG
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
Definition: error.h:52
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:455
H264SliceContext::edge_emu_buffer_allocated
int edge_emu_buffer_allocated
Definition: h264dec.h:289
REBASE_PICTURE
#define REBASE_PICTURE(pic, new_ctx, old_ctx)
Definition: h264_slice.c:289
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
CHROMA444
#define CHROMA444(h)
Definition: h264dec.h:92
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
ff_h264_get_slice_type
int ff_h264_get_slice_type(const H264SliceContext *sl)
Reconstruct bitstream slice_type.
Definition: h264_slice.c:2188
h
h
Definition: vp9dsp_template.c:2070
H264SliceContext::cabac_init_idc
int cabac_init_idc
Definition: h264dec.h:322
AV_PIX_FMT_YUV444P14
#define AV_PIX_FMT_YUV444P14
Definition: pixfmt.h:538
H264PredWeightTable::luma_weight_flag
int luma_weight_flag[2]
7.4.3.2 luma_weight_lX_flag
Definition: h264_parse.h:74
H264_MAX_PICTURE_COUNT
#define H264_MAX_PICTURE_COUNT
Definition: h264dec.h:49
ER_AC_END
#define ER_AC_END
Definition: error_resilience.h:32
H264SliceContext::bipred_scratchpad_allocated
int bipred_scratchpad_allocated
Definition: h264dec.h:288
atomic_init
#define atomic_init(obj, value)
Definition: stdatomic.h:33
width
#define width
Definition: dsp.h:85
AVDISCARD_NONREF
@ AVDISCARD_NONREF
discard all non reference
Definition: defs.h:217
H264SliceContext::slice_type_fixed
int slice_type_fixed
Definition: h264dec.h:188
H264Ref::poc
int poc
Definition: h264dec.h:174
IS_8x8DCT
#define IS_8x8DCT(a)
Definition: h264dec.h:95
H264_SEI_PIC_STRUCT_TOP_FIELD
@ H264_SEI_PIC_STRUCT_TOP_FIELD
1: top field
Definition: h264_sei.h:33
H264SliceContext::delta_poc
int delta_poc[2]
Definition: h264dec.h:332
av_color_transfer_name
const char * av_color_transfer_name(enum AVColorTransferCharacteristic transfer)
Definition: pixdesc.c:3696
H264Picture::long_ref
int long_ref
1->long term reference 0->short term reference
Definition: h264dec.h:141
H264Ref::reference
int reference
Definition: h264dec.h:173
src
#define src
Definition: vp8dsp.c:248
H264Picture::motion_val
int16_t(*[2] motion_val)[2]
Definition: h264dec.h:124
AV_CODEC_EXPORT_DATA_FILM_GRAIN
#define AV_CODEC_EXPORT_DATA_FILM_GRAIN
Decoding only.
Definition: avcodec.h:400
AV_PIX_FMT_YUV420P14
#define AV_PIX_FMT_YUV420P14
Definition: pixfmt.h:536
H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP
@ H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP
5: top field, bottom field, top field repeated, in that order
Definition: h264_sei.h:37
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3261
H264SliceContext::mb_field_decoding_flag
int mb_field_decoding_flag
Definition: h264dec.h:244
ff_h264_set_erpic
void ff_h264_set_erpic(ERPicture *dst, const H264Picture *src)
Definition: h264_picture.c:166
H264Context::is_avc
int is_avc
Used to parse AVC variant of H.264.
Definition: h264dec.h:457