Merge remote-tracking branch 'qatar/master'
author Michael Niedermayer <michaelni@gmx.at>
Sat, 17 Mar 2012 03:43:12 +0000 (04:43 +0100)
committer Michael Niedermayer <michaelni@gmx.at>
Sat, 17 Mar 2012 03:43:12 +0000 (04:43 +0100)
* qatar/master:
  h264: K&R formatting cosmetics
  s3tc.h: Add missing #include to fix standalone header compilation.
  FATE: add capability for audio encode/decode tests with fuzzy psnr comparison
  FATE: allow a tolerance in the size comparison in do_tiny_psnr()
  FATE: use absolute difference from a target value in do_tiny_psnr()
  FATE: allow tests to set CMP_SHIFT to pass to tiny_psnr
  FATE: use $fuzz directly in do_tiny_psnr() instead of passing it around

Conflicts:
libavcodec/h264.c

Merged-by: Michael Niedermayer <michaelni@gmx.at>
libavcodec/h264.c
libavcodec/s3tc.h
tests/Makefile
tests/fate-run.sh

@@@ -174,44 -207,39 +210,45 @@@ const uint8_t *ff_h264_decode_nal(H264C
              }
              break;
          }
-         i-= RS;
+         i -= RS;
      }
  
-     bufidx = h->nal_unit_type == NAL_DPC ? 1 : 0; // use second escape buffer for inter data
-     si=h->rbsp_buffer_size[bufidx];
 -    if (i >= length - 1) { // no escaped 0
 -        *dst_length = length;
 -        *consumed   = length + 1; // +1 for the header
 -        return src;
 -    }
 -
+     // use second escape buffer for inter data
+     bufidx = h->nal_unit_type == NAL_DPC ? 1 : 0;
 -    av_fast_malloc(&h->rbsp_buffer[bufidx], &h->rbsp_buffer_size[bufidx],
 -                   length + FF_INPUT_BUFFER_PADDING_SIZE);
++
++    si = h->rbsp_buffer_size[bufidx];
 +    av_fast_padded_malloc(&h->rbsp_buffer[bufidx], &h->rbsp_buffer_size[bufidx], length+MAX_MBPAIR_SIZE);
-     dst= h->rbsp_buffer[bufidx];
+     dst = h->rbsp_buffer[bufidx];
  
-     if (dst == NULL){
+     if (dst == NULL)
          return NULL;
-     }
  
- //printf("decoding esc\n");
 +    if(i>=length-1){ //no escaped 0
 +        *dst_length= length;
 +        *consumed= length+1; //+1 for the header
 +        if(h->s.avctx->flags2 & CODEC_FLAG2_FAST){
 +            return src;
 +        }else{
 +            memcpy(dst, src, length);
 +            return dst;
 +        }
 +    }
 +
+     // printf("decoding esc\n");
      memcpy(dst, src, i);
-     si=di=i;
-     while(si+2<length){
-         //remove escapes (very rare 1:2^22)
-         if(src[si+2]>3){
-             dst[di++]= src[si++];
-             dst[di++]= src[si++];
-         }else if(src[si]==0 && src[si+1]==0){
-             if(src[si+2]==3){ //escape
-                 dst[di++]= 0;
-                 dst[di++]= 0;
-                 si+=3;
+     si = di = i;
+     while (si + 2 < length) {
+         // remove escapes (very rare 1:2^22)
+         if (src[si + 2] > 3) {
+             dst[di++] = src[si++];
+             dst[di++] = src[si++];
+         } else if (src[si] == 0 && src[si + 1] == 0) {
+             if (src[si + 2] == 3) { // escape
+                 dst[di++]  = 0;
+                 dst[di++]  = 0;
+                 si        += 3;
                  continue;
-             }else //next start code
+             } else // next start code
                  goto nsc;
          }
  
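[Editor's note] The hunk above only reformats the escape-removal loop; its behaviour is unchanged. For readers unfamiliar with it, the following standalone sketch shows the same idea in isolation: the H.264 bytestream inserts an emulation-prevention byte 0x03 after each 00 00 pair, and the decoder strips it again to recover the RBSP. This is an illustrative rewrite under that assumption (the function name is mine, and it assumes the buffer holds exactly one NAL unit), not the FFmpeg code.

    #include <stdint.h>

    /* Illustrative sketch, not the FFmpeg implementation: strip H.264
     * emulation-prevention bytes, turning 00 00 03 back into 00 00.
     * Assumes `src` holds exactly one NAL unit. */
    static int nal_unescape(uint8_t *dst, const uint8_t *src, int length)
    {
        int si = 0, di = 0;

        while (si + 2 < length) {
            if (src[si] == 0 && src[si + 1] == 0 && src[si + 2] == 3) {
                dst[di++] = 0;          /* keep the two zero bytes */
                dst[di++] = 0;
                si += 3;                /* drop the 0x03 escape byte */
            } else {
                dst[di++] = src[si++];
            }
        }
        while (si < length)             /* copy the unescaped tail */
            dst[di++] = src[si++];
        return di;                      /* RBSP length after unescaping */
    }

As the hunk itself shows, the real ff_h264_decode_nal additionally short-circuits the no-escape case (returning the source buffer directly under CODEC_FLAG2_FAST, or a plain copy otherwise) and jumps to the next start code when one is found mid-buffer.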
@@@ -703,20 -744,24 +753,22 @@@ static av_always_inline void prefetch_m
  {
      /* fetch pixels for estimated mv 4 macroblocks ahead
       * optimized for 64byte cache lines */
-     MpegEncContext * const s = &h->s;
+     MpegEncContext *const s = &h->s;
      const int refn = h->ref_cache[list][scan8[0]];
-     if(refn >= 0){
-         const int mx= (h->mv_cache[list][scan8[0]][0]>>2) + 16*s->mb_x + 8;
-         const int my= (h->mv_cache[list][scan8[0]][1]>>2) + 16*s->mb_y;
+     if (refn >= 0) {
+         const int mx  = (h->mv_cache[list][scan8[0]][0] >> 2) + 16 * s->mb_x + 8;
+         const int my  = (h->mv_cache[list][scan8[0]][1] >> 2) + 16 * s->mb_y;
          uint8_t **src = h->ref_list[list][refn].f.data;
-         int off= (mx << pixel_shift) + (my + (s->mb_x&3)*4)*h->mb_linesize + (64 << pixel_shift);
-         s->dsp.prefetch(src[0]+off, s->linesize, 4);
+         int off       = (mx << pixel_shift) +
+                         (my + (s->mb_x & 3) * 4) * h->mb_linesize +
+                         (64 << pixel_shift);
+         s->dsp.prefetch(src[0] + off, s->linesize, 4);
          if (chroma_idc == 3 /* yuv444 */) {
-             s->dsp.prefetch(src[1]+off, s->linesize, 4);
-             s->dsp.prefetch(src[2]+off, s->linesize, 4);
-         }else{
+             s->dsp.prefetch(src[1] + off, s->linesize, 4);
+             s->dsp.prefetch(src[2] + off, s->linesize, 4);
+         } else {
 -            off = ((mx >> 1) << pixel_shift) +
 -                  ((my >> 1) + (s->mb_x & 7)) * s->uvlinesize +
 -                  (64 << pixel_shift);
 +            off= (((mx>>1)+64)<<pixel_shift) + ((my>>1) + (s->mb_x&7))*s->uvlinesize;
-             s->dsp.prefetch(src[1]+off, src[2]-src[1], 2);
+             s->dsp.prefetch(src[1] + off, src[2] - src[1], 2);
          }
      }
  }
@@@ -946,39 -1013,48 +1020,48 @@@ static void init_dequant_tables(H264Con
      }
  }
  
- int ff_h264_alloc_tables(H264Context *h){
-     MpegEncContext * const s = &h->s;
-     const int big_mb_num= s->mb_stride * (s->mb_height+1);
-     const int row_mb_num= 2*s->mb_stride*FFMAX(s->avctx->thread_count, 1);
-     int x,y;
-     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->intra4x4_pred_mode, row_mb_num * 8  * sizeof(uint8_t), fail)
-     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->non_zero_count    , big_mb_num * 48 * sizeof(uint8_t), fail)
-     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->slice_table_base  , (big_mb_num+s->mb_stride) * sizeof(*h->slice_table_base), fail)
-     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->cbp_table, big_mb_num * sizeof(uint16_t), fail)
-     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->chroma_pred_mode_table, big_mb_num * sizeof(uint8_t), fail)
-     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->mvd_table[0], 16*row_mb_num * sizeof(uint8_t), fail);
-     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->mvd_table[1], 16*row_mb_num * sizeof(uint8_t), fail);
-     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->direct_table, 4*big_mb_num * sizeof(uint8_t) , fail);
-     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->list_counts, big_mb_num * sizeof(uint8_t), fail)
-     memset(h->slice_table_base, -1, (big_mb_num+s->mb_stride)  * sizeof(*h->slice_table_base));
-     h->slice_table= h->slice_table_base + s->mb_stride*2 + 1;
-     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->mb2b_xy  , big_mb_num * sizeof(uint32_t), fail);
-     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->mb2br_xy , big_mb_num * sizeof(uint32_t), fail);
-     for(y=0; y<s->mb_height; y++){
-         for(x=0; x<s->mb_width; x++){
-             const int mb_xy= x + y*s->mb_stride;
-             const int b_xy = 4*x + 4*y*h->b_stride;
-             h->mb2b_xy [mb_xy]= b_xy;
-             h->mb2br_xy[mb_xy]= 8*(FMO ? mb_xy : (mb_xy % (2*s->mb_stride)));
+ int ff_h264_alloc_tables(H264Context *h)
+ {
+     MpegEncContext *const s = &h->s;
+     const int big_mb_num    = s->mb_stride * (s->mb_height + 1);
 -    const int row_mb_num    = s->mb_stride * 2 * s->avctx->thread_count;
++    const int row_mb_num    = 2*s->mb_stride*FFMAX(s->avctx->thread_count, 1);
+     int x, y;
+     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->intra4x4_pred_mode,
+                       row_mb_num * 8 * sizeof(uint8_t), fail)
+     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->non_zero_count,
+                       big_mb_num * 48 * sizeof(uint8_t), fail)
+     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->slice_table_base,
+                       (big_mb_num + s->mb_stride) * sizeof(*h->slice_table_base), fail)
+     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->cbp_table,
+                       big_mb_num * sizeof(uint16_t), fail)
+     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->chroma_pred_mode_table,
+                       big_mb_num * sizeof(uint8_t), fail)
+     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->mvd_table[0],
+                       16 * row_mb_num * sizeof(uint8_t), fail);
+     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->mvd_table[1],
+                       16 * row_mb_num * sizeof(uint8_t), fail);
+     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->direct_table,
+                       4 * big_mb_num * sizeof(uint8_t), fail);
+     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->list_counts,
+                       big_mb_num * sizeof(uint8_t), fail)
+     memset(h->slice_table_base, -1,
+            (big_mb_num + s->mb_stride) * sizeof(*h->slice_table_base));
+     h->slice_table = h->slice_table_base + s->mb_stride * 2 + 1;
+     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->mb2b_xy,
+                       big_mb_num * sizeof(uint32_t), fail);
+     FF_ALLOCZ_OR_GOTO(h->s.avctx, h->mb2br_xy,
+                       big_mb_num * sizeof(uint32_t), fail);
+     for (y = 0; y < s->mb_height; y++)
+         for (x = 0; x < s->mb_width; x++) {
+             const int mb_xy = x + y * s->mb_stride;
+             const int b_xy  = 4 * x + 4 * y * h->b_stride;
+             h->mb2b_xy[mb_xy]  = b_xy;
+             h->mb2br_xy[mb_xy] = 8 * (FMO ? mb_xy : (mb_xy % (2 * s->mb_stride)));
          }
-     }
  
      s->obmc_scratchpad = NULL;
  
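[Editor's note] A side note on the mb2b_xy loop above: a 16x16 macroblock spans a 4x4 grid of 4x4 motion blocks, so the table maps each macroblock coordinate to the index of its top-left 4x4 block. A hypothetical one-liner with the same arithmetic, for illustration only:

    /* Illustrative helper (hypothetical, mirrors the mb2b_xy table above):
     * index of the top-left 4x4 block of macroblock (mb_x, mb_y). */
    static inline int mb_to_b_index(int mb_x, int mb_y, int b_stride)
    {
        return 4 * mb_x + 4 * mb_y * b_stride;
    }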
@@@ -1030,60 -1116,54 +1123,63 @@@ fail
  
  static int decode_nal_units(H264Context *h, const uint8_t *buf, int buf_size);
  
- static av_cold void common_init(H264Context *h){
-     MpegEncContext * const s = &h->s;
+ static av_cold void common_init(H264Context *h)
+ {
+     MpegEncContext *const s = &h->s;
  
-     s->width = s->avctx->width;
-     s->height = s->avctx->height;
-     s->codec_id= s->avctx->codec->id;
+     s->width    = s->avctx->width;
+     s->height   = s->avctx->height;
+     s->codec_id = s->avctx->codec->id;
  
 -    ff_h264dsp_init(&h->h264dsp, 8, 1);
 -    ff_h264_pred_init(&h->hpc, s->codec_id, 8, 1);
 +    s->avctx->bits_per_raw_sample = 8;
 +    h->cur_chroma_format_idc = 1;
 +
 +    ff_h264dsp_init(&h->h264dsp,
 +                    s->avctx->bits_per_raw_sample, h->cur_chroma_format_idc);
 +    ff_h264_pred_init(&h->hpc, s->codec_id,
 +                      s->avctx->bits_per_raw_sample, h->cur_chroma_format_idc);
  
-     h->dequant_coeff_pps= -1;
-     s->unrestricted_mv=1;
+     h->dequant_coeff_pps = -1;
+     s->unrestricted_mv   = 1;
  
-     ff_dsputil_init(&s->dsp, s->avctx); // needed so that idct permutation is known early
 +    s->dsp.dct_bits = 16;
+     /* needed so that IDCT permutation is known early */
+     ff_dsputil_init(&s->dsp, s->avctx);
  
-     memset(h->pps.scaling_matrix4, 16, 6*16*sizeof(uint8_t));
-     memset(h->pps.scaling_matrix8, 16, 2*64*sizeof(uint8_t));
+     memset(h->pps.scaling_matrix4, 16, 6 * 16 * sizeof(uint8_t));
+     memset(h->pps.scaling_matrix8, 16, 2 * 64 * sizeof(uint8_t));
  }
  
 -int ff_h264_decode_extradata(H264Context *h)
 +int ff_h264_decode_extradata(H264Context *h, const uint8_t *buf, int size)
  {
      AVCodecContext *avctx = h->s.avctx;
  
-     if(!buf || size <= 0)
 -    if (avctx->extradata[0] == 1) {
++    if (!buf || size <= 0)
 +        return -1;
 +
-     if(buf[0] == 1){
++    if (buf[0] == 1) {
          int i, cnt, nalsize;
 -        unsigned char *p = avctx->extradata;
 +        const unsigned char *p = buf;
  
          h->is_avc = 1;
  
-         if(size < 7) {
 -        if (avctx->extradata_size < 7) {
++        if (size < 7) {
              av_log(avctx, AV_LOG_ERROR, "avcC too short\n");
              return -1;
          }
          /* sps and pps in the avcC always have length coded with 2 bytes,
-            so put a fake nal_length_size = 2 while parsing them */
+          * so put a fake nal_length_size = 2 while parsing them */
          h->nal_length_size = 2;
          // Decode sps from avcC
-         cnt = *(p+5) & 0x1f; // Number of sps
-         p += 6;
+         cnt = *(p + 5) & 0x1f; // Number of sps
+         p  += 6;
          for (i = 0; i < cnt; i++) {
              nalsize = AV_RB16(p) + 2;
 -            if (p - avctx->extradata + nalsize > avctx->extradata_size)
 +            if(nalsize > size - (p-buf))
                  return -1;
-             if(decode_nal_units(h, p, nalsize) < 0) {
-                 av_log(avctx, AV_LOG_ERROR, "Decoding sps %d from avcC failed\n", i);
+             if (decode_nal_units(h, p, nalsize) < 0) {
+                 av_log(avctx, AV_LOG_ERROR,
+                        "Decoding sps %d from avcC failed\n", i);
                  return -1;
              }
              p += nalsize;
          cnt = *(p++); // Number of pps
          for (i = 0; i < cnt; i++) {
              nalsize = AV_RB16(p) + 2;
 -            if (p - avctx->extradata + nalsize > avctx->extradata_size)
 +            if(nalsize > size - (p-buf))
                  return -1;
              if (decode_nal_units(h, p, nalsize) < 0) {
-                 av_log(avctx, AV_LOG_ERROR, "Decoding pps %d from avcC failed\n", i);
+                 av_log(avctx, AV_LOG_ERROR,
+                        "Decoding pps %d from avcC failed\n", i);
                  return -1;
              }
              p += nalsize;
          }
-         // Now store right nal length size, that will be use to parse all other nals
+         // Now store right nal length size, that will be used to parse all other nals
 -        h->nal_length_size = (avctx->extradata[4] & 0x03) + 1;
 +        h->nal_length_size = (buf[4] & 0x03) + 1;
      } else {
          h->is_avc = 0;
-         if(decode_nal_units(h, buf, size) < 0)
 -        if (decode_nal_units(h, avctx->extradata, avctx->extradata_size) < 0)
++        if (decode_nal_units(h, buf, size) < 0)
              return -1;
      }
 -    return 0;
 +    return size;
  }
  
- av_cold int ff_h264_decode_init(AVCodecContext *avctx){
-     H264Context *h= avctx->priv_data;
-     MpegEncContext * const s = &h->s;
+ av_cold int ff_h264_decode_init(AVCodecContext *avctx)
+ {
+     H264Context *h = avctx->priv_data;
+     MpegEncContext *const s = &h->s;
      int i;
  
      ff_MPV_decode_defaults(s);
      h->sps.bit_depth_luma = avctx->bits_per_raw_sample = 8;
  
      h->thread_context[0] = h;
-     h->outputed_poc = h->next_outputed_poc = INT_MIN;
+     h->outputed_poc      = h->next_outputed_poc = INT_MIN;
      for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++)
          h->last_pocs[i] = INT_MIN;
-     h->prev_poc_msb= 1<<16;
-     h->prev_frame_num= -1;
-     h->x264_build = -1;
+     h->prev_poc_msb = 1 << 16;
++    h->prev_frame_num = -1;
+     h->x264_build   = -1;
      ff_h264_reset_sei(h);
-     if(avctx->codec_id == CODEC_ID_H264){
-         if(avctx->ticks_per_frame == 1){
-             s->avctx->time_base.den *=2;
-         }
+     if (avctx->codec_id == CODEC_ID_H264) {
+         if (avctx->ticks_per_frame == 1)
+             s->avctx->time_base.den *= 2;
          avctx->ticks_per_frame = 2;
      }
  
-     if(avctx->extradata_size > 0 && avctx->extradata &&
-         ff_h264_decode_extradata(h, avctx->extradata, avctx->extradata_size)<0)
+     if (avctx->extradata_size > 0 && avctx->extradata &&
 -        ff_h264_decode_extradata(h))
++        ff_h264_decode_extradata(h, avctx->extradata, avctx->extradata_size) < 0)
          return -1;
  
-     if(h->sps.bitstream_restriction_flag && s->avctx->has_b_frames < h->sps.num_reorder_frames){
+     if (h->sps.bitstream_restriction_flag &&
+         s->avctx->has_b_frames < h->sps.num_reorder_frames) {
          s->avctx->has_b_frames = h->sps.num_reorder_frames;
-         s->low_delay = 0;
+         s->low_delay           = 0;
      }
  
      return 0;
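[Editor's note] ff_h264_decode_extradata, reworked above to take an explicit buffer, distinguishes raw Annex B extradata from the ISO avcC form: byte 0 is 1, the low two bits of byte 4 give nal_length_size - 1, and the low five bits of byte 5 give the SPS count, followed by 16-bit length-prefixed parameter sets and then a PPS count. The sketch below is a minimal standalone walk over that layout purely for illustration; the function name and printout are mine, and it deliberately skips the actual decoding step.

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative sketch of the avcC layout parsed above; not FFmpeg API. */
    static int dump_avcc(const uint8_t *buf, int size)
    {
        const uint8_t *p = buf, *end = buf + size;
        int i, n, nsps, npps;

        if (size < 7 || buf[0] != 1)
            return -1;                       /* not an avcC blob */

        nsps = p[5] & 0x1f;                  /* numOfSequenceParameterSets */
        p   += 6;
        for (i = 0; i < nsps; i++) {         /* skip the SPS NAL units */
            if (end - p < 2)
                return -1;
            n = (p[0] << 8) | p[1];          /* 16-bit big-endian NAL size */
            if (end - p < 2 + n)
                return -1;
            p += 2 + n;
        }

        if (p >= end)
            return -1;
        npps = *p++;                         /* numOfPictureParameterSets */
        for (i = 0; i < npps; i++) {         /* skip the PPS NAL units */
            if (end - p < 2)
                return -1;
            n = (p[0] << 8) | p[1];
            if (end - p < 2 + n)
                return -1;
            p += 2 + n;
        }

        printf("nal_length_size=%d sps=%d pps=%d\n",
               (buf[4] & 0x03) + 1, nsps, npps);
        return 0;
    }

In the decoder itself each parameter set is of course fed to decode_nal_units() rather than skipped, and nal_length_size is stored for parsing the subsequent length-prefixed NAL stream.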
@@@ -1206,92 -1300,98 +1317,102 @@@ static int decode_update_thread_context
      int inited = s->context_initialized, err;
      int i;
  
-     if(dst == src) return 0;
 -    if (dst == src || !s1->context_initialized)
++    if (dst == src)
+         return 0;
  
      err = ff_mpeg_update_thread_context(dst, src);
-     if(err) return err;
+     if (err)
+         return err;
  
-     //FIXME handle width/height changing
-     if(!inited){
-         for(i = 0; i < MAX_SPS_COUNT; i++)
+     // FIXME handle width/height changing
+     if (!inited) {
+         for (i = 0; i < MAX_SPS_COUNT; i++)
              av_freep(h->sps_buffers + i);
  
-         for(i = 0; i < MAX_PPS_COUNT; i++)
+         for (i = 0; i < MAX_PPS_COUNT; i++)
              av_freep(h->pps_buffers + i);
  
-         memcpy(&h->s + 1, &h1->s + 1, sizeof(H264Context) - sizeof(MpegEncContext)); //copy all fields after MpegEnc
+         // copy all fields after MpegEnc
+         memcpy(&h->s + 1, &h1->s + 1,
+                sizeof(H264Context) - sizeof(MpegEncContext));
          memset(h->sps_buffers, 0, sizeof(h->sps_buffers));
          memset(h->pps_buffers, 0, sizeof(h->pps_buffers));
 +
 +        if (s1->context_initialized) {
          if (ff_h264_alloc_tables(h) < 0) {
              av_log(dst, AV_LOG_ERROR, "Could not allocate memory for h264\n");
              return AVERROR(ENOMEM);
          }
          context_init(h);
  
-         // frame_start may not be called for the next thread (if it's decoding a bottom field)
-         // so this has to be allocated here
-         h->s.obmc_scratchpad = av_malloc(16*6*s->linesize);
++        /* frame_start may not be called for the next thread (if it's decoding
++         * a bottom field) so this has to be allocated here */
++        h->s.obmc_scratchpad = av_malloc(16 * 6 * s->linesize);
 +        }
 +
-         for(i=0; i<2; i++){
-             h->rbsp_buffer[i] = NULL;
+         for (i = 0; i < 2; i++) {
+             h->rbsp_buffer[i]      = NULL;
              h->rbsp_buffer_size[i] = 0;
          }
  
          h->thread_context[0] = h;
  
 -        /* frame_start may not be called for the next thread (if it's decoding
 -         * a bottom field) so this has to be allocated here */
 -        h->s.obmc_scratchpad = av_malloc(16 * 6 * s->linesize);
 -
          s->dsp.clear_blocks(h->mb);
-         s->dsp.clear_blocks(h->mb+(24*16<<h->pixel_shift));
+         s->dsp.clear_blocks(h->mb + (24 * 16 << h->pixel_shift));
      }
  
-     //extradata/NAL handling
-     h->is_avc          = h1->is_avc;
+     // extradata/NAL handling
+     h->is_avc = h1->is_avc;
  
-     //SPS/PPS
-     copy_parameter_set((void**)h->sps_buffers, (void**)h1->sps_buffers, MAX_SPS_COUNT, sizeof(SPS));
-     h->sps             = h1->sps;
-     copy_parameter_set((void**)h->pps_buffers, (void**)h1->pps_buffers, MAX_PPS_COUNT, sizeof(PPS));
-     h->pps             = h1->pps;
+     // SPS/PPS
+     copy_parameter_set((void **)h->sps_buffers, (void **)h1->sps_buffers,
+                        MAX_SPS_COUNT, sizeof(SPS));
+     h->sps = h1->sps;
+     copy_parameter_set((void **)h->pps_buffers, (void **)h1->pps_buffers,
+                        MAX_PPS_COUNT, sizeof(PPS));
+     h->pps = h1->pps;
  
-     //Dequantization matrices
-     //FIXME these are big - can they be only copied when PPS changes?
+     // Dequantization matrices
+     // FIXME these are big - can they be only copied when PPS changes?
      copy_fields(h, h1, dequant4_buffer, dequant4_coeff);
  
-     for(i=0; i<6; i++)
-         h->dequant4_coeff[i] = h->dequant4_buffer[0] + (h1->dequant4_coeff[i] - h1->dequant4_buffer[0]);
+     for (i = 0; i < 6; i++)
+         h->dequant4_coeff[i] = h->dequant4_buffer[0] +
+                                (h1->dequant4_coeff[i] - h1->dequant4_buffer[0]);
  
-     for(i=0; i<6; i++)
-         h->dequant8_coeff[i] = h->dequant8_buffer[0] + (h1->dequant8_coeff[i] - h1->dequant8_buffer[0]);
+     for (i = 0; i < 6; i++)
+         h->dequant8_coeff[i] = h->dequant8_buffer[0] +
+                                (h1->dequant8_coeff[i] - h1->dequant8_buffer[0]);
  
      h->dequant_coeff_pps = h1->dequant_coeff_pps;
  
-     //POC timing
+     // POC timing
      copy_fields(h, h1, poc_lsb, redundant_pic_count);
  
-     //reference lists
+     // reference lists
      copy_fields(h, h1, ref_count, list_count);
-     copy_fields(h, h1, ref_list,  intra_gb);
+     copy_fields(h, h1, ref_list, intra_gb);
      copy_fields(h, h1, short_ref, cabac_init_idc);
  
-     copy_picture_range(h->short_ref,   h1->short_ref,   32, s, s1);
-     copy_picture_range(h->long_ref,    h1->long_ref,    32, s, s1);
-     copy_picture_range(h->delayed_pic, h1->delayed_pic, MAX_DELAYED_PIC_COUNT+2, s, s1);
+     copy_picture_range(h->short_ref, h1->short_ref, 32, s, s1);
+     copy_picture_range(h->long_ref, h1->long_ref, 32, s, s1);
+     copy_picture_range(h->delayed_pic, h1->delayed_pic,
+                        MAX_DELAYED_PIC_COUNT + 2, s, s1);
  
      h->last_slice_type = h1->last_slice_type;
 +    h->sync            = h1->sync;
  
-     if(!s->current_picture_ptr) return 0;
+     if (!s->current_picture_ptr)
+         return 0;
  
-     if(!s->dropable) {
+     if (!s->dropable) {
          err = ff_h264_execute_ref_pic_marking(h, h->mmco, h->mmco_index);
-         h->prev_poc_msb     = h->poc_msb;
-         h->prev_poc_lsb     = h->poc_lsb;
+         h->prev_poc_msb = h->poc_msb;
+         h->prev_poc_lsb = h->poc_lsb;
      }
-     h->prev_frame_num_offset= h->frame_num_offset;
-     h->prev_frame_num       = h->frame_num;
-     h->outputed_poc         = h->next_outputed_poc;
+     h->prev_frame_num_offset = h->frame_num_offset;
+     h->prev_frame_num        = h->frame_num;
+     h->outputed_poc          = h->next_outputed_poc;
  
      return err;
  }
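[Editor's note] One detail worth calling out in the thread-context update above: the dequant tables are copied by rebasing each pointer onto the destination context's own buffer rather than aliasing the source context. A generic sketch of that pattern, with hypothetical names:

    #include <stdint.h>

    /* Illustrative sketch of pointer re-basing when duplicating a context;
     * all names are hypothetical, not FFmpeg API. */
    static void rebase_pointers(uint8_t *dst_base, uint8_t **dst_ptrs,
                                uint8_t *src_base, uint8_t **src_ptrs,
                                int count)
    {
        int i;
        for (i = 0; i < count; i++)
            /* keep the same offset, but relative to the copy's own buffer */
            dst_ptrs[i] = dst_base + (src_ptrs[i] - src_base);
    }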
@@@ -1311,8 -1412,7 +1433,8 @@@ int ff_h264_frame_start(H264Context *h
       * See decode_nal_units().
       */
      s->current_picture_ptr->f.key_frame = 0;
-     s->current_picture_ptr->sync = 0;
-     s->current_picture_ptr->mmco_reset= 0;
++    s->current_picture_ptr->sync        = 0;
+     s->current_picture_ptr->mmco_reset  = 0;
  
      assert(s->linesize && s->uvlinesize);
  
@@@ -1448,82 -1558,124 +1579,88 @@@ static void decode_postinit(H264Contex
          }
      }
  
-     //FIXME do something with unavailable reference frames
 +    cur->mmco_reset = h->mmco_reset;
 +    h->mmco_reset = 0;
+     // FIXME do something with unavailable reference frames
  
      /* Sort B-frames into display order */
  
-     if(h->sps.bitstream_restriction_flag
-        && s->avctx->has_b_frames < h->sps.num_reorder_frames){
+     if (h->sps.bitstream_restriction_flag &&
+         s->avctx->has_b_frames < h->sps.num_reorder_frames) {
          s->avctx->has_b_frames = h->sps.num_reorder_frames;
-         s->low_delay = 0;
+         s->low_delay           = 0;
      }
  
-     if(   s->avctx->strict_std_compliance >= FF_COMPLIANCE_STRICT
-        && !h->sps.bitstream_restriction_flag){
+     if (s->avctx->strict_std_compliance >= FF_COMPLIANCE_STRICT &&
+         !h->sps.bitstream_restriction_flag) {
          s->avctx->has_b_frames = MAX_DELAYED_PIC_COUNT - 1;
-         s->low_delay= 0;
+         s->low_delay           = 0;
      }
  
 +    for (i = 0; 1; i++) {
 +        if(i == MAX_DELAYED_PIC_COUNT || cur->poc < h->last_pocs[i]){
 +            if(i)
 +                h->last_pocs[i-1] = cur->poc;
 +            break;
 +        } else if(i) {
 +            h->last_pocs[i-1]= h->last_pocs[i];
 +        }
 +    }
 +    out_of_order = MAX_DELAYED_PIC_COUNT - i;
 +    if(   cur->f.pict_type == AV_PICTURE_TYPE_B
 +       || (h->last_pocs[MAX_DELAYED_PIC_COUNT-2] > INT_MIN && h->last_pocs[MAX_DELAYED_PIC_COUNT-1] - h->last_pocs[MAX_DELAYED_PIC_COUNT-2] > 2))
 +        out_of_order = FFMAX(out_of_order, 1);
 +    if(s->avctx->has_b_frames < out_of_order && !h->sps.bitstream_restriction_flag){
 +        av_log(s->avctx, AV_LOG_WARNING, "Increasing reorder buffer to %d\n", out_of_order);
 +        s->avctx->has_b_frames = out_of_order;
 +        s->low_delay = 0;
 +    }
 +
      pics = 0;
-     while(h->delayed_pic[pics]) pics++;
+     while (h->delayed_pic[pics])
+         pics++;
  
 -    assert(pics <= MAX_DELAYED_PIC_COUNT);
 +    av_assert0(pics <= MAX_DELAYED_PIC_COUNT);
  
      h->delayed_pic[pics++] = cur;
      if (cur->f.reference == 0)
          cur->f.reference = DELAYED_PIC_REF;
  
 -    /* Frame reordering. This code takes pictures from coding order and sorts
 -     * them by their incremental POC value into display order. It supports POC
 -     * gaps, MMCO reset codes and random resets.
 -     * A "display group" can start either with a IDR frame (f.key_frame = 1),
 -     * and/or can be closed down with a MMCO reset code. In sequences where
 -     * there is no delay, we can't detect that (since the frame was already
 -     * output to the user), so we also set h->mmco_reset to detect the MMCO
 -     * reset code.
 -     * FIXME: if we detect insufficient delays (as per s->avctx->has_b_frames),
 -     * we increase the delay between input and output. All frames affected by
 -     * the lag (e.g. those that should have been output before another frame
 -     * that we already returned to the user) will be dropped. This is a bug
 -     * that we will fix later. */
 -    for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++) {
 -        cnt     += out->poc < h->last_pocs[i];
 -        invalid += out->poc == INT_MIN;
 -    }
 -    if (!h->mmco_reset && !cur->f.key_frame &&
 -        cnt + invalid == MAX_DELAYED_PIC_COUNT && cnt > 0) {
 -        h->mmco_reset = 2;
 -        if (pics > 1)
 -            h->delayed_pic[pics - 2]->mmco_reset = 2;
 -    }
 -    if (h->mmco_reset || cur->f.key_frame) {
 -        for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++)
 -            h->last_pocs[i] = INT_MIN;
 -        cnt     = 0;
 -        invalid = MAX_DELAYED_PIC_COUNT;
 -    }
 -    out     = h->delayed_pic[0];
 +    out = h->delayed_pic[0];
      out_idx = 0;
-     for (i = 1; h->delayed_pic[i] && !h->delayed_pic[i]->f.key_frame && !h->delayed_pic[i]->mmco_reset; i++)
-         if(h->delayed_pic[i]->poc < out->poc){
-             out = h->delayed_pic[i];
 -    for (i = 1; i < MAX_DELAYED_PIC_COUNT &&
 -                h->delayed_pic[i] &&
 -                !h->delayed_pic[i - 1]->mmco_reset &&
 -                !h->delayed_pic[i]->f.key_frame;
++    for (i = 1; h->delayed_pic[i] &&
++                !h->delayed_pic[i]->f.key_frame &&
++                !h->delayed_pic[i]->mmco_reset;
+          i++)
+         if (h->delayed_pic[i]->poc < out->poc) {
+             out     = h->delayed_pic[i];
              out_idx = i;
          }
-     if (s->avctx->has_b_frames == 0 && (h->delayed_pic[0]->f.key_frame || h->delayed_pic[0]->mmco_reset))
-         h->next_outputed_poc= INT_MIN;
+     if (s->avctx->has_b_frames == 0 &&
 -        (h->delayed_pic[0]->f.key_frame || h->mmco_reset))
++        (h->delayed_pic[0]->f.key_frame || h->delayed_pic[0]->mmco_reset))
+         h->next_outputed_poc = INT_MIN;
 -    out_of_order = !out->f.key_frame && !h->mmco_reset &&
 -                   (out->poc < h->next_outputed_poc);
 +    out_of_order = out->poc < h->next_outputed_poc;
  
-     if(out_of_order || pics > s->avctx->has_b_frames){
 -    if (h->sps.bitstream_restriction_flag &&
 -        s->avctx->has_b_frames >= h->sps.num_reorder_frames) {
 -    } else if (out_of_order && pics - 1 == s->avctx->has_b_frames &&
 -               s->avctx->has_b_frames < MAX_DELAYED_PIC_COUNT) {
 -        if (invalid + cnt < MAX_DELAYED_PIC_COUNT) {
 -            s->avctx->has_b_frames = FFMAX(s->avctx->has_b_frames, cnt);
 -        }
 -        s->low_delay = 0;
 -    } else if (s->low_delay &&
 -               ((h->next_outputed_poc != INT_MIN &&
 -                 out->poc > h->next_outputed_poc + 2) ||
 -                cur->f.pict_type == AV_PICTURE_TYPE_B)) {
 -        s->low_delay = 0;
 -        s->avctx->has_b_frames++;
 -    }
 -
 -    if (pics > s->avctx->has_b_frames) {
++    if (out_of_order || pics > s->avctx->has_b_frames) {
          out->f.reference &= ~DELAYED_PIC_REF;
-         out->owner2 = s; // for frame threading, the owner must be the second field's thread
-                          // or else the first thread can release the picture and reuse it unsafely
-         for(i=out_idx; h->delayed_pic[i]; i++)
-             h->delayed_pic[i] = h->delayed_pic[i+1];
+         // for frame threading, the owner must be the second field's thread or
+         // else the first thread can release the picture and reuse it unsafely
+         out->owner2       = s;
+         for (i = out_idx; h->delayed_pic[i]; i++)
+             h->delayed_pic[i] = h->delayed_pic[i + 1];
      }
-     if(!out_of_order && pics > s->avctx->has_b_frames){
 -    memmove(h->last_pocs, &h->last_pocs[1],
 -            sizeof(*h->last_pocs) * (MAX_DELAYED_PIC_COUNT - 1));
 -    h->last_pocs[MAX_DELAYED_PIC_COUNT - 1] = cur->poc;
+     if (!out_of_order && pics > s->avctx->has_b_frames) {
          h->next_output_pic = out;
 -        if (out->mmco_reset) {
 -            if (out_idx > 0) {
 -                h->next_outputed_poc                    = out->poc;
 -                h->delayed_pic[out_idx - 1]->mmco_reset = out->mmco_reset;
 -            } else {
 -                h->next_outputed_poc = INT_MIN;
 -            }
 -        } else {
 -            if (out_idx == 0 && pics > 1 && h->delayed_pic[0]->f.key_frame) {
 -                h->next_outputed_poc = INT_MIN;
 -            } else {
 -                h->next_outputed_poc = out->poc;
 -            }
 -        }
 -        h->mmco_reset = 0;
 +        if (out_idx == 0 && h->delayed_pic[0] && (h->delayed_pic[0]->f.key_frame || h->delayed_pic[0]->mmco_reset)) {
 +            h->next_outputed_poc = INT_MIN;
 +        } else
 +            h->next_outputed_poc = out->poc;
-     }else{
+     } else {
 -        av_log(s->avctx, AV_LOG_DEBUG, "no picture\n");
 +        av_log(s->avctx, AV_LOG_DEBUG, "no picture %s\n", out_of_order ? "ooo" : "");
 +    }
 +
 +    if (h->next_output_pic && h->next_output_pic->sync) {
 +        h->sync |= 2;
      }
  
      if (setup_finished)
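[Editor's note] decode_postinit above grows the reorder buffer on demand ("Increasing reorder buffer") and then outputs the delayed picture with the smallest POC, stopping early at a key frame or MMCO-reset boundary. A stripped-down sketch of that selection step; the struct and function are hypothetical stand-ins for FFmpeg's Picture handling, not its API:

    /* Illustrative sketch only; hypothetical types, not FFmpeg's Picture. */
    typedef struct DelayedPic {
        int poc;          /* picture order count */
        int key_frame;    /* starts a new display group */
        int mmco_reset;   /* closes the previous display group */
    } DelayedPic;

    /* Return the index of the next picture to output from a NULL-terminated
     * array of delayed pictures, mirroring the loop in decode_postinit(). */
    static int pick_next_output(DelayedPic *const pics[])
    {
        int i, out = 0;

        for (i = 1; pics[i] && !pics[i]->key_frame && !pics[i]->mmco_reset; i++)
            if (pics[i]->poc < pics[out]->poc)
                out = i;
        return out;
    }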
@@@ -1938,25 -2137,28 +2122,25 @@@ static av_always_inline void hl_decode_
      }
  
      if (!simple && IS_INTRA_PCM(mb_type)) {
 +        const int bit_depth = h->sps.bit_depth_luma;
          if (pixel_shift) {
 -            const int bit_depth = h->sps.bit_depth_luma;
              int j;
              GetBitContext gb;
-             init_get_bits(&gb, (uint8_t*)h->mb, 384*bit_depth);
+             init_get_bits(&gb, (uint8_t *)h->mb, 384 * bit_depth);
  
              for (i = 0; i < 16; i++) {
-                 uint16_t *tmp_y  = (uint16_t*)(dest_y  + i*linesize);
+                 uint16_t *tmp_y = (uint16_t *)(dest_y + i * linesize);
                  for (j = 0; j < 16; j++)
                      tmp_y[j] = get_bits(&gb, bit_depth);
              }
-             if(simple || !CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
+             if (simple || !CONFIG_GRAY || !(s->flags & CODEC_FLAG_GRAY)) {
                  if (!h->sps.chroma_format_idc) {
                      for (i = 0; i < block_h; i++) {
-                         uint16_t *tmp_cb = (uint16_t*)(dest_cb + i*uvlinesize);
-                         uint16_t *tmp_cr = (uint16_t*)(dest_cr + i*uvlinesize);
+                         uint16_t *tmp_cb = (uint16_t *)(dest_cb + i * uvlinesize);
 -                        for (j = 0; j < 8; j++)
 -                            tmp_cb[j] = 1 << (bit_depth - 1);
 -                    }
 -                    for (i = 0; i < block_h; i++) {
+                         uint16_t *tmp_cr = (uint16_t *)(dest_cr + i * uvlinesize);
 -                        for (j = 0; j < 8; j++)
 -                            tmp_cr[j] = 1 << (bit_depth - 1);
 +                        for (j = 0; j < 8; j++) {
 +                            tmp_cb[j] = tmp_cr[j] = 1 << (bit_depth - 1);
 +                        }
                      }
                  } else {
                      for (i = 0; i < block_h; i++) {
                  }
              }
          } else {
-             for (i=0; i<16; i++) {
-                 memcpy(dest_y + i*  linesize, h->mb       + i*8, 16);
-             }
-             if(simple || !CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
+             for (i = 0; i < 16; i++)
+                 memcpy(dest_y + i * linesize, h->mb + i * 8, 16);
+             if (simple || !CONFIG_GRAY || !(s->flags & CODEC_FLAG_GRAY)) {
                  if (!h->sps.chroma_format_idc) {
-                     for (i=0; i<8; i++) {
 -                    for (i = 0; i < block_h; i++) {
 -                        memset(dest_cb + i * uvlinesize, 128, 8);
 -                        memset(dest_cr + i * uvlinesize, 128, 8);
++                    for (i = 0; i < 8; i++) {
 +                        memset(dest_cb + i*uvlinesize, 1 << (bit_depth - 1), 8);
 +                        memset(dest_cr + i*uvlinesize, 1 << (bit_depth - 1), 8);
                      }
                  } else {
-                     for (i=0; i<block_h; i++) {
-                         memcpy(dest_cb + i*uvlinesize, h->mb + 128 + i*4,  8);
-                         memcpy(dest_cr + i*uvlinesize, h->mb + 160 + i*4,  8);
+                     for (i = 0; i < block_h; i++) {
+                         memcpy(dest_cb + i * uvlinesize, h->mb + 128 + i * 4, 8);
+                         memcpy(dest_cr + i * uvlinesize, h->mb + 160 + i * 4, 8);
                      }
                  }
              }
@@@ -2353,42 -2592,41 +2574,45 @@@ static void implicit_weight_table(H264C
  /**
   * instantaneous decoder refresh.
   */
- static void idr(H264Context *h){
+ static void idr(H264Context *h)
+ {
 +    int i;
      ff_h264_remove_all_refs(h);
-     h->prev_frame_num= 0;
-     h->prev_frame_num_offset= 0;
-     h->prev_poc_msb= 1<<16;
-     h->prev_poc_lsb= 0;
+     h->prev_frame_num        = 0;
+     h->prev_frame_num_offset = 0;
 -    h->prev_poc_msb          =
++    h->prev_poc_msb          = 1<<16;
+     h->prev_poc_lsb          = 0;
 +    for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++)
 +        h->last_pocs[i] = INT_MIN;
  }
  
  /* forget old pics after a seek */
- static void flush_dpb(AVCodecContext *avctx){
-     H264Context *h= avctx->priv_data;
+ static void flush_dpb(AVCodecContext *avctx)
+ {
+     H264Context *h = avctx->priv_data;
      int i;
-     for(i=0; i<=MAX_DELAYED_PIC_COUNT; i++) {
-         if(h->delayed_pic[i])
 -    for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++) {
++    for (i=0; i<=MAX_DELAYED_PIC_COUNT; i++) {
+         if (h->delayed_pic[i])
              h->delayed_pic[i]->f.reference = 0;
-         h->delayed_pic[i]= NULL;
+         h->delayed_pic[i] = NULL;
      }
-     h->outputed_poc=h->next_outputed_poc= INT_MIN;
 -    for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++)
 -        h->last_pocs[i] = INT_MIN;
+     h->outputed_poc = h->next_outputed_poc = INT_MIN;
      h->prev_interlaced_frame = 1;
      idr(h);
-     h->prev_frame_num= -1;
-     if(h->s.current_picture_ptr)
++    h->prev_frame_num = -1;
+     if (h->s.current_picture_ptr)
          h->s.current_picture_ptr->f.reference = 0;
-     h->s.first_field= 0;
+     h->s.first_field = 0;
      ff_h264_reset_sei(h);
      ff_mpeg_flush(avctx);
 +    h->recovery_frame= -1;
 +    h->sync= 0;
  }
  
- static int init_poc(H264Context *h){
-     MpegEncContext * const s = &h->s;
-     const int max_frame_num= 1<<h->sps.log2_max_frame_num;
+ static int init_poc(H264Context *h)
+ {
+     MpegEncContext *const s = &h->s;
+     const int max_frame_num = 1 << h->sps.log2_max_frame_num;
      int field_poc[2];
      Picture *cur = s->current_picture_ptr;
  
@@@ -2628,21 -2873,22 +2859,22 @@@ static int decode_slice_header(H264Cont
      int default_ref_list_done = 0;
      int last_pic_structure;
  
-     s->dropable= h->nal_ref_idc == 0;
+     s->dropable = h->nal_ref_idc == 0;
  
      /* FIXME: 2tap qpel isn't implemented for high bit depth. */
-     if((s->avctx->flags2 & CODEC_FLAG2_FAST) && !h->nal_ref_idc && !h->pixel_shift){
-         s->me.qpel_put= s->dsp.put_2tap_qpel_pixels_tab;
-         s->me.qpel_avg= s->dsp.avg_2tap_qpel_pixels_tab;
-     }else{
-         s->me.qpel_put= s->dsp.put_h264_qpel_pixels_tab;
-         s->me.qpel_avg= s->dsp.avg_h264_qpel_pixels_tab;
+     if ((s->avctx->flags2 & CODEC_FLAG2_FAST) &&
+         !h->nal_ref_idc && !h->pixel_shift) {
+         s->me.qpel_put = s->dsp.put_2tap_qpel_pixels_tab;
+         s->me.qpel_avg = s->dsp.avg_2tap_qpel_pixels_tab;
+     } else {
+         s->me.qpel_put = s->dsp.put_h264_qpel_pixels_tab;
+         s->me.qpel_avg = s->dsp.avg_h264_qpel_pixels_tab;
      }
  
-     first_mb_in_slice= get_ue_golomb_long(&s->gb);
 -    first_mb_in_slice = get_ue_golomb(&s->gb);
++    first_mb_in_slice = get_ue_golomb_long(&s->gb);
  
-     if(first_mb_in_slice == 0){ //FIXME better field boundary detection
-         if(h0->current_slice && FIELD_PICTURE){
+     if (first_mb_in_slice == 0) { // FIXME better field boundary detection
+         if (h0->current_slice && FIELD_PICTURE) {
              field_end(h, 1);
          }
  
  
      s->chroma_y_shift = h->sps.chroma_format_idc <= 1; // 400 uses yuv420p
  
-     s->width = 16*s->mb_width;
-     s->height= 16*s->mb_height;
 -    s->width = 16 * s->mb_width - (2 >> CHROMA444) * FFMIN(h->sps.crop_right, (8 << CHROMA444) - 1);
 -    if (h->sps.frame_mbs_only_flag)
 -        s->height = 16 * s->mb_height - (1 << s->chroma_y_shift) * FFMIN(h->sps.crop_bottom, (16 >> s->chroma_y_shift) - 1);
 -    else
 -        s->height = 16 * s->mb_height - (2 << s->chroma_y_shift) * FFMIN(h->sps.crop_bottom, (16 >> s->chroma_y_shift) - 1);
++    s->width  = 16 * s->mb_width;
++    s->height = 16 * s->mb_height;
++
  
-     if (s->context_initialized
-         && (   s->width != s->avctx->coded_width || s->height != s->avctx->coded_height
+     if (s->context_initialized &&
 -        (s->width != s->avctx->width || s->height != s->avctx->height ||
 -         av_cmp_q(h->sps.sar, s->avctx->sample_aspect_ratio))) {
 -        if (h != h0) {
++        (   s->width != s->avctx->coded_width || s->height != s->avctx->coded_height
 +            || s->avctx->bits_per_raw_sample != h->sps.bit_depth_luma
 +            || h->cur_chroma_format_idc != h->sps.chroma_format_idc
 +            || av_cmp_q(h->sps.sar, s->avctx->sample_aspect_ratio))) {
 +        if(h != h0 || (s->avctx->active_thread_type & FF_THREAD_FRAME)) {
-             av_log_missing_feature(s->avctx, "Width/height/bit depth/chroma idc changing with threads is", 0);
+             av_log_missing_feature(s->avctx,
 -                                   "Width/height changing with threads is", 0);
++                                   "Width/height/bit depth/chroma idc changing with threads is", 0);
              return -1;   // width / height changed during parallelized decoding
          }
          free_tables(h, 0);
      }
      if (!s->context_initialized) {
          if (h != h0) {
-             av_log(h->s.avctx, AV_LOG_ERROR, "Cannot (re-)initialize context during parallel decoding.\n");
+             av_log(h->s.avctx, AV_LOG_ERROR,
+                    "Cannot (re-)initialize context during parallel decoding.\n");
              return -1;
          }
 -
          avcodec_set_dimensions(s->avctx, s->width, s->height);
-         s->avctx->sample_aspect_ratio= h->sps.sar;
 +        s->avctx->width  -= (2>>CHROMA444)*FFMIN(h->sps.crop_right, (8<<CHROMA444)-1);
 +        s->avctx->height -= (1<<s->chroma_y_shift)*FFMIN(h->sps.crop_bottom, (16>>s->chroma_y_shift)-1) * (2 - h->sps.frame_mbs_only_flag);
+         s->avctx->sample_aspect_ratio = h->sps.sar;
          av_assert0(s->avctx->sample_aspect_ratio.den);
  
-         if(h->sps.video_signal_type_present_flag){
-             s->avctx->color_range = h->sps.full_range>0 ? AVCOL_RANGE_JPEG : AVCOL_RANGE_MPEG;
-             if(h->sps.colour_description_present_flag){
 +        if (s->avctx->bits_per_raw_sample != h->sps.bit_depth_luma ||
 +            h->cur_chroma_format_idc != h->sps.chroma_format_idc) {
 +            if (h->sps.bit_depth_luma >= 8 && h->sps.bit_depth_luma <= 10 &&
 +                (h->sps.bit_depth_luma != 9 || !CHROMA422)) {
 +                s->avctx->bits_per_raw_sample = h->sps.bit_depth_luma;
 +                h->cur_chroma_format_idc = h->sps.chroma_format_idc;
 +                h->pixel_shift = h->sps.bit_depth_luma > 8;
 +
 +                ff_h264dsp_init(&h->h264dsp, h->sps.bit_depth_luma, h->sps.chroma_format_idc);
 +                ff_h264_pred_init(&h->hpc, s->codec_id, h->sps.bit_depth_luma, h->sps.chroma_format_idc);
 +                s->dsp.dct_bits = h->sps.bit_depth_luma > 8 ? 32 : 16;
 +                ff_dsputil_init(&s->dsp, s->avctx);
 +            } else {
 +                av_log(s->avctx, AV_LOG_ERROR, "Unsupported bit depth: %d chroma_idc: %d\n",
 +                       h->sps.bit_depth_luma, h->sps.chroma_format_idc);
 +                return -1;
 +            }
 +        }
 +
 -            s->avctx->color_range = h->sps.full_range ? AVCOL_RANGE_JPEG
+         if (h->sps.video_signal_type_present_flag) {
++            s->avctx->color_range = h->sps.full_range>0 ? AVCOL_RANGE_JPEG
+                                                       : AVCOL_RANGE_MPEG;
+             if (h->sps.colour_description_present_flag) {
                  s->avctx->color_primaries = h->sps.color_primaries;
                  s->avctx->color_trc       = h->sps.color_trc;
                  s->avctx->colorspace      = h->sps.colorspace;
          }
  
          switch (h->sps.bit_depth_luma) {
-             case 9 :
-                 if (CHROMA444) {
-                     if (s->avctx->colorspace == AVCOL_SPC_RGB) {
-                         s->avctx->pix_fmt = PIX_FMT_GBRP9;
-                     } else
-                         s->avctx->pix_fmt = PIX_FMT_YUV444P9;
-                 } else if (CHROMA422)
-                     s->avctx->pix_fmt = PIX_FMT_YUV422P9;
-                 else
-                     s->avctx->pix_fmt = PIX_FMT_YUV420P9;
-                 break;
-             case 10 :
-                 if (CHROMA444) {
-                     if (s->avctx->colorspace == AVCOL_SPC_RGB) {
-                         s->avctx->pix_fmt = PIX_FMT_GBRP10;
-                     } else
-                         s->avctx->pix_fmt = PIX_FMT_YUV444P10;
-                 } else if (CHROMA422)
-                     s->avctx->pix_fmt = PIX_FMT_YUV422P10;
-                 else
-                     s->avctx->pix_fmt = PIX_FMT_YUV420P10;
-                 break;
-             case 8:
-                 if (CHROMA444){
-                     s->avctx->pix_fmt = s->avctx->color_range == AVCOL_RANGE_JPEG ? PIX_FMT_YUVJ444P : PIX_FMT_YUV444P;
+         case 9:
+             if (CHROMA444) {
+                 if (s->avctx->colorspace == AVCOL_SPC_RGB) {
+                     s->avctx->pix_fmt = PIX_FMT_GBRP9;
+                 } else
+                     s->avctx->pix_fmt = PIX_FMT_YUV444P9;
+             } else if (CHROMA422)
+                 s->avctx->pix_fmt = PIX_FMT_YUV422P9;
+             else
+                 s->avctx->pix_fmt = PIX_FMT_YUV420P9;
+             break;
+         case 10:
+             if (CHROMA444) {
+                 if (s->avctx->colorspace == AVCOL_SPC_RGB) {
+                     s->avctx->pix_fmt = PIX_FMT_GBRP10;
+                 } else
+                     s->avctx->pix_fmt = PIX_FMT_YUV444P10;
+             } else if (CHROMA422)
+                 s->avctx->pix_fmt = PIX_FMT_YUV422P10;
+             else
+                 s->avctx->pix_fmt = PIX_FMT_YUV420P10;
+             break;
+         case 8:
+             if (CHROMA444) {
 -                if (s->avctx->colorspace == AVCOL_SPC_RGB) {
 -                    s->avctx->pix_fmt = PIX_FMT_GBRP;
 -                } else
+                     s->avctx->pix_fmt = s->avctx->color_range == AVCOL_RANGE_JPEG ? PIX_FMT_YUVJ444P
+                                                                                   : PIX_FMT_YUV444P;
 +                    if (s->avctx->colorspace == AVCOL_SPC_RGB) {
-                        s->avctx->pix_fmt = PIX_FMT_GBR24P;
-                        av_log(h->s.avctx, AV_LOG_DEBUG, "Detected GBR colorspace.\n");
++                        s->avctx->pix_fmt = PIX_FMT_GBR24P;
++                        av_log(h->s.avctx, AV_LOG_DEBUG, "Detected GBR colorspace.\n");
 +                    } else if (s->avctx->colorspace == AVCOL_SPC_YCGCO) {
 +                        av_log(h->s.avctx, AV_LOG_WARNING, "Detected unsupported YCgCo colorspace.\n");
 +                    }
-                 } else if (CHROMA422) {
-                     s->avctx->pix_fmt = s->avctx->color_range == AVCOL_RANGE_JPEG ? PIX_FMT_YUVJ422P : PIX_FMT_YUV422P;
-                 }else{
-                     s->avctx->pix_fmt = s->avctx->get_format(s->avctx,
-                                                              s->avctx->codec->pix_fmts ?
-                                                              s->avctx->codec->pix_fmts :
-                                                              s->avctx->color_range == AVCOL_RANGE_JPEG ?
-                                                              hwaccel_pixfmt_list_h264_jpeg_420 :
-                                                              ff_hwaccel_pixfmt_list_420);
-                 }
-                 break;
-             default:
-                 av_log(s->avctx, AV_LOG_ERROR,
-                        "Unsupported bit depth: %d\n", h->sps.bit_depth_luma);
-                 return AVERROR_INVALIDDATA;
+             } else if (CHROMA422) {
+                 s->avctx->pix_fmt = s->avctx->color_range == AVCOL_RANGE_JPEG ? PIX_FMT_YUVJ422P
+                                                                               : PIX_FMT_YUV422P;
+             } else {
+                 s->avctx->pix_fmt = s->avctx->get_format(s->avctx,
+                                                          s->avctx->codec->pix_fmts ?
+                                                          s->avctx->codec->pix_fmts :
+                                                          s->avctx->color_range == AVCOL_RANGE_JPEG ?
+                                                          hwaccel_pixfmt_list_h264_jpeg_420 :
+                                                          ff_hwaccel_pixfmt_list_420);
+             }
+             break;
+         default:
+             av_log(s->avctx, AV_LOG_ERROR,
+                    "Unsupported bit depth: %d\n", h->sps.bit_depth_luma);
+             return AVERROR_INVALIDDATA;
          }
  
-         s->avctx->hwaccel = ff_find_hwaccel(s->avctx->codec->id, s->avctx->pix_fmt);
+         s->avctx->hwaccel = ff_find_hwaccel(s->avctx->codec->id,
+                                             s->avctx->pix_fmt);
  
          if (ff_MPV_common_init(s) < 0) {
              av_log(h->s.avctx, AV_LOG_ERROR, "ff_MPV_common_init() failed.\n");
                  c = h->thread_context[i] = av_malloc(sizeof(H264Context));
                  memcpy(c, h->s.thread_context[i], sizeof(MpegEncContext));
                  memset(&c->s + 1, 0, sizeof(H264Context) - sizeof(MpegEncContext));
-                 c->h264dsp = h->h264dsp;
-                 c->sps = h->sps;
-                 c->pps = h->pps;
+                 c->h264dsp     = h->h264dsp;
+                 c->sps         = h->sps;
+                 c->pps         = h->pps;
                  c->pixel_shift = h->pixel_shift;
 +                c->cur_chroma_format_idc = h->cur_chroma_format_idc;
                  init_scan_tables(c);
                  clone_tables(c, h, i);
              }
          init_dequant_tables(h);
      }
  
-     h->frame_num= get_bits(&s->gb, h->sps.log2_max_frame_num);
+     h->frame_num = get_bits(&s->gb, h->sps.log2_max_frame_num);
  
-     h->mb_mbaff = 0;
-     h->mb_aff_frame = 0;
+     h->mb_mbaff        = 0;
+     h->mb_aff_frame    = 0;
      last_pic_structure = s0->picture_structure;
-     if(h->sps.frame_mbs_only_flag){
-         s->picture_structure= PICT_FRAME;
-     }else{
-         if(!h->sps.direct_8x8_inference_flag && slice_type == AV_PICTURE_TYPE_B){
+     if (h->sps.frame_mbs_only_flag) {
+         s->picture_structure = PICT_FRAME;
+     } else {
++        if (!h->sps.direct_8x8_inference_flag && slice_type == AV_PICTURE_TYPE_B) {
 +            av_log(h->s.avctx, AV_LOG_ERROR, "This stream was generated by a broken encoder, invalid 8x8 inference\n");
 +            return -1;
 +        }
-         if(get_bits1(&s->gb)) { //field_pic_flag
-             s->picture_structure= PICT_TOP_FIELD + get_bits1(&s->gb); //bottom_field_flag
+         if (get_bits1(&s->gb)) { // field_pic_flag
+             s->picture_structure = PICT_TOP_FIELD + get_bits1(&s->gb); // bottom_field_flag
          } else {
-             s->picture_structure= PICT_FRAME;
-             h->mb_aff_frame = h->sps.mb_aff;
+             s->picture_structure = PICT_FRAME;
+             h->mb_aff_frame      = h->sps.mb_aff;
          }
      }
-     h->mb_field_decoding_flag= s->picture_structure != PICT_FRAME;
+     h->mb_field_decoding_flag = s->picture_structure != PICT_FRAME;
  
-     if(h0->current_slice == 0){
-         // Shorten frame num gaps so we don't have to allocate reference frames just to throw them away
-         if(h->frame_num != h->prev_frame_num && h->prev_frame_num >= 0) {
-             int unwrap_prev_frame_num = h->prev_frame_num, max_frame_num = 1<<h->sps.log2_max_frame_num;
+     if (h0->current_slice == 0) {
+         /* Shorten frame num gaps so we don't have to allocate reference
+          * frames just to throw them away */
 -        if (h->frame_num != h->prev_frame_num) {
++        if (h->frame_num != h->prev_frame_num && h->prev_frame_num >= 0) {
+             int unwrap_prev_frame_num = h->prev_frame_num;
+             int max_frame_num         = 1 << h->sps.log2_max_frame_num;
  
-             if (unwrap_prev_frame_num > h->frame_num) unwrap_prev_frame_num -= max_frame_num;
+             if (unwrap_prev_frame_num > h->frame_num)
+                 unwrap_prev_frame_num -= max_frame_num;
  
              if ((h->frame_num - unwrap_prev_frame_num) > h->sps.ref_frame_count) {
                  unwrap_prev_frame_num = (h->frame_num - h->sps.ref_frame_count) - 1;
              }
          }
  
-         while(h->frame_num !=  h->prev_frame_num && h->prev_frame_num >= 0 &&
-               h->frame_num != (h->prev_frame_num+1)%(1<<h->sps.log2_max_frame_num)){
 -        while (h->frame_num != h->prev_frame_num &&
++        while (h->frame_num != h->prev_frame_num && h->prev_frame_num >= 0 &&
+                h->frame_num != (h->prev_frame_num + 1) % (1 << h->sps.log2_max_frame_num)) {
              Picture *prev = h->short_ref_count ? h->short_ref[0] : NULL;
-             av_log(h->s.avctx, AV_LOG_DEBUG, "Frame num gap %d %d\n", h->frame_num, h->prev_frame_num);
+             av_log(h->s.avctx, AV_LOG_DEBUG, "Frame num gap %d %d\n",
+                    h->frame_num, h->prev_frame_num);
              if (ff_h264_frame_start(h) < 0)
                  return -1;
              h->prev_frame_num++;
  
              /* figure out if we have a complementary field pair */
              if (!FIELD_PICTURE || s->picture_structure == last_pic_structure) {
-                 /*
-                  * Previous field is unmatched. Don't display it, but let it
-                  * remain for reference if marked as such.
-                  */
+                 /* Previous field is unmatched. Don't display it, but let it
+                  * remain for reference if marked as such. */
                  s0->current_picture_ptr = NULL;
-                 s0->first_field = FIELD_PICTURE;
+                 s0->first_field         = FIELD_PICTURE;
              } else {
 -                if (h->nal_ref_idc &&
 -                    s0->current_picture_ptr->f.reference &&
 -                    s0->current_picture_ptr->frame_num != h->frame_num) {
 -                    /* This and the previous field were reference, but had
 +                if (s0->current_picture_ptr->frame_num != h->frame_num) {
 +                    ff_thread_report_progress((AVFrame*)s0->current_picture_ptr, INT_MAX,
 +                                              s0->picture_structure==PICT_BOTTOM_FIELD);
-                     /*
-                      * This and previous field had
-                      * different frame_nums. Consider this field first in
-                      * pair. Throw away previous field except for reference
-                      * purposes.
-                      */
-                     s0->first_field = 1;
++                    /* This and the previous field had
+                      * different frame_nums. Consider this field first in pair.
+                      * Throw away previous one except for reference purposes. */
+                     s0->first_field         = 1;
                      s0->current_picture_ptr = NULL;
                  } else {
                      /* Second field in complementary pair */
                      s0->first_field = 0;
  
      init_poc(h);
  
-     if(h->pps.redundant_pic_cnt_present){
-         h->redundant_pic_count= get_ue_golomb(&s->gb);
-     }
+     if (h->pps.redundant_pic_cnt_present)
+         h->redundant_pic_count = get_ue_golomb(&s->gb);
  
-     //set defaults, might be overridden a few lines later
-     h->ref_count[0]= h->pps.ref_count[0];
-     h->ref_count[1]= h->pps.ref_count[1];
+     // set defaults, might be overridden a few lines later
+     h->ref_count[0] = h->pps.ref_count[0];
+     h->ref_count[1] = h->pps.ref_count[1];
  
-     if(h->slice_type_nos != AV_PICTURE_TYPE_I){
-         unsigned max= s->picture_structure == PICT_FRAME ? 15 : 31;
+     if (h->slice_type_nos != AV_PICTURE_TYPE_I) {
 -        int max_refs = s->picture_structure == PICT_FRAME ? 16 : 32;
++        unsigned max = s->picture_structure == PICT_FRAME ? 15 : 31;
  
-         if(h->slice_type_nos == AV_PICTURE_TYPE_B){
-             h->direct_spatial_mv_pred= get_bits1(&s->gb);
-         }
-         num_ref_idx_active_override_flag= get_bits1(&s->gb);
+         if (h->slice_type_nos == AV_PICTURE_TYPE_B)
+             h->direct_spatial_mv_pred = get_bits1(&s->gb);
+         num_ref_idx_active_override_flag = get_bits1(&s->gb);
  
-         if(num_ref_idx_active_override_flag){
-             h->ref_count[0]= get_ue_golomb(&s->gb) + 1;
-             if(h->slice_type_nos==AV_PICTURE_TYPE_B)
-                 h->ref_count[1]= get_ue_golomb(&s->gb) + 1;
+         if (num_ref_idx_active_override_flag) {
+             h->ref_count[0] = get_ue_golomb(&s->gb) + 1;
+             if (h->slice_type_nos == AV_PICTURE_TYPE_B)
+                 h->ref_count[1] = get_ue_golomb(&s->gb) + 1;
          }
  
 -        if (h->ref_count[0] > max_refs || h->ref_count[1] > max_refs) {
 +        if (h->ref_count[0]-1 > max || h->ref_count[1]-1 > max){
              av_log(h->s.avctx, AV_LOG_ERROR, "reference overflow\n");
              h->ref_count[0] = h->ref_count[1] = 1;
              return AVERROR_INVALIDDATA;
          }
  
-         if(h->slice_type_nos == AV_PICTURE_TYPE_B)
-             h->list_count= 2;
+         if (h->slice_type_nos == AV_PICTURE_TYPE_B)
+             h->list_count = 2;
          else
-             h->list_count= 1;
-     }else
+             h->list_count = 1;
+     } else
 -        h->list_count = 0;
 +        h->ref_count[1]= h->ref_count[0]= h->list_count= 0;
  
-     if(!default_ref_list_done){
+     if (!default_ref_list_done)
          ff_h264_fill_default_ref_list(h);
-     }
  
-     if(h->slice_type_nos!=AV_PICTURE_TYPE_I && ff_h264_decode_ref_pic_list_reordering(h) < 0) {
-         h->ref_count[1]= h->ref_count[0]= 0;
+     if (h->slice_type_nos != AV_PICTURE_TYPE_I &&
+         ff_h264_decode_ref_pic_list_reordering(h) < 0) {
+         h->ref_count[1] = h->ref_count[0] = 0;
          return -1;
      }
  
  
      h0->last_slice_type = slice_type;
      h->slice_num = ++h0->current_slice;
 -    if (h->slice_num >= MAX_SLICES) {
 -        av_log(s->avctx, AV_LOG_ERROR,
 -               "Too many slices, increase MAX_SLICES and recompile\n");
 +
-     if(h->slice_num)
++    if (h->slice_num)
 +        h0->slice_row[(h->slice_num-1)&(MAX_SLICES-1)]= s->resync_mb_y;
 +    if (   h0->slice_row[h->slice_num&(MAX_SLICES-1)] + 3 >= s->resync_mb_y
 +        && h0->slice_row[h->slice_num&(MAX_SLICES-1)] <= s->resync_mb_y
 +        && h->slice_num >= MAX_SLICES) {
 +        //in case of ASO this check needs to be updated depending on how we decide to assign slice numbers in this case
 +        av_log(s->avctx, AV_LOG_WARNING, "Possibly too many slices (%d >= %d), increase MAX_SLICES and recompile if there are artifacts\n", h->slice_num, MAX_SLICES);
      }
  
-     for(j=0; j<2; j++){
+     for (j = 0; j < 2; j++) {
          int id_list[16];
-         int *ref2frm= h->ref2frm[h->slice_num&(MAX_SLICES-1)][j];
-         for(i=0; i<16; i++){
-             id_list[i]= 60;
+         int *ref2frm = h->ref2frm[h->slice_num & (MAX_SLICES - 1)][j];
+         for (i = 0; i < 16; i++) {
+             id_list[i] = 60;
              if (h->ref_list[j][i].f.data[0]) {
                  int k;
                  uint8_t *base = h->ref_list[j][i].f.base[0];
@@@ -3681,21 -3975,25 +3995,26 @@@ static int decode_slice(struct AVCodecC
                  s->mb_x = lf_x_start = 0;
                  decode_finish_row(h);
                  ++s->mb_y;
-                 if(FIELD_OR_MBAFF_PICTURE) {
+                 if (FIELD_OR_MBAFF_PICTURE) {
                      ++s->mb_y;
-                     if(FRAME_MBAFF && s->mb_y < s->mb_height)
+                     if (FRAME_MBAFF && s->mb_y < s->mb_height)
                          predict_field_decoding_flag(h);
                  }
-                 if(s->mb_y >= s->mb_height){
-                     tprintf(s->avctx, "slice end %d %d\n", get_bits_count(&s->gb), s->gb.size_in_bits);
+                 if (s->mb_y >= s->mb_height) {
+                     tprintf(s->avctx, "slice end %d %d\n",
+                             get_bits_count(&s->gb), s->gb.size_in_bits);
  
 -                    if (get_bits_left(&s->gb) == 0) {
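 +                    /* accept trailing bits at the end of the slice unless aggressive error detection was requested */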
 +                    if (   get_bits_left(&s->gb) == 0
 +                        || get_bits_left(&s->gb) > 0 && !(s->avctx->err_recognition & AV_EF_AGGRESSIVE)) {
-                         ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_MB_END&part_mask);
+                         ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y,
+                                         s->mb_x - 1, s->mb_y,
+                                         ER_MB_END & part_mask);
  
                          return 0;
-                     }else{
-                         ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, ER_MB_END&part_mask);
+                     } else {
+                         ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y,
+                                         s->mb_x, s->mb_y,
+                                         ER_MB_END & part_mask);
  
                          return -1;
                      }
@@@ -3731,28 -4035,28 +4056,29 @@@ static int execute_decode_slices(H264Co
      H264Context *hx;
      int i;
  
-     if (s->avctx->hwaccel || s->avctx->codec->capabilities&CODEC_CAP_HWACCEL_VDPAU)
+     if (s->avctx->hwaccel ||
+         s->avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU)
          return 0;
-     if(context_count == 1) {
+     if (context_count == 1) {
          return decode_slice(avctx, &h);
      } else {
-         for(i = 1; i < context_count; i++) {
-             hx = h->thread_context[i];
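+         /* copy error handling settings and the x264_build workaround from the master context into each slice thread context */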
+         for (i = 1; i < context_count; i++) {
+             hx                    = h->thread_context[i];
              hx->s.err_recognition = avctx->err_recognition;
-             hx->s.error_count = 0;
-             hx->x264_build= h->x264_build;
+             hx->s.error_count     = 0;
++            hx->x264_build        = h->x264_build;
          }
  
-         avctx->execute(avctx, decode_slice,
-                        h->thread_context, NULL, context_count, sizeof(void*));
+         avctx->execute(avctx, decode_slice, h->thread_context,
+                        NULL, context_count, sizeof(void *));
  
          /* pull back stuff from slices to master context */
-         hx = h->thread_context[context_count - 1];
-         s->mb_x = hx->s.mb_x;
-         s->mb_y = hx->s.mb_y;
-         s->dropable = hx->s.dropable;
+         hx                   = h->thread_context[context_count - 1];
+         s->mb_x              = hx->s.mb_x;
+         s->mb_y              = hx->s.mb_y;
+         s->dropable          = hx->s.dropable;
          s->picture_structure = hx->s.picture_structure;
-         for(i = 1; i < context_count; i++)
+         for (i = 1; i < context_count; i++)
              h->s.error_count += h->thread_context[i]->s.error_count;
      }
  
@@@ -3768,92 -4072,97 +4094,99 @@@ static int decode_nal_units(H264Contex
      int context_count;
      int next_avc;
      int pass = !(avctx->active_thread_type & FF_THREAD_FRAME);
-     int nals_needed=0; ///< number of NALs that need decoding before the next frame thread starts
+     int nals_needed = 0; ///< number of NALs that need decoding before the next frame thread starts
      int nal_index;
  
 +    h->nal_unit_type= 0;
 +
 +    if(!s->slice_context_count)
 +         s->slice_context_count= 1;
      h->max_contexts = s->slice_context_count;
-     if(!(s->flags2 & CODEC_FLAG2_CHUNKS)){
+     if (!(s->flags2 & CODEC_FLAG2_CHUNKS)) {
          h->current_slice = 0;
          if (!s->first_field)
-             s->current_picture_ptr= NULL;
+             s->current_picture_ptr = NULL;
          ff_h264_reset_sei(h);
      }
  
-     for(;pass <= 1;pass++){
-         buf_index = 0;
+     for (; pass <= 1; pass++) {
+         buf_index     = 0;
          context_count = 0;
-         next_avc = h->is_avc ? 0 : buf_size;
-         nal_index = 0;
-     for(;;){
-         int consumed;
-         int dst_length;
-         int bit_length;
-         const uint8_t *ptr;
-         int i, nalsize = 0;
-         int err;
-         if(buf_index >= next_avc) {
-             if (buf_index >= buf_size - h->nal_length_size) break;
-             nalsize = 0;
-             for(i = 0; i < h->nal_length_size; i++)
-                 nalsize = (nalsize << 8) | buf[buf_index++];
-             if(nalsize <= 0 || nalsize > buf_size - buf_index){
-                 av_log(h->s.avctx, AV_LOG_ERROR, "AVC: nal size %d\n", nalsize);
-                 break;
-             }
-             next_avc= buf_index + nalsize;
-         } else {
-             // start code prefix search
-             for(; buf_index + 3 < next_avc; buf_index++){
-                 // This should always succeed in the first iteration.
-                 if(buf[buf_index] == 0 && buf[buf_index+1] == 0 && buf[buf_index+2] == 1)
+         next_avc      = h->is_avc ? 0 : buf_size;
+         nal_index     = 0;
+         for (;;) {
+             int consumed;
+             int dst_length;
+             int bit_length;
+             const uint8_t *ptr;
+             int i, nalsize = 0;
+             int err;
+             if (buf_index >= next_avc) {
+                 if (buf_index >= buf_size - h->nal_length_size)
                      break;
-             }
-             if(buf_index+3 >= buf_size) break;
-             buf_index+=3;
-             if(buf_index >= next_avc) continue;
-         }
-         hx = h->thread_context[context_count];
-         ptr= ff_h264_decode_nal(hx, buf + buf_index, &dst_length, &consumed, next_avc - buf_index);
-         if (ptr==NULL || dst_length < 0){
-             return -1;
-         }
-         i= buf_index + consumed;
-         if((s->workaround_bugs & FF_BUG_AUTODETECT) && i+3<next_avc &&
-            buf[i]==0x00 && buf[i+1]==0x00 && buf[i+2]==0x01 && buf[i+3]==0xE0)
-             s->workaround_bugs |= FF_BUG_TRUNCATED;
-         if(!(s->workaround_bugs & FF_BUG_TRUNCATED)){
-         while(dst_length > 0 && ptr[dst_length - 1] == 0)
-             dst_length--;
-         }
-         bit_length= !dst_length ? 0 : (8*dst_length - ff_h264_decode_rbsp_trailing(h, ptr + dst_length - 1));
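+                 /* AVC (length-prefixed) mode: read the nal_length_size-byte big-endian NAL size */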
+                 nalsize = 0;
+                 for (i = 0; i < h->nal_length_size; i++)
+                     nalsize = (nalsize << 8) | buf[buf_index++];
+                 if (nalsize <= 0 || nalsize > buf_size - buf_index) {
+                     av_log(h->s.avctx, AV_LOG_ERROR,
+                            "AVC: nal size %d\n", nalsize);
+                     break;
+                 }
+                 next_avc = buf_index + nalsize;
+             } else {
+                 // start code prefix search
+                 for (; buf_index + 3 < next_avc; buf_index++)
+                     // This should always succeed in the first iteration.
+                     if (buf[buf_index]     == 0 &&
+                         buf[buf_index + 1] == 0 &&
+                         buf[buf_index + 2] == 1)
+                         break;
  
-         if(s->avctx->debug&FF_DEBUG_STARTCODE){
-             av_log(h->s.avctx, AV_LOG_DEBUG, "NAL %d/%d at %d/%d length %d pass %d\n", hx->nal_unit_type, hx->nal_ref_idc, buf_index, buf_size, dst_length, pass);
-         }
+                 if (buf_index + 3 >= buf_size)
+                     break;
  
-         if (h->is_avc && (nalsize != consumed) && nalsize){
-             av_log(h->s.avctx, AV_LOG_DEBUG, "AVC: Consumed only %d bytes instead of %d\n", consumed, nalsize);
-         }
+                 buf_index += 3;
+                 if (buf_index >= next_avc)
+                     continue;
+             }
  
-         buf_index += consumed;
-         nal_index++;
+             hx = h->thread_context[context_count];
  
-         if(pass == 0) {
-             // packets can sometimes contain multiple PPS/SPS
-             // e.g. two PAFF field pictures in one packet, or a demuxer which splits NALs strangely
-             // if so, when frame threading we can't start the next thread until we've read all of them
-             switch (hx->nal_unit_type) {
+             ptr = ff_h264_decode_nal(hx, buf + buf_index, &dst_length,
+                                      &consumed, next_avc - buf_index);
+             if (ptr == NULL || dst_length < 0)
+                 return -1;
+             i = buf_index + consumed;
+             if ((s->workaround_bugs & FF_BUG_AUTODETECT) && i + 3 < next_avc &&
+                 buf[i]     == 0x00 && buf[i + 1] == 0x00 &&
+                 buf[i + 2] == 0x01 && buf[i + 3] == 0xE0)
+                 s->workaround_bugs |= FF_BUG_TRUNCATED;
+             if (!(s->workaround_bugs & FF_BUG_TRUNCATED))
 -                while (ptr[dst_length - 1] == 0 && dst_length > 0)
++                while(dst_length > 0 && ptr[dst_length - 1] == 0)
+                     dst_length--;
+             bit_length = !dst_length ? 0
+                                      : (8 * dst_length -
+                                         ff_h264_decode_rbsp_trailing(h, ptr + dst_length - 1));
+             if (s->avctx->debug & FF_DEBUG_STARTCODE)
 -                av_log(h->s.avctx, AV_LOG_DEBUG,
 -                       "NAL %d at %d/%d length %d\n",
 -                       hx->nal_unit_type, buf_index, buf_size, dst_length);
++                av_log(h->s.avctx, AV_LOG_DEBUG, "NAL %d/%d at %d/%d length %d pass %d\n", hx->nal_unit_type, hx->nal_ref_idc, buf_index, buf_size, dst_length, pass);
+             if (h->is_avc && (nalsize != consumed) && nalsize)
+                 av_log(h->s.avctx, AV_LOG_DEBUG,
+                        "AVC: Consumed only %d bytes instead of %d\n",
+                        consumed, nalsize);
+             buf_index += consumed;
+             nal_index++;
+             if (pass == 0) {
+                 /* packets can sometimes contain multiple PPS/SPS,
+                  * e.g. two PAFF field pictures in one packet, or a demuxer
+                  * which splits NALs strangely. If so, when frame threading we
+                  * can't start the next thread until we've read all of them. */
+                 switch (hx->nal_unit_type) {
                  case NAL_SPS:
                  case NAL_PPS:
                      nals_needed = nal_index;
                      init_get_bits(&hx->s.gb, ptr, bit_length);
                      if (!get_ue_golomb(&hx->s.gb))
                          nals_needed = nal_index;
+                 }
+                 continue;
              }
-             continue;
-         }
  
-         //FIXME do not discard SEI id
-         if(avctx->skip_frame >= AVDISCARD_NONREF && h->nal_ref_idc  == 0)
-             continue;
-       again:
-         err = 0;
-         switch(hx->nal_unit_type){
-         case NAL_IDR_SLICE:
-             if (h->nal_unit_type != NAL_IDR_SLICE) {
-                 av_log(h->s.avctx, AV_LOG_ERROR, "Invalid mix of idr and non-idr slices\n");
-                 return -1;
-             }
-             idr(h); // FIXME ensure we don't lose some frames if there is reordering
-         case NAL_SLICE:
-             init_get_bits(&hx->s.gb, ptr, bit_length);
-             hx->intra_gb_ptr=
-             hx->inter_gb_ptr= &hx->s.gb;
-             hx->s.data_partitioning = 0;
-             if((err = decode_slice_header(hx, h)))
-                break;
-             if (   h->sei_recovery_frame_cnt >= 0
-                 && (   h->recovery_frame<0
-                     || ((h->recovery_frame - h->frame_num) & ((1 << h->sps.log2_max_frame_num)-1)) > h->sei_recovery_frame_cnt)) {
-                 h->recovery_frame = (h->frame_num + h->sei_recovery_frame_cnt) %
-                                     (1 << h->sps.log2_max_frame_num);
-             }
+             // FIXME do not discard SEI id
+             if (avctx->skip_frame >= AVDISCARD_NONREF && h->nal_ref_idc == 0)
+                 continue;
  
-             s->current_picture_ptr->f.key_frame |=
-                     (hx->nal_unit_type == NAL_IDR_SLICE);
+ again:
+             err = 0;
+             switch (hx->nal_unit_type) {
+             case NAL_IDR_SLICE:
+                 if (h->nal_unit_type != NAL_IDR_SLICE) {
+                     av_log(h->s.avctx, AV_LOG_ERROR,
 -                           "Invalid mix of idr and non-idr slices");
++                           "Invalid mix of idr and non-idr slices\n");
+                     return -1;
+                 }
+                 idr(h); // FIXME ensure we don't lose some frames if there is reordering
+             case NAL_SLICE:
+                 init_get_bits(&hx->s.gb, ptr, bit_length);
+                 hx->intra_gb_ptr        =
+                     hx->inter_gb_ptr    = &hx->s.gb;
+                 hx->s.data_partitioning = 0;
+                 if ((err = decode_slice_header(hx, h)))
+                     break;
  
-             if (h->recovery_frame == h->frame_num) {
-                 s->current_picture_ptr->sync |= 1;
-                 h->recovery_frame = -1;
-             }
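++                /* remember the recovery point announced via SEI: (frame_num + sei_recovery_frame_cnt) mod MaxFrameNum */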
++                if (   h->sei_recovery_frame_cnt >= 0
++                    && (   h->recovery_frame<0
++                        || ((h->recovery_frame - h->frame_num) & ((1 << h->sps.log2_max_frame_num)-1)) > h->sei_recovery_frame_cnt)) {
++                    h->recovery_frame = (h->frame_num + h->sei_recovery_frame_cnt) %
++                                        (1 << h->sps.log2_max_frame_num);
++                }
 +
-             h->sync |= !!s->current_picture_ptr->f.key_frame;
-             h->sync |= 3*!!(s->flags2 & CODEC_FLAG2_SHOW_ALL);
-             s->current_picture_ptr->sync |= h->sync;
+                 s->current_picture_ptr->f.key_frame |=
 -                    (hx->nal_unit_type == NAL_IDR_SLICE) ||
 -                    (h->sei_recovery_frame_cnt >= 0);
++                        (hx->nal_unit_type == NAL_IDR_SLICE);
 +
-             if (h->current_slice == 1) {
-                 if(!(s->flags2 & CODEC_FLAG2_CHUNKS)) {
-                     decode_postinit(h, nal_index >= nals_needed);
++                if (h->recovery_frame == h->frame_num) {
++                    s->current_picture_ptr->sync |= 1;
++                    h->recovery_frame = -1;
 +                }
 +
-                 if (s->avctx->hwaccel && s->avctx->hwaccel->start_frame(s->avctx, NULL, 0) < 0)
-                     return -1;
-                 if(CONFIG_H264_VDPAU_DECODER && s->avctx->codec->capabilities&CODEC_CAP_HWACCEL_VDPAU)
-                     ff_vdpau_h264_picture_start(s);
-             }
++                h->sync |= !!s->current_picture_ptr->f.key_frame;
++                h->sync |= 3*!!(s->flags2 & CODEC_FLAG2_SHOW_ALL);
++                s->current_picture_ptr->sync |= h->sync;
+                 if (h->current_slice == 1) {
+                     if (!(s->flags2 & CODEC_FLAG2_CHUNKS))
+                         decode_postinit(h, nal_index >= nals_needed);
  
-             if(hx->redundant_pic_count==0
-                && (avctx->skip_frame < AVDISCARD_NONREF || hx->nal_ref_idc)
-                && (avctx->skip_frame < AVDISCARD_BIDIR  || hx->slice_type_nos!=AV_PICTURE_TYPE_B)
-                && (avctx->skip_frame < AVDISCARD_NONKEY || hx->slice_type_nos==AV_PICTURE_TYPE_I)
-                && avctx->skip_frame < AVDISCARD_ALL){
-                 if(avctx->hwaccel) {
-                     if (avctx->hwaccel->decode_slice(avctx, &buf[buf_index - consumed], consumed) < 0)
+                     if (s->avctx->hwaccel &&
+                         s->avctx->hwaccel->start_frame(s->avctx, NULL, 0) < 0)
                          return -1;
-                 }else
-                 if(CONFIG_H264_VDPAU_DECODER && s->avctx->codec->capabilities&CODEC_CAP_HWACCEL_VDPAU){
-                     static const uint8_t start_code[] = {0x00, 0x00, 0x01};
-                     ff_vdpau_add_data_chunk(s, start_code, sizeof(start_code));
-                     ff_vdpau_add_data_chunk(s, &buf[buf_index - consumed], consumed );
-                 }else
-                     context_count++;
-             }
-             break;
-         case NAL_DPA:
-             init_get_bits(&hx->s.gb, ptr, bit_length);
-             hx->intra_gb_ptr=
-             hx->inter_gb_ptr= NULL;
+                     if (CONFIG_H264_VDPAU_DECODER &&
+                         s->avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU)
+                         ff_vdpau_h264_picture_start(s);
+                 }
  
-             if ((err = decode_slice_header(hx, h)) < 0)
+                 if (hx->redundant_pic_count == 0 &&
+                     (avctx->skip_frame < AVDISCARD_NONREF ||
+                      hx->nal_ref_idc) &&
+                     (avctx->skip_frame < AVDISCARD_BIDIR  ||
+                      hx->slice_type_nos != AV_PICTURE_TYPE_B) &&
+                     (avctx->skip_frame < AVDISCARD_NONKEY ||
+                      hx->slice_type_nos == AV_PICTURE_TYPE_I) &&
+                     avctx->skip_frame < AVDISCARD_ALL) {
+                     if (avctx->hwaccel) {
+                         if (avctx->hwaccel->decode_slice(avctx,
+                                                          &buf[buf_index - consumed],
+                                                          consumed) < 0)
+                             return -1;
+                     } else if (CONFIG_H264_VDPAU_DECODER &&
+                                s->avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU) {
+                         static const uint8_t start_code[] = {
+                             0x00, 0x00, 0x01 };
+                         ff_vdpau_add_data_chunk(s, start_code,
+                                                 sizeof(start_code));
+                         ff_vdpau_add_data_chunk(s, &buf[buf_index - consumed],
+                                                 consumed);
+                     } else
+                         context_count++;
+                 }
                  break;
+             case NAL_DPA:
+                 init_get_bits(&hx->s.gb, ptr, bit_length);
+                 hx->intra_gb_ptr =
+                 hx->inter_gb_ptr = NULL;
  
-             hx->s.data_partitioning = 1;
-             break;
-         case NAL_DPB:
-             init_get_bits(&hx->intra_gb, ptr, bit_length);
-             hx->intra_gb_ptr= &hx->intra_gb;
-             break;
-         case NAL_DPC:
-             init_get_bits(&hx->inter_gb, ptr, bit_length);
-             hx->inter_gb_ptr= &hx->inter_gb;
-             if(hx->redundant_pic_count==0 && hx->intra_gb_ptr && hx->s.data_partitioning
-                && s->context_initialized
-                && (avctx->skip_frame < AVDISCARD_NONREF || hx->nal_ref_idc)
-                && (avctx->skip_frame < AVDISCARD_BIDIR  || hx->slice_type_nos!=AV_PICTURE_TYPE_B)
-                && (avctx->skip_frame < AVDISCARD_NONKEY || hx->slice_type_nos==AV_PICTURE_TYPE_I)
-                && avctx->skip_frame < AVDISCARD_ALL)
-                 context_count++;
-             break;
-         case NAL_SEI:
-             init_get_bits(&s->gb, ptr, bit_length);
-             ff_h264_decode_sei(h);
-             break;
-         case NAL_SPS:
-             init_get_bits(&s->gb, ptr, bit_length);
-             if (ff_h264_decode_seq_parameter_set(h) < 0 && (h->is_avc ? (nalsize != consumed) && nalsize : 1)){
-                 av_log(h->s.avctx, AV_LOG_DEBUG, "SPS decoding failure, trying alternative mode\n");
-                 if(h->is_avc) av_assert0(next_avc - buf_index + consumed == nalsize);
-                 init_get_bits(&s->gb, &buf[buf_index + 1 - consumed], 8*(next_avc - buf_index + consumed - 1));
-                 ff_h264_decode_seq_parameter_set(h);
-             }
-             if (s->flags& CODEC_FLAG_LOW_DELAY ||
-                 (h->sps.bitstream_restriction_flag && !h->sps.num_reorder_frames))
-                 s->low_delay=1;
-             if(avctx->has_b_frames < 2)
-                 avctx->has_b_frames= !s->low_delay;
-             break;
-         case NAL_PPS:
-             init_get_bits(&s->gb, ptr, bit_length);
+                 if ((err = decode_slice_header(hx, h)) < 0)
+                     break;
  
-             ff_h264_decode_picture_parameter_set(h, bit_length);
+                 hx->s.data_partitioning = 1;
+                 break;
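+             /* data partition B carries the intra-coded residual, partition C the inter-coded residual */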
+             case NAL_DPB:
+                 init_get_bits(&hx->intra_gb, ptr, bit_length);
+                 hx->intra_gb_ptr = &hx->intra_gb;
+                 break;
+             case NAL_DPC:
+                 init_get_bits(&hx->inter_gb, ptr, bit_length);
+                 hx->inter_gb_ptr = &hx->inter_gb;
+                 if (hx->redundant_pic_count == 0 &&
+                     hx->intra_gb_ptr &&
+                     hx->s.data_partitioning &&
+                     s->context_initialized &&
+                     (avctx->skip_frame < AVDISCARD_NONREF || hx->nal_ref_idc) &&
+                     (avctx->skip_frame < AVDISCARD_BIDIR  ||
+                      hx->slice_type_nos != AV_PICTURE_TYPE_B) &&
+                     (avctx->skip_frame < AVDISCARD_NONKEY ||
+                      hx->slice_type_nos == AV_PICTURE_TYPE_I) &&
+                     avctx->skip_frame < AVDISCARD_ALL)
+                     context_count++;
+                 break;
+             case NAL_SEI:
+                 init_get_bits(&s->gb, ptr, bit_length);
+                 ff_h264_decode_sei(h);
+                 break;
+             case NAL_SPS:
+                 init_get_bits(&s->gb, ptr, bit_length);
 -                if (ff_h264_decode_seq_parameter_set(h) < 0 &&
 -                    h->is_avc && (nalsize != consumed) && nalsize) {
++                if (ff_h264_decode_seq_parameter_set(h) < 0 && (h->is_avc ? (nalsize != consumed) && nalsize : 1)) {
+                     av_log(h->s.avctx, AV_LOG_DEBUG,
 -                           "SPS decoding failure, try parsing the coomplete NAL\n");
 -                    init_get_bits(&s->gb, buf + buf_index + 1 - consumed,
 -                                  8 * (nalsize - 1));
++                           "SPS decoding failure, trying alternative mode\n");
++                    if (h->is_avc)
++                        av_assert0(next_avc - buf_index + consumed == nalsize);
++                    init_get_bits(&s->gb, &buf[buf_index + 1 - consumed],
++                                  8*(next_avc - buf_index + consumed - 1));
+                     ff_h264_decode_seq_parameter_set(h);
+                 }
  
-             break;
-         case NAL_AUD:
-         case NAL_END_SEQUENCE:
-         case NAL_END_STREAM:
-         case NAL_FILLER_DATA:
-         case NAL_SPS_EXT:
-         case NAL_AUXILIARY_SLICE:
-             break;
-         default:
-             av_log(avctx, AV_LOG_DEBUG, "Unknown NAL code: %d (%d bits)\n", hx->nal_unit_type, bit_length);
-         }
+                 if (s->flags & CODEC_FLAG_LOW_DELAY ||
+                     (h->sps.bitstream_restriction_flag &&
+                      !h->sps.num_reorder_frames))
+                     s->low_delay = 1;
 -
+                 if (avctx->has_b_frames < 2)
+                     avctx->has_b_frames = !s->low_delay;
 -
 -                if (avctx->bits_per_raw_sample != h->sps.bit_depth_luma ||
 -                    h->cur_chroma_format_idc   != h->sps.chroma_format_idc) {
 -                    if (h->sps.bit_depth_luma >= 8 && h->sps.bit_depth_luma <= 10) {
 -                        avctx->bits_per_raw_sample = h->sps.bit_depth_luma;
 -                        h->cur_chroma_format_idc   = h->sps.chroma_format_idc;
 -                        h->pixel_shift             = h->sps.bit_depth_luma > 8;
 -
 -                        ff_h264dsp_init(&h->h264dsp, h->sps.bit_depth_luma,
 -                                        h->sps.chroma_format_idc);
 -                        ff_h264_pred_init(&h->hpc, s->codec_id,
 -                                          h->sps.bit_depth_luma,
 -                                          h->sps.chroma_format_idc);
 -                        s->dsp.dct_bits = h->sps.bit_depth_luma > 8 ? 32 : 16;
 -                        ff_dsputil_init(&s->dsp, s->avctx);
 -                    } else {
 -                        av_log(avctx, AV_LOG_ERROR,
 -                               "Unsupported bit depth: %d\n",
 -                               h->sps.bit_depth_luma);
 -                        return -1;
 -                    }
 -                }
+                 break;
+             case NAL_PPS:
+                 init_get_bits(&s->gb, ptr, bit_length);
+                 ff_h264_decode_picture_parameter_set(h, bit_length);
+                 break;
+             case NAL_AUD:
+             case NAL_END_SEQUENCE:
+             case NAL_END_STREAM:
+             case NAL_FILLER_DATA:
+             case NAL_SPS_EXT:
+             case NAL_AUXILIARY_SLICE:
+                 break;
+             default:
+                 av_log(avctx, AV_LOG_DEBUG, "Unknown NAL code: %d (%d bits)\n",
+                        hx->nal_unit_type, bit_length);
+             }
  
-         if(context_count == h->max_contexts) {
-             execute_decode_slices(h, context_count);
-             context_count = 0;
-         }
+             if (context_count == h->max_contexts) {
+                 execute_decode_slices(h, context_count);
+                 context_count = 0;
+             }
  
-         if (err < 0)
-             av_log(h->s.avctx, AV_LOG_ERROR, "decode_slice_header error\n");
-         else if(err == 1) {
-             /* Slice could not be decoded in parallel mode, copy down
-              * NAL unit stuff to context 0 and restart. Note that
-              * rbsp_buffer is not transferred, but since we no longer
-              * run in parallel mode this should not be an issue. */
-             h->nal_unit_type = hx->nal_unit_type;
-             h->nal_ref_idc   = hx->nal_ref_idc;
-             hx = h;
-             goto again;
+             if (err < 0)
+                 av_log(h->s.avctx, AV_LOG_ERROR, "decode_slice_header error\n");
+             else if (err == 1) {
+                 /* Slice could not be decoded in parallel mode, copy down
+                  * NAL unit stuff to context 0 and restart. Note that
+                  * rbsp_buffer is not transferred, but since we no longer
+                  * run in parallel mode this should not be an issue. */
+                 h->nal_unit_type = hx->nal_unit_type;
+                 h->nal_ref_idc   = hx->nal_ref_idc;
+                 hx               = h;
+                 goto again;
+             }
          }
      }
-     }
-     if(context_count)
+     if (context_count)
          execute_decode_slices(h, context_count);
      return buf_index;
  }
  /**
   * Return the number of bytes consumed for building the current frame.
   */
- static int get_consumed_bytes(MpegEncContext *s, int pos, int buf_size){
-         if(pos==0) pos=1; //avoid infinite loops (i doubt that is needed but ...)
-         if(pos+10>buf_size) pos=buf_size; // oops ;)
+ static int get_consumed_bytes(MpegEncContext *s, int pos, int buf_size)
+ {
+     if (pos == 0)
+         pos = 1;          // avoid infinite loops (I doubt that is needed, but ...)
+     if (pos + 10 > buf_size)
+         pos = buf_size;                   // oops ;) clamp to the end of the buffer
  
-         return pos;
+     return pos;
  }
  
- static int decode_frame(AVCodecContext *avctx,
-                              void *data, int *data_size,
-                              AVPacket *avpkt)
+ static int decode_frame(AVCodecContext *avctx, void *data,
+                         int *data_size, AVPacket *avpkt)
  {
      const uint8_t *buf = avpkt->data;
-     int buf_size = avpkt->size;
-     H264Context *h = avctx->priv_data;
-     MpegEncContext *s = &h->s;
-     AVFrame *pict = data;
-     int buf_index = 0;
+     int buf_size       = avpkt->size;
+     H264Context *h     = avctx->priv_data;
+     MpegEncContext *s  = &h->s;
+     AVFrame *pict      = data;
+     int buf_index      = 0;
 +    Picture *out;
 +    int i, out_idx;
  
-     s->flags= avctx->flags;
-     s->flags2= avctx->flags2;
+     s->flags  = avctx->flags;
+     s->flags2 = avctx->flags2;
  
-    /* end of stream, output what is still in the buffers */
+     /* end of stream, output what is still in the buffers */
 -out:
      if (buf_size == 0) {
 -        Picture *out;
 -        int i, out_idx;
 + out:
  
          s->current_picture_ptr = NULL;
  
  
          return buf_index;
      }
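 +    /* in AVC mode, a packet consisting only of SPS (0x67) and PPS (0x68) NAL units
 +     * behind 16-bit big-endian size fields is handed to ff_h264_decode_extradata() */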
 +    if(h->is_avc && buf_size >= 9 && buf[0]==1 && buf[2]==0 && (buf[4]&0xFC)==0xFC && (buf[5]&0x1F) && buf[8]==0x67){
 +        int cnt= buf[5]&0x1f;
 +        const uint8_t *p= buf+6;
 +        while(cnt--){
 +            int nalsize= AV_RB16(p) + 2;
 +            if(nalsize > buf_size - (p-buf) || p[2]!=0x67)
 +                goto not_extra;
 +            p += nalsize;
 +        }
 +        cnt = *(p++);
 +        if(!cnt)
 +            goto not_extra;
 +        while(cnt--){
 +            int nalsize= AV_RB16(p) + 2;
 +            if(nalsize > buf_size - (p-buf) || p[2]!=0x68)
 +                goto not_extra;
 +            p += nalsize;
 +        }
 +
 +        return ff_h264_decode_extradata(h, buf, buf_size);
 +    }
 +not_extra:
  
-     buf_index=decode_nal_units(h, buf, buf_size);
-     if(buf_index < 0)
+     buf_index = decode_nal_units(h, buf, buf_size);
+     if (buf_index < 0)
          return -1;
  
      if (!s->current_picture_ptr && h->nal_unit_type == NAL_END_SEQUENCE) {
          goto out;
      }
  
-     if(!(s->flags2 & CODEC_FLAG2_CHUNKS) && !s->current_picture_ptr){
+     if (!(s->flags2 & CODEC_FLAG2_CHUNKS) && !s->current_picture_ptr) {
 -        if (avctx->skip_frame >= AVDISCARD_NONREF)
 -            return 0;
 +        if (avctx->skip_frame >= AVDISCARD_NONREF ||
 +            buf_size >= 4 && !memcmp("Q264", buf, 4))
 +            return buf_size;
          av_log(avctx, AV_LOG_ERROR, "no frame!\n");
          return -1;
      }
  
          field_end(h, 0);
  
-         *data_size = 0; /* Wait for second field. */
 -        if (!h->next_output_pic) {
 -            /* Wait for second field. */
 -            *data_size = 0;
 -        } else {
++        /* Wait for second field. */
++        *data_size = 0;
 +        if (h->next_output_pic && (h->next_output_pic->sync || h->sync>1)) {
-                 *data_size = sizeof(AVFrame);
-                 *pict      = h->next_output_pic->f;
+             *data_size = sizeof(AVFrame);
+             *pict      = h->next_output_pic->f;
          }
      }
  
@@@ -4169,10 -4467,9 +4506,10 @@@ av_cold void ff_h264_free_context(H264C
  
  av_cold int ff_h264_decode_end(AVCodecContext *avctx)
  {
-     H264Context *h = avctx->priv_data;
+     H264Context *h    = avctx->priv_data;
      MpegEncContext *s = &h->s;
  
 +    ff_h264_remove_all_refs(h);
      ff_h264_free_context(h);
  
      ff_MPV_common_end(s);
@@@ -4199,42 -4496,22 +4536,43 @@@ static const AVProfile profiles[] = 
      { FF_PROFILE_UNKNOWN },
  };
  
 +static const AVOption h264_options[] = {
 +    {"is_avc", "is avc", offsetof(H264Context, is_avc), FF_OPT_TYPE_INT, {.dbl = 0}, 0, 1, 0},
 +    {"nal_length_size", "nal_length_size", offsetof(H264Context, nal_length_size), FF_OPT_TYPE_INT, {.dbl = 0}, 0, 4, 0},
 +    {NULL}
 +};
 +
 +static const AVClass h264_class = {
 +    "H264 Decoder",
 +    av_default_item_name,
 +    h264_options,
 +    LIBAVUTIL_VERSION_INT,
 +};
 +
 +static const AVClass h264_vdpau_class = {
 +    "H264 VDPAU Decoder",
 +    av_default_item_name,
 +    h264_options,
 +    LIBAVUTIL_VERSION_INT,
 +};
 +
  AVCodec ff_h264_decoder = {
-     .name           = "h264",
-     .type           = AVMEDIA_TYPE_VIDEO,
-     .id             = CODEC_ID_H264,
-     .priv_data_size = sizeof(H264Context),
-     .init           = ff_h264_decode_init,
-     .close          = ff_h264_decode_end,
-     .decode         = decode_frame,
-     .capabilities   = /*CODEC_CAP_DRAW_HORIZ_BAND |*/ CODEC_CAP_DR1 | CODEC_CAP_DELAY |
-                       CODEC_CAP_SLICE_THREADS | CODEC_CAP_FRAME_THREADS,
-     .flush= flush_dpb,
-     .long_name = NULL_IF_CONFIG_SMALL("H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10"),
+     .name                  = "h264",
+     .type                  = AVMEDIA_TYPE_VIDEO,
+     .id                    = CODEC_ID_H264,
+     .priv_data_size        = sizeof(H264Context),
+     .init                  = ff_h264_decode_init,
+     .close                 = ff_h264_decode_end,
+     .decode                = decode_frame,
+     .capabilities          = /*CODEC_CAP_DRAW_HORIZ_BAND |*/ CODEC_CAP_DR1 |
+                              CODEC_CAP_DELAY | CODEC_CAP_SLICE_THREADS |
+                              CODEC_CAP_FRAME_THREADS,
+     .flush                 = flush_dpb,
+     .long_name             = NULL_IF_CONFIG_SMALL("H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10"),
      .init_thread_copy      = ONLY_IF_THREADS_ENABLED(decode_init_thread_copy),
      .update_thread_context = ONLY_IF_THREADS_ENABLED(decode_update_thread_context),
-     .profiles = NULL_IF_CONFIG_SMALL(profiles),
-     .priv_class     = &h264_class,
+     .profiles              = NULL_IF_CONFIG_SMALL(profiles),
++    .priv_class            = &h264_class,
  };
  
  #if CONFIG_H264_VDPAU_DECODER
@@@ -4247,10 -4524,10 +4585,11 @@@ AVCodec ff_h264_vdpau_decoder = 
      .close          = ff_h264_decode_end,
      .decode         = decode_frame,
      .capabilities   = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_HWACCEL_VDPAU,
-     .flush= flush_dpb,
-     .long_name = NULL_IF_CONFIG_SMALL("H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (VDPAU acceleration)"),
-     .pix_fmts = (const enum PixelFormat[]){PIX_FMT_VDPAU_H264, PIX_FMT_NONE},
-     .profiles = NULL_IF_CONFIG_SMALL(profiles),
+     .flush          = flush_dpb,
+     .long_name      = NULL_IF_CONFIG_SMALL("H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (VDPAU acceleration)"),
+     .pix_fmts       = (const enum PixelFormat[]) { PIX_FMT_VDPAU_H264,
+                                                    PIX_FMT_NONE},
+     .profiles       = NULL_IF_CONFIG_SMALL(profiles),
 +    .priv_class     = &h264_vdpau_class,
  };
  #endif
Simple merge
diff --cc tests/Makefile
@@@ -136,13 -112,11 +136,13 @@@ endi
  
  FATE_UTILS = base64 tiny_psnr
  
 -fate: $(FATE)
 +TOOL = ffmpeg
 +
 +fate:: $(FATE)
  
 -$(FATE): avconv$(EXESUF) $(FATE_UTILS:%=tests/%$(HOSTEXESUF))
 +$(FATE): $(TOOL)$(EXESUF) $(FATE_UTILS:%=tests/%$(HOSTEXESUF))
        @echo "TEST    $(@:fate-%=%)"
-       $(Q)$(SRC_PATH)/tests/fate-run.sh $@ "$(SAMPLES)" "$(TARGET_EXEC)" "$(TARGET_PATH)" '$(CMD)' '$(CMP)' '$(REF)' '$(FUZZ)' '$(THREADS)' '$(THREAD_TYPE)' '$(CPUFLAGS)'
+       $(Q)$(SRC_PATH)/tests/fate-run.sh $@ "$(SAMPLES)" "$(TARGET_EXEC)" "$(TARGET_PATH)" '$(CMD)' '$(CMP)' '$(REF)' '$(FUZZ)' '$(THREADS)' '$(THREAD_TYPE)' '$(CPUFLAGS)' '$(CMP_SHIFT)' '$(CMP_TARGET)' '$(SIZE_TOLERANCE)'
  
  fate-list:
        @printf '%s\n' $(sort $(FATE))
Simple merge