mpegvideo.c
Go to the documentation of this file.
1 /*
2  * The simplest mpeg encoder (well, it was the simplest!)
3  * Copyright (c) 2000,2001 Fabrice Bellard
4  * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * 4MV & hq & B-frame encoding stuff by Michael Niedermayer <michaelni@gmx.at>
7  *
8  * This file is part of Libav.
9  *
10  * Libav is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Lesser General Public
12  * License as published by the Free Software Foundation; either
13  * version 2.1 of the License, or (at your option) any later version.
14  *
15  * Libav is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18  * Lesser General Public License for more details.
19  *
20  * You should have received a copy of the GNU Lesser General Public
21  * License along with Libav; if not, write to the Free Software
22  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
23  */
24 
30 #include "libavutil/imgutils.h"
31 #include "avcodec.h"
32 #include "dsputil.h"
33 #include "internal.h"
34 #include "mathops.h"
35 #include "mpegvideo.h"
36 #include "mjpegenc.h"
37 #include "msmpeg4.h"
38 #include "xvmc_internal.h"
39 #include "thread.h"
40 #include <limits.h>
41 
42 //#undef NDEBUG
43 //#include <assert.h>
44 
46  DCTELEM *block, int n, int qscale);
48  DCTELEM *block, int n, int qscale);
50  DCTELEM *block, int n, int qscale);
52  DCTELEM *block, int n, int qscale);
54  DCTELEM *block, int n, int qscale);
56  DCTELEM *block, int n, int qscale);
58  DCTELEM *block, int n, int qscale);
59 
60 
61 /* enable all paranoid tests for rounding, overflows, etc... */
62 //#define PARANOID
63 
64 //#define DEBUG
65 
66 
68 // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
69  0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
70  16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31
71 };
72 
74 // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
75  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
76  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
77  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
78  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
79  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
80  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
81  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
82  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
83 };
84 
/* MPEG-2 DC scale table: a constant divisor of 4 for every qscale index
 * (presumably selected for intra_dc_precision == 1 — confirm against the
 * ff_mpeg2_dc_scale_table[] selector). */
static const uint8_t mpeg2_dc_scale_table1[128] = {
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
};
96 
/* MPEG-2 DC scale table: a constant divisor of 2 for every qscale index
 * (presumably selected for intra_dc_precision == 2 — confirm against the
 * ff_mpeg2_dc_scale_table[] selector). */
static const uint8_t mpeg2_dc_scale_table2[128] = {
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
};
108 
/* MPEG-2 DC scale table: a constant divisor of 1 (no scaling) for every
 * qscale index (presumably selected for intra_dc_precision == 3 — confirm
 * against the ff_mpeg2_dc_scale_table[] selector). */
static const uint8_t mpeg2_dc_scale_table3[128] = {
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
};
120 
121 const uint8_t *const ff_mpeg2_dc_scale_table[4] = {
126 };
127 
131 };
132 
134 #if CONFIG_H264_DXVA2_HWACCEL
136 #endif
137 #if CONFIG_H264_VAAPI_HWACCEL
139 #endif
140 #if CONFIG_H264_VDA_HWACCEL
142 #endif
145 };
146 
148  const uint8_t *end,
149  uint32_t * restrict state)
150 {
151  int i;
152 
153  assert(p <= end);
154  if (p >= end)
155  return end;
156 
157  for (i = 0; i < 3; i++) {
158  uint32_t tmp = *state << 8;
159  *state = tmp + *(p++);
160  if (tmp == 0x100 || p == end)
161  return p;
162  }
163 
164  while (p < end) {
165  if (p[-1] > 1 ) p += 3;
166  else if (p[-2] ) p += 2;
167  else if (p[-3]|(p[-1]-1)) p++;
168  else {
169  p++;
170  break;
171  }
172  }
173 
174  p = FFMIN(p, end) - 4;
175  *state = AV_RB32(p);
176 
177  return p + 4;
178 }
179 
180 /* init common dct for both encoder and decoder */
182 {
183  ff_dsputil_init(&s->dsp, s->avctx);
185 
191  if (s->flags & CODEC_FLAG_BITEXACT)
194 
195 #if ARCH_X86
197 #elif ARCH_ALPHA
199 #elif ARCH_ARM
201 #elif HAVE_ALTIVEC
203 #elif ARCH_BFIN
205 #endif
206 
207  /* load & permutate scantables
208  * note: only wmv uses different ones
209  */
210  if (s->alternate_scan) {
213  } else {
216  }
219 
220  return 0;
221 }
222 
224 {
225  *dst = *src;
226  dst->f.type = FF_BUFFER_TYPE_COPY;
227 }
228 
233 {
234  /* WM Image / Screen codecs allocate internal buffers with different
235  * dimensions / colorspaces; ignore user-defined callbacks for these. */
236  if (s->codec_id != AV_CODEC_ID_WMV3IMAGE &&
239  ff_thread_release_buffer(s->avctx, &pic->f);
240  else
243 }
244 
246 {
247  int alloc_size = FFALIGN(FFABS(linesize) + 32, 32);
248 
249  // edge emu needs blocksize + filter length - 1
250  // (= 17x17 for halfpel / 21x21 for h264)
251  // VC1 computes luma and chroma simultaneously and needs 19X19 + 9x9
252  // at uvlinesize. It supports only YUV420 so 24x24 is enough
253  // linesize * interlaced * MBsize
254  FF_ALLOCZ_OR_GOTO(s->avctx, s->edge_emu_buffer, alloc_size * 2 * 24,
255  fail);
256 
257  FF_ALLOCZ_OR_GOTO(s->avctx, s->me.scratchpad, alloc_size * 2 * 16 * 3,
258  fail)
259  s->me.temp = s->me.scratchpad;
260  s->rd_scratchpad = s->me.scratchpad;
261  s->b_scratchpad = s->me.scratchpad;
262  s->obmc_scratchpad = s->me.scratchpad + 16;
263 
264  return 0;
265 fail:
267  return AVERROR(ENOMEM);
268 }
269 
274 {
275  int r, ret;
276 
277  if (s->avctx->hwaccel) {
278  assert(!pic->f.hwaccel_picture_private);
279  if (s->avctx->hwaccel->priv_data_size) {
281  if (!pic->f.hwaccel_picture_private) {
282  av_log(s->avctx, AV_LOG_ERROR, "alloc_frame_buffer() failed (hwaccel private data allocation)\n");
283  return -1;
284  }
285  }
286  }
287 
288  if (s->codec_id != AV_CODEC_ID_WMV3IMAGE &&
291  r = ff_thread_get_buffer(s->avctx, &pic->f);
292  else
293  r = avcodec_default_get_buffer(s->avctx, &pic->f);
294 
295  if (r < 0 || !pic->f.type || !pic->f.data[0]) {
296  av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed (%d %d %p)\n",
297  r, pic->f.type, pic->f.data[0]);
299  return -1;
300  }
301 
302  if (s->linesize && (s->linesize != pic->f.linesize[0] ||
303  s->uvlinesize != pic->f.linesize[1])) {
305  "get_buffer() failed (stride changed)\n");
306  free_frame_buffer(s, pic);
307  return -1;
308  }
309 
310  if (pic->f.linesize[1] != pic->f.linesize[2]) {
312  "get_buffer() failed (uv stride mismatch)\n");
313  free_frame_buffer(s, pic);
314  return -1;
315  }
316 
317  if (!s->edge_emu_buffer &&
318  (ret = ff_mpv_frame_size_alloc(s, pic->f.linesize[0])) < 0) {
320  "get_buffer() failed to allocate context scratch buffers.\n");
321  free_frame_buffer(s, pic);
322  return ret;
323  }
324 
325  return 0;
326 }
327 
332 int ff_alloc_picture(MpegEncContext *s, Picture *pic, int shared)
333 {
334  const int big_mb_num = s->mb_stride * (s->mb_height + 1) + 1;
335 
336  // the + 1 is needed so memset(,,stride*height) does not sig11
337 
338  const int mb_array_size = s->mb_stride * s->mb_height;
339  const int b8_array_size = s->b8_stride * s->mb_height * 2;
340  const int b4_array_size = s->b4_stride * s->mb_height * 4;
341  int i;
342  int r = -1;
343 
344  if (shared) {
345  assert(pic->f.data[0]);
346  assert(pic->f.type == 0 || pic->f.type == FF_BUFFER_TYPE_SHARED);
348  } else {
349  assert(!pic->f.data[0]);
350 
351  if (alloc_frame_buffer(s, pic) < 0)
352  return -1;
353 
354  s->linesize = pic->f.linesize[0];
355  s->uvlinesize = pic->f.linesize[1];
356  }
357 
358  if (pic->f.qscale_table == NULL) {
359  if (s->encoding) {
360  FF_ALLOCZ_OR_GOTO(s->avctx, pic->mb_var,
361  mb_array_size * sizeof(int16_t), fail)
363  mb_array_size * sizeof(int16_t), fail)
365  mb_array_size * sizeof(int8_t ), fail)
366  }
367 
369  mb_array_size * sizeof(uint8_t) + 2, fail)// the + 2 is for the slice end check
371  (big_mb_num + s->mb_stride) * sizeof(uint8_t),
372  fail)
374  (big_mb_num + s->mb_stride) * sizeof(uint32_t),
375  fail)
376  pic->f.mb_type = pic->mb_type_base + 2 * s->mb_stride + 1;
377  pic->f.qscale_table = pic->qscale_table_base + 2 * s->mb_stride + 1;
378  if (s->out_format == FMT_H264) {
379  for (i = 0; i < 2; i++) {
381  2 * (b4_array_size + 4) * sizeof(int16_t),
382  fail)
383  pic->f.motion_val[i] = pic->motion_val_base[i] + 4;
384  FF_ALLOCZ_OR_GOTO(s->avctx, pic->f.ref_index[i],
385  4 * mb_array_size * sizeof(uint8_t), fail)
386  }
387  pic->f.motion_subsample_log2 = 2;
388  } else if (s->out_format == FMT_H263 || s->encoding ||
389  (s->avctx->debug & FF_DEBUG_MV) || s->avctx->debug_mv) {
390  for (i = 0; i < 2; i++) {
392  2 * (b8_array_size + 4) * sizeof(int16_t),
393  fail)
394  pic->f.motion_val[i] = pic->motion_val_base[i] + 4;
395  FF_ALLOCZ_OR_GOTO(s->avctx, pic->f.ref_index[i],
396  4 * mb_array_size * sizeof(uint8_t), fail)
397  }
398  pic->f.motion_subsample_log2 = 3;
399  }
400  if (s->avctx->debug&FF_DEBUG_DCT_COEFF) {
402  64 * mb_array_size * sizeof(DCTELEM) * 6, fail)
403  }
404  pic->f.qstride = s->mb_stride;
406  1 * sizeof(AVPanScan), fail)
407  }
408 
409  pic->owner2 = s;
410 
411  return 0;
412 fail: // for the FF_ALLOCZ_OR_GOTO macro
413  if (r >= 0)
414  free_frame_buffer(s, pic);
415  return -1;
416 }
417 
421 static void free_picture(MpegEncContext *s, Picture *pic)
422 {
423  int i;
424 
425  if (pic->f.data[0] && pic->f.type != FF_BUFFER_TYPE_SHARED) {
426  free_frame_buffer(s, pic);
427  }
428 
429  av_freep(&pic->mb_var);
430  av_freep(&pic->mc_mb_var);
431  av_freep(&pic->mb_mean);
432  av_freep(&pic->f.mbskip_table);
434  pic->f.qscale_table = NULL;
435  av_freep(&pic->mb_type_base);
436  pic->f.mb_type = NULL;
437  av_freep(&pic->f.dct_coeff);
438  av_freep(&pic->f.pan_scan);
439  pic->f.mb_type = NULL;
440  for (i = 0; i < 2; i++) {
441  av_freep(&pic->motion_val_base[i]);
442  av_freep(&pic->f.ref_index[i]);
443  pic->f.motion_val[i] = NULL;
444  }
445 
446  if (pic->f.type == FF_BUFFER_TYPE_SHARED) {
447  for (i = 0; i < 4; i++) {
448  pic->f.base[i] =
449  pic->f.data[i] = NULL;
450  }
451  pic->f.type = 0;
452  }
453 }
454 
456 {
457  int y_size = s->b8_stride * (2 * s->mb_height + 1);
458  int c_size = s->mb_stride * (s->mb_height + 1);
459  int yc_size = y_size + 2 * c_size;
460  int i;
461 
462  s->edge_emu_buffer =
463  s->me.scratchpad =
464  s->me.temp =
465  s->rd_scratchpad =
466  s->b_scratchpad =
467  s->obmc_scratchpad = NULL;
468 
469  if (s->encoding) {
470  FF_ALLOCZ_OR_GOTO(s->avctx, s->me.map,
471  ME_MAP_SIZE * sizeof(uint32_t), fail)
473  ME_MAP_SIZE * sizeof(uint32_t), fail)
474  if (s->avctx->noise_reduction) {
476  2 * 64 * sizeof(int), fail)
477  }
478  }
479  FF_ALLOCZ_OR_GOTO(s->avctx, s->blocks, 64 * 12 * 2 * sizeof(DCTELEM), fail)
480  s->block = s->blocks[0];
481 
482  for (i = 0; i < 12; i++) {
483  s->pblocks[i] = &s->block[i];
484  }
485 
486  if (s->out_format == FMT_H263) {
487  /* ac values */
489  yc_size * sizeof(int16_t) * 16, fail);
490  s->ac_val[0] = s->ac_val_base + s->b8_stride + 1;
491  s->ac_val[1] = s->ac_val_base + y_size + s->mb_stride + 1;
492  s->ac_val[2] = s->ac_val[1] + c_size;
493  }
494 
495  return 0;
496 fail:
497  return -1; // free() through ff_MPV_common_end()
498 }
499 
501 {
502  if (s == NULL)
503  return;
504 
506  av_freep(&s->me.scratchpad);
507  s->me.temp =
508  s->rd_scratchpad =
509  s->b_scratchpad =
510  s->obmc_scratchpad = NULL;
511 
512  av_freep(&s->dct_error_sum);
513  av_freep(&s->me.map);
514  av_freep(&s->me.score_map);
515  av_freep(&s->blocks);
516  av_freep(&s->ac_val_base);
517  s->block = NULL;
518 }
519 
521 {
522 #define COPY(a) bak->a = src->a
523  COPY(edge_emu_buffer);
524  COPY(me.scratchpad);
525  COPY(me.temp);
526  COPY(rd_scratchpad);
527  COPY(b_scratchpad);
528  COPY(obmc_scratchpad);
529  COPY(me.map);
530  COPY(me.score_map);
531  COPY(blocks);
532  COPY(block);
533  COPY(start_mb_y);
534  COPY(end_mb_y);
535  COPY(me.map_generation);
536  COPY(pb);
537  COPY(dct_error_sum);
538  COPY(dct_count[0]);
539  COPY(dct_count[1]);
540  COPY(ac_val_base);
541  COPY(ac_val[0]);
542  COPY(ac_val[1]);
543  COPY(ac_val[2]);
544 #undef COPY
545 }
546 
548 {
549  MpegEncContext bak;
550  int i, ret;
551  // FIXME copy only needed parts
552  // START_TIMER
553  backup_duplicate_context(&bak, dst);
554  memcpy(dst, src, sizeof(MpegEncContext));
555  backup_duplicate_context(dst, &bak);
556  for (i = 0; i < 12; i++) {
557  dst->pblocks[i] = &dst->block[i];
558  }
559  if (!dst->edge_emu_buffer &&
560  (ret = ff_mpv_frame_size_alloc(dst, dst->linesize)) < 0) {
561  av_log(dst->avctx, AV_LOG_ERROR, "failed to allocate context "
562  "scratch buffers.\n");
563  return ret;
564  }
565  // STOP_TIMER("update_duplicate_context")
566  // about 10k cycles / 0.01 sec for 1000frames on 1ghz with 2 threads
567  return 0;
568 }
569 
571  const AVCodecContext *src)
572 {
573  int i;
574  MpegEncContext *s = dst->priv_data, *s1 = src->priv_data;
575 
576  if (dst == src || !s1->context_initialized)
577  return 0;
578 
579  // FIXME can parameters change on I-frames?
580  // in that case dst may need a reinit
581  if (!s->context_initialized) {
582  memcpy(s, s1, sizeof(MpegEncContext));
583 
584  s->avctx = dst;
587  s->bitstream_buffer = NULL;
589 
591  }
592 
593  if (s->height != s1->height || s->width != s1->width || s->context_reinit) {
594  int err;
595  s->context_reinit = 0;
596  s->height = s1->height;
597  s->width = s1->width;
598  if ((err = ff_MPV_common_frame_size_change(s)) < 0)
599  return err;
600  }
601 
602  s->avctx->coded_height = s1->avctx->coded_height;
603  s->avctx->coded_width = s1->avctx->coded_width;
604  s->avctx->width = s1->avctx->width;
605  s->avctx->height = s1->avctx->height;
606 
607  s->coded_picture_number = s1->coded_picture_number;
608  s->picture_number = s1->picture_number;
609  s->input_picture_number = s1->input_picture_number;
610 
611  memcpy(s->picture, s1->picture, s1->picture_count * sizeof(Picture));
612  memcpy(&s->last_picture, &s1->last_picture,
613  (char *) &s1->last_picture_ptr - (char *) &s1->last_picture);
614 
615  // reset s->picture[].f.extended_data to s->picture[].f.data
616  for (i = 0; i < s->picture_count; i++)
617  s->picture[i].f.extended_data = s->picture[i].f.data;
618 
619  s->last_picture_ptr = REBASE_PICTURE(s1->last_picture_ptr, s, s1);
620  s->current_picture_ptr = REBASE_PICTURE(s1->current_picture_ptr, s, s1);
621  s->next_picture_ptr = REBASE_PICTURE(s1->next_picture_ptr, s, s1);
622 
623  // Error/bug resilience
624  s->next_p_frame_damaged = s1->next_p_frame_damaged;
625  s->workaround_bugs = s1->workaround_bugs;
626 
627  // MPEG4 timing info
628  memcpy(&s->time_increment_bits, &s1->time_increment_bits,
629  (char *) &s1->shape - (char *) &s1->time_increment_bits);
630 
631  // B-frame info
632  s->max_b_frames = s1->max_b_frames;
633  s->low_delay = s1->low_delay;
634  s->droppable = s1->droppable;
635 
636  // DivX handling (doesn't work)
637  s->divx_packed = s1->divx_packed;
638 
639  if (s1->bitstream_buffer) {
640  if (s1->bitstream_buffer_size +
644  s1->allocated_bitstream_buffer_size);
645  s->bitstream_buffer_size = s1->bitstream_buffer_size;
646  memcpy(s->bitstream_buffer, s1->bitstream_buffer,
647  s1->bitstream_buffer_size);
648  memset(s->bitstream_buffer + s->bitstream_buffer_size, 0,
650  }
651 
652  // linesize dependend scratch buffer allocation
653  if (!s->edge_emu_buffer)
654  if (s1->linesize) {
655  if (ff_mpv_frame_size_alloc(s, s1->linesize) < 0) {
656  av_log(s->avctx, AV_LOG_ERROR, "Failed to allocate context "
657  "scratch buffers.\n");
658  return AVERROR(ENOMEM);
659  }
660  } else {
661  av_log(s->avctx, AV_LOG_ERROR, "Context scratch buffers could not "
662  "be allocated due to unknown size.\n");
663  return AVERROR_BUG;
664  }
665 
666  // MPEG2/interlacing info
667  memcpy(&s->progressive_sequence, &s1->progressive_sequence,
668  (char *) &s1->rtp_mode - (char *) &s1->progressive_sequence);
669 
670  if (!s1->first_field) {
671  s->last_pict_type = s1->pict_type;
672  if (s1->current_picture_ptr)
673  s->last_lambda_for[s1->pict_type] = s1->current_picture_ptr->f.quality;
674 
675  if (s1->pict_type != AV_PICTURE_TYPE_B) {
676  s->last_non_b_pict_type = s1->pict_type;
677  }
678  }
679 
680  return 0;
681 }
682 
690 {
691  s->y_dc_scale_table =
694  s->progressive_frame = 1;
695  s->progressive_sequence = 1;
697 
698  s->coded_picture_number = 0;
699  s->picture_number = 0;
700  s->input_picture_number = 0;
701 
702  s->picture_in_gop_number = 0;
703 
704  s->f_code = 1;
705  s->b_code = 1;
706 
707  s->picture_range_start = 0;
709 
710  s->slice_context_count = 1;
711 }
712 
719 {
721 }
722 
727 {
728  int y_size, c_size, yc_size, i, mb_array_size, mv_table_size, x, y;
729 
730  s->mb_width = (s->width + 15) / 16;
731  s->mb_stride = s->mb_width + 1;
732  s->b8_stride = s->mb_width * 2 + 1;
733  s->b4_stride = s->mb_width * 4 + 1;
734  mb_array_size = s->mb_height * s->mb_stride;
735  mv_table_size = (s->mb_height + 2) * s->mb_stride + 1;
736 
737  /* set default edge pos, will be overriden
738  * in decode_header if needed */
739  s->h_edge_pos = s->mb_width * 16;
740  s->v_edge_pos = s->mb_height * 16;
741 
742  s->mb_num = s->mb_width * s->mb_height;
743 
744  s->block_wrap[0] =
745  s->block_wrap[1] =
746  s->block_wrap[2] =
747  s->block_wrap[3] = s->b8_stride;
748  s->block_wrap[4] =
749  s->block_wrap[5] = s->mb_stride;
750 
751  y_size = s->b8_stride * (2 * s->mb_height + 1);
752  c_size = s->mb_stride * (s->mb_height + 1);
753  yc_size = y_size + 2 * c_size;
754 
755  FF_ALLOCZ_OR_GOTO(s->avctx, s->mb_index2xy, (s->mb_num + 1) * sizeof(int),
756  fail); // error ressilience code looks cleaner with this
757  for (y = 0; y < s->mb_height; y++)
758  for (x = 0; x < s->mb_width; x++)
759  s->mb_index2xy[x + y * s->mb_width] = x + y * s->mb_stride;
760 
761  s->mb_index2xy[s->mb_height * s->mb_width] =
762  (s->mb_height - 1) * s->mb_stride + s->mb_width; // FIXME really needed?
763 
764  if (s->encoding) {
765  /* Allocate MV tables */
767  mv_table_size * 2 * sizeof(int16_t), fail);
769  mv_table_size * 2 * sizeof(int16_t), fail);
771  mv_table_size * 2 * sizeof(int16_t), fail);
773  mv_table_size * 2 * sizeof(int16_t), fail);
775  mv_table_size * 2 * sizeof(int16_t), fail);
777  mv_table_size * 2 * sizeof(int16_t), fail);
778  s->p_mv_table = s->p_mv_table_base + s->mb_stride + 1;
782  s->mb_stride + 1;
784  s->mb_stride + 1;
786 
787  /* Allocate MB type table */
788  FF_ALLOCZ_OR_GOTO(s->avctx, s->mb_type, mb_array_size *
789  sizeof(uint16_t), fail); // needed for encoding
790 
791  FF_ALLOCZ_OR_GOTO(s->avctx, s->lambda_table, mb_array_size *
792  sizeof(int), fail);
793 
795  mb_array_size * sizeof(float), fail);
797  mb_array_size * sizeof(float), fail);
798 
799  }
800 
802  mb_array_size * sizeof(uint8_t), fail);
804  mb_array_size * sizeof(uint8_t), fail);
805 
806  if (s->codec_id == AV_CODEC_ID_MPEG4 ||
808  /* interlaced direct mode decoding tables */
809  for (i = 0; i < 2; i++) {
810  int j, k;
811  for (j = 0; j < 2; j++) {
812  for (k = 0; k < 2; k++) {
814  s->b_field_mv_table_base[i][j][k],
815  mv_table_size * 2 * sizeof(int16_t),
816  fail);
817  s->b_field_mv_table[i][j][k] = s->b_field_mv_table_base[i][j][k] +
818  s->mb_stride + 1;
819  }
821  mb_array_size * 2 * sizeof(uint8_t), fail);
823  mv_table_size * 2 * sizeof(int16_t), fail);
824  s->p_field_mv_table[i][j] = s->p_field_mv_table_base[i][j]
825  + s->mb_stride + 1;
826  }
828  mb_array_size * 2 * sizeof(uint8_t), fail);
829  }
830  }
831  if (s->out_format == FMT_H263) {
832  /* cbp values */
833  FF_ALLOCZ_OR_GOTO(s->avctx, s->coded_block_base, y_size, fail);
834  s->coded_block = s->coded_block_base + s->b8_stride + 1;
835 
836  /* cbp, ac_pred, pred_dir */
838  mb_array_size * sizeof(uint8_t), fail);
840  mb_array_size * sizeof(uint8_t), fail);
841  }
842 
843  if (s->h263_pred || s->h263_plus || !s->encoding) {
844  /* dc values */
845  // MN: we need these for error resilience of intra-frames
847  yc_size * sizeof(int16_t), fail);
848  s->dc_val[0] = s->dc_val_base + s->b8_stride + 1;
849  s->dc_val[1] = s->dc_val_base + y_size + s->mb_stride + 1;
850  s->dc_val[2] = s->dc_val[1] + c_size;
851  for (i = 0; i < yc_size; i++)
852  s->dc_val_base[i] = 1024;
853  }
854 
855  /* which mb is a intra block */
856  FF_ALLOCZ_OR_GOTO(s->avctx, s->mbintra_table, mb_array_size, fail);
857  memset(s->mbintra_table, 1, mb_array_size);
858 
859  /* init macroblock skip table */
860  FF_ALLOCZ_OR_GOTO(s->avctx, s->mbskip_table, mb_array_size + 2, fail);
861  // Note the + 1 is for a quicker mpeg4 slice_end detection
862 
864  s->avctx->debug_mv) {
865  s->visualization_buffer[0] = av_malloc((s->mb_width * 16 +
866  2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
867  s->visualization_buffer[1] = av_malloc((s->mb_width * 16 +
868  2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
869  s->visualization_buffer[2] = av_malloc((s->mb_width * 16 +
870  2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
871  }
872 
873  return 0;
874 fail:
875  return AVERROR(ENOMEM);
876 }
877 
883 {
884  int i;
885  int nb_slices = (HAVE_THREADS &&
887  s->avctx->thread_count : 1;
888 
889  if (s->encoding && s->avctx->slices)
890  nb_slices = s->avctx->slices;
891 
893  s->mb_height = (s->height + 31) / 32 * 2;
894  else if (s->codec_id != AV_CODEC_ID_H264)
895  s->mb_height = (s->height + 15) / 16;
896 
897  if (s->avctx->pix_fmt == AV_PIX_FMT_NONE) {
899  "decoding to AV_PIX_FMT_NONE is not supported.\n");
900  return -1;
901  }
902 
903  if (nb_slices > MAX_THREADS || (nb_slices > s->mb_height && s->mb_height)) {
904  int max_slices;
905  if (s->mb_height)
906  max_slices = FFMIN(MAX_THREADS, s->mb_height);
907  else
908  max_slices = MAX_THREADS;
909  av_log(s->avctx, AV_LOG_WARNING, "too many threads/slices (%d),"
910  " reducing to %d\n", nb_slices, max_slices);
911  nb_slices = max_slices;
912  }
913 
914  if ((s->width || s->height) &&
915  av_image_check_size(s->width, s->height, 0, s->avctx))
916  return -1;
917 
919 
920  s->flags = s->avctx->flags;
921  s->flags2 = s->avctx->flags2;
922 
923  /* set chroma shifts */
925  &s->chroma_x_shift,
926  &s->chroma_y_shift);
927 
928  /* convert fourcc to upper case */
930 
932 
933  if (s->width && s->height) {
935 
936  if (s->encoding) {
937  if (s->msmpeg4_version) {
939  2 * 2 * (MAX_LEVEL + 1) *
940  (MAX_RUN + 1) * 2 * sizeof(int), fail);
941  }
942  FF_ALLOCZ_OR_GOTO(s->avctx, s->avctx->stats_out, 256, fail);
943 
945  64 * 32 * sizeof(int), fail);
947  64 * 32 * sizeof(int), fail);
949  64 * 32 * 2 * sizeof(uint16_t), fail);
951  64 * 32 * 2 * sizeof(uint16_t), fail);
953  MAX_PICTURE_COUNT * sizeof(Picture *), fail);
955  MAX_PICTURE_COUNT * sizeof(Picture *), fail);
956 
957  if (s->avctx->noise_reduction) {
959  2 * 64 * sizeof(uint16_t), fail);
960  }
961  }
962  }
963 
966  s->picture_count * sizeof(Picture), fail);
967  for (i = 0; i < s->picture_count; i++) {
969  }
970 
971  if (s->width && s->height) {
972  if (init_context_frame(s))
973  goto fail;
974 
975  s->parse_context.state = -1;
976  }
977 
978  s->context_initialized = 1;
979  s->thread_context[0] = s;
980 
981  if (s->width && s->height) {
982  if (nb_slices > 1) {
983  for (i = 1; i < nb_slices; i++) {
984  s->thread_context[i] = av_malloc(sizeof(MpegEncContext));
985  memcpy(s->thread_context[i], s, sizeof(MpegEncContext));
986  }
987 
988  for (i = 0; i < nb_slices; i++) {
989  if (init_duplicate_context(s->thread_context[i], s) < 0)
990  goto fail;
991  s->thread_context[i]->start_mb_y =
992  (s->mb_height * (i) + nb_slices / 2) / nb_slices;
993  s->thread_context[i]->end_mb_y =
994  (s->mb_height * (i + 1) + nb_slices / 2) / nb_slices;
995  }
996  } else {
997  if (init_duplicate_context(s, s) < 0)
998  goto fail;
999  s->start_mb_y = 0;
1000  s->end_mb_y = s->mb_height;
1001  }
1002  s->slice_context_count = nb_slices;
1003  }
1004 
1005  return 0;
1006  fail:
1007  ff_MPV_common_end(s);
1008  return -1;
1009 }
1010 
1017 {
1018  int i, j, k;
1019 
1020  av_freep(&s->mb_type);
1027  s->p_mv_table = NULL;
1028  s->b_forw_mv_table = NULL;
1029  s->b_back_mv_table = NULL;
1032  s->b_direct_mv_table = NULL;
1033  for (i = 0; i < 2; i++) {
1034  for (j = 0; j < 2; j++) {
1035  for (k = 0; k < 2; k++) {
1036  av_freep(&s->b_field_mv_table_base[i][j][k]);
1037  s->b_field_mv_table[i][j][k] = NULL;
1038  }
1039  av_freep(&s->b_field_select_table[i][j]);
1040  av_freep(&s->p_field_mv_table_base[i][j]);
1041  s->p_field_mv_table[i][j] = NULL;
1042  }
1044  }
1045 
1046  av_freep(&s->dc_val_base);
1048  av_freep(&s->mbintra_table);
1049  av_freep(&s->cbp_table);
1050  av_freep(&s->pred_dir_table);
1051 
1052  av_freep(&s->mbskip_table);
1053 
1055  av_freep(&s->er_temp_buffer);
1056  av_freep(&s->mb_index2xy);
1057  av_freep(&s->lambda_table);
1058  av_freep(&s->cplx_tab);
1059  av_freep(&s->bits_tab);
1060 
1061  s->linesize = s->uvlinesize = 0;
1062 
1063  for (i = 0; i < 3; i++)
1065 
1066  return 0;
1067 }
1068 
1070 {
1071  int i, err = 0;
1072 
1073  if (s->slice_context_count > 1) {
1074  for (i = 0; i < s->slice_context_count; i++) {
1076  }
1077  for (i = 1; i < s->slice_context_count; i++) {
1078  av_freep(&s->thread_context[i]);
1079  }
1080  } else
1082 
1083  free_context_frame(s);
1084 
1085  if (s->picture)
1086  for (i = 0; i < s->picture_count; i++) {
1087  s->picture[i].needs_realloc = 1;
1088  }
1089 
1090  s->last_picture_ptr =
1091  s->next_picture_ptr =
1093 
1094  // init
1096  s->mb_height = (s->height + 31) / 32 * 2;
1097  else if (s->codec_id != AV_CODEC_ID_H264)
1098  s->mb_height = (s->height + 15) / 16;
1099 
1100  if ((s->width || s->height) &&
1101  av_image_check_size(s->width, s->height, 0, s->avctx))
1102  return AVERROR_INVALIDDATA;
1103 
1104  if ((err = init_context_frame(s)))
1105  goto fail;
1106 
1107  s->thread_context[0] = s;
1108 
1109  if (s->width && s->height) {
1110  int nb_slices = s->slice_context_count;
1111  if (nb_slices > 1) {
1112  for (i = 1; i < nb_slices; i++) {
1113  s->thread_context[i] = av_malloc(sizeof(MpegEncContext));
1114  memcpy(s->thread_context[i], s, sizeof(MpegEncContext));
1115  }
1116 
1117  for (i = 0; i < nb_slices; i++) {
1118  if (init_duplicate_context(s->thread_context[i], s) < 0)
1119  goto fail;
1120  s->thread_context[i]->start_mb_y =
1121  (s->mb_height * (i) + nb_slices / 2) / nb_slices;
1122  s->thread_context[i]->end_mb_y =
1123  (s->mb_height * (i + 1) + nb_slices / 2) / nb_slices;
1124  }
1125  } else {
1126  if (init_duplicate_context(s, s) < 0)
1127  goto fail;
1128  s->start_mb_y = 0;
1129  s->end_mb_y = s->mb_height;
1130  }
1131  s->slice_context_count = nb_slices;
1132  }
1133 
1134  return 0;
1135  fail:
1136  ff_MPV_common_end(s);
1137  return err;
1138 }
1139 
1140 /* init common structure for both encoder and decoder */
1142 {
1143  int i;
1144 
1145  if (s->slice_context_count > 1) {
1146  for (i = 0; i < s->slice_context_count; i++) {
1148  }
1149  for (i = 1; i < s->slice_context_count; i++) {
1150  av_freep(&s->thread_context[i]);
1151  }
1152  s->slice_context_count = 1;
1153  } else free_duplicate_context(s);
1154 
1156  s->parse_context.buffer_size = 0;
1157 
1160 
1161  av_freep(&s->avctx->stats_out);
1162  av_freep(&s->ac_stats);
1163 
1164  av_freep(&s->q_intra_matrix);
1165  av_freep(&s->q_inter_matrix);
1168  av_freep(&s->input_picture);
1170  av_freep(&s->dct_offset);
1171 
1172  if (s->picture && !s->avctx->internal->is_copy) {
1173  for (i = 0; i < s->picture_count; i++) {
1174  free_picture(s, &s->picture[i]);
1175  }
1176  }
1177  av_freep(&s->picture);
1178 
1179  free_context_frame(s);
1180 
1183 
1184  s->context_initialized = 0;
1185  s->last_picture_ptr =
1186  s->next_picture_ptr =
1188  s->linesize = s->uvlinesize = 0;
1189 }
1190 
1192  uint8_t static_store[2][2 * MAX_RUN + MAX_LEVEL + 3])
1193 {
1194  int8_t max_level[MAX_RUN + 1], max_run[MAX_LEVEL + 1];
1195  uint8_t index_run[MAX_RUN + 1];
1196  int last, run, level, start, end, i;
1197 
1198  /* If table is static, we can quit if rl->max_level[0] is not NULL */
1199  if (static_store && rl->max_level[0])
1200  return;
1201 
1202  /* compute max_level[], max_run[] and index_run[] */
1203  for (last = 0; last < 2; last++) {
1204  if (last == 0) {
1205  start = 0;
1206  end = rl->last;
1207  } else {
1208  start = rl->last;
1209  end = rl->n;
1210  }
1211 
1212  memset(max_level, 0, MAX_RUN + 1);
1213  memset(max_run, 0, MAX_LEVEL + 1);
1214  memset(index_run, rl->n, MAX_RUN + 1);
1215  for (i = start; i < end; i++) {
1216  run = rl->table_run[i];
1217  level = rl->table_level[i];
1218  if (index_run[run] == rl->n)
1219  index_run[run] = i;
1220  if (level > max_level[run])
1221  max_level[run] = level;
1222  if (run > max_run[level])
1223  max_run[level] = run;
1224  }
1225  if (static_store)
1226  rl->max_level[last] = static_store[last];
1227  else
1228  rl->max_level[last] = av_malloc(MAX_RUN + 1);
1229  memcpy(rl->max_level[last], max_level, MAX_RUN + 1);
1230  if (static_store)
1231  rl->max_run[last] = static_store[last] + MAX_RUN + 1;
1232  else
1233  rl->max_run[last] = av_malloc(MAX_LEVEL + 1);
1234  memcpy(rl->max_run[last], max_run, MAX_LEVEL + 1);
1235  if (static_store)
1236  rl->index_run[last] = static_store[last] + MAX_RUN + MAX_LEVEL + 2;
1237  else
1238  rl->index_run[last] = av_malloc(MAX_RUN + 1);
1239  memcpy(rl->index_run[last], index_run, MAX_RUN + 1);
1240  }
1241 }
1242 
1244 {
1245  int i, q;
1246 
1247  for (q = 0; q < 32; q++) {
1248  int qmul = q * 2;
1249  int qadd = (q - 1) | 1;
1250 
1251  if (q == 0) {
1252  qmul = 1;
1253  qadd = 0;
1254  }
1255  for (i = 0; i < rl->vlc.table_size; i++) {
1256  int code = rl->vlc.table[i][0];
1257  int len = rl->vlc.table[i][1];
1258  int level, run;
1259 
1260  if (len == 0) { // illegal code
1261  run = 66;
1262  level = MAX_LEVEL;
1263  } else if (len < 0) { // more bits needed
1264  run = 0;
1265  level = code;
1266  } else {
1267  if (code == rl->n) { // esc
1268  run = 66;
1269  level = 0;
1270  } else {
1271  run = rl->table_run[code] + 1;
1272  level = rl->table_level[code] * qmul + qadd;
1273  if (code >= rl->last) run += 192;
1274  }
1275  }
1276  rl->rl_vlc[q][i].len = len;
1277  rl->rl_vlc[q][i].level = level;
1278  rl->rl_vlc[q][i].run = run;
1279  }
1280  }
1281 }
1282 
1283 void ff_release_unused_pictures(MpegEncContext*s, int remove_current)
1284 {
1285  int i;
1286 
1287  /* release non reference frames */
1288  for (i = 0; i < s->picture_count; i++) {
1289  if (s->picture[i].f.data[0] && !s->picture[i].f.reference &&
1290  (!s->picture[i].owner2 || s->picture[i].owner2 == s) &&
1291  (remove_current || &s->picture[i] != s->current_picture_ptr)
1292  /* && s->picture[i].type!= FF_BUFFER_TYPE_SHARED */) {
1293  free_frame_buffer(s, &s->picture[i]);
1294  }
1295  }
1296 }
1297 
1298 static inline int pic_is_unused(MpegEncContext *s, Picture *pic)
1299 {
1300  if (pic->f.data[0] == NULL)
1301  return 1;
1302  if (pic->needs_realloc && !(pic->f.reference & DELAYED_PIC_REF))
1303  if (!pic->owner2 || pic->owner2 == s)
1304  return 1;
1305  return 0;
1306 }
1307 
1308 static int find_unused_picture(MpegEncContext *s, int shared)
1309 {
1310  int i;
1311 
1312  if (shared) {
1313  for (i = s->picture_range_start; i < s->picture_range_end; i++) {
1314  if (s->picture[i].f.data[0] == NULL && s->picture[i].f.type == 0)
1315  return i;
1316  }
1317  } else {
1318  for (i = s->picture_range_start; i < s->picture_range_end; i++) {
1319  if (pic_is_unused(s, &s->picture[i]) && s->picture[i].f.type != 0)
1320  return i; // FIXME
1321  }
1322  for (i = s->picture_range_start; i < s->picture_range_end; i++) {
1323  if (pic_is_unused(s, &s->picture[i]))
1324  return i;
1325  }
1326  }
1327 
1328  return AVERROR_INVALIDDATA;
1329 }
1330 
/* NOTE(review): the signature line (orig. 1331) is missing from this extract;
 * presumably int ff_find_unused_picture(MpegEncContext *s, int shared).
 * Wraps find_unused_picture() and, if the chosen slot is flagged
 * needs_realloc, releases its old contents so the caller can allocate a
 * fresh buffer. One source line (orig. 1339) between the free_picture()
 * call and the closing brace is also missing here — verify against the
 * original file before relying on this text. */
1332 {
1333  int ret = find_unused_picture(s, shared);
1334 
1335  if (ret >= 0 && ret < s->picture_range_end) {
1336  if (s->picture[ret].needs_realloc) {
1337  s->picture[ret].needs_realloc = 0;
1338  free_picture(s, &s->picture[ret]);
1340  }
1341  }
1342  return ret;
1343 }
1344 
/* NOTE(review): the signature line (orig. 1345) is missing from this extract;
 * from the body this is presumably the encoder's noise-reduction updater
 * taking MpegEncContext *s. It halves the accumulated per-coefficient DCT
 * error sums once 2^16 blocks have been counted (a decaying average), then
 * recomputes the rounding offsets dct_offset[] used for noise reduction.
 * Confirm the signature against the original file. */
1346 {
1347  int intra, i;
1348 
/* intra == 0/1: separate statistics for inter and intra blocks. */
1349  for (intra = 0; intra < 2; intra++) {
1350  if (s->dct_count[intra] > (1 << 16)) {
1351  for (i = 0; i < 64; i++) {
1352  s->dct_error_sum[intra][i] >>= 1;
1353  }
1354  s->dct_count[intra] >>= 1;
1355  }
1356 
1357  for (i = 0; i < 64; i++) {
/* offset = noise_reduction * count / error_sum, rounded; the +1 in the
 * divisor guards against division by zero for untouched coefficients. */
1358  s->dct_offset[intra][i] = (s->avctx->noise_reduction *
1359  s->dct_count[intra] +
1360  s->dct_error_sum[intra][i] / 2) /
1361  (s->dct_error_sum[intra][i] + 1);
1362  }
1363  }
1364 }
1365 
1371 {
1372  int i;
1373  Picture *pic;
1374  s->mb_skipped = 0;
1375 
1376  /* mark & release old frames */
1377  if (s->out_format != FMT_H264 || s->codec_id == AV_CODEC_ID_SVQ3) {
1378  if (s->pict_type != AV_PICTURE_TYPE_B && s->last_picture_ptr &&
1380  s->last_picture_ptr->f.data[0]) {
1381  if (s->last_picture_ptr->owner2 == s)
1383  }
1384 
1385  /* release forgotten pictures */
1386  /* if (mpeg124/h263) */
1387  if (!s->encoding) {
1388  for (i = 0; i < s->picture_count; i++) {
1389  if (s->picture[i].owner2 == s && s->picture[i].f.data[0] &&
1390  &s->picture[i] != s->last_picture_ptr &&
1391  &s->picture[i] != s->next_picture_ptr &&
1392  s->picture[i].f.reference && !s->picture[i].needs_realloc) {
1393  if (!(avctx->active_thread_type & FF_THREAD_FRAME))
1394  av_log(avctx, AV_LOG_ERROR,
1395  "releasing zombie picture\n");
1396  free_frame_buffer(s, &s->picture[i]);
1397  }
1398  }
1399  }
1400  }
1401 
1402  if (!s->encoding) {
1404 
1405  if (s->current_picture_ptr &&
1406  s->current_picture_ptr->f.data[0] == NULL) {
1407  // we already have a unused image
1408  // (maybe it was set before reading the header)
1409  pic = s->current_picture_ptr;
1410  } else {
1411  i = ff_find_unused_picture(s, 0);
1412  if (i < 0) {
1413  av_log(s->avctx, AV_LOG_ERROR, "no frame buffer available\n");
1414  return i;
1415  }
1416  pic = &s->picture[i];
1417  }
1418 
1419  pic->f.reference = 0;
1420  if (!s->droppable) {
1421  if (s->codec_id == AV_CODEC_ID_H264)
1422  pic->f.reference = s->picture_structure;
1423  else if (s->pict_type != AV_PICTURE_TYPE_B)
1424  pic->f.reference = 3;
1425  }
1426 
1428 
1429  if (ff_alloc_picture(s, pic, 0) < 0)
1430  return -1;
1431 
1432  s->current_picture_ptr = pic;
1433  // FIXME use only the vars from current_pic
1435  if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO ||
1437  if (s->picture_structure != PICT_FRAME)
1440  }
1444  }
1445 
1447  // if (s->flags && CODEC_FLAG_QSCALE)
1448  // s->current_picture_ptr->quality = s->new_picture_ptr->quality;
1450 
1452 
1453  if (s->pict_type != AV_PICTURE_TYPE_B) {
1455  if (!s->droppable)
1457  }
1458  av_dlog(s->avctx, "L%p N%p C%p L%p N%p C%p type:%d drop:%d\n",
1463  s->pict_type, s->droppable);
1464 
1465  if (s->codec_id != AV_CODEC_ID_H264) {
1466  if ((s->last_picture_ptr == NULL ||
1467  s->last_picture_ptr->f.data[0] == NULL) &&
1468  (s->pict_type != AV_PICTURE_TYPE_I ||
1469  s->picture_structure != PICT_FRAME)) {
1470  if (s->pict_type != AV_PICTURE_TYPE_I)
1471  av_log(avctx, AV_LOG_ERROR,
1472  "warning: first frame is no keyframe\n");
1473  else if (s->picture_structure != PICT_FRAME)
1474  av_log(avctx, AV_LOG_INFO,
1475  "allocate dummy last picture for field based first keyframe\n");
1476 
1477  /* Allocate a dummy frame */
1478  i = ff_find_unused_picture(s, 0);
1479  if (i < 0) {
1480  av_log(s->avctx, AV_LOG_ERROR, "no frame buffer available\n");
1481  return i;
1482  }
1483  s->last_picture_ptr = &s->picture[i];
1484 
1485  s->last_picture_ptr->f.reference = 3;
1487 
1488  if (ff_alloc_picture(s, s->last_picture_ptr, 0) < 0) {
1489  s->last_picture_ptr = NULL;
1490  return -1;
1491  }
1492  ff_thread_report_progress(&s->last_picture_ptr->f, INT_MAX, 0);
1493  ff_thread_report_progress(&s->last_picture_ptr->f, INT_MAX, 1);
1494  s->last_picture_ptr->f.reference = 3;
1495  }
1496  if ((s->next_picture_ptr == NULL ||
1497  s->next_picture_ptr->f.data[0] == NULL) &&
1498  s->pict_type == AV_PICTURE_TYPE_B) {
1499  /* Allocate a dummy frame */
1500  i = ff_find_unused_picture(s, 0);
1501  if (i < 0) {
1502  av_log(s->avctx, AV_LOG_ERROR, "no frame buffer available\n");
1503  return i;
1504  }
1505  s->next_picture_ptr = &s->picture[i];
1506 
1507  s->next_picture_ptr->f.reference = 3;
1509 
1510  if (ff_alloc_picture(s, s->next_picture_ptr, 0) < 0) {
1511  s->next_picture_ptr = NULL;
1512  return -1;
1513  }
1514  ff_thread_report_progress(&s->next_picture_ptr->f, INT_MAX, 0);
1515  ff_thread_report_progress(&s->next_picture_ptr->f, INT_MAX, 1);
1516  s->next_picture_ptr->f.reference = 3;
1517  }
1518  }
1519 
1520  if (s->last_picture_ptr)
1522  if (s->next_picture_ptr)
1524 
1525  if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_FRAME)) {
1526  if (s->next_picture_ptr)
1527  s->next_picture_ptr->owner2 = s;
1528  if (s->last_picture_ptr)
1529  s->last_picture_ptr->owner2 = s;
1530  }
1531 
1532  if (s->pict_type != AV_PICTURE_TYPE_I &&
1533  !(s->last_picture_ptr && s->last_picture_ptr->f.data[0])) {
1534  av_log(s, AV_LOG_ERROR,
1535  "Non-reference picture received and no reference available\n");
1536  return AVERROR_INVALIDDATA;
1537  }
1538 
1539  if (s->picture_structure!= PICT_FRAME && s->out_format != FMT_H264) {
1540  int i;
1541  for (i = 0; i < 4; i++) {
1543  s->current_picture.f.data[i] +=
1544  s->current_picture.f.linesize[i];
1545  }
1546  s->current_picture.f.linesize[i] *= 2;
1547  s->last_picture.f.linesize[i] *= 2;
1548  s->next_picture.f.linesize[i] *= 2;
1549  }
1550  }
1551 
1552  s->err_recognition = avctx->err_recognition;
1553 
1554  /* set dequantizer, we can't do it during init as
1555  * it might change for mpeg4 and we can't do it in the header
1556  * decode as init is not called for mpeg4 there yet */
1557  if (s->mpeg_quant || s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
1560  } else if (s->out_format == FMT_H263 || s->out_format == FMT_H261) {
1563  } else {
1566  }
1567 
1568  if (s->dct_error_sum) {
1569  assert(s->avctx->noise_reduction && s->encoding);
1571  }
1572 
1574  return ff_xvmc_field_start(s, avctx);
1575 
1576  return 0;
1577 }
1578 
1579 /* generic function for encode/decode called after a
1580  * frame has been coded/decoded. */
1582 {
1583  int i;
1584  /* redraw edges for the frame if decoding didn't complete */
1585  // just to make sure that all data is rendered.
1587  ff_xvmc_field_end(s);
1588  } else if ((s->error_count || s->encoding) &&
1589  !s->avctx->hwaccel &&
1591  s->unrestricted_mv &&
1593  !s->intra_only &&
1594  !(s->flags & CODEC_FLAG_EMU_EDGE)) {
1596  int hshift = desc->log2_chroma_w;
1597  int vshift = desc->log2_chroma_h;
1599  s->h_edge_pos, s->v_edge_pos,
1601  EDGE_TOP | EDGE_BOTTOM);
1603  s->h_edge_pos >> hshift, s->v_edge_pos >> vshift,
1604  EDGE_WIDTH >> hshift, EDGE_WIDTH >> vshift,
1605  EDGE_TOP | EDGE_BOTTOM);
1607  s->h_edge_pos >> hshift, s->v_edge_pos >> vshift,
1608  EDGE_WIDTH >> hshift, EDGE_WIDTH >> vshift,
1609  EDGE_TOP | EDGE_BOTTOM);
1610  }
1611 
1612  emms_c();
1613 
1614  s->last_pict_type = s->pict_type;
1616  if (s->pict_type!= AV_PICTURE_TYPE_B) {
1618  }
1619 #if 0
1620  /* copy back current_picture variables */
1621  for (i = 0; i < MAX_PICTURE_COUNT; i++) {
1622  if (s->picture[i].f.data[0] == s->current_picture.f.data[0]) {
1623  s->picture[i] = s->current_picture;
1624  break;
1625  }
1626  }
1627  assert(i < MAX_PICTURE_COUNT);
1628 #endif
1629 
1630  if (s->encoding) {
1631  /* release non-reference frames */
1632  for (i = 0; i < s->picture_count; i++) {
1633  if (s->picture[i].f.data[0] && !s->picture[i].f.reference
1634  /* && s->picture[i].type != FF_BUFFER_TYPE_SHARED */) {
1635  free_frame_buffer(s, &s->picture[i]);
1636  }
1637  }
1638  }
1639  // clear copies, to avoid confusion
1640 #if 0
1641  memset(&s->last_picture, 0, sizeof(Picture));
1642  memset(&s->next_picture, 0, sizeof(Picture));
1643  memset(&s->current_picture, 0, sizeof(Picture));
1644 #endif
1646 
1649  }
1650 }
1651 
1659 static void draw_line(uint8_t *buf, int sx, int sy, int ex, int ey,
1660  int w, int h, int stride, int color)
1661 {
1662  int x, y, fr, f;
1663 
1664  sx = av_clip(sx, 0, w - 1);
1665  sy = av_clip(sy, 0, h - 1);
1666  ex = av_clip(ex, 0, w - 1);
1667  ey = av_clip(ey, 0, h - 1);
1668 
1669  buf[sy * stride + sx] += color;
1670 
1671  if (FFABS(ex - sx) > FFABS(ey - sy)) {
1672  if (sx > ex) {
1673  FFSWAP(int, sx, ex);
1674  FFSWAP(int, sy, ey);
1675  }
1676  buf += sx + sy * stride;
1677  ex -= sx;
1678  f = ((ey - sy) << 16) / ex;
1679  for (x = 0; x <= ex; x++) {
1680  y = (x * f) >> 16;
1681  fr = (x * f) & 0xFFFF;
1682  buf[y * stride + x] += (color * (0x10000 - fr)) >> 16;
1683  buf[(y + 1) * stride + x] += (color * fr ) >> 16;
1684  }
1685  } else {
1686  if (sy > ey) {
1687  FFSWAP(int, sx, ex);
1688  FFSWAP(int, sy, ey);
1689  }
1690  buf += sx + sy * stride;
1691  ey -= sy;
1692  if (ey)
1693  f = ((ex - sx) << 16) / ey;
1694  else
1695  f = 0;
1696  for (y = 0; y = ey; y++) {
1697  x = (y * f) >> 16;
1698  fr = (y * f) & 0xFFFF;
1699  buf[y * stride + x] += (color * (0x10000 - fr)) >> 16;
1700  buf[y * stride + x + 1] += (color * fr ) >> 16;
1701  }
1702  }
1703 }
1704 
/**
 * Draw an arrow from (sx, sy) to (ex, ey): the shaft plus, when the arrow
 * is long enough, two short head strokes at the start point.
 *
 * @param buf    destination plane (one byte per pixel)
 * @param w,h    image dimensions
 * @param stride line size of buf in bytes
 * @param color  value added to the pixels (0..255 expected)
 */
static void draw_arrow(uint8_t *buf, int sx, int sy, int ex,
                       int ey, int w, int h, int stride, int color)
{
    int dx, dy;

    /* loosely clamp the endpoints; draw_line() does the strict clipping */
    sx = av_clip(sx, -100, w + 100);
    sy = av_clip(sy, -100, h + 100);
    ex = av_clip(ex, -100, w + 100);
    ey = av_clip(ey, -100, h + 100);

    dx = ex - sx;
    dy = ey - sy;

    /* only arrows longer than 3 pixels get a head */
    if (dx * dx + dy * dy > 3 * 3) {
        int rx     = dx + dy;          /* shaft rotated by +/-45 degrees */
        int ry     = dy - dx;
        int length = ff_sqrt((rx * rx + ry * ry) << 8);

        // FIXME subpixel accuracy
        rx = ROUNDED_DIV(rx * 3 << 4, length);
        ry = ROUNDED_DIV(ry * 3 << 4, length);

        draw_line(buf, sx, sy, sx + rx, sy + ry, w, h, stride, color);
        draw_line(buf, sx, sy, sx - ry, sy + rx, w, h, stride, color);
    }
    draw_line(buf, sx, sy, ex, ey, w, h, stride, color);
}
1739 
1744 {
1745  if (s->avctx->hwaccel || !pict || !pict->mb_type)
1746  return;
1747 
1749  int x,y;
1750 
1751  av_log(s->avctx,AV_LOG_DEBUG,"New frame, type: ");
1752  switch (pict->pict_type) {
1753  case AV_PICTURE_TYPE_I:
1754  av_log(s->avctx,AV_LOG_DEBUG,"I\n");
1755  break;
1756  case AV_PICTURE_TYPE_P:
1757  av_log(s->avctx,AV_LOG_DEBUG,"P\n");
1758  break;
1759  case AV_PICTURE_TYPE_B:
1760  av_log(s->avctx,AV_LOG_DEBUG,"B\n");
1761  break;
1762  case AV_PICTURE_TYPE_S:
1763  av_log(s->avctx,AV_LOG_DEBUG,"S\n");
1764  break;
1765  case AV_PICTURE_TYPE_SI:
1766  av_log(s->avctx,AV_LOG_DEBUG,"SI\n");
1767  break;
1768  case AV_PICTURE_TYPE_SP:
1769  av_log(s->avctx,AV_LOG_DEBUG,"SP\n");
1770  break;
1771  }
1772  for (y = 0; y < s->mb_height; y++) {
1773  for (x = 0; x < s->mb_width; x++) {
1774  if (s->avctx->debug & FF_DEBUG_SKIP) {
1775  int count = s->mbskip_table[x + y * s->mb_stride];
1776  if (count > 9)
1777  count = 9;
1778  av_log(s->avctx, AV_LOG_DEBUG, "%1d", count);
1779  }
1780  if (s->avctx->debug & FF_DEBUG_QP) {
1781  av_log(s->avctx, AV_LOG_DEBUG, "%2d",
1782  pict->qscale_table[x + y * s->mb_stride]);
1783  }
1784  if (s->avctx->debug & FF_DEBUG_MB_TYPE) {
1785  int mb_type = pict->mb_type[x + y * s->mb_stride];
1786  // Type & MV direction
1787  if (IS_PCM(mb_type))
1788  av_log(s->avctx, AV_LOG_DEBUG, "P");
1789  else if (IS_INTRA(mb_type) && IS_ACPRED(mb_type))
1790  av_log(s->avctx, AV_LOG_DEBUG, "A");
1791  else if (IS_INTRA4x4(mb_type))
1792  av_log(s->avctx, AV_LOG_DEBUG, "i");
1793  else if (IS_INTRA16x16(mb_type))
1794  av_log(s->avctx, AV_LOG_DEBUG, "I");
1795  else if (IS_DIRECT(mb_type) && IS_SKIP(mb_type))
1796  av_log(s->avctx, AV_LOG_DEBUG, "d");
1797  else if (IS_DIRECT(mb_type))
1798  av_log(s->avctx, AV_LOG_DEBUG, "D");
1799  else if (IS_GMC(mb_type) && IS_SKIP(mb_type))
1800  av_log(s->avctx, AV_LOG_DEBUG, "g");
1801  else if (IS_GMC(mb_type))
1802  av_log(s->avctx, AV_LOG_DEBUG, "G");
1803  else if (IS_SKIP(mb_type))
1804  av_log(s->avctx, AV_LOG_DEBUG, "S");
1805  else if (!USES_LIST(mb_type, 1))
1806  av_log(s->avctx, AV_LOG_DEBUG, ">");
1807  else if (!USES_LIST(mb_type, 0))
1808  av_log(s->avctx, AV_LOG_DEBUG, "<");
1809  else {
1810  assert(USES_LIST(mb_type, 0) && USES_LIST(mb_type, 1));
1811  av_log(s->avctx, AV_LOG_DEBUG, "X");
1812  }
1813 
1814  // segmentation
1815  if (IS_8X8(mb_type))
1816  av_log(s->avctx, AV_LOG_DEBUG, "+");
1817  else if (IS_16X8(mb_type))
1818  av_log(s->avctx, AV_LOG_DEBUG, "-");
1819  else if (IS_8X16(mb_type))
1820  av_log(s->avctx, AV_LOG_DEBUG, "|");
1821  else if (IS_INTRA(mb_type) || IS_16X16(mb_type))
1822  av_log(s->avctx, AV_LOG_DEBUG, " ");
1823  else
1824  av_log(s->avctx, AV_LOG_DEBUG, "?");
1825 
1826 
1827  if (IS_INTERLACED(mb_type))
1828  av_log(s->avctx, AV_LOG_DEBUG, "=");
1829  else
1830  av_log(s->avctx, AV_LOG_DEBUG, " ");
1831  }
1832  }
1833  av_log(s->avctx, AV_LOG_DEBUG, "\n");
1834  }
1835  }
1836 
1837  if ((s->avctx->debug & (FF_DEBUG_VIS_QP | FF_DEBUG_VIS_MB_TYPE)) ||
1838  (s->avctx->debug_mv)) {
1839  const int shift = 1 + s->quarter_sample;
1840  int mb_y;
1841  uint8_t *ptr;
1842  int i;
1843  int h_chroma_shift, v_chroma_shift, block_height;
1844  const int width = s->avctx->width;
1845  const int height = s->avctx->height;
1846  const int mv_sample_log2 = 4 - pict->motion_subsample_log2;
1847  const int mv_stride = (s->mb_width << mv_sample_log2) +
1848  (s->codec_id == AV_CODEC_ID_H264 ? 0 : 1);
1849  s->low_delay = 0; // needed to see the vectors without trashing the buffers
1850 
1852  &h_chroma_shift, &v_chroma_shift);
1853  for (i = 0; i < 3; i++) {
1854  memcpy(s->visualization_buffer[i], pict->data[i],
1855  (i == 0) ? pict->linesize[i] * height:
1856  pict->linesize[i] * height >> v_chroma_shift);
1857  pict->data[i] = s->visualization_buffer[i];
1858  }
1859  pict->type = FF_BUFFER_TYPE_COPY;
1860  ptr = pict->data[0];
1861  block_height = 16 >> v_chroma_shift;
1862 
1863  for (mb_y = 0; mb_y < s->mb_height; mb_y++) {
1864  int mb_x;
1865  for (mb_x = 0; mb_x < s->mb_width; mb_x++) {
1866  const int mb_index = mb_x + mb_y * s->mb_stride;
1867  if ((s->avctx->debug_mv) && pict->motion_val) {
1868  int type;
1869  for (type = 0; type < 3; type++) {
1870  int direction = 0;
1871  switch (type) {
1872  case 0:
1873  if ((!(s->avctx->debug_mv & FF_DEBUG_VIS_MV_P_FOR)) ||
1874  (pict->pict_type!= AV_PICTURE_TYPE_P))
1875  continue;
1876  direction = 0;
1877  break;
1878  case 1:
1879  if ((!(s->avctx->debug_mv & FF_DEBUG_VIS_MV_B_FOR)) ||
1880  (pict->pict_type!= AV_PICTURE_TYPE_B))
1881  continue;
1882  direction = 0;
1883  break;
1884  case 2:
1885  if ((!(s->avctx->debug_mv & FF_DEBUG_VIS_MV_B_BACK)) ||
1886  (pict->pict_type!= AV_PICTURE_TYPE_B))
1887  continue;
1888  direction = 1;
1889  break;
1890  }
1891  if (!USES_LIST(pict->mb_type[mb_index], direction))
1892  continue;
1893 
1894  if (IS_8X8(pict->mb_type[mb_index])) {
1895  int i;
1896  for (i = 0; i < 4; i++) {
1897  int sx = mb_x * 16 + 4 + 8 * (i & 1);
1898  int sy = mb_y * 16 + 4 + 8 * (i >> 1);
1899  int xy = (mb_x * 2 + (i & 1) +
1900  (mb_y * 2 + (i >> 1)) * mv_stride) << (mv_sample_log2 - 1);
1901  int mx = (pict->motion_val[direction][xy][0] >> shift) + sx;
1902  int my = (pict->motion_val[direction][xy][1] >> shift) + sy;
1903  draw_arrow(ptr, sx, sy, mx, my, width,
1904  height, s->linesize, 100);
1905  }
1906  } else if (IS_16X8(pict->mb_type[mb_index])) {
1907  int i;
1908  for (i = 0; i < 2; i++) {
1909  int sx = mb_x * 16 + 8;
1910  int sy = mb_y * 16 + 4 + 8 * i;
1911  int xy = (mb_x * 2 + (mb_y * 2 + i) * mv_stride) << (mv_sample_log2 - 1);
1912  int mx = (pict->motion_val[direction][xy][0] >> shift);
1913  int my = (pict->motion_val[direction][xy][1] >> shift);
1914 
1915  if (IS_INTERLACED(pict->mb_type[mb_index]))
1916  my *= 2;
1917 
1918  draw_arrow(ptr, sx, sy, mx + sx, my + sy, width,
1919  height, s->linesize, 100);
1920  }
1921  } else if (IS_8X16(pict->mb_type[mb_index])) {
1922  int i;
1923  for (i = 0; i < 2; i++) {
1924  int sx = mb_x * 16 + 4 + 8 * i;
1925  int sy = mb_y * 16 + 8;
1926  int xy = (mb_x * 2 + i + mb_y * 2 * mv_stride) << (mv_sample_log2 - 1);
1927  int mx = pict->motion_val[direction][xy][0] >> shift;
1928  int my = pict->motion_val[direction][xy][1] >> shift;
1929 
1930  if (IS_INTERLACED(pict->mb_type[mb_index]))
1931  my *= 2;
1932 
1933  draw_arrow(ptr, sx, sy, mx + sx, my + sy, width,
1934  height, s->linesize, 100);
1935  }
1936  } else {
1937  int sx = mb_x * 16 + 8;
1938  int sy = mb_y * 16 + 8;
1939  int xy = (mb_x + mb_y * mv_stride) << mv_sample_log2;
1940  int mx = pict->motion_val[direction][xy][0] >> shift + sx;
1941  int my = pict->motion_val[direction][xy][1] >> shift + sy;
1942  draw_arrow(ptr, sx, sy, mx, my, width, height, s->linesize, 100);
1943  }
1944  }
1945  }
1946  if ((s->avctx->debug & FF_DEBUG_VIS_QP) && pict->motion_val) {
1947  uint64_t c = (pict->qscale_table[mb_index] * 128 / 31) *
1948  0x0101010101010101ULL;
1949  int y;
1950  for (y = 0; y < block_height; y++) {
1951  *(uint64_t *)(pict->data[1] + 8 * mb_x +
1952  (block_height * mb_y + y) *
1953  pict->linesize[1]) = c;
1954  *(uint64_t *)(pict->data[2] + 8 * mb_x +
1955  (block_height * mb_y + y) *
1956  pict->linesize[2]) = c;
1957  }
1958  }
1959  if ((s->avctx->debug & FF_DEBUG_VIS_MB_TYPE) &&
1960  pict->motion_val) {
1961  int mb_type = pict->mb_type[mb_index];
1962  uint64_t u,v;
1963  int y;
1964 #define COLOR(theta, r) \
1965  u = (int)(128 + r * cos(theta * 3.141592 / 180)); \
1966  v = (int)(128 + r * sin(theta * 3.141592 / 180));
1967 
1968 
1969  u = v = 128;
1970  if (IS_PCM(mb_type)) {
1971  COLOR(120, 48)
1972  } else if ((IS_INTRA(mb_type) && IS_ACPRED(mb_type)) ||
1973  IS_INTRA16x16(mb_type)) {
1974  COLOR(30, 48)
1975  } else if (IS_INTRA4x4(mb_type)) {
1976  COLOR(90, 48)
1977  } else if (IS_DIRECT(mb_type) && IS_SKIP(mb_type)) {
1978  // COLOR(120, 48)
1979  } else if (IS_DIRECT(mb_type)) {
1980  COLOR(150, 48)
1981  } else if (IS_GMC(mb_type) && IS_SKIP(mb_type)) {
1982  COLOR(170, 48)
1983  } else if (IS_GMC(mb_type)) {
1984  COLOR(190, 48)
1985  } else if (IS_SKIP(mb_type)) {
1986  // COLOR(180, 48)
1987  } else if (!USES_LIST(mb_type, 1)) {
1988  COLOR(240, 48)
1989  } else if (!USES_LIST(mb_type, 0)) {
1990  COLOR(0, 48)
1991  } else {
1992  assert(USES_LIST(mb_type, 0) && USES_LIST(mb_type, 1));
1993  COLOR(300,48)
1994  }
1995 
1996  u *= 0x0101010101010101ULL;
1997  v *= 0x0101010101010101ULL;
1998  for (y = 0; y < block_height; y++) {
1999  *(uint64_t *)(pict->data[1] + 8 * mb_x +
2000  (block_height * mb_y + y) * pict->linesize[1]) = u;
2001  *(uint64_t *)(pict->data[2] + 8 * mb_x +
2002  (block_height * mb_y + y) * pict->linesize[2]) = v;
2003  }
2004 
2005  // segmentation
2006  if (IS_8X8(mb_type) || IS_16X8(mb_type)) {
2007  *(uint64_t *)(pict->data[0] + 16 * mb_x + 0 +
2008  (16 * mb_y + 8) * pict->linesize[0]) ^= 0x8080808080808080ULL;
2009  *(uint64_t *)(pict->data[0] + 16 * mb_x + 8 +
2010  (16 * mb_y + 8) * pict->linesize[0]) ^= 0x8080808080808080ULL;
2011  }
2012  if (IS_8X8(mb_type) || IS_8X16(mb_type)) {
2013  for (y = 0; y < 16; y++)
2014  pict->data[0][16 * mb_x + 8 + (16 * mb_y + y) *
2015  pict->linesize[0]] ^= 0x80;
2016  }
2017  if (IS_8X8(mb_type) && mv_sample_log2 >= 2) {
2018  int dm = 1 << (mv_sample_log2 - 2);
2019  for (i = 0; i < 4; i++) {
2020  int sx = mb_x * 16 + 8 * (i & 1);
2021  int sy = mb_y * 16 + 8 * (i >> 1);
2022  int xy = (mb_x * 2 + (i & 1) +
2023  (mb_y * 2 + (i >> 1)) * mv_stride) << (mv_sample_log2 - 1);
2024  // FIXME bidir
2025  int32_t *mv = (int32_t *) &pict->motion_val[0][xy];
2026  if (mv[0] != mv[dm] ||
2027  mv[dm * mv_stride] != mv[dm * (mv_stride + 1)])
2028  for (y = 0; y < 8; y++)
2029  pict->data[0][sx + 4 + (sy + y) * pict->linesize[0]] ^= 0x80;
2030  if (mv[0] != mv[dm * mv_stride] || mv[dm] != mv[dm * (mv_stride + 1)])
2031  *(uint64_t *)(pict->data[0] + sx + (sy + 4) *
2032  pict->linesize[0]) ^= 0x8080808080808080ULL;
2033  }
2034  }
2035 
2036  if (IS_INTERLACED(mb_type) &&
2037  s->codec_id == AV_CODEC_ID_H264) {
2038  // hmm
2039  }
2040  }
2041  s->mbskip_table[mb_index] = 0;
2042  }
2043  }
2044  }
2045 }
2046 
2051 {
2052  int my_max = INT_MIN, my_min = INT_MAX, qpel_shift = !s->quarter_sample;
2053  int my, off, i, mvs;
2054 
2055  if (s->picture_structure != PICT_FRAME || s->mcsel)
2056  goto unhandled;
2057 
2058  switch (s->mv_type) {
2059  case MV_TYPE_16X16:
2060  mvs = 1;
2061  break;
2062  case MV_TYPE_16X8:
2063  mvs = 2;
2064  break;
2065  case MV_TYPE_8X8:
2066  mvs = 4;
2067  break;
2068  default:
2069  goto unhandled;
2070  }
2071 
2072  for (i = 0; i < mvs; i++) {
2073  my = s->mv[dir][i][1]<<qpel_shift;
2074  my_max = FFMAX(my_max, my);
2075  my_min = FFMIN(my_min, my);
2076  }
2077 
2078  off = (FFMAX(-my_min, my_max) + 63) >> 6;
2079 
2080  return FFMIN(FFMAX(s->mb_y + off, 0), s->mb_height-1);
2081 unhandled:
2082  return s->mb_height-1;
2083 }
2084 
2085 /* put block[] to dest[] */
2086 static inline void put_dct(MpegEncContext *s,
2087  DCTELEM *block, int i, uint8_t *dest, int line_size, int qscale)
2088 {
2089  s->dct_unquantize_intra(s, block, i, qscale);
2090  s->dsp.idct_put (dest, line_size, block);
2091 }
2092 
2093 /* add block[] to dest[] */
2094 static inline void add_dct(MpegEncContext *s,
2095  DCTELEM *block, int i, uint8_t *dest, int line_size)
2096 {
2097  if (s->block_last_index[i] >= 0) {
2098  s->dsp.idct_add (dest, line_size, block);
2099  }
2100 }
2101 
2102 static inline void add_dequant_dct(MpegEncContext *s,
2103  DCTELEM *block, int i, uint8_t *dest, int line_size, int qscale)
2104 {
2105  if (s->block_last_index[i] >= 0) {
2106  s->dct_unquantize_inter(s, block, i, qscale);
2107 
2108  s->dsp.idct_add (dest, line_size, block);
2109  }
2110 }
2111 
/* NOTE(review): the signature and doc lines (orig. 2112-2115) are missing
 * from this extract; from the body this is the "clean intra table entries"
 * helper taking MpegEncContext *s. For the current macroblock it resets
 * the luma and chroma DC predictors to the neutral value 1024, clears the
 * AC prediction rows/columns, clears the coded-block flags for msmpeg4
 * version >= 3, and marks the MB as non-intra in mbintra_table. Confirm
 * the signature against the original file. */
2116 {
2117  int wrap = s->b8_stride;
2118  int xy = s->block_index[0];
2119 
/* luma: the four 8x8 blocks of this MB */
2120  s->dc_val[0][xy ] =
2121  s->dc_val[0][xy + 1 ] =
2122  s->dc_val[0][xy + wrap] =
2123  s->dc_val[0][xy + 1 + wrap] = 1024;
2124  /* ac pred */
2125  memset(s->ac_val[0][xy ], 0, 32 * sizeof(int16_t));
2126  memset(s->ac_val[0][xy + wrap], 0, 32 * sizeof(int16_t));
2127  if (s->msmpeg4_version>=3) {
2128  s->coded_block[xy ] =
2129  s->coded_block[xy + 1 ] =
2130  s->coded_block[xy + wrap] =
2131  s->coded_block[xy + 1 + wrap] = 0;
2132  }
2133  /* chroma */
2134  wrap = s->mb_stride;
2135  xy = s->mb_x + s->mb_y * wrap;
2136  s->dc_val[1][xy] =
2137  s->dc_val[2][xy] = 1024;
2138  /* ac pred */
2139  memset(s->ac_val[1][xy], 0, 16 * sizeof(int16_t));
2140  memset(s->ac_val[2][xy], 0, 16 * sizeof(int16_t));
2141 
2142  s->mbintra_table[xy]= 0;
2143 }
2144 
2145 /* generic function called after a macroblock has been parsed by the
2146  decoder or after it has been encoded by the encoder.
2147 
2148  Important variables used:
2149  s->mb_intra : true if intra macroblock
2150  s->mv_dir : motion vector direction
2151  s->mv_type : motion vector type
2152  s->mv : motion vector
2153  s->interlaced_dct : true if interlaced dct used (mpeg2)
2154  */
2155 static av_always_inline
2157  int is_mpeg12)
2158 {
2159  const int mb_xy = s->mb_y * s->mb_stride + s->mb_x;
2161  ff_xvmc_decode_mb(s);//xvmc uses pblocks
2162  return;
2163  }
2164 
2165  if(s->avctx->debug&FF_DEBUG_DCT_COEFF) {
2166  /* save DCT coefficients */
2167  int i,j;
2168  DCTELEM *dct = &s->current_picture.f.dct_coeff[mb_xy * 64 * 6];
2169  av_log(s->avctx, AV_LOG_DEBUG, "DCT coeffs of MB at %dx%d:\n", s->mb_x, s->mb_y);
2170  for(i=0; i<6; i++){
2171  for(j=0; j<64; j++){
2172  *dct++ = block[i][s->dsp.idct_permutation[j]];
2173  av_log(s->avctx, AV_LOG_DEBUG, "%5d", dct[-1]);
2174  }
2175  av_log(s->avctx, AV_LOG_DEBUG, "\n");
2176  }
2177  }
2178 
2179  s->current_picture.f.qscale_table[mb_xy] = s->qscale;
2180 
2181  /* update DC predictors for P macroblocks */
2182  if (!s->mb_intra) {
2183  if (!is_mpeg12 && (s->h263_pred || s->h263_aic)) {
2184  if(s->mbintra_table[mb_xy])
2186  } else {
2187  s->last_dc[0] =
2188  s->last_dc[1] =
2189  s->last_dc[2] = 128 << s->intra_dc_precision;
2190  }
2191  }
2192  else if (!is_mpeg12 && (s->h263_pred || s->h263_aic))
2193  s->mbintra_table[mb_xy]=1;
2194 
2195  if ((s->flags&CODEC_FLAG_PSNR) || !(s->encoding && (s->intra_only || s->pict_type==AV_PICTURE_TYPE_B) && s->avctx->mb_decision != FF_MB_DECISION_RD)) { //FIXME precalc
2196  uint8_t *dest_y, *dest_cb, *dest_cr;
2197  int dct_linesize, dct_offset;
2198  op_pixels_func (*op_pix)[4];
2199  qpel_mc_func (*op_qpix)[16];
2200  const int linesize = s->current_picture.f.linesize[0]; //not s->linesize as this would be wrong for field pics
2201  const int uvlinesize = s->current_picture.f.linesize[1];
2202  const int readable= s->pict_type != AV_PICTURE_TYPE_B || s->encoding || s->avctx->draw_horiz_band;
2203  const int block_size = 8;
2204 
2205  /* avoid copy if macroblock skipped in last frame too */
2206  /* skip only during decoding as we might trash the buffers during encoding a bit */
2207  if(!s->encoding){
2208  uint8_t *mbskip_ptr = &s->mbskip_table[mb_xy];
2209 
2210  if (s->mb_skipped) {
2211  s->mb_skipped= 0;
2212  assert(s->pict_type!=AV_PICTURE_TYPE_I);
2213  *mbskip_ptr = 1;
2214  } else if(!s->current_picture.f.reference) {
2215  *mbskip_ptr = 1;
2216  } else{
2217  *mbskip_ptr = 0; /* not skipped */
2218  }
2219  }
2220 
2221  dct_linesize = linesize << s->interlaced_dct;
2222  dct_offset = s->interlaced_dct ? linesize : linesize * block_size;
2223 
2224  if(readable){
2225  dest_y= s->dest[0];
2226  dest_cb= s->dest[1];
2227  dest_cr= s->dest[2];
2228  }else{
2229  dest_y = s->b_scratchpad;
2230  dest_cb= s->b_scratchpad+16*linesize;
2231  dest_cr= s->b_scratchpad+32*linesize;
2232  }
2233 
2234  if (!s->mb_intra) {
2235  /* motion handling */
2236  /* decoding or more than one mb_type (MC was already done otherwise) */
2237  if(!s->encoding){
2238 
2240  if (s->mv_dir & MV_DIR_FORWARD) {
2243  0);
2244  }
2245  if (s->mv_dir & MV_DIR_BACKWARD) {
2248  0);
2249  }
2250  }
2251 
2252  op_qpix= s->me.qpel_put;
2253  if ((!s->no_rounding) || s->pict_type==AV_PICTURE_TYPE_B){
2254  op_pix = s->dsp.put_pixels_tab;
2255  }else{
2256  op_pix = s->dsp.put_no_rnd_pixels_tab;
2257  }
2258  if (s->mv_dir & MV_DIR_FORWARD) {
2259  ff_MPV_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_picture.f.data, op_pix, op_qpix);
2260  op_pix = s->dsp.avg_pixels_tab;
2261  op_qpix= s->me.qpel_avg;
2262  }
2263  if (s->mv_dir & MV_DIR_BACKWARD) {
2264  ff_MPV_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_picture.f.data, op_pix, op_qpix);
2265  }
2266  }
2267 
2268  /* skip dequant / idct if we are really late ;) */
2269  if(s->avctx->skip_idct){
2272  || s->avctx->skip_idct >= AVDISCARD_ALL)
2273  goto skip_idct;
2274  }
2275 
2276  /* add dct residue */
2278  || (s->codec_id==AV_CODEC_ID_MPEG4 && !s->mpeg_quant))){
2279  add_dequant_dct(s, block[0], 0, dest_y , dct_linesize, s->qscale);
2280  add_dequant_dct(s, block[1], 1, dest_y + block_size, dct_linesize, s->qscale);
2281  add_dequant_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize, s->qscale);
2282  add_dequant_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize, s->qscale);
2283 
2284  if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2285  if (s->chroma_y_shift){
2286  add_dequant_dct(s, block[4], 4, dest_cb, uvlinesize, s->chroma_qscale);
2287  add_dequant_dct(s, block[5], 5, dest_cr, uvlinesize, s->chroma_qscale);
2288  }else{
2289  dct_linesize >>= 1;
2290  dct_offset >>=1;
2291  add_dequant_dct(s, block[4], 4, dest_cb, dct_linesize, s->chroma_qscale);
2292  add_dequant_dct(s, block[5], 5, dest_cr, dct_linesize, s->chroma_qscale);
2293  add_dequant_dct(s, block[6], 6, dest_cb + dct_offset, dct_linesize, s->chroma_qscale);
2294  add_dequant_dct(s, block[7], 7, dest_cr + dct_offset, dct_linesize, s->chroma_qscale);
2295  }
2296  }
2297  } else if(is_mpeg12 || (s->codec_id != AV_CODEC_ID_WMV2)){
2298  add_dct(s, block[0], 0, dest_y , dct_linesize);
2299  add_dct(s, block[1], 1, dest_y + block_size, dct_linesize);
2300  add_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize);
2301  add_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize);
2302 
2303  if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2304  if(s->chroma_y_shift){//Chroma420
2305  add_dct(s, block[4], 4, dest_cb, uvlinesize);
2306  add_dct(s, block[5], 5, dest_cr, uvlinesize);
2307  }else{
2308  //chroma422
2309  dct_linesize = uvlinesize << s->interlaced_dct;
2310  dct_offset = s->interlaced_dct ? uvlinesize : uvlinesize * 8;
2311 
2312  add_dct(s, block[4], 4, dest_cb, dct_linesize);
2313  add_dct(s, block[5], 5, dest_cr, dct_linesize);
2314  add_dct(s, block[6], 6, dest_cb+dct_offset, dct_linesize);
2315  add_dct(s, block[7], 7, dest_cr+dct_offset, dct_linesize);
2316  if(!s->chroma_x_shift){//Chroma444
2317  add_dct(s, block[8], 8, dest_cb+8, dct_linesize);
2318  add_dct(s, block[9], 9, dest_cr+8, dct_linesize);
2319  add_dct(s, block[10], 10, dest_cb+8+dct_offset, dct_linesize);
2320  add_dct(s, block[11], 11, dest_cr+8+dct_offset, dct_linesize);
2321  }
2322  }
2323  }//fi gray
2324  }
2326  ff_wmv2_add_mb(s, block, dest_y, dest_cb, dest_cr);
2327  }
2328  } else {
2329  /* dct only in intra block */
2331  put_dct(s, block[0], 0, dest_y , dct_linesize, s->qscale);
2332  put_dct(s, block[1], 1, dest_y + block_size, dct_linesize, s->qscale);
2333  put_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize, s->qscale);
2334  put_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize, s->qscale);
2335 
2336  if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2337  if(s->chroma_y_shift){
2338  put_dct(s, block[4], 4, dest_cb, uvlinesize, s->chroma_qscale);
2339  put_dct(s, block[5], 5, dest_cr, uvlinesize, s->chroma_qscale);
2340  }else{
2341  dct_offset >>=1;
2342  dct_linesize >>=1;
2343  put_dct(s, block[4], 4, dest_cb, dct_linesize, s->chroma_qscale);
2344  put_dct(s, block[5], 5, dest_cr, dct_linesize, s->chroma_qscale);
2345  put_dct(s, block[6], 6, dest_cb + dct_offset, dct_linesize, s->chroma_qscale);
2346  put_dct(s, block[7], 7, dest_cr + dct_offset, dct_linesize, s->chroma_qscale);
2347  }
2348  }
2349  }else{
2350  s->dsp.idct_put(dest_y , dct_linesize, block[0]);
2351  s->dsp.idct_put(dest_y + block_size, dct_linesize, block[1]);
2352  s->dsp.idct_put(dest_y + dct_offset , dct_linesize, block[2]);
2353  s->dsp.idct_put(dest_y + dct_offset + block_size, dct_linesize, block[3]);
2354 
2355  if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2356  if(s->chroma_y_shift){
2357  s->dsp.idct_put(dest_cb, uvlinesize, block[4]);
2358  s->dsp.idct_put(dest_cr, uvlinesize, block[5]);
2359  }else{
2360 
2361  dct_linesize = uvlinesize << s->interlaced_dct;
2362  dct_offset = s->interlaced_dct ? uvlinesize : uvlinesize * 8;
2363 
2364  s->dsp.idct_put(dest_cb, dct_linesize, block[4]);
2365  s->dsp.idct_put(dest_cr, dct_linesize, block[5]);
2366  s->dsp.idct_put(dest_cb + dct_offset, dct_linesize, block[6]);
2367  s->dsp.idct_put(dest_cr + dct_offset, dct_linesize, block[7]);
2368  if(!s->chroma_x_shift){//Chroma444
2369  s->dsp.idct_put(dest_cb + 8, dct_linesize, block[8]);
2370  s->dsp.idct_put(dest_cr + 8, dct_linesize, block[9]);
2371  s->dsp.idct_put(dest_cb + 8 + dct_offset, dct_linesize, block[10]);
2372  s->dsp.idct_put(dest_cr + 8 + dct_offset, dct_linesize, block[11]);
2373  }
2374  }
2375  }//gray
2376  }
2377  }
2378 skip_idct:
2379  if(!readable){
2380  s->dsp.put_pixels_tab[0][0](s->dest[0], dest_y , linesize,16);
2381  s->dsp.put_pixels_tab[s->chroma_x_shift][0](s->dest[1], dest_cb, uvlinesize,16 >> s->chroma_y_shift);
2382  s->dsp.put_pixels_tab[s->chroma_x_shift][0](s->dest[2], dest_cr, uvlinesize,16 >> s->chroma_y_shift);
2383  }
2384  }
2385 }
2386 
// Dispatch wrapper for macroblock decoding.
// NOTE(review): the signature line (original line 2387) is elided from this
// listing; the Doxygen index names it ff_MPV_decode_mb(MpegEncContext *s,
// DCTELEM block[12][64]) — confirm against the repository source.
// When not built for minimal size, FMT_MPEG1 input takes a specialized
// MPV_decode_mb_internal(..., is_mpeg12=1) path; everything else (and all
// CONFIG_SMALL builds) uses the generic is_mpeg12=0 path.
2388 #if !CONFIG_SMALL
2389  if(s->out_format == FMT_MPEG1) {
2390  MPV_decode_mb_internal(s, block, 1);
2391  } else
2392 #endif
2393  MPV_decode_mb_internal(s, block, 0);
2394 }
2395 
// Report that a horizontal band of the picture has been decoded:
// replicate the picture edges for unrestricted motion vectors, then invoke
// the user-supplied draw_horiz_band callback (if any).
// y/h are in frame lines; for field pictures they are doubled below.
2399 void ff_draw_horiz_band(MpegEncContext *s, int y, int h){
2400  const int field_pic= s->picture_structure != PICT_FRAME;
2401  if(field_pic){
2402  h <<= 1;
2403  y <<= 1;
2404  }
2405 
// NOTE(review): parts of this condition (original lines 2407 and 2409) are
// elided from this listing — confirm the full predicate in the repository.
2406  if (!s->avctx->hwaccel
2408  && s->unrestricted_mv
2410  && !s->intra_only
2411  && !(s->flags&CODEC_FLAG_EMU_EDGE)) {
// NOTE(review): the declaration of `desc` (original line 2412, presumably
// av_pix_fmt_desc_get() on the current pixel format) is elided here.
2413  int sides = 0, edge_h;
2414  int hshift = desc->log2_chroma_w;
2415  int vshift = desc->log2_chroma_h;
2416  if (y==0) sides |= EDGE_TOP;
2417  if (y + h >= s->v_edge_pos) sides |= EDGE_BOTTOM;
2418 
// Clamp the band height so edge drawing never runs past the coded area.
2419  edge_h= FFMIN(h, s->v_edge_pos - y);
2420 
// Replicate edges for luma (plane 0) and both chroma planes, with the
// chroma extents shifted by the subsampling factors.
2421  s->dsp.draw_edges(s->current_picture_ptr->f.data[0] + y *s->linesize,
2422  s->linesize, s->h_edge_pos, edge_h,
2423  EDGE_WIDTH, EDGE_WIDTH, sides);
2424  s->dsp.draw_edges(s->current_picture_ptr->f.data[1] + (y>>vshift)*s->uvlinesize,
2425  s->uvlinesize, s->h_edge_pos>>hshift, edge_h>>vshift,
2426  EDGE_WIDTH>>hshift, EDGE_WIDTH>>vshift, sides);
2427  s->dsp.draw_edges(s->current_picture_ptr->f.data[2] + (y>>vshift)*s->uvlinesize,
2428  s->uvlinesize, s->h_edge_pos>>hshift, edge_h>>vshift,
2429  EDGE_WIDTH>>hshift, EDGE_WIDTH>>vshift, sides);
2430  }
2431 
// Clamp to the display height before reporting to the user.
2432  h= FFMIN(h, s->avctx->height - y);
2433 
2434  if(field_pic && s->first_field && !(s->avctx->slice_flags&SLICE_FLAG_ALLOW_FIELD)) return;
2435 
2436  if (s->avctx->draw_horiz_band) {
2437  AVFrame *src;
2438  int offset[AV_NUM_DATA_POINTERS];
2439  int i;
2440 
// NOTE(review): the condition choosing current vs last picture (original
// line 2441) is elided from this listing — verify against the repository.
2442  src = &s->current_picture_ptr->f;
2443  else if(s->last_picture_ptr)
2444  src = &s->last_picture_ptr->f;
2445  else
2446  return;
2447 
// NOTE(review): the branch condition at original line 2448 is elided; the
// first branch passes zero offsets, the second offsets each plane to the
// start of the band (chroma shifted by chroma_y_shift).
2449  for (i = 0; i < AV_NUM_DATA_POINTERS; i++)
2450  offset[i] = 0;
2451  }else{
2452  offset[0]= y * s->linesize;
2453  offset[1]=
2454  offset[2]= (y >> s->chroma_y_shift) * s->uvlinesize;
2455  for (i = 3; i < AV_NUM_DATA_POINTERS; i++)
2456  offset[i] = 0;
2457  }
2458 
// Leave the FPU/MMX state clean before calling back into user code.
2459  emms_c();
2460 
2461  s->avctx->draw_horiz_band(s->avctx, src, offset,
2462  y, s->picture_structure, h);
2463  }
2464 }
2465 
// Initialize s->block_index[] (per-8x8-block indices into block-based
// arrays, positioned one MB left of s->mb_x) and the s->dest[] plane
// pointers for the current macroblock row/column.
2466 void ff_init_block_index(MpegEncContext *s){ //FIXME maybe rename
2467  const int linesize = s->current_picture.f.linesize[0]; //not s->linesize as this would be wrong for field pics
2468  const int uvlinesize = s->current_picture.f.linesize[1];
2469  const int mb_size= 4;
// mb_size is log2 of the macroblock width (16), used as a shift below.
2470 
// Indices 0..3: the four luma 8x8 blocks (two per b8 row); 4 and 5: the
// chroma blocks, stored after the luma area (offset by b8_stride*mb_height*2).
2471  s->block_index[0]= s->b8_stride*(s->mb_y*2 ) - 2 + s->mb_x*2;
2472  s->block_index[1]= s->b8_stride*(s->mb_y*2 ) - 1 + s->mb_x*2;
2473  s->block_index[2]= s->b8_stride*(s->mb_y*2 + 1) - 2 + s->mb_x*2;
2474  s->block_index[3]= s->b8_stride*(s->mb_y*2 + 1) - 1 + s->mb_x*2;
2475  s->block_index[4]= s->mb_stride*(s->mb_y + 1) + s->b8_stride*s->mb_height*2 + s->mb_x - 1;
2476  s->block_index[5]= s->mb_stride*(s->mb_y + s->mb_height + 2) + s->b8_stride*s->mb_height*2 + s->mb_x - 1;
2477  //block_index is not used by mpeg2, so it is not affected by chroma_format
2478 
// dest[] starts one macroblock to the left of mb_x (hence mb_x - 1);
// chroma columns are narrowed by chroma_x_shift.
2479  s->dest[0] = s->current_picture.f.data[0] + ((s->mb_x - 1) << mb_size);
2480  s->dest[1] = s->current_picture.f.data[1] + ((s->mb_x - 1) << (mb_size - s->chroma_x_shift));
2481  s->dest[2] = s->current_picture.f.data[2] + ((s->mb_x - 1) << (mb_size - s->chroma_y_shift));
2482 
// NOTE(review): the guard condition at original line 2483 is elided from
// this listing — confirm it in the repository source.
2484  {
2485  if(s->picture_structure==PICT_FRAME){
2486  s->dest[0] += s->mb_y * linesize << mb_size;
2487  s->dest[1] += s->mb_y * uvlinesize << (mb_size - s->chroma_y_shift);
2488  s->dest[2] += s->mb_y * uvlinesize << (mb_size - s->chroma_y_shift);
2489  }else{
// Field picture: two MB rows share one field line pair, hence mb_y>>1.
2490  s->dest[0] += (s->mb_y>>1) * linesize << mb_size;
2491  s->dest[1] += (s->mb_y>>1) * uvlinesize << (mb_size - s->chroma_y_shift);
2492  s->dest[2] += (s->mb_y>>1) * uvlinesize << (mb_size - s->chroma_y_shift);
2493  assert((s->mb_y&1) == (s->picture_structure == PICT_BOTTOM_FIELD));
2494  }
2495  }
2496 }
2497 
// Flush/reset decoder state (e.g. on seek): free all internally or
// user-allocated picture buffers and reset parser/bitstream bookkeeping.
// NOTE(review): the signature line (original 2498) is elided; the Doxygen
// index names it ff_mpeg_flush(AVCodecContext *avctx). Original lines 2511,
// 2516 and 2518 (further pointer/parse-context resets) are also elided from
// this listing — verify against the repository.
2499  int i;
2500  MpegEncContext *s = avctx->priv_data;
2501 
2502  if(s==NULL || s->picture==NULL)
2503  return;
2504 
2505  for(i=0; i<s->picture_count; i++){
2506  if (s->picture[i].f.data[0] &&
2507  (s->picture[i].f.type == FF_BUFFER_TYPE_INTERNAL ||
2508  s->picture[i].f.type == FF_BUFFER_TYPE_USER))
2509  free_frame_buffer(s, &s->picture[i]);
2510  }
2512 
2513  s->mb_x= s->mb_y= 0;
2514 
// -1 marks "no start code found yet" for the next parse.
2515  s->parse_context.state= -1;
2517  s->parse_context.overread= 0;
2519  s->parse_context.index= 0;
2520  s->parse_context.last_index= 0;
2521  s->bitstream_buffer_size=0;
2522  s->pp_time=0;
2523 }
2524 
// MPEG-1 intra-block inverse quantization (C reference implementation).
// NOTE(review): the first signature line (original 2525) is elided from this
// listing; the Doxygen index identifies this function as
// dct_unquantize_mpeg1_intra_c(MpegEncContext *s, ...).
// n is the block index (0..3 = luma, else chroma); qscale the quantizer.
2526  DCTELEM *block, int n, int qscale)
2527 {
2528  int i, level, nCoeffs;
2529  const uint16_t *quant_matrix;
2530 
2531  nCoeffs= s->block_last_index[n];
2532 
// DC coefficient uses its own scale factor, luma vs chroma.
2533  if (n < 4)
2534  block[0] = block[0] * s->y_dc_scale;
2535  else
2536  block[0] = block[0] * s->c_dc_scale;
2537  /* XXX: only mpeg1 */
2538  quant_matrix = s->intra_matrix;
// AC coefficients start at i=1 (DC handled above); j is the zigzag-permuted
// position of coefficient i.
2539  for(i=1;i<=nCoeffs;i++) {
2540  int j= s->intra_scantable.permutated[i];
2541  level = block[j];
2542  if (level) {
2543  if (level < 0) {
// Work on the magnitude so the >>3 truncates toward zero symmetrically.
2544  level = -level;
2545  level = (int)(level * qscale * quant_matrix[j]) >> 3;
// (level - 1) | 1 forces the result odd (MPEG-1 oddification /
// mismatch control).
2546  level = (level - 1) | 1;
2547  level = -level;
2548  } else {
2549  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2550  level = (level - 1) | 1;
2551  }
2552  block[j] = level;
2553  }
2554  }
2555 }
2556 
// MPEG-1 inter-block inverse quantization (C reference implementation).
// NOTE(review): the first signature line (original 2557) is elided; the
// Doxygen index names this dct_unquantize_mpeg1_inter_c(MpegEncContext *s, ...).
// Unlike the intra variant there is no special DC path: the loop starts at
// i=0 and applies the inter reconstruction formula (2*level + 1).
2558  DCTELEM *block, int n, int qscale)
2559 {
2560  int i, level, nCoeffs;
2561  const uint16_t *quant_matrix;
2562 
2563  nCoeffs= s->block_last_index[n];
2564 
2565  quant_matrix = s->inter_matrix;
// intra_scantable.permutated is used for the scan order here as well.
2566  for(i=0; i<=nCoeffs; i++) {
2567  int j= s->intra_scantable.permutated[i];
2568  level = block[j];
2569  if (level) {
2570  if (level < 0) {
2571  level = -level;
// ((2*|level| + 1) * qscale * matrix) / 16, truncating toward zero.
2572  level = (((level << 1) + 1) * qscale *
2573  ((int) (quant_matrix[j]))) >> 4;
// Force the result odd (MPEG-1 oddification).
2574  level = (level - 1) | 1;
2575  level = -level;
2576  } else {
2577  level = (((level << 1) + 1) * qscale *
2578  ((int) (quant_matrix[j]))) >> 4;
2579  level = (level - 1) | 1;
2580  }
2581  block[j] = level;
2582  }
2583  }
2584 }
2585 
// MPEG-2 intra-block inverse quantization (C reference implementation).
// NOTE(review): the first signature line (original 2586) is elided; the
// Doxygen index names this dct_unquantize_mpeg2_intra_c(MpegEncContext *s, ...).
// Differs from the MPEG-1 intra path: no "(level-1)|1" oddification, and
// alternate_scan forces a full 64-coefficient pass.
2587  DCTELEM *block, int n, int qscale)
2588 {
2589  int i, level, nCoeffs;
2590  const uint16_t *quant_matrix;
2591 
2592  if(s->alternate_scan) nCoeffs= 63;
2593  else nCoeffs= s->block_last_index[n];
2594 
// DC coefficient uses its own scale factor, luma (n<4) vs chroma.
2595  if (n < 4)
2596  block[0] = block[0] * s->y_dc_scale;
2597  else
2598  block[0] = block[0] * s->c_dc_scale;
2599  quant_matrix = s->intra_matrix;
2600  for(i=1;i<=nCoeffs;i++) {
2601  int j= s->intra_scantable.permutated[i];
2602  level = block[j];
2603  if (level) {
2604  if (level < 0) {
// Operate on the magnitude so the >>3 truncates toward zero.
2605  level = -level;
2606  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2607  level = -level;
2608  } else {
2609  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2610  }
2611  block[j] = level;
2612  }
2613  }
2614 }
2615 
// Bit-exact MPEG-2 intra dequantization: identical arithmetic to
// dct_unquantize_mpeg2_intra_c, plus mismatch control — the parity of the
// sum of all reconstructed coefficients is folded into the LSB of
// coefficient 63, as MPEG-2 requires for bit-exact reconstruction.
// NOTE(review): the first signature line (original 2616) is elided; the
// Doxygen index names this dct_unquantize_mpeg2_intra_bitexact(...).
2617  DCTELEM *block, int n, int qscale)
2618 {
2619  int i, level, nCoeffs;
2620  const uint16_t *quant_matrix;
// sum starts at -1 so that block[63] ^= sum&1 implements the spec's
// "toggle when the sum is even" rule.
2621  int sum=-1;
2622 
2623  if(s->alternate_scan) nCoeffs= 63;
2624  else nCoeffs= s->block_last_index[n];
2625 
2626  if (n < 4)
2627  block[0] = block[0] * s->y_dc_scale;
2628  else
2629  block[0] = block[0] * s->c_dc_scale;
2630  quant_matrix = s->intra_matrix;
2631  for(i=1;i<=nCoeffs;i++) {
2632  int j= s->intra_scantable.permutated[i];
2633  level = block[j];
2634  if (level) {
2635  if (level < 0) {
2636  level = -level;
2637  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2638  level = -level;
2639  } else {
2640  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2641  }
2642  block[j] = level;
2643  sum+=level;
2644  }
2645  }
// Mismatch control: flip the LSB of the last coefficient per sum parity.
2646  block[63]^=sum&1;
2647 }
2648 
// MPEG-2 inter-block inverse quantization with mismatch control.
// NOTE(review): the first signature line (original 2649) is elided; the
// Doxygen index names this dct_unquantize_mpeg2_inter_c(MpegEncContext *s, ...).
// Inter formula (2*level + 1); no DC special case, loop starts at i=0;
// coefficient-sum parity is folded into the LSB of coefficient 63.
2650  DCTELEM *block, int n, int qscale)
2651 {
2652  int i, level, nCoeffs;
2653  const uint16_t *quant_matrix;
// sum starts at -1: block[63] toggles when the reconstructed sum is even.
2654  int sum=-1;
2655 
2656  if(s->alternate_scan) nCoeffs= 63;
2657  else nCoeffs= s->block_last_index[n];
2658 
2659  quant_matrix = s->inter_matrix;
2660  for(i=0; i<=nCoeffs; i++) {
2661  int j= s->intra_scantable.permutated[i];
2662  level = block[j];
2663  if (level) {
2664  if (level < 0) {
2665  level = -level;
// ((2*|level| + 1) * qscale * matrix) / 16, truncating toward zero.
2666  level = (((level << 1) + 1) * qscale *
2667  ((int) (quant_matrix[j]))) >> 4;
2668  level = -level;
2669  } else {
2670  level = (((level << 1) + 1) * qscale *
2671  ((int) (quant_matrix[j]))) >> 4;
2672  }
2673  block[j] = level;
2674  sum+=level;
2675  }
2676  }
// MPEG-2 mismatch control.
2677  block[63]^=sum&1;
2678 }
2679 
// H.263-style intra dequantization: level' = level*2*qscale +/- qadd,
// where qadd = (qscale-1)|1 (always odd), or 0 when Advanced INTRA Coding
// (AIC) is active. No quantization matrix is used.
// NOTE(review): the first signature line (original 2680) is elided; the
// Doxygen index names this dct_unquantize_h263_intra_c(MpegEncContext *s, ...).
2681  DCTELEM *block, int n, int qscale)
2682 {
2683  int i, level, qmul, qadd;
2684  int nCoeffs;
2685 
2686  assert(s->block_last_index[n]>=0);
2687 
2688  qmul = qscale << 1;
2689 
// Without AIC the DC coefficient gets its own scale (luma vs chroma)
// and the odd reconstruction offset is used; with AIC neither applies.
2690  if (!s->h263_aic) {
2691  if (n < 4)
2692  block[0] = block[0] * s->y_dc_scale;
2693  else
2694  block[0] = block[0] * s->c_dc_scale;
2695  qadd = (qscale - 1) | 1;
2696  }else{
2697  qadd = 0;
2698  }
// With AC prediction all 63 AC coefficients may be non-zero regardless of
// block_last_index; otherwise map the last scan index to its raster
// position so the plain i-indexed loop below covers every coefficient.
2699  if(s->ac_pred)
2700  nCoeffs=63;
2701  else
2702  nCoeffs= s->inter_scantable.raster_end[ s->block_last_index[n] ];
2703 
// i=1: DC was handled above; iteration is in raster order (no permutation).
2704  for(i=1; i<=nCoeffs; i++) {
2705  level = block[i];
2706  if (level) {
2707  if (level < 0) {
2708  level = level * qmul - qadd;
2709  } else {
2710  level = level * qmul + qadd;
2711  }
2712  block[i] = level;
2713  }
2714  }
2715 }
2716 
// H.263-style inter dequantization: level' = level*2*qscale +/- qadd with
// qadd = (qscale-1)|1; all coefficients (including DC at i=0) are treated
// uniformly and no quantization matrix is used.
// NOTE(review): the first signature line (original 2717) is elided; the
// Doxygen index names this dct_unquantize_h263_inter_c(MpegEncContext *s, ...).
2718  DCTELEM *block, int n, int qscale)
2719 {
2720  int i, level, qmul, qadd;
2721  int nCoeffs;
2722 
2723  assert(s->block_last_index[n]>=0);
2724 
2725  qadd = (qscale - 1) | 1;
2726  qmul = qscale << 1;
2727 
// Convert the last scan index to its raster position so the raster-order
// loop below reaches every possibly non-zero coefficient.
2728  nCoeffs= s->inter_scantable.raster_end[ s->block_last_index[n] ];
2729 
2730  for(i=0; i<=nCoeffs; i++) {
2731  level = block[i];
2732  if (level) {
2733  if (level < 0) {
2734  level = level * qmul - qadd;
2735  } else {
2736  level = level * qmul + qadd;
2737  }
2738  block[i] = level;
2739  }
2740  }
2741 }
2742 
// Set the quantizer, clamped to the legal 1..31 range, and refresh the
// qscale-dependent derived values (chroma qscale and DC scale tables).
2746 void ff_set_qscale(MpegEncContext * s, int qscale)
2747 {
2748  if (qscale < 1)
2749  qscale = 1;
2750  else if (qscale > 31)
2751  qscale = 31;
2752 
2753  s->qscale = qscale;
2754  s->chroma_qscale= s->chroma_qscale_table[qscale];
2755 
// NOTE(review): the matching c_dc_scale update (original line 2757) is
// elided from this listing — verify against the repository source.
2756  s->y_dc_scale= s->y_dc_scale_table[ qscale ];
2758 }
2759 
// NOTE(review): this is ff_MPV_report_decode_progress per the Doxygen index;
// its signature (original 2760) and body (originals 2762-2763) are elided
// from this listing, so its behavior cannot be documented from this view —
// consult the repository source.
2761 {
2764 }
int bitstream_buffer_size
Definition: mpegvideo.h:589
uint8_t * scratchpad
data area for the ME algo, so that the ME does not need to malloc/free
Definition: mpegvideo.h:160
#define PICT_BOTTOM_FIELD
Definition: mpegvideo.h:640
enum AVPixelFormat ff_hwaccel_pixfmt_list_420[]
Definition: mpegvideo.c:133
int last
number of values for last = 0
Definition: rl.h:40
const struct AVCodec * codec
Definition: avcodec.h:1348
int16_t(* b_bidir_back_mv_table_base)[2]
Definition: mpegvideo.h:369
void * av_malloc(size_t size)
Allocate a block of size bytes with alignment suitable for all memory accesses (including vectors if ...
Definition: mem.c:61
int table_size
Definition: get_bits.h:66
#define PICT_TOP_FIELD
Definition: mpegvideo.h:639
discard all frames except keyframes
Definition: avcodec.h:535
void ff_init_block_index(MpegEncContext *s)
Definition: mpegvideo.c:2466
op_pixels_func put_pixels_tab[4][4]
Halfpel motion compensation with rounding (a+b+1)>>1.
Definition: dsputil.h:259
unsigned int stream_codec_tag
fourcc from the AVI stream header (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + ...
Definition: avcodec.h:1373
av_cold void ff_dsputil_init(DSPContext *c, AVCodecContext *avctx)
Definition: dsputil.c:2656
int picture_number
Definition: mpegvideo.h:245
#define MAX_PICTURE_COUNT
Definition: mpegvideo.h:63
ScanTable intra_v_scantable
Definition: mpegvideo.h:268
#define HAVE_THREADS
Definition: config.h:235
S(GMC)-VOP MPEG4.
Definition: avutil.h:248
const uint8_t ff_zigzag_direct[64]
Definition: dsputil.c:59
void ff_release_unused_pictures(MpegEncContext *s, int remove_current)
Definition: mpegvideo.c:1283
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:1435
int time_increment_bits
number of bits to represent the fractional part of time
Definition: mpegvideo.h:533
This structure describes decoded (raw) audio or video data.
Definition: avcodec.h:989
#define IS_SKIP(a)
Definition: mpegvideo.h:110
int qstride
QP store stride.
Definition: avcodec.h:1145
AVPanScan * pan_scan
Pan scan.
Definition: avcodec.h:1260
int ff_alloc_picture(MpegEncContext *s, Picture *pic, int shared)
Allocate a Picture.
Definition: mpegvideo.c:332
int16_t(* p_mv_table)[2]
MV table (1MV per MB) p-frame encoding.
Definition: mpegvideo.h:373
uint8_t * rd_scratchpad
scratchpad for rate distortion mb decision
Definition: mpegvideo.h:338
static void dct_unquantize_mpeg2_intra_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2586
int start_mb_y
start mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y) ...
Definition: mpegvideo.h:286
const uint8_t * y_dc_scale_table
qscale -> y_dc_scale table
Definition: mpegvideo.h:324
uint8_t * mb_mean
Table for MB luminance.
Definition: mpegvideo.h:145
int coded_width
Bitstream width / height, may be different from width/height.
Definition: avcodec.h:1515
av_cold int ff_dct_common_init(MpegEncContext *s)
Definition: mpegvideo.c:181
void(* dct_unquantize_mpeg2_inter)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:688
av_cold int ff_MPV_common_init(MpegEncContext *s)
init common structure for both encoder and decoder.
Definition: mpegvideo.c:882
void ff_MPV_common_init_arm(MpegEncContext *s)
Definition: mpegvideo_arm.c:42
misc image utilities
void ff_MPV_report_decode_progress(MpegEncContext *s)
Definition: mpegvideo.c:2760
uint8_t * coded_block_base
Definition: mpegvideo.h:327
#define EDGE_TOP
Definition: dsputil.h:441
AVFrame * coded_frame
the picture in the bitstream
Definition: avcodec.h:2725
int end_mb_y
end mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y) ...
Definition: mpegvideo.h:287
uint16_t * mb_var
Table for MB variances.
Definition: mpegvideo.h:143
static void free_frame_buffer(MpegEncContext *s, Picture *pic)
Release a frame buffer.
Definition: mpegvideo.c:232
void ff_MPV_motion(MpegEncContext *s, uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr, int dir, uint8_t **ref_picture, op_pixels_func(*pix_op)[4], qpel_mc_func(*qpix_op)[16])
int16_t(*[3] ac_val)[16]
used for for mpeg4 AC prediction, all 3 arrays must be continuous
Definition: mpegvideo.h:330
MJPEG encoder.
void(* idct_add)(uint8_t *dest, int line_size, DCTELEM *block)
block -> idct -> add dest -> clip to unsigned 8 bit -> dest.
Definition: dsputil.h:411
void * hwaccel_picture_private
hardware accelerator private data (Libav-allocated)
Definition: avcodec.h:1280
int v_edge_pos
horizontal / vertical position of the right/bottom edge (pixel replication)
Definition: mpegvideo.h:251
int msmpeg4_version
0=not msmpeg4, 1=mp41, 2=mp42, 3=mp43/divx3 4=wmv1/7 5=wmv2/8
Definition: mpegvideo.h:616
void(* draw_edges)(uint8_t *buf, int wrap, int width, int height, int w, int h, int sides)
Definition: dsputil.h:439
static const uint8_t mpeg2_dc_scale_table3[128]
Definition: mpegvideo.c:109
void ff_xvmc_field_end(MpegEncContext *s)
Complete frame/field rendering by passing any remaining blocks.
int needs_realloc
Picture needs to be reallocated (eg due to a frame size change)
Definition: mpegvideo.h:149
uint8_t * bitstream_buffer
Definition: mpegvideo.h:588
enum AVCodecID codec_id
Definition: mpegvideo.h:227
void(* dct_unquantize_h263_inter)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:692
void ff_init_rl(RLTable *rl, uint8_t static_store[2][2 *MAX_RUN+MAX_LEVEL+3])
Definition: mpegvideo.c:1191
int16_t(*[2] motion_val_base)[2]
Definition: mpegvideo.h:102
HW decoding through VA API, Picture.data[3] contains a vaapi_render_state struct which contains the b...
Definition: pixfmt.h:123
int field_picture
whether or not the picture was encoded in separate fields
Definition: mpegvideo.h:139
void(* dct_unquantize_mpeg2_intra)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:686
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:1533
int16_t(*[2][2] p_field_mv_table)[2]
MV table (2MV per MB) interlaced p-frame encoding.
Definition: mpegvideo.h:379
int picture_range_end
the part of picture that this context can allocate in
Definition: mpegvideo.h:319
static void dct_unquantize_h263_inter_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2717
int16_t(* p_mv_table_base)[2]
Definition: mpegvideo.h:365
uint8_t raster_end[64]
Definition: dsputil.h:184
#define wrap(func)
Definition: w64xmmtest.h:70
uint32_t * score_map
map to store the scores
Definition: mpegvideo.h:166
mpegvideo header.
av_dlog(ac->avr,"%d samples - audio_convert: %s to %s (%s)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt), use_generic?ac->func_descr_generic:ac->func_descr)
discard all
Definition: avcodec.h:536
uint8_t permutated[64]
Definition: dsputil.h:183
#define IS_INTRA4x4(a)
Definition: mpegvideo.h:105
const int8_t * table_level
Definition: rl.h:43
uint8_t run
Definition: svq3.c:124
static void free_duplicate_context(MpegEncContext *s)
Definition: mpegvideo.c:500
int bits_per_raw_sample
Bits per sample/pixel of internal libavcodec pixel/sample format.
Definition: avcodec.h:2711
int mb_num
number of MBs of a picture
Definition: mpegvideo.h:252
int stride
Definition: mace.c:144
int frame_start_found
Definition: parser.h:34
int ff_xvmc_field_start(MpegEncContext *s, AVCodecContext *avctx)
Find and store the surfaces that are used as reference frames.
static void free_picture(MpegEncContext *s, Picture *pic)
Deallocate a picture.
Definition: mpegvideo.c:421
int qscale
QP.
Definition: mpegvideo.h:342
RLTable.
Definition: rl.h:38
int h263_aic
Advanded INTRA Coding (AIC)
Definition: mpegvideo.h:262
int16_t(* b_back_mv_table)[2]
MV table (1MV per MB) backward mode b-frame encoding.
Definition: mpegvideo.h:375
uint8_t log2_chroma_w
Amount to shift the luma width right to find the chroma width.
Definition: pixdesc.h:66
int chroma_x_shift
Definition: mpegvideo.h:656
int encoding
true if we are encoding (vs decoding)
Definition: mpegvideo.h:229
void ff_MPV_common_init_bfin(MpegEncContext *s)
int block_wrap[6]
Definition: mpegvideo.h:434
static void dct_unquantize_mpeg1_intra_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2525
int16_t(* b_back_mv_table_base)[2]
Definition: mpegvideo.h:367
static void backup_duplicate_context(MpegEncContext *bak, MpegEncContext *src)
Definition: mpegvideo.c:520
#define USES_LIST(a, list)
does this mb use listX, note does not work if subMBs
Definition: mpegvideo.h:126
void ff_clean_intra_table_entries(MpegEncContext *s)
Clean dc, ac, coded_block for the current non-intra MB.
Definition: mpegvideo.c:2115
int picture_range_start
Definition: mpegvideo.h:319
#define COLOR(theta, r)
void av_freep(void *arg)
Free a memory block which has been allocated with av_malloc(z)() or av_realloc() and set the pointer ...
Definition: mem.c:151
#define CONFIG_GRAY
Definition: config.h:276
Switching Intra.
Definition: avutil.h:249
#define MAX_THREADS
Definition: mpegvideo.h:61
uint8_t * visualization_buffer[3]
temporary buffer vor MV visualization
Definition: mpegvideo.h:320
struct AVHWAccel * hwaccel
Hardware accelerator in use.
Definition: avcodec.h:2622
#define CONFIG_WMV2_DECODER
Definition: config.h:524
int picture_in_gop_number
0-> first pic in gop, ...
Definition: mpegvideo.h:246
void av_fast_malloc(void *ptr, unsigned int *size, size_t min_size)
Allocate a buffer, reusing the given one if large enough.
Definition: utils.c:72
int8_t * max_run[2]
encoding & decoding
Definition: rl.h:46
void ff_MPV_common_init_altivec(MpegEncContext *s)
int context_reinit
Definition: mpegvideo.h:717
const uint8_t ff_alternate_vertical_scan[64]
Definition: dsputil.c:97
int16_t * dc_val_base
Definition: mpegvideo.h:322
#define CONFIG_WMV2_ENCODER
Definition: config.h:884
int ff_MPV_common_frame_size_change(MpegEncContext *s)
Definition: mpegvideo.c:1069
uint8_t
DCTELEM(*[12] pblocks)[64]
Definition: mpegvideo.h:672
#define IS_8X16(a)
Definition: mpegvideo.h:117
Picture ** input_picture
next pictures on display order for encoding
Definition: mpegvideo.h:256
#define PICT_FRAME
Definition: mpegvideo.h:641
enum OutputFormat out_format
output format
Definition: mpegvideo.h:219
void(* qpel_mc_func)(uint8_t *dst, uint8_t *src, int stride)
Definition: dsputil.h:144
uint16_t(* dct_offset)[64]
Definition: mpegvideo.h:469
#define AV_RB32
Definition: intreadwrite.h:130
uint8_t * pred_dir_table
used to store pred_dir for partitioned decoding
Definition: mpegvideo.h:336
uint8_t * er_temp_buffer
Definition: mpegvideo.h:710
qpel_mc_func(* qpel_put)[16]
Definition: mpegvideo.h:198
static void dct_unquantize_h263_intra_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2680
#define emms_c()
Definition: internal.h:145
uint8_t motion_subsample_log2
log2 of the size of the block which a single vector in motion_val represents: (4->16x16, 3->8x8, 2-> 4x4, 1-> 2x2)
Definition: avcodec.h:1302
#define IS_GMC(a)
Definition: mpegvideo.h:114
int no_rounding
apply no rounding to motion compensation (MPEG4, msmpeg4, ...) for b-frames rounding mode is always 0...
Definition: mpegvideo.h:407
int interlaced_dct
Definition: mpegvideo.h:661
Picture current_picture
copy of the current picture structure.
Definition: mpegvideo.h:313
int intra_dc_precision
Definition: mpegvideo.h:643
static int pic_is_unused(MpegEncContext *s, Picture *pic)
Definition: mpegvideo.c:1298
op_pixels_func avg_pixels_tab[4][4]
Halfpel motion compensation with rounding (a+b+1)>>1.
Definition: dsputil.h:271
int16_t(* b_bidir_forw_mv_table)[2]
MV table (1MV per MB) bidir mode b-frame encoding.
Definition: mpegvideo.h:376
void(* dct_unquantize_intra)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:698
void(* dct_unquantize_inter)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:700
float * cplx_tab
Definition: mpegvideo.h:713
int8_t * max_level[2]
encoding & decoding
Definition: rl.h:45
uint16_t pp_time
time distance between the last 2 p,s,i frames
Definition: mpegvideo.h:538
uint8_t idct_permutation[64]
idct input permutation.
Definition: dsputil.h:425
uint8_t * b_scratchpad
scratchpad used for writing into write only buffers
Definition: mpegvideo.h:340
int flags2
AVCodecContext.flags2.
Definition: mpegvideo.h:231
int interlaced_frame
The content of the picture is interlaced.
Definition: avcodec.h:1232
int mb_height
number of MBs horizontally & vertically
Definition: mpegvideo.h:247
enum AVPixelFormat ff_pixfmt_list_420[]
Definition: mpegvideo.c:128
void ff_MPV_frame_end(MpegEncContext *s)
Definition: mpegvideo.c:1581
int codec_tag
internal codec_tag upper case converted from avctx codec_tag
Definition: mpegvideo.h:237
char * stats_out
pass1 encoding statistics output buffer
Definition: avcodec.h:2502
void(* dct_unquantize_mpeg1_intra)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:682
int16_t(*[2][2] p_field_mv_table_base)[2]
Definition: mpegvideo.h:371
static int free_context_frame(MpegEncContext *s)
Frees and resets MpegEncContext fields depending on the resolution.
Definition: mpegvideo.c:1016
static void update_noise_reduction(MpegEncContext *s)
Definition: mpegvideo.c:1345
#define MAX_LEVEL
Definition: rl.h:35
#define IS_INTERLACED(a)
Definition: mpegvideo.h:112
void ff_set_qscale(MpegEncContext *s, int qscale)
set qscale and update qscale dependent variables.
Definition: mpegvideo.c:2746
int(* q_inter_matrix)[64]
Definition: mpegvideo.h:461
#define r
Definition: input.c:51
void ff_xvmc_decode_mb(MpegEncContext *s)
Synthesize the data needed by XvMC to render one macroblock of data.
uint8_t * error_status_table
table of the error status of each MB
Definition: mpegvideo.h:493
int(* q_intra_matrix)[64]
precomputed matrix (combine qscale and DCT renorm)
Definition: mpegvideo.h:460
int intra_only
if true, only intra pictures are generated
Definition: mpegvideo.h:217
int16_t * dc_val[3]
used for mpeg4 DC prediction, all 3 arrays must be continuous
Definition: mpegvideo.h:323
int h263_plus
h263 plus headers
Definition: mpegvideo.h:224
int slice_context_count
number of used thread_contexts
Definition: mpegvideo.h:289
int last_non_b_pict_type
used for mpeg4 gmc b-frames & ratecontrol
Definition: mpegvideo.h:351
unsigned int buffer_size
Definition: parser.h:32
int stream_codec_tag
internal stream_codec_tag upper case converted from avctx stream_codec_tag
Definition: mpegvideo.h:238
int last_dc[3]
last DC values for MPEG1
Definition: mpegvideo.h:321
uint8_t log2_chroma_h
Amount to shift the luma height right to find the chroma height.
Definition: pixdesc.h:75
Multithreading support functions.
int mb_skipped
MUST BE SET only during DECODING.
Definition: mpegvideo.h:331
int reference
is this picture used as reference The values for this are the same as the MpegEncContext.picture_structure variable, that is 1->top field, 2->bottom field, 3->frame/both fields.
Definition: avcodec.h:1132
int chroma_y_shift
Definition: mpegvideo.h:657
static int find_unused_picture(MpegEncContext *s, int shared)
Definition: mpegvideo.c:1308
int partitioned_frame
is current frame partitioned
Definition: mpegvideo.h:567
int is_copy
Whether the parent AVCodecContext is a copy of the context which had init() called on it...
Definition: internal.h:63
short * dct_coeff
DCT coefficients.
Definition: avcodec.h:1187
const uint8_t ff_alternate_horizontal_scan[64]
Definition: dsputil.c:86
void(* idct_put)(uint8_t *dest, int line_size, DCTELEM *block)
block -> idct -> clip to unsigned 8 bit -> dest.
Definition: dsputil.h:405
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
Definition: pixdesc.c:1460
int unrestricted_mv
mv can point outside of the coded picture
Definition: mpegvideo.h:358
int active_thread_type
Which multithreading methods are in use by the codec.
Definition: avcodec.h:2752
int last_lambda_for[5]
last lambda for a specific pict type
Definition: mpegvideo.h:354
int capabilities
Codec capabilities.
Definition: avcodec.h:2979
uint8_t * edge_emu_buffer
temporary buffer for if MVs point to out-of-frame data
Definition: mpegvideo.h:337
uint8_t * base[AV_NUM_DATA_POINTERS]
pointer to the first allocated byte of the picture.
Definition: avcodec.h:1073
#define REBASE_PICTURE(pic, new_ctx, old_ctx)
Definition: mpegvideo.h:720
int flags
CODEC_FLAG_*.
Definition: avcodec.h:1434
static enum AVDiscard skip_idct
Definition: avplay.c:258
void ff_videodsp_init(VideoDSPContext *ctx, int bpc)
Definition: videodsp.c:36
int overread_index
the index into ParseContext.buffer of the overread bytes
Definition: parser.h:36
void av_log(void *avcl, int level, const char *fmt,...)
Definition: log.c:146
int quarter_sample
1->qpel, 0->half pel ME/MC
Definition: mpegvideo.h:557
uint16_t * mb_type
Table for candidate MB types for encoding.
Definition: mpegvideo.h:414
#define IS_INTRA(a)
Definition: mpegvideo.h:108
static void draw_line(uint8_t *buf, int sx, int sy, int ex, int ey, int w, int h, int stride, int color)
Draw a line from (ex, ey) -> (sx, sy).
Definition: mpegvideo.c:1659
int low_delay
no reordering needed / has no b-frames
Definition: mpegvideo.h:570
op_pixels_func put_no_rnd_pixels_tab[4][4]
Halfpel motion compensation with no rounding (a+b)>>1.
Definition: dsputil.h:283
static void dct_unquantize_mpeg2_intra_bitexact(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2616
VLC vlc
decoding only deprecated FIXME remove
Definition: rl.h:47
uint8_t *[2][2] b_field_select_table
Definition: mpegvideo.h:382
int error_occurred
Definition: mpegvideo.h:492
int8_t len
Definition: get_bits.h:71
int priv_data_size
Size of HW accelerator private data.
Definition: avcodec.h:3139
int off
Definition: dsputil_bfin.c:28
DCTELEM(* blocks)[8][64]
Definition: mpegvideo.h:675
int picture_count
number of allocated pictures (MAX_PICTURE_COUNT * avctx->thread_count)
Definition: mpegvideo.h:318
static const uint8_t ff_default_chroma_qscale_table[32]
Definition: mpegvideo.c:67
void ff_mpeg_flush(AVCodecContext *avctx)
Definition: mpegvideo.c:2498
int coded_picture_number
used to set pic->coded_picture_number, should not be used for/by anything else
Definition: mpegvideo.h:244
int * lambda_table
Definition: mpegvideo.h:346
int n
number of entries of table_vlc minus 1
Definition: rl.h:39
#define IS_8X8(a)
Definition: mpegvideo.h:118
int av_image_check_size(unsigned int w, unsigned int h, int log_offset, void *log_ctx)
Check if the given dimension of an image is valid, meaning that all bytes of the image can be address...
Definition: imgutils.c:220
int err_recognition
Definition: mpegvideo.h:510
void(* dct_unquantize_mpeg1_inter)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:684
void(* draw_horiz_band)(struct AVCodecContext *s, const AVFrame *src, int offset[AV_NUM_DATA_POINTERS], int y, int type, int height)
If non NULL, 'draw_horiz_band' is called by the libavcodec decoder to draw a horizontal band...
Definition: avcodec.h:1567
int progressive_frame
Definition: mpegvideo.h:659
static DCTELEM block[64]
Definition: dct-test.c:169
enum AVPictureType pict_type
Picture type of the frame, see ?_TYPE below.
Definition: avcodec.h:1065
int top_field_first
Definition: mpegvideo.h:645
int err_recognition
Error recognition; may misdetect some more or less valid parts as errors.
Definition: avcodec.h:2602
int overread
the number of bytes which where irreversibly read from the next frame
Definition: parser.h:35
uint16_t(* q_inter_matrix16)[2][64]
Definition: mpegvideo.h:464
int last_index
Definition: parser.h:31
int next_p_frame_damaged
set if the next p frame is damaged, to avoid showing trashed b frames
Definition: mpegvideo.h:509
int width
picture width / height.
Definition: avcodec.h:1508
uint8_t * mbskip_table
used to avoid copy if macroblock skipped (for black regions for example) and used for b-frame encodin...
Definition: mpegvideo.h:332
int type
type of the buffer (to keep track of who has to deallocate data[*])
Definition: avcodec.h:1217
Picture * current_picture_ptr
pointer to the current picture
Definition: mpegvideo.h:317
void ff_copy_picture(Picture *dst, Picture *src)
Definition: mpegvideo.c:223
Picture.
Definition: mpegvideo.h:94
int alternate_scan
Definition: mpegvideo.h:649
unsigned int allocated_bitstream_buffer_size
Definition: mpegvideo.h:590
int16_t(* ac_val_base)[16]
Definition: mpegvideo.h:329
int32_t
uint16_t(* q_intra_matrix16)[2][64]
identical to the above but for MMX & these are not permutated, second 64 entries are bias ...
Definition: mpegvideo.h:463
const int8_t * table_run
Definition: rl.h:42
static av_always_inline void MPV_decode_mb_internal(MpegEncContext *s, DCTELEM block[12][64], int is_mpeg12)
Definition: mpegvideo.c:2156
int16_t(*[2][2][2] b_field_mv_table_base)[2]
Definition: mpegvideo.h:372
int quality
quality (between 1 (good) and FF_LAMBDA_MAX (bad))
Definition: avcodec.h:1122
int(* ac_stats)[2][MAX_LEVEL+1][MAX_RUN+1][2]
[mb_intra][isChroma][level][run][last]
Definition: mpegvideo.h:621
int16_t(* b_forw_mv_table_base)[2]
Definition: mpegvideo.h:366
int block_last_index[12]
last non zero coefficient in block
Definition: mpegvideo.h:261
MotionEstContext me
Definition: mpegvideo.h:405
int ff_mpv_frame_size_alloc(MpegEncContext *s, int linesize)
Definition: mpegvideo.c:245
#define EDGE_BOTTOM
Definition: dsputil.h:442
int mb_decision
macroblock decision mode
Definition: avcodec.h:1882
uint8_t * mbintra_table
used to avoid setting {ac, dc, cbp}-pred stuff to zero on inter MB decoding
Definition: mpegvideo.h:334
#define ME_MAP_SIZE
Definition: mpegvideo.h:65
#define DELAYED_PIC_REF
Value of Picture.reference when Picture is not a reference picture, but is held for delayed output...
Definition: mpegvideo.h:87
int ff_MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx)
generic function for encode/decode called after coding/decoding the header and before a frame is code...
Definition: mpegvideo.c:1370
int ff_mpeg_update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
Definition: mpegvideo.c:570
RL_VLC_ELEM * rl_vlc[32]
decoding only
Definition: rl.h:48
preferred ID for MPEG-1/2 video decoding
Definition: avcodec.h:100
int thread_count
thread count is used to decide how many independent tasks should be passed to execute() ...
Definition: avcodec.h:2733
int block_index[6]
index to current MB in block based arrays with edges
Definition: mpegvideo.h:433
#define IS_16X8(a)
Definition: mpegvideo.h:116
int xvmc_acceleration
XVideo Motion Acceleration.
Definition: avcodec.h:1875
int * mb_index2xy
mb_index -> mb_x + mb_y*mb_stride
Definition: mpegvideo.h:437
int first_field
is 1 for the first field of a field picture 0 otherwise
Definition: mpegvideo.h:663
void(* op_pixels_func)(uint8_t *block, const uint8_t *pixels, int line_size, int h)
Definition: dsputil.h:142
static const int8_t mv[256][2]
Definition: 4xm.c:73
uint32_t * mb_type
macroblock type table mb_type_base + mb_width + 2
Definition: avcodec.h:1180
#define MV_TYPE_16X16
1 vector for the whole mb
Definition: mpegvideo.h:389
NULL
Definition: eval.c:52
uint16_t * mc_mb_var
Table for motion compensated MB variances.
Definition: mpegvideo.h:144
#define MV_DIR_BACKWARD
Definition: mpegvideo.h:386
int16_t(* b_bidir_forw_mv_table_base)[2]
Definition: mpegvideo.h:368
const uint8_t *const ff_mpeg2_dc_scale_table[4]
Definition: mpegvideo.c:121
static int width
Definition: utils.c:156
int coded_picture_number
picture number in bitstream order
Definition: avcodec.h:1109
const uint8_t * avpriv_mpv_find_start_code(const uint8_t *restrict p, const uint8_t *end, uint32_t *restrict state)
Definition: mpegvideo.c:147
uint16_t inter_matrix[64]
Definition: mpegvideo.h:442
uint8_t * buffer
Definition: parser.h:29
struct MpegEncContext * thread_context[MAX_THREADS]
Definition: mpegvideo.h:288
external API header
void ff_thread_await_progress(AVFrame *f, int n, int field)
Wait for earlier decoding threads to finish reference pictures.
Definition: pthread.c:684
int8_t * qscale_table_base
Definition: mpegvideo.h:101
static av_const unsigned int ff_sqrt(unsigned int a)
Definition: mathops.h:198
static void dct_unquantize_mpeg2_inter_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2649
enum AVDiscard skip_idct
Definition: avcodec.h:2900
uint32_t * mb_type_base
Definition: mpegvideo.h:103
int linesize[AV_NUM_DATA_POINTERS]
Size, in bytes, of the data for each picture/channel plane.
Definition: avcodec.h:1008
int debug
debug
Definition: avcodec.h:2568
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:55
main external API structure.
Definition: avcodec.h:1339
ScanTable intra_scantable
Definition: mpegvideo.h:266
uint8_t * coded_block
used for coded block pattern prediction (msmpeg4v3, wmv1)
Definition: mpegvideo.h:328
int height
picture size. must be a multiple of 16
Definition: mpegvideo.h:215
unsigned int codec_tag
fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A').
Definition: avcodec.h:1365
const uint8_t ff_mpeg1_dc_scale_table[128]
Definition: mpegvideo.c:73
int16_t(*[2] motion_val)[2]
motion vector table
Definition: avcodec.h:1172
#define MV_TYPE_16X8
2 vectors, one per 16x8 block
Definition: mpegvideo.h:391
void avcodec_default_free_buffers(AVCodecContext *s)
Definition: utils.c:1754
void avcodec_default_release_buffer(AVCodecContext *s, AVFrame *pic)
Definition: utils.c:487
uint32_t state
contains the last few bytes in MSB order
Definition: parser.h:33
Picture * picture
main picture buffer
Definition: mpegvideo.h:255
int progressive_sequence
Definition: mpegvideo.h:635
int slice_flags
slice flags
Definition: avcodec.h:1865
void avcodec_get_frame_defaults(AVFrame *frame)
Set the fields of the given AVFrame to default values.
Definition: utils.c:602
int coded_height
Definition: avcodec.h:1515
Switching Predicted.
Definition: avutil.h:250
ScanTable intra_h_scantable
Definition: mpegvideo.h:267
int16_t(*[2][2][2] b_field_mv_table)[2]
MV table (4MV per MB) interlaced b-frame encoding.
Definition: mpegvideo.h:380
uint8_t * cbp_table
used to store cbp, ac_pred for partitioned decoding
Definition: mpegvideo.h:335
void ff_draw_horiz_band(MpegEncContext *s, int y, int h)
Definition: mpegvideo.c:2399
unsigned int avpriv_toupper4(unsigned int x)
Definition: utils.c:2095
uint8_t * index_run[2]
encoding only
Definition: rl.h:44
int context_initialized
Definition: mpegvideo.h:242
int input_picture_number
used to set pic->display_picture_number, should not be used for/by anything else
Definition: mpegvideo.h:243
void ff_MPV_common_init_x86(MpegEncContext *s)
Definition: mpegvideo.c:587
int8_t * ref_index[2]
motion reference frame index the order in which these are stored can depend on the codec...
Definition: avcodec.h:1195
DSPContext dsp
pointers for accelerated dsp functions
Definition: mpegvideo.h:361
#define s1
Definition: regdef.h:38
int f_code
forward MV resolution
Definition: mpegvideo.h:363
#define COPY(a)
short DCTELEM
Definition: dsputil.h:39
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer. ...
Definition: pixfmt.h:132
#define MV_DIR_FORWARD
Definition: mpegvideo.h:385
int max_b_frames
max number of b-frames for encoding
Definition: mpegvideo.h:232
int pict_type
AV_PICTURE_TYPE_I, AV_PICTURE_TYPE_P, AV_PICTURE_TYPE_B, ...
Definition: mpegvideo.h:349
DCTELEM(* block)[64]
points to one of the following blocks
Definition: mpegvideo.h:674
int h263_pred
use mpeg4/h263 ac/dc predictions
Definition: mpegvideo.h:220
int16_t(* b_bidir_back_mv_table)[2]
MV table (1MV per MB) bidir mode b-frame encoding.
Definition: mpegvideo.h:377
static int init_context_frame(MpegEncContext *s)
Initialize and allocates MpegEncContext fields dependent on the resolution.
Definition: mpegvideo.c:726
static uint32_t state
Definition: trasher.c:27
uint8_t *[2] p_field_select_table
Definition: mpegvideo.h:381
int16_t(* b_direct_mv_table)[2]
MV table (1MV per MB) direct mode b-frame encoding.
Definition: mpegvideo.h:378
Pan Scan area.
Definition: avcodec.h:788
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: avcodec.h:997
const uint8_t * c_dc_scale_table
qscale -> c_dc_scale table
Definition: mpegvideo.h:325
static void add_dequant_dct(MpegEncContext *s, DCTELEM *block, int i, uint8_t *dest, int line_size, int qscale)
Definition: mpegvideo.c:2102
int8_t * qscale_table
QP table.
Definition: avcodec.h:1139
uint8_t level
Definition: svq3.c:125
#define IS_INTRA16x16(a)
Definition: mpegvideo.h:106
qpel_mc_func(* qpel_avg)[16]
Definition: mpegvideo.h:199
int mv[2][4][2]
motion vectors for a macroblock first coordinate : 0 = forward 1 = backward second " : depend...
Definition: mpegvideo.h:399
int16_t(* b_forw_mv_table)[2]
MV table (1MV per MB) forward mode b-frame encoding.
Definition: mpegvideo.h:374
int b8_stride
2*mb_width+1 used for some 8x8 block arrays to allow simple addressing
Definition: mpegvideo.h:249
int noise_reduction
noise reduction strength
Definition: avcodec.h:1914
#define IS_ACPRED(a)
Definition: mpegvideo.h:123
struct MpegEncContext * owner2
pointer to the MpegEncContext that allocated this picture
Definition: mpegvideo.h:148
int height
Definition: gxfenc.c:72
MpegEncContext.
Definition: mpegvideo.h:211
uint8_t run
Definition: get_bits.h:72
Picture * next_picture_ptr
pointer to the next picture (for bidir pred)
Definition: mpegvideo.h:316
#define MAX_RUN
Definition: rl.h:34
struct AVCodecContext * avctx
Definition: mpegvideo.h:213
static void draw_arrow(uint8_t *buf, int sx, int sy, int ex, int ey, int w, int h, int stride, int color)
Draw an arrow from (ex, ey) -> (sx, sy).
Definition: mpegvideo.c:1712
hardware decoding through VDA
Definition: pixfmt.h:153
discard all non reference
Definition: avcodec.h:533
int(* dct_error_sum)[64]
Definition: mpegvideo.h:467
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:65
void ff_init_vlc_rl(RLTable *rl)
Definition: mpegvideo.c:1243
common internal api header.
int mb_stride
mb_width+1 used for some arrays to allow simple addressing of left & top MBs without sig11 ...
Definition: mpegvideo.h:248
void ff_MPV_common_defaults(MpegEncContext *s)
Set the given MpegEncContext to common defaults (same for encoding and decoding). ...
Definition: mpegvideo.c:689
#define IS_PCM(a)
Definition: mpegvideo.h:107
uint8_t * dest[3]
Definition: mpegvideo.h:435
#define FF_ALLOC_OR_GOTO(ctx, p, size, label)
Definition: internal.h:60
int last_pict_type
Definition: mpegvideo.h:350
int b4_stride
4*mb_width+1 used for some 4x4 block arrays to allow simple addressing
Definition: mpegvideo.h:250
Picture last_picture
copy of the previous picture structure.
Definition: mpegvideo.h:295
uint8_t * obmc_scratchpad
Definition: mpegvideo.h:339
static int alloc_frame_buffer(MpegEncContext *s, Picture *pic)
Allocate a frame buffer.
Definition: mpegvideo.c:273
Picture * last_picture_ptr
pointer to the previous picture.
Definition: mpegvideo.h:315
Bi-dir predicted.
Definition: avutil.h:247
int index
Definition: parser.h:30
const uint8_t * chroma_qscale_table
qscale -> chroma_qscale (h263)
Definition: mpegvideo.h:326
static const uint8_t color[]
Definition: log.c:52
uint32_t * map
map to avoid duplicate evaluations
Definition: mpegvideo.h:165
int ff_update_duplicate_context(MpegEncContext *dst, MpegEncContext *src)
Definition: mpegvideo.c:547
DSP utils.
int slices
Number of slices.
Definition: avcodec.h:2095
void * priv_data
Definition: avcodec.h:1382
int picture_structure
Definition: mpegvideo.h:637
VideoDSPContext vdsp
Definition: mpegvideo.h:362
void ff_wmv2_add_mb(MpegEncContext *s, DCTELEM block[6][64], uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr)
Definition: wmv2.c:59
void ff_MPV_decode_mb(MpegEncContext *s, DCTELEM block[12][64])
Definition: mpegvideo.c:2387
int top_field_first
If the content is interlaced, is top field displayed first.
Definition: avcodec.h:1239
#define IS_DIRECT(a)
Definition: mpegvideo.h:113
int len
void ff_MPV_common_end(MpegEncContext *s)
Definition: mpegvideo.c:1141
void ff_print_debug_info(MpegEncContext *s, AVFrame *pict)
Print debugging info for the given picture.
Definition: mpegvideo.c:1743
void ff_init_scantable(uint8_t *permutation, ScanTable *st, const uint8_t *src_scantable)
Definition: dsputil.c:122
struct AVCodecInternal * internal
Private context used for internal data.
Definition: avcodec.h:1390
int ff_MPV_lowest_referenced_row(MpegEncContext *s, int dir)
find the lowest MB row referenced in the MVs
Definition: mpegvideo.c:2050
ParseContext parse_context
Definition: mpegvideo.h:512
VLC_TYPE(* table)[2]
code, bits
Definition: get_bits.h:65
Picture next_picture
copy of the next picture structure.
Definition: mpegvideo.h:301
#define EDGE_WIDTH
Definition: dsputil.h:440
int key_frame
1 -> keyframe, 0-> not
Definition: avcodec.h:1058
static const uint8_t mpeg2_dc_scale_table1[128]
Definition: mpegvideo.c:85
int linesize
line size, in bytes, may be different from width
Definition: mpegvideo.h:253
uint8_t * mbskip_table
mbskip_table[mb]>=1 if MB didn't change stride= mb_width = (width+15)>>4
Definition: avcodec.h:1158
int16_t level
Definition: get_bits.h:70
#define IS_16X16(a)
Definition: mpegvideo.h:115
Picture ** reordered_input_picture
pointer to the next pictures in codedorder for encoding
Definition: mpegvideo.h:257
int flags2
CODEC_FLAG2_*.
Definition: avcodec.h:1441
static const uint8_t mpeg2_dc_scale_table2[128]
Definition: mpegvideo.c:97
int chroma_qscale
chroma QP
Definition: mpegvideo.h:343
struct AVFrame f
Definition: mpegvideo.h:95
static void add_dct(MpegEncContext *s, DCTELEM *block, int i, uint8_t *dest, int line_size)
Definition: mpegvideo.c:2094
static int init_duplicate_context(MpegEncContext *s, MpegEncContext *base)
Definition: mpegvideo.c:455
int flags
AVCodecContext.flags (HQ, MV4, ...)
Definition: mpegvideo.h:230
uint16_t intra_matrix[64]
matrix transmitted in the bitstream
Definition: mpegvideo.h:440
int workaround_bugs
workaround bugs in encoders which cannot be detected automatically
Definition: mpegvideo.h:236
ScanTable inter_scantable
if inter == intra then intra should be used to reduce tha cache usage
Definition: mpegvideo.h:265
static void put_dct(MpegEncContext *s, DCTELEM *block, int i, uint8_t *dest, int line_size, int qscale)
Definition: mpegvideo.c:2086
uint8_t * temp
Definition: mpegvideo.h:163
int avcodec_default_get_buffer(AVCodecContext *s, AVFrame *pic)
Definition: utils.c:451
static void dct_unquantize_mpeg1_inter_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2557
void ff_thread_report_progress(AVFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
Definition: pthread.c:666
int ff_thread_get_buffer(AVCodecContext *avctx, AVFrame *f)
Wrapper around get_buffer() for frame-multithreaded codecs.
Definition: pthread.c:921
int debug_mv
debug
Definition: avcodec.h:2592
int ff_find_unused_picture(MpegEncContext *s, int shared)
Definition: mpegvideo.c:1331
#define MV_TYPE_8X8
4 vectors (h263, mpeg4 4MV)
Definition: mpegvideo.h:390
int16_t(* b_direct_mv_table_base)[2]
Definition: mpegvideo.h:370
int b_code
backward MV resolution for B Frames (mpeg4)
Definition: mpegvideo.h:364
#define CONFIG_MPEG_XVMC_DECODER
Definition: config.h:441
uint8_t ** extended_data
pointers to the data planes/channels.
Definition: avcodec.h:1028
void(* dct_unquantize_h263_intra)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:690
float * bits_tab
Definition: mpegvideo.h:713
#define restrict
Definition: config.h:8
int dct_count[2]
Definition: mpegvideo.h:468
int uvlinesize
line size, for chroma in bytes, may be different from width
Definition: mpegvideo.h:254
void ff_MPV_common_init_axp(MpegEncContext *s)
AVPixelFormat
Pixel format.
Definition: pixfmt.h:63
void * av_mallocz(size_t size)
Allocate a block of size bytes with alignment suitable for all memory accesses (including vectors if ...
Definition: mem.c:158
void ff_thread_release_buffer(AVCodecContext *avctx, AVFrame *f)
Wrapper around release_buffer() frame-for multithreaded codecs.
Definition: pthread.c:979
for(j=16;j >0;--j)
#define FF_ALLOCZ_OR_GOTO(ctx, p, size, label)
Definition: internal.h:69
Predicted.
Definition: avutil.h:246
void ff_MPV_decode_defaults(MpegEncContext *s)
Set the given MpegEncContext to defaults for decoding.
Definition: mpegvideo.c:718
if(!(ptr_align%ac->ptr_align)&&samples_align >=aligned_len)