mpegvideo.c
Go to the documentation of this file.
1 /*
2  * The simplest mpeg encoder (well, it was the simplest!)
3  * Copyright (c) 2000,2001 Fabrice Bellard
4  * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * 4MV & hq & B-frame encoding stuff by Michael Niedermayer <michaelni@gmx.at>
7  *
8  * This file is part of Libav.
9  *
10  * Libav is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Lesser General Public
12  * License as published by the Free Software Foundation; either
13  * version 2.1 of the License, or (at your option) any later version.
14  *
15  * Libav is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18  * Lesser General Public License for more details.
19  *
20  * You should have received a copy of the GNU Lesser General Public
21  * License along with Libav; if not, write to the Free Software
22  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
23  */
24 
30 #include "libavutil/imgutils.h"
31 #include "avcodec.h"
32 #include "dsputil.h"
33 #include "internal.h"
34 #include "mathops.h"
35 #include "mpegvideo.h"
36 #include "mjpegenc.h"
37 #include "msmpeg4.h"
38 #include "xvmc_internal.h"
39 #include "thread.h"
40 #include <limits.h>
41 
42 //#undef NDEBUG
43 //#include <assert.h>
44 
46  DCTELEM *block, int n, int qscale);
48  DCTELEM *block, int n, int qscale);
50  DCTELEM *block, int n, int qscale);
52  DCTELEM *block, int n, int qscale);
54  DCTELEM *block, int n, int qscale);
56  DCTELEM *block, int n, int qscale);
58  DCTELEM *block, int n, int qscale);
59 
60 
61 /* enable all paranoid tests for rounding, overflows, etc... */
62 //#define PARANOID
63 
64 //#define DEBUG
65 
66 
68 // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
69  0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
70  16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31
71 };
72 
74 // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
75  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
76  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
77  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
78  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
79  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
80  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
81  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
82  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
83 };
84 
/* MPEG-2 intra-DC quantiser scale table: a constant divisor of 4 for every
 * qscale index (0-127).
 * NOTE(review): referenced through ff_mpeg2_dc_scale_table[] below; presumably
 * the entry for intra_dc_precision == 1 (9-bit DC) — confirm against the
 * MPEG-2 decoder that indexes the outer table. */
85 static const uint8_t mpeg2_dc_scale_table1[128] = {
86 // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
87  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
88  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
89  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
90  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
91  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
92  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
93  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
94  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
95 };
96 
/* MPEG-2 intra-DC quantiser scale table: a constant divisor of 2 for every
 * qscale index (0-127).
 * NOTE(review): referenced through ff_mpeg2_dc_scale_table[] below; presumably
 * the entry for intra_dc_precision == 2 (10-bit DC) — confirm against the
 * MPEG-2 decoder that indexes the outer table. */
97 static const uint8_t mpeg2_dc_scale_table2[128] = {
98 // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
99  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
100  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
101  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
102  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
103  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
104  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
105  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
106  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
107 };
108 
/* MPEG-2 intra-DC quantiser scale table: a constant divisor of 1 (i.e. no
 * scaling) for every qscale index (0-127).
 * NOTE(review): referenced through ff_mpeg2_dc_scale_table[] below; presumably
 * the entry for intra_dc_precision == 3 (11-bit DC) — confirm against the
 * MPEG-2 decoder that indexes the outer table. */
109 static const uint8_t mpeg2_dc_scale_table3[128] = {
110 // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
111  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
112  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
113  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
114  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
115  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
116  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
117  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
118  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
119 };
120 
121 const uint8_t *const ff_mpeg2_dc_scale_table[4] = {
126 };
127 
131 };
132 
134 #if CONFIG_H264_DXVA2_HWACCEL
136 #endif
137 #if CONFIG_H264_VAAPI_HWACCEL
139 #endif
140 #if CONFIG_H264_VDA_HWACCEL
142 #endif
145 };
146 
/**
 * Scan forward for the next MPEG start-code prefix (0x000001xx).
 *
 * The running 32-bit shift register *state carries the last bytes seen across
 * calls, so a start code split between two buffers is still found.
 *
 * @param p     first byte to scan (must be <= end)
 * @param end   one past the last byte of the buffer
 * @param state in/out: last four bytes read, big-endian; on return holds the
 *              bytes ending at the returned position
 * @return pointer to the byte following the start code, or end if none found
 *
 * NOTE(review): the declaration line was truncated in this copy of the file
 * and has been reconstructed; the body is unchanged.
 */
const uint8_t *avpriv_mpv_find_start_code(const uint8_t *restrict p,
                                          const uint8_t *end,
                                          uint32_t * restrict state)
{
    int i;

    assert(p <= end);
    if (p >= end)
        return end;

    /* Feed up to three bytes through the shift register so that a prefix
     * straddling the previous buffer boundary is detected. */
    for (i = 0; i < 3; i++) {
        uint32_t tmp = *state << 8;
        *state = tmp + *(p++);
        if (tmp == 0x100 || p == end)
            return p;
    }

    /* Fast scan: step 1-3 bytes depending on how the last bytes rule out a
     * 00 00 01 prefix ending at p. */
    while (p < end) {
        if      (p[-1] > 1      ) p += 3; /* byte > 1 cannot end a prefix   */
        else if (p[-2]          ) p += 2; /* ...01 but not 00 01            */
        else if (p[-3]|(p[-1]-1)) p++;    /* not exactly 00 00 01           */
        else {
            p++;                          /* found 00 00 01 ending at p-1   */
            break;
        }
    }

    /* Reload the register with the four bytes ending at the result. */
    p = FFMIN(p, end) - 4;
    *state = AV_RB32(p);

    return p + 4;
}
179 
180 /* init common dct for both encoder and decoder */
182 {
183  ff_dsputil_init(&s->dsp, s->avctx);
185 
191  if (s->flags & CODEC_FLAG_BITEXACT)
194 
195 #if ARCH_X86
197 #elif ARCH_ALPHA
199 #elif ARCH_ARM
201 #elif HAVE_ALTIVEC
203 #elif ARCH_BFIN
205 #endif
206 
207  /* load & permutate scantables
208  * note: only wmv uses different ones
209  */
210  if (s->alternate_scan) {
213  } else {
216  }
219 
220  return 0;
221 }
222 
224 {
225  *dst = *src;
226  dst->f.type = FF_BUFFER_TYPE_COPY;
227 }
228 
233 {
234  /* WM Image / Screen codecs allocate internal buffers with different
235  * dimensions / colorspaces; ignore user-defined callbacks for these. */
236  if (s->codec_id != AV_CODEC_ID_WMV3IMAGE &&
239  ff_thread_release_buffer(s->avctx, &pic->f);
240  else
243 }
244 
246 {
247  int alloc_size = FFALIGN(FFABS(linesize) + 32, 32);
248 
249  // edge emu needs blocksize + filter length - 1
250  // (= 17x17 for halfpel / 21x21 for h264)
251  // VC1 computes luma and chroma simultaneously and needs 19X19 + 9x9
252  // at uvlinesize. It supports only YUV420 so 24x24 is enough
253  // linesize * interlaced * MBsize
254  FF_ALLOCZ_OR_GOTO(s->avctx, s->edge_emu_buffer, alloc_size * 2 * 24,
255  fail);
256 
257  FF_ALLOCZ_OR_GOTO(s->avctx, s->me.scratchpad, alloc_size * 2 * 16 * 3,
258  fail)
259  s->me.temp = s->me.scratchpad;
260  s->rd_scratchpad = s->me.scratchpad;
261  s->b_scratchpad = s->me.scratchpad;
262  s->obmc_scratchpad = s->me.scratchpad + 16;
263 
264  return 0;
265 fail:
267  return AVERROR(ENOMEM);
268 }
269 
274 {
275  int r, ret;
276 
277  if (s->avctx->hwaccel) {
278  assert(!pic->f.hwaccel_picture_private);
279  if (s->avctx->hwaccel->priv_data_size) {
281  if (!pic->f.hwaccel_picture_private) {
282  av_log(s->avctx, AV_LOG_ERROR, "alloc_frame_buffer() failed (hwaccel private data allocation)\n");
283  return -1;
284  }
285  }
286  }
287 
288  if (s->codec_id != AV_CODEC_ID_WMV3IMAGE &&
291  r = ff_thread_get_buffer(s->avctx, &pic->f);
292  else
293  r = avcodec_default_get_buffer(s->avctx, &pic->f);
294 
295  if (r < 0 || !pic->f.type || !pic->f.data[0]) {
296  av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed (%d %d %p)\n",
297  r, pic->f.type, pic->f.data[0]);
299  return -1;
300  }
301 
302  if (s->linesize && (s->linesize != pic->f.linesize[0] ||
303  s->uvlinesize != pic->f.linesize[1])) {
305  "get_buffer() failed (stride changed)\n");
306  free_frame_buffer(s, pic);
307  return -1;
308  }
309 
310  if (pic->f.linesize[1] != pic->f.linesize[2]) {
312  "get_buffer() failed (uv stride mismatch)\n");
313  free_frame_buffer(s, pic);
314  return -1;
315  }
316 
317  if (!s->edge_emu_buffer &&
318  (ret = ff_mpv_frame_size_alloc(s, pic->f.linesize[0])) < 0) {
320  "get_buffer() failed to allocate context scratch buffers.\n");
321  free_frame_buffer(s, pic);
322  return ret;
323  }
324 
325  return 0;
326 }
327 
332 int ff_alloc_picture(MpegEncContext *s, Picture *pic, int shared)
333 {
334  const int big_mb_num = s->mb_stride * (s->mb_height + 1) + 1;
335 
336  // the + 1 is needed so memset(,,stride*height) does not sig11
337 
338  const int mb_array_size = s->mb_stride * s->mb_height;
339  const int b8_array_size = s->b8_stride * s->mb_height * 2;
340  const int b4_array_size = s->b4_stride * s->mb_height * 4;
341  int i;
342  int r = -1;
343 
344  if (shared) {
345  assert(pic->f.data[0]);
346  assert(pic->f.type == 0 || pic->f.type == FF_BUFFER_TYPE_SHARED);
348  } else {
349  assert(!pic->f.data[0]);
350 
351  if (alloc_frame_buffer(s, pic) < 0)
352  return -1;
353 
354  s->linesize = pic->f.linesize[0];
355  s->uvlinesize = pic->f.linesize[1];
356  }
357 
358  if (pic->f.qscale_table == NULL) {
359  if (s->encoding) {
360  FF_ALLOCZ_OR_GOTO(s->avctx, pic->mb_var,
361  mb_array_size * sizeof(int16_t), fail)
363  mb_array_size * sizeof(int16_t), fail)
365  mb_array_size * sizeof(int8_t ), fail)
366  }
367 
369  mb_array_size * sizeof(uint8_t) + 2, fail)// the + 2 is for the slice end check
371  (big_mb_num + s->mb_stride) * sizeof(uint8_t),
372  fail)
374  (big_mb_num + s->mb_stride) * sizeof(uint32_t),
375  fail)
376  pic->f.mb_type = pic->mb_type_base + 2 * s->mb_stride + 1;
377  pic->f.qscale_table = pic->qscale_table_base + 2 * s->mb_stride + 1;
378  if (s->out_format == FMT_H264) {
379  for (i = 0; i < 2; i++) {
381  2 * (b4_array_size + 4) * sizeof(int16_t),
382  fail)
383  pic->f.motion_val[i] = pic->motion_val_base[i] + 4;
384  FF_ALLOCZ_OR_GOTO(s->avctx, pic->f.ref_index[i],
385  4 * mb_array_size * sizeof(uint8_t), fail)
386  }
387  pic->f.motion_subsample_log2 = 2;
388  } else if (s->out_format == FMT_H263 || s->encoding ||
389  (s->avctx->debug & FF_DEBUG_MV) || s->avctx->debug_mv) {
390  for (i = 0; i < 2; i++) {
392  2 * (b8_array_size + 4) * sizeof(int16_t),
393  fail)
394  pic->f.motion_val[i] = pic->motion_val_base[i] + 4;
395  FF_ALLOCZ_OR_GOTO(s->avctx, pic->f.ref_index[i],
396  4 * mb_array_size * sizeof(uint8_t), fail)
397  }
398  pic->f.motion_subsample_log2 = 3;
399  }
400  if (s->avctx->debug&FF_DEBUG_DCT_COEFF) {
402  64 * mb_array_size * sizeof(DCTELEM) * 6, fail)
403  }
404  pic->f.qstride = s->mb_stride;
406  1 * sizeof(AVPanScan), fail)
407  }
408 
409  pic->owner2 = s;
410 
411  return 0;
412 fail: // for the FF_ALLOCZ_OR_GOTO macro
413  if (r >= 0)
414  free_frame_buffer(s, pic);
415  return -1;
416 }
417 
421 static void free_picture(MpegEncContext *s, Picture *pic)
422 {
423  int i;
424 
425  if (pic->f.data[0] && pic->f.type != FF_BUFFER_TYPE_SHARED) {
426  free_frame_buffer(s, pic);
427  }
428 
429  av_freep(&pic->mb_var);
430  av_freep(&pic->mc_mb_var);
431  av_freep(&pic->mb_mean);
432  av_freep(&pic->f.mbskip_table);
434  pic->f.qscale_table = NULL;
435  av_freep(&pic->mb_type_base);
436  pic->f.mb_type = NULL;
437  av_freep(&pic->f.dct_coeff);
438  av_freep(&pic->f.pan_scan);
439  pic->f.mb_type = NULL;
440  for (i = 0; i < 2; i++) {
441  av_freep(&pic->motion_val_base[i]);
442  av_freep(&pic->f.ref_index[i]);
443  pic->f.motion_val[i] = NULL;
444  }
445 
446  if (pic->f.type == FF_BUFFER_TYPE_SHARED) {
447  for (i = 0; i < 4; i++) {
448  pic->f.base[i] =
449  pic->f.data[i] = NULL;
450  }
451  pic->f.type = 0;
452  }
453 }
454 
456 {
457  int y_size = s->b8_stride * (2 * s->mb_height + 1);
458  int c_size = s->mb_stride * (s->mb_height + 1);
459  int yc_size = y_size + 2 * c_size;
460  int i;
461 
462  s->edge_emu_buffer =
463  s->me.scratchpad =
464  s->me.temp =
465  s->rd_scratchpad =
466  s->b_scratchpad =
467  s->obmc_scratchpad = NULL;
468 
469  if (s->encoding) {
470  FF_ALLOCZ_OR_GOTO(s->avctx, s->me.map,
471  ME_MAP_SIZE * sizeof(uint32_t), fail)
473  ME_MAP_SIZE * sizeof(uint32_t), fail)
474  if (s->avctx->noise_reduction) {
476  2 * 64 * sizeof(int), fail)
477  }
478  }
479  FF_ALLOCZ_OR_GOTO(s->avctx, s->blocks, 64 * 12 * 2 * sizeof(DCTELEM), fail)
480  s->block = s->blocks[0];
481 
482  for (i = 0; i < 12; i++) {
483  s->pblocks[i] = &s->block[i];
484  }
485 
486  if (s->out_format == FMT_H263) {
487  /* ac values */
489  yc_size * sizeof(int16_t) * 16, fail);
490  s->ac_val[0] = s->ac_val_base + s->b8_stride + 1;
491  s->ac_val[1] = s->ac_val_base + y_size + s->mb_stride + 1;
492  s->ac_val[2] = s->ac_val[1] + c_size;
493  }
494 
495  return 0;
496 fail:
497  return -1; // free() through ff_MPV_common_end()
498 }
499 
501 {
502  if (s == NULL)
503  return;
504 
506  av_freep(&s->me.scratchpad);
507  s->me.temp =
508  s->rd_scratchpad =
509  s->b_scratchpad =
510  s->obmc_scratchpad = NULL;
511 
512  av_freep(&s->dct_error_sum);
513  av_freep(&s->me.map);
514  av_freep(&s->me.score_map);
515  av_freep(&s->blocks);
516  av_freep(&s->ac_val_base);
517  s->block = NULL;
518 }
519 
521 {
522 #define COPY(a) bak->a = src->a
523  COPY(edge_emu_buffer);
524  COPY(me.scratchpad);
525  COPY(me.temp);
526  COPY(rd_scratchpad);
527  COPY(b_scratchpad);
528  COPY(obmc_scratchpad);
529  COPY(me.map);
530  COPY(me.score_map);
531  COPY(blocks);
532  COPY(block);
533  COPY(start_mb_y);
534  COPY(end_mb_y);
535  COPY(me.map_generation);
536  COPY(pb);
537  COPY(dct_error_sum);
538  COPY(dct_count[0]);
539  COPY(dct_count[1]);
540  COPY(ac_val_base);
541  COPY(ac_val[0]);
542  COPY(ac_val[1]);
543  COPY(ac_val[2]);
544 #undef COPY
545 }
546 
548 {
549  MpegEncContext bak;
550  int i, ret;
551  // FIXME copy only needed parts
552  // START_TIMER
553  backup_duplicate_context(&bak, dst);
554  memcpy(dst, src, sizeof(MpegEncContext));
555  backup_duplicate_context(dst, &bak);
556  for (i = 0; i < 12; i++) {
557  dst->pblocks[i] = &dst->block[i];
558  }
559  if (!dst->edge_emu_buffer &&
560  (ret = ff_mpv_frame_size_alloc(dst, dst->linesize)) < 0) {
561  av_log(dst->avctx, AV_LOG_ERROR, "failed to allocate context "
562  "scratch buffers.\n");
563  return ret;
564  }
565  // STOP_TIMER("update_duplicate_context")
566  // about 10k cycles / 0.01 sec for 1000frames on 1ghz with 2 threads
567  return 0;
568 }
569 
571  const AVCodecContext *src)
572 {
573  int i;
574  MpegEncContext *s = dst->priv_data, *s1 = src->priv_data;
575 
576  if (dst == src || !s1->context_initialized)
577  return 0;
578 
579  // FIXME can parameters change on I-frames?
580  // in that case dst may need a reinit
581  if (!s->context_initialized) {
582  memcpy(s, s1, sizeof(MpegEncContext));
583 
584  s->avctx = dst;
587  s->bitstream_buffer = NULL;
589 
591  }
592 
593  if (s->height != s1->height || s->width != s1->width || s->context_reinit) {
594  int err;
595  s->context_reinit = 0;
596  s->height = s1->height;
597  s->width = s1->width;
598  if ((err = ff_MPV_common_frame_size_change(s)) < 0)
599  return err;
600  }
601 
602  s->avctx->coded_height = s1->avctx->coded_height;
603  s->avctx->coded_width = s1->avctx->coded_width;
604  s->avctx->width = s1->avctx->width;
605  s->avctx->height = s1->avctx->height;
606 
607  s->coded_picture_number = s1->coded_picture_number;
608  s->picture_number = s1->picture_number;
609  s->input_picture_number = s1->input_picture_number;
610 
611  memcpy(s->picture, s1->picture, s1->picture_count * sizeof(Picture));
612  memcpy(&s->last_picture, &s1->last_picture,
613  (char *) &s1->last_picture_ptr - (char *) &s1->last_picture);
614 
615  // reset s->picture[].f.extended_data to s->picture[].f.data
616  for (i = 0; i < s->picture_count; i++)
617  s->picture[i].f.extended_data = s->picture[i].f.data;
618 
619  s->last_picture_ptr = REBASE_PICTURE(s1->last_picture_ptr, s, s1);
620  s->current_picture_ptr = REBASE_PICTURE(s1->current_picture_ptr, s, s1);
621  s->next_picture_ptr = REBASE_PICTURE(s1->next_picture_ptr, s, s1);
622 
623  // Error/bug resilience
624  s->next_p_frame_damaged = s1->next_p_frame_damaged;
625  s->workaround_bugs = s1->workaround_bugs;
626 
627  // MPEG4 timing info
628  memcpy(&s->time_increment_bits, &s1->time_increment_bits,
629  (char *) &s1->shape - (char *) &s1->time_increment_bits);
630 
631  // B-frame info
632  s->max_b_frames = s1->max_b_frames;
633  s->low_delay = s1->low_delay;
634  s->droppable = s1->droppable;
635 
636  // DivX handling (doesn't work)
637  s->divx_packed = s1->divx_packed;
638 
639  if (s1->bitstream_buffer) {
640  if (s1->bitstream_buffer_size +
644  s1->allocated_bitstream_buffer_size);
645  s->bitstream_buffer_size = s1->bitstream_buffer_size;
646  memcpy(s->bitstream_buffer, s1->bitstream_buffer,
647  s1->bitstream_buffer_size);
648  memset(s->bitstream_buffer + s->bitstream_buffer_size, 0,
650  }
651 
652  // linesize dependend scratch buffer allocation
653  if (!s->edge_emu_buffer)
654  if (s1->linesize) {
655  if (ff_mpv_frame_size_alloc(s, s1->linesize) < 0) {
656  av_log(s->avctx, AV_LOG_ERROR, "Failed to allocate context "
657  "scratch buffers.\n");
658  return AVERROR(ENOMEM);
659  }
660  } else {
661  av_log(s->avctx, AV_LOG_ERROR, "Context scratch buffers could not "
662  "be allocated due to unknown size.\n");
663  return AVERROR_BUG;
664  }
665 
666  // MPEG2/interlacing info
667  memcpy(&s->progressive_sequence, &s1->progressive_sequence,
668  (char *) &s1->rtp_mode - (char *) &s1->progressive_sequence);
669 
670  if (!s1->first_field) {
671  s->last_pict_type = s1->pict_type;
672  if (s1->current_picture_ptr)
673  s->last_lambda_for[s1->pict_type] = s1->current_picture_ptr->f.quality;
674 
675  if (s1->pict_type != AV_PICTURE_TYPE_B) {
676  s->last_non_b_pict_type = s1->pict_type;
677  }
678  }
679 
680  return 0;
681 }
682 
690 {
691  s->y_dc_scale_table =
694  s->progressive_frame = 1;
695  s->progressive_sequence = 1;
697 
698  s->coded_picture_number = 0;
699  s->picture_number = 0;
700  s->input_picture_number = 0;
701 
702  s->picture_in_gop_number = 0;
703 
704  s->f_code = 1;
705  s->b_code = 1;
706 
707  s->picture_range_start = 0;
709 
710  s->slice_context_count = 1;
711 }
712 
719 {
721 }
722 
727 {
728  int y_size, c_size, yc_size, i, mb_array_size, mv_table_size, x, y;
729 
730  s->mb_width = (s->width + 15) / 16;
731  s->mb_stride = s->mb_width + 1;
732  s->b8_stride = s->mb_width * 2 + 1;
733  s->b4_stride = s->mb_width * 4 + 1;
734  mb_array_size = s->mb_height * s->mb_stride;
735  mv_table_size = (s->mb_height + 2) * s->mb_stride + 1;
736 
737  /* set default edge pos, will be overriden
738  * in decode_header if needed */
739  s->h_edge_pos = s->mb_width * 16;
740  s->v_edge_pos = s->mb_height * 16;
741 
742  s->mb_num = s->mb_width * s->mb_height;
743 
744  s->block_wrap[0] =
745  s->block_wrap[1] =
746  s->block_wrap[2] =
747  s->block_wrap[3] = s->b8_stride;
748  s->block_wrap[4] =
749  s->block_wrap[5] = s->mb_stride;
750 
751  y_size = s->b8_stride * (2 * s->mb_height + 1);
752  c_size = s->mb_stride * (s->mb_height + 1);
753  yc_size = y_size + 2 * c_size;
754 
755  FF_ALLOCZ_OR_GOTO(s->avctx, s->mb_index2xy, (s->mb_num + 1) * sizeof(int),
756  fail); // error ressilience code looks cleaner with this
757  for (y = 0; y < s->mb_height; y++)
758  for (x = 0; x < s->mb_width; x++)
759  s->mb_index2xy[x + y * s->mb_width] = x + y * s->mb_stride;
760 
761  s->mb_index2xy[s->mb_height * s->mb_width] =
762  (s->mb_height - 1) * s->mb_stride + s->mb_width; // FIXME really needed?
763 
764  if (s->encoding) {
765  /* Allocate MV tables */
767  mv_table_size * 2 * sizeof(int16_t), fail);
769  mv_table_size * 2 * sizeof(int16_t), fail);
771  mv_table_size * 2 * sizeof(int16_t), fail);
773  mv_table_size * 2 * sizeof(int16_t), fail);
775  mv_table_size * 2 * sizeof(int16_t), fail);
777  mv_table_size * 2 * sizeof(int16_t), fail);
778  s->p_mv_table = s->p_mv_table_base + s->mb_stride + 1;
782  s->mb_stride + 1;
784  s->mb_stride + 1;
786 
787  /* Allocate MB type table */
788  FF_ALLOCZ_OR_GOTO(s->avctx, s->mb_type, mb_array_size *
789  sizeof(uint16_t), fail); // needed for encoding
790 
791  FF_ALLOCZ_OR_GOTO(s->avctx, s->lambda_table, mb_array_size *
792  sizeof(int), fail);
793 
795  mb_array_size * sizeof(float), fail);
797  mb_array_size * sizeof(float), fail);
798 
799  }
800 
802  mb_array_size * sizeof(uint8_t), fail);
804  mb_array_size * sizeof(uint8_t), fail);
805 
806  if (s->codec_id == AV_CODEC_ID_MPEG4 ||
808  /* interlaced direct mode decoding tables */
809  for (i = 0; i < 2; i++) {
810  int j, k;
811  for (j = 0; j < 2; j++) {
812  for (k = 0; k < 2; k++) {
814  s->b_field_mv_table_base[i][j][k],
815  mv_table_size * 2 * sizeof(int16_t),
816  fail);
817  s->b_field_mv_table[i][j][k] = s->b_field_mv_table_base[i][j][k] +
818  s->mb_stride + 1;
819  }
821  mb_array_size * 2 * sizeof(uint8_t), fail);
823  mv_table_size * 2 * sizeof(int16_t), fail);
824  s->p_field_mv_table[i][j] = s->p_field_mv_table_base[i][j]
825  + s->mb_stride + 1;
826  }
828  mb_array_size * 2 * sizeof(uint8_t), fail);
829  }
830  }
831  if (s->out_format == FMT_H263) {
832  /* cbp values */
833  FF_ALLOCZ_OR_GOTO(s->avctx, s->coded_block_base, y_size, fail);
834  s->coded_block = s->coded_block_base + s->b8_stride + 1;
835 
836  /* cbp, ac_pred, pred_dir */
838  mb_array_size * sizeof(uint8_t), fail);
840  mb_array_size * sizeof(uint8_t), fail);
841  }
842 
843  if (s->h263_pred || s->h263_plus || !s->encoding) {
844  /* dc values */
845  // MN: we need these for error resilience of intra-frames
847  yc_size * sizeof(int16_t), fail);
848  s->dc_val[0] = s->dc_val_base + s->b8_stride + 1;
849  s->dc_val[1] = s->dc_val_base + y_size + s->mb_stride + 1;
850  s->dc_val[2] = s->dc_val[1] + c_size;
851  for (i = 0; i < yc_size; i++)
852  s->dc_val_base[i] = 1024;
853  }
854 
855  /* which mb is a intra block */
856  FF_ALLOCZ_OR_GOTO(s->avctx, s->mbintra_table, mb_array_size, fail);
857  memset(s->mbintra_table, 1, mb_array_size);
858 
859  /* init macroblock skip table */
860  FF_ALLOCZ_OR_GOTO(s->avctx, s->mbskip_table, mb_array_size + 2, fail);
861  // Note the + 1 is for a quicker mpeg4 slice_end detection
862 
864  s->avctx->debug_mv) {
865  s->visualization_buffer[0] = av_malloc((s->mb_width * 16 +
866  2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
867  s->visualization_buffer[1] = av_malloc((s->mb_width * 16 +
868  2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
869  s->visualization_buffer[2] = av_malloc((s->mb_width * 16 +
870  2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
871  }
872 
873  return 0;
874 fail:
875  return AVERROR(ENOMEM);
876 }
877 
883 {
884  int i;
885  int nb_slices = (HAVE_THREADS &&
887  s->avctx->thread_count : 1;
888 
889  if (s->encoding && s->avctx->slices)
890  nb_slices = s->avctx->slices;
891 
893  s->mb_height = (s->height + 31) / 32 * 2;
894  else if (s->codec_id != AV_CODEC_ID_H264)
895  s->mb_height = (s->height + 15) / 16;
896 
897  if (s->avctx->pix_fmt == AV_PIX_FMT_NONE) {
899  "decoding to AV_PIX_FMT_NONE is not supported.\n");
900  return -1;
901  }
902 
903  if (nb_slices > MAX_THREADS || (nb_slices > s->mb_height && s->mb_height)) {
904  int max_slices;
905  if (s->mb_height)
906  max_slices = FFMIN(MAX_THREADS, s->mb_height);
907  else
908  max_slices = MAX_THREADS;
909  av_log(s->avctx, AV_LOG_WARNING, "too many threads/slices (%d),"
910  " reducing to %d\n", nb_slices, max_slices);
911  nb_slices = max_slices;
912  }
913 
914  if ((s->width || s->height) &&
915  av_image_check_size(s->width, s->height, 0, s->avctx))
916  return -1;
917 
919 
920  s->flags = s->avctx->flags;
921  s->flags2 = s->avctx->flags2;
922 
923  /* set chroma shifts */
925  &s->chroma_x_shift,
926  &s->chroma_y_shift);
927 
928  /* convert fourcc to upper case */
930 
932 
933  if (s->width && s->height) {
935 
936  if (s->encoding) {
937  if (s->msmpeg4_version) {
939  2 * 2 * (MAX_LEVEL + 1) *
940  (MAX_RUN + 1) * 2 * sizeof(int), fail);
941  }
942  FF_ALLOCZ_OR_GOTO(s->avctx, s->avctx->stats_out, 256, fail);
943 
945  64 * 32 * sizeof(int), fail);
947  64 * 32 * sizeof(int), fail);
949  64 * 32 * 2 * sizeof(uint16_t), fail);
951  64 * 32 * 2 * sizeof(uint16_t), fail);
953  MAX_PICTURE_COUNT * sizeof(Picture *), fail);
955  MAX_PICTURE_COUNT * sizeof(Picture *), fail);
956 
957  if (s->avctx->noise_reduction) {
959  2 * 64 * sizeof(uint16_t), fail);
960  }
961  }
962  }
963 
966  s->picture_count * sizeof(Picture), fail);
967  for (i = 0; i < s->picture_count; i++) {
969  }
970 
971  if (s->width && s->height) {
972  if (init_context_frame(s))
973  goto fail;
974 
975  s->parse_context.state = -1;
976  }
977 
978  s->context_initialized = 1;
979  s->thread_context[0] = s;
980 
981  if (s->width && s->height) {
982  if (nb_slices > 1) {
983  for (i = 1; i < nb_slices; i++) {
984  s->thread_context[i] = av_malloc(sizeof(MpegEncContext));
985  memcpy(s->thread_context[i], s, sizeof(MpegEncContext));
986  }
987 
988  for (i = 0; i < nb_slices; i++) {
989  if (init_duplicate_context(s->thread_context[i], s) < 0)
990  goto fail;
991  s->thread_context[i]->start_mb_y =
992  (s->mb_height * (i) + nb_slices / 2) / nb_slices;
993  s->thread_context[i]->end_mb_y =
994  (s->mb_height * (i + 1) + nb_slices / 2) / nb_slices;
995  }
996  } else {
997  if (init_duplicate_context(s, s) < 0)
998  goto fail;
999  s->start_mb_y = 0;
1000  s->end_mb_y = s->mb_height;
1001  }
1002  s->slice_context_count = nb_slices;
1003  }
1004 
1005  return 0;
1006  fail:
1007  ff_MPV_common_end(s);
1008  return -1;
1009 }
1010 
1017 {
1018  int i, j, k;
1019 
1020  av_freep(&s->mb_type);
1027  s->p_mv_table = NULL;
1028  s->b_forw_mv_table = NULL;
1029  s->b_back_mv_table = NULL;
1032  s->b_direct_mv_table = NULL;
1033  for (i = 0; i < 2; i++) {
1034  for (j = 0; j < 2; j++) {
1035  for (k = 0; k < 2; k++) {
1036  av_freep(&s->b_field_mv_table_base[i][j][k]);
1037  s->b_field_mv_table[i][j][k] = NULL;
1038  }
1039  av_freep(&s->b_field_select_table[i][j]);
1040  av_freep(&s->p_field_mv_table_base[i][j]);
1041  s->p_field_mv_table[i][j] = NULL;
1042  }
1044  }
1045 
1046  av_freep(&s->dc_val_base);
1048  av_freep(&s->mbintra_table);
1049  av_freep(&s->cbp_table);
1050  av_freep(&s->pred_dir_table);
1051 
1052  av_freep(&s->mbskip_table);
1053 
1055  av_freep(&s->er_temp_buffer);
1056  av_freep(&s->mb_index2xy);
1057  av_freep(&s->lambda_table);
1058  av_freep(&s->cplx_tab);
1059  av_freep(&s->bits_tab);
1060 
1061  s->linesize = s->uvlinesize = 0;
1062 
1063  for (i = 0; i < 3; i++)
1065 
1066  return 0;
1067 }
1068 
1070 {
1071  int i, err = 0;
1072 
1073  if (s->slice_context_count > 1) {
1074  for (i = 0; i < s->slice_context_count; i++) {
1076  }
1077  for (i = 1; i < s->slice_context_count; i++) {
1078  av_freep(&s->thread_context[i]);
1079  }
1080  } else
1082 
1083  free_context_frame(s);
1084 
1085  if (s->picture)
1086  for (i = 0; i < s->picture_count; i++) {
1087  s->picture[i].needs_realloc = 1;
1088  }
1089 
1090  s->last_picture_ptr =
1091  s->next_picture_ptr =
1093 
1094  // init
1096  s->mb_height = (s->height + 31) / 32 * 2;
1097  else if (s->codec_id != AV_CODEC_ID_H264)
1098  s->mb_height = (s->height + 15) / 16;
1099 
1100  if ((s->width || s->height) &&
1101  av_image_check_size(s->width, s->height, 0, s->avctx))
1102  return AVERROR_INVALIDDATA;
1103 
1104  if ((err = init_context_frame(s)))
1105  goto fail;
1106 
1107  s->thread_context[0] = s;
1108 
1109  if (s->width && s->height) {
1110  int nb_slices = s->slice_context_count;
1111  if (nb_slices > 1) {
1112  for (i = 1; i < nb_slices; i++) {
1113  s->thread_context[i] = av_malloc(sizeof(MpegEncContext));
1114  memcpy(s->thread_context[i], s, sizeof(MpegEncContext));
1115  }
1116 
1117  for (i = 0; i < nb_slices; i++) {
1118  if (init_duplicate_context(s->thread_context[i], s) < 0)
1119  goto fail;
1120  s->thread_context[i]->start_mb_y =
1121  (s->mb_height * (i) + nb_slices / 2) / nb_slices;
1122  s->thread_context[i]->end_mb_y =
1123  (s->mb_height * (i + 1) + nb_slices / 2) / nb_slices;
1124  }
1125  } else {
1126  if (init_duplicate_context(s, s) < 0)
1127  goto fail;
1128  s->start_mb_y = 0;
1129  s->end_mb_y = s->mb_height;
1130  }
1131  s->slice_context_count = nb_slices;
1132  }
1133 
1134  return 0;
1135  fail:
1136  ff_MPV_common_end(s);
1137  return err;
1138 }
1139 
1140 /* init common structure for both encoder and decoder */
1142 {
1143  int i;
1144 
1145  if (s->slice_context_count > 1) {
1146  for (i = 0; i < s->slice_context_count; i++) {
1148  }
1149  for (i = 1; i < s->slice_context_count; i++) {
1150  av_freep(&s->thread_context[i]);
1151  }
1152  s->slice_context_count = 1;
1153  } else free_duplicate_context(s);
1154 
1156  s->parse_context.buffer_size = 0;
1157 
1160 
1161  av_freep(&s->avctx->stats_out);
1162  av_freep(&s->ac_stats);
1163 
1164  av_freep(&s->q_intra_matrix);
1165  av_freep(&s->q_inter_matrix);
1168  av_freep(&s->input_picture);
1170  av_freep(&s->dct_offset);
1171 
1172  if (s->picture && !s->avctx->internal->is_copy) {
1173  for (i = 0; i < s->picture_count; i++) {
1174  free_picture(s, &s->picture[i]);
1175  }
1176  }
1177  av_freep(&s->picture);
1178 
1179  free_context_frame(s);
1180 
1183 
1184  s->context_initialized = 0;
1185  s->last_picture_ptr =
1186  s->next_picture_ptr =
1188  s->linesize = s->uvlinesize = 0;
1189 }
1190 
1192  uint8_t static_store[2][2 * MAX_RUN + MAX_LEVEL + 3])
1193 {
1194  int8_t max_level[MAX_RUN + 1], max_run[MAX_LEVEL + 1];
1195  uint8_t index_run[MAX_RUN + 1];
1196  int last, run, level, start, end, i;
1197 
1198  /* If table is static, we can quit if rl->max_level[0] is not NULL */
1199  if (static_store && rl->max_level[0])
1200  return;
1201 
1202  /* compute max_level[], max_run[] and index_run[] */
1203  for (last = 0; last < 2; last++) {
1204  if (last == 0) {
1205  start = 0;
1206  end = rl->last;
1207  } else {
1208  start = rl->last;
1209  end = rl->n;
1210  }
1211 
1212  memset(max_level, 0, MAX_RUN + 1);
1213  memset(max_run, 0, MAX_LEVEL + 1);
1214  memset(index_run, rl->n, MAX_RUN + 1);
1215  for (i = start; i < end; i++) {
1216  run = rl->table_run[i];
1217  level = rl->table_level[i];
1218  if (index_run[run] == rl->n)
1219  index_run[run] = i;
1220  if (level > max_level[run])
1221  max_level[run] = level;
1222  if (run > max_run[level])
1223  max_run[level] = run;
1224  }
1225  if (static_store)
1226  rl->max_level[last] = static_store[last];
1227  else
1228  rl->max_level[last] = av_malloc(MAX_RUN + 1);
1229  memcpy(rl->max_level[last], max_level, MAX_RUN + 1);
1230  if (static_store)
1231  rl->max_run[last] = static_store[last] + MAX_RUN + 1;
1232  else
1233  rl->max_run[last] = av_malloc(MAX_LEVEL + 1);
1234  memcpy(rl->max_run[last], max_run, MAX_LEVEL + 1);
1235  if (static_store)
1236  rl->index_run[last] = static_store[last] + MAX_RUN + MAX_LEVEL + 2;
1237  else
1238  rl->index_run[last] = av_malloc(MAX_RUN + 1);
1239  memcpy(rl->index_run[last], index_run, MAX_RUN + 1);
1240  }
1241 }
1242 
1244 {
1245  int i, q;
1246 
1247  for (q = 0; q < 32; q++) {
1248  int qmul = q * 2;
1249  int qadd = (q - 1) | 1;
1250 
1251  if (q == 0) {
1252  qmul = 1;
1253  qadd = 0;
1254  }
1255  for (i = 0; i < rl->vlc.table_size; i++) {
1256  int code = rl->vlc.table[i][0];
1257  int len = rl->vlc.table[i][1];
1258  int level, run;
1259 
1260  if (len == 0) { // illegal code
1261  run = 66;
1262  level = MAX_LEVEL;
1263  } else if (len < 0) { // more bits needed
1264  run = 0;
1265  level = code;
1266  } else {
1267  if (code == rl->n) { // esc
1268  run = 66;
1269  level = 0;
1270  } else {
1271  run = rl->table_run[code] + 1;
1272  level = rl->table_level[code] * qmul + qadd;
1273  if (code >= rl->last) run += 192;
1274  }
1275  }
1276  rl->rl_vlc[q][i].len = len;
1277  rl->rl_vlc[q][i].level = level;
1278  rl->rl_vlc[q][i].run = run;
1279  }
1280  }
1281 }
1282 
1283 void ff_release_unused_pictures(MpegEncContext*s, int remove_current)
1284 {
1285  int i;
1286 
1287  /* release non reference frames */
1288  for (i = 0; i < s->picture_count; i++) {
1289  if (s->picture[i].f.data[0] && !s->picture[i].f.reference &&
1290  (!s->picture[i].owner2 || s->picture[i].owner2 == s) &&
1291  (remove_current || &s->picture[i] != s->current_picture_ptr)
1292  /* && s->picture[i].type!= FF_BUFFER_TYPE_SHARED */) {
1293  free_frame_buffer(s, &s->picture[i]);
1294  }
1295  }
1296 }
1297 
1298 static inline int pic_is_unused(MpegEncContext *s, Picture *pic)
1299 {
1300  if (pic->f.data[0] == NULL)
1301  return 1;
1302  if (pic->needs_realloc && !(pic->f.reference & DELAYED_PIC_REF))
1303  if (!pic->owner2 || pic->owner2 == s)
1304  return 1;
1305  return 0;
1306 }
1307 
1308 static int find_unused_picture(MpegEncContext *s, int shared)
1309 {
1310  int i;
1311 
1312  if (shared) {
1313  for (i = s->picture_range_start; i < s->picture_range_end; i++) {
1314  if (s->picture[i].f.data[0] == NULL && s->picture[i].f.type == 0)
1315  return i;
1316  }
1317  } else {
1318  for (i = s->picture_range_start; i < s->picture_range_end; i++) {
1319  if (pic_is_unused(s, &s->picture[i]) && s->picture[i].f.type != 0)
1320  return i; // FIXME
1321  }
1322  for (i = s->picture_range_start; i < s->picture_range_end; i++) {
1323  if (pic_is_unused(s, &s->picture[i]))
1324  return i;
1325  }
1326  }
1327 
1328  return AVERROR_INVALIDDATA;
1329 }
1330 
/* Public wrapper around find_unused_picture(): additionally, if the chosen
 * slot was flagged needs_realloc, clear the flag and free its stale buffer
 * so the caller gets a clean slot.
 * NOTE(review): the signature (original line 1331) was lost in extraction;
 * presumably "int ff_find_unused_picture(MpegEncContext *s, int shared)".
 * Original line 1339 inside the inner block is also missing -- confirm
 * against the original source. */
1332 {
1333  int ret = find_unused_picture(s, shared);
1334 
1335  if (ret >= 0 && ret < s->picture_range_end) {
1336  if (s->picture[ret].needs_realloc) {
1337  s->picture[ret].needs_realloc = 0;
1338  free_picture(s, &s->picture[ret]);
1340  }
1341  }
1342  return ret;
1343 }
1344 
/* Refresh the per-coefficient noise-reduction offsets from the running DCT
 * error statistics, for both the inter (0) and intra (1) contexts. Once a
 * context has accumulated more than 2^16 blocks, both the error sums and
 * the count are halved so the statistics track recent content.
 * NOTE(review): the signature (original line 1345) was lost in extraction;
 * presumably "static void update_noise_reduction(MpegEncContext *s)" --
 * confirm against the original source. */
1346 {
1347  int intra, i;
1348 
1349  for (intra = 0; intra < 2; intra++) {
1350  if (s->dct_count[intra] > (1 << 16)) {
1351  for (i = 0; i < 64; i++) {
1352  s->dct_error_sum[intra][i] >>= 1;
1353  }
1354  s->dct_count[intra] >>= 1;
1355  }
1356 
/* offset[i] = noise_reduction * count / error_sum[i], rounded; the +1 in
 * the denominator avoids division by zero for coefficients never seen. */
1357  for (i = 0; i < 64; i++) {
1358  s->dct_offset[intra][i] = (s->avctx->noise_reduction *
1359  s->dct_count[intra] +
1360  s->dct_error_sum[intra][i] / 2) /
1361  (s->dct_error_sum[intra][i] + 1);
1362  }
1363  }
1364 }
1365 
/* Prepare for decoding/encoding of the next frame: release stale pictures,
 * select or allocate the current Picture, set its reference flags, allocate
 * dummy last/next reference pictures when the stream lacks them (broken or
 * field-based first keyframes), fix up field-picture pointers/linesizes and
 * pick the dequantizer functions for this frame.
 * NOTE(review): extraction dropped the signature (original line 1370;
 * presumably "int ff_MPV_frame_start(MpegEncContext *s, AVCodecContext
 * *avctx)") and many statement lines (e.g. 1379, 1382, 1403, 1427, 1434,
 * 1436, 1438-1443, 1446, 1449-1462, 1513, 1515, 1534, 1550-1557, 1562,
 * 1565). The code below is INCOMPLETE -- do not treat it as compilable;
 * confirm every gap against the original source. */
1371 {
1372  int i;
1373  Picture *pic;
1374  s->mb_skipped = 0;
1375 
1376  /* mark & release old frames */
1377  if (s->out_format != FMT_H264 || s->codec_id == AV_CODEC_ID_SVQ3) {
1378  if (s->pict_type != AV_PICTURE_TYPE_B && s->last_picture_ptr &&
1380  s->last_picture_ptr->f.data[0]) {
1381  if (s->last_picture_ptr->owner2 == s)
1383  }
1384 
1385  /* release forgotten pictures */
1386  /* if (mpeg124/h263) */
1387  if (!s->encoding) {
1388  for (i = 0; i < s->picture_count; i++) {
1389  if (s->picture[i].owner2 == s && s->picture[i].f.data[0] &&
1390  &s->picture[i] != s->last_picture_ptr &&
1391  &s->picture[i] != s->next_picture_ptr &&
1392  s->picture[i].f.reference && !s->picture[i].needs_realloc) {
1393  if (!(avctx->active_thread_type & FF_THREAD_FRAME))
1394  av_log(avctx, AV_LOG_ERROR,
1395  "releasing zombie picture\n");
1396  free_frame_buffer(s, &s->picture[i]);
1397  }
1398  }
1399  }
1400  }
1401 
/* pick (or reuse a pre-set) slot for the current picture and allocate it */
1402  if (!s->encoding) {
1404 
1405  if (s->current_picture_ptr &&
1406  s->current_picture_ptr->f.data[0] == NULL) {
1407  // we already have a unused image
1408  // (maybe it was set before reading the header)
1409  pic = s->current_picture_ptr;
1410  } else {
1411  i = ff_find_unused_picture(s, 0);
1412  if (i < 0) {
1413  av_log(s->avctx, AV_LOG_ERROR, "no frame buffer available\n");
1414  return i;
1415  }
1416  pic = &s->picture[i];
1417  }
1418 
1419  pic->f.reference = 0;
1420  if (!s->droppable) {
1421  if (s->codec_id == AV_CODEC_ID_H264)
1422  pic->f.reference = s->picture_structure;
1423  else if (s->pict_type != AV_PICTURE_TYPE_B)
1424  pic->f.reference = 3;
1425  }
1426 
1428 
1429  if (ff_alloc_picture(s, pic, 0) < 0)
1430  return -1;
1431 
1432  s->current_picture_ptr = pic;
1433  // FIXME use only the vars from current_pic
1435  if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO ||
1437  if (s->picture_structure != PICT_FRAME)
1440  }
1444  }
1445 
1447  // if (s->flags && CODEC_FLAG_QSCALE)
1448  // s->current_picture_ptr->quality = s->new_picture_ptr->quality;
1450 
1452 
1453  if (s->pict_type != AV_PICTURE_TYPE_B) {
1455  if (!s->droppable)
1457  }
1458  av_dlog(s->avctx, "L%p N%p C%p L%p N%p C%p type:%d drop:%d\n",
1463  s->pict_type, s->droppable);
1464 
/* non-H.264 decoders: synthesize dummy reference pictures when a P/B frame
 * arrives without a usable last (or, for B, next) reference */
1465  if (s->codec_id != AV_CODEC_ID_H264) {
1466  if ((s->last_picture_ptr == NULL ||
1467  s->last_picture_ptr->f.data[0] == NULL) &&
1468  (s->pict_type != AV_PICTURE_TYPE_I ||
1469  s->picture_structure != PICT_FRAME)) {
1470  if (s->pict_type != AV_PICTURE_TYPE_I)
1471  av_log(avctx, AV_LOG_ERROR,
1472  "warning: first frame is no keyframe\n");
1473  else if (s->picture_structure != PICT_FRAME)
1474  av_log(avctx, AV_LOG_INFO,
1475  "allocate dummy last picture for field based first keyframe\n");
1476 
1477  /* Allocate a dummy frame */
1478  i = ff_find_unused_picture(s, 0);
1479  if (i < 0) {
1480  av_log(s->avctx, AV_LOG_ERROR, "no frame buffer available\n");
1481  return i;
1482  }
1483  s->last_picture_ptr = &s->picture[i];
1484  if (ff_alloc_picture(s, s->last_picture_ptr, 0) < 0) {
1485  s->last_picture_ptr = NULL;
1486  return -1;
1487  }
1488  ff_thread_report_progress(&s->last_picture_ptr->f, INT_MAX, 0);
1489  ff_thread_report_progress(&s->last_picture_ptr->f, INT_MAX, 1);
1490  s->last_picture_ptr->f.reference = 3;
1491  }
1492  if ((s->next_picture_ptr == NULL ||
1493  s->next_picture_ptr->f.data[0] == NULL) &&
1494  s->pict_type == AV_PICTURE_TYPE_B) {
1495  /* Allocate a dummy frame */
1496  i = ff_find_unused_picture(s, 0);
1497  if (i < 0) {
1498  av_log(s->avctx, AV_LOG_ERROR, "no frame buffer available\n");
1499  return i;
1500  }
1501  s->next_picture_ptr = &s->picture[i];
1502  if (ff_alloc_picture(s, s->next_picture_ptr, 0) < 0) {
1503  s->next_picture_ptr = NULL;
1504  return -1;
1505  }
1506  ff_thread_report_progress(&s->next_picture_ptr->f, INT_MAX, 0);
1507  ff_thread_report_progress(&s->next_picture_ptr->f, INT_MAX, 1);
1508  s->next_picture_ptr->f.reference = 3;
1509  }
1510  }
1511 
1512  if (s->last_picture_ptr)
1514  if (s->next_picture_ptr)
1516 
/* under frame threading, claim the reference pictures for this context */
1517  if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_FRAME)) {
1518  if (s->next_picture_ptr)
1519  s->next_picture_ptr->owner2 = s;
1520  if (s->last_picture_ptr)
1521  s->last_picture_ptr->owner2 = s;
1522  }
1523 
1524  if (s->pict_type != AV_PICTURE_TYPE_I &&
1525  !(s->last_picture_ptr && s->last_picture_ptr->f.data[0])) {
1526  av_log(s, AV_LOG_ERROR,
1527  "Non-reference picture received and no reference available\n");
1528  return AVERROR_INVALIDDATA;
1529  }
1530 
/* field pictures: double the linesizes so each field is addressed as a
 * half-height frame (data pointer offset line elided at original 1534) */
1531  if (s->picture_structure!= PICT_FRAME && s->out_format != FMT_H264) {
1532  int i;
1533  for (i = 0; i < 4; i++) {
1535  s->current_picture.f.data[i] +=
1536  s->current_picture.f.linesize[i];
1537  }
1538  s->current_picture.f.linesize[i] *= 2;
1539  s->last_picture.f.linesize[i] *= 2;
1540  s->next_picture.f.linesize[i] *= 2;
1541  }
1542  }
1543 
1544  s->err_recognition = avctx->err_recognition;
1545 
1546  /* set dequantizer, we can't do it during init as
1547  * it might change for mpeg4 and we can't do it in the header
1548  * decode as init is not called for mpeg4 there yet */
1549  if (s->mpeg_quant || s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
1552  } else if (s->out_format == FMT_H263 || s->out_format == FMT_H261) {
1555  } else {
1558  }
1559 
1560  if (s->dct_error_sum) {
1561  assert(s->avctx->noise_reduction && s->encoding);
1563  }
1564 
1566  return ff_xvmc_field_start(s, avctx);
1567 
1568  return 0;
1569 }
1570 
1571 /* generic function for encode/decode called after a
1572  * frame has been coded/decoded. */
/* Finish the current frame: redraw the padding edges when needed, flush the
 * FPU/MMX state (emms_c), record the picture type, and (when encoding)
 * release all non-reference frame buffers.
 * NOTE(review): extraction dropped the signature (original line 1573;
 * presumably "void ff_MPV_frame_end(MpegEncContext *s)") and several
 * statement lines (1578, 1582, 1584, 1587, 1590, 1592, 1594, 1598, 1607,
 * 1609, 1637, 1639-1640, including the draw_edges() call heads). The code
 * below is INCOMPLETE -- confirm against the original source. */
1574 {
1575  int i;
1576  /* redraw edges for the frame if decoding didn't complete */
1577  // just to make sure that all data is rendered.
1579  ff_xvmc_field_end(s);
1580  } else if ((s->error_count || s->encoding) &&
1581  !s->avctx->hwaccel &&
1583  s->unrestricted_mv &&
1585  !s->intra_only &&
1586  !(s->flags & CODEC_FLAG_EMU_EDGE)) {
1588  int hshift = desc->log2_chroma_w;
1589  int vshift = desc->log2_chroma_h;
1591  s->h_edge_pos, s->v_edge_pos,
1593  EDGE_TOP | EDGE_BOTTOM);
1595  s->h_edge_pos >> hshift, s->v_edge_pos >> vshift,
1596  EDGE_WIDTH >> hshift, EDGE_WIDTH >> vshift,
1597  EDGE_TOP | EDGE_BOTTOM);
1599  s->h_edge_pos >> hshift, s->v_edge_pos >> vshift,
1600  EDGE_WIDTH >> hshift, EDGE_WIDTH >> vshift,
1601  EDGE_TOP | EDGE_BOTTOM);
1602  }
1603 
1604  emms_c();
1605 
1606  s->last_pict_type = s->pict_type;
1608  if (s->pict_type!= AV_PICTURE_TYPE_B) {
1610  }
1611 #if 0
1612  /* copy back current_picture variables */
1613  for (i = 0; i < MAX_PICTURE_COUNT; i++) {
1614  if (s->picture[i].f.data[0] == s->current_picture.f.data[0]) {
1615  s->picture[i] = s->current_picture;
1616  break;
1617  }
1618  }
1619  assert(i < MAX_PICTURE_COUNT);
1620 #endif
1621 
1622  if (s->encoding) {
1623  /* release non-reference frames */
1624  for (i = 0; i < s->picture_count; i++) {
1625  if (s->picture[i].f.data[0] && !s->picture[i].f.reference
1626  /* && s->picture[i].type != FF_BUFFER_TYPE_SHARED */) {
1627  free_frame_buffer(s, &s->picture[i]);
1628  }
1629  }
1630  }
1631  // clear copies, to avoid confusion
1632 #if 0
1633  memset(&s->last_picture, 0, sizeof(Picture));
1634  memset(&s->next_picture, 0, sizeof(Picture));
1635  memset(&s->current_picture, 0, sizeof(Picture));
1636 #endif
1638 
1641  }
1642 }
1643 
1651 static void draw_line(uint8_t *buf, int sx, int sy, int ex, int ey,
1652  int w, int h, int stride, int color)
1653 {
1654  int x, y, fr, f;
1655 
1656  sx = av_clip(sx, 0, w - 1);
1657  sy = av_clip(sy, 0, h - 1);
1658  ex = av_clip(ex, 0, w - 1);
1659  ey = av_clip(ey, 0, h - 1);
1660 
1661  buf[sy * stride + sx] += color;
1662 
1663  if (FFABS(ex - sx) > FFABS(ey - sy)) {
1664  if (sx > ex) {
1665  FFSWAP(int, sx, ex);
1666  FFSWAP(int, sy, ey);
1667  }
1668  buf += sx + sy * stride;
1669  ex -= sx;
1670  f = ((ey - sy) << 16) / ex;
1671  for (x = 0; x <= ex; x++) {
1672  y = (x * f) >> 16;
1673  fr = (x * f) & 0xFFFF;
1674  buf[y * stride + x] += (color * (0x10000 - fr)) >> 16;
1675  buf[(y + 1) * stride + x] += (color * fr ) >> 16;
1676  }
1677  } else {
1678  if (sy > ey) {
1679  FFSWAP(int, sx, ex);
1680  FFSWAP(int, sy, ey);
1681  }
1682  buf += sx + sy * stride;
1683  ey -= sy;
1684  if (ey)
1685  f = ((ex - sx) << 16) / ey;
1686  else
1687  f = 0;
1688  for (y = 0; y = ey; y++) {
1689  x = (y * f) >> 16;
1690  fr = (y * f) & 0xFFFF;
1691  buf[y * stride + x] += (color * (0x10000 - fr)) >> 16;
1692  buf[y * stride + x + 1] += (color * fr ) >> 16;
1693  }
1694  }
1695 }
1696 
/**
 * Draw an arrow from (sx, sy) to (ex, ey), i.e. the shaft plus two head
 * segments at the start point.
 *
 * @param w      width of the image
 * @param h      height of the image
 * @param stride stride/linesize of the image
 * @param color  intensity of the arrow
 */
static void draw_arrow(uint8_t *buf, int sx, int sy, int ex,
                       int ey, int w, int h, int stride, int color)
{
    int dx, dy;

    /* clip with a generous margin so off-screen endpoints keep the angle */
    sx = av_clip(sx, -100, w + 100);
    sy = av_clip(sy, -100, h + 100);
    ex = av_clip(ex, -100, w + 100);
    ey = av_clip(ey, -100, h + 100);

    dx = ex - sx;
    dy = ey - sy;

    /* head segments only for arrows longer than 3 pixels */
    if (dx * dx + dy * dy > 3 * 3) {
        int rx     = dx + dy;            /* direction rotated by -45 deg */
        int ry     = dy - dx;            /* direction rotated by +45 deg */
        int length = ff_sqrt((rx * rx + ry * ry) << 8);

        // FIXME subpixel accuracy
        rx = ROUNDED_DIV(rx * 3 << 4, length);
        ry = ROUNDED_DIV(ry * 3 << 4, length);

        draw_line(buf, sx, sy, sx + rx, sy + ry, w, h, stride, color);
        draw_line(buf, sx, sy, sx - ry, sy + rx, w, h, stride, color);
    }

    /* the shaft itself */
    draw_line(buf, sx, sy, ex, ey, w, h, stride, color);
}
1731 
/* Print and/or visualize per-macroblock debug information for a decoded
 * picture: (1) a textual per-MB dump (skip count, QP, MB type) depending on
 * s->avctx->debug flags, and (2) an in-picture visualization of motion
 * vectors, QP and MB types drawn into a copy of the frame.
 * NOTE(review): extraction dropped the signature (original lines 1732-1735;
 * presumably "void ff_print_debug_info(MpegEncContext *s, AVFrame *pict)")
 * and some statement lines (1740, 1843). The code below is INCOMPLETE --
 * confirm against the original source. */
1736 {
1737  if (s->avctx->hwaccel || !pict || !pict->mb_type)
1738  return;
1739 
1741  int x,y;
1742 
/* pass 1: textual per-macroblock dump */
1743  av_log(s->avctx,AV_LOG_DEBUG,"New frame, type: ");
1744  switch (pict->pict_type) {
1745  case AV_PICTURE_TYPE_I:
1746  av_log(s->avctx,AV_LOG_DEBUG,"I\n");
1747  break;
1748  case AV_PICTURE_TYPE_P:
1749  av_log(s->avctx,AV_LOG_DEBUG,"P\n");
1750  break;
1751  case AV_PICTURE_TYPE_B:
1752  av_log(s->avctx,AV_LOG_DEBUG,"B\n");
1753  break;
1754  case AV_PICTURE_TYPE_S:
1755  av_log(s->avctx,AV_LOG_DEBUG,"S\n");
1756  break;
1757  case AV_PICTURE_TYPE_SI:
1758  av_log(s->avctx,AV_LOG_DEBUG,"SI\n");
1759  break;
1760  case AV_PICTURE_TYPE_SP:
1761  av_log(s->avctx,AV_LOG_DEBUG,"SP\n");
1762  break;
1763  }
1764  for (y = 0; y < s->mb_height; y++) {
1765  for (x = 0; x < s->mb_width; x++) {
1766  if (s->avctx->debug & FF_DEBUG_SKIP) {
1767  int count = s->mbskip_table[x + y * s->mb_stride];
1768  if (count > 9)
1769  count = 9;
1770  av_log(s->avctx, AV_LOG_DEBUG, "%1d", count);
1771  }
1772  if (s->avctx->debug & FF_DEBUG_QP) {
1773  av_log(s->avctx, AV_LOG_DEBUG, "%2d",
1774  pict->qscale_table[x + y * s->mb_stride]);
1775  }
1776  if (s->avctx->debug & FF_DEBUG_MB_TYPE) {
1777  int mb_type = pict->mb_type[x + y * s->mb_stride];
1778  // Type & MV direction
1779  if (IS_PCM(mb_type))
1780  av_log(s->avctx, AV_LOG_DEBUG, "P");
1781  else if (IS_INTRA(mb_type) && IS_ACPRED(mb_type))
1782  av_log(s->avctx, AV_LOG_DEBUG, "A");
1783  else if (IS_INTRA4x4(mb_type))
1784  av_log(s->avctx, AV_LOG_DEBUG, "i");
1785  else if (IS_INTRA16x16(mb_type))
1786  av_log(s->avctx, AV_LOG_DEBUG, "I");
1787  else if (IS_DIRECT(mb_type) && IS_SKIP(mb_type))
1788  av_log(s->avctx, AV_LOG_DEBUG, "d");
1789  else if (IS_DIRECT(mb_type))
1790  av_log(s->avctx, AV_LOG_DEBUG, "D");
1791  else if (IS_GMC(mb_type) && IS_SKIP(mb_type))
1792  av_log(s->avctx, AV_LOG_DEBUG, "g");
1793  else if (IS_GMC(mb_type))
1794  av_log(s->avctx, AV_LOG_DEBUG, "G");
1795  else if (IS_SKIP(mb_type))
1796  av_log(s->avctx, AV_LOG_DEBUG, "S");
1797  else if (!USES_LIST(mb_type, 1))
1798  av_log(s->avctx, AV_LOG_DEBUG, ">");
1799  else if (!USES_LIST(mb_type, 0))
1800  av_log(s->avctx, AV_LOG_DEBUG, "<");
1801  else {
1802  assert(USES_LIST(mb_type, 0) && USES_LIST(mb_type, 1));
1803  av_log(s->avctx, AV_LOG_DEBUG, "X");
1804  }
1805 
1806  // segmentation
1807  if (IS_8X8(mb_type))
1808  av_log(s->avctx, AV_LOG_DEBUG, "+");
1809  else if (IS_16X8(mb_type))
1810  av_log(s->avctx, AV_LOG_DEBUG, "-");
1811  else if (IS_8X16(mb_type))
1812  av_log(s->avctx, AV_LOG_DEBUG, "|");
1813  else if (IS_INTRA(mb_type) || IS_16X16(mb_type))
1814  av_log(s->avctx, AV_LOG_DEBUG, " ");
1815  else
1816  av_log(s->avctx, AV_LOG_DEBUG, "?");
1817 
1818 
1819  if (IS_INTERLACED(mb_type))
1820  av_log(s->avctx, AV_LOG_DEBUG, "=");
1821  else
1822  av_log(s->avctx, AV_LOG_DEBUG, " ");
1823  }
1824  }
1825  av_log(s->avctx, AV_LOG_DEBUG, "\n");
1826  }
1827  }
1828 
/* pass 2: visual overlay (MVs / QP / MB type) drawn into a frame copy */
1829  if ((s->avctx->debug & (FF_DEBUG_VIS_QP | FF_DEBUG_VIS_MB_TYPE)) ||
1830  (s->avctx->debug_mv)) {
1831  const int shift = 1 + s->quarter_sample;
1832  int mb_y;
1833  uint8_t *ptr;
1834  int i;
1835  int h_chroma_shift, v_chroma_shift, block_height;
1836  const int width = s->avctx->width;
1837  const int height = s->avctx->height;
1838  const int mv_sample_log2 = 4 - pict->motion_subsample_log2;
1839  const int mv_stride = (s->mb_width << mv_sample_log2) +
1840  (s->codec_id == AV_CODEC_ID_H264 ? 0 : 1);
1841  s->low_delay = 0; // needed to see the vectors without trashing the buffers
1842 
1844  &h_chroma_shift, &v_chroma_shift);
/* draw into a private copy so the real output buffers stay untouched */
1845  for (i = 0; i < 3; i++) {
1846  memcpy(s->visualization_buffer[i], pict->data[i],
1847  (i == 0) ? pict->linesize[i] * height:
1848  pict->linesize[i] * height >> v_chroma_shift);
1849  pict->data[i] = s->visualization_buffer[i];
1850  }
1851  pict->type = FF_BUFFER_TYPE_COPY;
1852  ptr = pict->data[0];
1853  block_height = 16 >> v_chroma_shift;
1854 
1855  for (mb_y = 0; mb_y < s->mb_height; mb_y++) {
1856  int mb_x;
1857  for (mb_x = 0; mb_x < s->mb_width; mb_x++) {
1858  const int mb_index = mb_x + mb_y * s->mb_stride;
1859  if ((s->avctx->debug_mv) && pict->motion_val) {
1860  int type;
/* type 0: P-forward, 1: B-forward, 2: B-backward vectors */
1861  for (type = 0; type < 3; type++) {
1862  int direction = 0;
1863  switch (type) {
1864  case 0:
1865  if ((!(s->avctx->debug_mv & FF_DEBUG_VIS_MV_P_FOR)) ||
1866  (pict->pict_type!= AV_PICTURE_TYPE_P))
1867  continue;
1868  direction = 0;
1869  break;
1870  case 1:
1871  if ((!(s->avctx->debug_mv & FF_DEBUG_VIS_MV_B_FOR)) ||
1872  (pict->pict_type!= AV_PICTURE_TYPE_B))
1873  continue;
1874  direction = 0;
1875  break;
1876  case 2:
1877  if ((!(s->avctx->debug_mv & FF_DEBUG_VIS_MV_B_BACK)) ||
1878  (pict->pict_type!= AV_PICTURE_TYPE_B))
1879  continue;
1880  direction = 1;
1881  break;
1882  }
1883  if (!USES_LIST(pict->mb_type[mb_index], direction))
1884  continue;
1885 
1886  if (IS_8X8(pict->mb_type[mb_index])) {
1887  int i;
1888  for (i = 0; i < 4; i++) {
1889  int sx = mb_x * 16 + 4 + 8 * (i & 1);
1890  int sy = mb_y * 16 + 4 + 8 * (i >> 1);
1891  int xy = (mb_x * 2 + (i & 1) +
1892  (mb_y * 2 + (i >> 1)) * mv_stride) << (mv_sample_log2 - 1);
1893  int mx = (pict->motion_val[direction][xy][0] >> shift) + sx;
1894  int my = (pict->motion_val[direction][xy][1] >> shift) + sy;
1895  draw_arrow(ptr, sx, sy, mx, my, width,
1896  height, s->linesize, 100);
1897  }
1898  } else if (IS_16X8(pict->mb_type[mb_index])) {
1899  int i;
1900  for (i = 0; i < 2; i++) {
1901  int sx = mb_x * 16 + 8;
1902  int sy = mb_y * 16 + 4 + 8 * i;
1903  int xy = (mb_x * 2 + (mb_y * 2 + i) * mv_stride) << (mv_sample_log2 - 1);
1904  int mx = (pict->motion_val[direction][xy][0] >> shift);
1905  int my = (pict->motion_val[direction][xy][1] >> shift);
1906 
1907  if (IS_INTERLACED(pict->mb_type[mb_index]))
1908  my *= 2;
1909 
1910  draw_arrow(ptr, sx, sy, mx + sx, my + sy, width,
1911  height, s->linesize, 100);
1912  }
1913  } else if (IS_8X16(pict->mb_type[mb_index])) {
1914  int i;
1915  for (i = 0; i < 2; i++) {
1916  int sx = mb_x * 16 + 4 + 8 * i;
1917  int sy = mb_y * 16 + 8;
1918  int xy = (mb_x * 2 + i + mb_y * 2 * mv_stride) << (mv_sample_log2 - 1);
1919  int mx = pict->motion_val[direction][xy][0] >> shift;
1920  int my = pict->motion_val[direction][xy][1] >> shift;
1921 
1922  if (IS_INTERLACED(pict->mb_type[mb_index]))
1923  my *= 2;
1924 
1925  draw_arrow(ptr, sx, sy, mx + sx, my + sy, width,
1926  height, s->linesize, 100);
1927  }
1928  } else {
1929  int sx = mb_x * 16 + 8;
1930  int sy = mb_y * 16 + 8;
1931  int xy = (mb_x + mb_y * mv_stride) << mv_sample_log2;
/* NOTE(review): ">> shift + sx" parses as ">> (shift + sx)" due to
 * operator precedence; the upstream source has "(... >> shift) + sx"
 * here -- likely a transcription defect in these two lines. */
1932  int mx = pict->motion_val[direction][xy][0] >> shift + sx;
1933  int my = pict->motion_val[direction][xy][1] >> shift + sy;
1934  draw_arrow(ptr, sx, sy, mx, my, width, height, s->linesize, 100);
1935  }
1936  }
1937  }
1938  if ((s->avctx->debug & FF_DEBUG_VIS_QP) && pict->motion_val) {
1939  uint64_t c = (pict->qscale_table[mb_index] * 128 / 31) *
1940  0x0101010101010101ULL;
1941  int y;
1942  for (y = 0; y < block_height; y++) {
1943  *(uint64_t *)(pict->data[1] + 8 * mb_x +
1944  (block_height * mb_y + y) *
1945  pict->linesize[1]) = c;
1946  *(uint64_t *)(pict->data[2] + 8 * mb_x +
1947  (block_height * mb_y + y) *
1948  pict->linesize[2]) = c;
1949  }
1950  }
1951  if ((s->avctx->debug & FF_DEBUG_VIS_MB_TYPE) &&
1952  pict->motion_val) {
1953  int mb_type = pict->mb_type[mb_index];
1954  uint64_t u,v;
1955  int y;
/* COLOR(): pick a U/V chroma pair on a hue circle of radius r */
1956 #define COLOR(theta, r) \
1957  u = (int)(128 + r * cos(theta * 3.141592 / 180)); \
1958  v = (int)(128 + r * sin(theta * 3.141592 / 180));
1959 
1960 
1961  u = v = 128;
1962  if (IS_PCM(mb_type)) {
1963  COLOR(120, 48)
1964  } else if ((IS_INTRA(mb_type) && IS_ACPRED(mb_type)) ||
1965  IS_INTRA16x16(mb_type)) {
1966  COLOR(30, 48)
1967  } else if (IS_INTRA4x4(mb_type)) {
1968  COLOR(90, 48)
1969  } else if (IS_DIRECT(mb_type) && IS_SKIP(mb_type)) {
1970  // COLOR(120, 48)
1971  } else if (IS_DIRECT(mb_type)) {
1972  COLOR(150, 48)
1973  } else if (IS_GMC(mb_type) && IS_SKIP(mb_type)) {
1974  COLOR(170, 48)
1975  } else if (IS_GMC(mb_type)) {
1976  COLOR(190, 48)
1977  } else if (IS_SKIP(mb_type)) {
1978  // COLOR(180, 48)
1979  } else if (!USES_LIST(mb_type, 1)) {
1980  COLOR(240, 48)
1981  } else if (!USES_LIST(mb_type, 0)) {
1982  COLOR(0, 48)
1983  } else {
1984  assert(USES_LIST(mb_type, 0) && USES_LIST(mb_type, 1));
1985  COLOR(300,48)
1986  }
1987 
1988  u *= 0x0101010101010101ULL;
1989  v *= 0x0101010101010101ULL;
1990  for (y = 0; y < block_height; y++) {
1991  *(uint64_t *)(pict->data[1] + 8 * mb_x +
1992  (block_height * mb_y + y) * pict->linesize[1]) = u;
1993  *(uint64_t *)(pict->data[2] + 8 * mb_x +
1994  (block_height * mb_y + y) * pict->linesize[2]) = v;
1995  }
1996 
1997  // segmentation
1998  if (IS_8X8(mb_type) || IS_16X8(mb_type)) {
1999  *(uint64_t *)(pict->data[0] + 16 * mb_x + 0 +
2000  (16 * mb_y + 8) * pict->linesize[0]) ^= 0x8080808080808080ULL;
2001  *(uint64_t *)(pict->data[0] + 16 * mb_x + 8 +
2002  (16 * mb_y + 8) * pict->linesize[0]) ^= 0x8080808080808080ULL;
2003  }
2004  if (IS_8X8(mb_type) || IS_8X16(mb_type)) {
2005  for (y = 0; y < 16; y++)
2006  pict->data[0][16 * mb_x + 8 + (16 * mb_y + y) *
2007  pict->linesize[0]] ^= 0x80;
2008  }
2009  if (IS_8X8(mb_type) && mv_sample_log2 >= 2) {
2010  int dm = 1 << (mv_sample_log2 - 2);
2011  for (i = 0; i < 4; i++) {
2012  int sx = mb_x * 16 + 8 * (i & 1);
2013  int sy = mb_y * 16 + 8 * (i >> 1);
2014  int xy = (mb_x * 2 + (i & 1) +
2015  (mb_y * 2 + (i >> 1)) * mv_stride) << (mv_sample_log2 - 1);
2016  // FIXME bidir
2017  int32_t *mv = (int32_t *) &pict->motion_val[0][xy];
2018  if (mv[0] != mv[dm] ||
2019  mv[dm * mv_stride] != mv[dm * (mv_stride + 1)])
2020  for (y = 0; y < 8; y++)
2021  pict->data[0][sx + 4 + (sy + y) * pict->linesize[0]] ^= 0x80;
2022  if (mv[0] != mv[dm * mv_stride] || mv[dm] != mv[dm * (mv_stride + 1)])
2023  *(uint64_t *)(pict->data[0] + sx + (sy + 4) *
2024  pict->linesize[0]) ^= 0x8080808080808080ULL;
2025  }
2026  }
2027 
2028  if (IS_INTERLACED(mb_type) &&
2029  s->codec_id == AV_CODEC_ID_H264) {
2030  // hmm
2031  }
2032  }
2033  s->mbskip_table[mb_index] = 0;
2034  }
2035  }
2036  }
2037 }
2038 
/* Compute the lowest macroblock row of the reference frame that the current
 * macroblock's motion vectors can touch (used for frame-threading progress
 * waits). Falls back conservatively to the last row for unhandled cases
 * (field pictures, GMC, exotic MV types).
 * NOTE(review): the signature (original line 2042) was lost in extraction;
 * presumably "static int lowest_referenced_row(MpegEncContext *s, int dir)"
 * or similar taking the prediction direction -- confirm against the
 * original source. */
2043 {
2044  int my_max = INT_MIN, my_min = INT_MAX, qpel_shift = !s->quarter_sample;
2045  int my, off, i, mvs;
2046 
2047  if (s->picture_structure != PICT_FRAME || s->mcsel)
2048  goto unhandled;
2049 
/* number of vertical MVs to inspect depends on the partitioning */
2050  switch (s->mv_type) {
2051  case MV_TYPE_16X16:
2052  mvs = 1;
2053  break;
2054  case MV_TYPE_16X8:
2055  mvs = 2;
2056  break;
2057  case MV_TYPE_8X8:
2058  mvs = 4;
2059  break;
2060  default:
2061  goto unhandled;
2062  }
2063 
2064  for (i = 0; i < mvs; i++) {
2065  my = s->mv[dir][i][1]<<qpel_shift;
2066  my_max = FFMAX(my_max, my);
2067  my_min = FFMIN(my_min, my);
2068  }
2069 
/* convert the worst-case quarter-pel displacement to whole MB rows */
2070  off = (FFMAX(-my_min, my_max) + 63) >> 6;
2071 
2072  return FFMIN(FFMAX(s->mb_y + off, 0), s->mb_height-1);
2073 unhandled:
2074  return s->mb_height-1;
2075 }
2076 
2077 /* put block[] to dest[] */
2078 static inline void put_dct(MpegEncContext *s,
2079  DCTELEM *block, int i, uint8_t *dest, int line_size, int qscale)
2080 {
2081  s->dct_unquantize_intra(s, block, i, qscale);
2082  s->dsp.idct_put (dest, line_size, block);
2083 }
2084 
2085 /* add block[] to dest[] */
2086 static inline void add_dct(MpegEncContext *s,
2087  DCTELEM *block, int i, uint8_t *dest, int line_size)
2088 {
2089  if (s->block_last_index[i] >= 0) {
2090  s->dsp.idct_add (dest, line_size, block);
2091  }
2092 }
2093 
2094 static inline void add_dequant_dct(MpegEncContext *s,
2095  DCTELEM *block, int i, uint8_t *dest, int line_size, int qscale)
2096 {
2097  if (s->block_last_index[i] >= 0) {
2098  s->dct_unquantize_inter(s, block, i, qscale);
2099 
2100  s->dsp.idct_add (dest, line_size, block);
2101  }
2102 }
2103 
/* Reset the intra prediction state for the current macroblock: set the four
 * luma and two chroma DC predictors back to their neutral value (1024),
 * clear the AC prediction arrays, clear the coded-block flags for MSMPEG4
 * v3+, and mark the MB as non-intra in mbintra_table.
 * NOTE(review): the signature (original line 2107) was lost in extraction;
 * presumably "void ff_clean_intra_table_entries(MpegEncContext *s)" --
 * confirm against the original source. */
2108 {
2109  int wrap = s->b8_stride;
2110  int xy = s->block_index[0];
2111 
/* luma: the four 8x8 blocks of this MB sit at xy, xy+1, xy+wrap, xy+wrap+1 */
2112  s->dc_val[0][xy ] =
2113  s->dc_val[0][xy + 1 ] =
2114  s->dc_val[0][xy + wrap] =
2115  s->dc_val[0][xy + 1 + wrap] = 1024;
2116  /* ac pred */
2117  memset(s->ac_val[0][xy ], 0, 32 * sizeof(int16_t));
2118  memset(s->ac_val[0][xy + wrap], 0, 32 * sizeof(int16_t));
2119  if (s->msmpeg4_version>=3) {
2120  s->coded_block[xy ] =
2121  s->coded_block[xy + 1 ] =
2122  s->coded_block[xy + wrap] =
2123  s->coded_block[xy + 1 + wrap] = 0;
2124  }
2125  /* chroma */
2126  wrap = s->mb_stride;
2127  xy = s->mb_x + s->mb_y * wrap;
2128  s->dc_val[1][xy] =
2129  s->dc_val[2][xy] = 1024;
2130  /* ac pred */
2131  memset(s->ac_val[1][xy], 0, 16 * sizeof(int16_t));
2132  memset(s->ac_val[2][xy], 0, 16 * sizeof(int16_t));
2133 
2134  s->mbintra_table[xy]= 0;
2135 }
2136 
2137 /* generic function called after a macroblock has been parsed by the
2138  decoder or after it has been encoded by the encoder.
2139 
2140  Important variables used:
2141  s->mb_intra : true if intra macroblock
2142  s->mv_dir : motion vector direction
2143  s->mv_type : motion vector type
2144  s->mv : motion vector
2145  s->interlaced_dct : true if interlaced dct used (mpeg2)
2146  */
/* NOTE(review): extraction dropped part of the signature (original line
 * 2148; presumably "void MPV_decode_mb_internal(MpegEncContext *s, DCTELEM
 * block[12][64],") and several statement lines (2152, 2177, 2231, 2233-2234,
 * 2238-2239, 2262-2263, 2269, 2317, 2322). The code below is INCOMPLETE --
 * confirm against the original source. The is_mpeg12 flag lets the compiler
 * specialize MPEG-1/2 vs other codecs (see ff_MPV_decode_mb below). */
2147 static av_always_inline
2149  int is_mpeg12)
2150 {
2151  const int mb_xy = s->mb_y * s->mb_stride + s->mb_x;
2153  ff_xvmc_decode_mb(s);//xvmc uses pblocks
2154  return;
2155  }
2156 
2157  if(s->avctx->debug&FF_DEBUG_DCT_COEFF) {
2158  /* save DCT coefficients */
2159  int i,j;
2160  DCTELEM *dct = &s->current_picture.f.dct_coeff[mb_xy * 64 * 6];
2161  av_log(s->avctx, AV_LOG_DEBUG, "DCT coeffs of MB at %dx%d:\n", s->mb_x, s->mb_y);
2162  for(i=0; i<6; i++){
2163  for(j=0; j<64; j++){
2164  *dct++ = block[i][s->dsp.idct_permutation[j]];
2165  av_log(s->avctx, AV_LOG_DEBUG, "%5d", dct[-1]);
2166  }
2167  av_log(s->avctx, AV_LOG_DEBUG, "\n");
2168  }
2169  }
2170 
2171  s->current_picture.f.qscale_table[mb_xy] = s->qscale;
2172 
2173  /* update DC predictors for P macroblocks */
2174  if (!s->mb_intra) {
2175  if (!is_mpeg12 && (s->h263_pred || s->h263_aic)) {
2176  if(s->mbintra_table[mb_xy])
2178  } else {
2179  s->last_dc[0] =
2180  s->last_dc[1] =
2181  s->last_dc[2] = 128 << s->intra_dc_precision;
2182  }
2183  }
2184  else if (!is_mpeg12 && (s->h263_pred || s->h263_aic))
2185  s->mbintra_table[mb_xy]=1;
2186 
2187  if ((s->flags&CODEC_FLAG_PSNR) || !(s->encoding && (s->intra_only || s->pict_type==AV_PICTURE_TYPE_B) && s->avctx->mb_decision != FF_MB_DECISION_RD)) { //FIXME precalc
2188  uint8_t *dest_y, *dest_cb, *dest_cr;
2189  int dct_linesize, dct_offset;
2190  op_pixels_func (*op_pix)[4];
2191  qpel_mc_func (*op_qpix)[16];
2192  const int linesize = s->current_picture.f.linesize[0]; //not s->linesize as this would be wrong for field pics
2193  const int uvlinesize = s->current_picture.f.linesize[1];
2194  const int readable= s->pict_type != AV_PICTURE_TYPE_B || s->encoding || s->avctx->draw_horiz_band;
2195  const int block_size = 8;
2196 
2197  /* avoid copy if macroblock skipped in last frame too */
2198  /* skip only during decoding as we might trash the buffers during encoding a bit */
2199  if(!s->encoding){
2200  uint8_t *mbskip_ptr = &s->mbskip_table[mb_xy];
2201 
2202  if (s->mb_skipped) {
2203  s->mb_skipped= 0;
2204  assert(s->pict_type!=AV_PICTURE_TYPE_I);
2205  *mbskip_ptr = 1;
2206  } else if(!s->current_picture.f.reference) {
2207  *mbskip_ptr = 1;
2208  } else{
2209  *mbskip_ptr = 0; /* not skipped */
2210  }
2211  }
2212 
2213  dct_linesize = linesize << s->interlaced_dct;
2214  dct_offset = s->interlaced_dct ? linesize : linesize * block_size;
2215 
2216  if(readable){
2217  dest_y= s->dest[0];
2218  dest_cb= s->dest[1];
2219  dest_cr= s->dest[2];
2220  }else{
2221  dest_y = s->b_scratchpad;
2222  dest_cb= s->b_scratchpad+16*linesize;
2223  dest_cr= s->b_scratchpad+32*linesize;
2224  }
2225 
/* inter MB: run motion compensation first, then add the residual */
2226  if (!s->mb_intra) {
2227  /* motion handling */
2228  /* decoding or more than one mb_type (MC was already done otherwise) */
2229  if(!s->encoding){
2230 
2232  if (s->mv_dir & MV_DIR_FORWARD) {
2235  0);
2236  }
2237  if (s->mv_dir & MV_DIR_BACKWARD) {
2240  0);
2241  }
2242  }
2243 
2244  op_qpix= s->me.qpel_put;
2245  if ((!s->no_rounding) || s->pict_type==AV_PICTURE_TYPE_B){
2246  op_pix = s->dsp.put_pixels_tab;
2247  }else{
2248  op_pix = s->dsp.put_no_rnd_pixels_tab;
2249  }
2250  if (s->mv_dir & MV_DIR_FORWARD) {
2251  ff_MPV_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_picture.f.data, op_pix, op_qpix);
2252  op_pix = s->dsp.avg_pixels_tab;
2253  op_qpix= s->me.qpel_avg;
2254  }
2255  if (s->mv_dir & MV_DIR_BACKWARD) {
2256  ff_MPV_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_picture.f.data, op_pix, op_qpix);
2257  }
2258  }
2259 
2260  /* skip dequant / idct if we are really late ;) */
2261  if(s->avctx->skip_idct){
2264  || s->avctx->skip_idct >= AVDISCARD_ALL)
2265  goto skip_idct;
2266  }
2267 
2268  /* add dct residue */
2270  || (s->codec_id==AV_CODEC_ID_MPEG4 && !s->mpeg_quant))){
2271  add_dequant_dct(s, block[0], 0, dest_y , dct_linesize, s->qscale);
2272  add_dequant_dct(s, block[1], 1, dest_y + block_size, dct_linesize, s->qscale);
2273  add_dequant_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize, s->qscale);
2274  add_dequant_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize, s->qscale);
2275 
2276  if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2277  if (s->chroma_y_shift){
2278  add_dequant_dct(s, block[4], 4, dest_cb, uvlinesize, s->chroma_qscale);
2279  add_dequant_dct(s, block[5], 5, dest_cr, uvlinesize, s->chroma_qscale);
2280  }else{
2281  dct_linesize >>= 1;
2282  dct_offset >>=1;
2283  add_dequant_dct(s, block[4], 4, dest_cb, dct_linesize, s->chroma_qscale);
2284  add_dequant_dct(s, block[5], 5, dest_cr, dct_linesize, s->chroma_qscale);
2285  add_dequant_dct(s, block[6], 6, dest_cb + dct_offset, dct_linesize, s->chroma_qscale);
2286  add_dequant_dct(s, block[7], 7, dest_cr + dct_offset, dct_linesize, s->chroma_qscale);
2287  }
2288  }
2289  } else if(is_mpeg12 || (s->codec_id != AV_CODEC_ID_WMV2)){
2290  add_dct(s, block[0], 0, dest_y , dct_linesize);
2291  add_dct(s, block[1], 1, dest_y + block_size, dct_linesize);
2292  add_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize);
2293  add_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize);
2294 
2295  if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2296  if(s->chroma_y_shift){//Chroma420
2297  add_dct(s, block[4], 4, dest_cb, uvlinesize);
2298  add_dct(s, block[5], 5, dest_cr, uvlinesize);
2299  }else{
2300  //chroma422
2301  dct_linesize = uvlinesize << s->interlaced_dct;
2302  dct_offset = s->interlaced_dct ? uvlinesize : uvlinesize * 8;
2303 
2304  add_dct(s, block[4], 4, dest_cb, dct_linesize);
2305  add_dct(s, block[5], 5, dest_cr, dct_linesize);
2306  add_dct(s, block[6], 6, dest_cb+dct_offset, dct_linesize);
2307  add_dct(s, block[7], 7, dest_cr+dct_offset, dct_linesize);
2308  if(!s->chroma_x_shift){//Chroma444
2309  add_dct(s, block[8], 8, dest_cb+8, dct_linesize);
2310  add_dct(s, block[9], 9, dest_cr+8, dct_linesize);
2311  add_dct(s, block[10], 10, dest_cb+8+dct_offset, dct_linesize);
2312  add_dct(s, block[11], 11, dest_cr+8+dct_offset, dct_linesize);
2313  }
2314  }
2315  }//fi gray
2316  }
2318  ff_wmv2_add_mb(s, block, dest_y, dest_cb, dest_cr);
2319  }
2320  } else {
2321  /* dct only in intra block */
2323  put_dct(s, block[0], 0, dest_y , dct_linesize, s->qscale);
2324  put_dct(s, block[1], 1, dest_y + block_size, dct_linesize, s->qscale);
2325  put_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize, s->qscale);
2326  put_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize, s->qscale);
2327 
2328  if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2329  if(s->chroma_y_shift){
2330  put_dct(s, block[4], 4, dest_cb, uvlinesize, s->chroma_qscale);
2331  put_dct(s, block[5], 5, dest_cr, uvlinesize, s->chroma_qscale);
2332  }else{
2333  dct_offset >>=1;
2334  dct_linesize >>=1;
2335  put_dct(s, block[4], 4, dest_cb, dct_linesize, s->chroma_qscale);
2336  put_dct(s, block[5], 5, dest_cr, dct_linesize, s->chroma_qscale);
2337  put_dct(s, block[6], 6, dest_cb + dct_offset, dct_linesize, s->chroma_qscale);
2338  put_dct(s, block[7], 7, dest_cr + dct_offset, dct_linesize, s->chroma_qscale);
2339  }
2340  }
2341  }else{
2342  s->dsp.idct_put(dest_y , dct_linesize, block[0]);
2343  s->dsp.idct_put(dest_y + block_size, dct_linesize, block[1]);
2344  s->dsp.idct_put(dest_y + dct_offset , dct_linesize, block[2]);
2345  s->dsp.idct_put(dest_y + dct_offset + block_size, dct_linesize, block[3]);
2346 
2347  if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2348  if(s->chroma_y_shift){
2349  s->dsp.idct_put(dest_cb, uvlinesize, block[4]);
2350  s->dsp.idct_put(dest_cr, uvlinesize, block[5]);
2351  }else{
2352 
2353  dct_linesize = uvlinesize << s->interlaced_dct;
2354  dct_offset = s->interlaced_dct ? uvlinesize : uvlinesize * 8;
2355 
2356  s->dsp.idct_put(dest_cb, dct_linesize, block[4]);
2357  s->dsp.idct_put(dest_cr, dct_linesize, block[5]);
2358  s->dsp.idct_put(dest_cb + dct_offset, dct_linesize, block[6]);
2359  s->dsp.idct_put(dest_cr + dct_offset, dct_linesize, block[7]);
2360  if(!s->chroma_x_shift){//Chroma444
2361  s->dsp.idct_put(dest_cb + 8, dct_linesize, block[8]);
2362  s->dsp.idct_put(dest_cr + 8, dct_linesize, block[9]);
2363  s->dsp.idct_put(dest_cb + 8 + dct_offset, dct_linesize, block[10]);
2364  s->dsp.idct_put(dest_cr + 8 + dct_offset, dct_linesize, block[11]);
2365  }
2366  }
2367  }//gray
2368  }
2369  }
2370 skip_idct:
/* if we rendered into the scratchpad, copy the result to the real dest */
2371  if(!readable){
2372  s->dsp.put_pixels_tab[0][0](s->dest[0], dest_y , linesize,16);
2373  s->dsp.put_pixels_tab[s->chroma_x_shift][0](s->dest[1], dest_cb, uvlinesize,16 >> s->chroma_y_shift);
2374  s->dsp.put_pixels_tab[s->chroma_x_shift][0](s->dest[2], dest_cr, uvlinesize,16 >> s->chroma_y_shift);
2375  }
2376  }
2377 }
2378 
/* Public entry point: reconstruct one decoded macroblock. Dispatches to the
 * always-inlined MPV_decode_mb_internal(), compiling a specialized MPEG-1/2
 * variant (is_mpeg12 = 1) unless CONFIG_SMALL is set.
 * NOTE(review): the docs extraction dropped the original opening line
 * (Doxygen line 2379) carrying this function's signature — restore it from
 * the pristine mpegvideo.c before compiling. */
2380 #if !CONFIG_SMALL
2381  if(s->out_format == FMT_MPEG1) {
2382  MPV_decode_mb_internal(s, block, 1);
2383  } else
2384 #endif
2385  MPV_decode_mb_internal(s, block, 0);
2386 }
2387 
/* Pass the just-decoded rows [y, y+h) to the user's draw_horiz_band()
 * callback, first widening the picture edges (pixel replication) when
 * motion vectors may point outside the coded area.
 * NOTE(review): this chunk is an extraction of generated docs; several
 * original source lines (Doxygen lines 2399, 2401, 2404, 2433, 2440) were
 * dropped — including the declaration of `desc` used below. Restore them
 * from the pristine mpegvideo.c; do not compile this text as-is. */
2391 void ff_draw_horiz_band(MpegEncContext *s, int y, int h){
2392  const int field_pic= s->picture_structure != PICT_FRAME;
2393  if(field_pic){
 /* field pictures: convert field row coordinates to frame coordinates */
2394  h <<= 1;
2395  y <<= 1;
2396  }
2397 
 /* edge widening for unrestricted MVs (skipped for hwaccel / intra-only /
  * EMU_EDGE); NOTE(review): two more conditions (lines 2399, 2401) and the
  * `desc = av_pix_fmt_desc_get(...)` declaration (line 2404) were lost. */
2398  if (!s->avctx->hwaccel
2400  && s->unrestricted_mv
2402  && !s->intra_only
2403  && !(s->flags&CODEC_FLAG_EMU_EDGE)) {
2405  int sides = 0, edge_h;
2406  int hshift = desc->log2_chroma_w;
2407  int vshift = desc->log2_chroma_h;
2408  if (y==0) sides |= EDGE_TOP;
2409  if (y + h >= s->v_edge_pos) sides |= EDGE_BOTTOM;
2410 
2411  edge_h= FFMIN(h, s->v_edge_pos - y);
2412 
 /* replicate luma and both chroma planes outward by EDGE_WIDTH pixels */
2413  s->dsp.draw_edges(s->current_picture_ptr->f.data[0] + y *s->linesize,
2414  s->linesize, s->h_edge_pos, edge_h,
2415  EDGE_WIDTH, EDGE_WIDTH, sides);
2416  s->dsp.draw_edges(s->current_picture_ptr->f.data[1] + (y>>vshift)*s->uvlinesize,
2417  s->uvlinesize, s->h_edge_pos>>hshift, edge_h>>vshift,
2418  EDGE_WIDTH>>hshift, EDGE_WIDTH>>vshift, sides);
2419  s->dsp.draw_edges(s->current_picture_ptr->f.data[2] + (y>>vshift)*s->uvlinesize,
2420  s->uvlinesize, s->h_edge_pos>>hshift, edge_h>>vshift,
2421  EDGE_WIDTH>>hshift, EDGE_WIDTH>>vshift, sides);
2422  }
2423 
2424  h= FFMIN(h, s->avctx->height - y);
2425 
2426  if(field_pic && s->first_field && !(s->avctx->slice_flags&SLICE_FLAG_ALLOW_FIELD)) return;
2427 
2428  if (s->avctx->draw_horiz_band) {
2429  AVFrame *src;
2430  int offset[AV_NUM_DATA_POINTERS];
2431  int i;
2432 
 /* NOTE(review): the condition selecting current vs. last picture
  * (line 2433) was dropped by the extraction. */
2434  src = &s->current_picture_ptr->f;
2435  else if(s->last_picture_ptr)
2436  src = &s->last_picture_ptr->f;
2437  else
2438  return;
2439 
 /* NOTE(review): the condition opening this branch (line 2440) was
  * dropped; the else-branch computes per-plane byte offsets of row y. */
2441  for (i = 0; i < AV_NUM_DATA_POINTERS; i++)
2442  offset[i] = 0;
2443  }else{
2444  offset[0]= y * s->linesize;
2445  offset[1]=
2446  offset[2]= (y >> s->chroma_y_shift) * s->uvlinesize;
2447  for (i = 3; i < AV_NUM_DATA_POINTERS; i++)
2448  offset[i] = 0;
2449  }
2450 
2451  emms_c();
2452 
2453  s->avctx->draw_horiz_band(s->avctx, src, offset,
2454  y, s->picture_structure, h);
2455  }
2456 }
2457 
/* Set up s->block_index[] (indices of the current MB's 8x8 blocks in the
 * edge-padded block-based arrays) and s->dest[] (write pointers for the
 * current MB in the three picture planes).
 * NOTE(review): docs-extraction artifact — the condition guarding the
 * dest[] row adjustment (Doxygen line 2475, before the bare `{` at 2476)
 * was dropped; restore it from the pristine mpegvideo.c. */
2458 void ff_init_block_index(MpegEncContext *s){ //FIXME maybe rename
2459  const int linesize = s->current_picture.f.linesize[0]; //not s->linesize as this would be wrong for field pics
2460  const int uvlinesize = s->current_picture.f.linesize[1];
2461  const int mb_size= 4;
2462 
 /* luma block indices: two per row in the b8 grid, offset by the -2/-1
  * left-edge padding; [4]/[5] are the chroma planes' single blocks */
2463  s->block_index[0]= s->b8_stride*(s->mb_y*2 ) - 2 + s->mb_x*2;
2464  s->block_index[1]= s->b8_stride*(s->mb_y*2 ) - 1 + s->mb_x*2;
2465  s->block_index[2]= s->b8_stride*(s->mb_y*2 + 1) - 2 + s->mb_x*2;
2466  s->block_index[3]= s->b8_stride*(s->mb_y*2 + 1) - 1 + s->mb_x*2;
2467  s->block_index[4]= s->mb_stride*(s->mb_y + 1) + s->b8_stride*s->mb_height*2 + s->mb_x - 1;
2468  s->block_index[5]= s->mb_stride*(s->mb_y + s->mb_height + 2) + s->b8_stride*s->mb_height*2 + s->mb_x - 1;
2469  //block_index is not used by mpeg2, so it is not affected by chroma_format
2470 
 /* column position: mb_x - 1 accounts for the left edge, shifted by the
  * 16-pixel MB size (less chroma subsampling for the chroma planes) */
2471  s->dest[0] = s->current_picture.f.data[0] + ((s->mb_x - 1) << mb_size);
2472  s->dest[1] = s->current_picture.f.data[1] + ((s->mb_x - 1) << (mb_size - s->chroma_x_shift));
2473  s->dest[2] = s->current_picture.f.data[2] + ((s->mb_x - 1) << (mb_size - s->chroma_y_shift));
2474 
 /* NOTE(review): guard condition (line 2475) missing here. */
2476  {
2477  if(s->picture_structure==PICT_FRAME){
2478  s->dest[0] += s->mb_y * linesize << mb_size;
2479  s->dest[1] += s->mb_y * uvlinesize << (mb_size - s->chroma_y_shift);
2480  s->dest[2] += s->mb_y * uvlinesize << (mb_size - s->chroma_y_shift);
2481  }else{
 /* field picture: each field advances by half the MB rows */
2482  s->dest[0] += (s->mb_y>>1) * linesize << mb_size;
2483  s->dest[1] += (s->mb_y>>1) * uvlinesize << (mb_size - s->chroma_y_shift);
2484  s->dest[2] += (s->mb_y>>1) * uvlinesize << (mb_size - s->chroma_y_shift);
2485  assert((s->mb_y&1) == (s->picture_structure == PICT_BOTTOM_FIELD));
2486  }
2487  }
2488 }
2489 
/* Flush decoder state between seeks: release every internally/user-owned
 * picture buffer and reset MB position, parse context and bitstream buffer.
 * NOTE(review): docs-extraction artifact — the opening line
 * `void ff_mpeg_flush(AVCodecContext *avctx){` (Doxygen line 2490, per the
 * index entry in this dump) and interior lines 2503, 2508 and 2510 were
 * dropped; restore them from the pristine mpegvideo.c. */
2491  int i;
2492  MpegEncContext *s = avctx->priv_data;
2493 
2494  if(s==NULL || s->picture==NULL)
2495  return;
2496 
 /* free every picture that still owns a data buffer we allocated or the
  * user supplied */
2497  for(i=0; i<s->picture_count; i++){
2498  if (s->picture[i].f.data[0] &&
2499  (s->picture[i].f.type == FF_BUFFER_TYPE_INTERNAL ||
2500  s->picture[i].f.type == FF_BUFFER_TYPE_USER))
2501  free_frame_buffer(s, &s->picture[i]);
2502  }
 /* NOTE(review): line 2503 (picture-pointer reset) missing here. */
2504 
2505  s->mb_x= s->mb_y= 0;
2506 
 /* reset the frame parser; NOTE(review): lines 2508 and 2510 (further
  * parse_context field resets) were dropped by the extraction. */
2507  s->parse_context.state= -1;
2509  s->parse_context.overread= 0;
2511  s->parse_context.index= 0;
2512  s->parse_context.last_index= 0;
2513  s->bitstream_buffer_size=0;
2514  s->pp_time=0;
2515 }
2516 
2518  DCTELEM *block, int n, int qscale)
2519 {
2520  int i, level, nCoeffs;
2521  const uint16_t *quant_matrix;
2522 
2523  nCoeffs= s->block_last_index[n];
2524 
2525  if (n < 4)
2526  block[0] = block[0] * s->y_dc_scale;
2527  else
2528  block[0] = block[0] * s->c_dc_scale;
2529  /* XXX: only mpeg1 */
2530  quant_matrix = s->intra_matrix;
2531  for(i=1;i<=nCoeffs;i++) {
2532  int j= s->intra_scantable.permutated[i];
2533  level = block[j];
2534  if (level) {
2535  if (level < 0) {
2536  level = -level;
2537  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2538  level = (level - 1) | 1;
2539  level = -level;
2540  } else {
2541  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2542  level = (level - 1) | 1;
2543  }
2544  block[j] = level;
2545  }
2546  }
2547 }
2548 
2550  DCTELEM *block, int n, int qscale)
2551 {
2552  int i, level, nCoeffs;
2553  const uint16_t *quant_matrix;
2554 
2555  nCoeffs= s->block_last_index[n];
2556 
2557  quant_matrix = s->inter_matrix;
2558  for(i=0; i<=nCoeffs; i++) {
2559  int j= s->intra_scantable.permutated[i];
2560  level = block[j];
2561  if (level) {
2562  if (level < 0) {
2563  level = -level;
2564  level = (((level << 1) + 1) * qscale *
2565  ((int) (quant_matrix[j]))) >> 4;
2566  level = (level - 1) | 1;
2567  level = -level;
2568  } else {
2569  level = (((level << 1) + 1) * qscale *
2570  ((int) (quant_matrix[j]))) >> 4;
2571  level = (level - 1) | 1;
2572  }
2573  block[j] = level;
2574  }
2575  }
2576 }
2577 
2579  DCTELEM *block, int n, int qscale)
2580 {
2581  int i, level, nCoeffs;
2582  const uint16_t *quant_matrix;
2583 
2584  if(s->alternate_scan) nCoeffs= 63;
2585  else nCoeffs= s->block_last_index[n];
2586 
2587  if (n < 4)
2588  block[0] = block[0] * s->y_dc_scale;
2589  else
2590  block[0] = block[0] * s->c_dc_scale;
2591  quant_matrix = s->intra_matrix;
2592  for(i=1;i<=nCoeffs;i++) {
2593  int j= s->intra_scantable.permutated[i];
2594  level = block[j];
2595  if (level) {
2596  if (level < 0) {
2597  level = -level;
2598  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2599  level = -level;
2600  } else {
2601  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2602  }
2603  block[j] = level;
2604  }
2605  }
2606 }
2607 
2609  DCTELEM *block, int n, int qscale)
2610 {
2611  int i, level, nCoeffs;
2612  const uint16_t *quant_matrix;
2613  int sum=-1;
2614 
2615  if(s->alternate_scan) nCoeffs= 63;
2616  else nCoeffs= s->block_last_index[n];
2617 
2618  if (n < 4)
2619  block[0] = block[0] * s->y_dc_scale;
2620  else
2621  block[0] = block[0] * s->c_dc_scale;
2622  quant_matrix = s->intra_matrix;
2623  for(i=1;i<=nCoeffs;i++) {
2624  int j= s->intra_scantable.permutated[i];
2625  level = block[j];
2626  if (level) {
2627  if (level < 0) {
2628  level = -level;
2629  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2630  level = -level;
2631  } else {
2632  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2633  }
2634  block[j] = level;
2635  sum+=level;
2636  }
2637  }
2638  block[63]^=sum&1;
2639 }
2640 
2642  DCTELEM *block, int n, int qscale)
2643 {
2644  int i, level, nCoeffs;
2645  const uint16_t *quant_matrix;
2646  int sum=-1;
2647 
2648  if(s->alternate_scan) nCoeffs= 63;
2649  else nCoeffs= s->block_last_index[n];
2650 
2651  quant_matrix = s->inter_matrix;
2652  for(i=0; i<=nCoeffs; i++) {
2653  int j= s->intra_scantable.permutated[i];
2654  level = block[j];
2655  if (level) {
2656  if (level < 0) {
2657  level = -level;
2658  level = (((level << 1) + 1) * qscale *
2659  ((int) (quant_matrix[j]))) >> 4;
2660  level = -level;
2661  } else {
2662  level = (((level << 1) + 1) * qscale *
2663  ((int) (quant_matrix[j]))) >> 4;
2664  }
2665  block[j] = level;
2666  sum+=level;
2667  }
2668  }
2669  block[63]^=sum&1;
2670 }
2671 
2673  DCTELEM *block, int n, int qscale)
2674 {
2675  int i, level, qmul, qadd;
2676  int nCoeffs;
2677 
2678  assert(s->block_last_index[n]>=0);
2679 
2680  qmul = qscale << 1;
2681 
2682  if (!s->h263_aic) {
2683  if (n < 4)
2684  block[0] = block[0] * s->y_dc_scale;
2685  else
2686  block[0] = block[0] * s->c_dc_scale;
2687  qadd = (qscale - 1) | 1;
2688  }else{
2689  qadd = 0;
2690  }
2691  if(s->ac_pred)
2692  nCoeffs=63;
2693  else
2694  nCoeffs= s->inter_scantable.raster_end[ s->block_last_index[n] ];
2695 
2696  for(i=1; i<=nCoeffs; i++) {
2697  level = block[i];
2698  if (level) {
2699  if (level < 0) {
2700  level = level * qmul - qadd;
2701  } else {
2702  level = level * qmul + qadd;
2703  }
2704  block[i] = level;
2705  }
2706  }
2707 }
2708 
2710  DCTELEM *block, int n, int qscale)
2711 {
2712  int i, level, qmul, qadd;
2713  int nCoeffs;
2714 
2715  assert(s->block_last_index[n]>=0);
2716 
2717  qadd = (qscale - 1) | 1;
2718  qmul = qscale << 1;
2719 
2720  nCoeffs= s->inter_scantable.raster_end[ s->block_last_index[n] ];
2721 
2722  for(i=0; i<=nCoeffs; i++) {
2723  level = block[i];
2724  if (level) {
2725  if (level < 0) {
2726  level = level * qmul - qadd;
2727  } else {
2728  level = level * qmul + qadd;
2729  }
2730  block[i] = level;
2731  }
2732  }
2733 }
2734 
2738 void ff_set_qscale(MpegEncContext * s, int qscale)
2739 {
2740  if (qscale < 1)
2741  qscale = 1;
2742  else if (qscale > 31)
2743  qscale = 31;
2744 
2745  s->qscale = qscale;
2746  s->chroma_qscale= s->chroma_qscale_table[qscale];
2747 
2748  s->y_dc_scale= s->y_dc_scale_table[ qscale ];
2750 }
2751 
/* NOTE(review): only the braces of ff_MPV_report_decode_progress survived
 * the docs extraction — its signature (Doxygen line 2752, per the index
 * entry below: `void ff_MPV_report_decode_progress(MpegEncContext *s)`)
 * and body (lines 2754-2755) were dropped. Presumably it reports
 * frame-threading decode progress — verify against and restore from the
 * pristine mpegvideo.c. */
2753 {
2756 }
int bitstream_buffer_size
Definition: mpegvideo.h:589
uint8_t * scratchpad
data area for the ME algo, so that the ME does not need to malloc/free
Definition: mpegvideo.h:160
#define PICT_BOTTOM_FIELD
Definition: mpegvideo.h:640
enum AVPixelFormat ff_hwaccel_pixfmt_list_420[]
Definition: mpegvideo.c:133
int last
number of values for last = 0
Definition: rl.h:40
const struct AVCodec * codec
Definition: avcodec.h:1348
int16_t(* b_bidir_back_mv_table_base)[2]
Definition: mpegvideo.h:369
void * av_malloc(size_t size)
Allocate a block of size bytes with alignment suitable for all memory accesses (including vectors if ...
Definition: mem.c:61
int table_size
Definition: get_bits.h:66
#define PICT_TOP_FIELD
Definition: mpegvideo.h:639
discard all frames except keyframes
Definition: avcodec.h:535
void ff_init_block_index(MpegEncContext *s)
Definition: mpegvideo.c:2458
op_pixels_func put_pixels_tab[4][4]
Halfpel motion compensation with rounding (a+b+1)>>1.
Definition: dsputil.h:259
unsigned int stream_codec_tag
fourcc from the AVI stream header (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + ...
Definition: avcodec.h:1373
av_cold void ff_dsputil_init(DSPContext *c, AVCodecContext *avctx)
Definition: dsputil.c:2656
int picture_number
Definition: mpegvideo.h:245
#define MAX_PICTURE_COUNT
Definition: mpegvideo.h:63
ScanTable intra_v_scantable
Definition: mpegvideo.h:268
#define HAVE_THREADS
Definition: config.h:235
S(GMC)-VOP MPEG4.
Definition: avutil.h:248
const uint8_t ff_zigzag_direct[64]
Definition: dsputil.c:59
void ff_release_unused_pictures(MpegEncContext *s, int remove_current)
Definition: mpegvideo.c:1283
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:1435
int time_increment_bits
number of bits to represent the fractional part of time
Definition: mpegvideo.h:533
This structure describes decoded (raw) audio or video data.
Definition: avcodec.h:989
#define IS_SKIP(a)
Definition: mpegvideo.h:110
int qstride
QP store stride.
Definition: avcodec.h:1145
AVPanScan * pan_scan
Pan scan.
Definition: avcodec.h:1260
int ff_alloc_picture(MpegEncContext *s, Picture *pic, int shared)
Allocate a Picture.
Definition: mpegvideo.c:332
int16_t(* p_mv_table)[2]
MV table (1MV per MB) p-frame encoding.
Definition: mpegvideo.h:373
uint8_t * rd_scratchpad
scratchpad for rate distortion mb decision
Definition: mpegvideo.h:338
static void dct_unquantize_mpeg2_intra_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2578
int start_mb_y
start mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y) ...
Definition: mpegvideo.h:286
const uint8_t * y_dc_scale_table
qscale -> y_dc_scale table
Definition: mpegvideo.h:324
uint8_t * mb_mean
Table for MB luminance.
Definition: mpegvideo.h:145
int coded_width
Bitstream width / height, may be different from width/height.
Definition: avcodec.h:1515
av_cold int ff_dct_common_init(MpegEncContext *s)
Definition: mpegvideo.c:181
void(* dct_unquantize_mpeg2_inter)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:688
av_cold int ff_MPV_common_init(MpegEncContext *s)
init common structure for both encoder and decoder.
Definition: mpegvideo.c:882
void ff_MPV_common_init_arm(MpegEncContext *s)
Definition: mpegvideo_arm.c:42
misc image utilities
void ff_MPV_report_decode_progress(MpegEncContext *s)
Definition: mpegvideo.c:2752
uint8_t * coded_block_base
Definition: mpegvideo.h:327
#define EDGE_TOP
Definition: dsputil.h:441
AVFrame * coded_frame
the picture in the bitstream
Definition: avcodec.h:2725
int end_mb_y
end mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y) ...
Definition: mpegvideo.h:287
uint16_t * mb_var
Table for MB variances.
Definition: mpegvideo.h:143
static void free_frame_buffer(MpegEncContext *s, Picture *pic)
Release a frame buffer.
Definition: mpegvideo.c:232
void ff_MPV_motion(MpegEncContext *s, uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr, int dir, uint8_t **ref_picture, op_pixels_func(*pix_op)[4], qpel_mc_func(*qpix_op)[16])
int16_t(*[3] ac_val)[16]
used for mpeg4 AC prediction, all 3 arrays must be continuous
Definition: mpegvideo.h:330
MJPEG encoder.
void(* idct_add)(uint8_t *dest, int line_size, DCTELEM *block)
block -> idct -> add dest -> clip to unsigned 8 bit -> dest.
Definition: dsputil.h:411
void * hwaccel_picture_private
hardware accelerator private data (Libav-allocated)
Definition: avcodec.h:1280
int v_edge_pos
horizontal / vertical position of the right/bottom edge (pixel replication)
Definition: mpegvideo.h:251
int msmpeg4_version
0=not msmpeg4, 1=mp41, 2=mp42, 3=mp43/divx3 4=wmv1/7 5=wmv2/8
Definition: mpegvideo.h:616
void(* draw_edges)(uint8_t *buf, int wrap, int width, int height, int w, int h, int sides)
Definition: dsputil.h:439
static const uint8_t mpeg2_dc_scale_table3[128]
Definition: mpegvideo.c:109
void ff_xvmc_field_end(MpegEncContext *s)
Complete frame/field rendering by passing any remaining blocks.
int needs_realloc
Picture needs to be reallocated (eg due to a frame size change)
Definition: mpegvideo.h:149
uint8_t * bitstream_buffer
Definition: mpegvideo.h:588
enum AVCodecID codec_id
Definition: mpegvideo.h:227
void(* dct_unquantize_h263_inter)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:692
void ff_init_rl(RLTable *rl, uint8_t static_store[2][2 *MAX_RUN+MAX_LEVEL+3])
Definition: mpegvideo.c:1191
int16_t(*[2] motion_val_base)[2]
Definition: mpegvideo.h:102
HW decoding through VA API, Picture.data[3] contains a vaapi_render_state struct which contains the b...
Definition: pixfmt.h:123
int field_picture
whether or not the picture was encoded in separate fields
Definition: mpegvideo.h:139
void(* dct_unquantize_mpeg2_intra)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:686
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:1533
int16_t(*[2][2] p_field_mv_table)[2]
MV table (2MV per MB) interlaced p-frame encoding.
Definition: mpegvideo.h:379
int picture_range_end
the part of picture that this context can allocate in
Definition: mpegvideo.h:319
static void dct_unquantize_h263_inter_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2709
int16_t(* p_mv_table_base)[2]
Definition: mpegvideo.h:365
uint8_t raster_end[64]
Definition: dsputil.h:184
#define wrap(func)
Definition: w64xmmtest.h:70
uint32_t * score_map
map to store the scores
Definition: mpegvideo.h:166
mpegvideo header.
av_dlog(ac->avr,"%d samples - audio_convert: %s to %s (%s)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt), use_generic?ac->func_descr_generic:ac->func_descr)
discard all
Definition: avcodec.h:536
uint8_t permutated[64]
Definition: dsputil.h:183
#define IS_INTRA4x4(a)
Definition: mpegvideo.h:105
const int8_t * table_level
Definition: rl.h:43
uint8_t run
Definition: svq3.c:124
static void free_duplicate_context(MpegEncContext *s)
Definition: mpegvideo.c:500
int bits_per_raw_sample
Bits per sample/pixel of internal libavcodec pixel/sample format.
Definition: avcodec.h:2711
int mb_num
number of MBs of a picture
Definition: mpegvideo.h:252
int stride
Definition: mace.c:144
int frame_start_found
Definition: parser.h:34
int ff_xvmc_field_start(MpegEncContext *s, AVCodecContext *avctx)
Find and store the surfaces that are used as reference frames.
static void free_picture(MpegEncContext *s, Picture *pic)
Deallocate a picture.
Definition: mpegvideo.c:421
int qscale
QP.
Definition: mpegvideo.h:342
RLTable.
Definition: rl.h:38
int h263_aic
Advanced INTRA Coding (AIC)
Definition: mpegvideo.h:262
int16_t(* b_back_mv_table)[2]
MV table (1MV per MB) backward mode b-frame encoding.
Definition: mpegvideo.h:375
uint8_t log2_chroma_w
Amount to shift the luma width right to find the chroma width.
Definition: pixdesc.h:66
int chroma_x_shift
Definition: mpegvideo.h:656
int encoding
true if we are encoding (vs decoding)
Definition: mpegvideo.h:229
void ff_MPV_common_init_bfin(MpegEncContext *s)
int block_wrap[6]
Definition: mpegvideo.h:434
static void dct_unquantize_mpeg1_intra_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2517
int16_t(* b_back_mv_table_base)[2]
Definition: mpegvideo.h:367
static void backup_duplicate_context(MpegEncContext *bak, MpegEncContext *src)
Definition: mpegvideo.c:520
#define USES_LIST(a, list)
does this mb use listX, note does not work if subMBs
Definition: mpegvideo.h:126
void ff_clean_intra_table_entries(MpegEncContext *s)
Clean dc, ac, coded_block for the current non-intra MB.
Definition: mpegvideo.c:2107
int picture_range_start
Definition: mpegvideo.h:319
#define COLOR(theta, r)
void av_freep(void *arg)
Free a memory block which has been allocated with av_malloc(z)() or av_realloc() and set the pointer ...
Definition: mem.c:151
#define CONFIG_GRAY
Definition: config.h:276
Switching Intra.
Definition: avutil.h:249
#define MAX_THREADS
Definition: mpegvideo.h:61
uint8_t * visualization_buffer[3]
temporary buffer for MV visualization
Definition: mpegvideo.h:320
struct AVHWAccel * hwaccel
Hardware accelerator in use.
Definition: avcodec.h:2622
#define CONFIG_WMV2_DECODER
Definition: config.h:524
int picture_in_gop_number
0-> first pic in gop, ...
Definition: mpegvideo.h:246
void av_fast_malloc(void *ptr, unsigned int *size, size_t min_size)
Allocate a buffer, reusing the given one if large enough.
Definition: utils.c:72
int8_t * max_run[2]
encoding & decoding
Definition: rl.h:46
void ff_MPV_common_init_altivec(MpegEncContext *s)
int context_reinit
Definition: mpegvideo.h:717
const uint8_t ff_alternate_vertical_scan[64]
Definition: dsputil.c:97
int16_t * dc_val_base
Definition: mpegvideo.h:322
#define CONFIG_WMV2_ENCODER
Definition: config.h:884
int ff_MPV_common_frame_size_change(MpegEncContext *s)
Definition: mpegvideo.c:1069
uint8_t
DCTELEM(*[12] pblocks)[64]
Definition: mpegvideo.h:672
#define IS_8X16(a)
Definition: mpegvideo.h:117
Picture ** input_picture
next pictures on display order for encoding
Definition: mpegvideo.h:256
#define PICT_FRAME
Definition: mpegvideo.h:641
enum OutputFormat out_format
output format
Definition: mpegvideo.h:219
void(* qpel_mc_func)(uint8_t *dst, uint8_t *src, int stride)
Definition: dsputil.h:144
uint16_t(* dct_offset)[64]
Definition: mpegvideo.h:469
#define AV_RB32
Definition: intreadwrite.h:130
uint8_t * pred_dir_table
used to store pred_dir for partitioned decoding
Definition: mpegvideo.h:336
uint8_t * er_temp_buffer
Definition: mpegvideo.h:710
qpel_mc_func(* qpel_put)[16]
Definition: mpegvideo.h:198
static void dct_unquantize_h263_intra_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2672
#define emms_c()
Definition: internal.h:145
uint8_t motion_subsample_log2
log2 of the size of the block which a single vector in motion_val represents: (4->16x16, 3->8x8, 2-> 4x4, 1-> 2x2)
Definition: avcodec.h:1302
#define IS_GMC(a)
Definition: mpegvideo.h:114
int no_rounding
apply no rounding to motion compensation (MPEG4, msmpeg4, ...) for b-frames rounding mode is always 0...
Definition: mpegvideo.h:407
int interlaced_dct
Definition: mpegvideo.h:661
Picture current_picture
copy of the current picture structure.
Definition: mpegvideo.h:313
int intra_dc_precision
Definition: mpegvideo.h:643
static int pic_is_unused(MpegEncContext *s, Picture *pic)
Definition: mpegvideo.c:1298
op_pixels_func avg_pixels_tab[4][4]
Halfpel motion compensation with rounding (a+b+1)>>1.
Definition: dsputil.h:271
int16_t(* b_bidir_forw_mv_table)[2]
MV table (1MV per MB) bidir mode b-frame encoding.
Definition: mpegvideo.h:376
void(* dct_unquantize_intra)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:698
void(* dct_unquantize_inter)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:700
float * cplx_tab
Definition: mpegvideo.h:713
int8_t * max_level[2]
encoding & decoding
Definition: rl.h:45
uint16_t pp_time
time distance between the last 2 p,s,i frames
Definition: mpegvideo.h:538
uint8_t idct_permutation[64]
idct input permutation.
Definition: dsputil.h:425
uint8_t * b_scratchpad
scratchpad used for writing into write only buffers
Definition: mpegvideo.h:340
int flags2
AVCodecContext.flags2.
Definition: mpegvideo.h:231
int interlaced_frame
The content of the picture is interlaced.
Definition: avcodec.h:1232
int mb_height
number of MBs horizontally & vertically
Definition: mpegvideo.h:247
enum AVPixelFormat ff_pixfmt_list_420[]
Definition: mpegvideo.c:128
void ff_MPV_frame_end(MpegEncContext *s)
Definition: mpegvideo.c:1573
int codec_tag
internal codec_tag upper case converted from avctx codec_tag
Definition: mpegvideo.h:237
char * stats_out
pass1 encoding statistics output buffer
Definition: avcodec.h:2502
void(* dct_unquantize_mpeg1_intra)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:682
int16_t(*[2][2] p_field_mv_table_base)[2]
Definition: mpegvideo.h:371
static int free_context_frame(MpegEncContext *s)
Frees and resets MpegEncContext fields depending on the resolution.
Definition: mpegvideo.c:1016
static void update_noise_reduction(MpegEncContext *s)
Definition: mpegvideo.c:1345
#define MAX_LEVEL
Definition: rl.h:35
#define IS_INTERLACED(a)
Definition: mpegvideo.h:112
void ff_set_qscale(MpegEncContext *s, int qscale)
set qscale and update qscale dependent variables.
Definition: mpegvideo.c:2738
int(* q_inter_matrix)[64]
Definition: mpegvideo.h:461
#define r
Definition: input.c:51
void ff_xvmc_decode_mb(MpegEncContext *s)
Synthesize the data needed by XvMC to render one macroblock of data.
uint8_t * error_status_table
table of the error status of each MB
Definition: mpegvideo.h:493
int(* q_intra_matrix)[64]
precomputed matrix (combine qscale and DCT renorm)
Definition: mpegvideo.h:460
int intra_only
if true, only intra pictures are generated
Definition: mpegvideo.h:217
int16_t * dc_val[3]
used for mpeg4 DC prediction, all 3 arrays must be continuous
Definition: mpegvideo.h:323
int h263_plus
h263 plus headers
Definition: mpegvideo.h:224
int slice_context_count
number of used thread_contexts
Definition: mpegvideo.h:289
int last_non_b_pict_type
used for mpeg4 gmc b-frames & ratecontrol
Definition: mpegvideo.h:351
unsigned int buffer_size
Definition: parser.h:32
int stream_codec_tag
internal stream_codec_tag upper case converted from avctx stream_codec_tag
Definition: mpegvideo.h:238
int last_dc[3]
last DC values for MPEG1
Definition: mpegvideo.h:321
uint8_t log2_chroma_h
Amount to shift the luma height right to find the chroma height.
Definition: pixdesc.h:75
Multithreading support functions.
int mb_skipped
MUST BE SET only during DECODING.
Definition: mpegvideo.h:331
int reference
is this picture used as reference The values for this are the same as the MpegEncContext.picture_structure variable, that is 1->top field, 2->bottom field, 3->frame/both fields.
Definition: avcodec.h:1132
int chroma_y_shift
Definition: mpegvideo.h:657
static int find_unused_picture(MpegEncContext *s, int shared)
Definition: mpegvideo.c:1308
int partitioned_frame
is current frame partitioned
Definition: mpegvideo.h:567
int is_copy
Whether the parent AVCodecContext is a copy of the context which had init() called on it...
Definition: internal.h:63
short * dct_coeff
DCT coefficients.
Definition: avcodec.h:1187
const uint8_t ff_alternate_horizontal_scan[64]
Definition: dsputil.c:86
void(* idct_put)(uint8_t *dest, int line_size, DCTELEM *block)
block -> idct -> clip to unsigned 8 bit -> dest.
Definition: dsputil.h:405
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
Definition: pixdesc.c:1460
int unrestricted_mv
mv can point outside of the coded picture
Definition: mpegvideo.h:358
int active_thread_type
Which multithreading methods are in use by the codec.
Definition: avcodec.h:2752
int last_lambda_for[5]
last lambda for a specific pict type
Definition: mpegvideo.h:354
int capabilities
Codec capabilities.
Definition: avcodec.h:2979
uint8_t * edge_emu_buffer
temporary buffer for if MVs point to out-of-frame data
Definition: mpegvideo.h:337
uint8_t * base[AV_NUM_DATA_POINTERS]
pointer to the first allocated byte of the picture.
Definition: avcodec.h:1073
#define REBASE_PICTURE(pic, new_ctx, old_ctx)
Definition: mpegvideo.h:720
int flags
CODEC_FLAG_*.
Definition: avcodec.h:1434
static enum AVDiscard skip_idct
Definition: avplay.c:258
void ff_videodsp_init(VideoDSPContext *ctx, int bpc)
Definition: videodsp.c:36
int overread_index
the index into ParseContext.buffer of the overread bytes
Definition: parser.h:36
void av_log(void *avcl, int level, const char *fmt,...)
Definition: log.c:146
int quarter_sample
1->qpel, 0->half pel ME/MC
Definition: mpegvideo.h:557
uint16_t * mb_type
Table for candidate MB types for encoding.
Definition: mpegvideo.h:414
#define IS_INTRA(a)
Definition: mpegvideo.h:108
static void draw_line(uint8_t *buf, int sx, int sy, int ex, int ey, int w, int h, int stride, int color)
Draw a line from (ex, ey) -> (sx, sy).
Definition: mpegvideo.c:1651
int low_delay
no reordering needed / has no b-frames
Definition: mpegvideo.h:570
op_pixels_func put_no_rnd_pixels_tab[4][4]
Halfpel motion compensation with no rounding (a+b)>>1.
Definition: dsputil.h:283
static void dct_unquantize_mpeg2_intra_bitexact(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2608
VLC vlc
decoding only deprecated FIXME remove
Definition: rl.h:47
uint8_t *[2][2] b_field_select_table
Definition: mpegvideo.h:382
int error_occurred
Definition: mpegvideo.h:492
int8_t len
Definition: get_bits.h:71
int priv_data_size
Size of HW accelerator private data.
Definition: avcodec.h:3139
int off
Definition: dsputil_bfin.c:28
DCTELEM(* blocks)[8][64]
Definition: mpegvideo.h:675
int picture_count
number of allocated pictures (MAX_PICTURE_COUNT * avctx->thread_count)
Definition: mpegvideo.h:318
static const uint8_t ff_default_chroma_qscale_table[32]
Definition: mpegvideo.c:67
void ff_mpeg_flush(AVCodecContext *avctx)
Definition: mpegvideo.c:2490
int coded_picture_number
used to set pic->coded_picture_number, should not be used for/by anything else
Definition: mpegvideo.h:244
int * lambda_table
Definition: mpegvideo.h:346
int n
number of entries of table_vlc minus 1
Definition: rl.h:39
#define IS_8X8(a)
Definition: mpegvideo.h:118
int av_image_check_size(unsigned int w, unsigned int h, int log_offset, void *log_ctx)
Check if the given dimension of an image is valid, meaning that all bytes of the image can be address...
Definition: imgutils.c:220
int err_recognition
Definition: mpegvideo.h:510
void(* dct_unquantize_mpeg1_inter)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:684
void(* draw_horiz_band)(struct AVCodecContext *s, const AVFrame *src, int offset[AV_NUM_DATA_POINTERS], int y, int type, int height)
If non NULL, 'draw_horiz_band' is called by the libavcodec decoder to draw a horizontal band...
Definition: avcodec.h:1567
int progressive_frame
Definition: mpegvideo.h:659
static DCTELEM block[64]
Definition: dct-test.c:169
enum AVPictureType pict_type
Picture type of the frame, see ?_TYPE below.
Definition: avcodec.h:1065
int top_field_first
Definition: mpegvideo.h:645
int err_recognition
Error recognition; may misdetect some more or less valid parts as errors.
Definition: avcodec.h:2602
int overread
the number of bytes which where irreversibly read from the next frame
Definition: parser.h:35
uint16_t(* q_inter_matrix16)[2][64]
Definition: mpegvideo.h:464
int last_index
Definition: parser.h:31
int next_p_frame_damaged
set if the next p frame is damaged, to avoid showing trashed b frames
Definition: mpegvideo.h:509
int width
picture width / height.
Definition: avcodec.h:1508
uint8_t * mbskip_table
used to avoid copy if macroblock skipped (for black regions for example) and used for b-frame encodin...
Definition: mpegvideo.h:332
int type
type of the buffer (to keep track of who has to deallocate data[*])
Definition: avcodec.h:1217
Picture * current_picture_ptr
pointer to the current picture
Definition: mpegvideo.h:317
void ff_copy_picture(Picture *dst, Picture *src)
Definition: mpegvideo.c:223
Picture.
Definition: mpegvideo.h:94
int alternate_scan
Definition: mpegvideo.h:649
unsigned int allocated_bitstream_buffer_size
Definition: mpegvideo.h:590
int16_t(* ac_val_base)[16]
Definition: mpegvideo.h:329
int32_t
uint16_t(* q_intra_matrix16)[2][64]
identical to the above but for MMX & these are not permutated, second 64 entries are bias ...
Definition: mpegvideo.h:463
const int8_t * table_run
Definition: rl.h:42
static av_always_inline void MPV_decode_mb_internal(MpegEncContext *s, DCTELEM block[12][64], int is_mpeg12)
Definition: mpegvideo.c:2148
int16_t(*[2][2][2] b_field_mv_table_base)[2]
Definition: mpegvideo.h:372
int quality
quality (between 1 (good) and FF_LAMBDA_MAX (bad))
Definition: avcodec.h:1122
int(* ac_stats)[2][MAX_LEVEL+1][MAX_RUN+1][2]
[mb_intra][isChroma][level][run][last]
Definition: mpegvideo.h:621
int16_t(* b_forw_mv_table_base)[2]
Definition: mpegvideo.h:366
int block_last_index[12]
last non zero coefficient in block
Definition: mpegvideo.h:261
MotionEstContext me
Definition: mpegvideo.h:405
int ff_mpv_frame_size_alloc(MpegEncContext *s, int linesize)
Definition: mpegvideo.c:245
#define EDGE_BOTTOM
Definition: dsputil.h:442
int mb_decision
macroblock decision mode
Definition: avcodec.h:1882
uint8_t * mbintra_table
used to avoid setting {ac, dc, cbp}-pred stuff to zero on inter MB decoding
Definition: mpegvideo.h:334
#define ME_MAP_SIZE
Definition: mpegvideo.h:65
#define DELAYED_PIC_REF
Value of Picture.reference when Picture is not a reference picture, but is held for delayed output...
Definition: mpegvideo.h:87
int ff_MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx)
generic function for encode/decode called after coding/decoding the header and before a frame is code...
Definition: mpegvideo.c:1370
int ff_mpeg_update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
Definition: mpegvideo.c:570
RL_VLC_ELEM * rl_vlc[32]
decoding only
Definition: rl.h:48
preferred ID for MPEG-1/2 video decoding
Definition: avcodec.h:100
int thread_count
thread count is used to decide how many independent tasks should be passed to execute() ...
Definition: avcodec.h:2733
int block_index[6]
index to current MB in block based arrays with edges
Definition: mpegvideo.h:433
#define IS_16X8(a)
Definition: mpegvideo.h:116
int xvmc_acceleration
XVideo Motion Acceleration.
Definition: avcodec.h:1875
int * mb_index2xy
mb_index -> mb_x + mb_y*mb_stride
Definition: mpegvideo.h:437
int first_field
is 1 for the first field of a field picture 0 otherwise
Definition: mpegvideo.h:663
void(* op_pixels_func)(uint8_t *block, const uint8_t *pixels, int line_size, int h)
Definition: dsputil.h:142
static const int8_t mv[256][2]
Definition: 4xm.c:73
uint32_t * mb_type
macroblock type table mb_type_base + mb_width + 2
Definition: avcodec.h:1180
#define MV_TYPE_16X16
1 vector for the whole mb
Definition: mpegvideo.h:389
NULL
Definition: eval.c:52
uint16_t * mc_mb_var
Table for motion compensated MB variances.
Definition: mpegvideo.h:144
#define MV_DIR_BACKWARD
Definition: mpegvideo.h:386
int16_t(* b_bidir_forw_mv_table_base)[2]
Definition: mpegvideo.h:368
const uint8_t *const ff_mpeg2_dc_scale_table[4]
Definition: mpegvideo.c:121
static int width
Definition: utils.c:156
int coded_picture_number
picture number in bitstream order
Definition: avcodec.h:1109
const uint8_t * avpriv_mpv_find_start_code(const uint8_t *restrict p, const uint8_t *end, uint32_t *restrict state)
Definition: mpegvideo.c:147
uint16_t inter_matrix[64]
Definition: mpegvideo.h:442
uint8_t * buffer
Definition: parser.h:29
struct MpegEncContext * thread_context[MAX_THREADS]
Definition: mpegvideo.h:288
external API header
void ff_thread_await_progress(AVFrame *f, int n, int field)
Wait for earlier decoding threads to finish reference pictures.
Definition: pthread.c:684
int8_t * qscale_table_base
Definition: mpegvideo.h:101
static av_const unsigned int ff_sqrt(unsigned int a)
Definition: mathops.h:198
static void dct_unquantize_mpeg2_inter_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2641
enum AVDiscard skip_idct
Definition: avcodec.h:2900
uint32_t * mb_type_base
Definition: mpegvideo.h:103
int linesize[AV_NUM_DATA_POINTERS]
Size, in bytes, of the data for each picture/channel plane.
Definition: avcodec.h:1008
int debug
debug
Definition: avcodec.h:2568
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:55
main external API structure.
Definition: avcodec.h:1339
ScanTable intra_scantable
Definition: mpegvideo.h:266
uint8_t * coded_block
used for coded block pattern prediction (msmpeg4v3, wmv1)
Definition: mpegvideo.h:328
int height
picture size. must be a multiple of 16
Definition: mpegvideo.h:215
unsigned int codec_tag
fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A').
Definition: avcodec.h:1365
const uint8_t ff_mpeg1_dc_scale_table[128]
Definition: mpegvideo.c:73
int16_t(*[2] motion_val)[2]
motion vector table
Definition: avcodec.h:1172
#define MV_TYPE_16X8
2 vectors, one per 16x8 block
Definition: mpegvideo.h:391
void avcodec_default_free_buffers(AVCodecContext *s)
Definition: utils.c:1754
void avcodec_default_release_buffer(AVCodecContext *s, AVFrame *pic)
Definition: utils.c:487
uint32_t state
contains the last few bytes in MSB order
Definition: parser.h:33
Picture * picture
main picture buffer
Definition: mpegvideo.h:255
int progressive_sequence
Definition: mpegvideo.h:635
int slice_flags
slice flags
Definition: avcodec.h:1865
void avcodec_get_frame_defaults(AVFrame *frame)
Set the fields of the given AVFrame to default values.
Definition: utils.c:602
int coded_height
Definition: avcodec.h:1515
Switching Predicted.
Definition: avutil.h:250
ScanTable intra_h_scantable
Definition: mpegvideo.h:267
int16_t(*[2][2][2] b_field_mv_table)[2]
MV table (4MV per MB) interlaced b-frame encoding.
Definition: mpegvideo.h:380
uint8_t * cbp_table
used to store cbp, ac_pred for partitioned decoding
Definition: mpegvideo.h:335
void ff_draw_horiz_band(MpegEncContext *s, int y, int h)
Definition: mpegvideo.c:2391
unsigned int avpriv_toupper4(unsigned int x)
Definition: utils.c:2095
uint8_t * index_run[2]
encoding only
Definition: rl.h:44
int context_initialized
Definition: mpegvideo.h:242
int input_picture_number
used to set pic->display_picture_number, should not be used for/by anything else
Definition: mpegvideo.h:243
void ff_MPV_common_init_x86(MpegEncContext *s)
Definition: mpegvideo.c:587
int8_t * ref_index[2]
motion reference frame index the order in which these are stored can depend on the codec...
Definition: avcodec.h:1195
DSPContext dsp
pointers for accelerated dsp functions
Definition: mpegvideo.h:361
#define s1
Definition: regdef.h:38
int f_code
forward MV resolution
Definition: mpegvideo.h:363
#define COPY(a)
short DCTELEM
Definition: dsputil.h:39
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer. ...
Definition: pixfmt.h:132
#define MV_DIR_FORWARD
Definition: mpegvideo.h:385
int max_b_frames
max number of b-frames for encoding
Definition: mpegvideo.h:232
int pict_type
AV_PICTURE_TYPE_I, AV_PICTURE_TYPE_P, AV_PICTURE_TYPE_B, ...
Definition: mpegvideo.h:349
DCTELEM(* block)[64]
points to one of the following blocks
Definition: mpegvideo.h:674
int h263_pred
use mpeg4/h263 ac/dc predictions
Definition: mpegvideo.h:220
int16_t(* b_bidir_back_mv_table)[2]
MV table (1MV per MB) bidir mode b-frame encoding.
Definition: mpegvideo.h:377
static int init_context_frame(MpegEncContext *s)
Initialize and allocates MpegEncContext fields dependent on the resolution.
Definition: mpegvideo.c:726
static uint32_t state
Definition: trasher.c:27
uint8_t *[2] p_field_select_table
Definition: mpegvideo.h:381
int16_t(* b_direct_mv_table)[2]
MV table (1MV per MB) direct mode b-frame encoding.
Definition: mpegvideo.h:378
Pan Scan area.
Definition: avcodec.h:788
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: avcodec.h:997
const uint8_t * c_dc_scale_table
qscale -> c_dc_scale table
Definition: mpegvideo.h:325
static void add_dequant_dct(MpegEncContext *s, DCTELEM *block, int i, uint8_t *dest, int line_size, int qscale)
Definition: mpegvideo.c:2094
int8_t * qscale_table
QP table.
Definition: avcodec.h:1139
uint8_t level
Definition: svq3.c:125
#define IS_INTRA16x16(a)
Definition: mpegvideo.h:106
qpel_mc_func(* qpel_avg)[16]
Definition: mpegvideo.h:199
int mv[2][4][2]
motion vectors for a macroblock first coordinate : 0 = forward 1 = backward second " : depend...
Definition: mpegvideo.h:399
int16_t(* b_forw_mv_table)[2]
MV table (1MV per MB) forward mode b-frame encoding.
Definition: mpegvideo.h:374
int b8_stride
2*mb_width+1 used for some 8x8 block arrays to allow simple addressing
Definition: mpegvideo.h:249
int noise_reduction
noise reduction strength
Definition: avcodec.h:1914
#define IS_ACPRED(a)
Definition: mpegvideo.h:123
struct MpegEncContext * owner2
pointer to the MpegEncContext that allocated this picture
Definition: mpegvideo.h:148
int height
Definition: gxfenc.c:72
MpegEncContext.
Definition: mpegvideo.h:211
uint8_t run
Definition: get_bits.h:72
Picture * next_picture_ptr
pointer to the next picture (for bidir pred)
Definition: mpegvideo.h:316
#define MAX_RUN
Definition: rl.h:34
struct AVCodecContext * avctx
Definition: mpegvideo.h:213
static void draw_arrow(uint8_t *buf, int sx, int sy, int ex, int ey, int w, int h, int stride, int color)
Draw an arrow from (ex, ey) -> (sx, sy).
Definition: mpegvideo.c:1704
hardware decoding through VDA
Definition: pixfmt.h:153
discard all non reference
Definition: avcodec.h:533
int(* dct_error_sum)[64]
Definition: mpegvideo.h:467
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:65
void ff_init_vlc_rl(RLTable *rl)
Definition: mpegvideo.c:1243
common internal api header.
int mb_stride
mb_width+1 used for some arrays to allow simple addressing of left & top MBs without sig11 ...
Definition: mpegvideo.h:248
void ff_MPV_common_defaults(MpegEncContext *s)
Set the given MpegEncContext to common defaults (same for encoding and decoding). ...
Definition: mpegvideo.c:689
#define IS_PCM(a)
Definition: mpegvideo.h:107
uint8_t * dest[3]
Definition: mpegvideo.h:435
#define FF_ALLOC_OR_GOTO(ctx, p, size, label)
Definition: internal.h:60
int last_pict_type
Definition: mpegvideo.h:350
int b4_stride
4*mb_width+1 used for some 4x4 block arrays to allow simple addressing
Definition: mpegvideo.h:250
Picture last_picture
copy of the previous picture structure.
Definition: mpegvideo.h:295
uint8_t * obmc_scratchpad
Definition: mpegvideo.h:339
static int alloc_frame_buffer(MpegEncContext *s, Picture *pic)
Allocate a frame buffer.
Definition: mpegvideo.c:273
Picture * last_picture_ptr
pointer to the previous picture.
Definition: mpegvideo.h:315
Bi-dir predicted.
Definition: avutil.h:247
int index
Definition: parser.h:30
const uint8_t * chroma_qscale_table
qscale -> chroma_qscale (h263)
Definition: mpegvideo.h:326
static const uint8_t color[]
Definition: log.c:52
uint32_t * map
map to avoid duplicate evaluations
Definition: mpegvideo.h:165
int ff_update_duplicate_context(MpegEncContext *dst, MpegEncContext *src)
Definition: mpegvideo.c:547
DSP utils.
int slices
Number of slices.
Definition: avcodec.h:2095
void * priv_data
Definition: avcodec.h:1382
int picture_structure
Definition: mpegvideo.h:637
VideoDSPContext vdsp
Definition: mpegvideo.h:362
void ff_wmv2_add_mb(MpegEncContext *s, DCTELEM block[6][64], uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr)
Definition: wmv2.c:59
void ff_MPV_decode_mb(MpegEncContext *s, DCTELEM block[12][64])
Definition: mpegvideo.c:2379
int top_field_first
If the content is interlaced, is top field displayed first.
Definition: avcodec.h:1239
#define IS_DIRECT(a)
Definition: mpegvideo.h:113
int len
void ff_MPV_common_end(MpegEncContext *s)
Definition: mpegvideo.c:1141
void ff_print_debug_info(MpegEncContext *s, AVFrame *pict)
Print debugging info for the given picture.
Definition: mpegvideo.c:1735
void ff_init_scantable(uint8_t *permutation, ScanTable *st, const uint8_t *src_scantable)
Definition: dsputil.c:122
struct AVCodecInternal * internal
Private context used for internal data.
Definition: avcodec.h:1390
int ff_MPV_lowest_referenced_row(MpegEncContext *s, int dir)
find the lowest MB row referenced in the MVs
Definition: mpegvideo.c:2042
ParseContext parse_context
Definition: mpegvideo.h:512
VLC_TYPE(* table)[2]
code, bits
Definition: get_bits.h:65
Picture next_picture
copy of the next picture structure.
Definition: mpegvideo.h:301
#define EDGE_WIDTH
Definition: dsputil.h:440
int key_frame
1 -> keyframe, 0-> not
Definition: avcodec.h:1058
static const uint8_t mpeg2_dc_scale_table1[128]
Definition: mpegvideo.c:85
int linesize
line size, in bytes, may be different from width
Definition: mpegvideo.h:253
uint8_t * mbskip_table
mbskip_table[mb]>=1 if MB didn't change stride= mb_width = (width+15)>>4
Definition: avcodec.h:1158
int16_t level
Definition: get_bits.h:70
#define IS_16X16(a)
Definition: mpegvideo.h:115
Picture ** reordered_input_picture
pointer to the next pictures in codedorder for encoding
Definition: mpegvideo.h:257
int flags2
CODEC_FLAG2_*.
Definition: avcodec.h:1441
static const uint8_t mpeg2_dc_scale_table2[128]
Definition: mpegvideo.c:97
int chroma_qscale
chroma QP
Definition: mpegvideo.h:343
struct AVFrame f
Definition: mpegvideo.h:95
static void add_dct(MpegEncContext *s, DCTELEM *block, int i, uint8_t *dest, int line_size)
Definition: mpegvideo.c:2086
static int init_duplicate_context(MpegEncContext *s, MpegEncContext *base)
Definition: mpegvideo.c:455
int flags
AVCodecContext.flags (HQ, MV4, ...)
Definition: mpegvideo.h:230
uint16_t intra_matrix[64]
matrix transmitted in the bitstream
Definition: mpegvideo.h:440
int workaround_bugs
workaround bugs in encoders which cannot be detected automatically
Definition: mpegvideo.h:236
ScanTable inter_scantable
if inter == intra then intra should be used to reduce tha cache usage
Definition: mpegvideo.h:265
static void put_dct(MpegEncContext *s, DCTELEM *block, int i, uint8_t *dest, int line_size, int qscale)
Definition: mpegvideo.c:2078
uint8_t * temp
Definition: mpegvideo.h:163
int avcodec_default_get_buffer(AVCodecContext *s, AVFrame *pic)
Definition: utils.c:451
static void dct_unquantize_mpeg1_inter_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2549
void ff_thread_report_progress(AVFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
Definition: pthread.c:666
int ff_thread_get_buffer(AVCodecContext *avctx, AVFrame *f)
Wrapper around get_buffer() for frame-multithreaded codecs.
Definition: pthread.c:921
int debug_mv
debug
Definition: avcodec.h:2592
int ff_find_unused_picture(MpegEncContext *s, int shared)
Definition: mpegvideo.c:1331
#define MV_TYPE_8X8
4 vectors (h263, mpeg4 4MV)
Definition: mpegvideo.h:390
int16_t(* b_direct_mv_table_base)[2]
Definition: mpegvideo.h:370
int b_code
backward MV resolution for B Frames (mpeg4)
Definition: mpegvideo.h:364
#define CONFIG_MPEG_XVMC_DECODER
Definition: config.h:441
uint8_t ** extended_data
pointers to the data planes/channels.
Definition: avcodec.h:1028
void(* dct_unquantize_h263_intra)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:690
float * bits_tab
Definition: mpegvideo.h:713
#define restrict
Definition: config.h:8
int dct_count[2]
Definition: mpegvideo.h:468
int uvlinesize
line size, for chroma in bytes, may be different from width
Definition: mpegvideo.h:254
void ff_MPV_common_init_axp(MpegEncContext *s)
AVPixelFormat
Pixel format.
Definition: pixfmt.h:63
void * av_mallocz(size_t size)
Allocate a block of size bytes with alignment suitable for all memory accesses (including vectors if ...
Definition: mem.c:158
void ff_thread_release_buffer(AVCodecContext *avctx, AVFrame *f)
Wrapper around release_buffer() frame-for multithreaded codecs.
Definition: pthread.c:979
for(j=16;j >0;--j)
#define FF_ALLOCZ_OR_GOTO(ctx, p, size, label)
Definition: internal.h:69
Predicted.
Definition: avutil.h:246
void ff_MPV_decode_defaults(MpegEncContext *s)
Set the given MpegEncContext to defaults for decoding.
Definition: mpegvideo.c:718
if(!(ptr_align%ac->ptr_align)&&samples_align >=aligned_len)