FFmpeg 4.4.4
utvideodec.c
1 /*
2  * Ut Video decoder
3  * Copyright (c) 2011 Konstantin Shishkov
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Ut Video decoder
25  */
26 
27 #include <inttypes.h>
28 #include <stdlib.h>
29 
30 #define CACHED_BITSTREAM_READER !ARCH_X86_32
31 #define UNCHECKED_BITSTREAM_READER 1
32 
33 #include "libavutil/intreadwrite.h"
34 #include "libavutil/pixdesc.h"
35 #include "avcodec.h"
36 #include "bswapdsp.h"
37 #include "bytestream.h"
38 #include "get_bits.h"
39 #include "internal.h"
40 #include "thread.h"
41 #include "utvideo.h"
42 
43 typedef struct HuffEntry {
44  uint8_t len;
45  uint16_t sym;
46 } HuffEntry;
47 
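/* The per-plane Huffman table in the bitstream is an array of one-byte code
 * lengths, one per symbol (256 entries for 8-bit planes, 1024 for 10-bit).
 * A transmitted length of 0 marks the single symbol that fills the whole
 * plane (reported through *fsym), 255 marks an unused symbol, and values
 * 1..32 are actual code lengths. */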
48 static int build_huff(UtvideoContext *c, const uint8_t *src, VLC *vlc,
49  int *fsym, unsigned nb_elems)
50 {
51  int i;
52  HuffEntry he[1024];
53  uint8_t bits[1024];
54  uint16_t codes_count[33] = { 0 };
55 
56  *fsym = -1;
57  for (i = 0; i < nb_elems; i++) {
58  if (src[i] == 0) {
59  *fsym = i;
60  return 0;
61  } else if (src[i] == 255) {
62  bits[i] = 0;
63  } else if (src[i] <= 32) {
64  bits[i] = src[i];
65  } else
66  return AVERROR_INVALIDDATA;
67 
68  codes_count[bits[i]]++;
69  }
70  if (codes_count[0] == nb_elems)
71  return AVERROR_INVALIDDATA;
72 
73  /* For Ut Video, longer codes are to the left of the tree and
74  * for codes with the same length the symbol is descending from
75  * left to right. So after the next loop --codes_count[i] will
76  * be the index of the first (lowest) symbol of length i when
77  * indexed by the position in the tree with left nodes being first. */
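 /* Worked example of the two loops below: transmitted lengths {2, 1, 2, 255}
  * (255 = unused) for symbols {0, 1, 2, 3} give codes_count = {1, 1, 2} for
  * lengths 0..2; after accumulation codes_count = {4, 3, 2}, and the
  * placement loop yields he = {{2,2}, {2,0}, {1,1}, {0,3}} with
  * codes_count[0] == 3, the number of used codes passed to the VLC builder. */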
78  for (int i = 31; i >= 0; i--)
79  codes_count[i] += codes_count[i + 1];
80 
81  for (unsigned i = 0; i < nb_elems; i++)
82  he[--codes_count[bits[i]]] = (HuffEntry) { bits[i], i };
83 
84 #define VLC_BITS 11
85  return ff_init_vlc_from_lengths(vlc, VLC_BITS, codes_count[0],
86  &he[0].len, sizeof(*he),
87  &he[0].sym, sizeof(*he), 2, 0, 0, c->avctx);
88 }
89 
90 static int decode_plane10(UtvideoContext *c, int plane_no,
91  uint16_t *dst, ptrdiff_t stride,
92  int width, int height,
93  const uint8_t *src, const uint8_t *huff,
94  int use_pred)
95 {
96  int i, j, slice, pix, ret;
97  int sstart, send;
98  VLC vlc;
99  GetBitContext gb;
100  int prev, fsym;
101 
102  if ((ret = build_huff(c, huff, &vlc, &fsym, 1024)) < 0) {
103  av_log(c->avctx, AV_LOG_ERROR, "Cannot build Huffman codes\n");
104  return ret;
105  }
106  if (fsym >= 0) { // build_huff reported a symbol to fill slices with
107  send = 0;
108  for (slice = 0; slice < c->slices; slice++) {
109  uint16_t *dest;
110 
111  sstart = send;
112  send = (height * (slice + 1) / c->slices);
113  dest = dst + sstart * stride;
114 
115  prev = 0x200;
116  for (j = sstart; j < send; j++) {
117  for (i = 0; i < width; i++) {
118  pix = fsym;
119  if (use_pred) {
120  prev += pix;
121  prev &= 0x3FF;
122  pix = prev;
123  }
124  dest[i] = pix;
125  }
126  dest += stride;
127  }
128  }
129  return 0;
130  }
131 
132  send = 0;
133  for (slice = 0; slice < c->slices; slice++) {
134  uint16_t *dest;
135  int slice_data_start, slice_data_end, slice_size;
136 
137  sstart = send;
138  send = (height * (slice + 1) / c->slices);
139  dest = dst + sstart * stride;
140 
141  // slice offset and size validation was done earlier
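 /* Slice data layout: src begins with c->slices little-endian 32-bit end
  * offsets (one per slice), followed by the slice payloads, which are
  * byte-swapped into c->slice_bits before bit reading. */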
142  slice_data_start = slice ? AV_RL32(src + slice * 4 - 4) : 0;
143  slice_data_end = AV_RL32(src + slice * 4);
144  slice_size = slice_data_end - slice_data_start;
145 
146  if (!slice_size) {
147  av_log(c->avctx, AV_LOG_ERROR, "Plane has more than one symbol "
148  "yet a slice has a length of zero.\n");
149  goto fail;
150  }
151 
152  memset(c->slice_bits + slice_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
153  c->bdsp.bswap_buf((uint32_t *) c->slice_bits,
154  (uint32_t *)(src + slice_data_start + c->slices * 4),
155  (slice_data_end - slice_data_start + 3) >> 2);
156  init_get_bits(&gb, c->slice_bits, slice_size * 8);
157 
158  prev = 0x200;
159  for (j = sstart; j < send; j++) {
160  for (i = 0; i < width; i++) {
161  pix = get_vlc2(&gb, vlc.table, VLC_BITS, 3);
162  if (pix < 0) {
163  av_log(c->avctx, AV_LOG_ERROR, "Decoding error\n");
164  goto fail;
165  }
166  if (use_pred) {
167  prev += pix;
168  prev &= 0x3FF;
169  pix = prev;
170  }
171  dest[i] = pix;
172  }
173  dest += stride;
174  if (get_bits_left(&gb) < 0) {
175  av_log(c->avctx, AV_LOG_ERROR,
176  "Slice decoding ran out of bits\n");
177  goto fail;
178  }
179  }
180  if (get_bits_left(&gb) > 32)
181  av_log(c->avctx, AV_LOG_WARNING,
182  "%d bits left after decoding slice\n", get_bits_left(&gb));
183  }
184 
185  ff_free_vlc(&vlc);
186 
187  return 0;
188 fail:
189  ff_free_vlc(&vlc);
190  return AVERROR_INVALIDDATA;
191 }
192 
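/* Slice boundary rounding mask: for the 4:2:0 luma plane, slice ends are
 * rounded down to even lines (multiples of four when interlaced) so that the
 * half-height chroma slices stay aligned; all other planes are rounded to
 * even lines only in interlaced mode. The result is ANDed with the line
 * number that ends each slice. */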
193 static int compute_cmask(int plane_no, int interlaced, enum AVPixelFormat pix_fmt)
194 {
195  const int is_luma = (pix_fmt == AV_PIX_FMT_YUV420P) && !plane_no;
196 
197  if (interlaced)
198  return ~(1 + 2 * is_luma);
199 
200  return ~is_luma;
201 }
202 
203 static int decode_plane(UtvideoContext *c, int plane_no,
204  uint8_t *dst, ptrdiff_t stride,
205  int width, int height,
206  const uint8_t *src, int use_pred)
207 {
208  int i, j, slice, pix;
209  int sstart, send;
210  VLC vlc;
211  GetBitContext gb;
212  int ret, prev, fsym;
213  const int cmask = compute_cmask(plane_no, c->interlaced, c->avctx->pix_fmt);
214 
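 /* Packed (UMxx) mode: each plane/slice pair has a control stream and a
  * packed sample stream. For every group of 8 pixels a 3-bit control code
  * selects the sample width; 0 means the whole group is zero, otherwise
  * eight samples of (bits + 1) bits each are read from the packed stream. */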
215  if (c->pack) {
216  send = 0;
217  for (slice = 0; slice < c->slices; slice++) {
218  GetBitContext cbit, pbit;
219  uint8_t *dest, *p;
220 
221  ret = init_get_bits8_le(&cbit, c->control_stream[plane_no][slice], c->control_stream_size[plane_no][slice]);
222  if (ret < 0)
223  return ret;
224 
225  ret = init_get_bits8_le(&pbit, c->packed_stream[plane_no][slice], c->packed_stream_size[plane_no][slice]);
226  if (ret < 0)
227  return ret;
228 
229  sstart = send;
230  send = (height * (slice + 1) / c->slices) & cmask;
231  dest = dst + sstart * stride;
232 
233  if (3 * ((dst + send * stride - dest + 7)/8) > get_bits_left(&cbit))
234  return AVERROR_INVALIDDATA;
235 
236  for (p = dest; p < dst + send * stride; p += 8) {
237  int bits = get_bits_le(&cbit, 3);
238 
239  if (bits == 0) {
240  *(uint64_t *) p = 0;
241  } else {
242  uint32_t sub = 0x80 >> (8 - (bits + 1)), add;
243  int k;
244 
245  if ((bits + 1) * 8 > get_bits_left(&pbit))
246  return AVERROR_INVALIDDATA;
247 
248  for (k = 0; k < 8; k++) {
249 
250  p[k] = get_bits_le(&pbit, bits + 1);
251  add = (~p[k] & sub) << (8 - bits);
252  p[k] -= sub;
253  p[k] += add;
254  }
255  }
256  }
257  }
258 
259  return 0;
260  }
261 
262  if (build_huff(c, src, &vlc, &fsym, 256)) {
263  av_log(c->avctx, AV_LOG_ERROR, "Cannot build Huffman codes\n");
264  return AVERROR_INVALIDDATA;
265  }
266  if (fsym >= 0) { // build_huff reported a symbol to fill slices with
267  send = 0;
268  for (slice = 0; slice < c->slices; slice++) {
269  uint8_t *dest;
270 
271  sstart = send;
272  send = (height * (slice + 1) / c->slices) & cmask;
273  dest = dst + sstart * stride;
274 
275  prev = 0x80;
276  for (j = sstart; j < send; j++) {
277  for (i = 0; i < width; i++) {
278  pix = fsym;
279  if (use_pred) {
280  prev += (unsigned)pix;
281  pix = prev;
282  }
283  dest[i] = pix;
284  }
285  dest += stride;
286  }
287  }
288  return 0;
289  }
290 
291  src += 256;
292 
293  send = 0;
294  for (slice = 0; slice < c->slices; slice++) {
295  uint8_t *dest;
296  int slice_data_start, slice_data_end, slice_size;
297 
298  sstart = send;
299  send = (height * (slice + 1) / c->slices) & cmask;
300  dest = dst + sstart * stride;
301 
302  // slice offset and size validation was done earlier
303  slice_data_start = slice ? AV_RL32(src + slice * 4 - 4) : 0;
304  slice_data_end = AV_RL32(src + slice * 4);
305  slice_size = slice_data_end - slice_data_start;
306 
307  if (!slice_size) {
308  av_log(c->avctx, AV_LOG_ERROR, "Plane has more than one symbol "
309  "yet a slice has a length of zero.\n");
310  goto fail;
311  }
312 
313  memset(c->slice_bits + slice_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
314  c->bdsp.bswap_buf((uint32_t *) c->slice_bits,
315  (uint32_t *)(src + slice_data_start + c->slices * 4),
316  (slice_data_end - slice_data_start + 3) >> 2);
317  init_get_bits(&gb, c->slice_bits, slice_size * 8);
318 
319  prev = 0x80;
320  for (j = sstart; j < send; j++) {
321  for (i = 0; i < width; i++) {
322  pix = get_vlc2(&gb, vlc.table, VLC_BITS, 3);
323  if (pix < 0) {
324  av_log(c->avctx, AV_LOG_ERROR, "Decoding error\n");
325  goto fail;
326  }
327  if (use_pred) {
328  prev += pix;
329  pix = prev;
330  }
331  dest[i] = pix;
332  }
333  if (get_bits_left(&gb) < 0) {
334  av_log(c->avctx, AV_LOG_ERROR,
335  "Slice decoding ran out of bits\n");
336  goto fail;
337  }
338  dest += stride;
339  }
340  if (get_bits_left(&gb) > 32)
341  av_log(c->avctx, AV_LOG_WARNING,
342  "%d bits left after decoding slice\n", get_bits_left(&gb));
343  }
344 
345  ff_free_vlc(&vlc);
346 
347  return 0;
348 fail:
349  ff_free_vlc(&vlc);
350  return AVERROR_INVALIDDATA;
351 }
352 
353 #undef A
354 #undef B
355 #undef C
356 
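/* Left/median prediction restore for progressive planes: the first line of
 * each slice is left-predicted, the first pixel of the second line is
 * top-predicted, and everything else uses the median predictor
 * mid_pred(left, top, left + top - topleft). */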
357 static void restore_median_planar(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
358  int width, int height, int slices, int rmode)
359 {
360  int i, j, slice;
361  int A, B, C;
362  uint8_t *bsrc;
363  int slice_start, slice_height;
364  const int cmask = ~rmode;
365 
366  for (slice = 0; slice < slices; slice++) {
367  slice_start = ((slice * height) / slices) & cmask;
368  slice_height = ((((slice + 1) * height) / slices) & cmask) -
369  slice_start;
370 
371  if (!slice_height)
372  continue;
373  bsrc = src + slice_start * stride;
374 
375  // first line - left neighbour prediction
376  bsrc[0] += 0x80;
377  c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
378  bsrc += stride;
379  if (slice_height <= 1)
380  continue;
381  // second line - first element has top prediction, the rest uses median
382  C = bsrc[-stride];
383  bsrc[0] += C;
384  A = bsrc[0];
385  for (i = 1; i < FFMIN(width, 16); i++) { /* scalar loop (the DSP function needs 16-byte alignment) */
386  B = bsrc[i - stride];
387  bsrc[i] += mid_pred(A, B, (uint8_t)(A + B - C));
388  C = B;
389  A = bsrc[i];
390  }
391  if (width > 16)
392  c->llviddsp.add_median_pred(bsrc + 16, bsrc - stride + 16,
393  bsrc + 16, width - 16, &A, &B);
394 
395  bsrc += stride;
396  // the rest of lines use continuous median prediction
397  for (j = 2; j < slice_height; j++) {
398  c->llviddsp.add_median_pred(bsrc, bsrc - stride,
399  bsrc, width, &A, &B);
400  bsrc += stride;
401  }
402  }
403 }
404 
405 /* UtVideo interlaced mode treats every pair of lines as a single one,
406  * so the restore functions must take care of possible padding between
407  * the two halves of the same "line".
408  */
409 static void restore_median_planar_il(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
410  int width, int height, int slices, int rmode)
411 {
412  int i, j, slice;
413  int A, B, C;
414  uint8_t *bsrc;
415  int slice_start, slice_height;
416  const int cmask = ~(rmode ? 3 : 1);
417  const ptrdiff_t stride2 = stride << 1;
418 
419  for (slice = 0; slice < slices; slice++) {
420  slice_start = ((slice * height) / slices) & cmask;
421  slice_height = ((((slice + 1) * height) / slices) & cmask) -
422  slice_start;
423  slice_height >>= 1;
424  if (!slice_height)
425  continue;
426 
427  bsrc = src + slice_start * stride;
428 
429  // first line - left neighbour prediction
430  bsrc[0] += 0x80;
431  A = c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
432  c->llviddsp.add_left_pred(bsrc + stride, bsrc + stride, width, A);
433  bsrc += stride2;
434  if (slice_height <= 1)
435  continue;
436  // second line - first element has top prediction, the rest uses median
437  C = bsrc[-stride2];
438  bsrc[0] += C;
439  A = bsrc[0];
440  for (i = 1; i < FFMIN(width, 16); i++) { /* scalar loop (the DSP function needs 16-byte alignment) */
441  B = bsrc[i - stride2];
442  bsrc[i] += mid_pred(A, B, (uint8_t)(A + B - C));
443  C = B;
444  A = bsrc[i];
445  }
446  if (width > 16)
447  c->llviddsp.add_median_pred(bsrc + 16, bsrc - stride2 + 16,
448  bsrc + 16, width - 16, &A, &B);
449 
450  c->llviddsp.add_median_pred(bsrc + stride, bsrc - stride,
451  bsrc + stride, width, &A, &B);
452  bsrc += stride2;
453  // the rest of lines use continuous median prediction
454  for (j = 2; j < slice_height; j++) {
455  c->llviddsp.add_median_pred(bsrc, bsrc - stride2,
456  bsrc, width, &A, &B);
457  c->llviddsp.add_median_pred(bsrc + stride, bsrc - stride,
458  bsrc + stride, width, &A, &B);
459  bsrc += stride2;
460  }
461  }
462 }
463 
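/* Gradient prediction restore: apart from the left-predicted first line of
 * each slice, every pixel adds the predictor top + left - topleft
 * (A - B + C below) to the decoded residual, modulo 256. */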
464 static void restore_gradient_planar(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
465  int width, int height, int slices, int rmode)
466 {
467  int i, j, slice;
468  int A, B, C;
469  uint8_t *bsrc;
470  int slice_start, slice_height;
471  const int cmask = ~rmode;
472  int min_width = FFMIN(width, 32);
473 
474  for (slice = 0; slice < slices; slice++) {
475  slice_start = ((slice * height) / slices) & cmask;
476  slice_height = ((((slice + 1) * height) / slices) & cmask) -
477  slice_start;
478 
479  if (!slice_height)
480  continue;
481  bsrc = src + slice_start * stride;
482 
483  // first line - left neighbour prediction
484  bsrc[0] += 0x80;
485  c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
486  bsrc += stride;
487  if (slice_height <= 1)
488  continue;
489  for (j = 1; j < slice_height; j++) {
490  // second line - first element has top prediction, the rest uses gradient
491  bsrc[0] = (bsrc[0] + bsrc[-stride]) & 0xFF;
492  for (i = 1; i < min_width; i++) { /* scalar loop (the DSP function needs 32-byte alignment) */
493  A = bsrc[i - stride];
494  B = bsrc[i - (stride + 1)];
495  C = bsrc[i - 1];
496  bsrc[i] = (A - B + C + bsrc[i]) & 0xFF;
497  }
498  if (width > 32)
499  c->llviddsp.add_gradient_pred(bsrc + 32, stride, width - 32);
500  bsrc += stride;
501  }
502  }
503 }
504 
505 static void restore_gradient_planar_il(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
506  int width, int height, int slices, int rmode)
507 {
508  int i, j, slice;
509  int A, B, C;
510  uint8_t *bsrc;
511  int slice_start, slice_height;
512  const int cmask = ~(rmode ? 3 : 1);
513  const ptrdiff_t stride2 = stride << 1;
514  int min_width = FFMIN(width, 32);
515 
516  for (slice = 0; slice < slices; slice++) {
517  slice_start = ((slice * height) / slices) & cmask;
518  slice_height = ((((slice + 1) * height) / slices) & cmask) -
519  slice_start;
520  slice_height >>= 1;
521  if (!slice_height)
522  continue;
523 
524  bsrc = src + slice_start * stride;
525 
526  // first line - left neighbour prediction
527  bsrc[0] += 0x80;
528  A = c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
529  c->llviddsp.add_left_pred(bsrc + stride, bsrc + stride, width, A);
530  bsrc += stride2;
531  if (slice_height <= 1)
532  continue;
533  for (j = 1; j < slice_height; j++) {
534  // second line - first element has top prediction, the rest uses gradient
535  bsrc[0] = (bsrc[0] + bsrc[-stride2]) & 0xFF;
536  for (i = 1; i < min_width; i++) { /* scalar loop (the DSP function needs 32-byte alignment) */
537  A = bsrc[i - stride2];
538  B = bsrc[i - (stride2 + 1)];
539  C = bsrc[i - 1];
540  bsrc[i] = (A - B + C + bsrc[i]) & 0xFF;
541  }
542  if (width > 32)
543  c->llviddsp.add_gradient_pred(bsrc + 32, stride2, width - 32);
544 
545  A = bsrc[-stride];
546  B = bsrc[-(1 + stride + stride - width)];
547  C = bsrc[width - 1];
548  bsrc[stride] = (A - B + C + bsrc[stride]) & 0xFF;
549  for (i = 1; i < width; i++) {
550  A = bsrc[i - stride];
551  B = bsrc[i - (1 + stride)];
552  C = bsrc[i - 1 + stride];
553  bsrc[i + stride] = (A - B + C + bsrc[i + stride]) & 0xFF;
554  }
555  bsrc += stride2;
556  }
557  }
558 }
559 
560 static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
561  AVPacket *avpkt)
562 {
563  const uint8_t *buf = avpkt->data;
564  int buf_size = avpkt->size;
565  UtvideoContext *c = avctx->priv_data;
566  int i, j;
567  const uint8_t *plane_start[5];
568  int plane_size, max_slice_size = 0, slice_start, slice_end, slice_size;
569  int ret;
570  GetByteContext gb;
571  ThreadFrame frame = { .f = data };
572 
573  if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
574  return ret;
575 
576  /* parse plane structure to get frame flags and validate slice offsets */
577  bytestream2_init(&gb, buf, buf_size);
578 
579  if (c->pack) {
580  const uint8_t *packed_stream;
581  const uint8_t *control_stream;
582  GetByteContext pb;
583  uint32_t nb_cbs;
584  int left;
585 
586  c->frame_info = PRED_GRADIENT << 8;
587 
588  if (bytestream2_get_byte(&gb) != 1)
589  return AVERROR_INVALIDDATA;
590  bytestream2_skip(&gb, 3);
591  c->offset = bytestream2_get_le32(&gb);
592 
593  if (buf_size <= c->offset + 8LL)
594  return AVERROR_INVALIDDATA;
595 
596  bytestream2_init(&pb, buf + 8 + c->offset, buf_size - 8 - c->offset);
597 
598  nb_cbs = bytestream2_get_le32(&pb);
599  if (nb_cbs > c->offset)
600  return AVERROR_INVALIDDATA;
601 
602  packed_stream = buf + 8;
603  control_stream = packed_stream + (c->offset - nb_cbs);
604  left = control_stream - packed_stream;
605 
606  for (i = 0; i < c->planes; i++) {
607  for (j = 0; j < c->slices; j++) {
608  c->packed_stream[i][j] = packed_stream;
609  c->packed_stream_size[i][j] = bytestream2_get_le32(&pb);
610  if (c->packed_stream_size[i][j] > left)
611  return AVERROR_INVALIDDATA;
612  left -= c->packed_stream_size[i][j];
613  packed_stream += c->packed_stream_size[i][j];
614  }
615  }
616 
617  left = buf + buf_size - control_stream;
618 
619  for (i = 0; i < c->planes; i++) {
620  for (j = 0; j < c->slices; j++) {
621  c->control_stream[i][j] = control_stream;
622  c->control_stream_size[i][j] = bytestream2_get_le32(&pb);
623  if (c->control_stream_size[i][j] > left)
624  return AVERROR_INVALIDDATA;
625  left -= c->control_stream_size[i][j];
626  control_stream += c->control_stream_size[i][j];
627  }
628  }
629  } else if (c->pro) {
630  if (bytestream2_get_bytes_left(&gb) < c->frame_info_size) {
631  av_log(avctx, AV_LOG_ERROR, "Not enough data for frame information\n");
632  return AVERROR_INVALIDDATA;
633  }
634  c->frame_info = bytestream2_get_le32u(&gb);
635  c->slices = ((c->frame_info >> 16) & 0xff) + 1;
636  for (i = 0; i < c->planes; i++) {
637  plane_start[i] = gb.buffer;
638  if (bytestream2_get_bytes_left(&gb) < 1024 + 4 * c->slices) {
639  av_log(avctx, AV_LOG_ERROR, "Insufficient data for a plane\n");
640  return AVERROR_INVALIDDATA;
641  }
642  slice_start = 0;
643  slice_end = 0;
644  for (j = 0; j < c->slices; j++) {
645  slice_end = bytestream2_get_le32u(&gb);
646  if (slice_end < 0 || slice_end < slice_start ||
647  bytestream2_get_bytes_left(&gb) < slice_end + 1024LL) {
648  av_log(avctx, AV_LOG_ERROR, "Incorrect slice size\n");
649  return AVERROR_INVALIDDATA;
650  }
651  slice_size = slice_end - slice_start;
652  slice_start = slice_end;
653  max_slice_size = FFMAX(max_slice_size, slice_size);
654  }
655  plane_size = slice_end;
656  bytestream2_skipu(&gb, plane_size);
657  bytestream2_skipu(&gb, 1024);
658  }
659  plane_start[c->planes] = gb.buffer;
660  } else {
661  for (i = 0; i < c->planes; i++) {
662  plane_start[i] = gb.buffer;
663  if (bytestream2_get_bytes_left(&gb) < 256 + 4 * c->slices) {
664  av_log(avctx, AV_LOG_ERROR, "Insufficient data for a plane\n");
665  return AVERROR_INVALIDDATA;
666  }
667  bytestream2_skipu(&gb, 256);
668  slice_start = 0;
669  slice_end = 0;
670  for (j = 0; j < c->slices; j++) {
671  slice_end = bytestream2_get_le32u(&gb);
672  if (slice_end < 0 || slice_end < slice_start ||
673  bytestream2_get_bytes_left(&gb) < slice_end) {
674  av_log(avctx, AV_LOG_ERROR, "Incorrect slice size\n");
675  return AVERROR_INVALIDDATA;
676  }
677  slice_size = slice_end - slice_start;
678  slice_start = slice_end;
679  max_slice_size = FFMAX(max_slice_size, slice_size);
680  }
681  plane_size = slice_end;
682  bytestream2_skipu(&gb, plane_size);
683  }
684  plane_start[c->planes] = gb.buffer;
685  if (bytestream2_get_bytes_left(&gb) < c->frame_info_size) {
686  av_log(avctx, AV_LOG_ERROR, "Not enough data for frame information\n");
687  return AVERROR_INVALIDDATA;
688  }
689  c->frame_info = bytestream2_get_le32u(&gb);
690  }
691  av_log(avctx, AV_LOG_DEBUG, "frame information flags %"PRIX32"\n",
692  c->frame_info);
693 
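 /* frame_info layout as used here: bits 8-9 select the prediction mode; in
  * the Pro variant bits 16-23 hold the slice count minus one. */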
694  c->frame_pred = (c->frame_info >> 8) & 3;
695 
696  max_slice_size += 4*avctx->width;
697 
698  if (!c->pack) {
699  av_fast_malloc(&c->slice_bits, &c->slice_bits_size,
700  max_slice_size + AV_INPUT_BUFFER_PADDING_SIZE);
701 
702  if (!c->slice_bits) {
703  av_log(avctx, AV_LOG_ERROR, "Cannot allocate temporary buffer\n");
704  return AVERROR(ENOMEM);
705  }
706  }
707 
708  switch (c->avctx->pix_fmt) {
709  case AV_PIX_FMT_GBRP:
710  case AV_PIX_FMT_GBRAP:
711  for (i = 0; i < c->planes; i++) {
712  ret = decode_plane(c, i, frame.f->data[i],
713  frame.f->linesize[i], avctx->width,
714  avctx->height, plane_start[i],
715  c->frame_pred == PRED_LEFT);
716  if (ret)
717  return ret;
718  if (c->frame_pred == PRED_MEDIAN) {
719  if (!c->interlaced) {
720  restore_median_planar(c, frame.f->data[i],
721  frame.f->linesize[i], avctx->width,
722  avctx->height, c->slices, 0);
723  } else {
724  restore_median_planar_il(c, frame.f->data[i],
725  frame.f->linesize[i],
726  avctx->width, avctx->height, c->slices,
727  0);
728  }
729  } else if (c->frame_pred == PRED_GRADIENT) {
730  if (!c->interlaced) {
731  restore_gradient_planar(c, frame.f->data[i],
732  frame.f->linesize[i], avctx->width,
733  avctx->height, c->slices, 0);
734  } else {
735  restore_gradient_planar_il(c, frame.f->data[i],
736  frame.f->linesize[i],
737  avctx->width, avctx->height, c->slices,
738  0);
739  }
740  }
741  }
742  c->utdsp.restore_rgb_planes(frame.f->data[2], frame.f->data[0], frame.f->data[1],
743  frame.f->linesize[2], frame.f->linesize[0], frame.f->linesize[1],
744  avctx->width, avctx->height);
745  break;
746  case AV_PIX_FMT_GBRAP10:
747  case AV_PIX_FMT_GBRP10:
748  for (i = 0; i < c->planes; i++) {
749  ret = decode_plane10(c, i, (uint16_t *)frame.f->data[i],
750  frame.f->linesize[i] / 2, avctx->width,
751  avctx->height, plane_start[i],
752  plane_start[i + 1] - 1024,
753  c->frame_pred == PRED_LEFT);
754  if (ret)
755  return ret;
756  }
757  c->utdsp.restore_rgb_planes10((uint16_t *)frame.f->data[2], (uint16_t *)frame.f->data[0], (uint16_t *)frame.f->data[1],
758  frame.f->linesize[2] / 2, frame.f->linesize[0] / 2, frame.f->linesize[1] / 2,
759  avctx->width, avctx->height);
760  break;
761  case AV_PIX_FMT_YUV420P:
762  for (i = 0; i < 3; i++) {
763  ret = decode_plane(c, i, frame.f->data[i], frame.f->linesize[i],
764  avctx->width >> !!i, avctx->height >> !!i,
765  plane_start[i], c->frame_pred == PRED_LEFT);
766  if (ret)
767  return ret;
768  if (c->frame_pred == PRED_MEDIAN) {
769  if (!c->interlaced) {
770  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
771  avctx->width >> !!i, avctx->height >> !!i,
772  c->slices, !i);
773  } else {
774  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
775  avctx->width >> !!i,
776  avctx->height >> !!i,
777  c->slices, !i);
778  }
779  } else if (c->frame_pred == PRED_GRADIENT) {
780  if (!c->interlaced) {
781  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
782  avctx->width >> !!i, avctx->height >> !!i,
783  c->slices, !i);
784  } else {
785  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
786  avctx->width >> !!i,
787  avctx->height >> !!i,
788  c->slices, !i);
789  }
790  }
791  }
792  break;
793  case AV_PIX_FMT_YUV422P:
794  for (i = 0; i < 3; i++) {
795  ret = decode_plane(c, i, frame.f->data[i], frame.f->linesize[i],
796  avctx->width >> !!i, avctx->height,
797  plane_start[i], c->frame_pred == PRED_LEFT);
798  if (ret)
799  return ret;
800  if (c->frame_pred == PRED_MEDIAN) {
801  if (!c->interlaced) {
802  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
803  avctx->width >> !!i, avctx->height,
804  c->slices, 0);
805  } else {
806  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
807  avctx->width >> !!i, avctx->height,
808  c->slices, 0);
809  }
810  } else if (c->frame_pred == PRED_GRADIENT) {
811  if (!c->interlaced) {
812  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
813  avctx->width >> !!i, avctx->height,
814  c->slices, 0);
815  } else {
816  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
817  avctx->width >> !!i, avctx->height,
818  c->slices, 0);
819  }
820  }
821  }
822  break;
823  case AV_PIX_FMT_YUV444P:
824  for (i = 0; i < 3; i++) {
825  ret = decode_plane(c, i, frame.f->data[i], frame.f->linesize[i],
826  avctx->width, avctx->height,
827  plane_start[i], c->frame_pred == PRED_LEFT);
828  if (ret)
829  return ret;
830  if (c->frame_pred == PRED_MEDIAN) {
831  if (!c->interlaced) {
832  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
833  avctx->width, avctx->height,
834  c->slices, 0);
835  } else {
836  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
837  avctx->width, avctx->height,
838  c->slices, 0);
839  }
840  } else if (c->frame_pred == PRED_GRADIENT) {
841  if (!c->interlaced) {
842  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
843  avctx->width, avctx->height,
844  c->slices, 0);
845  } else {
846  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
847  avctx->width, avctx->height,
848  c->slices, 0);
849  }
850  }
851  }
852  break;
853  case AV_PIX_FMT_YUV420P10:
854  for (i = 0; i < 3; i++) {
855  ret = decode_plane10(c, i, (uint16_t *)frame.f->data[i], frame.f->linesize[i] / 2,
856  avctx->width >> !!i, avctx->height >> !!i,
857  plane_start[i], plane_start[i + 1] - 1024, c->frame_pred == PRED_LEFT);
858  if (ret)
859  return ret;
860  }
861  break;
862  case AV_PIX_FMT_YUV422P10:
863  for (i = 0; i < 3; i++) {
864  ret = decode_plane10(c, i, (uint16_t *)frame.f->data[i], frame.f->linesize[i] / 2,
865  avctx->width >> !!i, avctx->height,
866  plane_start[i], plane_start[i + 1] - 1024, c->frame_pred == PRED_LEFT);
867  if (ret)
868  return ret;
869  }
870  break;
871  }
872 
873  frame.f->key_frame = 1;
874  frame.f->pict_type = AV_PICTURE_TYPE_I;
875  frame.f->interlaced_frame = !!c->interlaced;
876 
877  *got_frame = 1;
878 
879  /* always report that the buffer was completely consumed */
880  return buf_size;
881 }
882 
883 static av_cold int decode_init(AVCodecContext *avctx)
884 {
885  UtvideoContext * const c = avctx->priv_data;
886  int h_shift, v_shift;
887 
888  c->avctx = avctx;
889 
890  ff_utvideodsp_init(&c->utdsp);
891  ff_bswapdsp_init(&c->bdsp);
892  ff_llviddsp_init(&c->llviddsp);
893 
894  c->slice_bits_size = 0;
895 
896  switch (avctx->codec_tag) {
897  case MKTAG('U', 'L', 'R', 'G'):
898  c->planes = 3;
899  avctx->pix_fmt = AV_PIX_FMT_GBRP;
900  break;
901  case MKTAG('U', 'L', 'R', 'A'):
902  c->planes = 4;
903  avctx->pix_fmt = AV_PIX_FMT_GBRAP;
904  break;
905  case MKTAG('U', 'L', 'Y', '0'):
906  c->planes = 3;
907  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
908  avctx->colorspace = AVCOL_SPC_BT470BG;
909  break;
910  case MKTAG('U', 'L', 'Y', '2'):
911  c->planes = 3;
912  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
913  avctx->colorspace = AVCOL_SPC_BT470BG;
914  break;
915  case MKTAG('U', 'L', 'Y', '4'):
916  c->planes = 3;
917  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
918  avctx->colorspace = AVCOL_SPC_BT470BG;
919  break;
920  case MKTAG('U', 'Q', 'Y', '0'):
921  c->planes = 3;
922  c->pro = 1;
923  avctx->pix_fmt = AV_PIX_FMT_YUV420P10;
924  break;
925  case MKTAG('U', 'Q', 'Y', '2'):
926  c->planes = 3;
927  c->pro = 1;
928  avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
929  break;
930  case MKTAG('U', 'Q', 'R', 'G'):
931  c->planes = 3;
932  c->pro = 1;
933  avctx->pix_fmt = AV_PIX_FMT_GBRP10;
934  break;
935  case MKTAG('U', 'Q', 'R', 'A'):
936  c->planes = 4;
937  c->pro = 1;
938  avctx->pix_fmt = AV_PIX_FMT_GBRAP10;
939  break;
940  case MKTAG('U', 'L', 'H', '0'):
941  c->planes = 3;
942  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
943  avctx->colorspace = AVCOL_SPC_BT709;
944  break;
945  case MKTAG('U', 'L', 'H', '2'):
946  c->planes = 3;
947  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
948  avctx->colorspace = AVCOL_SPC_BT709;
949  break;
950  case MKTAG('U', 'L', 'H', '4'):
951  c->planes = 3;
952  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
953  avctx->colorspace = AVCOL_SPC_BT709;
954  break;
955  case MKTAG('U', 'M', 'Y', '2'):
956  c->planes = 3;
957  c->pack = 1;
958  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
959  avctx->colorspace = AVCOL_SPC_BT470BG;
960  break;
961  case MKTAG('U', 'M', 'H', '2'):
962  c->planes = 3;
963  c->pack = 1;
964  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
965  avctx->colorspace = AVCOL_SPC_BT709;
966  break;
967  case MKTAG('U', 'M', 'Y', '4'):
968  c->planes = 3;
969  c->pack = 1;
970  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
971  avctx->colorspace = AVCOL_SPC_BT470BG;
972  break;
973  case MKTAG('U', 'M', 'H', '4'):
974  c->planes = 3;
975  c->pack = 1;
976  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
977  avctx->colorspace = AVCOL_SPC_BT709;
978  break;
979  case MKTAG('U', 'M', 'R', 'G'):
980  c->planes = 3;
981  c->pack = 1;
982  avctx->pix_fmt = AV_PIX_FMT_GBRP;
983  break;
984  case MKTAG('U', 'M', 'R', 'A'):
985  c->planes = 4;
986  c->pack = 1;
987  avctx->pix_fmt = AV_PIX_FMT_GBRAP;
988  break;
989  default:
990  av_log(avctx, AV_LOG_ERROR, "Unknown Ut Video FOURCC provided (%08X)\n",
991  avctx->codec_tag);
992  return AVERROR_INVALIDDATA;
993  }
994 
995  av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt, &h_shift, &v_shift);
996  if ((avctx->width & ((1<<h_shift)-1)) ||
997  (avctx->height & ((1<<v_shift)-1))) {
998  avpriv_request_sample(avctx, "Odd dimensions");
999  return AVERROR_PATCHWELCOME;
1000  }
1001 
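 /* Extradata layout as parsed below: bytes 0-3 encoder version, bytes 4-7
  * original pixel format FourCC. Packed (UMxx) variants then store the
  * compression type in byte 8 and the slice count minus one in byte 9; the
  * original Ut Video variants store frame_info_size in bytes 8-11 and a
  * flags word in bytes 12-15 (bit 0: compression, bit 11: interlaced, top
  * byte: slice count minus one). */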
1002  if (c->pack && avctx->extradata_size >= 16) {
1003  av_log(avctx, AV_LOG_DEBUG, "Encoder version %d.%d.%d.%d\n",
1004  avctx->extradata[3], avctx->extradata[2],
1005  avctx->extradata[1], avctx->extradata[0]);
1006  av_log(avctx, AV_LOG_DEBUG, "Original format %"PRIX32"\n",
1007  AV_RB32(avctx->extradata + 4));
1008  c->compression = avctx->extradata[8];
1009  if (c->compression != 2)
1010  avpriv_request_sample(avctx, "Unknown compression type");
1011  c->slices = avctx->extradata[9] + 1;
1012  } else if (!c->pro && avctx->extradata_size >= 16) {
1013  av_log(avctx, AV_LOG_DEBUG, "Encoder version %d.%d.%d.%d\n",
1014  avctx->extradata[3], avctx->extradata[2],
1015  avctx->extradata[1], avctx->extradata[0]);
1016  av_log(avctx, AV_LOG_DEBUG, "Original format %"PRIX32"\n",
1017  AV_RB32(avctx->extradata + 4));
1018  c->frame_info_size = AV_RL32(avctx->extradata + 8);
1019  c->flags = AV_RL32(avctx->extradata + 12);
1020 
1021  if (c->frame_info_size != 4)
1022  avpriv_request_sample(avctx, "Frame info not 4 bytes");
1023  av_log(avctx, AV_LOG_DEBUG, "Encoding parameters %08"PRIX32"\n", c->flags);
1024  c->slices = (c->flags >> 24) + 1;
1025  c->compression = c->flags & 1;
1026  c->interlaced = c->flags & 0x800;
1027  } else if (c->pro && avctx->extradata_size == 8) {
1028  av_log(avctx, AV_LOG_DEBUG, "Encoder version %d.%d.%d.%d\n",
1029  avctx->extradata[3], avctx->extradata[2],
1030  avctx->extradata[1], avctx->extradata[0]);
1031  av_log(avctx, AV_LOG_DEBUG, "Original format %"PRIX32"\n",
1032  AV_RB32(avctx->extradata + 4));
1033  c->interlaced = 0;
1034  c->frame_info_size = 4;
1035  } else {
1036  av_log(avctx, AV_LOG_ERROR,
1037  "Insufficient extradata size %d, should be at least 16\n",
1038  avctx->extradata_size);
1039  return AVERROR_INVALIDDATA;
1040  }
1041 
1042  return 0;
1043 }
1044 
1045 static av_cold int decode_end(AVCodecContext *avctx)
1046 {
1047  UtvideoContext * const c = avctx->priv_data;
1048 
1049  av_freep(&c->slice_bits);
1050 
1051  return 0;
1052 }
1053 
1054 AVCodec ff_utvideo_decoder = {
1055  .name = "utvideo",
1056  .long_name = NULL_IF_CONFIG_SMALL("Ut Video"),
1057  .type = AVMEDIA_TYPE_VIDEO,
1058  .id = AV_CODEC_ID_UTVIDEO,
1059  .priv_data_size = sizeof(UtvideoContext),
1060  .init = decode_init,
1061  .close = decode_end,
1062  .decode = decode_frame,
1063  .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
1064  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
1065 };