1 /*
2  * huffyuv codec for libavcodec
3  *
4  * Copyright (c) 2002-2003 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * see http://www.pcisys.net/~melanson/codecs/huffyuv.txt for a description of
7  * the algorithm used
8  *
9  * This file is part of FFmpeg.
10  *
11  * FFmpeg is free software; you can redistribute it and/or
12  * modify it under the terms of the GNU Lesser General Public
13  * License as published by the Free Software Foundation; either
14  * version 2.1 of the License, or (at your option) any later version.
15  *
16  * FFmpeg is distributed in the hope that it will be useful,
17  * but WITHOUT ANY WARRANTY; without even the implied warranty of
18  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
19  * Lesser General Public License for more details.
20  *
21  * You should have received a copy of the GNU Lesser General Public
22  * License along with FFmpeg; if not, write to the Free Software
23  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
24  */
25
26 /**
27  * @file
28  * huffyuv codec for libavcodec.
29  */
30
31 #include "avcodec.h"
32 #include "get_bits.h"
33 #include "put_bits.h"
34 #include "dsputil.h"
35 #include "thread.h"
36
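/* Size, in bits, of the first level of the VLC lookup tables.  Joint codes
 * built in generate_joint_tables() must fit within this many bits so that a
 * pair (or an RGB triple) of symbols can be decoded with a single get_vlc2()
 * lookup. */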
37 #define VLC_BITS 11
38
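/* Byte offsets of the blue, green, red and alpha components inside a packed
 * native-endian 32-bit RGB32 pixel; the ordering flips on big-endian hosts. */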
39 #if HAVE_BIGENDIAN
40 #define B 3
41 #define G 2
42 #define R 1
43 #define A 0
44 #else
45 #define B 0
46 #define G 1
47 #define R 2
48 #define A 3
49 #endif
50
51 typedef enum Predictor{
52     LEFT= 0,
53     PLANE,
54     MEDIAN,
55 } Predictor;
56
57 typedef struct HYuvContext{
58     AVCodecContext *avctx;
59     Predictor predictor;
60     GetBitContext gb;
61     PutBitContext pb;
62     int interlaced;
63     int decorrelate;
64     int bitstream_bpp;
65     int version;
66     int yuy2;                               //use yuy2 instead of 422P
67     int bgr32;                              //use bgr32 instead of bgr24
68     int width, height;
69     int flags;
70     int context;
71     int picture_number;
72     int last_slice_end;
73     uint8_t *temp[3];
74     uint64_t stats[3][256];
75     uint8_t len[3][256];
76     uint32_t bits[3][256];
77     uint32_t pix_bgr_map[1<<VLC_BITS];
78     VLC vlc[6];                             //Y,U,V,YY,YU,YV
79     AVFrame picture;
80     uint8_t *bitstream_buffer;
81     unsigned int bitstream_buffer_size;
82     DSPContext dsp;
83 }HYuvContext;
84
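/* Hard-coded tables for "classic" (v1) Huffyuv files that carry no Huffman
 * tables in extradata.  The *_shift_* arrays are run-length coded code lengths
 * in the same format parsed by read_len_table(); the *_add_* arrays hold the
 * corresponding code bits (see read_old_huffman_tables()). */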
85 static const unsigned char classic_shift_luma[] = {
86   34,36,35,69,135,232,9,16,10,24,11,23,12,16,13,10,14,8,15,8,
87   16,8,17,20,16,10,207,206,205,236,11,8,10,21,9,23,8,8,199,70,
88   69,68, 0
89 };
90
91 static const unsigned char classic_shift_chroma[] = {
92   66,36,37,38,39,40,41,75,76,77,110,239,144,81,82,83,84,85,118,183,
93   56,57,88,89,56,89,154,57,58,57,26,141,57,56,58,57,58,57,184,119,
94   214,245,116,83,82,49,80,79,78,77,44,75,41,40,39,38,37,36,34, 0
95 };
96
97 static const unsigned char classic_add_luma[256] = {
98     3,  9,  5, 12, 10, 35, 32, 29, 27, 50, 48, 45, 44, 41, 39, 37,
99    73, 70, 68, 65, 64, 61, 58, 56, 53, 50, 49, 46, 44, 41, 38, 36,
100    68, 65, 63, 61, 58, 55, 53, 51, 48, 46, 45, 43, 41, 39, 38, 36,
101    35, 33, 32, 30, 29, 27, 26, 25, 48, 47, 46, 44, 43, 41, 40, 39,
102    37, 36, 35, 34, 32, 31, 30, 28, 27, 26, 24, 23, 22, 20, 19, 37,
103    35, 34, 33, 31, 30, 29, 27, 26, 24, 23, 21, 20, 18, 17, 15, 29,
104    27, 26, 24, 22, 21, 19, 17, 16, 14, 26, 25, 23, 21, 19, 18, 16,
105    15, 27, 25, 23, 21, 19, 17, 16, 14, 26, 25, 23, 21, 18, 17, 14,
106    12, 17, 19, 13,  4,  9,  2, 11,  1,  7,  8,  0, 16,  3, 14,  6,
107    12, 10,  5, 15, 18, 11, 10, 13, 15, 16, 19, 20, 22, 24, 27, 15,
108    18, 20, 22, 24, 26, 14, 17, 20, 22, 24, 27, 15, 18, 20, 23, 25,
109    28, 16, 19, 22, 25, 28, 32, 36, 21, 25, 29, 33, 38, 42, 45, 49,
110    28, 31, 34, 37, 40, 42, 44, 47, 49, 50, 52, 54, 56, 57, 59, 60,
111    62, 64, 66, 67, 69, 35, 37, 39, 40, 42, 43, 45, 47, 48, 51, 52,
112    54, 55, 57, 59, 60, 62, 63, 66, 67, 69, 71, 72, 38, 40, 42, 43,
113    46, 47, 49, 51, 26, 28, 30, 31, 33, 34, 18, 19, 11, 13,  7,  8,
114 };
115
116 static const unsigned char classic_add_chroma[256] = {
117     3,  1,  2,  2,  2,  2,  3,  3,  7,  5,  7,  5,  8,  6, 11,  9,
118     7, 13, 11, 10,  9,  8,  7,  5,  9,  7,  6,  4,  7,  5,  8,  7,
119    11,  8, 13, 11, 19, 15, 22, 23, 20, 33, 32, 28, 27, 29, 51, 77,
120    43, 45, 76, 81, 46, 82, 75, 55, 56,144, 58, 80, 60, 74,147, 63,
121   143, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
122    80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 27, 30, 21, 22,
123    17, 14,  5,  6,100, 54, 47, 50, 51, 53,106,107,108,109,110,111,
124   112,113,114,115,  4,117,118, 92, 94,121,122,  3,124,103,  2,  1,
125     0,129,130,131,120,119,126,125,136,137,138,139,140,141,142,134,
126   135,132,133,104, 64,101, 62, 57,102, 95, 93, 59, 61, 28, 97, 96,
127    52, 49, 48, 29, 32, 25, 24, 46, 23, 98, 45, 44, 43, 20, 42, 41,
128    19, 18, 99, 40, 15, 39, 38, 16, 13, 12, 11, 37, 10,  9,  8, 36,
129     7,128,127,105,123,116, 35, 34, 33,145, 31, 79, 42,146, 78, 26,
130    83, 48, 49, 50, 44, 47, 26, 31, 30, 18, 17, 19, 21, 24, 25, 13,
131    14, 16, 17, 18, 20, 21, 12, 14, 15,  9, 10,  6,  9,  6,  5,  8,
132     6, 12,  8, 10,  7,  9,  6,  4,  6,  2,  2,  3,  3,  3,  3,  2,
133 };
134
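/* Left prediction over a run of w samples: each output byte is the modulo-256
 * difference between the current sample and the one to its left, starting
 * from 'left'; the last input sample is returned so prediction can continue
 * on the next call.  The first 16 samples are handled in C, presumably so
 * that dsp.diff_bytes() never has to deal with very short runs.
 * Example with left = 0: src = {10, 12, 11} -> dst = {10, 2, 255}. */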
135 static inline int sub_left_prediction(HYuvContext *s, uint8_t *dst, uint8_t *src, int w, int left){
136     int i;
137     if(w<32){
138         for(i=0; i<w; i++){
139             const int temp= src[i];
140             dst[i]= temp - left;
141             left= temp;
142         }
143         return left;
144     }else{
145         for(i=0; i<16; i++){
146             const int temp= src[i];
147             dst[i]= temp - left;
148             left= temp;
149         }
150         s->dsp.diff_bytes(dst+16, src+16, src+15, w-16);
151         return src[w-1];
152     }
153 }
154
155 static inline void sub_left_prediction_bgr32(HYuvContext *s, uint8_t *dst, uint8_t *src, int w, int *red, int *green, int *blue, int *alpha){
156     int i;
157     int r,g,b,a;
158     r= *red;
159     g= *green;
160     b= *blue;
161     a= *alpha;
162     for(i=0; i<FFMIN(w,4); i++){
163         const int rt= src[i*4+R];
164         const int gt= src[i*4+G];
165         const int bt= src[i*4+B];
166         const int at= src[i*4+A];
167         dst[i*4+R]= rt - r;
168         dst[i*4+G]= gt - g;
169         dst[i*4+B]= bt - b;
170         dst[i*4+A]= at - a;
171         r = rt;
172         g = gt;
173         b = bt;
174         a = at;
175     }
176     s->dsp.diff_bytes(dst+16, src+16, src+12, w*4-16);
177     *red=   src[(w-1)*4+R];
178     *green= src[(w-1)*4+G];
179     *blue=  src[(w-1)*4+B];
180     *alpha= src[(w-1)*4+A];
181 }
182
183 static inline void sub_left_prediction_rgb24(HYuvContext *s, uint8_t *dst, uint8_t *src, int w, int *red, int *green, int *blue){
184     int i;
185     int r,g,b;
186     r= *red;
187     g= *green;
188     b= *blue;
189     for(i=0; i<FFMIN(w,16); i++){
190         const int rt= src[i*3+0];
191         const int gt= src[i*3+1];
192         const int bt= src[i*3+2];
193         dst[i*3+0]= rt - r;
194         dst[i*3+1]= gt - g;
195         dst[i*3+2]= bt - b;
196         r = rt;
197         g = gt;
198         b = bt;
199     }
200     s->dsp.diff_bytes(dst+48, src+48, src+48-3, w*3-48);
201     *red=   src[(w-1)*3+0];
202     *green= src[(w-1)*3+1];
203     *blue=  src[(w-1)*3+2];
204 }
205
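/* Parse a run-length coded table of 256 Huffman code lengths: each record is
 * a 3-bit repeat count followed by a 5-bit length value, with repeat == 0
 * meaning that an explicit 8-bit repeat count follows.  For example, the bits
 * 011 00100 describe three consecutive symbols of code length 4. */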
206 static int read_len_table(uint8_t *dst, GetBitContext *gb){
207     int i, val, repeat;
208
209     for(i=0; i<256;){
210         repeat= get_bits(gb, 3);
211         val   = get_bits(gb, 5);
212         if(repeat==0)
213             repeat= get_bits(gb, 8);
214 //printf("%d %d\n", val, repeat);
215         if(i+repeat > 256) {
216             av_log(NULL, AV_LOG_ERROR, "Error reading huffman table\n");
217             return -1;
218         }
219         while (repeat--)
220             dst[i++] = val;
221     }
222     return 0;
223 }
224
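/* Assign canonical Huffman code bits from a table of code lengths, handing
 * out the longest codes first.  After all codes of a given length have been
 * assigned, the running value must be even so that dropping one bit yields a
 * valid starting code for the next shorter length; an odd value indicates an
 * inconsistent length table and is rejected. */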
225 static int generate_bits_table(uint32_t *dst, const uint8_t *len_table){
226     int len, index;
227     uint32_t bits=0;
228
229     for(len=32; len>0; len--){
230         for(index=0; index<256; index++){
231             if(len_table[index]==len)
232                 dst[index]= bits++;
233         }
234         if(bits & 1){
235             av_log(NULL, AV_LOG_ERROR, "Error generating huffman table\n");
236             return -1;
237         }
238         bits >>= 1;
239     }
240     return 0;
241 }
242
243 #if CONFIG_HUFFYUV_ENCODER || CONFIG_FFVHUFF_ENCODER
244 typedef struct {
245     uint64_t val;
246     int name;
247 } HeapElem;
248
249 static void heap_sift(HeapElem *h, int root, int size)
250 {
251     while(root*2+1 < size) {
252         int child = root*2+1;
253         if(child < size-1 && h[child].val > h[child+1].val)
254             child++;
255         if(h[root].val > h[child].val) {
256             FFSWAP(HeapElem, h[root], h[child]);
257             root = child;
258         } else
259             break;
260     }
261 }
262
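/* Build Huffman code lengths from symbol frequencies using a min-heap.
 * Frequencies are shifted left by 8 and biased by 'offset' to break ties; if
 * the resulting tree produces a code of 32 or more bits, the loop retries
 * with a doubled offset, which flattens the distribution until every code
 * length fits. */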
263 static void generate_len_table(uint8_t *dst, const uint64_t *stats){
264     HeapElem h[256];
265     int up[2*256];
266     int len[2*256];
267     int offset, i, next;
268     int size = 256;
269
270     for(offset=1; ; offset<<=1){
271         for(i=0; i<size; i++){
272             h[i].name = i;
273             h[i].val = (stats[i] << 8) + offset;
274         }
275         for(i=size/2-1; i>=0; i--)
276             heap_sift(h, i, size);
277
278         for(next=size; next<size*2-1; next++){
279             // merge the two smallest entries and put the result back in the heap
280             uint64_t min1v = h[0].val;
281             up[h[0].name] = next;
282             h[0].val = INT64_MAX;
283             heap_sift(h, 0, size);
284             up[h[0].name] = next;
285             h[0].name = next;
286             h[0].val += min1v;
287             heap_sift(h, 0, size);
288         }
289
290         len[2*size-2] = 0;
291         for(i=2*size-3; i>=size; i--)
292             len[i] = len[up[i]] + 1;
293         for(i=0; i<size; i++) {
294             dst[i] = len[up[i]] + 1;
295             if(dst[i] >= 32) break;
296         }
297         if(i==size) break;
298     }
299 }
300 #endif /* CONFIG_HUFFYUV_ENCODER || CONFIG_FFVHUFF_ENCODER */
301
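/* Build the joint VLC tables used by READ_2PIX and decode_bgr_1(): whenever
 * the combined length of two (luma/chroma) or three (R,G,B) codes fits in
 * VLC_BITS, the concatenated code is entered into a single table so that
 * several symbols can be decoded with one get_vlc2() lookup.  In the YUV case
 * the symbol value 0xffff is reserved to mean "no joint code", and the
 * decoder falls back to the individual per-plane tables. */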
302 static void generate_joint_tables(HYuvContext *s){
303     uint16_t symbols[1<<VLC_BITS];
304     uint16_t bits[1<<VLC_BITS];
305     uint8_t len[1<<VLC_BITS];
306     if(s->bitstream_bpp < 24){
307         int p, i, y, u;
308         for(p=0; p<3; p++){
309             for(i=y=0; y<256; y++){
310                 int len0 = s->len[0][y];
311                 int limit = VLC_BITS - len0;
312                 if(limit <= 0)
313                     continue;
314                 for(u=0; u<256; u++){
315                     int len1 = s->len[p][u];
316                     if(len1 > limit)
317                         continue;
318                     len[i] = len0 + len1;
319                     bits[i] = (s->bits[0][y] << len1) + s->bits[p][u];
320                     symbols[i] = (y<<8) + u;
321                     if(symbols[i] != 0xffff) // reserved to mean "invalid"
322                         i++;
323                 }
324             }
325             free_vlc(&s->vlc[3+p]);
326             init_vlc_sparse(&s->vlc[3+p], VLC_BITS, i, len, 1, 1, bits, 2, 2, symbols, 2, 2, 0);
327         }
328     }else{
329         uint8_t (*map)[4] = (uint8_t(*)[4])s->pix_bgr_map;
330         int i, b, g, r, code;
331         int p0 = s->decorrelate;
332         int p1 = !s->decorrelate;
333         // restrict the range to +/-16 because that's pretty much guaranteed to
334         // cover all the combinations that fit in 11 bits total, and it doesn't
335         // matter if we miss a few rare codes.
336         for(i=0, g=-16; g<16; g++){
337             int len0 = s->len[p0][g&255];
338             int limit0 = VLC_BITS - len0;
339             if(limit0 < 2)
340                 continue;
341             for(b=-16; b<16; b++){
342                 int len1 = s->len[p1][b&255];
343                 int limit1 = limit0 - len1;
344                 if(limit1 < 1)
345                     continue;
346                 code = (s->bits[p0][g&255] << len1) + s->bits[p1][b&255];
347                 for(r=-16; r<16; r++){
348                     int len2 = s->len[2][r&255];
349                     if(len2 > limit1)
350                         continue;
351                     len[i] = len0 + len1 + len2;
352                     bits[i] = (code << len2) + s->bits[2][r&255];
353                     if(s->decorrelate){
354                         map[i][G] = g;
355                         map[i][B] = g+b;
356                         map[i][R] = g+r;
357                     }else{
358                         map[i][B] = g;
359                         map[i][G] = b;
360                         map[i][R] = r;
361                     }
362                     i++;
363                 }
364             }
365         }
366         free_vlc(&s->vlc[3]);
367         init_vlc(&s->vlc[3], VLC_BITS, i, len, 1, 1, bits, 2, 2, 0);
368     }
369 }
370
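/* Read the three per-plane code length tables from 'src', derive the
 * canonical code bits, rebuild the per-plane and joint VLCs, and return the
 * number of bytes consumed (needed when tables are stored per frame in
 * context-model mode). */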
371 static int read_huffman_tables(HYuvContext *s, const uint8_t *src, int length){
372     GetBitContext gb;
373     int i;
374
375     init_get_bits(&gb, src, length*8);
376
377     for(i=0; i<3; i++){
378         if(read_len_table(s->len[i], &gb)<0)
379             return -1;
380         if(generate_bits_table(s->bits[i], s->len[i])<0){
381             return -1;
382         }
383         free_vlc(&s->vlc[i]);
384         init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1, s->bits[i], 4, 4, 0);
385     }
386
387     generate_joint_tables(s);
388
389     return (get_bits_count(&gb)+7)/8;
390 }
391
392 static int read_old_huffman_tables(HYuvContext *s){
393 #if 1
394     GetBitContext gb;
395     int i;
396
397     init_get_bits(&gb, classic_shift_luma, sizeof(classic_shift_luma)*8);
398     if(read_len_table(s->len[0], &gb)<0)
399         return -1;
400     init_get_bits(&gb, classic_shift_chroma, sizeof(classic_shift_chroma)*8);
401     if(read_len_table(s->len[1], &gb)<0)
402         return -1;
403
404     for(i=0; i<256; i++) s->bits[0][i] = classic_add_luma  [i];
405     for(i=0; i<256; i++) s->bits[1][i] = classic_add_chroma[i];
406
407     if(s->bitstream_bpp >= 24){
408         memcpy(s->bits[1], s->bits[0], 256*sizeof(uint32_t));
409         memcpy(s->len[1] , s->len [0], 256*sizeof(uint8_t));
410     }
411     memcpy(s->bits[2], s->bits[1], 256*sizeof(uint32_t));
412     memcpy(s->len[2] , s->len [1], 256*sizeof(uint8_t));
413
414     for(i=0; i<3; i++){
415         free_vlc(&s->vlc[i]);
416         init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1, s->bits[i], 4, 4, 0);
417     }
418
419     generate_joint_tables(s);
420
421     return 0;
422 #else
423     av_log(s->avctx, AV_LOG_DEBUG, "v1 huffyuv is not supported \n");
424     return -1;
425 #endif
426 }
427
428 static av_cold void alloc_temp(HYuvContext *s){
429     int i;
430
431     if(s->bitstream_bpp<24){
432         for(i=0; i<3; i++){
433             s->temp[i]= av_malloc(s->width + 16);
434         }
435     }else{
436         s->temp[0]= av_mallocz(4*s->width + 16);
437     }
438 }
439
440 static av_cold int common_init(AVCodecContext *avctx){
441     HYuvContext *s = avctx->priv_data;
442
443     s->avctx= avctx;
444     s->flags= avctx->flags;
445
446     dsputil_init(&s->dsp, avctx);
447
448     s->width= avctx->width;
449     s->height= avctx->height;
450     assert(s->width>0 && s->height>0);
451
452     return 0;
453 }
454
455 #if CONFIG_HUFFYUV_DECODER || CONFIG_FFVHUFF_DECODER
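/* For version-2 (FFVHUFF / huffyuv 2.x) streams the extradata layout is:
 *   byte 0: predictor in the low 6 bits, decorrelate flag in bit 6
 *   byte 1: bitstream bits per pixel (0 = derive from bits_per_coded_sample)
 *   byte 2: bits 4-5 interlacing override (1 = interlaced, 2 = progressive),
 *           bit 6 per-frame context (adaptive Huffman tables)
 *   byte 3: reserved
 * followed by the three stored code length tables, which are parsed by
 * read_huffman_tables(). */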
456 static av_cold int decode_init(AVCodecContext *avctx)
457 {
458     HYuvContext *s = avctx->priv_data;
459
460     common_init(avctx);
461     memset(s->vlc, 0, 3*sizeof(VLC));
462
463     avctx->coded_frame= &s->picture;
464     avcodec_get_frame_defaults(&s->picture);
465     s->interlaced= s->height > 288;
466
467     s->bgr32= 1;
468 //if(avctx->extradata)
469 //  printf("extradata:%X, extradata_size:%d\n", *(uint32_t*)avctx->extradata, avctx->extradata_size);
470     if(avctx->extradata_size){
471         if((avctx->bits_per_coded_sample&7) && avctx->bits_per_coded_sample != 12)
472             s->version=1; // do such files exist at all?
473         else
474             s->version=2;
475     }else
476         s->version=0;
477
478     if(s->version==2){
479         int method, interlace;
480
481         if (avctx->extradata_size < 4)
482             return -1;
483
484         method= ((uint8_t*)avctx->extradata)[0];
485         s->decorrelate= method&64 ? 1 : 0;
486         s->predictor= method&63;
487         s->bitstream_bpp= ((uint8_t*)avctx->extradata)[1];
488         if(s->bitstream_bpp==0)
489             s->bitstream_bpp= avctx->bits_per_coded_sample&~7;
490         interlace= (((uint8_t*)avctx->extradata)[2] & 0x30) >> 4;
491         s->interlaced= (interlace==1) ? 1 : (interlace==2) ? 0 : s->interlaced;
492         s->context= ((uint8_t*)avctx->extradata)[2] & 0x40 ? 1 : 0;
493
494         if(read_huffman_tables(s, ((uint8_t*)avctx->extradata)+4, avctx->extradata_size-4) < 0)
495             return -1;
496     }else{
497         switch(avctx->bits_per_coded_sample&7){
498         case 1:
499             s->predictor= LEFT;
500             s->decorrelate= 0;
501             break;
502         case 2:
503             s->predictor= LEFT;
504             s->decorrelate= 1;
505             break;
506         case 3:
507             s->predictor= PLANE;
508             s->decorrelate= avctx->bits_per_coded_sample >= 24;
509             break;
510         case 4:
511             s->predictor= MEDIAN;
512             s->decorrelate= 0;
513             break;
514         default:
515             s->predictor= LEFT; //OLD
516             s->decorrelate= 0;
517             break;
518         }
519         s->bitstream_bpp= avctx->bits_per_coded_sample & ~7;
520         s->context= 0;
521
522         if(read_old_huffman_tables(s) < 0)
523             return -1;
524     }
525
526     switch(s->bitstream_bpp){
527     case 12:
528         avctx->pix_fmt = PIX_FMT_YUV420P;
529         break;
530     case 16:
531         if(s->yuy2){
532             avctx->pix_fmt = PIX_FMT_YUYV422;
533         }else{
534             avctx->pix_fmt = PIX_FMT_YUV422P;
535         }
536         break;
537     case 24:
538     case 32:
539         if(s->bgr32){
540             avctx->pix_fmt = PIX_FMT_RGB32;
541         }else{
542             avctx->pix_fmt = PIX_FMT_BGR24;
543         }
544         break;
545     default:
546         assert(0);
547     }
548
549     alloc_temp(s);
550
551 //    av_log(NULL, AV_LOG_DEBUG, "pred:%d bpp:%d hbpp:%d il:%d\n", s->predictor, s->bitstream_bpp, avctx->bits_per_coded_sample, s->interlaced);
552
553     return 0;
554 }
555
556 static av_cold int decode_init_thread_copy(AVCodecContext *avctx)
557 {
558     HYuvContext *s = avctx->priv_data;
559     int i;
560
561     avctx->coded_frame= &s->picture;
562     alloc_temp(s);
563
564     for (i = 0; i < 6; i++)
565         s->vlc[i].table = NULL;
566
567     if(s->version==2){
568         if(read_huffman_tables(s, ((uint8_t*)avctx->extradata)+4, avctx->extradata_size-4) < 0)
569             return -1;
570     }else{
571         if(read_old_huffman_tables(s) < 0)
572             return -1;
573     }
574
575     return 0;
576 }
577 #endif /* CONFIG_HUFFYUV_DECODER || CONFIG_FFVHUFF_DECODER */
578
579 #if CONFIG_HUFFYUV_ENCODER || CONFIG_FFVHUFF_ENCODER
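/* Serialize a table of 256 code lengths using the run-length scheme parsed by
 * read_len_table(): runs of up to 7 equal lengths are packed into one byte
 * (repeat count in the top 3 bits, value in the low 5), longer runs are
 * stored as a value byte followed by an 8-bit repeat count.  Returns the
 * number of bytes written. */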
580 static int store_table(HYuvContext *s, const uint8_t *len, uint8_t *buf){
581     int i;
582     int index= 0;
583
584     for(i=0; i<256;){
585         int val= len[i];
586         int repeat=0;
587
588         for(; i<256 && len[i]==val && repeat<255; i++)
589             repeat++;
590
591         assert(val < 32 && val >0 && repeat<256 && repeat>0);
592         if(repeat>7){
593             buf[index++]= val;
594             buf[index++]= repeat;
595         }else{
596             buf[index++]= val | (repeat<<5);
597         }
598     }
599
600     return index;
601 }
602
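/* Set up the encoder: pick the bitstream format from the pixel format, write
 * the 4-byte extradata header (layout documented above decode_init())
 * followed by the global Huffman tables, and seed the symbol statistics
 * either from a first-pass stats file (avctx->stats_in) or from a fixed prior
 * that favours small signed residuals. */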
603 static av_cold int encode_init(AVCodecContext *avctx)
604 {
605     HYuvContext *s = avctx->priv_data;
606     int i, j;
607
608     common_init(avctx);
609
610     avctx->extradata= av_mallocz(1024*30); // 256*3+4 == 772
611     avctx->stats_out= av_mallocz(1024*30); // 21*256*3(%llu ) + 3(\n) + 1(0) = 16132
612     s->version=2;
613
614     avctx->coded_frame= &s->picture;
615
616     switch(avctx->pix_fmt){
617     case PIX_FMT_YUV420P:
618         s->bitstream_bpp= 12;
619         break;
620     case PIX_FMT_YUV422P:
621         s->bitstream_bpp= 16;
622         break;
623     case PIX_FMT_RGB32:
624         s->bitstream_bpp= 32;
625         break;
626     case PIX_FMT_RGB24:
627         s->bitstream_bpp= 24;
628         break;
629     default:
630         av_log(avctx, AV_LOG_ERROR, "format not supported\n");
631         return -1;
632     }
633     avctx->bits_per_coded_sample= s->bitstream_bpp;
634     s->decorrelate= s->bitstream_bpp >= 24;
635     s->predictor= avctx->prediction_method;
636     s->interlaced= avctx->flags&CODEC_FLAG_INTERLACED_ME ? 1 : 0;
637     if(avctx->context_model==1){
638         s->context= avctx->context_model;
639         if(s->flags & (CODEC_FLAG_PASS1|CODEC_FLAG_PASS2)){
640             av_log(avctx, AV_LOG_ERROR, "context=1 is not compatible with 2 pass huffyuv encoding\n");
641             return -1;
642         }
643     }else s->context= 0;
644
645     if(avctx->codec->id==CODEC_ID_HUFFYUV){
646         if(avctx->pix_fmt==PIX_FMT_YUV420P){
647             av_log(avctx, AV_LOG_ERROR, "Error: YV12 is not supported by huffyuv; use vcodec=ffvhuff or format=422p\n");
648             return -1;
649         }
650         if(avctx->context_model){
651             av_log(avctx, AV_LOG_ERROR, "Error: per-frame huffman tables are not supported by huffyuv; use vcodec=ffvhuff\n");
652             return -1;
653         }
654         if(s->interlaced != ( s->height > 288 ))
655             av_log(avctx, AV_LOG_INFO, "using the interlacing flag from huffyuv 2.2.0 or newer\n");
656     }
657
658     if(s->bitstream_bpp>=24 && s->predictor==MEDIAN){
659         av_log(avctx, AV_LOG_ERROR, "Error: RGB is incompatible with median predictor\n");
660         return -1;
661     }
662
663     ((uint8_t*)avctx->extradata)[0]= s->predictor | (s->decorrelate << 6);
664     ((uint8_t*)avctx->extradata)[1]= s->bitstream_bpp;
665     ((uint8_t*)avctx->extradata)[2]= s->interlaced ? 0x10 : 0x20;
666     if(s->context)
667         ((uint8_t*)avctx->extradata)[2]|= 0x40;
668     ((uint8_t*)avctx->extradata)[3]= 0;
669     s->avctx->extradata_size= 4;
670
671     if(avctx->stats_in){
672         char *p= avctx->stats_in;
673
674         for(i=0; i<3; i++)
675             for(j=0; j<256; j++)
676                 s->stats[i][j]= 1;
677
678         for(;;){
679             for(i=0; i<3; i++){
680                 char *next;
681
682                 for(j=0; j<256; j++){
683                     s->stats[i][j]+= strtol(p, &next, 0);
684                     if(next==p) return -1;
685                     p=next;
686                 }
687             }
688             if(p[0]==0 || p[1]==0 || p[2]==0) break;
689         }
690     }else{
691         for(i=0; i<3; i++)
692             for(j=0; j<256; j++){
693                 int d= FFMIN(j, 256-j);
694
695                 s->stats[i][j]= 100000000/(d+1);
696             }
697     }
698
699     for(i=0; i<3; i++){
700         generate_len_table(s->len[i], s->stats[i]);
701
702         if(generate_bits_table(s->bits[i], s->len[i])<0){
703             return -1;
704         }
705
706         s->avctx->extradata_size+=
707         store_table(s, s->len[i], &((uint8_t*)s->avctx->extradata)[s->avctx->extradata_size]);
708     }
709
710     if(s->context){
711         for(i=0; i<3; i++){
712             int pels = s->width*s->height / (i?40:10);
713             for(j=0; j<256; j++){
714                 int d= FFMIN(j, 256-j);
715                 s->stats[i][j]= pels/(d+1);
716             }
717         }
718     }else{
719         for(i=0; i<3; i++)
720             for(j=0; j<256; j++)
721                 s->stats[i][j]= 0;
722     }
723
724 //    printf("pred:%d bpp:%d hbpp:%d il:%d\n", s->predictor, s->bitstream_bpp, avctx->bits_per_coded_sample, s->interlaced);
725
726     alloc_temp(s);
727
728     s->picture_number=0;
729
730     return 0;
731 }
732 #endif /* CONFIG_HUFFYUV_ENCODER || CONFIG_FFVHUFF_ENCODER */
733
734 /* TODO instead of restarting the read when the code isn't in the first level
735  * of the joint table, jump into the 2nd level of the individual table. */
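/* Try to read two symbols with a single lookup in the joint table; a result
 * of 0xffff means no joint code matched, so the two symbols are read from
 * their individual tables instead. */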
736 #define READ_2PIX(dst0, dst1, plane1){\
737     uint16_t code = get_vlc2(&s->gb, s->vlc[3+plane1].table, VLC_BITS, 1);\
738     if(code != 0xffff){\
739         dst0 = code>>8;\
740         dst1 = code;\
741     }else{\
742         dst0 = get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3);\
743         dst1 = get_vlc2(&s->gb, s->vlc[plane1].table, VLC_BITS, 3);\
744     }\
745 }
746
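/* Decode 'count' luma samples together with the corresponding count/2 U and V
 * samples into s->temp[0..2] using the joint tables.  When few bits remain in
 * the bitstream, the slower variant re-checks the read position on every
 * iteration instead of trusting the count. */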
747 static void decode_422_bitstream(HYuvContext *s, int count){
748     int i;
749
750     count/=2;
751
752     if(count >= (get_bits_left(&s->gb))/(31*4)){
753         for(i=0; i<count && get_bits_count(&s->gb) < s->gb.size_in_bits; i++){
754             READ_2PIX(s->temp[0][2*i  ], s->temp[1][i], 1);
755             READ_2PIX(s->temp[0][2*i+1], s->temp[2][i], 2);
756         }
757     }else{
758         for(i=0; i<count; i++){
759             READ_2PIX(s->temp[0][2*i  ], s->temp[1][i], 1);
760             READ_2PIX(s->temp[0][2*i+1], s->temp[2][i], 2);
761         }
762     }
763 }
764
765 static void decode_gray_bitstream(HYuvContext *s, int count){
766     int i;
767
768     count/=2;
769
770     if(count >= (get_bits_left(&s->gb))/(31*2)){
771         for(i=0; i<count && get_bits_count(&s->gb) < s->gb.size_in_bits; i++){
772             READ_2PIX(s->temp[0][2*i  ], s->temp[0][2*i+1], 0);
773         }
774     }else{
775         for(i=0; i<count; i++){
776             READ_2PIX(s->temp[0][2*i  ], s->temp[0][2*i+1], 0);
777         }
778     }
779 }
780
781 #if CONFIG_HUFFYUV_ENCODER || CONFIG_FFVHUFF_ENCODER
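/* Write 'count' luma samples plus the matching chroma from s->temp[] to the
 * bitstream.  In pass-1 and context-model modes the per-symbol statistics are
 * updated as a side effect; with CODEC_FLAG2_NO_OUTPUT only the statistics
 * are gathered. */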
782 static int encode_422_bitstream(HYuvContext *s, int offset, int count){
783     int i;
784     const uint8_t *y = s->temp[0] + offset;
785     const uint8_t *u = s->temp[1] + offset/2;
786     const uint8_t *v = s->temp[2] + offset/2;
787
788     if(s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb)>>3) < 2*4*count){
789         av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
790         return -1;
791     }
792
793 #define LOAD4\
794             int y0 = y[2*i];\
795             int y1 = y[2*i+1];\
796             int u0 = u[i];\
797             int v0 = v[i];
798
799     count/=2;
800     if(s->flags&CODEC_FLAG_PASS1){
801         for(i=0; i<count; i++){
802             LOAD4;
803             s->stats[0][y0]++;
804             s->stats[1][u0]++;
805             s->stats[0][y1]++;
806             s->stats[2][v0]++;
807         }
808     }
809     if(s->avctx->flags2&CODEC_FLAG2_NO_OUTPUT)
810         return 0;
811     if(s->context){
812         for(i=0; i<count; i++){
813             LOAD4;
814             s->stats[0][y0]++;
815             put_bits(&s->pb, s->len[0][y0], s->bits[0][y0]);
816             s->stats[1][u0]++;
817             put_bits(&s->pb, s->len[1][u0], s->bits[1][u0]);
818             s->stats[0][y1]++;
819             put_bits(&s->pb, s->len[0][y1], s->bits[0][y1]);
820             s->stats[2][v0]++;
821             put_bits(&s->pb, s->len[2][v0], s->bits[2][v0]);
822         }
823     }else{
824         for(i=0; i<count; i++){
825             LOAD4;
826             put_bits(&s->pb, s->len[0][y0], s->bits[0][y0]);
827             put_bits(&s->pb, s->len[1][u0], s->bits[1][u0]);
828             put_bits(&s->pb, s->len[0][y1], s->bits[0][y1]);
829             put_bits(&s->pb, s->len[2][v0], s->bits[2][v0]);
830         }
831     }
832     return 0;
833 }
834
835 static int encode_gray_bitstream(HYuvContext *s, int count){
836     int i;
837
838     if(s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb)>>3) < 4*count){
839         av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
840         return -1;
841     }
842
843 #define LOAD2\
844             int y0 = s->temp[0][2*i];\
845             int y1 = s->temp[0][2*i+1];
846 #define STAT2\
847             s->stats[0][y0]++;\
848             s->stats[0][y1]++;
849 #define WRITE2\
850             put_bits(&s->pb, s->len[0][y0], s->bits[0][y0]);\
851             put_bits(&s->pb, s->len[0][y1], s->bits[0][y1]);
852
853     count/=2;
854     if(s->flags&CODEC_FLAG_PASS1){
855         for(i=0; i<count; i++){
856             LOAD2;
857             STAT2;
858         }
859     }
860     if(s->avctx->flags2&CODEC_FLAG2_NO_OUTPUT)
861         return 0;
862
863     if(s->context){
864         for(i=0; i<count; i++){
865             LOAD2;
866             STAT2;
867             WRITE2;
868         }
869     }else{
870         for(i=0; i<count; i++){
871             LOAD2;
872             WRITE2;
873         }
874     }
875     return 0;
876 }
877 #endif /* CONFIG_HUFFYUV_ENCODER || CONFIG_FFVHUFF_ENCODER */
878
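/* Decode 'count' BGR32 pixels.  The joint table delivers a whole (B,G,R)
 * triple per lookup when possible; otherwise the components are read from the
 * individual tables, adding the green value back to blue and red when
 * decorrelation is enabled.  Alpha, when present, reuses the codes of the
 * third table. */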
879 static av_always_inline void decode_bgr_1(HYuvContext *s, int count, int decorrelate, int alpha){
880     int i;
881     for(i=0; i<count; i++){
882         int code = get_vlc2(&s->gb, s->vlc[3].table, VLC_BITS, 1);
883         if(code != -1){
884             *(uint32_t*)&s->temp[0][4*i] = s->pix_bgr_map[code];
885         }else if(decorrelate){
886             s->temp[0][4*i+G] = get_vlc2(&s->gb, s->vlc[1].table, VLC_BITS, 3);
887             s->temp[0][4*i+B] = get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3) + s->temp[0][4*i+G];
888             s->temp[0][4*i+R] = get_vlc2(&s->gb, s->vlc[2].table, VLC_BITS, 3) + s->temp[0][4*i+G];
889         }else{
890             s->temp[0][4*i+B] = get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3);
891             s->temp[0][4*i+G] = get_vlc2(&s->gb, s->vlc[1].table, VLC_BITS, 3);
892             s->temp[0][4*i+R] = get_vlc2(&s->gb, s->vlc[2].table, VLC_BITS, 3);
893         }
894         if(alpha)
895             s->temp[0][4*i+A] = get_vlc2(&s->gb, s->vlc[2].table, VLC_BITS, 3);
896     }
897 }
898
899 static void decode_bgr_bitstream(HYuvContext *s, int count){
900     if(s->decorrelate){
901         if(s->bitstream_bpp==24)
902             decode_bgr_1(s, count, 1, 0);
903         else
904             decode_bgr_1(s, count, 1, 1);
905     }else{
906         if(s->bitstream_bpp==24)
907             decode_bgr_1(s, count, 0, 0);
908         else
909             decode_bgr_1(s, count, 0, 1);
910     }
911 }
912
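/* Encode 'count' packed RGB(A) pixels.  Green is coded directly while blue
 * and red are coded as differences from green (the decorrelation the decoder
 * reverses); alpha reuses the third table.  'planes' selects 3-byte RGB24 or
 * 4-byte RGB32 input. */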
913 static inline int encode_bgra_bitstream(HYuvContext *s, int count, int planes){
914     int i;
915
916     if(s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb)>>3) < 4*planes*count){
917         av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
918         return -1;
919     }
920
921 #define LOAD3\
922             int g= s->temp[0][planes==3 ? 3*i+1 : 4*i+G];\
923             int b= (s->temp[0][planes==3 ? 3*i+2 : 4*i+B] - g) & 0xff;\
924             int r= (s->temp[0][planes==3 ? 3*i+0 : 4*i+R] - g) & 0xff;\
925             int a= s->temp[0][planes*i+A];
926 #define STAT3\
927             s->stats[0][b]++;\
928             s->stats[1][g]++;\
929             s->stats[2][r]++;\
930             if(planes==4) s->stats[2][a]++;
931 #define WRITE3\
932             put_bits(&s->pb, s->len[1][g], s->bits[1][g]);\
933             put_bits(&s->pb, s->len[0][b], s->bits[0][b]);\
934             put_bits(&s->pb, s->len[2][r], s->bits[2][r]);\
935             if(planes==4) put_bits(&s->pb, s->len[2][a], s->bits[2][a]);
936
937     if((s->flags&CODEC_FLAG_PASS1) && (s->avctx->flags2&CODEC_FLAG2_NO_OUTPUT)){
938         for(i=0; i<count; i++){
939             LOAD3;
940             STAT3;
941         }
942     }else if(s->context || (s->flags&CODEC_FLAG_PASS1)){
943         for(i=0; i<count; i++){
944             LOAD3;
945             STAT3;
946             WRITE3;
947         }
948     }else{
949         for(i=0; i<count; i++){
950             LOAD3;
951             WRITE3;
952         }
953     }
954     return 0;
955 }
956
957 #if CONFIG_HUFFYUV_DECODER || CONFIG_FFVHUFF_DECODER
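/* Report the rows decoded since the last call to the application through
 * draw_horiz_band(), so display can start before the whole frame is finished.
 * For 4:2:0 (bitstream_bpp == 12) the chroma offset advances at half the luma
 * rate. */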
958 static void draw_slice(HYuvContext *s, int y){
959     int h, cy;
960     int offset[4];
961
962     if(s->avctx->draw_horiz_band==NULL)
963         return;
964
965     h= y - s->last_slice_end;
966     y -= h;
967
968     if(s->bitstream_bpp==12){
969         cy= y>>1;
970     }else{
971         cy= y;
972     }
973
974     offset[0] = s->picture.linesize[0]*y;
975     offset[1] = s->picture.linesize[1]*cy;
976     offset[2] = s->picture.linesize[2]*cy;
977     offset[3] = 0;
978     emms_c();
979
980     s->avctx->draw_horiz_band(s->avctx, &s->picture, offset, y, 3, h);
981
982     s->last_slice_end= y + h;
983 }
984
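/* Decode one frame: the packet is byte-swapped (32 bits at a time) into a
 * padded scratch buffer before bit-reading, per-frame Huffman tables are
 * re-read first when the context model is enabled, and the image is then
 * reconstructed line by line according to the predictor.  RGB frames are
 * stored bottom-up.  Returns the number of bytes consumed. */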
985 static int decode_frame(AVCodecContext *avctx, void *data, int *data_size, AVPacket *avpkt){
986     const uint8_t *buf = avpkt->data;
987     int buf_size = avpkt->size;
988     HYuvContext *s = avctx->priv_data;
989     const int width= s->width;
990     const int width2= s->width>>1;
991     const int height= s->height;
992     int fake_ystride, fake_ustride, fake_vstride;
993     AVFrame * const p= &s->picture;
994     int table_size= 0;
995
996     AVFrame *picture = data;
997
998     av_fast_malloc(&s->bitstream_buffer, &s->bitstream_buffer_size, buf_size + FF_INPUT_BUFFER_PADDING_SIZE);
999     if (!s->bitstream_buffer)
1000         return AVERROR(ENOMEM);
1001
1002     memset(s->bitstream_buffer + buf_size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
1003     s->dsp.bswap_buf((uint32_t*)s->bitstream_buffer, (const uint32_t*)buf, buf_size/4);
1004
1005     if(p->data[0])
1006         ff_thread_release_buffer(avctx, p);
1007
1008     p->reference= 0;
1009     if(ff_thread_get_buffer(avctx, p) < 0){
1010         av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
1011         return -1;
1012     }
1013
1014     if(s->context){
1015         table_size = read_huffman_tables(s, s->bitstream_buffer, buf_size);
1016         if(table_size < 0)
1017             return -1;
1018     }
1019
1020     if((unsigned)(buf_size-table_size) >= INT_MAX/8)
1021         return -1;
1022
1023     init_get_bits(&s->gb, s->bitstream_buffer+table_size, (buf_size-table_size)*8);
1024
1025     fake_ystride= s->interlaced ? p->linesize[0]*2  : p->linesize[0];
1026     fake_ustride= s->interlaced ? p->linesize[1]*2  : p->linesize[1];
1027     fake_vstride= s->interlaced ? p->linesize[2]*2  : p->linesize[2];
1028
1029     s->last_slice_end= 0;
1030
1031     if(s->bitstream_bpp<24){
1032         int y, cy;
1033         int lefty, leftu, leftv;
1034         int lefttopy, lefttopu, lefttopv;
1035
1036         if(s->yuy2){
1037             p->data[0][3]= get_bits(&s->gb, 8);
1038             p->data[0][2]= get_bits(&s->gb, 8);
1039             p->data[0][1]= get_bits(&s->gb, 8);
1040             p->data[0][0]= get_bits(&s->gb, 8);
1041
1042             av_log(avctx, AV_LOG_ERROR, "YUY2 output is not implemented yet\n");
1043             return -1;
1044         }else{
1045
1046             leftv= p->data[2][0]= get_bits(&s->gb, 8);
1047             lefty= p->data[0][1]= get_bits(&s->gb, 8);
1048             leftu= p->data[1][0]= get_bits(&s->gb, 8);
1049                    p->data[0][0]= get_bits(&s->gb, 8);
1050
1051             switch(s->predictor){
1052             case LEFT:
1053             case PLANE:
1054                 decode_422_bitstream(s, width-2);
1055                 lefty= s->dsp.add_hfyu_left_prediction(p->data[0] + 2, s->temp[0], width-2, lefty);
1056                 if(!(s->flags&CODEC_FLAG_GRAY)){
1057                     leftu= s->dsp.add_hfyu_left_prediction(p->data[1] + 1, s->temp[1], width2-1, leftu);
1058                     leftv= s->dsp.add_hfyu_left_prediction(p->data[2] + 1, s->temp[2], width2-1, leftv);
1059                 }
1060
1061                 for(cy=y=1; y<s->height; y++,cy++){
1062                     uint8_t *ydst, *udst, *vdst;
1063
1064                     if(s->bitstream_bpp==12){
1065                         decode_gray_bitstream(s, width);
1066
1067                         ydst= p->data[0] + p->linesize[0]*y;
1068
1069                         lefty= s->dsp.add_hfyu_left_prediction(ydst, s->temp[0], width, lefty);
1070                         if(s->predictor == PLANE){
1071                             if(y>s->interlaced)
1072                                 s->dsp.add_bytes(ydst, ydst - fake_ystride, width);
1073                         }
1074                         y++;
1075                         if(y>=s->height) break;
1076                     }
1077
1078                     draw_slice(s, y);
1079
1080                     ydst= p->data[0] + p->linesize[0]*y;
1081                     udst= p->data[1] + p->linesize[1]*cy;
1082                     vdst= p->data[2] + p->linesize[2]*cy;
1083
1084                     decode_422_bitstream(s, width);
1085                     lefty= s->dsp.add_hfyu_left_prediction(ydst, s->temp[0], width, lefty);
1086                     if(!(s->flags&CODEC_FLAG_GRAY)){
1087                         leftu= s->dsp.add_hfyu_left_prediction(udst, s->temp[1], width2, leftu);
1088                         leftv= s->dsp.add_hfyu_left_prediction(vdst, s->temp[2], width2, leftv);
1089                     }
1090                     if(s->predictor == PLANE){
1091                         if(cy>s->interlaced){
1092                             s->dsp.add_bytes(ydst, ydst - fake_ystride, width);
1093                             if(!(s->flags&CODEC_FLAG_GRAY)){
1094                                 s->dsp.add_bytes(udst, udst - fake_ustride, width2);
1095                                 s->dsp.add_bytes(vdst, vdst - fake_vstride, width2);
1096                             }
1097                         }
1098                     }
1099                 }
1100                 draw_slice(s, height);
1101
1102                 break;
1103             case MEDIAN:
1104                 /* first line except first 2 pixels is left predicted */
1105                 decode_422_bitstream(s, width-2);
1106                 lefty= s->dsp.add_hfyu_left_prediction(p->data[0] + 2, s->temp[0], width-2, lefty);
1107                 if(!(s->flags&CODEC_FLAG_GRAY)){
1108                     leftu= s->dsp.add_hfyu_left_prediction(p->data[1] + 1, s->temp[1], width2-1, leftu);
1109                     leftv= s->dsp.add_hfyu_left_prediction(p->data[2] + 1, s->temp[2], width2-1, leftv);
1110                 }
1111
1112                 cy=y=1;
1113
1114                 /* second line is left predicted for interlaced case */
1115                 if(s->interlaced){
1116                     decode_422_bitstream(s, width);
1117                     lefty= s->dsp.add_hfyu_left_prediction(p->data[0] + p->linesize[0], s->temp[0], width, lefty);
1118                     if(!(s->flags&CODEC_FLAG_GRAY)){
1119                         leftu= s->dsp.add_hfyu_left_prediction(p->data[1] + p->linesize[1], s->temp[1], width2, leftu);
1120                         leftv= s->dsp.add_hfyu_left_prediction(p->data[2] + p->linesize[2], s->temp[2], width2, leftv);
1121                     }
1122                     y++; cy++;
1123                 }
1124
1125                 /* next 4 pixels are left predicted too */
1126                 decode_422_bitstream(s, 4);
1127                 lefty= s->dsp.add_hfyu_left_prediction(p->data[0] + fake_ystride, s->temp[0], 4, lefty);
1128                 if(!(s->flags&CODEC_FLAG_GRAY)){
1129                     leftu= s->dsp.add_hfyu_left_prediction(p->data[1] + fake_ustride, s->temp[1], 2, leftu);
1130                     leftv= s->dsp.add_hfyu_left_prediction(p->data[2] + fake_vstride, s->temp[2], 2, leftv);
1131                 }
1132
1133                 /* next line except the first 4 pixels is median predicted */
1134                 lefttopy= p->data[0][3];
1135                 decode_422_bitstream(s, width-4);
1136                 s->dsp.add_hfyu_median_prediction(p->data[0] + fake_ystride+4, p->data[0]+4, s->temp[0], width-4, &lefty, &lefttopy);
1137                 if(!(s->flags&CODEC_FLAG_GRAY)){
1138                     lefttopu= p->data[1][1];
1139                     lefttopv= p->data[2][1];
1140                     s->dsp.add_hfyu_median_prediction(p->data[1] + fake_ustride+2, p->data[1]+2, s->temp[1], width2-2, &leftu, &lefttopu);
1141                     s->dsp.add_hfyu_median_prediction(p->data[2] + fake_vstride+2, p->data[2]+2, s->temp[2], width2-2, &leftv, &lefttopv);
1142                 }
1143                 y++; cy++;
1144
1145                 for(; y<height; y++,cy++){
1146                     uint8_t *ydst, *udst, *vdst;
1147
1148                     if(s->bitstream_bpp==12){
1149                         while(2*cy > y){
1150                             decode_gray_bitstream(s, width);
1151                             ydst= p->data[0] + p->linesize[0]*y;
1152                             s->dsp.add_hfyu_median_prediction(ydst, ydst - fake_ystride, s->temp[0], width, &lefty, &lefttopy);
1153                             y++;
1154                         }
1155                         if(y>=height) break;
1156                     }
1157                     draw_slice(s, y);
1158
1159                     decode_422_bitstream(s, width);
1160
1161                     ydst= p->data[0] + p->linesize[0]*y;
1162                     udst= p->data[1] + p->linesize[1]*cy;
1163                     vdst= p->data[2] + p->linesize[2]*cy;
1164
1165                     s->dsp.add_hfyu_median_prediction(ydst, ydst - fake_ystride, s->temp[0], width, &lefty, &lefttopy);
1166                     if(!(s->flags&CODEC_FLAG_GRAY)){
1167                         s->dsp.add_hfyu_median_prediction(udst, udst - fake_ustride, s->temp[1], width2, &leftu, &lefttopu);
1168                         s->dsp.add_hfyu_median_prediction(vdst, vdst - fake_vstride, s->temp[2], width2, &leftv, &lefttopv);
1169                     }
1170                 }
1171
1172                 draw_slice(s, height);
1173                 break;
1174             }
1175         }
1176     }else{
1177         int y;
1178         int leftr, leftg, leftb, lefta;
1179         const int last_line= (height-1)*p->linesize[0];
1180
1181         if(s->bitstream_bpp==32){
1182             lefta= p->data[0][last_line+A]= get_bits(&s->gb, 8);
1183             leftr= p->data[0][last_line+R]= get_bits(&s->gb, 8);
1184             leftg= p->data[0][last_line+G]= get_bits(&s->gb, 8);
1185             leftb= p->data[0][last_line+B]= get_bits(&s->gb, 8);
1186         }else{
1187             leftr= p->data[0][last_line+R]= get_bits(&s->gb, 8);
1188             leftg= p->data[0][last_line+G]= get_bits(&s->gb, 8);
1189             leftb= p->data[0][last_line+B]= get_bits(&s->gb, 8);
1190             lefta= p->data[0][last_line+A]= 255;
1191             skip_bits(&s->gb, 8);
1192         }
1193
1194         if(s->bgr32){
1195             switch(s->predictor){
1196             case LEFT:
1197             case PLANE:
1198                 decode_bgr_bitstream(s, width-1);
1199                 s->dsp.add_hfyu_left_prediction_bgr32(p->data[0] + last_line+4, s->temp[0], width-1, &leftr, &leftg, &leftb, &lefta);
1200
1201                 for(y=s->height-2; y>=0; y--){ //Yes it is stored upside down.
1202                     decode_bgr_bitstream(s, width);
1203
1204                     s->dsp.add_hfyu_left_prediction_bgr32(p->data[0] + p->linesize[0]*y, s->temp[0], width, &leftr, &leftg, &leftb, &lefta);
1205                     if(s->predictor == PLANE){
1206                         if(s->bitstream_bpp!=32) lefta=0;
1207                         if((y&s->interlaced)==0 && y<s->height-1-s->interlaced){
1208                             s->dsp.add_bytes(p->data[0] + p->linesize[0]*y,
1209                                              p->data[0] + p->linesize[0]*y + fake_ystride, fake_ystride);
1210                         }
1211                     }
1212                 }
1213                 draw_slice(s, height); // just one large slice, as slices cannot be reported in reverse (bottom-up) order
1214                 break;
1215             default:
1216                 av_log(avctx, AV_LOG_ERROR, "prediction type not supported!\n");
1217             }
1218         }else{
1219
1220             av_log(avctx, AV_LOG_ERROR, "BGR24 output is not implemented yet\n");
1221             return -1;
1222         }
1223     }
1224     emms_c();
1225
1226     *picture= *p;
1227     *data_size = sizeof(AVFrame);
1228
1229     return (get_bits_count(&s->gb)+31)/32*4 + table_size;
1230 }
1231 #endif /* CONFIG_HUFFYUV_DECODER || CONFIG_FFVHUFF_DECODER */
1232
1233 static int common_end(HYuvContext *s){
1234     int i;
1235
1236     for(i=0; i<3; i++){
1237         av_freep(&s->temp[i]);
1238     }
1239     return 0;
1240 }
1241
1242 #if CONFIG_HUFFYUV_DECODER || CONFIG_FFVHUFF_DECODER
1243 static av_cold int decode_end(AVCodecContext *avctx)
1244 {
1245     HYuvContext *s = avctx->priv_data;
1246     int i;
1247
1248     if (s->picture.data[0])
1249         avctx->release_buffer(avctx, &s->picture);
1250
1251     common_end(s);
1252     av_freep(&s->bitstream_buffer);
1253
1254     for(i=0; i<6; i++){
1255         free_vlc(&s->vlc[i]);
1256     }
1257
1258     return 0;
1259 }
1260 #endif /* CONFIG_HUFFYUV_DECODER || CONFIG_FFVHUFF_DECODER */
1261
1262 #if CONFIG_HUFFYUV_ENCODER || CONFIG_FFVHUFF_ENCODER
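/* Encode one frame.  With the context model enabled the adaptive Huffman
 * tables are rebuilt from the running statistics and stored at the start of
 * the packet, and the statistics are then halved (aged).  The finished
 * bitstream is byte-swapped in place in 32-bit units, mirroring what the
 * decoder undoes. */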
1263 static int encode_frame(AVCodecContext *avctx, unsigned char *buf, int buf_size, void *data){
1264     HYuvContext *s = avctx->priv_data;
1265     AVFrame *pict = data;
1266     const int width= s->width;
1267     const int width2= s->width>>1;
1268     const int height= s->height;
1269     const int fake_ystride= s->interlaced ? pict->linesize[0]*2  : pict->linesize[0];
1270     const int fake_ustride= s->interlaced ? pict->linesize[1]*2  : pict->linesize[1];
1271     const int fake_vstride= s->interlaced ? pict->linesize[2]*2  : pict->linesize[2];
1272     AVFrame * const p= &s->picture;
1273     int i, j, size=0;
1274
1275     *p = *pict;
1276     p->pict_type= AV_PICTURE_TYPE_I;
1277     p->key_frame= 1;
1278
1279     if(s->context){
1280         for(i=0; i<3; i++){
1281             generate_len_table(s->len[i], s->stats[i]);
1282             if(generate_bits_table(s->bits[i], s->len[i])<0)
1283                 return -1;
1284             size+= store_table(s, s->len[i], &buf[size]);
1285         }
1286
1287         for(i=0; i<3; i++)
1288             for(j=0; j<256; j++)
1289                 s->stats[i][j] >>= 1;
1290     }
1291
1292     init_put_bits(&s->pb, buf+size, buf_size-size);
1293
1294     if(avctx->pix_fmt == PIX_FMT_YUV422P || avctx->pix_fmt == PIX_FMT_YUV420P){
1295         int lefty, leftu, leftv, y, cy;
1296
1297         put_bits(&s->pb, 8, leftv= p->data[2][0]);
1298         put_bits(&s->pb, 8, lefty= p->data[0][1]);
1299         put_bits(&s->pb, 8, leftu= p->data[1][0]);
1300         put_bits(&s->pb, 8,        p->data[0][0]);
1301
1302         lefty= sub_left_prediction(s, s->temp[0], p->data[0], width , 0);
1303         leftu= sub_left_prediction(s, s->temp[1], p->data[1], width2, 0);
1304         leftv= sub_left_prediction(s, s->temp[2], p->data[2], width2, 0);
1305
1306         encode_422_bitstream(s, 2, width-2);
1307
1308         if(s->predictor==MEDIAN){
1309             int lefttopy, lefttopu, lefttopv;
1310             cy=y=1;
1311             if(s->interlaced){
1312                 lefty= sub_left_prediction(s, s->temp[0], p->data[0]+p->linesize[0], width , lefty);
1313                 leftu= sub_left_prediction(s, s->temp[1], p->data[1]+p->linesize[1], width2, leftu);
1314                 leftv= sub_left_prediction(s, s->temp[2], p->data[2]+p->linesize[2], width2, leftv);
1315
1316                 encode_422_bitstream(s, 0, width);
1317                 y++; cy++;
1318             }
1319
1320             lefty= sub_left_prediction(s, s->temp[0], p->data[0]+fake_ystride, 4, lefty);
1321             leftu= sub_left_prediction(s, s->temp[1], p->data[1]+fake_ustride, 2, leftu);
1322             leftv= sub_left_prediction(s, s->temp[2], p->data[2]+fake_vstride, 2, leftv);
1323
1324             encode_422_bitstream(s, 0, 4);
1325
1326             lefttopy= p->data[0][3];
1327             lefttopu= p->data[1][1];
1328             lefttopv= p->data[2][1];
1329             s->dsp.sub_hfyu_median_prediction(s->temp[0], p->data[0]+4, p->data[0] + fake_ystride+4, width-4 , &lefty, &lefttopy);
1330             s->dsp.sub_hfyu_median_prediction(s->temp[1], p->data[1]+2, p->data[1] + fake_ustride+2, width2-2, &leftu, &lefttopu);
1331             s->dsp.sub_hfyu_median_prediction(s->temp[2], p->data[2]+2, p->data[2] + fake_vstride+2, width2-2, &leftv, &lefttopv);
1332             encode_422_bitstream(s, 0, width-4);
1333             y++; cy++;
1334
1335             for(; y<height; y++,cy++){
1336                 uint8_t *ydst, *udst, *vdst;
1337
1338                 if(s->bitstream_bpp==12){
1339                     while(2*cy > y){
1340                         ydst= p->data[0] + p->linesize[0]*y;
1341                         s->dsp.sub_hfyu_median_prediction(s->temp[0], ydst - fake_ystride, ydst, width , &lefty, &lefttopy);
1342                         encode_gray_bitstream(s, width);
1343                         y++;
1344                     }
1345                     if(y>=height) break;
1346                 }
1347                 ydst= p->data[0] + p->linesize[0]*y;
1348                 udst= p->data[1] + p->linesize[1]*cy;
1349                 vdst= p->data[2] + p->linesize[2]*cy;
1350
1351                 s->dsp.sub_hfyu_median_prediction(s->temp[0], ydst - fake_ystride, ydst, width , &lefty, &lefttopy);
1352                 s->dsp.sub_hfyu_median_prediction(s->temp[1], udst - fake_ustride, udst, width2, &leftu, &lefttopu);
1353                 s->dsp.sub_hfyu_median_prediction(s->temp[2], vdst - fake_vstride, vdst, width2, &leftv, &lefttopv);
1354
1355                 encode_422_bitstream(s, 0, width);
1356             }
1357         }else{
1358             for(cy=y=1; y<height; y++,cy++){
1359                 uint8_t *ydst, *udst, *vdst;
1360
1361                 /* encode a luma only line & y++ */
1362                 if(s->bitstream_bpp==12){
1363                     ydst= p->data[0] + p->linesize[0]*y;
1364
1365                     if(s->predictor == PLANE && s->interlaced < y){
1366                         s->dsp.diff_bytes(s->temp[1], ydst, ydst - fake_ystride, width);
1367
1368                         lefty= sub_left_prediction(s, s->temp[0], s->temp[1], width , lefty);
1369                     }else{
1370                         lefty= sub_left_prediction(s, s->temp[0], ydst, width , lefty);
1371                     }
1372                     encode_gray_bitstream(s, width);
1373                     y++;
1374                     if(y>=height) break;
1375                 }
1376
1377                 ydst= p->data[0] + p->linesize[0]*y;
1378                 udst= p->data[1] + p->linesize[1]*cy;
1379                 vdst= p->data[2] + p->linesize[2]*cy;
1380
1381                 if(s->predictor == PLANE && s->interlaced < cy){
1382                     s->dsp.diff_bytes(s->temp[1], ydst, ydst - fake_ystride, width);
1383                     s->dsp.diff_bytes(s->temp[2], udst, udst - fake_ustride, width2);
1384                     s->dsp.diff_bytes(s->temp[2] + width2, vdst, vdst - fake_vstride, width2);
1385
1386                     lefty= sub_left_prediction(s, s->temp[0], s->temp[1], width , lefty);
1387                     leftu= sub_left_prediction(s, s->temp[1], s->temp[2], width2, leftu);
1388                     leftv= sub_left_prediction(s, s->temp[2], s->temp[2] + width2, width2, leftv);
1389                 }else{
1390                     lefty= sub_left_prediction(s, s->temp[0], ydst, width , lefty);
1391                     leftu= sub_left_prediction(s, s->temp[1], udst, width2, leftu);
1392                     leftv= sub_left_prediction(s, s->temp[2], vdst, width2, leftv);
1393                 }
1394
1395                 encode_422_bitstream(s, 0, width);
1396             }
1397         }
1398     }else if(avctx->pix_fmt == PIX_FMT_RGB32){
1399         uint8_t *data = p->data[0] + (height-1)*p->linesize[0];
1400         const int stride = -p->linesize[0];
1401         const int fake_stride = -fake_ystride;
1402         int y;
1403         int leftr, leftg, leftb, lefta;
1404
1405         put_bits(&s->pb, 8, lefta= data[A]);
1406         put_bits(&s->pb, 8, leftr= data[R]);
1407         put_bits(&s->pb, 8, leftg= data[G]);
1408         put_bits(&s->pb, 8, leftb= data[B]);
1409
1410         sub_left_prediction_bgr32(s, s->temp[0], data+4, width-1, &leftr, &leftg, &leftb, &lefta);
1411         encode_bgra_bitstream(s, width-1, 4);
1412
1413         for(y=1; y<s->height; y++){
1414             uint8_t *dst = data + y*stride;
1415             if(s->predictor == PLANE && s->interlaced < y){
1416                 s->dsp.diff_bytes(s->temp[1], dst, dst - fake_stride, width*4);
1417                 sub_left_prediction_bgr32(s, s->temp[0], s->temp[1], width, &leftr, &leftg, &leftb, &lefta);
1418             }else{
1419                 sub_left_prediction_bgr32(s, s->temp[0], dst, width, &leftr, &leftg, &leftb, &lefta);
1420             }
1421             encode_bgra_bitstream(s, width, 4);
1422         }
1423     }else if(avctx->pix_fmt == PIX_FMT_RGB24){
1424         uint8_t *data = p->data[0] + (height-1)*p->linesize[0];
1425         const int stride = -p->linesize[0];
1426         const int fake_stride = -fake_ystride;
1427         int y;
1428         int leftr, leftg, leftb;
1429
1430         put_bits(&s->pb, 8, leftr= data[0]);
1431         put_bits(&s->pb, 8, leftg= data[1]);
1432         put_bits(&s->pb, 8, leftb= data[2]);
1433         put_bits(&s->pb, 8, 0);
1434
1435         sub_left_prediction_rgb24(s, s->temp[0], data+3, width-1, &leftr, &leftg, &leftb);
1436         encode_bgra_bitstream(s, width-1, 3);
1437
1438         for(y=1; y<s->height; y++){
1439             uint8_t *dst = data + y*stride;
1440             if(s->predictor == PLANE && s->interlaced < y){
1441                 s->dsp.diff_bytes(s->temp[1], dst, dst - fake_stride, width*3);
1442                 sub_left_prediction_rgb24(s, s->temp[0], s->temp[1], width, &leftr, &leftg, &leftb);
1443             }else{
1444                 sub_left_prediction_rgb24(s, s->temp[0], dst, width, &leftr, &leftg, &leftb);
1445             }
1446             encode_bgra_bitstream(s, width, 3);
1447         }
1448     }else{
1449         av_log(avctx, AV_LOG_ERROR, "Format not supported!\n");
1450     }
1451     emms_c();
1452
1453     size+= (put_bits_count(&s->pb)+31)/8;
1454     put_bits(&s->pb, 16, 0);
1455     put_bits(&s->pb, 15, 0);
1456     size/= 4;
1457
1458     if((s->flags&CODEC_FLAG_PASS1) && (s->picture_number&31)==0){
1459         int j;
1460         char *p= avctx->stats_out;
1461         char *end= p + 1024*30;
1462         for(i=0; i<3; i++){
1463             for(j=0; j<256; j++){
1464                 snprintf(p, end-p, "%"PRIu64" ", s->stats[i][j]);
1465                 p+= strlen(p);
1466                 s->stats[i][j]= 0;
1467             }
1468             snprintf(p, end-p, "\n");
1469             p++;
1470         }
1471     } else
1472         avctx->stats_out[0] = '\0';
1473     if(!(s->avctx->flags2 & CODEC_FLAG2_NO_OUTPUT)){
1474         flush_put_bits(&s->pb);
1475         s->dsp.bswap_buf((uint32_t*)buf, (uint32_t*)buf, size);
1476     }
1477
1478     s->picture_number++;
1479
1480     return size*4;
1481 }
1482
1483 static av_cold int encode_end(AVCodecContext *avctx)
1484 {
1485     HYuvContext *s = avctx->priv_data;
1486
1487     common_end(s);
1488
1489     av_freep(&avctx->extradata);
1490     av_freep(&avctx->stats_out);
1491
1492     return 0;
1493 }
1494 #endif /* CONFIG_HUFFYUV_ENCODER || CONFIG_FFVHUFF_ENCODER */
1495
1496 #if CONFIG_HUFFYUV_DECODER
1497 AVCodec ff_huffyuv_decoder = {
1498     .name           = "huffyuv",
1499     .type           = AVMEDIA_TYPE_VIDEO,
1500     .id             = CODEC_ID_HUFFYUV,
1501     .priv_data_size = sizeof(HYuvContext),
1502     .init           = decode_init,
1503     .close          = decode_end,
1504     .decode         = decode_frame,
1505     .capabilities   = CODEC_CAP_DR1 | CODEC_CAP_DRAW_HORIZ_BAND | CODEC_CAP_FRAME_THREADS,
1506     .init_thread_copy = ONLY_IF_THREADS_ENABLED(decode_init_thread_copy),
1507     .long_name = NULL_IF_CONFIG_SMALL("Huffyuv / HuffYUV"),
1508 };
1509 #endif
1510
1511 #if CONFIG_FFVHUFF_DECODER
1512 AVCodec ff_ffvhuff_decoder = {
1513     .name           = "ffvhuff",
1514     .type           = AVMEDIA_TYPE_VIDEO,
1515     .id             = CODEC_ID_FFVHUFF,
1516     .priv_data_size = sizeof(HYuvContext),
1517     .init           = decode_init,
1518     .close          = decode_end,
1519     .decode         = decode_frame,
1520     .capabilities   = CODEC_CAP_DR1 | CODEC_CAP_DRAW_HORIZ_BAND | CODEC_CAP_FRAME_THREADS,
1521     .init_thread_copy = ONLY_IF_THREADS_ENABLED(decode_init_thread_copy),
1522     .long_name = NULL_IF_CONFIG_SMALL("Huffyuv FFmpeg variant"),
1523 };
1524 #endif
1525
1526 #if CONFIG_HUFFYUV_ENCODER
1527 AVCodec ff_huffyuv_encoder = {
1528     .name           = "huffyuv",
1529     .type           = AVMEDIA_TYPE_VIDEO,
1530     .id             = CODEC_ID_HUFFYUV,
1531     .priv_data_size = sizeof(HYuvContext),
1532     .init           = encode_init,
1533     .encode         = encode_frame,
1534     .close          = encode_end,
1535     .pix_fmts= (const enum PixelFormat[]){PIX_FMT_YUV422P, PIX_FMT_RGB24, PIX_FMT_RGB32, PIX_FMT_NONE},
1536     .long_name = NULL_IF_CONFIG_SMALL("Huffyuv / HuffYUV"),
1537 };
1538 #endif
1539
1540 #if CONFIG_FFVHUFF_ENCODER
1541 AVCodec ff_ffvhuff_encoder = {
1542     .name           = "ffvhuff",
1543     .type           = AVMEDIA_TYPE_VIDEO,
1544     .id             = CODEC_ID_FFVHUFF,
1545     .priv_data_size = sizeof(HYuvContext),
1546     .init           = encode_init,
1547     .encode         = encode_frame,
1548     .close          = encode_end,
1549     .pix_fmts= (const enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_YUV422P, PIX_FMT_RGB24, PIX_FMT_RGB32, PIX_FMT_NONE},
1550     .long_name = NULL_IF_CONFIG_SMALL("Huffyuv FFmpeg variant"),
1551 };
1552 #endif