} RawVideoContext;
static const PixelFormatTag pixelFormatBpsAVI[] = {
+ { PIX_FMT_PAL8, 4 },
{ PIX_FMT_PAL8, 8 },
{ PIX_FMT_RGB555, 15 },
{ PIX_FMT_RGB555, 16 },
static const PixelFormatTag pixelFormatBpsMOV[] = {
/* FIXME: extend swscaler to support these palettized/low-bpp formats */
/* http://developer.apple.com/documentation/QuickTime/QTFF/QTFFChap3/chapter_4_section_2.html */
+ { PIX_FMT_PAL8, 4 },
{ PIX_FMT_PAL8, 8 },
{ PIX_FMT_BGR555, 16 },
{ PIX_FMT_RGB24, 24 },
return PIX_FMT_YUV420P;
}
-static int raw_init_decoder(AVCodecContext *avctx)
+static av_cold int raw_init_decoder(AVCodecContext *avctx)
{
RawVideoContext *context = avctx->priv_data;
static int raw_decode(AVCodecContext *avctx,
void *data, int *data_size,
- uint8_t *buf, int buf_size)
+ const uint8_t *buf, int buf_size)
{
RawVideoContext *context = avctx->priv_data;
frame->interlaced_frame = avctx->coded_frame->interlaced_frame;
frame->top_field_first = avctx->coded_frame->top_field_first;
+ // 4 bpp raw video in AVI and MOV containers (yes, this is ugly...)
+ if(avctx->bits_per_sample == 4 && avctx->pix_fmt==PIX_FMT_PAL8 &&
+ (!avctx->codec_tag || avctx->codec_tag == MKTAG('r','a','w',' '))){
+ int i;
+ for(i=256*2; i+1 < context->length>>1; i++){
+ context->buffer[2*i+0]= buf[i-256*2]>>4;
+ context->buffer[2*i+1]= buf[i-256*2]&15;
+ }
+ buf= context->buffer + 256*4;
+ buf_size= context->length - 256*4;
+ }
+
if(buf_size < context->length - (avctx->pix_fmt==PIX_FMT_PAL8 ? 256*4 : 0))
return -1;
return buf_size;
}
-static int raw_close_decoder(AVCodecContext *avctx)
+static av_cold int raw_close_decoder(AVCodecContext *avctx)
{
RawVideoContext *context = avctx->priv_data;
NULL,
raw_close_decoder,
raw_decode,
+ .long_name = "raw video",
};