X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=libavcodec%2Fdpx.c;h=8e77c09bb17aec20b54cccee501a08e52984fc86;hb=d92f38c179591a608390ffa9fee59c309142e79d;hp=b1833ed9ef06af59685ad3c76db30c3057178199;hpb=bad70b7af6b909691f5389e14eb7d0c03db10af9;p=ffmpeg diff --git a/libavcodec/dpx.c b/libavcodec/dpx.c index b1833ed9ef0..8e77c09bb17 100644 --- a/libavcodec/dpx.c +++ b/libavcodec/dpx.c @@ -23,10 +23,43 @@ #include "libavutil/intreadwrite.h" #include "libavutil/intfloat.h" #include "libavutil/imgutils.h" +#include "libavutil/timecode.h" #include "bytestream.h" #include "avcodec.h" #include "internal.h" +enum DPX_TRC { + DPX_TRC_USER_DEFINED = 0, + DPX_TRC_PRINTING_DENSITY = 1, + DPX_TRC_LINEAR = 2, + DPX_TRC_LOGARITHMIC = 3, + DPX_TRC_UNSPECIFIED_VIDEO = 4, + DPX_TRC_SMPTE_274 = 5, + DPX_TRC_ITU_R_709_4 = 6, + DPX_TRC_ITU_R_601_625 = 7, + DPX_TRC_ITU_R_601_525 = 8, + DPX_TRC_SMPTE_170 = 9, + DPX_TRC_ITU_R_624_4_PAL = 10, + DPX_TRC_Z_LINEAR = 11, + DPX_TRC_Z_HOMOGENEOUS = 12, +}; + +enum DPX_COL_SPEC { + DPX_COL_SPEC_USER_DEFINED = 0, + DPX_COL_SPEC_PRINTING_DENSITY = 1, + /* 2 = N/A */ + /* 3 = N/A */ + DPX_COL_SPEC_UNSPECIFIED_VIDEO = 4, + DPX_COL_SPEC_SMPTE_274 = 5, + DPX_COL_SPEC_ITU_R_709_4 = 6, + DPX_COL_SPEC_ITU_R_601_625 = 7, + DPX_COL_SPEC_ITU_R_601_525 = 8, + DPX_COL_SPEC_SMPTE_170 = 9, + DPX_COL_SPEC_ITU_R_624_4_PAL = 10, + /* 11 = N/A */ + /* 12 = N/A */ +}; + static unsigned int read16(const uint8_t **ptr, int is_big) { unsigned int temp; @@ -126,14 +159,15 @@ static int decode_frame(AVCodecContext *avctx, AVFrame *const p = data; uint8_t *ptr[AV_NUM_DATA_POINTERS]; uint32_t header_version, version = 0; - char creator[101]; - char input_device[33]; + char creator[101] = { 0 }; + char input_device[33] = { 0 }; unsigned int offset; int magic_num, endian; - int x, y, stride, i, ret; + int x, y, stride, i, j, ret; int w, h, bits_per_color, descriptor, elements, packing; - int encoding, need_align = 0; + int yuv, color_trc, color_spec; + int encoding, need_align = 0, unpadded_10bit = 0; unsigned int rgbBuffer = 0; int n_datum = 0; @@ -192,6 +226,8 @@ static int decode_frame(AVCodecContext *avctx, // Need to end in 0x320 to read the descriptor buf += 20; descriptor = buf[0]; + color_trc = buf[1]; + color_spec = buf[2]; // Need to end in 0x323 to read the bits per color buf += 3; @@ -206,6 +242,9 @@ static int decode_frame(AVCodecContext *avctx, return AVERROR_PATCHWELCOME; } + if (bits_per_color > 32) + return AVERROR_INVALIDDATA; + buf += 820; avctx->sample_aspect_ratio.num = read32(&buf, endian); avctx->sample_aspect_ratio.den = read32(&buf, endian); @@ -216,31 +255,109 @@ static int decode_frame(AVCodecContext *avctx, else avctx->sample_aspect_ratio = (AVRational){ 0, 1 }; + /* preferred frame rate from Motion-picture film header */ if (offset >= 1724 + 4) { buf = avpkt->data + 1724; i = read32(&buf, endian); - if(i) { + if(i && i != 0xFFFFFFFF) { + AVRational q = av_d2q(av_int2float(i), 4096); + if (q.num > 0 && q.den > 0) + avctx->framerate = q; + } + } + + /* alternative frame rate from television header */ + if (offset >= 1940 + 4 && + !(avctx->framerate.num && avctx->framerate.den)) { + buf = avpkt->data + 1940; + i = read32(&buf, endian); + if(i && i != 0xFFFFFFFF) { AVRational q = av_d2q(av_int2float(i), 4096); if (q.num > 0 && q.den > 0) avctx->framerate = q; } } + /* SMPTE TC from television header */ + if (offset >= 1920 + 4) { + uint32_t tc; + uint32_t *tc_sd; + char tcbuf[AV_TIMECODE_STR_SIZE]; + + buf = avpkt->data + 1920; + // read32 to native endian, av_bswap32 to 
opposite of native for
+        // compatibility with av_timecode_make_smpte_tc_string2 etc
+        tc = av_bswap32(read32(&buf, endian));
+
+        if (i != 0xFFFFFFFF) {
+            AVFrameSideData *tcside =
+                av_frame_new_side_data(p, AV_FRAME_DATA_S12M_TIMECODE,
+                                       sizeof(uint32_t) * 4);
+            if (!tcside)
+                return AVERROR(ENOMEM);
+
+            tc_sd = (uint32_t*)tcside->data;
+            tc_sd[0] = 1;
+            tc_sd[1] = tc;
+
+            av_timecode_make_smpte_tc_string2(tcbuf, avctx->framerate,
+                                              tc_sd[1], 0, 0);
+            av_dict_set(&p->metadata, "timecode", tcbuf, 0);
+        }
+    }
+
+    /* color range from television header */
+    if (offset >= 1964 + 4) {
+        buf = avpkt->data + 1952;
+        i = read32(&buf, endian);
+
+        buf = avpkt->data + 1964;
+        j = read32(&buf, endian);
+
+        if (i != 0xFFFFFFFF && j != 0xFFFFFFFF) {
+            float minCV, maxCV;
+            minCV = av_int2float(i);
+            maxCV = av_int2float(j);
+            if (bits_per_color >= 1 &&
+                minCV == 0.0f && maxCV == ((1<<bits_per_color) - 1)) {
+                avctx->color_range = AVCOL_RANGE_JPEG;
+            } else if (bits_per_color >= 8 &&
+                       minCV == (1 <<(bits_per_color - 4)) &&
+                       maxCV == (235<<(bits_per_color - 8))) {
+                avctx->color_range = AVCOL_RANGE_MPEG;
+            }
+        }
+    }
+
     switch (descriptor) {
+    case 1:   // R
+    case 2:   // G
+    case 3:   // B
+    case 4:   // A
     case 6:   // Y
         elements = 1;
+        yuv = 1;
+        break;
+    case 50:  // RGB
+        elements = 3;
+        yuv = 0;
         break;
     case 52:  // ABGR
     case 51:  // RGBA
-    case 103: // UYVA4444
         elements = 4;
+        yuv = 0;
+        break;
+    case 100: // UYVY422
+        elements = 2;
+        yuv = 1;
         break;
-    case 50:  // RGB
     case 102: // UYV444
         elements = 3;
+        yuv = 1;
         break;
-    case 100: // UYVY422
-        elements = 2;
+    case 103: // UYVA4444
+        elements = 4;
+        yuv = 1;
         break;
     default:
         avpriv_report_missing_feature(avctx, "Descriptor %d", descriptor);
@@ -275,8 +392,10 @@ static int decode_frame(AVCodecContext *avctx,
     case 16:
         stride = 2 * avctx->width * elements;
         break;
-    case 1:
     case 32:
+        stride = 4 * avctx->width * elements;
+        break;
+    case 1:
     case 64:
         avpriv_report_missing_feature(avctx, "Depth %d", bits_per_color);
         return AVERROR_PATCHWELCOME;
@@ -284,6 +403,82 @@ static int decode_frame(AVCodecContext *avctx,
         return AVERROR_INVALIDDATA;
     }
 
+    switch (color_trc) {
+    case DPX_TRC_LINEAR:
+        avctx->color_trc = AVCOL_TRC_LINEAR;
+        break;
+    case DPX_TRC_SMPTE_274:
+    case DPX_TRC_ITU_R_709_4:
+        avctx->color_trc = AVCOL_TRC_BT709;
+        break;
+    case DPX_TRC_ITU_R_601_625:
+    case DPX_TRC_ITU_R_601_525:
+    case DPX_TRC_SMPTE_170:
+        avctx->color_trc = AVCOL_TRC_SMPTE170M;
+        break;
+    case DPX_TRC_ITU_R_624_4_PAL:
+        avctx->color_trc = AVCOL_TRC_GAMMA28;
+        break;
+    case DPX_TRC_USER_DEFINED:
+    case DPX_TRC_UNSPECIFIED_VIDEO:
+        /* Nothing to do */
+        break;
+    default:
+        av_log(avctx, AV_LOG_VERBOSE, "Cannot map DPX transfer characteristic "
+            "%d to color_trc.\n", color_trc);
+        break;
+    }
+
+    switch (color_spec) {
+    case DPX_COL_SPEC_SMPTE_274:
+    case DPX_COL_SPEC_ITU_R_709_4:
+        avctx->color_primaries = AVCOL_PRI_BT709;
+        break;
+    case DPX_COL_SPEC_ITU_R_601_625:
+    case DPX_COL_SPEC_ITU_R_624_4_PAL:
+        avctx->color_primaries = AVCOL_PRI_BT470BG;
+        break;
+    case DPX_COL_SPEC_ITU_R_601_525:
+    case DPX_COL_SPEC_SMPTE_170:
+        avctx->color_primaries = AVCOL_PRI_SMPTE170M;
+        break;
+    case DPX_COL_SPEC_USER_DEFINED:
+    case DPX_COL_SPEC_UNSPECIFIED_VIDEO:
+        /* Nothing to do */
+        break;
+    default:
+        av_log(avctx, AV_LOG_VERBOSE, "Cannot map DPX color specification "
+            "%d to color_primaries.\n", color_spec);
+        break;
+    }
+
+    if (yuv) {
+        switch (color_spec) {
+        case DPX_COL_SPEC_SMPTE_274:
+        case DPX_COL_SPEC_ITU_R_709_4:
+            avctx->colorspace = AVCOL_SPC_BT709;
+            break;
+        case DPX_COL_SPEC_ITU_R_601_625:
+        case
DPX_COL_SPEC_ITU_R_624_4_PAL: + avctx->colorspace = AVCOL_SPC_BT470BG; + break; + case DPX_COL_SPEC_ITU_R_601_525: + case DPX_COL_SPEC_SMPTE_170: + avctx->colorspace = AVCOL_SPC_SMPTE170M; + break; + case DPX_COL_SPEC_USER_DEFINED: + case DPX_COL_SPEC_UNSPECIFIED_VIDEO: + /* Nothing to do */ + break; + default: + av_log(avctx, AV_LOG_INFO, "Cannot map DPX color specification " + "%d to colorspace.\n", color_spec); + break; + } + } else { + avctx->colorspace = AVCOL_SPC_RGB; + } + // Table 3c: Runs will always break at scan line boundaries. Packing // will always break to the next 32-bit word at scan-line boundaries. // Unfortunately, the encoder produced invalid files, so attempt @@ -305,6 +500,14 @@ static int decode_frame(AVCodecContext *avctx, } switch (1000 * descriptor + 10 * bits_per_color + endian) { + case 1081: + case 1080: + case 2081: + case 2080: + case 3081: + case 3080: + case 4081: + case 4080: case 6081: case 6080: avctx->pix_fmt = AV_PIX_FMT_GRAY8; @@ -313,6 +516,20 @@ static int decode_frame(AVCodecContext *avctx, case 6120: avctx->pix_fmt = AV_PIX_FMT_GRAY12; break; + case 1320: + case 2320: + case 3320: + case 4320: + case 6320: + avctx->pix_fmt = AV_PIX_FMT_GRAYF32LE; + break; + case 1321: + case 2321: + case 3321: + case 4321: + case 6321: + avctx->pix_fmt = AV_PIX_FMT_GRAYF32BE; + break; case 50081: case 50080: avctx->pix_fmt = AV_PIX_FMT_RGB24; @@ -363,6 +580,18 @@ static int decode_frame(AVCodecContext *avctx, case 51160: avctx->pix_fmt = AV_PIX_FMT_RGBA64LE; break; + case 50320: + avctx->pix_fmt = AV_PIX_FMT_GBRPF32LE; + break; + case 50321: + avctx->pix_fmt = AV_PIX_FMT_GBRPF32BE; + break; + case 51320: + avctx->pix_fmt = AV_PIX_FMT_GBRAPF32LE; + break; + case 51321: + avctx->pix_fmt = AV_PIX_FMT_GBRAPF32BE; + break; case 100081: avctx->pix_fmt = AV_PIX_FMT_UYVY422; break; @@ -373,7 +602,8 @@ static int decode_frame(AVCodecContext *avctx, avctx->pix_fmt = AV_PIX_FMT_YUVA444P; break; default: - av_log(avctx, AV_LOG_ERROR, "Unsupported format\n"); + av_log(avctx, AV_LOG_ERROR, "Unsupported format %d\n", + 1000 * descriptor + 10 * bits_per_color + endian); return AVERROR_PATCHWELCOME; } @@ -390,6 +620,12 @@ static int decode_frame(AVCodecContext *avctx, input_device[32] = '\0'; av_dict_set(&p->metadata, "Input Device", input_device, 0); + // Some devices do not pad 10bit samples to whole 32bit words per row + if (!memcmp(input_device, "Scanity", 7) || + !memcmp(creator, "Lasergraphics Inc.", 18)) { + unpadded_10bit = 1; + } + // Move pointer to offset from start of file buf = avpkt->data + offset; @@ -422,7 +658,7 @@ static int decode_frame(AVCodecContext *avctx, read10in32(&buf, &rgbBuffer, &n_datum, endian, shift); } - if (memcmp(input_device, "Scanity", 7)) + if (!unpadded_10bit) n_datum = 0; for (i = 0; i < elements; i++) ptr[i] += p->linesize[i]; @@ -465,6 +701,36 @@ static int decode_frame(AVCodecContext *avctx, buf += need_align; } break; + case 32: + if (elements == 1) { + av_image_copy_plane(ptr[0], p->linesize[0], + buf, stride, + elements * avctx->width * 4, avctx->height); + } else { + for (y = 0; y < avctx->height; y++) { + ptr[0] = p->data[0] + y * p->linesize[0]; + ptr[1] = p->data[1] + y * p->linesize[1]; + ptr[2] = p->data[2] + y * p->linesize[2]; + ptr[3] = p->data[3] + y * p->linesize[3]; + for (x = 0; x < avctx->width; x++) { + AV_WN32(ptr[2], AV_RN32(buf)); + AV_WN32(ptr[0], AV_RN32(buf + 4)); + AV_WN32(ptr[1], AV_RN32(buf + 8)); + if (avctx->pix_fmt == AV_PIX_FMT_GBRAPF32BE || + avctx->pix_fmt == AV_PIX_FMT_GBRAPF32LE) { + AV_WN32(ptr[3], 
AV_RN32(buf + 12)); + buf += 4; + ptr[3] += 4; + } + + buf += 12; + ptr[2] += 4; + ptr[0] += 4; + ptr[1] += 4; + } + } + } + break; case 16: elements *= 2; case 8: @@ -496,7 +762,7 @@ static int decode_frame(AVCodecContext *avctx, return buf_size; } -AVCodec ff_dpx_decoder = { +const AVCodec ff_dpx_decoder = { .name = "dpx", .long_name = NULL_IF_CONFIG_SMALL("DPX (Digital Picture Exchange) image"), .type = AVMEDIA_TYPE_VIDEO,
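
Note on the timecode handling in the hunk at header bytes 1920..1923: SMPTE 268M stores the television-header timecode as four BCD bytes in HH MM SS FF order, whereas the S12M side data filled in above (and av_timecode_make_smpte_tc_string2()) keeps hours in the low byte and frames in the high byte, which is what the av_bswap32() is for. Below is a minimal standalone sketch of that conversion using no libavutil calls; the bcd2uint() helper and the sample value are illustrative only and not part of the patch.

#include <stdint.h>
#include <stdio.h>

/* Illustrative helper: convert one BCD-coded byte (0x00..0x99) to binary. */
static unsigned bcd2uint(uint32_t bcd)
{
    return (bcd >> 4) * 10 + (bcd & 0x0f);
}

int main(void)
{
    /* Example value only: 01:02:03:04 as it sits in DPX header bytes
     * 1920..1923 (BCD HH MM SS FF), already read to native endianness. */
    uint32_t dpx_tc = 0x01020304;

    /* Byte-swap as the decoder does, so hours land in the low byte and
     * frames in the high byte -- the layout used for the
     * AV_FRAME_DATA_S12M_TIMECODE side data written above. */
    uint32_t smpte = ((dpx_tc & 0x000000ffu) << 24) |
                     ((dpx_tc & 0x0000ff00u) <<  8) |
                     ((dpx_tc & 0x00ff0000u) >>  8) |
                     ((dpx_tc & 0xff000000u) >> 24);

    printf("%02u:%02u:%02u:%02u\n",
           bcd2uint(smpte       & 0x3f),  /* hours   */
           bcd2uint(smpte >>  8 & 0x7f),  /* minutes */
           bcd2uint(smpte >> 16 & 0x7f),  /* seconds */
           bcd2uint(smpte >> 24 & 0x3f)); /* frames  */
    return 0;
}

This prints "01:02:03:04"; passing the same swapped word to av_timecode_make_smpte_tc_string2() together with the frame rate parsed earlier is what produces the "timecode" metadata entry in the patch.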