typedef struct ADPCMDecodeContext {
    ADPCMChannelStatus status[14];  /* per-channel predictor/step decoding state */
    int vqa_version;                /**< VQA version. Used for ADPCM_IMA_WS */
    int has_status;                 /**< Status flag. Reset to 0 after a flush. */
} ADPCMDecodeContext;
+static void adpcm_flush(AVCodecContext *avctx);
+
static av_cold int adpcm_decode_init(AVCodecContext * avctx)
{
ADPCMDecodeContext *c = avctx->priv_data;
unsigned int min_channels = 1;
unsigned int max_channels = 2;
+ adpcm_flush(avctx);
+
switch(avctx->codec->id) {
- case AV_CODEC_ID_ADPCM_IMA_CUNNING:
+ case AV_CODEC_ID_ADPCM_IMA_AMV:
max_channels = 1;
break;
case AV_CODEC_ID_ADPCM_DTK:
min_channels = 2;
max_channels = 8;
if (avctx->channels & 1) {
- avpriv_request_sample(avctx, "channel count %d\n", avctx->channels);
+ avpriv_request_sample(avctx, "channel count %d", avctx->channels);
return AVERROR_PATCHWELCOME;
}
break;
case AV_CODEC_ID_ADPCM_PSX:
max_channels = 8;
+ if (avctx->channels <= 0 || avctx->block_align % (16 * avctx->channels))
+ return AVERROR_INVALIDDATA;
break;
case AV_CODEC_ID_ADPCM_IMA_DAT4:
case AV_CODEC_ID_ADPCM_THP:
}
switch(avctx->codec->id) {
- case AV_CODEC_ID_ADPCM_CT:
- c->status[0].step = c->status[1].step = 511;
- break;
case AV_CODEC_ID_ADPCM_IMA_WAV:
if (avctx->bits_per_coded_sample < 2 || avctx->bits_per_coded_sample > 5)
return AVERROR_INVALIDDATA;
break;
- case AV_CODEC_ID_ADPCM_IMA_APC:
- if (avctx->extradata && avctx->extradata_size >= 8) {
- c->status[0].predictor = av_clip_intp2(AV_RL32(avctx->extradata ), 18);
- c->status[1].predictor = av_clip_intp2(AV_RL32(avctx->extradata + 4), 18);
- }
- break;
- case AV_CODEC_ID_ADPCM_IMA_APM:
- if (avctx->extradata) {
- if (avctx->extradata_size >= 28) {
- c->status[0].predictor = av_clip_intp2(AV_RL32(avctx->extradata + 16), 18);
- c->status[0].step_index = av_clip(AV_RL32(avctx->extradata + 20), 0, 88);
- c->status[1].predictor = av_clip_intp2(AV_RL32(avctx->extradata + 4), 18);
- c->status[1].step_index = av_clip(AV_RL32(avctx->extradata + 8), 0, 88);
- } else if (avctx->extradata_size >= 16) {
- c->status[0].predictor = av_clip_intp2(AV_RL32(avctx->extradata + 0), 18);
- c->status[0].step_index = av_clip(AV_RL32(avctx->extradata + 4), 0, 88);
- c->status[1].predictor = av_clip_intp2(AV_RL32(avctx->extradata + 8), 18);
- c->status[1].step_index = av_clip(AV_RL32(avctx->extradata + 12), 0, 88);
- }
- }
- break;
- case AV_CODEC_ID_ADPCM_IMA_WS:
- if (avctx->extradata && avctx->extradata_size >= 2)
- c->vqa_version = AV_RL16(avctx->extradata);
- break;
case AV_CODEC_ID_ADPCM_ARGO:
- if (avctx->bits_per_coded_sample != 4)
+ if (avctx->bits_per_coded_sample != 4 || avctx->block_align != 17 * avctx->channels)
return AVERROR_INVALIDDATA;
break;
case AV_CODEC_ID_ADPCM_ZORK:
switch (avctx->codec->id) {
case AV_CODEC_ID_ADPCM_AICA:
+ case AV_CODEC_ID_ADPCM_IMA_CUNNING:
case AV_CODEC_ID_ADPCM_IMA_DAT4:
case AV_CODEC_ID_ADPCM_IMA_QT:
case AV_CODEC_ID_ADPCM_IMA_WAV:
case AV_CODEC_ID_ADPCM_PSX:
case AV_CODEC_ID_ADPCM_MTAF:
case AV_CODEC_ID_ADPCM_ARGO:
+ case AV_CODEC_ID_ADPCM_IMA_MOFLEX:
avctx->sample_fmt = AV_SAMPLE_FMT_S16P;
break;
case AV_CODEC_ID_ADPCM_IMA_WS:
default:
avctx->sample_fmt = AV_SAMPLE_FMT_S16;
}
-
return 0;
}
}
}
-static inline int16_t adpcm_argo_expand_nibble(ADPCMChannelStatus *cs, int nibble, int control, int shift)
+int16_t ff_adpcm_argo_expand_nibble(ADPCMChannelStatus *cs, int nibble, int shift, int flag)
{
- int sample = nibble * (1 << shift);
+ int sample = sign_extend(nibble, 4) * (1 << shift);
- if (control & 0x04)
+ if (flag)
sample += (8 * cs->sample1) - (4 * cs->sample2);
else
sample += 4 * cs->sample1;
return 0;
nb_samples = 64;
break;
- case AV_CODEC_ID_ADPCM_ARGO:
- if (buf_size < 17 * ch)
- return 0;
- nb_samples = 32;
- break;
/* simple 4-bit adpcm */
case AV_CODEC_ID_ADPCM_CT:
case AV_CODEC_ID_ADPCM_IMA_APC:
case AV_CODEC_ID_ADPCM_4XM:
case AV_CODEC_ID_ADPCM_AGM:
case AV_CODEC_ID_ADPCM_IMA_DAT4:
+ case AV_CODEC_ID_ADPCM_IMA_MOFLEX:
case AV_CODEC_ID_ADPCM_IMA_ISS: header_size = 4 * ch; break;
- case AV_CODEC_ID_ADPCM_IMA_AMV: header_size = 8; break;
case AV_CODEC_ID_ADPCM_IMA_SMJPEG: header_size = 4 * ch; break;
}
if (header_size > 0)
/* more complex formats */
switch (avctx->codec->id) {
+ case AV_CODEC_ID_ADPCM_IMA_AMV:
+ bytestream2_skip(gb, 4);
+ has_coded_samples = 1;
+ *coded_samples = bytestream2_get_le32u(gb);
+ nb_samples = FFMIN((buf_size - 8) * 2, *coded_samples);
+ bytestream2_seek(gb, -8, SEEK_CUR);
+ break;
case AV_CODEC_ID_ADPCM_EA:
has_coded_samples = 1;
*coded_samples = bytestream2_get_le32(gb);
case AV_CODEC_ID_ADPCM_PSX:
nb_samples = buf_size / (16 * ch) * 28;
break;
+ case AV_CODEC_ID_ADPCM_ARGO:
+ nb_samples = buf_size / avctx->block_align * 32;
+ break;
case AV_CODEC_ID_ADPCM_ZORK:
nb_samples = buf_size / ch;
break;
*samples++ = adpcm_ima_expand_nibble(&c->status[st], v2, 3);
}
break;
+ case AV_CODEC_ID_ADPCM_IMA_MOFLEX:
+ for (channel = 0; channel < avctx->channels; channel++) {
+ cs = &c->status[channel];
+ cs->step_index = sign_extend(bytestream2_get_le16u(&gb), 16);
+ cs->predictor = sign_extend(bytestream2_get_le16u(&gb), 16);
+ if (cs->step_index > 88u){
+ av_log(avctx, AV_LOG_ERROR, "ERROR: step_index[%d] = %i\n",
+ channel, cs->step_index);
+ return AVERROR_INVALIDDATA;
+ }
+ }
+
+ for (int subframe = 0; subframe < nb_samples / 256; subframe++) {
+ for (channel = 0; channel < avctx->channels; channel++) {
+ samples = samples_p[channel] + 256 * subframe;
+ for (n = 0; n < 256; n += 2) {
+ int v = bytestream2_get_byteu(&gb);
+ *samples++ = adpcm_ima_expand_nibble(&c->status[channel], v & 0x0F, 3);
+ *samples++ = adpcm_ima_expand_nibble(&c->status[channel], v >> 4 , 3);
+ }
+ }
+ }
+ break;
case AV_CODEC_ID_ADPCM_IMA_DAT4:
for (channel = 0; channel < avctx->channels; channel++) {
cs = &c->status[channel];
}
break;
case AV_CODEC_ID_ADPCM_IMA_CUNNING:
- for (n = 0; n < nb_samples / 2; n++) {
- int v = bytestream2_get_byteu(&gb);
- *samples++ = adpcm_ima_cunning_expand_nibble(&c->status[0], v & 0x0F);
- *samples++ = adpcm_ima_cunning_expand_nibble(&c->status[0], v >> 4);
+ for (channel = 0; channel < avctx->channels; channel++) {
+ int16_t *smp = samples_p[channel];
+ for (n = 0; n < nb_samples / 2; n++) {
+ int v = bytestream2_get_byteu(&gb);
+ *smp++ = adpcm_ima_cunning_expand_nibble(&c->status[channel], v & 0x0F);
+ *smp++ = adpcm_ima_cunning_expand_nibble(&c->status[channel], v >> 4);
+ }
}
break;
case AV_CODEC_ID_ADPCM_IMA_OKI:
for (n = nb_samples / 2; n > 0; n--) {
int v = bytestream2_get_byteu(&gb);
- *smp++ = adpcm_ima_expand_nibble(&c->status[channel], v >> 4 , 3);
*smp++ = adpcm_ima_expand_nibble(&c->status[channel], v & 0x0F, 3);
+ *smp++ = adpcm_ima_expand_nibble(&c->status[channel], v >> 4 , 3);
}
}
} else {
for (n = nb_samples / 2; n > 0; n--) {
for (channel = 0; channel < avctx->channels; channel++) {
int v = bytestream2_get_byteu(&gb);
- *samples++ = adpcm_ima_expand_nibble(&c->status[channel], v >> 4 , 3);
- samples[st] = adpcm_ima_expand_nibble(&c->status[channel], v & 0x0F, 3);
+ *samples++ = adpcm_ima_expand_nibble(&c->status[channel], v & 0x0F, 3);
+ samples[st] = adpcm_ima_expand_nibble(&c->status[channel], v >> 4 , 3);
}
samples += avctx->channels;
}
}
break;
case AV_CODEC_ID_ADPCM_IMA_AMV:
+ av_assert0(avctx->channels == 1);
+
+ /*
+ * Header format:
+ * int16_t predictor;
+ * uint8_t step_index;
+ * uint8_t reserved;
+ * uint32_t frame_size;
+ *
+ * Some implementations have step_index as 16-bits, but others
+ * only use the lower 8 and store garbage in the upper 8.
+ */
c->status[0].predictor = sign_extend(bytestream2_get_le16u(&gb), 16);
c->status[0].step_index = bytestream2_get_byteu(&gb);
bytestream2_skipu(&gb, 5);
return AVERROR_INVALIDDATA;
}
- for (n = nb_samples >> (1 - st); n > 0; n--) {
+ for (n = nb_samples >> 1; n > 0; n--) {
int v = bytestream2_get_byteu(&gb);
*samples++ = adpcm_ima_expand_nibble(&c->status[0], v >> 4, 3);
*samples++ = adpcm_ima_expand_nibble(&c->status[0], v & 0xf, 3);
}
+
+ if (nb_samples & 1) {
+ int v = bytestream2_get_byteu(&gb);
+ *samples++ = adpcm_ima_expand_nibble(&c->status[0], v >> 4, 3);
+
+ if (v & 0x0F) {
+ /* Holds true on all the http://samples.mplayerhq.hu/amv samples. */
+ av_log(avctx, AV_LOG_WARNING, "Last nibble set on packet with odd sample count.\n");
+ av_log(avctx, AV_LOG_WARNING, "Sample will be skipped.\n");
+ }
+ }
break;
case AV_CODEC_ID_ADPCM_IMA_SMJPEG:
for (i = 0; i < avctx->channels; i++) {
}
break;
case AV_CODEC_ID_ADPCM_AICA:
- if (!c->has_status) {
- for (channel = 0; channel < avctx->channels; channel++)
- c->status[channel].step = 0;
- c->has_status = 1;
- }
for (channel = 0; channel < avctx->channels; channel++) {
samples = samples_p[channel];
for (n = nb_samples >> 1; n > 0; n--) {
}
break;
case AV_CODEC_ID_ADPCM_PSX:
- for (channel = 0; channel < avctx->channels; channel++) {
- samples = samples_p[channel];
-
- /* Read in every sample for this channel. */
- for (i = 0; i < nb_samples / 28; i++) {
- int filter, shift, flag, byte;
-
- filter = bytestream2_get_byteu(&gb);
- shift = filter & 0xf;
- filter = filter >> 4;
- if (filter >= FF_ARRAY_ELEMS(xa_adpcm_table))
- return AVERROR_INVALIDDATA;
- flag = bytestream2_get_byteu(&gb);
-
- /* Decode 28 samples. */
- for (n = 0; n < 28; n++) {
- int sample = 0, scale;
-
- if (flag < 0x07) {
- if (n & 1) {
- scale = sign_extend(byte >> 4, 4);
- } else {
- byte = bytestream2_get_byteu(&gb);
- scale = sign_extend(byte, 4);
+ for (int block = 0; block < avpkt->size / FFMAX(avctx->block_align, 16 * avctx->channels); block++) {
+ int nb_samples_per_block = 28 * FFMAX(avctx->block_align, 16 * avctx->channels) / (16 * avctx->channels);
+ for (channel = 0; channel < avctx->channels; channel++) {
+ samples = samples_p[channel] + block * nb_samples_per_block;
+ av_assert0((block + 1) * nb_samples_per_block <= nb_samples);
+
+ /* Read in every sample for this channel. */
+ for (i = 0; i < nb_samples_per_block / 28; i++) {
+ int filter, shift, flag, byte;
+
+ filter = bytestream2_get_byteu(&gb);
+ shift = filter & 0xf;
+ filter = filter >> 4;
+ if (filter >= FF_ARRAY_ELEMS(xa_adpcm_table))
+ return AVERROR_INVALIDDATA;
+ flag = bytestream2_get_byteu(&gb);
+
+ /* Decode 28 samples. */
+ for (n = 0; n < 28; n++) {
+ int sample = 0, scale;
+
+ if (flag < 0x07) {
+ if (n & 1) {
+ scale = sign_extend(byte >> 4, 4);
+ } else {
+ byte = bytestream2_get_byteu(&gb);
+ scale = sign_extend(byte, 4);
+ }
+
+ scale = scale * (1 << 12);
+ sample = (int)((scale >> shift) + (c->status[channel].sample1 * xa_adpcm_table[filter][0] + c->status[channel].sample2 * xa_adpcm_table[filter][1]) / 64);
}
-
- scale = scale * (1 << 12);
- sample = (int)((scale >> shift) + (c->status[channel].sample1 * xa_adpcm_table[filter][0] + c->status[channel].sample2 * xa_adpcm_table[filter][1]) / 64);
+ *samples++ = av_clip_int16(sample);
+ c->status[channel].sample2 = c->status[channel].sample1;
+ c->status[channel].sample1 = sample;
}
- *samples++ = av_clip_int16(sample);
- c->status[channel].sample2 = c->status[channel].sample1;
- c->status[channel].sample1 = sample;
}
}
}
* uint4_t right_samples[nb_samples];
*
* Format of the control byte:
- * MSB [SSSSDRRR] LSB
+ * MSB [SSSSRDRR] LSB
* S = (Shift Amount - 2)
* D = Decoder flag.
* R = Reserved
* Each block relies on the previous two samples of each channel.
* They should be 0 initially.
*/
- for (channel = 0; channel < avctx->channels; channel++) {
- int control, shift;
+ for (int block = 0; block < avpkt->size / avctx->block_align; block++) {
+ for (channel = 0; channel < avctx->channels; channel++) {
+ int control, shift;
- samples = samples_p[channel];
- cs = c->status + channel;
+ samples = samples_p[channel] + block * 32;
+ cs = c->status + channel;
- /* Get the control byte and decode the samples, 2 at a time. */
- control = bytestream2_get_byteu(&gb);
- shift = (control >> 4) + 2;
+ /* Get the control byte and decode the samples, 2 at a time. */
+ control = bytestream2_get_byteu(&gb);
+ shift = (control >> 4) + 2;
- for (n = 0; n < nb_samples / 2; n++) {
- int sample = bytestream2_get_byteu(&gb);
- *samples++ = adpcm_argo_expand_nibble(cs, sign_extend(sample >> 4, 4), control, shift);
- *samples++ = adpcm_argo_expand_nibble(cs, sign_extend(sample >> 0, 4), control, shift);
+ for (n = 0; n < 16; n++) {
+ int sample = bytestream2_get_byteu(&gb);
+ *samples++ = ff_adpcm_argo_expand_nibble(cs, sample >> 4, shift, control & 0x04);
+ *samples++ = ff_adpcm_argo_expand_nibble(cs, sample >> 0, shift, control & 0x04);
+ }
}
}
break;
case AV_CODEC_ID_ADPCM_ZORK:
- if (!c->has_status) {
- for (channel = 0; channel < avctx->channels; channel++) {
- c->status[channel].predictor = 0;
- c->status[channel].step_index = 0;
- }
- c->has_status = 1;
- }
for (n = 0; n < nb_samples * avctx->channels; n++) {
int v = bytestream2_get_byteu(&gb);
*samples++ = adpcm_zork_expand_nibble(&c->status[n % avctx->channels], v);
/**
 * Reset the decoder to a clean post-init state.
 *
 * Wipes the whole private context, then re-applies per-codec initial
 * predictor/step state (read from extradata where the container supplies
 * it). Codecs handled in the switch have a fully known initial state, so
 * has_status is set to 1; all others re-derive their state while decoding
 * and are left with has_status = 0.
 */
static void adpcm_flush(AVCodecContext *avctx)
{
    ADPCMDecodeContext *c = avctx->priv_data;

    /* Just nuke the entire state and re-init. */
    memset(c, 0, sizeof(ADPCMDecodeContext));

    switch(avctx->codec_id) {
    case AV_CODEC_ID_ADPCM_CT:
        c->status[0].step = c->status[1].step = 511;
        break;

    case AV_CODEC_ID_ADPCM_IMA_APC:
        if (avctx->extradata && avctx->extradata_size >= 8) {
            c->status[0].predictor = av_clip_intp2(AV_RL32(avctx->extradata    ), 18);
            c->status[1].predictor = av_clip_intp2(AV_RL32(avctx->extradata + 4), 18);
        }
        break;

    case AV_CODEC_ID_ADPCM_IMA_APM:
        /* Two extradata layouts exist; the 28-byte one stores channel 0 at
           offset 16/20 and channel 1 at 4/8, the 16-byte one is sequential. */
        if (avctx->extradata) {
            if (avctx->extradata_size >= 28) {
                c->status[0].predictor  = av_clip_intp2(AV_RL32(avctx->extradata + 16), 18);
                c->status[0].step_index = av_clip(AV_RL32(avctx->extradata + 20), 0, 88);
                c->status[1].predictor  = av_clip_intp2(AV_RL32(avctx->extradata + 4), 18);
                c->status[1].step_index = av_clip(AV_RL32(avctx->extradata + 8), 0, 88);
            } else if (avctx->extradata_size >= 16) {
                c->status[0].predictor  = av_clip_intp2(AV_RL32(avctx->extradata + 0), 18);
                c->status[0].step_index = av_clip(AV_RL32(avctx->extradata + 4), 0, 88);
                c->status[1].predictor  = av_clip_intp2(AV_RL32(avctx->extradata + 8), 18);
                c->status[1].step_index = av_clip(AV_RL32(avctx->extradata + 12), 0, 88);
            }
        }
        break;

    case AV_CODEC_ID_ADPCM_IMA_WS:
        if (avctx->extradata && avctx->extradata_size >= 2)
            c->vqa_version = AV_RL16(avctx->extradata);
        break;
    default:
        /* Other codecs may want to handle this during decoding. */
        c->has_status = 0;
        return;
    }

    c->has_status = 1;
}
AV_SAMPLE_FMT_NONE };
#define ADPCM_DECODER(id_, sample_fmts_, name_, long_name_) \
-AVCodec ff_ ## name_ ## _decoder = { \
+const AVCodec ff_ ## name_ ## _decoder = { \
.name = #name_, \
.long_name = NULL_IF_CONFIG_SMALL(long_name_), \
.type = AVMEDIA_TYPE_AUDIO, \
.flush = adpcm_flush, \
.capabilities = AV_CODEC_CAP_DR1, \
.sample_fmts = sample_fmts_, \
+ .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE, \
}
/* Note: Do not forget to add new entries to the Makefile as well. */
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_AMV, sample_fmts_s16, adpcm_ima_amv, "ADPCM IMA AMV");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_APC, sample_fmts_s16, adpcm_ima_apc, "ADPCM IMA CRYO APC");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_APM, sample_fmts_s16, adpcm_ima_apm, "ADPCM IMA Ubisoft APM");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_CUNNING, sample_fmts_s16p, adpcm_ima_cunning, "ADPCM IMA Cunning Developments");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_DAT4, sample_fmts_s16, adpcm_ima_dat4, "ADPCM IMA Eurocom DAT4");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_DK3, sample_fmts_s16, adpcm_ima_dk3, "ADPCM IMA Duck DK3");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_DK4, sample_fmts_s16, adpcm_ima_dk4, "ADPCM IMA Duck DK4");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_EA_EACS, sample_fmts_s16, adpcm_ima_ea_eacs, "ADPCM IMA Electronic Arts EACS");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_EA_SEAD, sample_fmts_s16, adpcm_ima_ea_sead, "ADPCM IMA Electronic Arts SEAD");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_ISS, sample_fmts_s16, adpcm_ima_iss, "ADPCM IMA Funcom ISS");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_MOFLEX, sample_fmts_s16p, adpcm_ima_moflex, "ADPCM IMA MobiClip MOFLEX");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_MTF, sample_fmts_s16, adpcm_ima_mtf, "ADPCM IMA Capcom's MT Framework");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_OKI, sample_fmts_s16, adpcm_ima_oki, "ADPCM IMA Dialogic OKI");
ADPCM_DECODER(AV_CODEC_ID_ADPCM_IMA_QT, sample_fmts_s16p, adpcm_ima_qt, "ADPCM IMA QuickTime");