X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=libavcodec%2Fatrac1.c;h=9a897852a7a2897bd6bd45b6ed4315700cdf63e5;hb=849b9d34c7ef70b370c53e7af3940f51cbc07d0f;hp=4de1dcea04bb84bb28d34772d7fa302ba51f7ca0;hpb=d36beb3f6902b1217beda576aa18abf7eb72b03c;p=ffmpeg

diff --git a/libavcodec/atrac1.c b/libavcodec/atrac1.c
index 4de1dcea04b..9a897852a7a 100644
--- a/libavcodec/atrac1.c
+++ b/libavcodec/atrac1.c
@@ -1,28 +1,28 @@
 /*
- * Atrac 1 compatible decoder
+ * ATRAC1 compatible decoder
  * Copyright (c) 2009 Maxim Poliakovski
  * Copyright (c) 2009 Benjamin Larsson
  *
- * This file is part of FFmpeg.
+ * This file is part of Libav.
  *
- * FFmpeg is free software; you can redistribute it and/or
+ * Libav is free software; you can redistribute it and/or
  * modify it under the terms of the GNU Lesser General Public
  * License as published by the Free Software Foundation; either
  * version 2.1 of the License, or (at your option) any later version.
  *
- * FFmpeg is distributed in the hope that it will be useful,
+ * Libav is distributed in the hope that it will be useful,
  * but WITHOUT ANY WARRANTY; without even the implied warranty of
  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  * Lesser General Public License for more details.
  *
  * You should have received a copy of the GNU Lesser General Public
- * License along with FFmpeg; if not, write to the Free Software
+ * License along with Libav; if not, write to the Free Software
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  */
 
 /**
  * @file
- * Atrac 1 compatible decoder.
+ * ATRAC1 compatible decoder.
  * This decoder handles raw ATRAC1 data and probably SDDS data.
  */
 
@@ -32,10 +32,12 @@
 #include <stddef.h>
 #include <stdio.h>
 
+#include "libavutil/float_dsp.h"
 #include "avcodec.h"
 #include "get_bits.h"
-#include "dsputil.h"
 #include "fft.h"
+#include "internal.h"
+#include "sinewin.h"
 #include "atrac.h"
 #include "atrac1data.h"
 
@@ -59,11 +61,11 @@ typedef struct {
     int                 log2_block_count[AT1_QMF_BANDS];    ///< log2 number of blocks in a band
     int                 num_bfus;                           ///< number of Block Floating Units
     float*              spectrum[2];
-    DECLARE_ALIGNED(16, float, spec1)[AT1_SU_SAMPLES];      ///< mdct buffer
-    DECLARE_ALIGNED(16, float, spec2)[AT1_SU_SAMPLES];      ///< mdct buffer
-    DECLARE_ALIGNED(16, float, fst_qmf_delay)[46];          ///< delay line for the 1st stacked QMF filter
-    DECLARE_ALIGNED(16, float, snd_qmf_delay)[46];          ///< delay line for the 2nd stacked QMF filter
-    DECLARE_ALIGNED(16, float, last_qmf_delay)[256+23];     ///< delay line for the last stacked QMF filter
+    DECLARE_ALIGNED(32, float, spec1)[AT1_SU_SAMPLES];      ///< mdct buffer
+    DECLARE_ALIGNED(32, float, spec2)[AT1_SU_SAMPLES];      ///< mdct buffer
+    DECLARE_ALIGNED(32, float, fst_qmf_delay)[46];          ///< delay line for the 1st stacked QMF filter
+    DECLARE_ALIGNED(32, float, snd_qmf_delay)[46];          ///< delay line for the 2nd stacked QMF filter
+    DECLARE_ALIGNED(32, float, last_qmf_delay)[256+23];     ///< delay line for the last stacked QMF filter
 } AT1SUCtx;
 
 /**
@@ -71,16 +73,14 @@ typedef struct {
  */
 typedef struct {
     AT1SUCtx            SUs[AT1_MAX_CHANNELS];  ///< channel sound unit
-    DECLARE_ALIGNED(16, float, spec)[AT1_SU_SAMPLES];       ///< the mdct spectrum buffer
+    DECLARE_ALIGNED(32, float, spec)[AT1_SU_SAMPLES];       ///< the mdct spectrum buffer
 
-    DECLARE_ALIGNED(16, float, low)[256];
-    DECLARE_ALIGNED(16, float, mid)[256];
-    DECLARE_ALIGNED(16, float, high)[512];
+    DECLARE_ALIGNED(32, float, low)[256];
+    DECLARE_ALIGNED(32, float, mid)[256];
+    DECLARE_ALIGNED(32, float, high)[512];
     float*              bands[3];
-    DECLARE_ALIGNED(16, float, out_samples)[AT1_MAX_CHANNELS][AT1_SU_SAMPLES];
     FFTContext          mdct_ctx[3];
-    int                 channels;
-    DSPContext          dsp;
+    AVFloatDSPContext   fdsp;
 } AT1Ctx;
 
 /** size of the transform in samples in the long mode for each QMF band */
@@ -99,7 +99,7 @@ static void at1_imdct(AT1Ctx *q, float *spec, float *out, int nbits,
         for (i = 0; i < transf_size / 2; i++)
             FFSWAP(float, spec[i], spec[transf_size - 1 - i]);
     }
-    ff_imdct_half(mdct_context, out, spec);
+    mdct_context->imdct_half(mdct_context, out, spec);
 }
 
 
@@ -128,7 +128,7 @@ static int at1_imdct_block(AT1SUCtx* su, AT1Ctx *q)
             nbits = mdct_long_nbits[band_num] - log2_block_count;
 
             if (nbits != 5 && nbits != 7 && nbits != 8)
-                return -1;
+                return AVERROR_INVALIDDATA;
         } else {
             block_size = 32;
             nbits = 5;
@@ -140,8 +140,8 @@ static int at1_imdct_block(AT1SUCtx* su, AT1Ctx *q)
             at1_imdct(q, &q->spec[pos], &su->spectrum[0][ref_pos + start_pos], nbits, band_num);
 
             /* overlap and window */
-            q->dsp.vector_fmul_window(&q->bands[band_num][start_pos], prev_buf,
-                                       &su->spectrum[0][ref_pos + start_pos], ff_sine_32, 0, 16);
+            q->fdsp.vector_fmul_window(&q->bands[band_num][start_pos], prev_buf,
+                                       &su->spectrum[0][ref_pos + start_pos], ff_sine_32, 16);
 
             prev_buf = &su->spectrum[0][ref_pos+start_pos + 16];
             start_pos += block_size;
@@ -172,14 +172,14 @@ static int at1_parse_bsm(GetBitContext* gb, int log2_block_cnt[AT1_QMF_BANDS])
         /* low and mid band */
         log2_block_count_tmp = get_bits(gb, 2);
         if (log2_block_count_tmp & 1)
-            return -1;
+            return AVERROR_INVALIDDATA;
         log2_block_cnt[i] = 2 - log2_block_count_tmp;
     }
 
     /* high band */
     log2_block_count_tmp = get_bits(gb, 2);
     if (log2_block_count_tmp != 0 && log2_block_count_tmp != 3)
-        return -1;
+        return AVERROR_INVALIDDATA;
     log2_block_cnt[IDX_HIGH_BAND] = 3 - log2_block_count_tmp;
 
     skip_bits(gb, 2);
@@ -228,7 +228,7 @@ static int at1_unpack_dequant(GetBitContext* gb, AT1SUCtx* su,
 
         /* check for bitstream overflow */
         if (bits_used > AT1_SU_MAX_BITS)
-            return -1;
+            return AVERROR_INVALIDDATA;
 
         /* get the position of the 1st spec according to the block size mode */
         pos = su->log2_block_count[band_num] ? bfu_start_short[bfu_num] : bfu_start_long[bfu_num];
@@ -258,34 +258,41 @@ static void at1_subband_synthesis(AT1Ctx *q, AT1SUCtx* su, float *pOut)
     float iqmf_temp[512 + 46];
 
     /* combine low and middle bands */
-    atrac_iqmf(q->bands[0], q->bands[1], 128, temp, su->fst_qmf_delay, iqmf_temp);
+    ff_atrac_iqmf(q->bands[0], q->bands[1], 128, temp, su->fst_qmf_delay, iqmf_temp);
 
     /* delay the signal of the high band by 23 samples */
     memcpy( su->last_qmf_delay,    &su->last_qmf_delay[256], sizeof(float) * 23);
     memcpy(&su->last_qmf_delay[23], q->bands[2],             sizeof(float) * 256);
 
     /* combine (low + middle) and high bands */
-    atrac_iqmf(temp, su->last_qmf_delay, 256, pOut, su->snd_qmf_delay, iqmf_temp);
+    ff_atrac_iqmf(temp, su->last_qmf_delay, 256, pOut, su->snd_qmf_delay, iqmf_temp);
 }
 
 
 static int atrac1_decode_frame(AVCodecContext *avctx, void *data,
-                               int *data_size, AVPacket *avpkt)
+                               int *got_frame_ptr, AVPacket *avpkt)
 {
+    AVFrame *frame     = data;
     const uint8_t *buf = avpkt->data;
     int buf_size       = avpkt->size;
     AT1Ctx *q          = avctx->priv_data;
-    int ch, ret, i;
+    int ch, ret;
     GetBitContext gb;
-    float* samples = data;
 
 
-    if (buf_size < 212 * q->channels) {
-        av_log(q,AV_LOG_ERROR,"Not enought data to decode!\n");
-        return -1;
+    if (buf_size < 212 * avctx->channels) {
+        av_log(avctx, AV_LOG_ERROR, "Not enough data to decode!\n");
+        return AVERROR_INVALIDDATA;
     }
 
-    for (ch = 0; ch < q->channels; ch++) {
+    /* get output buffer */
+    frame->nb_samples = AT1_SU_SAMPLES;
+    if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) {
+        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
+        return ret;
+    }
+
+    for (ch = 0; ch < avctx->channels; ch++) {
         AT1SUCtx* su = &q->SUs[ch];
 
         init_get_bits(&gb, &buf[212 * ch], 212 * 8);
@@ -302,44 +309,54 @@ static int atrac1_decode_frame(AVCodecContext *avctx, void *data,
         ret = at1_imdct_block(su, q);
         if (ret < 0)
             return ret;
-        at1_subband_synthesis(q, su, q->out_samples[ch]);
+        at1_subband_synthesis(q, su, (float *)frame->extended_data[ch]);
     }
 
-    /* interleave; FIXME, should create/use a DSP function */
-    if (q->channels == 1) {
-        /* mono */
-        memcpy(samples, q->out_samples[0], AT1_SU_SAMPLES * 4);
-    } else {
-        /* stereo */
-        for (i = 0; i < AT1_SU_SAMPLES; i++) {
-            samples[i * 2]     = q->out_samples[0][i];
-            samples[i * 2 + 1] = q->out_samples[1][i];
-        }
-    }
+    *got_frame_ptr = 1;
 
-    *data_size = q->channels * AT1_SU_SAMPLES * sizeof(*samples);
     return avctx->block_align;
 }
 
 
+static av_cold int atrac1_decode_end(AVCodecContext * avctx)
+{
+    AT1Ctx *q = avctx->priv_data;
+
+    ff_mdct_end(&q->mdct_ctx[0]);
+    ff_mdct_end(&q->mdct_ctx[1]);
+    ff_mdct_end(&q->mdct_ctx[2]);
+
+    return 0;
+}
+
+
 static av_cold int atrac1_decode_init(AVCodecContext *avctx)
 {
     AT1Ctx *q = avctx->priv_data;
+    int ret;
 
-    avctx->sample_fmt = AV_SAMPLE_FMT_FLT;
+    avctx->sample_fmt = AV_SAMPLE_FMT_FLTP;
 
-    q->channels = avctx->channels;
+    if (avctx->channels < 1 || avctx->channels > AT1_MAX_CHANNELS) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels: %d\n",
+               avctx->channels);
+        return AVERROR(EINVAL);
+    }
 
     /* Init the mdct transforms */
-    ff_mdct_init(&q->mdct_ctx[0], 6, 1, -1.0/ (1 << 15));
-    ff_mdct_init(&q->mdct_ctx[1], 8, 1, -1.0/ (1 << 15));
-    ff_mdct_init(&q->mdct_ctx[2], 9, 1, -1.0/ (1 << 15));
+    if ((ret = ff_mdct_init(&q->mdct_ctx[0], 6, 1, -1.0/ (1 << 15))) ||
+        (ret = ff_mdct_init(&q->mdct_ctx[1], 8, 1, -1.0/ (1 << 15))) ||
+        (ret = ff_mdct_init(&q->mdct_ctx[2], 9, 1, -1.0/ (1 << 15)))) {
+        av_log(avctx, AV_LOG_ERROR, "Error initializing MDCT\n");
+        atrac1_decode_end(avctx);
+        return ret;
+    }
 
     ff_init_ff_sine_windows(5);
 
-    atrac_generate_tables();
+    ff_atrac_generate_tables();
 
-    dsputil_init(&q->dsp, avctx);
+    avpriv_float_dsp_init(&q->fdsp, avctx->flags & CODEC_FLAG_BITEXACT);
 
     q->bands[0] = q->low;
     q->bands[1] = q->mid;
@@ -355,23 +372,16 @@ static av_cold int atrac1_decode_init(AVCodecContext *avctx)
 }
 
 
-static av_cold int atrac1_decode_end(AVCodecContext * avctx) {
-    AT1Ctx *q = avctx->priv_data;
-
-    ff_mdct_end(&q->mdct_ctx[0]);
-    ff_mdct_end(&q->mdct_ctx[1]);
-    ff_mdct_end(&q->mdct_ctx[2]);
-    return 0;
-}
-
-
 AVCodec ff_atrac1_decoder = {
-    .name = "atrac1",
-    .type = AVMEDIA_TYPE_AUDIO,
-    .id = CODEC_ID_ATRAC1,
+    .name           = "atrac1",
+    .long_name      = NULL_IF_CONFIG_SMALL("ATRAC1 (Adaptive TRansform Acoustic Coding)"),
+    .type           = AVMEDIA_TYPE_AUDIO,
+    .id             = AV_CODEC_ID_ATRAC1,
     .priv_data_size = sizeof(AT1Ctx),
-    .init = atrac1_decode_init,
-    .close = atrac1_decode_end,
-    .decode = atrac1_decode_frame,
-    .long_name = NULL_IF_CONFIG_SMALL("Atrac 1 (Adaptive TRansform Acoustic Coding)"),
+    .init           = atrac1_decode_init,
+    .close          = atrac1_decode_end,
+    .decode         = atrac1_decode_frame,
+    .capabilities   = CODEC_CAP_DR1,
+    .sample_fmts    = (const enum AVSampleFormat[]) { AV_SAMPLE_FMT_FLTP,
+                                                      AV_SAMPLE_FMT_NONE },
 };