Merge commit 'e3db34291f4401a16f6ac92721617a9f33cd4c31'

* commit 'e3db34291f4401a16f6ac92721617a9f33cd4c31':
  amrnb: decode directly to the user-provided AVFrame
  als: decode directly to the user-provided AVFrame
  alac: decode directly to the user-provided AVFrame
  adxenc: alloc/free coded_frame instead of keeping it in the ADXContext
  adx: decode directly to the user-provided AVFrame

Conflicts:
	libavcodec/alsdec.c
	libavcodec/amrnbdec.c

Merged-by: Michael Niedermayer <michaelni@gmx.at>
Michael Niedermayer 2013-02-13 11:36:28 +01:00
commit d13f434dbb
6 changed files with 43 additions and 49 deletions
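The four decoder commits being merged make the same mechanical change: instead of embedding an AVFrame in the codec's private context, wiring it up with avcodec_get_frame_defaults()/avctx->coded_frame in init and copying it to the caller with *(AVFrame *)data = ... at the end of every call, each decoder now treats the opaque data argument as the caller's AVFrame and decodes straight into it. A minimal sketch of the resulting decode-callback shape, assuming the 2013-era internal libavcodec API (two-argument ff_get_buffer() from internal.h); MyContext and samples_per_packet are hypothetical and not part of this merge:

/* Sketch only: assumes the libavcodec tree ("avcodec.h", "internal.h");
 * MyContext and the sample handling are illustrative, not from this merge. */
typedef struct MyContext {
    int samples_per_packet;
} MyContext;

static int my_decode_frame(AVCodecContext *avctx, void *data,
                           int *got_frame_ptr, AVPacket *avpkt)
{
    MyContext *s   = avctx->priv_data;
    AVFrame *frame = data;                 /* caller-provided output frame */
    int ret;

    frame->nb_samples = s->samples_per_packet;
    if ((ret = ff_get_buffer(avctx, frame)) < 0) {   /* buffer lands in the caller's frame */
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return ret;
    }

    /* ... decode avpkt->data into frame->extended_data[] ... */

    *got_frame_ptr = 1;   /* no *(AVFrame *)data = s->frame; copy-out anymore */
    return avpkt->size;
}

The adxenc commit is the encoder-side counterpart of the same cleanup; see the note after its hunks below.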

libavcodec/adx.h

@@ -40,7 +40,6 @@ typedef struct ADXChannelState {
 } ADXChannelState;
 typedef struct ADXContext {
-    AVFrame frame;
     int channels;
     ADXChannelState prev[2];
     int header_parsed;

libavcodec/adxdec.c

@@ -52,9 +52,6 @@ static av_cold int adx_decode_init(AVCodecContext *avctx)
     avctx->sample_fmt = AV_SAMPLE_FMT_S16P;
-    avcodec_get_frame_defaults(&c->frame);
-    avctx->coded_frame = &c->frame;
     return 0;
 }
@@ -98,6 +95,7 @@ static int adx_decode(ADXContext *c, int16_t *out, int offset,
 static int adx_decode_frame(AVCodecContext *avctx, void *data,
                             int *got_frame_ptr, AVPacket *avpkt)
 {
+    AVFrame *frame = data;
     int buf_size = avpkt->size;
     ADXContext *c = avctx->priv_data;
     int16_t **samples;
@@ -143,12 +141,12 @@ static int adx_decode_frame(AVCodecContext *avctx, void *data,
     }
     /* get output buffer */
-    c->frame.nb_samples = num_blocks * BLOCK_SAMPLES;
-    if ((ret = ff_get_buffer(avctx, &c->frame)) < 0) {
+    frame->nb_samples = num_blocks * BLOCK_SAMPLES;
+    if ((ret = ff_get_buffer(avctx, frame)) < 0) {
         av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
         return ret;
     }
-    samples = (int16_t **)c->frame.extended_data;
+    samples = (int16_t **)frame->extended_data;
     samples_offset = 0;
     while (num_blocks--) {
@@ -164,8 +162,7 @@ static int adx_decode_frame(AVCodecContext *avctx, void *data,
         samples_offset += BLOCK_SAMPLES;
     }
-    *got_frame_ptr   = 1;
-    *(AVFrame *)data = c->frame;
+    *got_frame_ptr = 1;
     return buf - avpkt->data;
 }

libavcodec/adxenc.c

@@ -107,6 +107,14 @@ static int adx_encode_header(AVCodecContext *avctx, uint8_t *buf, int bufsize)
     return HEADER_SIZE;
 }
+#if FF_API_OLD_ENCODE_AUDIO
+static av_cold int adx_encode_close(AVCodecContext *avctx)
+{
+    av_freep(&avctx->coded_frame);
+    return 0;
+}
+#endif
 static av_cold int adx_encode_init(AVCodecContext *avctx)
 {
     ADXContext *c = avctx->priv_data;
@@ -118,8 +126,8 @@ static av_cold int adx_encode_init(AVCodecContext *avctx)
     avctx->frame_size = BLOCK_SAMPLES;
 #if FF_API_OLD_ENCODE_AUDIO
-    avcodec_get_frame_defaults(&c->frame);
-    avctx->coded_frame = &c->frame;
+    if (!(avctx->coded_frame = avcodec_alloc_frame()))
+        return AVERROR(ENOMEM);
 #endif
     /* the cutoff can be adjusted, but this seems to work pretty well */
@@ -167,6 +175,9 @@ AVCodec ff_adpcm_adx_encoder = {
     .id = AV_CODEC_ID_ADPCM_ADX,
     .priv_data_size = sizeof(ADXContext),
     .init = adx_encode_init,
+#if FF_API_OLD_ENCODE_AUDIO
+    .close = adx_encode_close,
+#endif
     .encode2 = adx_encode_frame,
     .sample_fmts = (const enum AVSampleFormat[]) { AV_SAMPLE_FMT_S16,
                                                    AV_SAMPLE_FMT_NONE },
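Unlike the decoders, the encoder cannot drop coded_frame entirely: it is still part of the deprecated FF_API_OLD_ENCODE_AUDIO interface, so once the AVFrame member is removed from ADXContext it has to be heap-allocated instead. The hunks above allocate it in init with avcodec_alloc_frame() (the allocator of this era) and add a .close callback to release it; reduced to its essentials, with hypothetical my_* names:

#if FF_API_OLD_ENCODE_AUDIO
static av_cold int my_encode_close(AVCodecContext *avctx)
{
    av_freep(&avctx->coded_frame);   /* release what init allocated */
    return 0;
}
#endif

static av_cold int my_encode_init(AVCodecContext *avctx)
{
#if FF_API_OLD_ENCODE_AUDIO
    /* keep a heap-allocated coded_frame alive for old-API users */
    if (!(avctx->coded_frame = avcodec_alloc_frame()))
        return AVERROR(ENOMEM);
#endif
    return 0;
}

Without the new .close callback the frame allocated in init would leak when the codec is freed, which is why the AVCodec entry gains a .close field under the same #if guard.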

libavcodec/alac.c

@@ -58,7 +58,6 @@
 typedef struct {
     AVCodecContext *avctx;
-    AVFrame frame;
     GetBitContext gb;
     int channels;
@@ -254,7 +253,7 @@ static void append_extra_bits(int32_t *buffer[2], int32_t *extra_bits_buffer[2],
             buffer[ch][i] = (buffer[ch][i] << extra_bits) | extra_bits_buffer[ch][i];
 }
-static int decode_element(AVCodecContext *avctx, void *data, int ch_index,
+static int decode_element(AVCodecContext *avctx, AVFrame *frame, int ch_index,
                           int channels)
 {
     ALACContext *alac = avctx->priv_data;
@@ -289,8 +288,8 @@ static int decode_element(AVCodecContext *avctx, void *data, int ch_index,
     }
     if (!alac->nb_samples) {
         /* get output buffer */
-        alac->frame.nb_samples = output_samples;
-        if ((ret = ff_get_buffer(avctx, &alac->frame)) < 0) {
+        frame->nb_samples = output_samples;
+        if ((ret = ff_get_buffer(avctx, frame)) < 0) {
             av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
             return ret;
         }
@@ -302,7 +301,7 @@ static int decode_element(AVCodecContext *avctx, void *data, int ch_index,
     alac->nb_samples = output_samples;
     if (alac->direct_output) {
         for (ch = 0; ch < channels; ch++)
-            alac->output_samples_buffer[ch] = (int32_t *)alac->frame.extended_data[ch_index + ch];
+            alac->output_samples_buffer[ch] = (int32_t *)frame->extended_data[ch_index + ch];
     }
     if (is_compressed) {
@@ -390,7 +389,7 @@ static int decode_element(AVCodecContext *avctx, void *data, int ch_index,
         switch(alac->sample_size) {
         case 16: {
             for (ch = 0; ch < channels; ch++) {
-                int16_t *outbuffer = (int16_t *)alac->frame.extended_data[ch_index + ch];
+                int16_t *outbuffer = (int16_t *)frame->extended_data[ch_index + ch];
                 for (i = 0; i < alac->nb_samples; i++)
                     *outbuffer++ = alac->output_samples_buffer[ch][i];
             }}
@@ -405,7 +404,7 @@ static int decode_element(AVCodecContext *avctx, void *data, int ch_index,
     }else{
         switch(alac->sample_size) {
         case 16: {
-            int16_t *outbuffer = ((int16_t *)alac->frame.extended_data[0]) + ch_index;
+            int16_t *outbuffer = ((int16_t *)frame->extended_data[0]) + ch_index;
             for (i = 0; i < alac->nb_samples; i++) {
                 for (ch = 0; ch < channels; ch++)
                     *outbuffer++ = alac->output_samples_buffer[ch][i];
@@ -414,7 +413,7 @@ static int decode_element(AVCodecContext *avctx, void *data, int ch_index,
             }
             break;
         case 24: {
-            int32_t *outbuffer = ((int32_t *)alac->frame.extended_data[0]) + ch_index;
+            int32_t *outbuffer = ((int32_t *)frame->extended_data[0]) + ch_index;
             for (i = 0; i < alac->nb_samples; i++) {
                 for (ch = 0; ch < channels; ch++)
                     *outbuffer++ = alac->output_samples_buffer[ch][i] << 8;
@@ -423,7 +422,7 @@ static int decode_element(AVCodecContext *avctx, void *data, int ch_index,
             }
             break;
         case 32: {
-            int32_t *outbuffer = ((int32_t *)alac->frame.extended_data[0]) + ch_index;
+            int32_t *outbuffer = ((int32_t *)frame->extended_data[0]) + ch_index;
             for (i = 0; i < alac->nb_samples; i++) {
                 for (ch = 0; ch < channels; ch++)
                     *outbuffer++ = alac->output_samples_buffer[ch][i];
@@ -441,6 +440,7 @@ static int alac_decode_frame(AVCodecContext *avctx, void *data,
                              int *got_frame_ptr, AVPacket *avpkt)
 {
     ALACContext *alac = avctx->priv_data;
+    AVFrame *frame = data;
     enum AlacRawDataBlockType element;
     int channels;
     int ch, ret, got_end;
@@ -469,7 +469,7 @@ static int alac_decode_frame(AVCodecContext *avctx, void *data,
             return AVERROR_INVALIDDATA;
         }
-        ret = decode_element(avctx, data,
+        ret = decode_element(avctx, frame,
                              ff_alac_channel_layout_offsets[alac->channels - 1][ch],
                              channels);
         if (ret < 0 && get_bits_left(&alac->gb))
@@ -487,8 +487,7 @@ static int alac_decode_frame(AVCodecContext *avctx, void *data,
                avpkt->size * 8 - get_bits_count(&alac->gb));
     }
-    *got_frame_ptr   = 1;
-    *(AVFrame *)data = alac->frame;
+    *got_frame_ptr = 1;
     return avpkt->size;
 }
@@ -616,9 +615,6 @@ static av_cold int alac_decode_init(AVCodecContext * avctx)
         return ret;
     }
-    avcodec_get_frame_defaults(&alac->frame);
-    avctx->coded_frame = &alac->frame;
     return 0;
 }

libavcodec/alsdec.c

@@ -192,7 +192,6 @@ typedef struct {
 typedef struct {
     AVCodecContext *avctx;
-    AVFrame frame;
     ALSSpecificConfig sconf;
     GetBitContext gb;
     DSPContext dsp;
@@ -1450,6 +1449,7 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame_ptr,
                         AVPacket *avpkt)
 {
     ALSDecContext *ctx = avctx->priv_data;
+    AVFrame *frame = data;
     ALSSpecificConfig *sconf = &ctx->sconf;
     const uint8_t *buffer = avpkt->data;
     int buffer_size = avpkt->size;
@@ -1479,8 +1479,8 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame_ptr,
     ctx->frame_id++;
     /* get output buffer */
-    ctx->frame.nb_samples = ctx->cur_frame_length;
-    if ((ret = ff_get_buffer(avctx, &ctx->frame)) < 0) {
+    frame->nb_samples = ctx->cur_frame_length;
+    if ((ret = ff_get_buffer(avctx, frame)) < 0) {
         av_log(avctx, AV_LOG_ERROR, "get_buffer() failed.\n");
         return ret;
     }
@@ -1488,7 +1488,7 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame_ptr,
     // transform decoded frame into output format
 #define INTERLEAVE_OUTPUT(bps) \
 { \
-    int##bps##_t *dest = (int##bps##_t*)ctx->frame.data[0]; \
+    int##bps##_t *dest = (int##bps##_t*)frame->data[0]; \
     shift = bps - ctx->avctx->bits_per_raw_sample; \
     if (!ctx->cs_switch) { \
         for (sample = 0; sample < ctx->cur_frame_length; sample++) \
@@ -1512,7 +1512,7 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame_ptr,
         int swap = HAVE_BIGENDIAN != sconf->msb_first;
         if (ctx->avctx->bits_per_raw_sample == 24) {
-            int32_t *src = (int32_t *)ctx->frame.data[0];
+            int32_t *src = (int32_t *)frame->data[0];
             for (sample = 0;
                  sample < ctx->cur_frame_length * avctx->channels;
@@ -1533,7 +1533,7 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame_ptr,
         if (swap) {
             if (ctx->avctx->bits_per_raw_sample <= 16) {
-                int16_t *src = (int16_t*) ctx->frame.data[0];
+                int16_t *src = (int16_t*) frame->data[0];
                 int16_t *dest = (int16_t*) ctx->crc_buffer;
                 for (sample = 0;
                      sample < ctx->cur_frame_length * avctx->channels;
@@ -1541,12 +1541,12 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame_ptr,
                     *dest++ = av_bswap16(src[sample]);
             } else {
                 ctx->dsp.bswap_buf((uint32_t*)ctx->crc_buffer,
-                                   (uint32_t *)ctx->frame.data[0],
+                                   (uint32_t *)frame->data[0],
                                    ctx->cur_frame_length * avctx->channels);
             }
             crc_source = ctx->crc_buffer;
         } else {
-            crc_source = ctx->frame.data[0];
+            crc_source = frame->data[0];
         }
         ctx->crc = av_crc(ctx->crc_table, ctx->crc, crc_source,
@@ -1562,9 +1562,7 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame_ptr,
         }
     }
-    *got_frame_ptr   = 1;
-    *(AVFrame *)data = ctx->frame;
+    *got_frame_ptr = 1;
     bytes_read = invalid_frame ? buffer_size :
                  (get_bits_count(&ctx->gb) + 7) >> 3;
@@ -1761,9 +1759,6 @@ static av_cold int decode_init(AVCodecContext *avctx)
     ff_dsputil_init(&ctx->dsp, avctx);
-    avcodec_get_frame_defaults(&ctx->frame);
-    avctx->coded_frame = &ctx->frame;
     return 0;
 }

libavcodec/amrnbdec.c

@@ -98,7 +98,6 @@
 #define AMR_AGC_ALPHA 0.9
 typedef struct AMRContext {
-    AVFrame avframe; ///< AVFrame for decoded samples
     AMRNBFrame frame; ///< decoded AMR parameters (lsf coefficients, codebook indexes, etc)
     uint8_t bad_frame_indicator; ///< bad frame ? 1 : 0
     enum Mode cur_frame_mode;
@@ -185,9 +184,6 @@ static av_cold int amrnb_decode_init(AVCodecContext *avctx)
     for (i = 0; i < 4; i++)
         p->prediction_error[i] = MIN_ENERGY;
-    avcodec_get_frame_defaults(&p->avframe);
-    avctx->coded_frame = &p->avframe;
     ff_acelp_filter_init(&p->acelpf_ctx);
     ff_acelp_vectors_init(&p->acelpv_ctx);
     ff_celp_filter_init(&p->celpf_ctx);
@@ -954,6 +950,7 @@ static int amrnb_decode_frame(AVCodecContext *avctx, void *data,
 {
     AMRContext *p = avctx->priv_data; // pointer to private data
+    AVFrame *frame = data;
     const uint8_t *buf = avpkt->data;
     int buf_size = avpkt->size;
     float *buf_out; // pointer to the output data buffer
@@ -965,12 +962,12 @@ static int amrnb_decode_frame(AVCodecContext *avctx, void *data,
     const float *synth_fixed_vector; // pointer to the fixed vector that synthesis should use
     /* get output buffer */
-    p->avframe.nb_samples = AMR_BLOCK_SIZE;
-    if ((ret = ff_get_buffer(avctx, &p->avframe)) < 0) {
+    frame->nb_samples = AMR_BLOCK_SIZE;
+    if ((ret = ff_get_buffer(avctx, frame)) < 0) {
         av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
         return ret;
     }
-    buf_out = (float *)p->avframe.data[0];
+    buf_out = (float *)frame->data[0];
     p->cur_frame_mode = unpack_bitstream(p, buf, buf_size);
     if (p->cur_frame_mode == NO_DATA) {
@@ -1078,8 +1075,7 @@ static int amrnb_decode_frame(AVCodecContext *avctx, void *data,
     p->acelpv_ctx.weighted_vector_sumf(p->lsf_avg, p->lsf_avg, p->lsf_q[3],
                                        0.84, 0.16, LP_FILTER_ORDER);
-    *got_frame_ptr   = 1;
-    *(AVFrame *)data = p->avframe;
+    *got_frame_ptr = 1;
     /* return the amount of bytes consumed if everything was OK */
     return frame_sizes_nb[p->cur_frame_mode] + 1; // +7 for rounding and +8 for TOC