8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
};
+/** Pixel formats for plain (software) YUV 4:2:0 decoding; PIX_FMT_NONE-terminated. */
+const enum PixelFormat ff_pixfmt_list_420[] = {
+ PIX_FMT_YUV420P,
+ PIX_FMT_NONE
+};
+
+/**
+ * Pixel formats for 4:2:0 decoding where hardware acceleration is available:
+ * the VA-API format is listed before the software fallback; PIX_FMT_NONE-terminated.
+ */
+const enum PixelFormat ff_hwaccel_pixfmt_list_420[] = {
+ PIX_FMT_VAAPI_VLD,
+ PIX_FMT_YUV420P,
+ PIX_FMT_NONE
+};
const uint8_t *ff_find_start_code(const uint8_t * restrict p, const uint8_t *end, uint32_t * restrict state){
int i;
}
/* init common dct for both encoder and decoder */
-int ff_dct_common_init(MpegEncContext *s)
+av_cold int ff_dct_common_init(MpegEncContext *s)
{
s->dct_unquantize_h263_intra = dct_unquantize_h263_intra_c;
s->dct_unquantize_h263_inter = dct_unquantize_h263_inter_c;
dst->type= FF_BUFFER_TYPE_COPY;
}
+/**
+ * Releases a frame buffer
+ *
+ * Returns the picture's buffer through the codec context's
+ * release_buffer() callback and frees the per-picture hwaccel private
+ * data that alloc_frame_buffer() may have allocated.
+ */
+static void free_frame_buffer(MpegEncContext *s, Picture *pic)
+{
+ s->avctx->release_buffer(s->avctx, (AVFrame*)pic);
+ av_freep(&pic->hwaccel_picture_private);
+}
+
+/**
+ * Allocates a frame buffer
+ *
+ * If a hardware accelerator is active and declares per-picture private
+ * data, that data is allocated first; then a buffer is requested via the
+ * codec context's get_buffer() callback and its strides are validated.
+ * On any failure everything acquired so far is released again.
+ *
+ * @return 0 on success, -1 on failure
+ */
+static int alloc_frame_buffer(MpegEncContext *s, Picture *pic)
+{
+ int r;
+
+ if (s->avctx->hwaccel) {
+ assert(!pic->hwaccel_picture_private);
+ if (s->avctx->hwaccel->priv_data_size) {
+ /* zero-filled private state for the hwaccel; freed on any error below */
+ pic->hwaccel_picture_private = av_mallocz(s->avctx->hwaccel->priv_data_size);
+ if (!pic->hwaccel_picture_private) {
+ av_log(s->avctx, AV_LOG_ERROR, "alloc_frame_buffer() failed (hwaccel private data allocation)\n");
+ return -1;
+ }
+ }
+ }
+
+ r = s->avctx->get_buffer(s->avctx, (AVFrame*)pic);
+
+ /* a usable buffer must have age, type and data[0] set by get_buffer() */
+ if (r<0 || !pic->age || !pic->type || !pic->data[0]) {
+ av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed (%d %d %d %p)\n", r, pic->age, pic->type, pic->data[0]);
+ av_freep(&pic->hwaccel_picture_private);
+ return -1;
+ }
+
+ /* once strides are established for this context they must not change */
+ if (s->linesize && (s->linesize != pic->linesize[0] || s->uvlinesize != pic->linesize[1])) {
+ av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed (stride changed)\n");
+ free_frame_buffer(s, pic);
+ return -1;
+ }
+
+ /* both chroma planes are expected to share one stride */
+ if (pic->linesize[1] != pic->linesize[2]) {
+ av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed (uv stride mismatch)\n");
+ free_frame_buffer(s, pic);
+ return -1;
+ }
+
+ return 0;
+}
+
/**
* allocates a Picture
* The pixels are allocated/set by calling get_buffer() if shared=0
}else{
assert(!pic->data[0]);
- r= s->avctx->get_buffer(s->avctx, (AVFrame*)pic);
-
- if(r<0 || !pic->age || !pic->type || !pic->data[0]){
- av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed (%d %d %d %p)\n", r, pic->age, pic->type, pic->data[0]);
- return -1;
- }
-
- if(s->linesize && (s->linesize != pic->linesize[0] || s->uvlinesize != pic->linesize[1])){
- av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed (stride changed)\n");
- s->avctx->release_buffer(s->avctx, (AVFrame*)pic);
- return -1;
- }
-
- if(pic->linesize[1] != pic->linesize[2]){
- av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed (uv stride mismatch)\n");
- s->avctx->release_buffer(s->avctx, (AVFrame*)pic);
+ if (alloc_frame_buffer(s, pic) < 0)
return -1;
- }
s->linesize = pic->linesize[0];
s->uvlinesize= pic->linesize[1];
return 0;
fail: //for the CHECKED_ALLOCZ macro
if(r>=0)
- s->avctx->release_buffer(s->avctx, (AVFrame*)pic);
+ free_frame_buffer(s, pic);
return -1;
}
int i;
if(pic->data[0] && pic->type!=FF_BUFFER_TYPE_SHARED){
- s->avctx->release_buffer(s->avctx, (AVFrame*)pic);
+ free_frame_buffer(s, pic);
}
av_freep(&pic->mb_var);
s->block= s->blocks[0];
for(i=0;i<12;i++){
- s->pblocks[i] = (short *)(&s->block[i]);
+ s->pblocks[i] = &s->block[i];
}
return 0;
fail:
memcpy(dst, src, sizeof(MpegEncContext));
backup_duplicate_context(dst, &bak);
for(i=0;i<12;i++){
- dst->pblocks[i] = (short *)(&dst->block[i]);
+ dst->pblocks[i] = &dst->block[i];
}
//STOP_TIMER("update_duplicate_context") //about 10k cycles / 0.01 sec for 1000frames on 1ghz with 2 threads
}
* init common structure for both encoder and decoder.
* this assumes that some variables like width/height are already set
*/
-int MPV_common_init(MpegEncContext *s)
+av_cold int MPV_common_init(MpegEncContext *s)
{
int y_size, c_size, yc_size, i, mb_array_size, mv_table_size, x, y, threads;
+ if(s->codec_id == CODEC_ID_MPEG2VIDEO && !s->progressive_sequence)
+ s->mb_height = (s->height + 31) / 32 * 2;
+ else
s->mb_height = (s->height + 15) / 16;
+ if(s->avctx->pix_fmt == PIX_FMT_NONE){
+ av_log(s->avctx, AV_LOG_ERROR, "decoding to PIX_FMT_NONE is not supported.\n");
+ return -1;
+ }
+
if(s->avctx->thread_count > MAX_THREADS || (s->avctx->thread_count > s->mb_height && s->mb_height)){
av_log(s->avctx, AV_LOG_ERROR, "too many threads\n");
return -1;
/* mark&release old frames */
if (s->pict_type != FF_B_TYPE && s->last_picture_ptr && s->last_picture_ptr != s->next_picture_ptr && s->last_picture_ptr->data[0]) {
if(s->out_format != FMT_H264 || s->codec_id == CODEC_ID_SVQ3){
- avctx->release_buffer(avctx, (AVFrame*)s->last_picture_ptr);
+ free_frame_buffer(s, s->last_picture_ptr);
/* release forgotten pictures */
/* if(mpeg124/h263) */
for(i=0; i<MAX_PICTURE_COUNT; i++){
if(s->picture[i].data[0] && &s->picture[i] != s->next_picture_ptr && s->picture[i].reference){
av_log(avctx, AV_LOG_ERROR, "releasing zombie picture\n");
- avctx->release_buffer(avctx, (AVFrame*)&s->picture[i]);
+ free_frame_buffer(s, &s->picture[i]);
}
}
}
/* release non reference frames */
for(i=0; i<MAX_PICTURE_COUNT; i++){
if(s->picture[i].data[0] && !s->picture[i].reference /*&& s->picture[i].type!=FF_BUFFER_TYPE_SHARED*/){
- s->avctx->release_buffer(s->avctx, (AVFrame*)&s->picture[i]);
+ free_frame_buffer(s, &s->picture[i]);
}
}
update_noise_reduction(s);
}
-#if CONFIG_MPEG_XVMC_DECODER
- if(s->avctx->xvmc_acceleration)
+ if(CONFIG_MPEG_XVMC_DECODER && s->avctx->xvmc_acceleration)
return ff_xvmc_field_start(s, avctx);
-#endif
+
return 0;
}
{
int i;
/* draw edge for correct motion prediction if outside */
-#if CONFIG_MPEG_XVMC_DECODER
-//just to make sure that all data is rendered.
- if(s->avctx->xvmc_acceleration){
+ //just to make sure that all data is rendered.
+ if(CONFIG_MPEG_XVMC_DECODER && s->avctx->xvmc_acceleration){
ff_xvmc_field_end(s);
- }else
-#endif
- if(!(s->avctx->codec->capabilities&CODEC_CAP_HWACCEL_VDPAU)
+ }else if(!s->avctx->hwaccel
+ && !(s->avctx->codec->capabilities&CODEC_CAP_HWACCEL_VDPAU)
&& s->unrestricted_mv
&& s->current_picture.reference
&& !s->intra_only
/* release non-reference frames */
for(i=0; i<MAX_PICTURE_COUNT; i++){
if(s->picture[i].data[0] && !s->picture[i].reference /*&& s->picture[i].type!=FF_BUFFER_TYPE_SHARED*/){
- s->avctx->release_buffer(s->avctx, (AVFrame*)&s->picture[i]);
+ free_frame_buffer(s, &s->picture[i]);
}
}
}
*/
void ff_print_debug_info(MpegEncContext *s, AVFrame *pict){
- if(!pict || !pict->mb_type) return;
+ if(s->avctx->hwaccel || !pict || !pict->mb_type) return;
if(s->avctx->debug&(FF_DEBUG_SKIP | FF_DEBUG_QP | FF_DEBUG_MB_TYPE)){
int x,y;
{
int mb_x, mb_y;
const int mb_xy = s->mb_y * s->mb_stride + s->mb_x;
-#if CONFIG_MPEG_XVMC_DECODER
- if(s->avctx->xvmc_acceleration){
+ if(CONFIG_MPEG_XVMC_DECODER && s->avctx->xvmc_acceleration){
ff_xvmc_decode_mb(s);//xvmc uses pblocks
return;
}
-#endif
mb_x = s->mb_x;
mb_y = s->mb_y;
for(i=0; i<MAX_PICTURE_COUNT; i++){
if(s->picture[i].data[0] && ( s->picture[i].type == FF_BUFFER_TYPE_INTERNAL
|| s->picture[i].type == FF_BUFFER_TYPE_USER))
- avctx->release_buffer(avctx, (AVFrame*)&s->picture[i]);
+ free_frame_buffer(s, &s->picture[i]);
}
s->current_picture_ptr = s->last_picture_ptr = s->next_picture_ptr = NULL;