commit 97c2dd0de4f578ad40d547eddf78fcb1e4a008a4
Author: Dan Dennedy <dan@dennedy.org>
Date: Sat Sep 5 13:06:21 2015 -0700

Fix avformat build against FFmpeg and Libav master.

This drops support for FFmpeg v1.0; requires at least v1.1. Still works
with Libav v9.
CPU flags are no longer required/used by libswscale. They are detected
at runtime automatically.
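A minimal sketch of the usage pattern this change converges on (not part of the patch; the function name and buffer layout below are hypothetical): pixel formats are spelled with the AV_PIX_FMT_ prefix from libavutil/pixfmt.h, and sws_getContext() receives only scaling-quality flags such as SWS_BICUBIC, because libswscale now detects MMX/SSE support at runtime.

/* Illustrative sketch only -- not taken from this commit. */
#include <stdint.h>
#include <libavutil/pixfmt.h>
#include <libswscale/swscale.h>

/* Convert one packed YUYV 4:2:2 frame to planar YUV 4:2:0.
 * No SWS_CPU_CAPS_* flags: libswscale picks the MMX/SSE code paths itself. */
static int convert_yuyv_to_yuv420p( const uint8_t *src, int width, int height,
                                    uint8_t *dst_data[4], int dst_linesize[4] )
{
    struct SwsContext *ctx = sws_getContext( width, height, AV_PIX_FMT_YUYV422,
                                             width, height, AV_PIX_FMT_YUV420P,
                                             SWS_BICUBIC, NULL, NULL, NULL );
    if ( !ctx )
        return -1;
    const uint8_t *const src_data[4] = { src, NULL, NULL, NULL };
    const int src_linesize[4] = { width * 2, 0, 0, 0 }; /* packed YUYV: 2 bytes per pixel */
    sws_scale( ctx, src_data, src_linesize, 0, height, dst_data, dst_linesize );
    sws_freeContext( ctx );
    return 0;
}

On top of that pattern, the hunks below do two mechanical things: rename every PIX_FMT_* constant and enum PixelFormat to the AV_PIX_FMT_*/AVPixelFormat spellings, and delete the USE_MMX/USE_SSE blocks that used to OR SWS_CPU_CAPS_MMX/SWS_CPU_CAPS_MMX2 into the swscale flags.
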
diff --git src/modules/avformat/consumer_avformat.c src/modules/avformat/consumer_avformat.c
index ef8b153..7947c47 100644
--- src/modules/avformat/consumer_avformat.c
+++ src/modules/avformat/consumer_avformat.c
@@ -439,18 +439,18 @@ static void apply_properties( void *obj, mlt_properties properties, int flags )
}
}

-static enum PixelFormat pick_pix_fmt( mlt_image_format img_fmt )
+static enum AVPixelFormat pick_pix_fmt( mlt_image_format img_fmt )
{
switch ( img_fmt )
{
case mlt_image_rgb24:
- return PIX_FMT_RGB24;
+ return AV_PIX_FMT_RGB24;
case mlt_image_rgb24a:
- return PIX_FMT_RGBA;
+ return AV_PIX_FMT_RGBA;
case mlt_image_yuv420p:
- return PIX_FMT_YUV420P;
+ return AV_PIX_FMT_YUV420P;
default:
- return PIX_FMT_YUYV422;
+ return AV_PIX_FMT_YUYV422;
}
}

@@ -798,7 +798,7 @@ static AVStream *add_video_stream( mlt_consumer consumer, AVFormatContext *oc, A
st->time_base = c->time_base;

// Default to the codec's first pix_fmt if possible.
- c->pix_fmt = pix_fmt? av_get_pix_fmt( pix_fmt ) : codec? codec->pix_fmts[0] : PIX_FMT_YUV420P;
+ c->pix_fmt = pix_fmt? av_get_pix_fmt( pix_fmt ) : codec? codec->pix_fmts[0] : AV_PIX_FMT_YUV420P;

switch ( colorspace )
{
@@ -1032,7 +1032,7 @@ static int open_video( mlt_properties properties, AVFormatContext *oc, AVStream

if( codec && codec->pix_fmts )
{
- const enum PixelFormat *p = codec->pix_fmts;
+ const enum AVPixelFormat *p = codec->pix_fmts;
for( ; *p!=-1; p++ )
{
if( *p == video_enc->pix_fmt )
@@ -1791,12 +1791,6 @@ static void *consumer_thread( void *arg )

// Do the colour space conversion
int flags = SWS_BICUBIC;
-#ifdef USE_MMX
- flags |= SWS_CPU_CAPS_MMX;
-#endif
-#ifdef USE_SSE
- flags |= SWS_CPU_CAPS_MMX2;
-#endif
struct SwsContext *context = sws_getContext( width, height, pick_pix_fmt( img_fmt ),
width, height, c->pix_fmt, flags, NULL, NULL, NULL);
sws_scale( context, (const uint8_t* const*) video_avframe->data, video_avframe->linesize, 0, height,
@@ -1808,9 +1802,9 @@ static void *consumer_thread( void *arg )
// Apply the alpha if applicable
if ( !mlt_properties_get( properties, "mlt_image_format" ) ||
strcmp( mlt_properties_get( properties, "mlt_image_format" ), "rgb24a" ) )
- if ( c->pix_fmt == PIX_FMT_RGBA ||
- c->pix_fmt == PIX_FMT_ARGB ||
- c->pix_fmt == PIX_FMT_BGRA )
+ if ( c->pix_fmt == AV_PIX_FMT_RGBA ||
+ c->pix_fmt == AV_PIX_FMT_ARGB ||
+ c->pix_fmt == AV_PIX_FMT_BGRA )
{
uint8_t *alpha = mlt_frame_get_alpha_mask( frame );
register int n;
@@ -1844,8 +1838,6 @@ static void *consumer_thread( void *arg )
av_init_packet(&pkt);

// Set frame interlace hints
- c->coded_frame->interlaced_frame = !mlt_properties_get_int( frame_properties, "progressive" );
- c->coded_frame->top_field_first = mlt_properties_get_int( frame_properties, "top_field_first" );
if ( mlt_properties_get_int( frame_properties, "progressive" ) )
c->field_order = AV_FIELD_PROGRESSIVE;
else
diff --git src/modules/avformat/filter_avcolour_space.c src/modules/avformat/filter_avcolour_space.c
index f70fd08..910de0c 100644
--- src/modules/avformat/filter_avcolour_space.c
+++ src/modules/avformat/filter_avcolour_space.c
@@ -47,17 +47,17 @@ static int convert_mlt_to_av_cs( mlt_image_format format )
switch( format )
{
case mlt_image_rgb24:
- value = PIX_FMT_RGB24;
+ value = AV_PIX_FMT_RGB24;
break;
case mlt_image_rgb24a:
case mlt_image_opengl:
- value = PIX_FMT_RGBA;
+ value = AV_PIX_FMT_RGBA;
break;
case mlt_image_yuv422:
- value = PIX_FMT_YUYV422;
+ value = AV_PIX_FMT_YUYV422;
break;
case mlt_image_yuv420p:
- value = PIX_FMT_YUV420P;
+ value = AV_PIX_FMT_YUV420P;
break;
default:
mlt_log_error( NULL, "[filter avcolor_space] Invalid format %s\n",
@@ -123,16 +123,10 @@ static int av_convert_image( uint8_t *out, uint8_t *in, int out_fmt, int in_fmt,
int flags = SWS_BICUBIC | SWS_ACCURATE_RND;
int error = -1;

- if ( out_fmt == PIX_FMT_YUYV422 )
+ if ( out_fmt == AV_PIX_FMT_YUYV422 )
flags |= SWS_FULL_CHR_H_INP;
else
flags |= SWS_FULL_CHR_H_INT;
-#ifdef USE_MMX
- flags |= SWS_CPU_CAPS_MMX;
-#endif
-#ifdef USE_SSE
- flags |= SWS_CPU_CAPS_MMX2;
-#endif

avpicture_fill( &input, in, in_fmt, width, height );
avpicture_fill( &output, out, out_fmt, width, height );
@@ -141,7 +135,7 @@ static int av_convert_image( uint8_t *out, uint8_t *in, int out_fmt, int in_fmt,
if ( context )
{
// libswscale wants the RGB colorspace to be SWS_CS_DEFAULT, which is = SWS_CS_ITU601.
- if ( out_fmt == PIX_FMT_RGB24 || out_fmt == PIX_FMT_RGBA )
+ if ( out_fmt == AV_PIX_FMT_RGB24 || out_fmt == AV_PIX_FMT_RGBA )
dst_colorspace = 601;
error = set_luma_transfer( context, src_colorspace, dst_colorspace, use_full_range );
sws_scale( context, (const uint8_t* const*) input.data, input.linesize, 0, height,
@@ -326,7 +320,7 @@ mlt_filter filter_avcolour_space_init( void *arg )
int *width = (int*) arg;
if ( *width > 0 )
{
- struct SwsContext *context = sws_getContext( *width, *width, PIX_FMT_RGB32, 64, 64, PIX_FMT_RGB32, SWS_BILINEAR, NULL, NULL, NULL);
+ struct SwsContext *context = sws_getContext( *width, *width, AV_PIX_FMT_RGB32, 64, 64, AV_PIX_FMT_RGB32, SWS_BILINEAR, NULL, NULL, NULL);
if ( context )
sws_freeContext( context );
else
diff --git src/modules/avformat/filter_avdeinterlace.c src/modules/avformat/filter_avdeinterlace.c
index 9c0189e..91c435c 100644
--- src/modules/avformat/filter_avdeinterlace.c
+++ src/modules/avformat/filter_avdeinterlace.c
@@ -234,28 +234,28 @@ static int mlt_avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
{
int i;

- if (pix_fmt != PIX_FMT_YUV420P &&
- pix_fmt != PIX_FMT_YUV422P &&
- pix_fmt != PIX_FMT_YUYV422 &&
- pix_fmt != PIX_FMT_YUV444P &&
- pix_fmt != PIX_FMT_YUV411P)
+ if (pix_fmt != AV_PIX_FMT_YUV420P &&
+ pix_fmt != AV_PIX_FMT_YUV422P &&
+ pix_fmt != AV_PIX_FMT_YUYV422 &&
+ pix_fmt != AV_PIX_FMT_YUV444P &&
+ pix_fmt != AV_PIX_FMT_YUV411P)
return -1;
if ((width & 3) != 0 || (height & 3) != 0)
return -1;

- if ( pix_fmt != PIX_FMT_YUYV422 )
+ if ( pix_fmt != AV_PIX_FMT_YUYV422 )
{
for(i=0;i<3;i++) {
if (i == 1) {
switch(pix_fmt) {
- case PIX_FMT_YUV420P:
+ case AV_PIX_FMT_YUV420P:
width >>= 1;
height >>= 1;
break;
- case PIX_FMT_YUV422P:
+ case AV_PIX_FMT_YUV422P:
width >>= 1;
break;
- case PIX_FMT_YUV411P:
+ case AV_PIX_FMT_YUV411P:
width >>= 2;
break;
default:
@@ -312,8 +312,8 @@ static int filter_get_image( mlt_frame frame, uint8_t **image, mlt_image_format
AVPicture *output = mlt_pool_alloc( sizeof( AVPicture ) );

// Fill the picture
- avpicture_fill( output, *image, PIX_FMT_YUYV422, *width, *height );
- mlt_avpicture_deinterlace( output, output, PIX_FMT_YUYV422, *width, *height );
+ avpicture_fill( output, *image, AV_PIX_FMT_YUYV422, *width, *height );
+ mlt_avpicture_deinterlace( output, output, AV_PIX_FMT_YUYV422, *width, *height );

// Free the picture
mlt_pool_release( output );
diff --git src/modules/avformat/filter_swscale.c src/modules/avformat/filter_swscale.c
index b8213a3..37c156d 100644
--- src/modules/avformat/filter_swscale.c
+++ src/modules/avformat/filter_swscale.c
@@ -37,17 +37,17 @@ static inline int convert_mlt_to_av_cs( mlt_image_format format )
switch( format )
{
case mlt_image_rgb24:
- value = PIX_FMT_RGB24;
+ value = AV_PIX_FMT_RGB24;
break;
case mlt_image_rgb24a:
case mlt_image_opengl:
- value = PIX_FMT_RGBA;
+ value = AV_PIX_FMT_RGBA;
break;
case mlt_image_yuv422:
- value = PIX_FMT_YUYV422;
+ value = AV_PIX_FMT_YUYV422;
break;
case mlt_image_yuv420p:
- value = PIX_FMT_YUV420P;
+ value = AV_PIX_FMT_YUV420P;
break;
default:
fprintf( stderr, "Invalid format...\n" );
@@ -108,12 +108,6 @@ static int filter_scale( mlt_frame frame, uint8_t **image, mlt_image_format *for
// XXX: we only know how to rescale packed formats
return 1;
}
-#ifdef USE_MMX
- interp |= SWS_CPU_CAPS_MMX;
-#endif
-#ifdef USE_SSE
- interp |= SWS_CPU_CAPS_MMX2;
-#endif

// Convert the pixel formats
int avformat = convert_mlt_to_av_cs( *format );
@@ -148,7 +142,7 @@ static int filter_scale( mlt_frame frame, uint8_t **image, mlt_image_format *for
uint8_t *alpha = mlt_frame_get_alpha( frame );
if ( alpha )
{
- avformat = PIX_FMT_GRAY8;
+ avformat = AV_PIX_FMT_GRAY8;
struct SwsContext *context = sws_getContext( iwidth, iheight, avformat, owidth, oheight, avformat, interp, NULL, NULL, NULL);
avpicture_fill( &input, alpha, avformat, iwidth, iheight );
outbuf = mlt_pool_alloc( owidth * oheight );
@@ -182,7 +176,7 @@ mlt_filter filter_swscale_init( mlt_profile profile, void *arg )
int *width = (int*) arg;
if ( *width > 0 )
{
- struct SwsContext *context = sws_getContext( *width, *width, PIX_FMT_RGB32, 64, 64, PIX_FMT_RGB32, SWS_BILINEAR, NULL, NULL, NULL);
+ struct SwsContext *context = sws_getContext( *width, *width, AV_PIX_FMT_RGB32, 64, 64, AV_PIX_FMT_RGB32, SWS_BILINEAR, NULL, NULL, NULL);
if ( context )
sws_freeContext( context );
else
diff --git src/modules/avformat/producer_avformat.c src/modules/avformat/producer_avformat.c
index 3495e2a..55e326d 100644
--- src/modules/avformat/producer_avformat.c
+++ src/modules/avformat/producer_avformat.c
@@ -33,6 +33,7 @@
#include <libavutil/pixdesc.h>
#include <libavutil/dict.h>
#include <libavutil/opt.h>
+#include <libavutil/channel_layout.h>

#ifdef VDPAU
# include <libavcodec/vdpau.h>
@@ -504,21 +505,21 @@ static char* parse_url( mlt_profile profile, const char* URL, AVInputFormat **fo
return result;
}

-static enum PixelFormat pick_pix_fmt( enum PixelFormat pix_fmt )
+static enum AVPixelFormat pick_pix_fmt( enum AVPixelFormat pix_fmt )
{
switch ( pix_fmt )
{
- case PIX_FMT_ARGB:
- case PIX_FMT_RGBA:
- case PIX_FMT_ABGR:
- case PIX_FMT_BGRA:
- return PIX_FMT_RGBA;
+ case AV_PIX_FMT_ARGB:
+ case AV_PIX_FMT_RGBA:
+ case AV_PIX_FMT_ABGR:
+ case AV_PIX_FMT_BGRA:
+ return AV_PIX_FMT_RGBA;
#if defined(FFUDIV) && (LIBSWSCALE_VERSION_INT >= ((2<<16)+(5<<8)+102))
case AV_PIX_FMT_BAYER_RGGB16LE:
- return PIX_FMT_RGB24;
+ return AV_PIX_FMT_RGB24;
#endif
default:
- return PIX_FMT_YUV422P;
+ return AV_PIX_FMT_YUV422P;
}
}

@@ -976,26 +977,26 @@ static int set_luma_transfer( struct SwsContext *context, int src_colorspace,
brightness, contrast, saturation );
}

-static mlt_image_format pick_image_format( enum PixelFormat pix_fmt )
+static mlt_image_format pick_image_format( enum AVPixelFormat pix_fmt )
{
switch ( pix_fmt )
{
- case PIX_FMT_ARGB:
- case PIX_FMT_RGBA:
- case PIX_FMT_ABGR:
- case PIX_FMT_BGRA:
+ case AV_PIX_FMT_ARGB:
+ case AV_PIX_FMT_RGBA:
+ case AV_PIX_FMT_ABGR:
+ case AV_PIX_FMT_BGRA:
return mlt_image_rgb24a;
- case PIX_FMT_YUV420P:
- case PIX_FMT_YUVJ420P:
- case PIX_FMT_YUVA420P:
+ case AV_PIX_FMT_YUV420P:
+ case AV_PIX_FMT_YUVJ420P:
+ case AV_PIX_FMT_YUVA420P:
return mlt_image_yuv420p;
- case PIX_FMT_RGB24:
- case PIX_FMT_BGR24:
- case PIX_FMT_GRAY8:
- case PIX_FMT_MONOWHITE:
- case PIX_FMT_MONOBLACK:
- case PIX_FMT_RGB8:
- case PIX_FMT_BGR8:
+ case AV_PIX_FMT_RGB24:
+ case AV_PIX_FMT_BGR24:
+ case AV_PIX_FMT_GRAY8:
+ case AV_PIX_FMT_MONOWHITE:
+ case AV_PIX_FMT_MONOBLACK:
+ case AV_PIX_FMT_RGB8:
+ case AV_PIX_FMT_BGR8:
#if defined(FFUDIV) && (LIBSWSCALE_VERSION_INT >= ((2<<16)+(5<<8)+102))
case AV_PIX_FMT_BAYER_RGGB16LE:
return mlt_image_rgb24;
@@ -1071,21 +1072,14 @@ static int convert_image( producer_avformat self, AVFrame *frame, uint8_t *buffe
mlt_profile profile = mlt_service_profile( MLT_PRODUCER_SERVICE( self->parent ) );
int result = self->yuv_colorspace;

-#ifdef USE_MMX
- flags |= SWS_CPU_CAPS_MMX;
-#endif
-#ifdef USE_SSE
- flags |= SWS_CPU_CAPS_MMX2;
-#endif
-
mlt_log_debug( MLT_PRODUCER_SERVICE(self->parent), "%s @ %dx%d space %d->%d\n",
mlt_image_format_name( *format ),
width, height, self->yuv_colorspace, profile->colorspace );

// extract alpha from planar formats
- if ( ( pix_fmt == PIX_FMT_YUVA420P
+ if ( ( pix_fmt == AV_PIX_FMT_YUVA420P
#if defined(FFUDIV)
- || pix_fmt == PIX_FMT_YUVA444P
+ || pix_fmt == AV_PIX_FMT_YUVA444P
#endif
) &&
*format != mlt_image_rgb24a && *format != mlt_image_opengl &&
@@ -1110,10 +1104,10 @@ static int convert_image( producer_avformat self, AVFrame *frame, uint8_t *buffe
// avformat with no filters and explicitly requested.
#if defined(FFUDIV) && (LIBAVFORMAT_VERSION_INT >= ((55<<16)+(48<<8)+100))
struct SwsContext *context = sws_getContext(width, height, src_pix_fmt,
- width, height, PIX_FMT_YUV420P, flags, NULL, NULL, NULL);
+ width, height, AV_PIX_FMT_YUV420P, flags, NULL, NULL, NULL);
#else
struct SwsContext *context = sws_getContext( width, height, pix_fmt,
- width, height, self->full_luma ? PIX_FMT_YUVJ420P : PIX_FMT_YUV420P,
+ width, height, self->full_luma ? AV_PIX_FMT_YUVJ420P : AV_PIX_FMT_YUV420P,
flags, NULL, NULL, NULL);
#endif

@@ -1133,9 +1127,9 @@ static int convert_image( producer_avformat self, AVFrame *frame, uint8_t *buffe
else if ( *format == mlt_image_rgb24 )
{
struct SwsContext *context = sws_getContext( width, height, src_pix_fmt,
- width, height, PIX_FMT_RGB24, flags | SWS_FULL_CHR_H_INT, NULL, NULL, NULL);
+ width, height, AV_PIX_FMT_RGB24, flags | SWS_FULL_CHR_H_INT, NULL, NULL, NULL);
AVPicture output;
- avpicture_fill( &output, buffer, PIX_FMT_RGB24, width, height );
+ avpicture_fill( &output, buffer, AV_PIX_FMT_RGB24, width, height );
// libswscale wants the RGB colorspace to be SWS_CS_DEFAULT, which is = SWS_CS_ITU601.
set_luma_transfer( context, self->yuv_colorspace, 601, self->full_luma, 0 );
sws_scale( context, (const uint8_t* const*) frame->data, frame->linesize, 0, height,
@@ -1145,9 +1139,9 @@ static int convert_image( producer_avformat self, AVFrame *frame, uint8_t *buffe
else if ( *format == mlt_image_rgb24a || *format == mlt_image_opengl )
{
struct SwsContext *context = sws_getContext( width, height, src_pix_fmt,
- width, height, PIX_FMT_RGBA, flags | SWS_FULL_CHR_H_INT, NULL, NULL, NULL);
+ width, height, AV_PIX_FMT_RGBA, flags | SWS_FULL_CHR_H_INT, NULL, NULL, NULL);
AVPicture output;
- avpicture_fill( &output, buffer, PIX_FMT_RGBA, width, height );
+ avpicture_fill( &output, buffer, AV_PIX_FMT_RGBA, width, height );
// libswscale wants the RGB colorspace to be SWS_CS_DEFAULT, which is = SWS_CS_ITU601.
set_luma_transfer( context, self->yuv_colorspace, 601, self->full_luma, 0 );
sws_scale( context, (const uint8_t* const*) frame->data, frame->linesize, 0, height,
@@ -1158,13 +1152,13 @@ static int convert_image( producer_avformat self, AVFrame *frame, uint8_t *buffe
{
#if defined(FFUDIV) && (LIBAVFORMAT_VERSION_INT >= ((55<<16)+(48<<8)+100))
struct SwsContext *context = sws_getContext( width, height, src_pix_fmt,
- width, height, PIX_FMT_YUYV422, flags | SWS_FULL_CHR_H_INP, NULL, NULL, NULL);
+ width, height, AV_PIX_FMT_YUYV422, flags | SWS_FULL_CHR_H_INP, NULL, NULL, NULL);
#else
struct SwsContext *context = sws_getContext( width, height, pix_fmt,
- width, height, PIX_FMT_YUYV422, flags | SWS_FULL_CHR_H_INP, NULL, NULL, NULL);
+ width, height, AV_PIX_FMT_YUYV422, flags | SWS_FULL_CHR_H_INP, NULL, NULL, NULL);
#endif
AVPicture output;
- avpicture_fill( &output, buffer, PIX_FMT_YUYV422, width, height );
+ avpicture_fill( &output, buffer, AV_PIX_FMT_YUYV422, width, height );
if ( !set_luma_transfer( context, self->yuv_colorspace, profile->colorspace, self->full_luma, 0 ) )
result = profile->colorspace;
sws_scale( context, (const uint8_t* const*) frame->data, frame->linesize, 0, height,
@@ -1297,12 +1291,11 @@ static int producer_get_image( mlt_frame frame, uint8_t **buffer, mlt_image_form
double delay = mlt_properties_get_double( properties, "video_delay" );

// Seek if necessary
- const char *interp = mlt_properties_get( frame_properties, "rescale.interp" );
- int preseek = must_decode
+ int preseek = must_decode && codec_context->has_b_frames;
#if defined(FFUDIV)
- && ( interp && strcmp( interp, "nearest" ) )
+ const char *interp = mlt_properties_get( frame_properties, "rescale.interp" );
+ preseek = preseek && interp && strcmp( interp, "nearest" );
#endif
- && codec_context->has_b_frames;
int paused = seek_video( self, position, req_position, preseek );

// Seek might have reopened the file
@@ -1310,10 +1303,10 @@ static int producer_get_image( mlt_frame frame, uint8_t **buffer, mlt_image_form
stream = context->streams[ self->video_index ];
codec_context = stream->codec;
if ( *format == mlt_image_none || *format == mlt_image_glsl ||
- codec_context->pix_fmt == PIX_FMT_ARGB ||
- codec_context->pix_fmt == PIX_FMT_RGBA ||
- codec_context->pix_fmt == PIX_FMT_ABGR ||
- codec_context->pix_fmt == PIX_FMT_BGRA )
+ codec_context->pix_fmt == AV_PIX_FMT_ARGB ||
+ codec_context->pix_fmt == AV_PIX_FMT_RGBA ||
+ codec_context->pix_fmt == AV_PIX_FMT_ABGR ||
+ codec_context->pix_fmt == AV_PIX_FMT_BGRA )
*format = pick_image_format( codec_context->pix_fmt );
#if defined(FFUDIV) && (LIBSWSCALE_VERSION_INT >= ((2<<16)+(5<<8)+102))
else if ( codec_context->pix_fmt == AV_PIX_FMT_BAYER_RGGB16LE ) {
@@ -1346,7 +1339,7 @@ static int producer_get_image( mlt_frame frame, uint8_t **buffer, mlt_image_form
picture.linesize[1] = codec_context->width / 2;
picture.linesize[2] = codec_context->width / 2;
yuv_colorspace = convert_image( self, (AVFrame*) &picture, *buffer,
- PIX_FMT_YUV420P, format, *width, *height, &alpha );
+ AV_PIX_FMT_YUV420P, format, *width, *height, &alpha );
}
else
#endif
@@ -1539,7 +1532,7 @@ static int producer_get_image( mlt_frame frame, uint8_t **buffer, mlt_image_form
VdpStatus status = vdp_surface_get_bits( render->surface, dest_format, planes, pitches );
if ( status == VDP_STATUS_OK )
{
- yuv_colorspace = convert_image( self, self->video_frame, *buffer, PIX_FMT_YUV420P,
+ yuv_colorspace = convert_image( self, self->video_frame, *buffer, AV_PIX_FMT_YUV420P,
format, *width, *height, &alpha );
mlt_properties_set_int( frame_properties, "colorspace", yuv_colorspace );
}
diff --git src/modules/avformat/vdpau.c src/modules/avformat/vdpau.c
index 719db1c..4f2e6b9 100644
--- src/modules/avformat/vdpau.c
+++ src/modules/avformat/vdpau.c
@@ -136,9 +136,9 @@ static int vdpau_init( producer_avformat self )
return success;
}

-static enum PixelFormat vdpau_get_format( struct AVCodecContext *s, const enum PixelFormat *fmt )
+static enum AVPixelFormat vdpau_get_format( struct AVCodecContext *s, const enum AVPixelFormat *fmt )
{
- return PIX_FMT_VDPAU_H264;
+ return AV_PIX_FMT_VDPAU_H264;
}

static int vdpau_get_buffer( AVCodecContext *codec_context, AVFrame *frame )
@@ -240,7 +240,7 @@ static int vdpau_decoder_init( producer_avformat self )
self->video_codec->release_buffer = vdpau_release_buffer;
self->video_codec->draw_horiz_band = vdpau_draw_horiz;
self->video_codec->slice_flags = SLICE_FLAG_CODED_ORDER | SLICE_FLAG_ALLOW_FIELD;
- self->video_codec->pix_fmt = PIX_FMT_VDPAU_H264;
+ self->video_codec->pix_fmt = AV_PIX_FMT_VDPAU_H264;

VdpDecoderProfile profile = VDP_DECODER_PROFILE_H264_HIGH;
uint32_t max_references = self->video_codec->refs;