Commit bb8ae389 (JR) — mlt-0.9.8 avformat module: port to newer FFmpeg APIs.
Renames PIX_FMT_* to AV_PIX_FMT_* (enum PixelFormat -> enum AVPixelFormat),
replaces deprecated av_free_packet() with av_packet_unref(), and replaces the
removed SWS_CPU_CAPS_MMX/MMX2 flags with PP_CPU_CAPS_MMX/MMX2 from
libpostproc/postprocess.h (new include added where needed).
1 | --- |
2 | src/modules/avformat/consumer_avformat.c | 25 +++--- | |
3 | src/modules/avformat/filter_avcolour_space.c | 19 ++-- | |
4 | src/modules/avformat/filter_avdeinterlace.c | 22 ++--- | |
5 | src/modules/avformat/filter_swscale.c | 17 ++-- | |
6 | src/modules/avformat/producer_avformat.c | 103 +++++++++++++-------------- | |
7 | src/modules/avformat/vdpau.c | 6 - | |
8 | 6 files changed, 98 insertions(+), 94 deletions(-) | |
9 | ||
10 | Index: mlt-0.9.8/src/modules/avformat/consumer_avformat.c | |
11 | =================================================================== | |
12 | --- mlt-0.9.8.orig/src/modules/avformat/consumer_avformat.c | |
13 | +++ mlt-0.9.8/src/modules/avformat/consumer_avformat.c | |
14 | @@ -41,6 +41,7 @@ | |
15 | #include <libavutil/mathematics.h> | |
16 | #include <libavutil/samplefmt.h> | |
17 | #include <libavutil/opt.h> | |
18 | +#include <libpostproc/postprocess.h> | |
19 | ||
20 | #if LIBAVCODEC_VERSION_MAJOR < 55 | |
21 | #define AV_CODEC_ID_PCM_S16LE CODEC_ID_PCM_S16LE | |
22 | @@ -439,18 +440,18 @@ static void apply_properties( void *obj, | |
23 | } | |
24 | } | |
25 | ||
26 | -static enum PixelFormat pick_pix_fmt( mlt_image_format img_fmt ) | |
27 | +static enum AVPixelFormat pick_pix_fmt( mlt_image_format img_fmt ) | |
28 | { | |
29 | switch ( img_fmt ) | |
30 | { | |
31 | case mlt_image_rgb24: | |
32 | - return PIX_FMT_RGB24; | |
33 | + return AV_PIX_FMT_RGB24; | |
34 | case mlt_image_rgb24a: | |
35 | - return PIX_FMT_RGBA; | |
36 | + return AV_PIX_FMT_RGBA; | |
37 | case mlt_image_yuv420p: | |
38 | - return PIX_FMT_YUV420P; | |
39 | + return AV_PIX_FMT_YUV420P; | |
40 | default: | |
41 | - return PIX_FMT_YUYV422; | |
42 | + return AV_PIX_FMT_YUYV422; | |
43 | } | |
44 | } | |
45 | ||
46 | @@ -798,7 +799,7 @@ static AVStream *add_video_stream( mlt_c | |
47 | st->time_base = c->time_base; | |
48 | ||
49 | // Default to the codec's first pix_fmt if possible. | |
50 | - c->pix_fmt = pix_fmt? av_get_pix_fmt( pix_fmt ) : codec? codec->pix_fmts[0] : PIX_FMT_YUV420P; | |
51 | + c->pix_fmt = pix_fmt? av_get_pix_fmt( pix_fmt ) : codec? codec->pix_fmts[0] : AV_PIX_FMT_YUV420P; | |
52 | ||
53 | switch ( colorspace ) | |
54 | { | |
55 | @@ -1032,7 +1033,7 @@ static int open_video( mlt_properties pr | |
56 | ||
57 | if( codec && codec->pix_fmts ) | |
58 | { | |
59 | - const enum PixelFormat *p = codec->pix_fmts; | |
60 | + const enum AVPixelFormat *p = codec->pix_fmts; | |
61 | for( ; *p!=-1; p++ ) | |
62 | { | |
63 | if( *p == video_enc->pix_fmt ) | |
64 | @@ -1792,10 +1793,10 @@ static void *consumer_thread( void *arg | |
65 | // Do the colour space conversion | |
66 | int flags = SWS_BICUBIC; | |
67 | #ifdef USE_MMX | |
68 | - flags |= SWS_CPU_CAPS_MMX; | |
69 | + flags |= PP_CPU_CAPS_MMX; | |
70 | #endif | |
71 | #ifdef USE_SSE | |
72 | - flags |= SWS_CPU_CAPS_MMX2; | |
73 | + flags |= PP_CPU_CAPS_MMX2; | |
74 | #endif | |
75 | struct SwsContext *context = sws_getContext( width, height, pick_pix_fmt( img_fmt ), | |
76 | width, height, c->pix_fmt, flags, NULL, NULL, NULL); | |
77 | @@ -1808,9 +1809,9 @@ static void *consumer_thread( void *arg | |
78 | // Apply the alpha if applicable | |
79 | if ( !mlt_properties_get( properties, "mlt_image_format" ) || | |
80 | strcmp( mlt_properties_get( properties, "mlt_image_format" ), "rgb24a" ) ) | |
81 | - if ( c->pix_fmt == PIX_FMT_RGBA || | |
82 | - c->pix_fmt == PIX_FMT_ARGB || | |
83 | - c->pix_fmt == PIX_FMT_BGRA ) | |
84 | + if ( c->pix_fmt == AV_PIX_FMT_RGBA || | |
85 | + c->pix_fmt == AV_PIX_FMT_ARGB || | |
86 | + c->pix_fmt == AV_PIX_FMT_BGRA ) | |
87 | { | |
88 | uint8_t *alpha = mlt_frame_get_alpha_mask( frame ); | |
89 | register int n; | |
90 | Index: mlt-0.9.8/src/modules/avformat/filter_avcolour_space.c | |
91 | =================================================================== | |
92 | --- mlt-0.9.8.orig/src/modules/avformat/filter_avcolour_space.c | |
93 | +++ mlt-0.9.8/src/modules/avformat/filter_avcolour_space.c | |
94 | @@ -26,6 +26,7 @@ | |
95 | // ffmpeg Header files | |
96 | #include <libavformat/avformat.h> | |
97 | #include <libswscale/swscale.h> | |
98 | +#include <libpostproc/postprocess.h> | |
99 | ||
100 | #include <stdio.h> | |
101 | #include <stdlib.h> | |
102 | @@ -47,17 +48,17 @@ static int convert_mlt_to_av_cs( mlt_ima | |
103 | switch( format ) | |
104 | { | |
105 | case mlt_image_rgb24: | |
106 | - value = PIX_FMT_RGB24; | |
107 | + value = AV_PIX_FMT_RGB24; | |
108 | break; | |
109 | case mlt_image_rgb24a: | |
110 | case mlt_image_opengl: | |
111 | - value = PIX_FMT_RGBA; | |
112 | + value = AV_PIX_FMT_RGBA; | |
113 | break; | |
114 | case mlt_image_yuv422: | |
115 | - value = PIX_FMT_YUYV422; | |
116 | + value = AV_PIX_FMT_YUYV422; | |
117 | break; | |
118 | case mlt_image_yuv420p: | |
119 | - value = PIX_FMT_YUV420P; | |
120 | + value = AV_PIX_FMT_YUV420P; | |
121 | break; | |
122 | default: | |
123 | mlt_log_error( NULL, "[filter avcolor_space] Invalid format %s\n", | |
124 | @@ -123,15 +124,15 @@ static int av_convert_image( uint8_t *ou | |
125 | int flags = SWS_BICUBIC | SWS_ACCURATE_RND; | |
126 | int error = -1; | |
127 | ||
128 | - if ( out_fmt == PIX_FMT_YUYV422 ) | |
129 | + if ( out_fmt == AV_PIX_FMT_YUYV422 ) | |
130 | flags |= SWS_FULL_CHR_H_INP; | |
131 | else | |
132 | flags |= SWS_FULL_CHR_H_INT; | |
133 | #ifdef USE_MMX | |
134 | - flags |= SWS_CPU_CAPS_MMX; | |
135 | + flags |= PP_CPU_CAPS_MMX; | |
136 | #endif | |
137 | #ifdef USE_SSE | |
138 | - flags |= SWS_CPU_CAPS_MMX2; | |
139 | + flags |= PP_CPU_CAPS_MMX2; | |
140 | #endif | |
141 | ||
142 | avpicture_fill( &input, in, in_fmt, width, height ); | |
143 | @@ -141,7 +142,7 @@ static int av_convert_image( uint8_t *ou | |
144 | if ( context ) | |
145 | { | |
146 | // libswscale wants the RGB colorspace to be SWS_CS_DEFAULT, which is = SWS_CS_ITU601. | |
147 | - if ( out_fmt == PIX_FMT_RGB24 || out_fmt == PIX_FMT_RGBA ) | |
148 | + if ( out_fmt == AV_PIX_FMT_RGB24 || out_fmt == AV_PIX_FMT_RGBA ) | |
149 | dst_colorspace = 601; | |
150 | error = set_luma_transfer( context, src_colorspace, dst_colorspace, use_full_range ); | |
151 | sws_scale( context, (const uint8_t* const*) input.data, input.linesize, 0, height, | |
152 | @@ -326,7 +327,7 @@ mlt_filter filter_avcolour_space_init( v | |
153 | int *width = (int*) arg; | |
154 | if ( *width > 0 ) | |
155 | { | |
156 | - struct SwsContext *context = sws_getContext( *width, *width, PIX_FMT_RGB32, 64, 64, PIX_FMT_RGB32, SWS_BILINEAR, NULL, NULL, NULL); | |
157 | + struct SwsContext *context = sws_getContext( *width, *width, AV_PIX_FMT_RGB32, 64, 64, AV_PIX_FMT_RGB32, SWS_BILINEAR, NULL, NULL, NULL); | |
158 | if ( context ) | |
159 | sws_freeContext( context ); | |
160 | else | |
161 | Index: mlt-0.9.8/src/modules/avformat/filter_avdeinterlace.c | |
162 | =================================================================== | |
163 | --- mlt-0.9.8.orig/src/modules/avformat/filter_avdeinterlace.c | |
164 | +++ mlt-0.9.8/src/modules/avformat/filter_avdeinterlace.c | |
165 | @@ -234,28 +234,28 @@ static int mlt_avpicture_deinterlace(AVP | |
166 | { | |
167 | int i; | |
168 | ||
169 | - if (pix_fmt != PIX_FMT_YUV420P && | |
170 | - pix_fmt != PIX_FMT_YUV422P && | |
171 | - pix_fmt != PIX_FMT_YUYV422 && | |
172 | - pix_fmt != PIX_FMT_YUV444P && | |
173 | - pix_fmt != PIX_FMT_YUV411P) | |
174 | + if (pix_fmt != AV_PIX_FMT_YUV420P && | |
175 | + pix_fmt != AV_PIX_FMT_YUV422P && | |
176 | + pix_fmt != AV_PIX_FMT_YUYV422 && | |
177 | + pix_fmt != AV_PIX_FMT_YUV444P && | |
178 | + pix_fmt != AV_PIX_FMT_YUV411P) | |
179 | return -1; | |
180 | if ((width & 3) != 0 || (height & 3) != 0) | |
181 | return -1; | |
182 | ||
183 | - if ( pix_fmt != PIX_FMT_YUYV422 ) | |
184 | + if ( pix_fmt != AV_PIX_FMT_YUYV422 ) | |
185 | { | |
186 | for(i=0;i<3;i++) { | |
187 | if (i == 1) { | |
188 | switch(pix_fmt) { | |
189 | - case PIX_FMT_YUV420P: | |
190 | + case AV_PIX_FMT_YUV420P: | |
191 | width >>= 1; | |
192 | height >>= 1; | |
193 | break; | |
194 | - case PIX_FMT_YUV422P: | |
195 | + case AV_PIX_FMT_YUV422P: | |
196 | width >>= 1; | |
197 | break; | |
198 | - case PIX_FMT_YUV411P: | |
199 | + case AV_PIX_FMT_YUV411P: | |
200 | width >>= 2; | |
201 | break; | |
202 | default: | |
203 | @@ -312,8 +312,8 @@ static int filter_get_image( mlt_frame f | |
204 | AVPicture *output = mlt_pool_alloc( sizeof( AVPicture ) ); | |
205 | ||
206 | // Fill the picture | |
207 | - avpicture_fill( output, *image, PIX_FMT_YUYV422, *width, *height ); | |
208 | - mlt_avpicture_deinterlace( output, output, PIX_FMT_YUYV422, *width, *height ); | |
209 | + avpicture_fill( output, *image, AV_PIX_FMT_YUYV422, *width, *height ); | |
210 | + mlt_avpicture_deinterlace( output, output, AV_PIX_FMT_YUYV422, *width, *height ); | |
211 | ||
212 | // Free the picture | |
213 | mlt_pool_release( output ); | |
214 | Index: mlt-0.9.8/src/modules/avformat/filter_swscale.c | |
215 | =================================================================== | |
216 | --- mlt-0.9.8.orig/src/modules/avformat/filter_swscale.c | |
217 | +++ mlt-0.9.8/src/modules/avformat/filter_swscale.c | |
218 | @@ -25,6 +25,7 @@ | |
219 | // ffmpeg Header files | |
220 | #include <libavformat/avformat.h> | |
221 | #include <libswscale/swscale.h> | |
222 | +#include <libpostproc/postprocess.h> | |
223 | ||
224 | #include <stdio.h> | |
225 | #include <stdlib.h> | |
226 | @@ -37,17 +38,17 @@ static inline int convert_mlt_to_av_cs( | |
227 | switch( format ) | |
228 | { | |
229 | case mlt_image_rgb24: | |
230 | - value = PIX_FMT_RGB24; | |
231 | + value = AV_PIX_FMT_RGB24; | |
232 | break; | |
233 | case mlt_image_rgb24a: | |
234 | case mlt_image_opengl: | |
235 | - value = PIX_FMT_RGBA; | |
236 | + value = AV_PIX_FMT_RGBA; | |
237 | break; | |
238 | case mlt_image_yuv422: | |
239 | - value = PIX_FMT_YUYV422; | |
240 | + value = AV_PIX_FMT_YUYV422; | |
241 | break; | |
242 | case mlt_image_yuv420p: | |
243 | - value = PIX_FMT_YUV420P; | |
244 | + value = AV_PIX_FMT_YUV420P; | |
245 | break; | |
246 | default: | |
247 | fprintf( stderr, "Invalid format...\n" ); | |
248 | @@ -109,10 +110,10 @@ static int filter_scale( mlt_frame frame | |
249 | return 1; | |
250 | } | |
251 | #ifdef USE_MMX | |
252 | - interp |= SWS_CPU_CAPS_MMX; | |
253 | + interp |= PP_CPU_CAPS_MMX; | |
254 | #endif | |
255 | #ifdef USE_SSE | |
256 | - interp |= SWS_CPU_CAPS_MMX2; | |
257 | + interp |= PP_CPU_CAPS_MMX2; | |
258 | #endif | |
259 | ||
260 | // Convert the pixel formats | |
261 | @@ -148,7 +149,7 @@ static int filter_scale( mlt_frame frame | |
262 | uint8_t *alpha = mlt_frame_get_alpha( frame ); | |
263 | if ( alpha ) | |
264 | { | |
265 | - avformat = PIX_FMT_GRAY8; | |
266 | + avformat = AV_PIX_FMT_GRAY8; | |
267 | struct SwsContext *context = sws_getContext( iwidth, iheight, avformat, owidth, oheight, avformat, interp, NULL, NULL, NULL); | |
268 | avpicture_fill( &input, alpha, avformat, iwidth, iheight ); | |
269 | outbuf = mlt_pool_alloc( owidth * oheight ); | |
270 | @@ -182,7 +183,7 @@ mlt_filter filter_swscale_init( mlt_prof | |
271 | int *width = (int*) arg; | |
272 | if ( *width > 0 ) | |
273 | { | |
274 | - struct SwsContext *context = sws_getContext( *width, *width, PIX_FMT_RGB32, 64, 64, PIX_FMT_RGB32, SWS_BILINEAR, NULL, NULL, NULL); | |
275 | + struct SwsContext *context = sws_getContext( *width, *width, AV_PIX_FMT_RGB32, 64, 64, AV_PIX_FMT_RGB32, SWS_BILINEAR, NULL, NULL, NULL); | |
276 | if ( context ) | |
277 | sws_freeContext( context ); | |
278 | else | |
279 | Index: mlt-0.9.8/src/modules/avformat/producer_avformat.c | |
280 | =================================================================== | |
281 | --- mlt-0.9.8.orig/src/modules/avformat/producer_avformat.c | |
282 | +++ mlt-0.9.8/src/modules/avformat/producer_avformat.c | |
283 | @@ -33,6 +33,7 @@ | |
284 | #include <libavutil/pixdesc.h> | |
285 | #include <libavutil/dict.h> | |
286 | #include <libavutil/opt.h> | |
287 | +#include <libpostproc/postprocess.h> | |
288 | ||
289 | #ifdef VDPAU | |
290 | # include <libavcodec/vdpau.h> | |
291 | @@ -504,21 +505,21 @@ static char* parse_url( mlt_profile prof | |
292 | return result; | |
293 | } | |
294 | ||
295 | -static enum PixelFormat pick_pix_fmt( enum PixelFormat pix_fmt ) | |
296 | +static enum AVPixelFormat pick_pix_fmt( enum AVPixelFormat pix_fmt ) | |
297 | { | |
298 | switch ( pix_fmt ) | |
299 | { | |
300 | - case PIX_FMT_ARGB: | |
301 | - case PIX_FMT_RGBA: | |
302 | - case PIX_FMT_ABGR: | |
303 | - case PIX_FMT_BGRA: | |
304 | - return PIX_FMT_RGBA; | |
305 | + case AV_PIX_FMT_ARGB: | |
306 | + case AV_PIX_FMT_RGBA: | |
307 | + case AV_PIX_FMT_ABGR: | |
308 | + case AV_PIX_FMT_BGRA: | |
309 | + return AV_PIX_FMT_RGBA; | |
310 | #if defined(FFUDIV) && (LIBSWSCALE_VERSION_INT >= ((2<<16)+(5<<8)+102)) | |
311 | case AV_PIX_FMT_BAYER_RGGB16LE: | |
312 | - return PIX_FMT_RGB24; | |
313 | + return AV_PIX_FMT_RGB24; | |
314 | #endif | |
315 | default: | |
316 | - return PIX_FMT_YUV422P; | |
317 | + return AV_PIX_FMT_YUV422P; | |
318 | } | |
319 | } | |
320 | ||
321 | @@ -759,7 +760,7 @@ static void prepare_reopen( producer_avf | |
322 | { | |
323 | while ( ( pkt = mlt_deque_pop_back( self->apackets ) ) ) | |
324 | { | |
325 | - av_free_packet( pkt ); | |
326 | + av_packet_unref( pkt ); | |
327 | free( pkt ); | |
328 | } | |
329 | mlt_deque_close( self->apackets ); | |
330 | @@ -769,7 +770,7 @@ static void prepare_reopen( producer_avf | |
331 | { | |
332 | while ( ( pkt = mlt_deque_pop_back( self->vpackets ) ) ) | |
333 | { | |
334 | - av_free_packet( pkt ); | |
335 | + av_packet_unref( pkt ); | |
336 | free( pkt ); | |
337 | } | |
338 | mlt_deque_close( self->vpackets ); | |
339 | @@ -810,7 +811,7 @@ static void find_first_pts( producer_avf | |
340 | if ( self->first_pts != AV_NOPTS_VALUE ) | |
341 | toscan = 0; | |
342 | } | |
343 | - av_free_packet( &pkt ); | |
344 | + av_packet_unref( &pkt ); | |
345 | } | |
346 | av_seek_frame( context, -1, 0, AVSEEK_FLAG_BACKWARD ); | |
347 | } | |
348 | @@ -976,26 +977,26 @@ static int set_luma_transfer( struct Sws | |
349 | brightness, contrast, saturation ); | |
350 | } | |
351 | ||
352 | -static mlt_image_format pick_image_format( enum PixelFormat pix_fmt ) | |
353 | +static mlt_image_format pick_image_format( enum AVPixelFormat pix_fmt ) | |
354 | { | |
355 | switch ( pix_fmt ) | |
356 | { | |
357 | - case PIX_FMT_ARGB: | |
358 | - case PIX_FMT_RGBA: | |
359 | - case PIX_FMT_ABGR: | |
360 | - case PIX_FMT_BGRA: | |
361 | + case AV_PIX_FMT_ARGB: | |
362 | + case AV_PIX_FMT_RGBA: | |
363 | + case AV_PIX_FMT_ABGR: | |
364 | + case AV_PIX_FMT_BGRA: | |
365 | return mlt_image_rgb24a; | |
366 | - case PIX_FMT_YUV420P: | |
367 | - case PIX_FMT_YUVJ420P: | |
368 | - case PIX_FMT_YUVA420P: | |
369 | + case AV_PIX_FMT_YUV420P: | |
370 | + case AV_PIX_FMT_YUVJ420P: | |
371 | + case AV_PIX_FMT_YUVA420P: | |
372 | return mlt_image_yuv420p; | |
373 | - case PIX_FMT_RGB24: | |
374 | - case PIX_FMT_BGR24: | |
375 | - case PIX_FMT_GRAY8: | |
376 | - case PIX_FMT_MONOWHITE: | |
377 | - case PIX_FMT_MONOBLACK: | |
378 | - case PIX_FMT_RGB8: | |
379 | - case PIX_FMT_BGR8: | |
380 | + case AV_PIX_FMT_RGB24: | |
381 | + case AV_PIX_FMT_BGR24: | |
382 | + case AV_PIX_FMT_GRAY8: | |
383 | + case AV_PIX_FMT_MONOWHITE: | |
384 | + case AV_PIX_FMT_MONOBLACK: | |
385 | + case AV_PIX_FMT_RGB8: | |
386 | + case AV_PIX_FMT_BGR8: | |
387 | #if defined(FFUDIV) && (LIBSWSCALE_VERSION_INT >= ((2<<16)+(5<<8)+102)) | |
388 | case AV_PIX_FMT_BAYER_RGGB16LE: | |
389 | return mlt_image_rgb24; | |
390 | @@ -1072,10 +1073,10 @@ static int convert_image( producer_avfor | |
391 | int result = self->yuv_colorspace; | |
392 | ||
393 | #ifdef USE_MMX | |
394 | - flags |= SWS_CPU_CAPS_MMX; | |
395 | + flags |= PP_CPU_CAPS_MMX; | |
396 | #endif | |
397 | #ifdef USE_SSE | |
398 | - flags |= SWS_CPU_CAPS_MMX2; | |
399 | + flags |= PP_CPU_CAPS_MMX2; | |
400 | #endif | |
401 | ||
402 | mlt_log_debug( MLT_PRODUCER_SERVICE(self->parent), "%s @ %dx%d space %d->%d\n", | |
403 | @@ -1083,9 +1084,9 @@ static int convert_image( producer_avfor | |
404 | width, height, self->yuv_colorspace, profile->colorspace ); | |
405 | ||
406 | // extract alpha from planar formats | |
407 | - if ( ( pix_fmt == PIX_FMT_YUVA420P | |
408 | + if ( ( pix_fmt == AV_PIX_FMT_YUVA420P | |
409 | #if defined(FFUDIV) | |
410 | - || pix_fmt == PIX_FMT_YUVA444P | |
411 | + || pix_fmt == AV_PIX_FMT_YUVA444P | |
412 | #endif | |
413 | ) && | |
414 | *format != mlt_image_rgb24a && *format != mlt_image_opengl && | |
415 | @@ -1110,10 +1111,10 @@ static int convert_image( producer_avfor | |
416 | // avformat with no filters and explicitly requested. | |
417 | #if defined(FFUDIV) && (LIBAVFORMAT_VERSION_INT >= ((55<<16)+(48<<8)+100)) | |
418 | struct SwsContext *context = sws_getContext(width, height, src_pix_fmt, | |
419 | - width, height, PIX_FMT_YUV420P, flags, NULL, NULL, NULL); | |
420 | + width, height, AV_PIX_FMT_YUV420P, flags, NULL, NULL, NULL); | |
421 | #else | |
422 | struct SwsContext *context = sws_getContext( width, height, pix_fmt, | |
423 | - width, height, self->full_luma ? PIX_FMT_YUVJ420P : PIX_FMT_YUV420P, | |
424 | + width, height, self->full_luma ? AV_PIX_FMT_YUVJ420P : AV_PIX_FMT_YUV420P, | |
425 | flags, NULL, NULL, NULL); | |
426 | #endif | |
427 | ||
428 | @@ -1133,9 +1134,9 @@ static int convert_image( producer_avfor | |
429 | else if ( *format == mlt_image_rgb24 ) | |
430 | { | |
431 | struct SwsContext *context = sws_getContext( width, height, src_pix_fmt, | |
432 | - width, height, PIX_FMT_RGB24, flags | SWS_FULL_CHR_H_INT, NULL, NULL, NULL); | |
433 | + width, height, AV_PIX_FMT_RGB24, flags | SWS_FULL_CHR_H_INT, NULL, NULL, NULL); | |
434 | AVPicture output; | |
435 | - avpicture_fill( &output, buffer, PIX_FMT_RGB24, width, height ); | |
436 | + avpicture_fill( &output, buffer, AV_PIX_FMT_RGB24, width, height ); | |
437 | // libswscale wants the RGB colorspace to be SWS_CS_DEFAULT, which is = SWS_CS_ITU601. | |
438 | set_luma_transfer( context, self->yuv_colorspace, 601, self->full_luma, 0 ); | |
439 | sws_scale( context, (const uint8_t* const*) frame->data, frame->linesize, 0, height, | |
440 | @@ -1145,9 +1146,9 @@ static int convert_image( producer_avfor | |
441 | else if ( *format == mlt_image_rgb24a || *format == mlt_image_opengl ) | |
442 | { | |
443 | struct SwsContext *context = sws_getContext( width, height, src_pix_fmt, | |
444 | - width, height, PIX_FMT_RGBA, flags | SWS_FULL_CHR_H_INT, NULL, NULL, NULL); | |
445 | + width, height, AV_PIX_FMT_RGBA, flags | SWS_FULL_CHR_H_INT, NULL, NULL, NULL); | |
446 | AVPicture output; | |
447 | - avpicture_fill( &output, buffer, PIX_FMT_RGBA, width, height ); | |
448 | + avpicture_fill( &output, buffer, AV_PIX_FMT_RGBA, width, height ); | |
449 | // libswscale wants the RGB colorspace to be SWS_CS_DEFAULT, which is = SWS_CS_ITU601. | |
450 | set_luma_transfer( context, self->yuv_colorspace, 601, self->full_luma, 0 ); | |
451 | sws_scale( context, (const uint8_t* const*) frame->data, frame->linesize, 0, height, | |
452 | @@ -1158,13 +1159,13 @@ static int convert_image( producer_avfor | |
453 | { | |
454 | #if defined(FFUDIV) && (LIBAVFORMAT_VERSION_INT >= ((55<<16)+(48<<8)+100)) | |
455 | struct SwsContext *context = sws_getContext( width, height, src_pix_fmt, | |
456 | - width, height, PIX_FMT_YUYV422, flags | SWS_FULL_CHR_H_INP, NULL, NULL, NULL); | |
457 | + width, height, AV_PIX_FMT_YUYV422, flags | SWS_FULL_CHR_H_INP, NULL, NULL, NULL); | |
458 | #else | |
459 | struct SwsContext *context = sws_getContext( width, height, pix_fmt, | |
460 | - width, height, PIX_FMT_YUYV422, flags | SWS_FULL_CHR_H_INP, NULL, NULL, NULL); | |
461 | + width, height, AV_PIX_FMT_YUYV422, flags | SWS_FULL_CHR_H_INP, NULL, NULL, NULL); | |
462 | #endif | |
463 | AVPicture output; | |
464 | - avpicture_fill( &output, buffer, PIX_FMT_YUYV422, width, height ); | |
465 | + avpicture_fill( &output, buffer, AV_PIX_FMT_YUYV422, width, height ); | |
466 | if ( !set_luma_transfer( context, self->yuv_colorspace, profile->colorspace, self->full_luma, 0 ) ) | |
467 | result = profile->colorspace; | |
468 | sws_scale( context, (const uint8_t* const*) frame->data, frame->linesize, 0, height, | |
469 | @@ -1310,10 +1311,10 @@ static int producer_get_image( mlt_frame | |
470 | stream = context->streams[ self->video_index ]; | |
471 | codec_context = stream->codec; | |
472 | if ( *format == mlt_image_none || *format == mlt_image_glsl || | |
473 | - codec_context->pix_fmt == PIX_FMT_ARGB || | |
474 | - codec_context->pix_fmt == PIX_FMT_RGBA || | |
475 | - codec_context->pix_fmt == PIX_FMT_ABGR || | |
476 | - codec_context->pix_fmt == PIX_FMT_BGRA ) | |
477 | + codec_context->pix_fmt == AV_PIX_FMT_ARGB || | |
478 | + codec_context->pix_fmt == AV_PIX_FMT_RGBA || | |
479 | + codec_context->pix_fmt == AV_PIX_FMT_ABGR || | |
480 | + codec_context->pix_fmt == AV_PIX_FMT_BGRA ) | |
481 | *format = pick_image_format( codec_context->pix_fmt ); | |
482 | #if defined(FFUDIV) && (LIBSWSCALE_VERSION_INT >= ((2<<16)+(5<<8)+102)) | |
483 | else if ( codec_context->pix_fmt == AV_PIX_FMT_BAYER_RGGB16LE ) { | |
484 | @@ -1346,7 +1347,7 @@ static int producer_get_image( mlt_frame | |
485 | picture.linesize[1] = codec_context->width / 2; | |
486 | picture.linesize[2] = codec_context->width / 2; | |
487 | yuv_colorspace = convert_image( self, (AVFrame*) &picture, *buffer, | |
488 | - PIX_FMT_YUV420P, format, *width, *height, &alpha ); | |
489 | + AV_PIX_FMT_YUV420P, format, *width, *height, &alpha ); | |
490 | } | |
491 | else | |
492 | #endif | |
493 | @@ -1374,7 +1375,7 @@ static int producer_get_image( mlt_frame | |
494 | { | |
495 | // Read a packet | |
496 | if ( self->pkt.stream_index == self->video_index ) | |
497 | - av_free_packet( &self->pkt ); | |
498 | + av_packet_unref( &self->pkt ); | |
499 | av_init_packet( &self->pkt ); | |
500 | pthread_mutex_lock( &self->packets_mutex ); | |
501 | if ( mlt_deque_count( self->vpackets ) ) | |
502 | @@ -1539,7 +1540,7 @@ static int producer_get_image( mlt_frame | |
503 | VdpStatus status = vdp_surface_get_bits( render->surface, dest_format, planes, pitches ); | |
504 | if ( status == VDP_STATUS_OK ) | |
505 | { | |
506 | - yuv_colorspace = convert_image( self, self->video_frame, *buffer, PIX_FMT_YUV420P, | |
507 | + yuv_colorspace = convert_image( self, self->video_frame, *buffer, AV_PIX_FMT_YUV420P, | |
508 | format, *width, *height, &alpha ); | |
509 | mlt_properties_set_int( frame_properties, "colorspace", yuv_colorspace ); | |
510 | } | |
511 | @@ -1573,7 +1574,7 @@ static int producer_get_image( mlt_frame | |
512 | // Free packet data if not video and not live audio packet | |
513 | if ( self->pkt.stream_index != self->video_index && | |
514 | !( !self->seekable && self->pkt.stream_index == self->audio_index ) ) | |
515 | - av_free_packet( &self->pkt ); | |
516 | + av_packet_unref( &self->pkt ); | |
517 | } | |
518 | } | |
519 | ||
520 | @@ -2314,7 +2315,7 @@ static int producer_get_audio( mlt_frame | |
521 | } | |
522 | ||
523 | if ( self->seekable || index != self->video_index ) | |
524 | - av_free_packet( &pkt ); | |
525 | + av_packet_unref( &pkt ); | |
526 | ||
527 | } | |
528 | ||
529 | @@ -2600,7 +2601,7 @@ static void producer_avformat_close( pro | |
530 | mlt_log_debug( NULL, "producer_avformat_close\n" ); | |
531 | ||
532 | // Cleanup av contexts | |
533 | - av_free_packet( &self->pkt ); | |
534 | + av_packet_unref( &self->pkt ); | |
535 | av_free( self->video_frame ); | |
536 | av_free( self->audio_frame ); | |
537 | if ( self->is_mutex_init ) | |
538 | @@ -2648,7 +2649,7 @@ static void producer_avformat_close( pro | |
539 | { | |
540 | while ( ( pkt = mlt_deque_pop_back( self->apackets ) ) ) | |
541 | { | |
542 | - av_free_packet( pkt ); | |
543 | + av_packet_unref( pkt ); | |
544 | free( pkt ); | |
545 | } | |
546 | mlt_deque_close( self->apackets ); | |
547 | @@ -2658,7 +2659,7 @@ static void producer_avformat_close( pro | |
548 | { | |
549 | while ( ( pkt = mlt_deque_pop_back( self->vpackets ) ) ) | |
550 | { | |
551 | - av_free_packet( pkt ); | |
552 | + av_packet_unref( pkt ); | |
553 | free( pkt ); | |
554 | } | |
555 | mlt_deque_close( self->vpackets ); | |
556 | Index: mlt-0.9.8/src/modules/avformat/vdpau.c | |
557 | =================================================================== | |
558 | --- mlt-0.9.8.orig/src/modules/avformat/vdpau.c | |
559 | +++ mlt-0.9.8/src/modules/avformat/vdpau.c | |
560 | @@ -125,9 +125,9 @@ static int vdpau_init( producer_avformat | |
561 | return success; | |
562 | } | |
563 | ||
564 | -static enum PixelFormat vdpau_get_format( struct AVCodecContext *s, const enum PixelFormat *fmt ) | |
565 | +static enum AVPixelFormat vdpau_get_format( struct AVCodecContext *s, const enum AVPixelFormat *fmt ) | |
566 | { | |
567 | - return PIX_FMT_VDPAU_H264; | |
568 | + return AV_PIX_FMT_VDPAU_H264; | |
569 | } | |
570 | ||
571 | static int vdpau_get_buffer( AVCodecContext *codec_context, AVFrame *frame ) | |
572 | @@ -229,7 +229,7 @@ static int vdpau_decoder_init( producer_ | |
573 | self->video_codec->release_buffer = vdpau_release_buffer; | |
574 | self->video_codec->draw_horiz_band = vdpau_draw_horiz; | |
575 | self->video_codec->slice_flags = SLICE_FLAG_CODED_ORDER | SLICE_FLAG_ALLOW_FIELD; | |
576 | - self->video_codec->pix_fmt = PIX_FMT_VDPAU_H264; | |
577 | + self->video_codec->pix_fmt = AV_PIX_FMT_VDPAU_H264; | |
578 | ||
579 | VdpDecoderProfile profile = VDP_DECODER_PROFILE_H264_HIGH; | |
580 | uint32_t max_references = self->video_codec->refs; |